Merge "Remove ericsson-build3 from Jenkins"
author Trevor Bramwell <tbramwell@linuxfoundation.org>
Wed, 19 Aug 2020 22:33:10 +0000 (22:33 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Wed, 19 Aug 2020 22:33:10 +0000 (22:33 +0000)
116 files changed:
jjb/3rd_party_ci/create-apex-vms.sh [deleted file]
jjb/3rd_party_ci/detect-snapshot.sh [deleted file]
jjb/3rd_party_ci/download-netvirt-artifact.sh [deleted file]
jjb/3rd_party_ci/install-netvirt.sh [deleted file]
jjb/3rd_party_ci/odl-netvirt.yaml [deleted file]
jjb/3rd_party_ci/postprocess-netvirt.sh [deleted file]
jjb/airship/airship.yaml
jjb/airship/cntt.yaml
jjb/apex/apex-build.sh [deleted file]
jjb/apex/apex-deploy.sh [deleted file]
jjb/apex/apex-download-artifact.sh [deleted file]
jjb/apex/apex-fetch-logs.sh [deleted file]
jjb/apex/apex-fetch-snap-info.sh [deleted file]
jjb/apex/apex-functest-scenario.sh [deleted file]
jjb/apex/apex-iso-verify.sh [deleted file]
jjb/apex/apex-jjb-renderer.py [deleted file]
jjb/apex/apex-project-jobs.yaml [deleted file]
jjb/apex/apex-rtd-jobs.yaml [deleted file]
jjb/apex/apex-snapshot-create.sh [deleted file]
jjb/apex/apex-snapshot-deploy.sh [deleted file]
jjb/apex/apex-unit-test.sh [deleted file]
jjb/apex/apex-upload-artifact.sh [deleted file]
jjb/apex/apex-verify-jobs.yaml [deleted file]
jjb/apex/apex.yaml [deleted file]
jjb/apex/apex.yaml.j2 [deleted file]
jjb/apex/scenarios.yaml.hidden [deleted file]
jjb/apex/update-build-result.groovy [deleted file]
jjb/availability/availability-rtd-jobs.yaml [deleted file]
jjb/availability/availability.yaml [deleted file]
jjb/bottlenecks/bottlenecks-cleanup.sh [deleted file]
jjb/bottlenecks/bottlenecks-project-jobs.yaml [deleted file]
jjb/bottlenecks/bottlenecks-rtd-jobs.yaml [deleted file]
jjb/bottlenecks/bottlenecks-run-suite.sh [deleted file]
jjb/cirv/cirv-views.yaml [moved from jjb/ovno/ovno-views.yaml with 52% similarity]
jjb/cirv/cirv.yaml [new file with mode: 0644]
jjb/cperf/cirros-upload.yaml.ansible [deleted file]
jjb/cperf/cperf-ci-jobs.yaml [deleted file]
jjb/cperf/cperf-prepare-robot.sh [deleted file]
jjb/cperf/cperf-robot-netvirt-csit.sh [deleted file]
jjb/cperf/cperf-upload-logs-csit.sh [deleted file]
jjb/cperf/cperf-views.yaml [deleted file]
jjb/cperf/csit-clean.yaml.ansible [deleted file]
jjb/cperf/parse-node-yaml.py [deleted file]
jjb/daisy4nfv/daisy-daily-jobs.yaml [deleted file]
jjb/daisy4nfv/daisy-deploy.sh [deleted file]
jjb/daisy4nfv/daisy-project-jobs.yaml [deleted file]
jjb/daisy4nfv/daisy-rtd-jobs.yaml [deleted file]
jjb/daisy4nfv/daisy4nfv-basic.sh [deleted file]
jjb/daisy4nfv/daisy4nfv-build-kolla-image.sh [deleted file]
jjb/daisy4nfv/daisy4nfv-build.sh [deleted file]
jjb/daisy4nfv/daisy4nfv-download-artifact.sh [deleted file]
jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml [deleted file]
jjb/daisy4nfv/daisy4nfv-smoke-test.sh [deleted file]
jjb/daisy4nfv/daisy4nfv-upload-artifact.sh [deleted file]
jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml [deleted file]
jjb/fuel/fuel-daily-jobs.yaml
jjb/functest/functest-alpine.sh [deleted file]
jjb/functest/functest-cleanup.sh [deleted file]
jjb/functest/functest-daily-jobs.yaml [deleted file]
jjb/functest/functest-docker.yaml
jjb/functest/functest-env-presetup.sh [deleted file]
jjb/functest/functest-exit.sh [deleted file]
jjb/functest/functest-k8.sh [deleted file]
jjb/functest/functest-kubernetes-docker.yaml
jjb/functest/functest-kubernetes-pi.yaml [new file with mode: 0644]
jjb/functest/functest-kubernetes-project-jobs.yaml
jjb/functest/functest-kubernetes.yaml
jjb/functest/functest-pi.yaml [new file with mode: 0644]
jjb/functest/functest-project-jobs.yaml
jjb/functest/functest-suite.sh [deleted file]
jjb/functest/functest.ovn.yaml
jjb/functest/functest.yaml
jjb/functest/xtesting-docker.yaml
jjb/functest/xtesting-pi.yaml [new file with mode: 0644]
jjb/functest/xtesting-project-jobs.yaml
jjb/functest/xtesting.yaml
jjb/global/releng-macros.yaml
jjb/ipv6/ipv6-rtd-jobs.yaml [deleted file]
jjb/ipv6/ipv6-views.yaml [deleted file]
jjb/ipv6/ipv6.yaml [deleted file]
jjb/kuberef/kuberef-run-linting.sh [new file with mode: 0755]
jjb/kuberef/kuberef-verify-jobs.yaml [new file with mode: 0644]
jjb/kuberef/kuberef-views.yaml [new file with mode: 0644]
jjb/kvmfornfv/kvmfornfv-build.sh [deleted file]
jjb/kvmfornfv/kvmfornfv-download-artifact.sh [deleted file]
jjb/kvmfornfv/kvmfornfv-rtd-jobs.yaml [deleted file]
jjb/kvmfornfv/kvmfornfv-test.sh [deleted file]
jjb/kvmfornfv/kvmfornfv-upload-artifact.sh [deleted file]
jjb/kvmfornfv/kvmfornfv-views.yaml [deleted file]
jjb/kvmfornfv/kvmfornfv.yaml [deleted file]
jjb/laas/laas.yml
jjb/onosfw/onosfw-rtd-jobs.yaml [deleted file]
jjb/onosfw/onosfw-views.yaml [deleted file]
jjb/onosfw/onosfw.yaml [deleted file]
jjb/ovno/ovno-rtd-jobs.yaml [deleted file]
jjb/releng/opnfv-docker-arm.yaml
jjb/releng/opnfv-docker.yaml
jjb/releng/releng-release-jobs.yaml
jjb/sdnvpn/sdnvpn-rtd-jobs.yaml [deleted file]
jjb/sdnvpn/sdnvpn-views.yaml [deleted file]
jjb/sfc/sfc-project-jobs.yaml [deleted file]
jjb/sfc/sfc-rtd-jobs.yaml [deleted file]
jjb/sfc/sfc-views.yaml [deleted file]
jjb/snaps/snaps-rtd-jobs.yaml [deleted file]
jjb/snaps/snaps-verify-jobs.yaml [deleted file]
jjb/snaps/snaps-views.yaml [deleted file]
jjb/ves/ves-rtd-jobs.yaml [deleted file]
jjb/ves/ves-views.yaml [deleted file]
jjb/ves/ves.yaml [deleted file]
jjb/yardstick/yardstick-cleanup.sh [deleted file]
jjb/yardstick/yardstick-daily-jobs.yaml [deleted file]
jjb/yardstick/yardstick-daily.sh [deleted file]
jjb/yardstick/yardstick-get-k8s-conf.sh [deleted file]
jjb/yardstick/yardstick-project-jobs.yaml [deleted file]
jjb/yardstick/yardstick-rtd-jobs.yaml [deleted file]
releases/kali/functest.yaml [new file with mode: 0644]

diff --git a/jjb/3rd_party_ci/create-apex-vms.sh b/jjb/3rd_party_ci/create-apex-vms.sh
deleted file mode 100755 (executable)
index 0744ac8..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# clone opnfv sdnvpn repo
-git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-
-. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
-pushd $LIB
-./test_environment.sh --env-number $APEX_ENV_NUMBER --cloner-info $CLONER_INFO --snapshot-disks $SNAPSHOT_DISKS --vjump-hosts $VIRTUAL_JUMPHOSTS
-popd
diff --git a/jjb/3rd_party_ci/detect-snapshot.sh b/jjb/3rd_party_ci/detect-snapshot.sh
deleted file mode 100755 (executable)
index 77788aa..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "Detecting requested OpenStack branch and topology type in gerrit comment"
-parsed_comment=$(echo $GERRIT_EVENT_COMMENT_TEXT | sed -n 's/^.*check-opnfv\s*//p')
-parsed_comment=$(echo $parsed_comment | sed -n 's/\s*$//p')
-if [ ! -z "$parsed_comment" ]; then
-  if echo $parsed_comment | grep -E '^[a-z]+-(no)?ha'; then
-    os_version=${parsed_comment%%"-"*}
-    topo=${parsed_comment#*"-"}
-    echo "OS version detected in gerrit comment: ${os_version}"
-    echo "Topology type detected in gerrit comment: ${topo}"
-  else
-    echo "Invalid format given for scenario in gerrit comment: ${parsed_comment}...aborting"
-    exit 1
-  fi
-else
-  echo "No scenario given in gerrit comment, will use default (master OpenStack, noha)"
-  os_version='master'
-  topo='noha'
-fi
-
-echo "Writing variables to file"
-cat > detected_snapshot << EOI
-OS_VERSION=$os_version
-TOPOLOGY=$topo
-SNAP_CACHE=$HOME/snap_cache/$os_version/$topo
-EOI
diff --git a/jjb/3rd_party_ci/download-netvirt-artifact.sh b/jjb/3rd_party_ci/download-netvirt-artifact.sh
deleted file mode 100755 (executable)
index ac7f76c..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "Attempting to fetch the artifact location from ODL Jenkins"
-if [ "$ODL_BRANCH" != 'master' ]; then
-  DIST=$(echo ${ODL_BRANCH} | sed -rn 's#([a-zA-Z]+)/([a-zA-Z]+)#\2#p')
-  ODL_BRANCH=$(echo ${ODL_BRANCH} | sed -rn 's#([a-zA-Z]+)/([a-zA-Z]+)#\1%2F\2#p')
-else
-  DIST='neon'
-fi
-
-echo "ODL Distribution is ${DIST}"
-ODL_ZIP="karaf-SNAPSHOT.zip"
-CHANGE_DETAILS_URL="https://git.opendaylight.org/gerrit/changes/${GERRIT_PROJECT}~${ODL_BRANCH}~${GERRIT_CHANGE_ID}/detail"
-# due to limitation with the Jenkins Gerrit Trigger, we need to use Gerrit REST API to get the change details
-ODL_BUILD_JOB_NUM=$(curl --fail ${CHANGE_DETAILS_URL} | grep -Eo "${GERRIT_PROJECT}-distribution-check-${DIST}/[0-9]+" | tail -1 | grep -Eo [0-9]+)
-DISTRO_CHECK_CONSOLE_LOG="https://logs.opendaylight.org/releng/vex-yul-odl-jenkins-1/${GERRIT_PROJECT}-distribution-check-${DIST}/${ODL_BUILD_JOB_NUM}/console.log.gz"
-NETVIRT_ARTIFACT_URL=$(curl --fail --compressed ${DISTRO_CHECK_CONSOLE_LOG} | grep 'BUNDLE_URL' | cut -d = -f 2)
-
-echo -e "URL to artifact is\n\t$NETVIRT_ARTIFACT_URL"
-
-echo "Downloading the artifact. This could take time..."
-if ! wget -q -O $ODL_ZIP $NETVIRT_ARTIFACT_URL; then
-    echo "The artifact does not exist! Probably removed due to ODL Jenkins artifact retention policy."
-    echo "Use 'recheck' on the gerrit to get artifact rebuilt."
-    exit 1
-fi
-
-#TODO(trozet) remove this once odl-pipeline accepts zip files
-echo "Converting artifact zip to tar.gz"
-UNZIPPED_DIR=`dirname $(unzip -qql ${ODL_ZIP} | head -n1 | tr -s ' ' | cut -d' ' -f5-)`
-unzip ${ODL_ZIP}
-tar czf /tmp/${NETVIRT_ARTIFACT} ${UNZIPPED_DIR}
-
-echo "Download complete"
-ls -al /tmp/${NETVIRT_ARTIFACT}
diff --git a/jjb/3rd_party_ci/install-netvirt.sh b/jjb/3rd_party_ci/install-netvirt.sh
deleted file mode 100755 (executable)
index 232d60e..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-SNAP_CACHE=$HOME/snap_cache/$OS_VERSION/$TOPOLOGY
-# clone opnfv sdnvpn repo
-git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-
-if [ ! -f "/tmp/${NETVIRT_ARTIFACT}" ]; then
-  echo "ERROR: /tmp/${NETVIRT_ARTIFACT} specified as NetVirt Artifact, but file does not exist"
-  exit 1
-fi
-
-if [ ! -f "${SNAP_CACHE}/node.yaml" ]; then
-  echo "ERROR: node.yaml pod config missing in ${SNAP_CACHE}"
-  exit 1
-fi
-
-if [ ! -f "${SNAP_CACHE}/id_rsa" ]; then
-  echo "ERROR: id_rsa ssh creds missing in ${SNAP_CACHE}"
-  exit 1
-fi
-
-# TODO (trozet) snapshot should have already been unpacked into cache folder
-# but we really should check the cache here, and not use a single cache folder
-# for when we support multiple jobs on a single slave
-pushd sdnvpn/odl-pipeline/lib > /dev/null
-git fetch https://gerrit.opnfv.org/gerrit/sdnvpn refs/changes/17/59017/5 && git checkout FETCH_HEAD
-./odl_reinstaller.sh --pod-config ${SNAP_CACHE}/node.yaml \
-  --odl-artifact /tmp/${NETVIRT_ARTIFACT} --ssh-key-file ${SNAP_CACHE}/id_rsa
-popd > /dev/null
diff --git a/jjb/3rd_party_ci/odl-netvirt.yaml b/jjb/3rd_party_ci/odl-netvirt.yaml
deleted file mode 100644 (file)
index 15d2848..0000000
+++ /dev/null
@@ -1,287 +0,0 @@
----
-- project:
-    name: 'netvirt'
-
-    project: 'netvirt'
-
-    installer: 'netvirt'
-    #####################################
-    # branch definitions
-    #####################################
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - oxygen:
-          branch: 'stable/oxygen'
-          gs-pathname: ''
-          disabled: false
-    #####################################
-    # patch verification phases
-    #####################################
-    phase:
-      - 'create-apex-vms':
-          slave-label: 'apex-virtual-master'
-      - 'install-netvirt':
-          slave-label: 'apex-virtual-master'
-      - 'postprocess':
-          slave-label: 'apex-virtual-master'
-    #####################################
-    # jobs
-    #####################################
-    jobs:
-      - 'odl-netvirt-verify-virtual-{stream}'
-      - 'odl-netvirt-verify-virtual-{phase}-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
-    name: 'odl-netvirt-verify-virtual-{stream}'
-
-    project-type: multijob
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 5
-          max-per-node: 1
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-.*-promote.*'
-            - 'apex-virtual.*'
-            - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
-            - 'odl-netvirt-verify-virtual-install-netvirt-.*'
-            - 'functest-netvirt-virtual-suite-.*'
-            - 'odl-netvirt-verify-virtual-postprocess-.*'
-          blocking-level: 'NODE'
-    scm:
-      - git:
-          url: https://gerrit.opnfv.org/gerrit/apex
-          branches:
-            - 'origin/master'
-          timeout: 15
-          wipe-workspace: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - string:
-          name: NETVIRT_ARTIFACT
-          default: distribution-karaf.tar.gz
-      - 'apex-virtual-master-defaults'
-
-    triggers:
-      - gerrit:
-          server-name: 'git.opendaylight.org'
-          trigger-on:
-            # yamllint disable rule:line-length
-            # - comment-added-contains-event:
-            #     comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS'
-            # - comment-added-contains-event:
-            #     comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE'
-            # yamllint enable rule:line-length
-            - comment-added-contains-event:
-                comment-contains-value: 'check-opnfv'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '*'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-          readable-message: true
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - detect-opnfv-snapshot
-      - inject:
-          properties-file: detected_snapshot
-      - multijob:
-          name: create-apex-vms
-          condition: SUCCESSFUL
-          projects:
-            - name: 'odl-netvirt-verify-virtual-create-apex-vms-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
-                GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
-                GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
-                NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
-                APEX_ENV_NUMBER=$APEX_ENV_NUMBER
-                GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
-                TOPOLOGY=$TOPOLOGY
-                OS_VERSION=$OS_VERSION
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-      - multijob:
-          name: install-netvirt
-          condition: SUCCESSFUL
-          projects:
-            - name: 'odl-netvirt-verify-virtual-install-netvirt-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                ODL_BRANCH=$BRANCH
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
-                GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
-                GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
-                GERRIT_PROJECT=$GERRIT_PROJECT
-                NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
-                TOPOLOGY=$TOPOLOGY
-                OS_VERSION=$OS_VERSION
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-      - multijob:
-          name: csit
-          condition: ALWAYS
-          projects:
-            - name: cperf-apex-csit-master
-              predefined-parameters: |
-                ODL_BRANCH=$BRANCH
-                RC_FILE_PATH=$SNAP_CACHE/overcloudrc
-                NODE_FILE_PATH=$SNAP_CACHE/node.yaml
-                SSH_KEY_PATH=$SNAP_CACHE/id_rsa
-                ODL_CONTAINERIZED=false
-                OS_VERSION=$OS_VERSION
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: false
-      - multijob:
-          name: csit-collect-logs
-          condition: ALWAYS
-          projects:
-            - name: cperf-upload-logs-csit
-              predefined-parameters: |
-                ODL_BRANCH=$BRANCH
-                OS_VERSION=$OS_VERSION
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: false
-      - multijob:
-          name: apex-fetch-logs
-          condition: ALWAYS
-          projects:
-            - name: 'apex-fetch-logs-{stream}'
-              current-parameters: false
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: postprocess
-          condition: ALWAYS
-          projects:
-            - name: 'odl-netvirt-verify-virtual-postprocess-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
-                GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
-                GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
-                NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: false
-
-- job-template:
-    name: 'odl-netvirt-verify-virtual-{phase}-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 5
-          max-per-node: 1
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
-            - 'odl-netvirt-verify-virtual-install-netvirt-.*'
-            - 'functest-netvirt-virtual-suite-.*'
-            - 'odl-netvirt-verify-virtual-postprocess-.*'
-          blocking-level: 'NODE'
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-
-    scm:
-      - git:
-          url: https://gerrit.opnfv.org/gerrit/apex
-          branches:
-            - 'origin/master'
-          timeout: 15
-          wipe-workspace: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{slave-label}-defaults'
-      - '{installer}-defaults'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-odl-nofeature-noha'
-          description: 'Scenario to deploy and test'
-      - string:
-          name: GS_URL
-          default: artifacts.opnfv.org/apex
-          description: "URL to Google Storage with snapshot artifacts."
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - '{project}-verify-{phase}-builder'
-#####################################
-# builder macros
-#####################################
-- builder:
-    name: 'netvirt-verify-create-apex-vms-builder'
-    builders:
-      - shell:
-          !include-raw: ../apex/apex-snapshot-deploy.sh
-- builder:
-    name: 'netvirt-verify-install-netvirt-builder'
-    builders:
-      - shell:
-          !include-raw: ./download-netvirt-artifact.sh
-      - shell:
-          !include-raw: ./install-netvirt.sh
-- builder:
-    name: 'netvirt-verify-postprocess-builder'
-    builders:
-      - shell:
-          !include-raw: ./postprocess-netvirt.sh
-
-- builder:
-    name: 'detect-opnfv-snapshot'
-    builders:
-      - shell:
-          !include-raw: ./detect-snapshot.sh
diff --git a/jjb/3rd_party_ci/postprocess-netvirt.sh b/jjb/3rd_party_ci/postprocess-netvirt.sh
deleted file mode 100755 (executable)
index 7965142..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# clone opnfv sdnvpn repo
-git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
-pushd $LIB
-./post_process.sh
-popd
diff --git a/jjb/airship/airship.yaml b/jjb/airship/airship.yaml
index 0767159..f8cd2cd 100644 (file)
       - cinder_test
       - odl
       - tempest_smoke
+      - tempest_horizon
     jobs:
       - 'airship-{repo}-{container}-{tag}-{test}-run'
 
     <<: *airship-params
     container: 'functest-smoke'
     test:
-      - neutron-tempest-plugin-api
+      - tempest_neutron
       - tempest_cinder
       - tempest_keystone
+      - tempest_heat
       - rally_sanity
       - refstack_defcore
       - tempest_full
       - tempest_scenario
       - tempest_slow
       - patrole
-      - neutron_trunk
       - networking-bgpvpn
       - networking-sfc
-      - barbican
+      - tempest_barbican
     jobs:
       - 'airship-{repo}-{container}-{tag}-{test}-run'
 
 
 - job-template:
     name: 'airship-{tag}-daily'
+    disabled: true
     project-type: multijob
     triggers:
       - timed: '@daily'
               <<: *airship-jobs
             - name: 'airship-opnfv-functest-healthcheck-{tag}-tempest_smoke-run'
               <<: *airship-jobs
+            - name: 'airship-opnfv-functest-healthcheck-{tag}-tempest_horizon-run'
+              <<: *airship-jobs
       - multijob:
           name: opnfv/functest-smoke:{functest_tag}
           condition: ALWAYS
           projects:
-            - name: 'airship-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'airship-opnfv-functest-smoke-{tag}-tempest_neutron-run'
+              <<: *airship-jobs
+            - name: 'airship-opnfv-functest-smoke-{tag}-tempest_cinder-run'
               <<: *airship-jobs
-            - name: 'airship-opnfv-functest-smoke-{tag}-neutron-tempest_cinder-run'
+            - name: 'airship-opnfv-functest-smoke-{tag}-tempest_keystone-run'
               <<: *airship-jobs
-            - name: 'airship-opnfv-functest-smoke-{tag}-neutron-tempest_keystone-run'
+            - name: 'airship-opnfv-functest-smoke-{tag}-tempest_heat-run'
               <<: *airship-jobs
             - name: 'airship-opnfv-functest-smoke-{tag}-rally_sanity-run'
               <<: *airship-jobs
               <<: *airship-jobs
             - name: 'airship-opnfv-functest-smoke-{tag}-patrole-run'
               <<: *airship-jobs
-            - name: 'airship-opnfv-functest-smoke-{tag}-neutron_trunk-run'
-              <<: *airship-jobs
             - name: 'airship-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
               <<: *airship-jobs
             - name: 'airship-opnfv-functest-smoke-{tag}-networking-sfc-run'
               <<: *airship-jobs
-            - name: 'airship-opnfv-functest-smoke-{tag}-barbican-run'
+            - name: 'airship-opnfv-functest-smoke-{tag}-tempest_barbican-run'
               <<: *airship-jobs
       - multijob:
           name: opnfv/functest-benchmarking:{functest_tag}
 
 - job-template:
     name: 'airship-{tag}-gate'
+    disabled: true
     project-type: multijob
     triggers:
       - airship-patchset-created:
           projects:
             - name: 'airship-opnfv-functest-smoke-{tag}-tempest_scenario-run'
               <<: *airship-jobs
-            - name: 'airship-opnfv-functest-smoke-{tag}-neutron_trunk-run'
-              <<: *airship-jobs
             - name: 'airship-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
               <<: *airship-jobs
             - name: 'airship-opnfv-functest-smoke-{tag}-networking-sfc-run'
               <<: *airship-jobs
-            - name: 'airship-opnfv-functest-smoke-{tag}-barbican-run'
+            - name: 'airship-opnfv-functest-smoke-{tag}-tempest_barbican-run'
               <<: *airship-jobs
       - multijob:
           name: opnfv/functest-benchmarking:{functest_tag}
diff --git a/jjb/airship/cntt.yaml b/jjb/airship/cntt.yaml
index ea8e51d..adc91fd 100644 (file)
@@ -31,7 +31,7 @@
     parameters:
       - string:
           name: DEBUG
-          default: 'true'
+          default: 'false'
 
 - parameter:
     name: cntt-EXTERNAL_NETWORK
     jobs:
       - 'cntt-{repo}-{container}-{tag}-pull'
 
+- project:
+    name: 'cntt-opnfv-functest-benchmarking-cntt-pull'
+    <<: *cntt-params
+    container: 'functest-benchmarking-cntt'
+    jobs:
+      - 'cntt-{repo}-{container}-{tag}-pull'
+
 - project:
     name: 'cntt-opnfv-functest-vnf-pull'
     <<: *cntt-params
     jobs:
       - 'cntt-{repo}-{container}-{tag}-rmi'
 
+- project:
+    name: 'cntt-opnfv-functest-benchmarking-cntt-rmi'
+    <<: *cntt-params
+    container: 'functest-benchmarking-cntt'
+    jobs:
+      - 'cntt-{repo}-{container}-{tag}-rmi'
+
 - project:
     name: 'cntt-opnfv-functest-vnf-rmi'
     <<: *cntt-params
       - cntt-build_tag:
           build_tag: ''
       - cntt-DEBUG:
-          DEBUG: 'true'
+          DEBUG: 'false'
       - cntt-EXTERNAL_NETWORK:
           EXTERNAL_NETWORK: public
     builders:
       - cinder_test
       - odl
       - tempest_smoke
+      - tempest_horizon
     jobs:
       - 'cntt-{repo}-{container}-{tag}-{test}-run'
 
     <<: *cntt-params
     container: 'functest-smoke'
     test:
-      - neutron-tempest-plugin-api
+      - tempest_neutron
       - tempest_cinder
       - tempest_keystone
+      - tempest_heat
       - rally_sanity
       - refstack_defcore
       - tempest_full
       - tempest_scenario
       - tempest_slow
       - patrole
-      - neutron_trunk
       - networking-bgpvpn
       - networking-sfc
-      - barbican
+      - tempest_barbican
     jobs:
       - 'cntt-{repo}-{container}-{tag}-{test}-run'
 
     <<: *cntt-params
     container: 'functest-smoke-cntt'
     test:
-      - neutron-tempest-plugin-api
-      - tempest_cinder
-      - tempest_keystone
-      - rally_sanity
-      - tempest_full
-      - tempest_scenario
-      - tempest_slow
+      - tempest_neutron_cntt
+      - tempest_cinder_cntt
+      - tempest_keystone_cntt
+      - tempest_heat_cntt
+      - rally_sanity_cntt
+      - tempest_full_cntt
+      - tempest_scenario_cntt
+      - tempest_slow_cntt
     jobs:
       - 'cntt-{repo}-{container}-{tag}-{test}-run'
 
     jobs:
       - 'cntt-{repo}-{container}-{tag}-{test}-run'
 
+- project:
+    name: 'cntt-opnfv-functest-benchmarking-cntt'
+    <<: *cntt-params
+    container: 'functest-benchmarking-cntt'
+    test:
+      - rally_full_cntt
+      - rally_jobs_cntt
+    jobs:
+      - 'cntt-{repo}-{container}-{tag}-{test}-run'
+
 - project:
     name: 'cntt-opnfv-functest-vnf'
     <<: *cntt-params
           fi
           sudo docker run --rm \
             -e S3_ENDPOINT_URL=https://storage.googleapis.com \
-            -e S3_DST_URL=s3://artifacts.opnfv.org/airship \
-            -e HTTP_DST_URL=http://artifacts.opnfv.org/airship/ \
+            -e S3_DST_URL=s3://artifacts.opnfv.org/cntt \
+            -e HTTP_DST_URL=http://artifacts.opnfv.org/cntt/ \
             -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
             -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
             -e BUILD_TAG=$BUILD_TAG \
       - cntt-branch:
           branch: 'master'
       - cntt-DEBUG:
-          DEBUG: 'true'
+          DEBUG: 'false'
       - cntt-EXTERNAL_NETWORK:
           EXTERNAL_NETWORK: public
     properties:
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-benchmarking-{tag}-rmi'
               <<: *cntt-jobs
+            - name: 'cntt-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+              <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-vnf-{tag}-rmi'
               <<: *cntt-jobs
       - multijob:
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-benchmarking-{tag}-pull'
               <<: *cntt-jobs
+            - name: 'cntt-opnfv-functest-benchmarking-cntt-{tag}-pull'
+              <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-vnf-{tag}-pull'
               <<: *cntt-jobs
       - multijob:
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-healthcheck-{tag}-tempest_smoke-run'
               <<: *cntt-jobs
+            - name: 'cntt-opnfv-functest-healthcheck-{tag}-tempest_horizon-run'
+              <<: *cntt-jobs
       - multijob:
           name: opnfv/functest-smoke:{functest_tag}
           condition: ALWAYS
           projects:
-            - name: 'cntt-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_neutron-run'
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_cinder-run'
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_keystone-run'
               <<: *cntt-jobs
+            - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_heat-run'
+              <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-smoke-{tag}-rally_sanity-run'
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-smoke-{tag}-refstack_defcore-run'
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-smoke-{tag}-patrole-run'
               <<: *cntt-jobs
-            - name: 'cntt-opnfv-functest-smoke-{tag}-neutron_trunk-run'
-              <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-smoke-{tag}-networking-sfc-run'
               <<: *cntt-jobs
-            - name: 'cntt-opnfv-functest-smoke-{tag}-barbican-run'
+            - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_barbican-run'
               <<: *cntt-jobs
       - multijob:
           name: opnfv/functest-smoke-cntt:{tag}
           condition: ALWAYS
           projects:
-            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
               <<: *cntt-jobs
-            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
+            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
               <<: *cntt-jobs
-            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
+            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
               <<: *cntt-jobs
-            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
+            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
               <<: *cntt-jobs
-            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
+            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
               <<: *cntt-jobs
-            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
+            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
               <<: *cntt-jobs
-            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
+            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
+              <<: *cntt-jobs
+            - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
               <<: *cntt-jobs
       - multijob:
           name: opnfv/functest-benchmarking:{functest_tag}
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-benchmarking-{tag}-shaker-run'
               <<: *cntt-jobs
+      - multijob:
+          name: opnfv/functest-benchmarking-cntt:{functest_tag}
+          condition: ALWAYS
+          projects:
+            - name: 'cntt-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+              <<: *cntt-jobs
+            - name: 'cntt-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+              <<: *cntt-jobs
       - multijob:
           name: opnfv/functest-vnf:{functest_tag}
           condition: ALWAYS
 
 - job-template:
     name: 'cntt-{tag}-gate'
-    project-type: multijob
     disabled: true
+    project-type: multijob
     triggers:
       - cntt-patchset-created:
           branch: '{branch}'
       - cntt-build_tag:
           build_tag: ''
       - cntt-DEBUG:
-          DEBUG: 'true'
+          DEBUG: 'false'
       - cntt-EXTERNAL_NETWORK:
           EXTERNAL_NETWORK: public
     properties:
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-benchmarking-{tag}-rmi'
               <<: *cntt-jobs
+            - name: 'cntt-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+              <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-vnf-{tag}-rmi'
               <<: *cntt-jobs
       - multijob:
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-benchmarking-{tag}-pull'
               <<: *cntt-jobs
+            - name: 'cntt-opnfv-functest-benchmarking-cntt-{tag}-pull'
+              <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-vnf-{tag}-pull'
               <<: *cntt-jobs
       - multijob:
           projects:
             - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_scenario-run'
               <<: *cntt-jobs
-            - name: 'cntt-opnfv-functest-smoke-{tag}-neutron_trunk-run'
-              <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
               <<: *cntt-jobs
             - name: 'cntt-opnfv-functest-smoke-{tag}-networking-sfc-run'
               <<: *cntt-jobs
-            - name: 'cntt-opnfv-functest-smoke-{tag}-barbican-run'
+            - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_barbican-run'
               <<: *cntt-jobs
       - multijob:
           name: opnfv/functest-benchmarking:{functest_tag}
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
deleted file mode 100755 (executable)
index 618d181..0000000
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-# log info to console
-echo "Starting the build of Apex using OpenStack Master packages. This will take some time..."
-echo "---------------------------------------------------------------------------------------"
-echo
-# create the cache directory if it doesn't exist
-[[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
-# set OPNFV_ARTIFACT_VERSION
-if echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
-  GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
-  export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
-  if [[ "$BRANCH" != 'stable/fraser' ]]; then
-    # build rpm
-    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
-  else
-    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
-  fi
-elif echo $BUILD_TAG | grep "csit" 1> /dev/null; then
-  export OPNFV_ARTIFACT_VERSION=csit${BUILD_NUMBER}
-  export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
-elif [ "$ARTIFACT_VERSION" == "daily" ]; then
-  export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
-  if [[ "$BRANCH" != 'stable/fraser' ]]; then
-    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
-  else
-    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
-  fi
-else
-  export OPNFV_ARTIFACT_VERSION=${ARTIFACT_VERSION}
-  if [[ "$BRANCH" != 'stable/fraser' ]]; then
-    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
-  else
-    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
-  fi
-fi
-
-# Temporary hack until we fix apex build script
-BUILD_DIRECTORY=${WORKSPACE}/build
-
-# start the build
-pushd ${BUILD_DIRECTORY}
-make clean
-popd
-export PYTHONPATH=${WORKSPACE}
-python3 apex/build.py $BUILD_ARGS
-RPM_VERSION=$(grep Version: $WORKSPACE/build/rpm_specs/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
-# list the contents of BUILD_OUTPUT directory
-echo "Build Directory is ${BUILD_DIRECTORY}/../.build"
-echo "Build Directory Contents:"
-echo "-------------------------"
-ls -al ${BUILD_DIRECTORY}/../.build
-
-# list the contents of CACHE directory
-echo "Cache Directory is ${CACHE_DIRECTORY}"
-echo "Cache Directory Contents:"
-echo "-------------------------"
-ls -al $CACHE_DIRECTORY
-
-if [[ "$BUILD_ARGS" =~ '--iso' && "$BRANCH" == 'stable/fraser' ]]; then
-  mkdir -p /tmp/apex-iso/
-  rm -f /tmp/apex-iso/*.iso
-  cp -f $BUILD_DIRECTORY/../.build/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso /tmp/apex-iso/
-fi
-
-if ! echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
-  echo "Writing opnfv.properties file"
-  if [ "$BRANCH" == 'stable/fraser' ]; then
-    # save information regarding artifact into file
-    (
-      echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
-      echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
-      echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
-      echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-      echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
-      echo "OPNFV_SRPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.src.rpm"
-      echo "OPNFV_RPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.noarch.rpm"
-      echo "OPNFV_RPM_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
-      echo "OPNFV_BUILD_URL=$BUILD_URL"
-    ) > $WORKSPACE/opnfv.properties
-  else
-    # save information regarding artifact into file
-    # we only generate the python package for master
-    (
-      echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
-      echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
-      echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
-      echo "OPNFV_SRPM_URL=$GS_URL/python34-opnfv-apex-$RPM_VERSION.src.rpm"
-      echo "OPNFV_RPM_URL=$GS_URL/python34-opnfv-apex-$RPM_VERSION.noarch.rpm"
-      echo "OPNFV_RPM_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/noarch/python34-opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
-      echo "OPNFV_BUILD_URL=$BUILD_URL"
-    ) > $WORKSPACE/opnfv.properties
-  fi
-fi
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
deleted file mode 100755 (executable)
index 09d6ca6..0000000
+++ /dev/null
@@ -1,213 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-IPV6_FLAG=False
-ALLINONE_FLAG=False
-CSIT_ENV_FLAG=False
-FUNCTEST_ENV_FLAG=False
-
-# log info to console
-echo "Starting the Apex deployment."
-echo "--------------------------------------------------------"
-echo
-
-if [ -z ${DEPLOY_SCENARIO+x} ]; then
-  echo "Deploy scenario not set!"
-  exit 1
-else
-  echo "Deploy scenario: ${DEPLOY_SCENARIO}"
-fi
-
-# Dev or RPM/ISO build
-if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
-  # Settings for deploying from git workspace
-  DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
-  NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
-  CLEAN_CMD="opnfv-clean"
-  # if we are using master, then we are downloading/caching upstream images
-  # we want to use that built in mechanism to avoid re-downloading every job
-  # so we use a dedicated folder to hold the upstream cache
-  UPSTREAM_CACHE=$HOME/upstream_cache
-  if [[ "$BRANCH" != 'stable/fraser' ]]; then
-    mkdir -p ${UPSTREAM_CACHE}
-    RESOURCES=$UPSTREAM_CACHE
-  else
-    RESOURCES="${WORKSPACE}/.build/"
-  fi
-  CONFIG="${WORKSPACE}/build"
-  BASE=$CONFIG
-  IMAGES=$RESOURCES
-  LIB="${WORKSPACE}/lib"
-  DEPLOY_CMD="opnfv-deploy --image-dir ${RESOURCES}"
-  # Ensure artifacts were downloaded and extracted correctly
-  # TODO(trozet) add verification here
-
-  # Install dev build
-  sudo rm -rf /tmp/.build
-  mv -f .build /tmp/
-  sudo pip3 install --upgrade --force-reinstall .
-  mv -f /tmp/.build ${WORKSPACE}/
-else
-  DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/"
-  NETWORK_SETTINGS_DIR="/etc/opnfv-apex/"
-  CLEAN_CMD="opnfv-clean"
-  # set to use different directory here because upon RPM removal this
-  # directory will be wiped in daily
-  UPSTREAM_CACHE=$HOME/upstream_cache
-  if [[ "$BRANCH" != 'stable/fraser' ]]; then
-    mkdir -p ${UPSTREAM_CACHE}
-    RESOURCES=$UPSTREAM_CACHE
-  else
-    RESOURCES="/var/opt/opnfv/images"
-  fi
-  DEPLOY_CMD="opnfv-deploy --image-dir ${RESOURCES}"
-  CONFIG="/var/opt/opnfv"
-  BASE=$CONFIG
-  IMAGES=$RESOURCES
-  LIB="/var/opt/opnfv/lib"
-  sudo mkdir -p /var/log/apex
-  sudo chmod 777 /var/log/apex
-  cd /var/log/apex
-fi
-
-# Install Dependencies
-# Make sure python34 dependencies are installed
-dependencies="epel-release python34 python34-devel libvirt-devel python34-pip \
-ansible python34-PyYAML python34-jinja2 python34-setuptools python-tox ansible"
-
-for dep_pkg in $dependencies; do
-  if ! rpm -q ${dep_pkg} > /dev/null; then
-    if ! sudo yum install -y ${dep_pkg}; then
-      echo "Failed to install ${dep_pkg}"
-      exit 1
-    fi
-  fi
-done
-
-if [[ "$JOB_NAME" =~ "virtual" ]]; then
-  # Make sure ipxe-roms-qemu package is updated to latest.
-  # This package is needed for multi virtio nic PXE boot in virtual environment.
-  sudo yum update -y ipxe-roms-qemu
-fi
-
-if [ "$OPNFV_CLEAN" == 'yes' ]; then
-  if sudo test -e '/root/inventory/pod_settings.yaml'; then
-    clean_opts='-i /root/inventory/pod_settings.yaml'
-  else
-    clean_opts=''
-  fi
-
-  sudo ${CLEAN_CMD} ${clean_opts}
-fi
-
-# These are add-ons to regular scenarios where you can do like
-# os-nosdn-nofeature-noha-ipv6, or os-nosdn-nofeature-noha-allinone
-if echo ${DEPLOY_SCENARIO} | grep ipv6; then
-  IPV6_FLAG=True
-  DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} |  sed 's/-ipv6//')
-  echo "INFO: IPV6 Enabled"
-fi
-
-if echo ${DEPLOY_SCENARIO} | grep allinone; then
-  ALLINONE_FLAG=True
-  DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} |  sed 's/-allinone//')
-  echo "INFO: All in one deployment detected"
-fi
-
-if echo ${DEPLOY_SCENARIO} | grep csit; then
-  CSIT_ENV_FLAG=True
-  DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} |  sed 's/-csit//')
-  echo "INFO: CSIT env requested in deploy scenario"
-elif echo ${DEPLOY_SCENARIO} | grep functest; then
-  FUNCTEST_ENV_FLAG=True
-  DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} |  sed 's/-functest//')
-  echo "INFO: Functest env requested in deploy scenario"
-fi
-
-echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
-DEPLOY_FILE="${DEPLOY_SETTINGS_DIR}/${DEPLOY_SCENARIO}.yaml"
-
-if [ ! -e "$DEPLOY_FILE" ]; then
-  echo "ERROR: Required settings file missing: Deploy settings file ${DEPLOY_FILE}"
-fi
-
-if [[ "$JOB_NAME" =~ "virtual" ]]; then
-  # settings for virtual deployment
-  DEPLOY_CMD="${DEPLOY_CMD} -v"
-  if [[ "${DEPLOY_SCENARIO}" =~ fdio|ovs ]]; then
-    DEPLOY_CMD="${DEPLOY_CMD} --virtual-default-ram 12 --virtual-compute-ram 7"
-  fi
-  if [[ "$ALLINONE_FLAG" == "True" ]]; then
-    DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 0"
-  elif [[ "$PROMOTE" == "True" ]]; then
-    DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 2"
-  fi
-
-  if [[ "$FUNCTEST_ENV_FLAG" == "True"  || "$CSIT_ENV_FLAG" == "True" ]]; then
-    if [[ "$CSIT_ENV_FLAG" == "True" ]]; then
-      ENV_TYPE="csit"
-    else
-      ENV_TYPE="functest"
-    fi
-    if [ -z ${OS_VERSION+x} ]; then
-      echo "INFO: OS_VERSION not passed to deploy, detecting based on branch and scenario"
-      case $BRANCH in
-        master)
-          if [[ "$DEPLOY_SCENARIO" =~ "rocky" ]]; then
-            OS_VERSION=rocky
-          else
-            OS_VERSION=master
-          fi
-          ;;
-        *gambia)
-          OS_VERSION=queens
-          ;;
-        *)
-          echo "Unable to detection OS_VERSION, aborting"
-          exit 1
-          ;;
-      esac
-    fi
-    if [[ "$OS_VERSION" != "master" ]]; then
-      SNAP_ENV="${ENV_TYPE}-${OS_VERSION}-environment.yaml"
-    else
-      SNAP_ENV="${ENV_TYPE}-environment.yaml"
-    fi
-    DEPLOY_CMD="${DEPLOY_CMD} -e ${SNAP_ENV}"
-  fi
-else
-  # settings for bare metal deployment
-  NETWORK_SETTINGS_DIR="/root/network"
-  INVENTORY_FILE="/root/inventory/pod_settings.yaml"
-
-  if ! sudo test -e "$INVENTORY_FILE"; then
-    echo "ERROR: Required settings file missing: Inventory settings file ${INVENTORY_FILE}"
-    exit 1
-  fi
-  # include inventory file for bare metal deployment
-  DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
-fi
-
-if [ "$IPV6_FLAG" == "True" ]; then
-  NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
-elif [[ "$CSIT_ENV_FLAG" == "True"  || "$FUNCTEST_ENV_FLAG" == "True" ]]; then
-  # We use csit network settings which is single network for snapshots
-  NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_csit.yaml"
-else
-  NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
-fi
-
-# Check that network settings file exists
-if ! sudo test -e "$NETWORK_FILE"; then
-  echo "ERROR: Required settings file missing: Network Settings file ${NETWORK_FILE}"
-  exit 1
-fi
-
-# start deployment
-sudo ${DEPLOY_CMD} -d ${DEPLOY_FILE} -n ${NETWORK_FILE} --debug
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-download-artifact.sh b/jjb/apex/apex-download-artifact.sh
deleted file mode 100755 (executable)
index bc3311d..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Downloading the Apex artifact. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-[[ -d $BUILD_DIRECTORY ]] || mkdir -p $BUILD_DIRECTORY
-
-if [ -z "$DEPLOY_SCENARIO" ]; then
-  echo "Deploy scenario not set!"
-  exit 1
-else
-  echo "Deploy scenario: ${DEPLOY_SCENARIO}"
-fi
-
-if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
-  if [[ "$BRANCH" != 'stable/fraser' ]]; then
-    echo "Skipping download of artifacts for master/gambia branch"
-  else
-    # dev build
-    GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
-    export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
-    # get build artifact
-    pushd ${BUILD_DIRECTORY} > /dev/null
-    echo "Downloading packaged dev build: apex-${OPNFV_ARTIFACT_VERSION}.tar.gz"
-    curl --fail -s -o $BUILD_DIRECTORY/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz http://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
-    tar -xvf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
-    popd > /dev/null
-  fi
-else
-  echo "Will use RPMs..."
-
-  # Must be RPMs/ISO
-  echo "Downloading latest properties file"
-
-  # get the properties file in order to get info regarding artifacts
-  curl --fail -s -o $BUILD_DIRECTORY/opnfv.properties http://$GS_URL/latest.properties
-
-  # source the file so we get OPNFV vars
-  source $BUILD_DIRECTORY/opnfv.properties
-
-  RPM_INSTALL_PATH=$(echo "http://"$OPNFV_RPM_URL | sed 's/\/'"$(basename $OPNFV_RPM_URL)"'//')
-  RPM_LIST=$(basename $OPNFV_RPM_URL)
-  # find version of RPM
-  VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
-  if [ "$BRANCH" == 'stable/fraser' ]; then
-    # build RPM List which already includes base Apex RPM
-    RPM_LIST+=" opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
-    RPM_LIST+=" python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"
-  fi
-
-  # remove old / install new RPMs
-  if rpm -q python34-opnfv-apex > /dev/null; then
-    INSTALLED_RPMS=$(rpm -qa | grep apex)
-    if [ -n "$INSTALLED_RPMS" ]; then
-      sudo yum remove -y ${INSTALLED_RPMS}
-    fi
-  fi
-  # Create an rpms dir on slave
-  mkdir -p ~/apex_rpms
-  pushd ~/apex_rpms
-  # Remove older rpms which do not match this version
-  find . ! -name "*${VERSION_EXTENSION}.noarch.rpm" -type f -exec rm -f {} +
-  # Download RPM only if changed on server
-  for rpm in $RPM_LIST; do
-    wget -N ${RPM_INSTALL_PATH}/${rpm}
-  done
-  if ! sudo yum install -y $RPM_LIST; then
-    echo "Unable to install new RPMs: $RPM_LIST"
-    exit 1
-  fi
-  popd
-fi
-
-# TODO: Uncomment these lines to verify SHA512SUMs once the sums are
-# fixed.
-# echo "$OPNFV_ARTIFACT_SHA512SUM $BUILD_DIRECTORY/apex.iso" | sha512sum -c
-# echo "$OPNFV_RPM_SHA512SUM $BUILD_DIRECTORY/$(basename $OPNFV_RPM_URL)" | sha512sum -c
-
-# list the files
-ls -al $BUILD_DIRECTORY
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-fetch-logs.sh b/jjb/apex/apex-fetch-logs.sh
deleted file mode 100755 (executable)
index bdb2252..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Fetching logs from overcloud. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-if sudo opnfv-pyutil --fetch-logs; then
-  LOG_LOCATION=$(cat apex_util.log | grep 'Log retrieval complete' | grep -Eo '/tmp/.+$')
-  if [ -z "$LOG_LOCATION" ]; then
-      echo "WARNING: Unable to determine log location.  Logs will not be uploaded"
-      exit 0
-  else
-    sudo chmod 777 ${LOG_LOCATION}
-    UPLOAD_LOCATION="${GS_URL}/logs/${JOB_NAME}/${BUILD_NUMBER}/"
-    gsutil -m cp -r ${LOG_LOCATION} gs://${UPLOAD_LOCATION} > gsutil.latest_logs.log
-    echo -e "Logs available at: \n$(find ${LOG_LOCATION} -type f | sed -n 's#^/tmp/#http://'$UPLOAD_LOCATION'#p')"
-  fi
-else
-  echo "WARNING: Log retrieval failed.  No logs will be uploaded"
-  exit 0
-fi
diff --git a/jjb/apex/apex-fetch-snap-info.sh b/jjb/apex/apex-fetch-snap-info.sh
deleted file mode 100755 (executable)
index 3324aca..0000000
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env bash
-
-##############################################################################
-# Copyright (c) 2018 Tim Rozet (Red Hat) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "Fetching overcloudrc, ssh key, and node.yaml from deployment..."
-
-SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
-
-tmp_dir=/tmp/snap
-rm -rf ${tmp_dir}
-mkdir -p ${tmp_dir}
-
-# TODO(trozet) remove this after fix goes in for tripleo_inspector to copy these
-pushd ${tmp_dir} > /dev/null
-echo "Copying overcloudrc and ssh key from Undercloud..."
-# Store overcloudrc
-UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
-sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:overcloudrc ./
-# Copy out ssh key of stack from undercloud
-sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:.ssh/id_rsa ./
-sudo chmod 0600 id_rsa
-popd > /dev/null
-
-echo "Gathering introspection information"
-git clone https://gerrit.opnfv.org/gerrit/sdnvpn.git
-pushd sdnvpn/odl-pipeline/lib > /dev/null
-sudo ./tripleo_introspector.sh --out-file ${tmp_dir}/node.yaml
-popd > /dev/null
-sudo rm -rf sdnvpn
-
-sudo chown jenkins-ci:jenkins-ci ${tmp_dir}/*
-
-ls -lrt ${tmp_dir}
-
-echo "Fetch complete"
diff --git a/jjb/apex/apex-functest-scenario.sh b/jjb/apex/apex-functest-scenario.sh
deleted file mode 100644 (file)
index f1cb74e..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-features=$(echo $DEPLOY_SCENARIO | sed -r -n 's/os-.+-(.+)-(noha|ha)/\1/p')
-if [ "$features" == 'rocky' ]; then
-  functest_scenario=$(echo $DEPLOY_SCENARIO | sed -r -n 's/(os-.+?)-rocky-(noha|ha)/\1-nofeature-\2/p')
-  echo "DOCKER_TAG=hunter" > functest_scenario
-elif [[ "$features" =~ 'rocky' ]]; then
-  functest_scenario=$(echo $DEPLOY_SCENARIO | sed -r -n 's/(os-.+?)-(.+)_rocky-(noha|ha)/\1-\2-\3/p')
-  echo "DOCKER_TAG=hunter" > functest_scenario
-else
-  functest_scenario=$(echo $DEPLOY_SCENARIO | sed -r -n 's/-(noha|ha).*/-\1/p')
-  echo "DOCKER_TAG=$([[ ${BRANCH##*/} == "master" ]] && \
-    echo "latest" || echo ${BRANCH##*/})" > functest_scenario
-fi
-echo "DEPLOY_SCENARIO=$functest_scenario" >> functest_scenario
diff --git a/jjb/apex/apex-iso-verify.sh b/jjb/apex/apex-iso-verify.sh
deleted file mode 100755 (executable)
index c29d7cb..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Starting the Apex iso verify."
-echo "--------------------------------------------------------"
-echo
-
-if [ "$BRANCH" != 'stable/fraser' ]; then
-  echo "Skipping Apex iso verify for ${BRANCH} branch"
-  exit 0
-fi
-
-# Must be RPMs/ISO
-echo "Downloading latest properties file"
-
-# get the properties file in order to get info regarding artifacts
-curl --fail -s -o opnfv.properties http://$GS_URL/latest.properties
-
-# source the file so we get OPNFV vars
-source opnfv.properties
-
-if ! rpm -q virt-install > /dev/null; then
-  sudo yum -y install virt-install
-fi
-
-# define a clean function
-rm_apex_iso_verify () {
-if sudo virsh list --all | grep apex-iso-verify | grep running; then
-    sudo virsh destroy apex-iso-verify
-fi
-if sudo virsh list --all | grep apex-iso-verify; then
-    sudo virsh undefine apex-iso-verify
-fi
-}
-
-# Make sure a pre-existing iso-verify isn't there
-rm_apex_iso_verify
-
-#make sure there is not an existing console log file for the VM
-sudo rm -f /var/log/libvirt/qemu/apex-iso-verify-console.log
-
-# run an install from the iso
-# This streams a serial console to tcp port 3737 on localhost
-sudo virt-install -n apex-iso-verify -r 4096 --vcpus 4 --os-variant=rhel7 \
- --accelerate -v --noautoconsole \
- --disk path=/var/lib/libvirt/images/apex-iso-verify.qcow2,size=30,format=qcow2 \
- -l /tmp/apex-iso/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso \
- --extra-args 'console=ttyS0 console=ttyS0,115200n8 serial inst.ks=file:/iso-verify.ks inst.stage2=hd:LABEL=OPNFV\x20CentOS\x207\x20x86_64:/' \
- --initrd-inject ci/iso-verify.ks \
- --serial file,path=/var/log/libvirt/qemu/apex-iso-verify-console.log
-
-echo "Waiting for install to finish..."
-sleep 10
-end_time=$(($SECONDS+1500))
-while ! [[ `sudo tail -n1 /var/log/libvirt/qemu/apex-iso-verify-console.log` =~ 'Power down' ]]; do
-  if [ $SECONDS -gt $end_time ] || ! sudo virsh list --all | grep apex-iso-verify | grep running > /dev/null; then
-    sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
-    sudo virsh list --all
-    echo "Error: Failed to find power down message after install"
-    exit 1
-  fi
-  sleep 10
-done
-
-sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
-
-# clean up
-rm_apex_iso_verify
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-jjb-renderer.py b/jjb/apex/apex-jjb-renderer.py
deleted file mode 100644 (file)
index 58dc4ff..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Tim Rozet (trozet@redhat.com) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import pprint
-import yaml
-from jinja2 import Environment
-from jinja2 import FileSystemLoader
-
-
-def render_jjb():
-    """Render JJB output from scenarios.yaml.hidden file and jinja
-    template"""
-
-    gspathname = dict()
-    branch = dict()
-    build_slave = dict()
-    env = Environment(loader=FileSystemLoader('./'), autoescape=True,
-                      keep_trailing_newline=True)
-
-    with open('scenarios.yaml.hidden') as _:
-        scenarios = yaml.safe_load(_)
-
-    template = env.get_template('apex.yaml.j2')
-
-    print("Scenarios are: ")
-    pprint.pprint(scenarios)
-
-    for stream in scenarios:
-        if stream == 'master':
-            gspathname['master'] = ''
-            branch[stream] = stream
-        else:
-            gspathname[stream] = '/' + stream
-            branch[stream] = 'stable/' + stream
-        build_slave[stream] = 'apex-baremetal-{}'.format(stream)
-
-    output = template.render(scenarios=scenarios, gspathname=gspathname,
-                             branch=branch, build_slave=build_slave)
-
-    with open('./apex.yaml', 'w') as fh:
-        fh.write(output)
-
-
-if __name__ == "__main__":
-    render_jjb()
diff --git a/jjb/apex/apex-project-jobs.yaml b/jjb/apex/apex-project-jobs.yaml
deleted file mode 100644 (file)
index c581c59..0000000
+++ /dev/null
@@ -1,146 +0,0 @@
----
-- project:
-    name: 'apex-project-jobs'
-    project: 'apex'
-
-    stream:
-      - master: &master
-          branch: 'master'
-          gs-pathname: ''
-          concurrent-builds: 3
-          disabled: false
-      - hunter: &hunter
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          concurrent-builds: 3
-          disabled: false
-      - gambia: &gambia
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          concurrent-builds: 3
-          disabled: false
-      - fraser: &fraser
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          concurrent-builds: 3
-          disabled: false
-      - euphrates: &euphrates
-          branch: 'stable/euphrates'
-          gs-pathname: '/euphrates'
-          concurrent-builds: 3
-          disabled: true
-      - danube: &danube
-          branch: 'stable/danube'
-          gs-pathname: '/danube'
-          concurrent-builds: 1
-          disabled: true
-
-    jobs:
-      - 'apex-build-{stream}'
-      - 'apex-verify-iso-{stream}'
-
-# Build phase
-- job-template:
-    name: 'apex-build-{stream}'
-
-    # Job template for builds
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
-    node: 'apex-build-master'
-
-    disabled: false
-
-    concurrent: true
-
-    parameters:
-      - '{project}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-
-    scm:
-      - git-scm-gerrit
-
-    wrappers:
-      - timeout:
-          timeout: 150
-          fail: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          max-per-node: '{concurrent-builds}'
-          max-total: 10
-          option: 'project'
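-      # Do not run builds while an ISO verify job is active on the same node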
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify-iso-{stream}'
-
-    builders:
-      - 'apex-build'
-      - inject:
-          properties-content: ARTIFACT_TYPE=rpm
-      - 'apex-upload-artifact'
-
-# ISO verify job
-- job-template:
-    name: 'apex-verify-iso-{stream}'
-
-    # Job template for ISO verification
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
-    node: 'apex-virtual-master'
-
-    disabled: false
-
-    concurrent: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-          description: "Used for overriding the GIT URL coming from parameters macro."
-
-    scm:
-      - git-scm
-
-    properties:
-      - logrotate-default
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - 'apex-iso-verify'
-      - inject:
-          properties-content: ARTIFACT_TYPE=iso
-      - 'apex-upload-artifact'
-
-########################
-# builder macros
-########################
-- builder:
-    name: 'apex-build'
-    builders:
-      - shell:
-          !include-raw: ./apex-build.sh
-
-- builder:
-    name: 'apex-iso-verify'
-    builders:
-      - shell:
-          !include-raw: ./apex-iso-verify.sh
diff --git a/jjb/apex/apex-rtd-jobs.yaml b/jjb/apex/apex-rtd-jobs.yaml
deleted file mode 100644 (file)
index 48e4949..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
-    name: apex-rtd
-    project: apex
-    project-name: apex
-
-    project-pattern: 'apex'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-apex/47262/'
-    rtd-token: '134db049c774ab06c41db432e3a042a982f50edf'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          disabled: false
-      - gambia:
-          branch: 'stable/{stream}'
-          disabled: false
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/apex/apex-snapshot-create.sh b/jjb/apex/apex-snapshot-create.sh
deleted file mode 100644 (file)
index e8bf60b..0000000
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env bash
-##############################################################################
-# Copyright (c) 2016 Tim Rozet (Red Hat) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
-
-if [ -z "$SNAP_TYPE" ]; then
-  echo "ERROR: SNAP_TYPE not provided...exiting"
-  exit 1
-fi
-
-echo "Creating Apex snapshot..."
-echo "-------------------------"
-echo
-
-# create tmp directory
-tmp_dir=$(pwd)/.tmp
-mkdir -p ${tmp_dir}
-
-# info should have already been collected in apex-fetch-snap-info so copy it
-cp -r /tmp/snap/* ${tmp_dir}/
-
-echo "Shutting down nodes"
-# Shut down nodes
-nodes=$(sudo virsh list | grep -Eo "baremetal[0-9]")
-for node in $nodes; do
-  sudo virsh shutdown ${node} --mode acpi
-done
-
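-# Wait up to ~100 seconds per node for a graceful shutdown before failing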
-for node in $nodes; do
-  count=0
-  while [ "$count" -lt 10 ]; do
-    sleep 10
-    if sudo virsh list | grep ${node}; then
-       echo "Waiting for $node to shutdown, try $count"
-    else
-       break
-    fi
-    count=$((count+1))
-  done
-
-  if [ "$count" -ge 10 ]; then
-    echo "Node $node failed to shutdown"
-    exit 1
-  fi
-done
-
-pushd ${tmp_dir} > /dev/null
-echo "Gathering virsh definitions"
-# copy qcow2s, virsh definitions
-for node in $nodes; do
-  sudo cp -f /var/lib/libvirt/images/${node}.qcow2 ./
-  sudo virsh dumpxml ${node} > ${node}.xml
-done
-
-# copy virsh net definitions
-sudo virsh net-dumpxml admin > admin.xml
-
-sudo chown jenkins-ci:jenkins-ci *
-
-# tar up artifacts
-DATE=`date +%Y-%m-%d`
-tar czf ../apex-${SNAP_TYPE}-snap-${DATE}.tar.gz .
-popd > /dev/null
-sudo rm -rf ${tmp_dir}
-echo "Snapshot saved as apex-${SNAP_TYPE}-snap-${DATE}.tar.gz"
-
-# update opnfv properties file
-snap_sha=$(sha512sum apex-${SNAP_TYPE}-snap-${DATE}.tar.gz | cut -d' ' -f1)
-
-if curl --fail -O -L http://$GS_URL/snapshot.properties; then
-  # TODO(trozet): deprecate OPNFV_SNAP_URL for CSIT_SNAP_URL
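-  # The sed expressions below update the *_SNAP_URL and *_SNAP_SHA512SUM keys
-  # in place, appending them to the end of the file if no matching key exists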
-  if [ "$SNAP_TYPE" == 'csit' ]; then
-    sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
-    sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
-  fi
-  sed -i '/^'${SNAP_TYPE}'_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-'${SNAP_TYPE}'-snap-'${DATE}'.tar.gz#};${x;/^$/{s##'${SNAP_TYPE}'_SNAP_URL='${GS_URL}'/apex-'${SNAP_TYPE}'-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
-  sed -i '/^'${SNAP_TYPE}'_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//'${SNAP_TYPE}'_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
-else
-  cat << EOF > snapshot.properties
-${SNAP_TYPE}_SNAP_URL=${GS_URL}/apex-${SNAP_TYPE}-snap-${DATE}.tar.gz
-${SNAP_TYPE}_SNAP_SHA512SUM=${snap_sha}
-EOF
-  # TODO(trozet): deprecate OPNFV_SNAP_URL for CSIT_SNAP_URL
-  if [ "$SNAP_TYPE" == 'csit' ]; then
-    cat << EOF >> snapshot.properties
-OPNFV_SNAP_URL=${GS_URL}/apex-csit-snap-${DATE}.tar.gz
-OPNFV_SNAP_SHA512SUM=${snap_sha}
-EOF
-  fi
-fi
-echo "${SNAP_TYPE}_SNAP_URL=$GS_URL/apex-${SNAP_TYPE}-snap-${DATE}.tar.gz"
-echo "${SNAP_TYPE}_SNAP_SHA512SUM=$(sha512sum apex-${SNAP_TYPE}-snap-${DATE}.tar.gz | cut -d' ' -f1)"
-echo "Updated properties file: "
-cat snapshot.properties
diff --git a/jjb/apex/apex-snapshot-deploy.sh b/jjb/apex/apex-snapshot-deploy.sh
deleted file mode 100644 (file)
index dd69df3..0000000
+++ /dev/null
@@ -1,184 +0,0 @@
-#!/usr/bin/env bash
-##############################################################################
-# Copyright (c) 2016 Tim Rozet (Red Hat) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
-SNAP_CACHE=$HOME/snap_cache
-
-
-echo "Deploying Apex snapshot..."
-echo "--------------------------"
-echo
-
-if [ -z "$SNAP_TYPE" ]; then
-  echo "ERROR: SNAP_TYPE not provided...exiting"
-  exit 1
-fi
-
-echo "Cleaning server"
-pushd ci > /dev/null
-sudo opnfv-clean
-popd > /dev/null
-
-full_snap_url="gs://${GS_URL}/${OS_VERSION}/${TOPOLOGY}"
-
-echo "Downloading latest snapshot properties file"
-if ! gsutil cp ${full_snap_url}/snapshot.properties $WORKSPACE/opnfv.properties; then
-  echo "ERROR: Unable to find snapshot.properties at ${full_snap_url}...exiting"
-  exit 1
-fi
-
-echo "Properties contents:"
-cat ${WORKSPACE}/opnfv.properties
-
-# find latest check sum
-latest_snap_checksum=$(cat ${WORKSPACE}/opnfv.properties | grep ${SNAP_TYPE}_SNAP_SHA512SUM | awk -F "=" '{print $2}')
-if [ -z "$latest_snap_checksum" ]; then
-  echo "ERROR: checksum of latest snapshot from snapshot.properties is null!"
-  exit 1
-fi
-
-local_snap_checksum=""
-SNAP_CACHE=${SNAP_CACHE}/${OS_VERSION}/${TOPOLOGY}
-
-# check snap cache directory exists
-# if snapshot cache exists, find the checksum
-if [ -d "$SNAP_CACHE" ]; then
-  latest_snap=$(ls ${SNAP_CACHE} | grep tar.gz | grep $SNAP_TYPE | tail -n 1)
-  if [ -n "$latest_snap" ]; then
-    local_snap_checksum=$(sha512sum ${SNAP_CACHE}/${latest_snap} | cut -d' ' -f1)
-    echo "Local snap checksum is: ${local_snap_checksum}"
-  fi
-else
-  mkdir -p ${SNAP_CACHE}
-fi
-
-# compare check sum and download latest snap if not up to date
-if [ "$local_snap_checksum" != "$latest_snap_checksum" ]; then
-  snap_url=$(cat opnfv.properties | grep ${SNAP_TYPE}_SNAP_URL | awk -F "=" '{print $2}')
-  # TODO(trozet): Remove this once OPNFV url is deprecated
-  if [[ -z "$snap_url" && "$SNAP_TYPE" == 'csit' ]]; then
-      echo "WARN: Unable to find snap url for ${SNAP_TYPE}, attempting to use OPNFV"
-      snap_url=$(cat opnfv.properties | grep OPNFV_SNAP_URL | awk -F "=" '{print $2}')
-  fi
-  if [ -z "$snap_url" ]; then
-    echo "ERROR: Snap URL from snapshot.properties is null!"
-    exit 1
-  fi
-  echo "INFO: SHA mismatch, will download latest snapshot"
-  # wipe cache
-  rm -rf ${SNAP_CACHE}/*
-  gsutil cp "gs://${snap_url}" ${SNAP_CACHE}/
-  snap_tar=$(basename ${snap_url})
-else
-  snap_tar=${latest_snap}
-fi
-
-echo "INFO: Snapshot to be used is ${snap_tar}"
-
-# move to snap cache dir and unpack
-pushd ${SNAP_CACHE} > /dev/null
-tar xvf ${snap_tar}
-
-# create each network
-virsh_networks=$(ls *.xml | grep -v baremetal)
-
-if [ -z "$virsh_networks" ]; then
-  echo "ERROR: no virsh networks found in snapshot unpack"
-  exit 1
-fi
-
-echo "Checking overcloudrc"
-if ! stat overcloudrc; then
-  echo "ERROR: overcloudrc does not exist in snap unpack"
-  exit 1
-fi
-
-for network_def in ${virsh_networks}; do
-  sudo virsh net-create ${network_def}
-  network=$(echo ${network_def} | awk -F '.' '{print $1}')
-  if ! sudo virsh net-list | grep ${network}; then
-    sudo virsh net-start ${network}
-  fi
-  echo "Checking if OVS bridge is missing for network: ${network}"
-  if ! sudo ovs-vsctl show | grep "br-${network}"; then
-    sudo ovs-vsctl add-br br-${network}
-    echo "OVS Bridge created: br-${network}"
-    if [ "br-${network}" == 'br-admin' ]; then
-      echo "Configuring IP 192.0.2.99 on br-admin"
-      sudo ip addr add  192.0.2.99/24 dev br-admin
-      sudo ip link set up dev br-admin
-    elif [ "br-${network}" == 'br-external' ]; then
-      echo "Configuring IP 192.168.37.1 on br-external"
-      sudo ip addr add  192.168.37.1/24 dev br-external
-      sudo ip link set up dev br-external
-      # Routes for admin network
-      # The overcloud controller is multi-homed and will fail to respond
-      # to traffic from the functest container due to reverse-path-filtering
-      # This route allows reverse traffic, by forcing admin network destined
-      # traffic through the external network for controller IPs only.
-      # Compute nodes have no ip on external interfaces.
-      controller_ips=$(cat overcloudrc | grep -Eo "192.0.2.[0-9]+")
-      for ip in $controller_ips; do
-        sudo ip route add ${ip}/32 dev br-external
-      done
-    fi
-  fi
-done
-
-echo "Virsh networks up: $(sudo virsh net-list)"
-echo "Bringing up Overcloud VMs..."
-virsh_vm_defs=$(ls baremetal*.xml)
-
-if [ -z "$virsh_vm_defs" ]; then
-  echo "ERROR: no virsh VMs found in snapshot unpack"
-  exit 1
-fi
-
-for node_def in ${virsh_vm_defs}; do
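-  # Force the machine type to the generic 'pc' model in the domain XML
-  # (likely so XML captured on one QEMU version can be defined on another)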
-  sed  -ri "s/machine='[^\s]+'/machine='pc'/" ${node_def}
-  sudo virsh define ${node_def}
-  node=$(echo ${node_def} | awk -F '.' '{print $1}')
-  sudo cp -f ${node}.qcow2 /var/lib/libvirt/images/
-  sudo virsh start ${node}
-  echo "Node: ${node} started"
-done
-
-# copy overcloudrc for functest
-mkdir -p $HOME/cloner-info
-cp -f overcloudrc $HOME/cloner-info/
-
-admin_controller_ip=$(cat overcloudrc | grep -Eo -m 1 "192.0.2.[0-9]+" | head -1)
-netvirt_url="http://${admin_controller_ip}:8081/restconf/operational/network-topology:network-topology/topology/netvirt:1"
-
-source overcloudrc
-counter=1
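-# Poll for up to ~10 minutes: first for Neutron on port 9696, then for the
-# OpenDaylight NetVirt REST endpoint, before declaring the deployment ready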
-while [ "$counter" -le 10 ]; do
-  echo "Checking if OpenStack is up"
-  if nc -z ${admin_controller_ip} 9696 > /dev/null; then
-    echo "Overcloud Neutron is up...Checking if OpenDaylight NetVirt is up..."
-    if curl --fail --silent -u admin:${SDN_CONTROLLER_PASSWORD} ${netvirt_url} > /dev/null; then
-      echo "OpenDaylight is up.  Overcloud deployment complete"
-      exit 0
-    else
-      echo "OpenDaylight not yet up, try ${counter}"
-    fi
-  else
-    echo "Neutron not yet up, try ${counter}"
-  fi
-  counter=$((counter+1))
-  sleep 60
-done
-
-echo "ERROR: Deployment not up after 10 minutes...exiting."
-exit 1
diff --git a/jjb/apex/apex-unit-test.sh b/jjb/apex/apex-unit-test.sh
deleted file mode 100755 (executable)
index 3f15847..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-# log info to console
-echo "Starting unit tests for Apex..."
-echo "---------------------------------------------------------------------------------------"
-echo
-
-PATH=$PATH:/usr/sbin
-
-
-pushd build/ > /dev/null
-for pkg in yamllint rpmlint iproute epel-release python34-devel python34-nose python34-PyYAML python-pep8 python34-mock python34-pip; do
-  if ! rpm -q ${pkg} > /dev/null; then
-    if ! sudo yum install -y ${pkg}; then
-      echo "Failed to install ${pkg} package..."
-      exit 1
-    fi
-  fi
-done
-
-# Make sure coverage is installed
-if ! python3 -c "import coverage" &> /dev/null; then sudo pip3 install coverage; fi
-
-make rpmlint
-make python-pep8-check
-make yamllint
-make python-tests
-popd > /dev/null
-
-echo "--------------------------------------------------------"
-echo "Unit Tests Done!"
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
deleted file mode 100755 (executable)
index 00a0a1c..0000000
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [ -z "$ARTIFACT_TYPE" ]; then
-  echo "ERROR: ARTIFACT_TYPE not provided...exiting"
-  exit 1
-fi
-
-# log info to console
-echo "Uploading the Apex ${ARTIFACT_TYPE} artifact. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-if [[ ! "$ARTIFACT_VERSION" =~ dev ]]; then
-  source $BUILD_DIRECTORY/../opnfv.properties
-fi
-
-importkey () {
-  # clone releng repository
-  echo "Cloning releng repository..."
-  [ -d releng ] && rm -rf releng
-  git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-  # This is where we import the signing key
-  if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
-    if ! $WORKSPACE/releng/utils/gpg_import_key.sh; then
-      echo "WARNING: Failed to run gpg key import"
-    fi
-  fi
-}
-
-signrpm () {
-  for artifact in $RPM_LIST $SRPM_LIST; do
-    echo "Signing artifact: ${artifact}"
-    gpg2 -vvv --batch --yes --no-tty \
-      --default-key opnfv-helpdesk@rt.linuxfoundation.org \
-      --passphrase besteffort \
-      --detach-sig $artifact
-    gsutil cp "$artifact".sig gs://$GS_URL/$(basename "$artifact".sig)
-    echo "Upload complete for ${artifact} signature"
-  done
-}
-
-signiso () {
-  gpg2 -vvv --batch --yes --no-tty \
-    --default-key opnfv-helpdesk@rt.linuxfoundation.org  \
-    --passphrase besteffort \
-    --detach-sig $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso
-
-  gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
-  echo "ISO signature Upload Complete!"
-}
-
-uploadiso () {
-  gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log
-  echo "ISO Upload Complete!"
-}
-
-uploadrpm () {
-  for artifact in $RPM_LIST $SRPM_LIST; do
-    echo "Uploading artifact: ${artifact}"
-    gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log
-    echo "Upload complete for ${artifact}"
-  done
-  gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log
-  gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log
-
-  # Make the property files viewable on the artifact site
-  gsutil -m setmeta \
-    -h "Content-Type:text/html" \
-    -h "Cache-Control:private, max-age=0, no-transform" \
-    gs://$GS_URL/latest.properties \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-}
-
-uploadsnap () {
-  # Uploads snapshot artifact and updated properties file
-  echo "Uploading snapshot artifacts"
-  # snapshot dir is the same node in the create job workspace
-  # only 1 promotion job can run at a time on a slave
-  snapshot_dir="${WORKSPACE}/../apex-create-snapshot"
-  if [ -z "$SNAP_TYPE" ]; then
-    echo "ERROR: SNAP_TYPE not provided...exiting"
-    exit 1
-  fi
-  gsutil cp ${snapshot_dir}/apex-${SNAP_TYPE}-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
-  gsutil cp ${snapshot_dir}/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
-  echo "Upload complete for Snapshot"
-}
-
-uploadimages () {
-  # Uploads dev tarball
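-  # The patchset number is the trailing digit group of the Gerrit refspec
-  # (refs/changes/<NN>/<change>/<patchset>)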
-  GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
-  export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
-  echo "Uploading development build tarball"
-  pushd $BUILD_DIRECTORY > /dev/null
-  tar czf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz *.qcow2 *.vmlinuz *.initrd
-  gsutil cp apex-${OPNFV_ARTIFACT_VERSION}.tar.gz gs://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz > gsutil.latest.log
-  popd > /dev/null
-}
-
-# Always import the signing key; if it is available, the artifacts will be
-# signed before being uploaded
-importkey
-
-# Default to an empty value so the checks below do not trip 'set -o nounset'
-SIGN_ARTIFACT=${SIGN_ARTIFACT:-""}
-if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
-  echo "Signing key available"
-  SIGN_ARTIFACT="true"
-fi
-
-if [ "$ARTIFACT_TYPE" == 'snapshot' ]; then
-  uploadsnap
-elif [ "$ARTIFACT_TYPE" == 'iso' ]; then
-  if [[ "$ARTIFACT_VERSION" =~ dev || "$BRANCH" != 'stable/fraser' ]]; then
-    echo "Skipping ISO artifact upload for ${ARTIFACT_TYPE} due to dev/${BRANCH} build"
-    exit 0
-  fi
-  if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
-    signiso
-  fi
-  uploadiso
-elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
-  if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
-    if [[ "$BRANCH" != 'stable/fraser' ]]; then
-      echo "will not upload artifacts, ${BRANCH} uses upstream"
-      ARTIFACT_TYPE=none
-    else
-      echo "dev build detected, will upload image tarball"
-      ARTIFACT_TYPE=tarball
-      uploadimages
-    fi
-  else
-    RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
-    # The RPM URL should be the python package for master, and it is the only package we need
-    RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
-    SRPM_INSTALL_PATH=$BUILD_DIRECTORY
-    SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
-    if [[ "$BRANCH" == 'stable/fraser' ]]; then
-      VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
-      RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
-      RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
-      VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
-      SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
-      SRPM_LIST+=" ${SRPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
-    fi
-
-    if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
-      signrpm
-    fi
-    uploadrpm
-  fi
-else
-  echo "ERROR: Unknown artifact type ${ARTIFACT_TYPE} to upload...exiting"
-  exit 1
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-if [ "$ARTIFACT_TYPE" == 'iso' ]; then echo "ISO Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"; fi
-if [ "$ARTIFACT_TYPE" == 'rpm' ]; then echo "RPM Artifact is available as http://$GS_URL/$(basename $OPNFV_RPM_URL)"; fi
-if [ "$ARTIFACT_TYPE" == 'tarball' ]; then echo "Dev tarball Artifact is available as http://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz)"; fi
diff --git a/jjb/apex/apex-verify-jobs.yaml b/jjb/apex/apex-verify-jobs.yaml
deleted file mode 100644 (file)
index e8115eb..0000000
+++ /dev/null
@@ -1,409 +0,0 @@
----
-- project:
-    name: 'apex-verify-jobs'
-    project: 'apex'
-    jobs:
-      - 'apex-verify-{stream}'
-      - 'apex-verify-gate-{stream}'
-      - 'apex-verify-unit-tests-{stream}'
-    stream:
-      - master: &master
-          branch: '{stream}'
-          gs-pathname: ''
-          verify-scenario: 'os-nosdn-nofeature-noha'
-          disabled: false
-      - hunter: &hunter
-          branch: '{stream}'
-          gs-pathname: ''
-          verify-scenario: 'os-nosdn-nofeature-noha'
-          disabled: false
-      - gambia: &gambia
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          verify-scenario: 'os-nosdn-nofeature-ha'
-          disabled: false
-      - fraser: &fraser
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          verify-scenario: 'os-nosdn-nofeature-ha'
-          disabled: false
-      - danube: &danube
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          verify-scenario: 'os-odl_l3-nofeature-ha'
-          disabled: true
-      - euphrates: &euphrates
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          verify-scenario: 'os-odl-nofeature-ha'
-          disabled: false
-
-# Unit Test
-- job-template:
-    name: 'apex-verify-unit-tests-{stream}'
-
-    node: 'apex-build-master'
-
-    concurrent: true
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-
-    scm:
-      - git-scm-gerrit
-
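-    # Trigger only on changes limited to apex/tests/**; changes touching the
-    # forbidden paths below do not trigger this job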
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'true'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: 'apex'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'false'
-              file-paths:
-                - compare-type: ANT
-                  pattern: 'apex/tests/**'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: '*'
-                - compare-type: ANT
-                  pattern: 'apex/*'
-                - compare-type: ANT
-                  pattern: 'build/**'
-                - compare-type: ANT
-                  pattern: 'lib/**'
-                - compare-type: ANT
-                  pattern: 'config/**'
-                - compare-type: ANT
-                  pattern: 'apex/build/**'
-                - compare-type: ANT
-                  pattern: 'apex/common/**'
-                - compare-type: ANT
-                  pattern: 'apex/inventory/**'
-                - compare-type: ANT
-                  pattern: 'apex/network/**'
-                - compare-type: ANT
-                  pattern: 'apex/overcloud/**'
-                - compare-type: ANT
-                  pattern: 'apex/settings/**'
-                - compare-type: ANT
-                  pattern: 'apex/undercloud/**'
-                - compare-type: ANT
-                  pattern: 'apex/virtual/**'
-
-    properties:
-      - logrotate-default
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - apex-unit-test
-
-# Verify
-- job-template:
-    name: 'apex-verify-{stream}'
-
-    concurrent: true
-
-    disabled: '{obj:disabled}'
-
-    project-type: 'multijob'
-
-    parameters:
-      - '{project}-virtual-{stream}-defaults'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}/dev'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - string:
-          name: ARTIFACT_VERSION
-          default: dev
-          description: "Used for overriding the ARTIFACT_VERSION"
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'true'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: 'apex'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              file-paths:
-                - compare-type: ANT
-                  pattern: '*'
-                - compare-type: ANT
-                  pattern: 'apex/*'
-                - compare-type: ANT
-                  pattern: 'build/**'
-                - compare-type: ANT
-                  pattern: 'ci/**'
-                - compare-type: ANT
-                  pattern: 'lib/**'
-                - compare-type: ANT
-                  pattern: 'config/**'
-                - compare-type: ANT
-                  pattern: 'apex/build/**'
-                - compare-type: ANT
-                  pattern: 'apex/builders/**'
-                - compare-type: ANT
-                  pattern: 'apex/common/**'
-                - compare-type: ANT
-                  pattern: 'apex/inventory/**'
-                - compare-type: ANT
-                  pattern: 'apex/network/**'
-                - compare-type: ANT
-                  pattern: 'apex/overcloud/**'
-                - compare-type: ANT
-                  pattern: 'apex/settings/**'
-                - compare-type: ANT
-                  pattern: 'apex/undercloud/**'
-                - compare-type: ANT
-                  pattern: 'apex/virtual/**'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'apex/tests/**'
-                - compare-type: ANT
-                  pattern: 'docs/**'
-
-    properties:
-      - logrotate-default
-      - throttle:
-          max-per-node: 3
-          max-total: 10
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-.*-promote.*'
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - multijob:
-          name: basic
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-verify-unit-tests-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - multijob:
-          name: build
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-build-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                ARTIFACT_VERSION=$ARTIFACT_VERSION
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - multijob:
-          name: deploy-virtual
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-virtual-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                ARTIFACT_VERSION=$ARTIFACT_VERSION
-                DEPLOY_SCENARIO={verify-scenario}
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-                FUNCTEST_SUITE_NAME=tempest_smoke
-                FUNCTEST_MODE=testcase
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-
-# Verify Scenario Gate
-- job-template:
-    name: 'apex-verify-gate-{stream}'
-
-    concurrent: true
-
-    disabled: '{obj:disabled}'
-
-    project-type: 'multijob'
-
-    parameters:
-      - '{project}-virtual-{stream}-defaults'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}/dev'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-          description: "Used for overriding the GIT URL coming from parameters macro."
-      - string:
-          name: ARTIFACT_VERSION
-          default: dev
-          description: "Used for overriding the ARTIFACT_VERSION"
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - comment-added-contains-event:
-                comment-contains-value: '^Patch Set [0-9]+: Code-Review\+2.*start-gate-scenario:.*'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: 'apex'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              file-paths:
-                - compare-type: ANT
-                  pattern: 'ci/**'
-                - compare-type: ANT
-                  pattern: 'build/**'
-                - compare-type: ANT
-                  pattern: 'lib/**'
-                - compare-type: ANT
-                  pattern: 'config/**'
-                - compare-type: ANT
-                  pattern: 'apex/**'
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-virtual.*'
-            - 'apex-.*-promote.*'
-            - 'odl-netvirt.*'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      # yamllint disable rule:line-length
-      - shell: |
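-          # Extract the requested scenario from the 'start-gate-scenario:' comment text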
-          echo DEPLOY_SCENARIO=$(echo $GERRIT_EVENT_COMMENT_TEXT | grep start-gate-scenario | grep -Eo '(os|k8s)-.*$') > detected_scenario
-      # yamllint enable rule:line-length
-      - inject:
-          properties-file: detected_scenario
-      - multijob:
-          name: deploy-virtual
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-deploy-virtual-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                ARTIFACT_VERSION=$ARTIFACT_VERSION
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-                GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - shell:
-          !include-raw-escape: ./apex-functest-scenario.sh
-      - inject:
-          properties-file: functest_scenario
-          override-build-parameters: true
-      - multijob:
-          name: functest-smoke
-          condition: ALWAYS
-          projects:
-            - name: 'functest-apex-virtual-suite-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                DOCKER_TAG=$DOCKER_TAG
-                FUNCTEST_SUITE_NAME=tempest_smoke
-                FUNCTEST_MODE=testcase
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' != 'k8s-nosdn-nofeature-noha'"
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: apex-fetch-logs
-          projects:
-            - name: 'apex-fetch-logs-{stream}'
-              current-parameters: true
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-
-########################
-# builder macros
-########################
-- builder:
-    name: apex-unit-test
-    builders:
-      - shell:
-          !include-raw: ./apex-unit-test.sh
diff --git a/jjb/apex/apex.yaml b/jjb/apex/apex.yaml
deleted file mode 100644 (file)
index fb2eaa2..0000000
+++ /dev/null
@@ -1,2008 +0,0 @@
----
-- project:
-    name: 'apex'
-    project: 'apex'
-    jobs:
-      - 'apex-fetch-logs-{stream}'
-      - 'apex-runner-cperf-{stream}'
-      - 'apex-virtual-{stream}'
-      - 'apex-deploy-{platform}-{stream}'
-      - 'apex-daily-{stream}'
-      - 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
-      - 'apex-fdio-promote-daily-{stream}'
-      - 'apex-{scenario}-baremetal-{scenario_stream}'
-      - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
-      - 'apex-upload-snapshot'
-      - 'apex-create-snapshot'
-      - 'apex-fetch-snap-info'
-      - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
-    # stream:    branch with - in place of / (eg. stable-arno)
-    # branch:    branch (eg. stable/arno)
-    stream:
-      - master: &master
-          branch: 'master'
-          gs-pathname: ''
-          build-slave: 'apex-build-master'
-          virtual-slave: 'apex-virtual-master'
-          baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-nosdn-nofeature-noha'
-          scenario_stream: 'master'
-          disable_daily: true
-          disable_promote: true
-      - hunter: &hunter
-          branch: 'stable/hunter'
-          gs-pathname: '/hunter'
-          build-slave: 'apex-build-master'
-          virtual-slave: 'apex-virtual-master'
-          baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-nosdn-nofeature-ha'
-          scenario_stream: 'hunter'
-          disable_daily: true
-          disable_promote: true
-      - gambia: &gambia
-          branch: 'stable/gambia'
-          gs-pathname: '/gambia'
-          build-slave: 'apex-build-master'
-          virtual-slave: 'apex-virtual-master'
-          baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-nosdn-nofeature-ha'
-          scenario_stream: 'gambia'
-          disable_daily: true
-          disable_promote: true
-      - fraser: &fraser
-          branch: 'stable/fraser'
-          gs-pathname: '/fraser'
-          build-slave: 'apex-build-master'
-          virtual-slave: 'apex-virtual-master'
-          baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-nosdn-nofeature-ha'
-          scenario_stream: 'fraser'
-          disable_daily: true
-          disable_promote: true
-      - euphrates: &euphrates
-          branch: 'stable/euphrates'
-          gs-pathname: '/euphrates'
-          build-slave: 'apex-build-master'
-          virtual-slave: 'apex-virtual-master'
-          baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-odl-nofeature-ha'
-          scenario_stream: 'euphrates'
-          disable_daily: true
-          disable_promote: true
-      - danube: &danube
-          branch: 'stable/danube'
-          gs-pathname: '/danube'
-          build-slave: 'apex-build-danube'
-          virtual-slave: 'apex-virtual-danube'
-          baremetal-slave: 'apex-baremetal-danube'
-          verify-scenario: 'os-odl_l3-nofeature-ha'
-          scenario_stream: 'danube'
-          disabled: true
-          disable_daily: true
-          disable_promote: true
-
-    scenario:
-      - 'os-nosdn-nofeature-noha':
-          <<: *danube
-      - 'os-nosdn-nofeature-ha':
-          <<: *danube
-      - 'os-nosdn-nofeature-ha-ipv6':
-          <<: *danube
-      - 'os-nosdn-ovs-noha':
-          <<: *danube
-      - 'os-nosdn-ovs-ha':
-          <<: *danube
-      - 'os-nosdn-fdio-noha':
-          <<: *danube
-      - 'os-nosdn-fdio-ha':
-          <<: *danube
-      - 'os-nosdn-kvm-ha':
-          <<: *danube
-      - 'os-nosdn-kvm-noha':
-          <<: *danube
-      - 'os-odl_l2-fdio-noha':
-          <<: *danube
-      - 'os-odl_l2-fdio-ha':
-          <<: *danube
-      - 'os-odl_netvirt-fdio-noha':
-          <<: *danube
-      - 'os-odl_l2-sfc-noha':
-          <<: *danube
-      - 'os-odl_l3-nofeature-noha':
-          <<: *danube
-      - 'os-odl_l3-nofeature-ha':
-          <<: *danube
-      - 'os-odl_l3-ovs-noha':
-          <<: *danube
-      - 'os-odl_l3-ovs-ha':
-          <<: *danube
-      - 'os-odl-bgpvpn-ha':
-          <<: *danube
-      - 'os-odl-gluon-noha':
-          <<: *danube
-      - 'os-odl_l3-fdio-noha':
-          <<: *danube
-      - 'os-odl_l3-fdio-ha':
-          <<: *danube
-      - 'os-odl_l3-fdio_dvr-noha':
-          <<: *danube
-      - 'os-odl_l3-fdio_dvr-ha':
-          <<: *danube
-      - 'os-odl_l3-csit-noha':
-          <<: *danube
-      - 'os-onos-nofeature-ha':
-          <<: *danube
-      - 'os-ovn-nofeature-noha':
-          <<: *danube
-      - 'os-nosdn-nofeature-noha':
-          <<: *master
-      - 'os-nosdn-nofeature-ha':
-          <<: *master
-      - 'os-nosdn-nofeature-noha':
-          <<: *gambia
-      - 'os-nosdn-nofeature-ha':
-          <<: *gambia
-      - 'os-nosdn-nofeature-ha-ipv6':
-          <<: *gambia
-      - 'os-odl-nofeature-noha':
-          <<: *gambia
-      - 'os-odl-nofeature-ha':
-          <<: *gambia
-      - 'k8s-nosdn-nofeature-noha':
-          <<: *gambia
-      - 'os-odl-bgpvpn-ha':
-          <<: *gambia
-      - 'os-odl-bgpvpn-noha':
-          <<: *gambia
-      - 'os-odl-sfc-ha':
-          <<: *gambia
-      - 'os-odl-sfc-noha':
-          <<: *gambia
-      - 'os-nosdn-calipso-noha':
-          <<: *gambia
-      - 'os-ovn-nofeature-ha':
-          <<: *gambia
-      - 'os-nosdn-nofeature-ha':
-          <<: *fraser
-      - 'os-odl-bgpvpn-ha':
-          <<: *fraser
-      - 'os-nosdn-nofeature-noha':
-          <<: *hunter
-      - 'os-nosdn-nofeature-ha':
-          <<: *hunter
-      - 'os-ovn-nofeature-ha':
-          <<: *hunter
-      - 'os-nosdn-nofeature-noha':
-          <<: *euphrates
-      - 'os-nosdn-nofeature-ha':
-          <<: *euphrates
-      - 'os-odl-nofeature-ha':
-          <<: *euphrates
-      - 'os-odl-nofeature-noha':
-          <<: *euphrates
-      - 'os-odl-bgpvpn-ha':
-          <<: *euphrates
-      - 'os-ovn-nofeature-noha':
-          <<: *euphrates
-      - 'os-nosdn-fdio-noha':
-          <<: *euphrates
-      - 'os-nosdn-fdio-ha':
-          <<: *euphrates
-      - 'os-nosdn-bar-ha':
-          <<: *euphrates
-      - 'os-nosdn-bar-noha':
-          <<: *euphrates
-      - 'os-nosdn-nofeature-ha-ipv6':
-          <<: *euphrates
-      - 'os-nosdn-ovs_dpdk-noha':
-          <<: *euphrates
-      - 'os-nosdn-ovs_dpdk-ha':
-          <<: *euphrates
-      - 'os-nosdn-kvm_ovs_dpdk-noha':
-          <<: *euphrates
-      - 'os-nosdn-kvm_ovs_dpdk-ha':
-          <<: *euphrates
-      - 'os-odl-sfc-noha':
-          <<: *euphrates
-      - 'os-odl-sfc-ha':
-          <<: *euphrates
-
-    platform:
-      - 'baremetal'
-      - 'virtual'
-
-    os_version:
-      - 'queens':
-          os_scenario: 'nofeature'
-          odl_branch: 'stable/oxygen'
-      - 'rocky':
-          os_scenario: 'rocky'
-          odl_branch: 'stable/oxygen'
-      - 'master':
-          os_scenario: 'nofeature'
-          odl_branch: 'stable/fluorine'
-
-    topology:
-      - 'noha'
-      - 'ha'
-      - 'noha-allinone'
-
-    snap_type:
-      - csit:
-          sdn: 'odl'
-      - functest:
-          sdn: 'nosdn'
-# Fetch Logs Job
-- job-template:
-    name: 'apex-fetch-logs-{stream}'
-
-    concurrent: true
-
-    disabled: false
-    scm:
-      - git-scm-gerrit
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-    # yamllint enable rule:line-length
-    properties:
-      - logrotate-default
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - 'apex-fetch-logs'
-
-- job-template:
-    name: 'apex-runner-cperf-{stream}'
-
-    # runner cperf job
-    project-type: 'multijob'
-    node: 'intel-pod2'
-
-    disabled: false
-
-    parameters:
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-          description: "Used for overriding the GIT URL coming from parameters macro."
-
-    scm:
-      - git-scm
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: false
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-deploy.*'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Deployed on $NODE_NAME"
-      - multijob:
-          name: 'Baremetal Deploy'
-          condition: ALWAYS
-          projects:
-            - name: 'apex-deploy-baremetal-{stream}'
-              node-parameters: false
-              current-parameters: true
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-                OPNFV_CLEAN=yes
-                DEPLOY_SCENARIO={verify-scenario}
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: CPERF
-          condition: SUCCESSFUL
-          projects:
-            - name: 'cperf-apex-intel-pod2-daily-master'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO={verify-scenario}
-              kill-phase-on: NEVER
-              abort-all-job: false
-              git-revision: false
-
-# Deploy job
-- job-template:
-    name: 'apex-deploy-{platform}-{stream}'
-
-    concurrent: true
-
-    disabled: false
-    quiet-period: 30
-    scm:
-      - git-scm-gerrit
-
-    wrappers:
-      - timeout:
-          timeout: 140
-          fail: true
-
-    parameters:
-      - '{project}-{platform}-{stream}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{verify-scenario}'
-          description: "Scenario to deploy with."
-      # yamllint disable rule:line-length
-      - string:
-          name: OPNFV_CLEAN
-          default: 'no'
-          description: "Use yes in lower case to invoke clean. Indicates if the deploy environment should be cleaned before deployment"
-
-    # yamllint enable rule:line-length
-    properties:
-      - logrotate-default
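-      # Block deploys while other deploy or test jobs are running on this node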
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-deploy.*'
-            - 'functest.*'
-            - 'yardstick.*'
-            - 'dovetail.*'
-            - 'storperf.*'
-            - 'odl-netvirt.*'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - 'apex-download-artifact'
-      - 'apex-deploy'
-      - 'clean-workspace'
-
-
-# Virtual Deploy and Test
-- job-template:
-    name: 'apex-virtual-{stream}'
-
-    project-type: 'multijob'
-
-    concurrent: true
-
-    disabled: false
-
-    scm:
-      - git-scm-gerrit
-
-    parameters:
-      - '{project}-defaults'
-      - '{project}-virtual-{stream}-defaults'
-      - 'functest-suite-parameter'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{verify-scenario}'
-          description: "Scenario to deploy with."
-      - string:
-          name: ARTIFACT_VERSION
-          default: dev
-          description: "Used for overriding the ARTIFACT_VERSION"
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-runner.*'
-            - 'apex-run.*'
-            - 'apex-virtual-.*'
-            - 'apex-verify-gate-.*'
-            - 'odl-netvirt.*'
-            - 'apex-.*-promote.*'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - multijob:
-          name: deploy-virtual
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-deploy-virtual-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                ARTIFACT_VERSION=$ARTIFACT_VERSION
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-                PROMOTE=$PROMOTE
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - multijob:
-          name: functest-smoke
-          condition: ALWAYS
-          projects:
-            - name: 'functest-apex-virtual-suite-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                FUNCTEST_MODE=$FUNCTEST_MODE
-                FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: apex-fetch-logs
-          projects:
-            - name: 'apex-fetch-logs-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-
-# Baremetal Deploy and Test
-- job-template:
-    name: 'apex-{scenario}-baremetal-{scenario_stream}'
-
-    project-type: 'multijob'
-
-    disabled: false
-
-    scm:
-      - git-scm
-
-    parameters:
-      - '{project}-defaults'
-      - '{project}-baremetal-{scenario_stream}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{scenario}'
-          description: "Scenario to deploy with."
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-runner.*'
-            - 'apex-.*-promote.*'
-            - 'apex-run.*'
-            - 'apex-.+-baremetal-.+'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - multijob:
-          name: 'Baremetal Deploy'
-          condition: SUCCESSFUL
-          execution-type: SEQUENTIALLY
-          projects:
-            - name: 'apex-deploy-baremetal-{scenario_stream}'
-              node-parameters: true
-              current-parameters: true
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-fetch-logs-{scenario_stream}'
-              current-parameters: true
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-      - shell:
-          !include-raw-escape: ./apex-functest-scenario.sh
-      - inject:
-          properties-file: functest_scenario
-          override-build-parameters: true
-      - multijob:
-          name: 'OPNFV Test Suite'
-          condition: ALWAYS
-          projects:
-            - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                DOCKER_TAG=$DOCKER_TAG
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: apex-fetch-logs
-          projects:
-            - name: 'apex-fetch-logs-{scenario_stream}'
-              current-parameters: true
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-    publishers:
-      - groovy-postbuild:
-          script:
-            !include-raw-escape: ./update-build-result.groovy
-
-# Baremetal test job
-- job-template:
-    name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
-
-    project-type: 'multijob'
-
-    disabled: false
-
-    parameters:
-      - '{project}-defaults'
-      - '{project}-baremetal-{scenario_stream}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{scenario}'
-          description: "Scenario to deploy with."
-      - string:
-          name: DOCKER_TAG
-          default: ''
-          description: Default docker tag to pass to functest
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-runner.*'
-            - 'apex-run.*'
-            - 'apex-testsuite-.+-baremetal-.+'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - multijob:
-          name: Functest
-          condition: ALWAYS
-          projects:
-            - name: 'functest-apex-baremetal-daily-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                DOCKER_TAG=$DOCKER_TAG
-              kill-phase-on: NEVER
-              abort-all-job: false
-              git-revision: false
-      - multijob:
-          name: Yardstick
-          condition: ALWAYS
-          projects:
-            - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              abort-all-job: false
-              git-revision: false
-      - multijob:
-          name: Dovetail-default-mandatory
-          condition: ALWAYS
-          projects:
-            - name: 'dovetail-apex-baremetal-default-mandatory-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
-              abort-all-job: false
-              git-revision: false
-      - multijob:
-          name: Dovetail-default-optional
-          condition: ALWAYS
-          projects:
-            - name: 'dovetail-apex-baremetal-default-optional-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
-              abort-all-job: false
-              git-revision: false
-      - multijob:
-          name: Dovetail-proposed_tests
-          condition: ALWAYS
-          projects:
-            - name: 'dovetail-apex-baremetal-proposed_tests-optional-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
-              abort-all-job: false
-              git-revision: false
-      - multijob:
-          name: StorPerf
-          condition: ALWAYS
-          projects:
-            - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
-              kill-phase-on: NEVER
-              abort-all-job: false
-              git-revision: false
-# Build status is always success because the conditional-step plugin fetches the
-# build status before the multijob phases execute
-#        - conditional-step:
-#            condition-kind: current-status
-#            condition-worst: SUCCESS
-#            condition-best: SUCCESS
-#            on-evaluation-failure: mark-unstable
-#            steps:
-#                - shell: 'echo "Tests Passed"'
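-# Illustrative note, not from the original config: the enable-condition values in
-# the test phases above are Groovy expressions evaluated by the Jenkins multijob
-# plugin, so a phase job only runs when the expression is true. For example,
-#   "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
-# limits a phase such as StorPerf to the os-nosdn-nofeature-ha scenario.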
-
-- job-template:
-    name: 'apex-daily-{stream}'
-
-    # Job template for daily build
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable-hunter)
-    #     branch:    branch (eg. stable/hunter)
-    project-type: 'multijob'
-
-    disabled: '{obj:disable_daily}'
-
-    scm:
-      - git-scm
-
-    parameters:
-      - '{project}-defaults'
-      - '{project}-baremetal-{stream}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-daily.*'
-
-    triggers:
-      - 'apex-{stream}'
-
-    builders:
-      - multijob:
-          name: build
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-build-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - multijob:
-          name: 'Verify and upload ISO'
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-verify-iso-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BUILD_DIRECTORY=$WORKSPACE/../apex-build-{stream}/.build
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - apex-builder-{stream}
-
-# snapshot info fetch
-- job-template:
-    name: 'apex-fetch-snap-info'
-
-    disabled: false
-
-    parameters:
-      - '{project}-defaults'
-
-    builders:
-      - shell:
-          !include-raw-escape: ./apex-fetch-snap-info.sh
-
-# snapshot create
-- job-template:
-    name: 'apex-create-snapshot'
-
-    disabled: false
-
-    parameters:
-      - '{project}-defaults'
-
-    builders:
-      - shell:
-          !include-raw-escape: ./apex-snapshot-create.sh
-
-# snapshot upload
-- job-template:
-    name: 'apex-upload-snapshot'
-
-    disabled: false
-
-    parameters:
-      - '{project}-defaults'
-
-    builders:
-      - inject:
-          properties-content: ARTIFACT_TYPE=snapshot
-      - 'apex-upload-artifact'
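-      # Illustrative note, not from the original config: injecting
-      # ARTIFACT_TYPE=snapshot here is presumably how the shared
-      # 'apex-upload-artifact' builder (apex-upload-artifact.sh) is told to
-      # upload a snapshot rather than a build artifact.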
-
-# CSIT promote
-- job-template:
-    name: 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
-
-    # Job template for promoting CSIT/Functest snapshots
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable-hunter)
-    #     branch:    branch (eg. stable/hunter)
-    node: '{virtual-slave}'
-    project-type: 'multijob'
-    disabled: '{disable_promote}'
-
-    scm:
-      - git-scm
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: ARTIFACT_VERSION
-          default: dev
-          description: "Used for overriding the ARTIFACT_VERSION"
-      - string:
-          name: PROMOTE
-          default: 'True'
-          description: "Used for overriding the PROMOTE"
-      - string:
-          name: GS_URL
-          default: 'artifacts.opnfv.org/apex/{os_version}/{topology}'
-          description: "User for overriding GS_URL from apex params"
-      - string:
-          name: OS_VERSION
-          default: '{os_version}'
-          description: OpenStack version short name
-      - string:
-          name: ODL_BRANCH
-          default: '{odl_branch}'
-          description: ODL branch being used
-      - string:
-          name: FORCE_PROMOTE
-          default: 'False'
-          description: "Used to force promotion and skip CSIT"
-      - string:
-          name: SNAP_TYPE
-          default: '{snap_type}'
-          description: Type of snapshot to promote
-    properties:
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-runner.*'
-            - 'apex-daily.*'
-            - 'apex-.*-promote.*'
-            - 'odl-netvirt.*'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    triggers:
-      - '{stream}-{snap_type}-{os_version}'
-
-    builders:
-      - multijob:
-          name: apex-virtual-deploy
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-deploy-virtual-{stream}'
-              current-parameters: true
-              predefined-parameters: |
-                DEPLOY_SCENARIO=os-{sdn}-{os_scenario}-{topology}-{snap_type}
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - multijob:
-          name: fetch snapshot info
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-fetch-snap-info'
-              current-parameters: true
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: test phase
-          condition: SUCCESSFUL
-          execution-type: SEQUENTIALLY
-          projects:
-            - name: cperf-apex-csit-master
-              predefined-parameters: |
-                ODL_BRANCH=$ODL_BRANCH
-                RC_FILE_PATH=/tmp/snap/overcloudrc
-                NODE_FILE_PATH=/tmp/snap/node.yaml
-                SSH_KEY_PATH=/tmp/snap/id_rsa
-                ODL_CONTAINERIZED=true
-                OS_VERSION=$OS_VERSION
-                SKIP_CSIT=$FORCE_PROMOTE
-                SNAP_TYPE=$SNAP_TYPE
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: false
-              enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
-            - name: cperf-upload-logs-csit
-              predefined-parameters: |
-                ODL_BRANCH=$ODL_BRANCH
-                OS_VERSION=$OS_VERSION
-                SNAP_TYPE=$SNAP_TYPE
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: false
-              enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
-            - name: 'functest-apex-virtual-suite-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                DOCKER_TAG=$DOCKER_TAG
-                FUNCTEST_SUITE_NAME=tempest_smoke
-                FUNCTEST_MODE=testcase
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              enable-condition: "def m = '$SNAP_TYPE' ==~ /functest/"
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-fetch-logs-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: create snapshot
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-create-snapshot'
-              current-parameters: true
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: upload snapshot
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-upload-snapshot'
-              current-parameters: true
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-
-# FDIO promote
-- job-template:
-    name: 'apex-fdio-promote-daily-{stream}'
-
-    # Job template for promoting FDIO snapshots
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable-hunter)
-    #     branch:    branch (eg. stable/hunter)
-    node: '{virtual-slave}'
-    project-type: 'multijob'
-    disabled: false
-
-    scm:
-      - git-scm
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-
-    properties:
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-deploy.*'
-            - 'apex-runner.*'
-            - 'apex-daily.*'
-
-    builders:
-      - multijob:
-          name: build
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-build-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - multijob:
-          name: deploy-virtual
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-deploy-virtual-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-                PROMOTE=True
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: create snapshot
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-create-snapshot'
-              current-parameters: false
-              predefined-parameters: |
-                SNAP_TYPE=fdio
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: upload snapshot
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-upload-snapshot'
-              current-parameters: false
-              predefined-parameters: |
-                SNAP_TYPE=fdio
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-
-# Flex job
-- job-template:
-    name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
-
-    project-type: 'multijob'
-
-    disabled: true
-
-    node: 'flex-pod2'
-
-    scm:
-      - git-scm
-
-    triggers:
-      - 'apex-{stream}'
-
-    parameters:
-      - '{project}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-nosdn-nofeature-ha'
-          description: "Scenario to deploy with."
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-          description: 'Git URL to use on this Jenkins Slave'
-      - string:
-          name: SSH_KEY
-          default: /root/.ssh/id_rsa
-          description: 'SSH key to use for Apex'
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-runner.*'
-            - 'apex-.*-promote.*'
-            - 'apex-run.*'
-            - 'apex-.+-baremetal-.+'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - multijob:
-          name: 'Baremetal Deploy'
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-deploy-baremetal-{stream}'
-              node-parameters: true
-              current-parameters: true
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: Yardstick
-          condition: ALWAYS
-          projects:
-            - name: 'yardstick-apex-baremetal-daily-{stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              abort-all-job: false
-              git-revision: false
-
-
-########################
-# parameter macros
-########################
-- parameter:
-    name: apex-parameter
-    parameters:
-      - string:
-          name: ARTIFACT_NAME
-          default: 'latest'
-          description: "RPM Artifact name that will be appended to GS_URL to deploy a specific artifact"
-      - string:
-          name: ARTIFACT_VERSION
-          default: 'daily'
-          description: "Artifact version type"
-      - string:
-          name: BUILD_DIRECTORY
-          default: $WORKSPACE/.build
-          description: "Directory where the build artifact will be located upon the completion of the build."
-      - string:
-          name: CACHE_DIRECTORY
-          default: $HOME/opnfv/apex-cache{gs-pathname}
-          description: "Directory where the cache to be used during the build is located."
-      # yamllint disable rule:line-length
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-          description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
-      # yamllint enable rule:line-length
-      - string:
-          name: GS_PATHNAME
-          default: '{gs-pathname}'
-          description: "Version directory where opnfv artifacts are stored in gs repository"
-      - string:
-          name: GS_URL
-          default: $GS_BASE{gs-pathname}
-          description: "URL to Google Storage."
-      - string:
-          name: PROMOTE
-          default: 'False'
-          description: "Flag to know if we should promote/upload snapshot artifacts."
-
-########################
-# builder macros
-########################
-
-# danube Builder
-- builder:
-    name: apex-builder-danube
-    builders:
-      - multijob:
-          name: Baremetal Deploy and Test Phase
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-os-nosdn-nofeature-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-nofeature-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-ovs-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-ovs-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-fdio-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-fdio-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-kvm-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-kvm-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l2-fdio-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l2-fdio-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_netvirt-fdio-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l2-sfc-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l3-nofeature-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l3-nofeature-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l3-ovs-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l3-ovs-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-bgpvpn-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-gluon-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l3-fdio-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l3-fdio-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l3-fdio_dvr-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l3-fdio_dvr-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl_l3-csit-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-onos-nofeature-ha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-ovn-nofeature-noha-baremetal-danube'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-
-# master Builder
-- builder:
-    name: apex-builder-master
-    builders:
-      - multijob:
-          name: Baremetal Deploy and Test Phase
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-os-nosdn-nofeature-noha-baremetal-master'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-nofeature-ha-baremetal-master'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-
-# gambia Builder
-- builder:
-    name: apex-builder-gambia
-    builders:
-      - multijob:
-          name: Baremetal Deploy and Test Phase
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-os-nosdn-nofeature-noha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-nofeature-ha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-nofeature-noha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-nofeature-ha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-k8s-nosdn-nofeature-noha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-bgpvpn-ha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-bgpvpn-noha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-sfc-ha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-sfc-noha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-calipso-noha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-ovn-nofeature-ha-baremetal-gambia'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-
-# fraser Builder
-- builder:
-    name: apex-builder-fraser
-    builders:
-      - multijob:
-          name: Baremetal Deploy and Test Phase
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-os-nosdn-nofeature-ha-baremetal-fraser'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-bgpvpn-ha-baremetal-fraser'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-
-# hunter Builder
-- builder:
-    name: apex-builder-hunter
-    builders:
-      - multijob:
-          name: Baremetal Deploy and Test Phase
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-os-nosdn-nofeature-noha-baremetal-hunter'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-nofeature-ha-baremetal-hunter'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-ovn-nofeature-ha-baremetal-hunter'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-
-# euphrates Builder
-- builder:
-    name: apex-builder-euphrates
-    builders:
-      - multijob:
-          name: Baremetal Deploy and Test Phase
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-os-nosdn-nofeature-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-nofeature-ha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-nofeature-ha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-nofeature-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-bgpvpn-ha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-ovn-nofeature-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-fdio-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-fdio-ha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-bar-ha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-bar-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-ovs_dpdk-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-ovs_dpdk-ha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-kvm_ovs_dpdk-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-nosdn-kvm_ovs_dpdk-ha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-sfc-noha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-os-odl-sfc-ha-baremetal-euphrates'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-- builder:
-    name: 'apex-upload-artifact'
-    builders:
-      - shell:
-          !include-raw: ./apex-upload-artifact.sh
-
-- builder:
-    name: 'apex-download-artifact'
-    builders:
-      - shell:
-          !include-raw: ./apex-download-artifact.sh
-
-- builder:
-    name: 'apex-deploy'
-    builders:
-      - shell:
-          !include-raw: ./apex-deploy.sh
-
-- builder:
-    name: 'apex-fetch-logs'
-    builders:
-      - shell:
-          !include-raw: ./apex-fetch-logs.sh
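-
-# Illustrative note, not from the original config: the standalone builder macros
-# above use '!include-raw', while the job templates use '!include-raw-escape';
-# the escaped variant additionally escapes '{' and '}' in the included script so
-# they are not treated as JJB template parameters during job-template expansion.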
-
-########################
-# trigger macros
-# timed uses Jenkins cron format: 'minute hour day-of-month month day-of-week'
-########################
-- trigger:
-    name: 'apex-master'
-    triggers:
-      - timed: '0 0 1-31/2 * *'
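-# Illustrative reading, not from the original config: with the Jenkins cron
-# fields above, '0 0 1-31/2 * *' fires at 00:00 on every odd day of the month,
-# i.e. the apex-master daily is triggered every other day.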
-
-- trigger:
-    name: 'apex-hunter'
-    triggers:
-      - timed: '0 4 2-30/2 * *'
-
-- trigger:
-    name: 'apex-gambia'
-    triggers:
-      - timed: '0 4 2-30/2 * *'
-
-- trigger:
-    name: 'apex-fraser'
-    triggers:
-      - timed: '0 0 2-30/2 * *'
-
-- trigger:
-    name: 'apex-euphrates'
-    triggers:
-      - timed: '0 0 2-30/2 * *'
-
-- trigger:
-    name: 'apex-danube'
-    triggers:
-      - timed: '0 3 1 1 7'
-
-- trigger:
-    name: 'master-csit-master'
-    triggers:
-      - timed: '0 5 * * *'
-
-- trigger:
-    name: 'master-csit-rocky'
-    triggers:
-      - timed: '0 5 * * *'
-
-- trigger:
-    name: 'master-csit-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'hunter-csit-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'hunter-csit-rocky'
-    triggers:
-      - timed: '0 5 * * *'
-
-- trigger:
-    name: 'hunter-csit-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-csit-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-csit-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-csit-queens'
-    triggers:
-      - timed: '0 5 * * *'
-
-- trigger:
-    name: 'fraser-csit-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'fraser-csit-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'fraser-csit-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-csit-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-csit-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-csit-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-csit-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-csit-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-csit-queens'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'master-functest-master'
-    triggers:
-      - timed: '0 3 * * *'
-
-- trigger:
-    name: 'master-functest-rocky'
-    triggers:
-      - timed: '0 3 * * *'
-
-- trigger:
-    name: 'master-functest-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'hunter-functest-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'hunter-functest-rocky'
-    triggers:
-      - timed: '0 3 * * *'
-
-- trigger:
-    name: 'hunter-functest-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-functest-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-functest-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-functest-queens'
-    triggers:
-      - timed: '0 3 * * *'
-
-- trigger:
-    name: 'fraser-functest-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'fraser-functest-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'fraser-functest-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-functest-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-functest-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-functest-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-functest-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-functest-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-functest-queens'
-    triggers:
-      - timed: ''
diff --git a/jjb/apex/apex.yaml.j2 b/jjb/apex/apex.yaml.j2
deleted file mode 100644 (file)
index 293aace..0000000
+++ /dev/null
@@ -1,1361 +0,0 @@
----
-- project:
-    name: 'apex'
-    project: 'apex'
-    jobs:
-      - 'apex-fetch-logs-{stream}'
-      - 'apex-runner-cperf-{stream}'
-      - 'apex-virtual-{stream}'
-      - 'apex-deploy-{platform}-{stream}'
-      - 'apex-daily-{stream}'
-      - 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
-      - 'apex-fdio-promote-daily-{stream}'
-      - 'apex-{scenario}-baremetal-{scenario_stream}'
-      - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
-      - 'apex-upload-snapshot'
-      - 'apex-create-snapshot'
-      - 'apex-fetch-snap-info'
-      - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
-    # stream:    branch with - in place of / (eg. stable-arno)
-    # branch:    branch (eg. stable/arno)
-    stream:
-      - master: &master
-          branch: 'master'
-          gs-pathname: ''
-          build-slave: 'apex-build-master'
-          virtual-slave: 'apex-virtual-master'
-          baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-nosdn-nofeature-noha'
-          scenario_stream: 'master'
-          disable_daily: true
-          disable_promote: true
-      - hunter: &hunter
-          branch: 'stable/hunter'
-          gs-pathname: '/hunter'
-          build-slave: 'apex-build-master'
-          virtual-slave: 'apex-virtual-master'
-          baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-nosdn-nofeature-ha'
-          scenario_stream: 'hunter'
-          disable_daily: true
-          disable_promote: true
-      - gambia: &gambia
-          branch: 'stable/gambia'
-          gs-pathname: '/gambia'
-          build-slave: 'apex-build-master'
-          virtual-slave: 'apex-virtual-master'
-          baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-nosdn-nofeature-ha'
-          scenario_stream: 'gambia'
-          disable_daily: true
-          disable_promote: true
-      - fraser: &fraser
-          branch: 'stable/fraser'
-          gs-pathname: '/fraser'
-          build-slave: 'apex-build-master'
-          virtual-slave: 'apex-virtual-master'
-          baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-nosdn-nofeature-ha'
-          scenario_stream: 'fraser'
-          disable_daily: true
-          disable_promote: true
-      - euphrates: &euphrates
-          branch: 'stable/euphrates'
-          gs-pathname: '/euphrates'
-          build-slave: 'apex-build-master'
-          virtual-slave: 'apex-virtual-master'
-          baremetal-slave: 'apex-baremetal-master'
-          verify-scenario: 'os-odl-nofeature-ha'
-          scenario_stream: 'euphrates'
-          disable_daily: true
-          disable_promote: true
-      - danube: &danube
-          branch: 'stable/danube'
-          gs-pathname: '/danube'
-          build-slave: 'apex-build-danube'
-          virtual-slave: 'apex-virtual-danube'
-          baremetal-slave: 'apex-baremetal-danube'
-          verify-scenario: 'os-odl_l3-nofeature-ha'
-          scenario_stream: 'danube'
-          disabled: true
-          disable_daily: true
-          disable_promote: true
-
-    scenario:
-      {%- for stream in scenarios %}
-      {%- for scenario in scenarios[stream] %}
-      - '{{scenario}}':
-          <<: *{{stream}}
-      {%- endfor %}
-      {%- endfor %}
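-      # Illustrative note, not from the original template: for each stream the
-      # loop above expands to one entry per listed scenario, e.g.
-      #   - 'os-nosdn-nofeature-ha':
-      #       <<: *master
-      # merging in that stream's anchored defaults.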
-
-    platform:
-      - 'baremetal'
-      - 'virtual'
-
-    os_version:
-      - 'queens':
-          os_scenario: 'nofeature'
-          odl_branch: 'stable/oxygen'
-      - 'rocky':
-          os_scenario: 'rocky'
-          odl_branch: 'stable/oxygen'
-      - 'master':
-          os_scenario: 'nofeature'
-          odl_branch: 'stable/fluorine'
-
-    topology:
-      - 'noha'
-      - 'ha'
-      - 'noha-allinone'
-
-    snap_type:
-      - csit:
-          sdn: 'odl'
-      - functest:
-          sdn: 'nosdn'
-# Fetch Logs Job
-- job-template:
-    name: 'apex-fetch-logs-{stream}'
-
-    concurrent: true
-
-    disabled: false
-    scm:
-      - git-scm-gerrit
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-    # yamllint enable rule:line-length
-    properties:
-      - logrotate-default
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - 'apex-fetch-logs'
-
-- job-template:
-    name: 'apex-runner-cperf-{stream}'
-
-    # runner cperf job
-    project-type: 'multijob'
-    node: 'intel-pod2'
-
-    disabled: false
-
-    parameters:
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-          description: "Used for overriding the GIT URL coming from parameters macro."
-
-    scm:
-      - git-scm
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: false
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-deploy.*'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Deployed on $NODE_NAME"
-      - multijob:
-          name: 'Baremetal Deploy'
-          condition: ALWAYS
-          projects:
-            - name: 'apex-deploy-baremetal-{stream}'
-              node-parameters: false
-              current-parameters: true
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-                OPNFV_CLEAN=yes
-                DEPLOY_SCENARIO={verify-scenario}
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: CPERF
-          condition: SUCCESSFUL
-          projects:
-            - name: 'cperf-apex-intel-pod2-daily-master'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO={verify-scenario}
-              kill-phase-on: NEVER
-              abort-all-job: false
-              git-revision: false
-
-# Deploy job
-- job-template:
-    name: 'apex-deploy-{platform}-{stream}'
-
-    concurrent: true
-
-    disabled: false
-    quiet-period: 30
-    scm:
-      - git-scm-gerrit
-
-    wrappers:
-      - timeout:
-          timeout: 140
-          fail: true
-
-    parameters:
-      - '{project}-{platform}-{stream}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{verify-scenario}'
-          description: "Scenario to deploy with."
-      # yamllint disable rule:line-length
-      - string:
-          name: OPNFV_CLEAN
-          default: 'no'
-          description: "Use yes in lower case to invoke clean. Indicates if the deploy environment should be cleaned before deployment"
-
-    # yamllint enable rule:line-length
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-deploy.*'
-            - 'functest.*'
-            - 'yardstick.*'
-            - 'dovetail.*'
-            - 'storperf.*'
-            - 'odl-netvirt.*'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - 'apex-download-artifact'
-      - 'apex-deploy'
-      - 'clean-workspace'
-
-
-# Virtual Deploy and Test
-- job-template:
-    name: 'apex-virtual-{stream}'
-
-    project-type: 'multijob'
-
-    concurrent: true
-
-    disabled: false
-
-    scm:
-      - git-scm-gerrit
-
-    parameters:
-      - '{project}-defaults'
-      - '{project}-virtual-{stream}-defaults'
-      - 'functest-suite-parameter'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{verify-scenario}'
-          description: "Scenario to deploy with."
-      - string:
-          name: ARTIFACT_VERSION
-          default: dev
-          description: "Used for overriding the ARTIFACT_VERSION"
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-runner.*'
-            - 'apex-run.*'
-            - 'apex-virtual-.*'
-            - 'apex-verify-gate-.*'
-            - 'odl-netvirt.*'
-            - 'apex-.*-promote.*'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - multijob:
-          name: deploy-virtual
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-deploy-virtual-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                ARTIFACT_VERSION=$ARTIFACT_VERSION
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-                PROMOTE=$PROMOTE
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - multijob:
-          name: functest-smoke
-          condition: ALWAYS
-          projects:
-            - name: 'functest-apex-virtual-suite-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                FUNCTEST_MODE=$FUNCTEST_MODE
-                FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: apex-fetch-logs
-          projects:
-            - name: 'apex-fetch-logs-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-
-# Baremetal Deploy and Test
-- job-template:
-    name: 'apex-{scenario}-baremetal-{scenario_stream}'
-
-    project-type: 'multijob'
-
-    disabled: false
-
-    scm:
-      - git-scm
-
-    parameters:
-      - '{project}-defaults'
-      - '{project}-baremetal-{scenario_stream}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{scenario}'
-          description: "Scenario to deploy with."
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-runner.*'
-            - 'apex-.*-promote.*'
-            - 'apex-run.*'
-            - 'apex-.+-baremetal-.+'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - multijob:
-          name: 'Baremetal Deploy'
-          condition: SUCCESSFUL
-          execution-type: SEQUENTIALLY
-          projects:
-            - name: 'apex-deploy-baremetal-{scenario_stream}'
-              node-parameters: true
-              current-parameters: true
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-fetch-logs-{scenario_stream}'
-              current-parameters: true
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-      - shell:
-          !include-raw-escape: ./apex-functest-scenario.sh
-      - inject:
-          properties-file: functest_scenario
-          override-build-parameters: true
-      - multijob:
-          name: 'OPNFV Test Suite'
-          condition: ALWAYS
-          projects:
-            - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                DOCKER_TAG=$DOCKER_TAG
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: apex-fetch-logs
-          projects:
-            - name: 'apex-fetch-logs-{scenario_stream}'
-              current-parameters: true
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-    publishers:
-      - groovy-postbuild:
-          script:
-            !include-raw-escape: ./update-build-result.groovy
-
-# Baremetal test job
-- job-template:
-    name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
-
-    project-type: 'multijob'
-
-    disabled: false
-
-    parameters:
-      - '{project}-defaults'
-      - '{project}-baremetal-{scenario_stream}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{scenario}'
-          description: "Scenario to deploy with."
-      - string:
-          name: DOCKER_TAG
-          default: ''
-          description: Default docker tag to pass to functest
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-runner.*'
-            - 'apex-run.*'
-            - 'apex-testsuite-.+-baremetal-.+'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - multijob:
-          name: Functest
-          condition: ALWAYS
-          projects:
-            - name: 'functest-apex-baremetal-daily-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                DOCKER_TAG=$DOCKER_TAG
-              kill-phase-on: NEVER
-              abort-all-job: false
-              git-revision: false
-      - multijob:
-          name: Yardstick
-          condition: ALWAYS
-          projects:
-            - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              abort-all-job: false
-              git-revision: false
-      - multijob:
-          name: Dovetail-default-mandatory
-          condition: ALWAYS
-          projects:
-            - name: 'dovetail-apex-baremetal-default-mandatory-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
-              abort-all-job: false
-              git-revision: false
-      - multijob:
-          name: Dovetail-default-optional
-          condition: ALWAYS
-          projects:
-            - name: 'dovetail-apex-baremetal-default-optional-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
-              abort-all-job: false
-              git-revision: false
-      - multijob:
-          name: Dovetail-proposed_tests
-          condition: ALWAYS
-          projects:
-            - name: 'dovetail-apex-baremetal-proposed_tests-optional-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
-              abort-all-job: false
-              git-revision: false
-      - multijob:
-          name: StorPerf
-          condition: ALWAYS
-          projects:
-            - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
-              kill-phase-on: NEVER
-              abort-all-job: false
-              git-revision: false
-# Build status is always success because the conditional-step plugin prefetches
-# the build status before the multijob phases execute
-#        - conditional-step:
-#            condition-kind: current-status
-#            condition-worst: SUCCESS
-#            condition-best: SUCCESS
-#            on-evaluation-failure: mark-unstable
-#            steps:
-#                - shell: 'echo "Tests Passed"'
-
-- job-template:
-    name: 'apex-daily-{stream}'
-
-    # Job template for daily build
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
-    project-type: 'multijob'
-
-    disabled: '{obj:disable_daily}'
-
-    scm:
-      - git-scm
-
-    parameters:
-      - '{project}-defaults'
-      - '{project}-baremetal-{stream}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-daily.*'
-
-    triggers:
-      - 'apex-{stream}'
-
-    builders:
-      - multijob:
-          name: build
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-build-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - multijob:
-          name: 'Verify and upload ISO'
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-verify-iso-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BUILD_DIRECTORY=$WORKSPACE/../apex-build-{stream}/.build
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - apex-builder-{stream}
-
-# snapshot info fetch
-- job-template:
-    name: 'apex-fetch-snap-info'
-
-    disabled: false
-
-    parameters:
-      - '{project}-defaults'
-
-    builders:
-      - shell:
-          !include-raw-escape: ./apex-fetch-snap-info.sh
-
-# snapshot create
-- job-template:
-    name: 'apex-create-snapshot'
-
-    disabled: false
-
-    parameters:
-      - '{project}-defaults'
-
-    builders:
-      - shell:
-          !include-raw-escape: ./apex-snapshot-create.sh
-
-# snapshot upload
-- job-template:
-    name: 'apex-upload-snapshot'
-
-    disabled: false
-
-    parameters:
-      - '{project}-defaults'
-
-    builders:
-      - inject:
-          properties-content: ARTIFACT_TYPE=snapshot
-      - 'apex-upload-artifact'
-
-# CSIT promote
-- job-template:
-    name: 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
-
-    # Job template for promoting CSIT Snapshots
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
-    node: '{virtual-slave}'
-    project-type: 'multijob'
-    disabled: '{disable_promote}'
-
-    scm:
-      - git-scm
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: ARTIFACT_VERSION
-          default: dev
-          description: "Used for overriding the ARTIFACT_VERSION"
-      - string:
-          name: PROMOTE
-          default: 'True'
-          description: "Used for overriding the PROMOTE"
-      - string:
-          name: GS_URL
-          default: 'artifacts.opnfv.org/apex/{os_version}/{topology}'
-          description: "User for overriding GS_URL from apex params"
-      - string:
-          name: OS_VERSION
-          default: '{os_version}'
-          description: OpenStack version short name
-      - string:
-          name: ODL_BRANCH
-          default: '{odl_branch}'
-          description: ODL branch being used
-      - string:
-          name: FORCE_PROMOTE
-          default: 'False'
-          description: "Used to force promotion and skip CSIT"
-      - string:
-          name: SNAP_TYPE
-          default: '{snap_type}'
-          description: Type of snapshot to promote
-    properties:
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-runner.*'
-            - 'apex-daily.*'
-            - 'apex-.*-promote.*'
-            - 'odl-netvirt.*'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    triggers:
-      - '{stream}-{snap_type}-{os_version}'
-
-    builders:
-      - multijob:
-          name: apex-virtual-deploy
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-deploy-virtual-{stream}'
-              current-parameters: true
-              predefined-parameters: |
-                DEPLOY_SCENARIO=os-{sdn}-{os_scenario}-{topology}-{snap_type}
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - multijob:
-          name: fetch snapshot info
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-fetch-snap-info'
-              current-parameters: true
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: test phase
-          condition: SUCCESSFUL
-          execution-type: SEQUENTIALLY
-          projects:
-            - name: cperf-apex-csit-master
-              predefined-parameters: |
-                ODL_BRANCH=$ODL_BRANCH
-                RC_FILE_PATH=/tmp/snap/overcloudrc
-                NODE_FILE_PATH=/tmp/snap/node.yaml
-                SSH_KEY_PATH=/tmp/snap/id_rsa
-                ODL_CONTAINERIZED=true
-                OS_VERSION=$OS_VERSION
-                SKIP_CSIT=$FORCE_PROMOTE
-                SNAP_TYPE=$SNAP_TYPE
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: false
-              enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
-            - name: cperf-upload-logs-csit
-              predefined-parameters: |
-                ODL_BRANCH=$ODL_BRANCH
-                OS_VERSION=$OS_VERSION
-                SNAP_TYPE=$SNAP_TYPE
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: false
-              enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
-            - name: 'functest-apex-virtual-suite-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-                DOCKER_TAG=$DOCKER_TAG
-                FUNCTEST_SUITE_NAME=tempest_smoke
-                FUNCTEST_MODE=testcase
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              enable-condition: "def m = '$SNAP_TYPE' ==~ /functest/"
-              abort-all-job: true
-              git-revision: false
-            - name: 'apex-fetch-logs-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: true
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: create snapshot
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-create-snapshot'
-              current-parameters: true
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: upload snapshot
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-upload-snapshot'
-              current-parameters: true
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-
-# FDIO promote
-- job-template:
-    name: 'apex-fdio-promote-daily-{stream}'
-
-    # Job template for promoting FDIO Snapshots
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
-    node: '{virtual-slave}'
-    project-type: 'multijob'
-    disabled: false
-
-    scm:
-      - git-scm
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-
-    properties:
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-deploy.*'
-            - 'apex-runner.*'
-            - 'apex-daily.*'
-
-    builders:
-      - multijob:
-          name: build
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-build-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: true
-      - multijob:
-          name: deploy-virtual
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-deploy-virtual-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-                PROMOTE=True
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: create snapshot
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-create-snapshot'
-              current-parameters: false
-              predefined-parameters: |
-                SNAP_TYPE=fdio
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: upload snapshot
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-upload-snapshot'
-              current-parameters: false
-              predefined-parameters: |
-                SNAP_TYPE=fdio
-              node-parameters: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-
-# Flex job
-- job-template:
-    name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
-
-    project-type: 'multijob'
-
-    disabled: true
-
-    node: 'flex-pod2'
-
-    scm:
-      - git-scm
-
-    triggers:
-      - 'apex-{stream}'
-
-    parameters:
-      - '{project}-defaults'
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - apex-parameter:
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-nosdn-nofeature-ha'
-          description: "Scenario to deploy with."
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-          description: 'Git URL to use on this Jenkins Slave'
-      - string:
-          name: SSH_KEY
-          default: /root/.ssh/id_rsa
-          description: 'SSH key to use for Apex'
-
-    properties:
-      - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-level: 'NODE'
-          blocking-jobs:
-            - 'apex-verify.*'
-            - 'apex-runner.*'
-            - 'apex-.*-promote.*'
-            - 'apex-run.*'
-            - 'apex-.+-baremetal-.+'
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - description-setter:
-          description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
-      - multijob:
-          name: 'Baremetal Deploy'
-          condition: SUCCESSFUL
-          projects:
-            - name: 'apex-deploy-baremetal-{stream}'
-              node-parameters: true
-              current-parameters: true
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-                GERRIT_BRANCH=$GERRIT_BRANCH
-                GERRIT_REFSPEC=
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: FAILURE
-              abort-all-job: true
-              git-revision: false
-      - multijob:
-          name: Yardstick
-          condition: ALWAYS
-          projects:
-            - name: 'yardstick-apex-baremetal-daily-{stream}'
-              node-parameters: true
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-              kill-phase-on: NEVER
-              abort-all-job: false
-              git-revision: false
-
-
-########################
-# parameter macros
-########################
-- parameter:
-    name: apex-parameter
-    parameters:
-      - string:
-          name: ARTIFACT_NAME
-          default: 'latest'
-          description: "RPM Artifact name that will be appended to GS_URL to deploy a specific artifact"
-      - string:
-          name: ARTIFACT_VERSION
-          default: 'daily'
-          description: "Artifact version type"
-      - string:
-          name: BUILD_DIRECTORY
-          default: $WORKSPACE/.build
-          description: "Directory where the build artifact will be located upon the completion of the build."
-      - string:
-          name: CACHE_DIRECTORY
-          default: $HOME/opnfv/apex-cache{gs-pathname}
-          description: "Directory where the cache to be used during the build is located."
-      # yamllint disable rule:line-length
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-          description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
-      # yamllint enable rule:line-length
-      - string:
-          name: GS_PATHNAME
-          default: '{gs-pathname}'
-          description: "Version directory where opnfv artifacts are stored in gs repository"
-      - string:
-          name: GS_URL
-          default: $GS_BASE{gs-pathname}
-          description: "URL to Google Storage."
-      - string:
-          name: PROMOTE
-          default: 'False'
-          description: "Flag to know if we should promote/upload snapshot artifacts."
-
-########################
-# builder macros
-########################
-{% for stream in scenarios %}
-# {{ stream }} Builder
-- builder:
-    name: apex-builder-{{ stream }}
-    builders:
-      - multijob:
-          name: Baremetal Deploy and Test Phase
-          condition: SUCCESSFUL
-          projects:
-{%- for scenario in scenarios[stream] %}
-            - name: 'apex-{{ scenario }}-baremetal-{{ stream }}'
-              node-parameters: false
-              current-parameters: false
-              predefined-parameters: |
-                OPNFV_CLEAN=yes
-              kill-phase-on: NEVER
-              abort-all-job: true
-              git-revision: false
-{%- endfor %}
-{% endfor -%}
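
The loop above is Jinja2, not plain JJB YAML: it stamps out one apex-builder-<stream> macro per stream of a scenarios mapping (the same shape as the scenarios.yaml.hidden data removed further down). A minimal rendering sketch follows, assuming only PyYAML and Jinja2; it is not the project's own renderer script and trims the template to its essentials.

import yaml
from jinja2 import Template

# Trimmed copy of the loop above; the real template carries more fields per project.
template_src = """\
{% for stream in scenarios %}
- builder:
    name: apex-builder-{{ stream }}
    builders:
      - multijob:
          name: Baremetal Deploy and Test Phase
          projects:
{%- for scenario in scenarios[stream] %}
            - name: 'apex-{{ scenario }}-baremetal-{{ stream }}'
{%- endfor %}
{% endfor -%}
"""

# Hypothetical two-scenario subset of the scenarios mapping.
scenarios = yaml.safe_load("""
master:
  - os-nosdn-nofeature-noha
  - os-nosdn-nofeature-ha
""")

# Prints the generated builder macro as YAML text.
print(Template(template_src).render(scenarios=scenarios))
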
-
-- builder:
-    name: 'apex-upload-artifact'
-    builders:
-      - shell:
-          !include-raw: ./apex-upload-artifact.sh
-
-- builder:
-    name: 'apex-download-artifact'
-    builders:
-      - shell:
-          !include-raw: ./apex-download-artifact.sh
-
-- builder:
-    name: 'apex-deploy'
-    builders:
-      - shell:
-          !include-raw: ./apex-deploy.sh
-
-- builder:
-    name: 'apex-fetch-logs'
-    builders:
-      - shell:
-          !include-raw: ./apex-fetch-logs.sh
-
-########################
-# trigger macros
-# 'timed' uses cron format: 'minute hour day-of-month month day-of-week'
-########################
-- trigger:
-    name: 'apex-master'
-    triggers:
-      - timed: '0 0 1-31/2 * *'
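
As a worked reading of the schedule just above, using the five-field cron format noted in the comment: '0 0 1-31/2 * *' fires at minute 0 of hour 0 on every odd day of the month. The sketch below leans on the third-party croniter library purely for illustration; it is not used by these jobs, and Jenkins' own cron dialect adds extensions (such as 'H' hashes) that plain croniter does not parse.

from datetime import datetime
from croniter import croniter  # assumption: the croniter package is installed

# '0 0 1-31/2 * *' -> midnight on days 1, 3, 5, ... of each month
schedule = croniter('0 0 1-31/2 * *', datetime(2020, 8, 19))
for _ in range(3):
    print(schedule.get_next(datetime))  # next three firing times
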
-
-- trigger:
-    name: 'apex-hunter'
-    triggers:
-      - timed: '0 4 2-30/2 * *'
-
-- trigger:
-    name: 'apex-gambia'
-    triggers:
-      - timed: '0 4 2-30/2 * *'
-
-- trigger:
-    name: 'apex-fraser'
-    triggers:
-      - timed: '0 0 2-30/2 * *'
-
-- trigger:
-    name: 'apex-euphrates'
-    triggers:
-      - timed: '0 0 2-30/2 * *'
-
-- trigger:
-    name: 'apex-danube'
-    triggers:
-      - timed: '0 3 1 1 7'
-
-- trigger:
-    name: 'master-csit-master'
-    triggers:
-      - timed: '0 5 * * *'
-
-- trigger:
-    name: 'master-csit-rocky'
-    triggers:
-      - timed: '0 5 * * *'
-
-- trigger:
-    name: 'master-csit-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'hunter-csit-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'hunter-csit-rocky'
-    triggers:
-      - timed: '0 5 * * *'
-
-- trigger:
-    name: 'hunter-csit-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-csit-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-csit-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-csit-queens'
-    triggers:
-      - timed: '0 5 * * *'
-
-- trigger:
-    name: 'fraser-csit-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'fraser-csit-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'fraser-csit-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-csit-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-csit-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-csit-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-csit-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-csit-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-csit-queens'
-    triggers:
-      - timed: ''
-- trigger:
-    name: 'master-functest-master'
-    triggers:
-      - timed: '0 3 * * *'
-
-- trigger:
-    name: 'master-functest-rocky'
-    triggers:
-      - timed: '0 3 * * *'
-
-- trigger:
-    name: 'master-functest-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'hunter-functest-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'hunter-functest-rocky'
-    triggers:
-      - timed: '0 3 * * *'
-
-- trigger:
-    name: 'hunter-functest-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-functest-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-functest-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'gambia-functest-queens'
-    triggers:
-      - timed: '0 3 * * *'
-
-- trigger:
-    name: 'fraser-functest-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'fraser-functest-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'fraser-functest-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-functest-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-functest-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'euphrates-functest-queens'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-functest-master'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-functest-rocky'
-    triggers:
-      - timed: ''
-
-- trigger:
-    name: 'danube-functest-queens'
-    triggers:
-      - timed: ''
diff --git a/jjb/apex/scenarios.yaml.hidden b/jjb/apex/scenarios.yaml.hidden
deleted file mode 100644 (file)
index d9d32b4..0000000
+++ /dev/null
@@ -1,68 +0,0 @@
-master:
-  - 'os-nosdn-nofeature-noha'
-  - 'os-nosdn-nofeature-ha'
-hunter:
-  - 'os-nosdn-nofeature-noha'
-  - 'os-nosdn-nofeature-ha'
-  - 'os-ovn-nofeature-ha'
-gambia:
-  - 'os-nosdn-nofeature-noha'
-  - 'os-nosdn-nofeature-ha'
-  - 'os-nosdn-nofeature-ha-ipv6'
-  - 'os-odl-nofeature-noha'
-  - 'os-odl-nofeature-ha'
-  - 'k8s-nosdn-nofeature-noha'
-  - 'os-odl-bgpvpn-ha'
-  - 'os-odl-bgpvpn-noha'
-  - 'os-odl-sfc-ha'
-  - 'os-odl-sfc-noha'
-  - 'os-nosdn-calipso-noha'
-  - 'os-ovn-nofeature-ha'
-fraser:
-  - 'os-nosdn-nofeature-ha'
-  - 'os-odl-bgpvpn-ha'
-euphrates:
-  - 'os-nosdn-nofeature-noha'
-  - 'os-nosdn-nofeature-ha'
-  - 'os-odl-nofeature-ha'
-  - 'os-odl-nofeature-noha'
-  - 'os-odl-bgpvpn-ha'
-  - 'os-ovn-nofeature-noha'
-  - 'os-nosdn-fdio-noha'
-  - 'os-nosdn-fdio-ha'
-  - 'os-nosdn-bar-ha'
-  - 'os-nosdn-bar-noha'
-  - 'os-nosdn-nofeature-ha-ipv6'
-  - 'os-nosdn-ovs_dpdk-noha'
-  - 'os-nosdn-ovs_dpdk-ha'
-  - 'os-nosdn-kvm_ovs_dpdk-noha'
-  - 'os-nosdn-kvm_ovs_dpdk-ha'
-  - 'os-odl-sfc-noha'
-  - 'os-odl-sfc-ha'
-danube:
-  - 'os-nosdn-nofeature-noha'
-  - 'os-nosdn-nofeature-ha'
-  - 'os-nosdn-nofeature-ha-ipv6'
-  - 'os-nosdn-ovs-noha'
-  - 'os-nosdn-ovs-ha'
-  - 'os-nosdn-fdio-noha'
-  - 'os-nosdn-fdio-ha'
-  - 'os-nosdn-kvm-ha'
-  - 'os-nosdn-kvm-noha'
-  - 'os-odl_l2-fdio-noha'
-  - 'os-odl_l2-fdio-ha'
-  - 'os-odl_netvirt-fdio-noha'
-  - 'os-odl_l2-sfc-noha'
-  - 'os-odl_l3-nofeature-noha'
-  - 'os-odl_l3-nofeature-ha'
-  - 'os-odl_l3-ovs-noha'
-  - 'os-odl_l3-ovs-ha'
-  - 'os-odl-bgpvpn-ha'
-  - 'os-odl-gluon-noha'
-  - 'os-odl_l3-fdio-noha'
-  - 'os-odl_l3-fdio-ha'
-  - 'os-odl_l3-fdio_dvr-noha'
-  - 'os-odl_l3-fdio_dvr-ha'
-  - 'os-odl_l3-csit-noha'
-  - 'os-onos-nofeature-ha'
-  - 'os-ovn-nofeature-noha'
diff --git a/jjb/apex/update-build-result.groovy b/jjb/apex/update-build-result.groovy
deleted file mode 100644 (file)
index 9edca6b..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-import hudson.model.*
-if (manager.logContains('^.*apex-deploy-baremetal.*SUCCESS$')
-      && manager.build.@result == hudson.model.Result.FAILURE) {
-    manager.build.@result = hudson.model.Result.UNSTABLE
-}
diff --git a/jjb/availability/availability-rtd-jobs.yaml b/jjb/availability/availability-rtd-jobs.yaml
deleted file mode 100644 (file)
index 8488ea7..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
----
-- project:
-    name: availability-rtd
-    project: availability
-    project-name: availability
-
-    gerrit-skip-vote: true
-    project-pattern: 'availability'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-availability/47351/'
-    rtd-token: '3ae514b14073e1eacb697d3eddee62a26c8c891c'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          disabled: false
-      - hunter: &hunter
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-      - gambia:
-          branch: 'stable/{stream}'
-          disabled: false
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/availability/availability.yaml b/jjb/availability/availability.yaml
deleted file mode 100644 (file)
index 2d34734..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
-    name: availability
-
-    project: '{name}'
-
-    jobs:
-      - '{project}-verify-basic'
diff --git a/jjb/bottlenecks/bottlenecks-cleanup.sh b/jjb/bottlenecks/bottlenecks-cleanup.sh
deleted file mode 100644 (file)
index d0e2088..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-# Clean up related Docker containers and their images
-bash $WORKSPACE/docker/docker_cleanup.sh -d bottlenecks --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d Bottlenecks --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d yardstick --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d kibana --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d elasticsearch --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d influxdb --debug
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yaml b/jjb/bottlenecks/bottlenecks-project-jobs.yaml
deleted file mode 100644 (file)
index 4561810..0000000
+++ /dev/null
@@ -1,222 +0,0 @@
----
-###################################################
-# Non-ci jobs for Bottlenecks project
-# They will only be enabled on request by projects!
-###################################################
-- project:
-    name: bottlenecks-project-jobs
-
-    project: 'bottlenecks'
-
-    jobs:
-      - 'bottlenecks-verify-{stream}'
-      - 'bottlenecks-merge-{stream}'
-      - 'bottlenecks-{suite}-upload-artifacts-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          # This is used for common project file storage
-          gs-pathname: ''
-          # This is used for different test suite dependent packages storage
-          gs-packagepath: '/{suite}'
-          disabled: false
-      - hunter: &hunter
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          gs-packagepath: '/{stream}/{suite}'
-          disabled: false
-
-    suite:
-      - 'posca_stress_traffic'
-      - 'posca_stress_ping'
-
-################################
-# job templates
-################################
-
-- job-template:
-    name: 'bottlenecks-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-
-    builders:
-      - bottlenecks-unit-tests
-
-- job-template:
-    name: 'bottlenecks-merge-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - change-merged-event
-            - comment-added-contains-event:
-                comment-contains-value: 'remerge'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-
-    builders:
-      - bottlenecks-hello
-
-- job-template:
-    name: 'bottlenecks-{suite}-upload-artifacts-{stream}'
-
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 1
-          max-per-node: 1
-          option: 'project'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-      - bottlenecks-parameter:
-          gs-packagepath: '{gs-packagepath}'
-
-    scm:
-      - git-scm
-
-    builders:
-      - 'bottlenecks-builder-upload-artifact'
-      - 'bottlenecks-artifact-workspace-cleanup'
-
-####################
-# parameter macros
-####################
-- parameter:
-    name: bottlenecks-parameter
-    parameters:
-      - string:
-          name: CACHE_DIR
-          default: $WORKSPACE/cache{gs-packagepath}
-          description: "the cache to store packages downloaded from public IP"
-      - string:
-          name: SUITE_URL
-          default: gs://artifacts.opnfv.org/bottlenecks{gs-packagepath}
-          description: "LF artifacts url for storage of bottlenecks packages"
-      - string:
-          name: PACKAGE_URL
-          default: http://205.177.226.237:9999/bottlenecks{gs-packagepath}/
-          description: "the url where we store the packages used for bottlenecks\
-            \ rubbos"
-
-####################################
-# builders for bottlenecks project
-####################################
-- builder:
-    name: bottlenecks-builder-upload-artifact
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-
-          echo "Bottlenecks: upload to artifacts from the public IP"
-
-          [[ -d $CACHE_DIR ]] || mkdir -p $CACHE_DIR
-
-          for file in $(curl -s $PACKAGE_URL |
-                             grep href |
-                             sed 's/.*href="//' |
-                             sed 's/".*//' |
-                             grep '^[a-zA-Z].*'); do
-               curl --connect-timeout 10 -o $CACHE_DIR/$file $PACKAGE_URL$file -v
-               echo "bottlenecks: copy file $CACHE_DIR/$file to $SUITE_URL"
-               gsutil cp $CACHE_DIR/$file $SUITE_URL
-          done
-
-- builder:
-    name: bottlenecks-artifact-workspace-cleanup
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-
-          echo "Bottlenecks: cleanup cache used for storage downloaded packages"
-
-          /bin/rm -rf $CACHE_DIR
-
-- builder:
-    name: bottlenecks-unit-tests
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o pipefail
-
-          echo "Running unit tests..."
-          cd $WORKSPACE
-          virtualenv $WORKSPACE/bottlenecks_venv
-          source $WORKSPACE/bottlenecks_venv/bin/activate
-
-          # install python packages
-          easy_install -U setuptools
-          easy_install -U pip
-          pip install -r $WORKSPACE/requirements/verify.txt
-
-          # unit tests
-          /bin/bash $WORKSPACE/verify.sh
-
-          deactivate
-
-- builder:
-    name: bottlenecks-hello
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-
-          echo -e "Wellcome to Bottlenecks! \nMerge event is planning to support more functions! "
diff --git a/jjb/bottlenecks/bottlenecks-rtd-jobs.yaml b/jjb/bottlenecks/bottlenecks-rtd-jobs.yaml
deleted file mode 100644 (file)
index 57ecd80..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
----
-- project:
-    name: bottlenecks-rtd
-    project: bottlenecks
-    project-name: bottlenecks
-
-    gerrit-skip-vote: true
-    project-pattern: 'bottlenecks'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-bottlenecks/47355/'
-    rtd-token: '95dd0dbdde4a219b5196ffb86e15401b7b927885'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          disabled: false
-      - hunter:
-          branch: 'stable/{stream}'
-          disabled: false
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
deleted file mode 100644 (file)
index 2c044ea..0000000
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#set -e
-[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-BOTTLENECKS_IMAGE=opnfv/bottlenecks
-REPORT="True"
-
-RELENG_REPO=${WORKSPACE}/releng
-[ -d ${RELENG_REPO} ] && rm -rf ${RELENG_REPO}
-git clone https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO} >${redirect}
-
-YARDSTICK_REPO=${WORKSPACE}/yardstick
-[ -d ${YARDSTICK_REPO} ] && rm -rf ${YARDSTICK_REPO}
-git clone https://gerrit.opnfv.org/gerrit/yardstick ${YARDSTICK_REPO} >${redirect}
-
-OPENRC=/tmp/admin_rc.sh
-OS_CACERT=/tmp/os_cacert
-
-BOTTLENECKS_CONFIG=/tmp
-KUBESTONE_TEST_DIR=/home/opnfv/bottlenecks/testsuites/kubestone/testcases
-
-# Pull the Bottlenecks Docker image and pass environment variables
-echo "INFO: pulling Bottlenecks Docker image ${DOCKER_TAG}"
-docker pull opnfv/bottlenecks:${DOCKER_TAG} >$redirect
-
-opts="--privileged=true -id"
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-      -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
-      -e BRANCH=${BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
-      -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL} \
-      -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e BUILD_TAG=${BUILD_TAG}"
-docker_volume="-v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp"
-
-cmd="docker run ${opts} ${envs} --name bottlenecks-load-master ${docker_volume} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
-echo "BOTTLENECKS INFO: running docker run commond: ${cmd}"
-${cmd} >$redirect
-sleep 5
-
-# Run test suite
-if [[ $SUITE_NAME == *posca* ]]; then
-    POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
-    sudo rm -f ${OPENRC}
-
-    if [[ -f ${OPENRC} ]]; then
-        echo "BOTTLENECKS INFO: openstack credentials path is ${OPENRC}"
-        cat ${OPENRC}
-    else
-        echo "BOTTLENECKS ERROR: couldn't find openstack rc file: ${OPENRC}, please check if the it's been properly provided."
-        exit 1
-    fi
-
-    # Find and create POD description files from the different deployments
-    ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-
-    if [ "$INSTALLER_TYPE" == "fuel" ]; then
-        echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
-        sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${BOTTLENECKS_CONFIG}/id_rsa
-    fi
-
-    if [ "$INSTALLER_TYPE" == "apex" ]; then
-        echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
-        sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${BOTTLENECKS_CONFIG}/id_rsa
-    fi
-
-    set +e
-
-    sudo -H pip install virtualenv
-
-    cd ${RELENG_REPO}/modules
-    sudo virtualenv venv
-    source venv/bin/activate
-    sudo -H pip install -e ./ >/dev/null
-    sudo -H pip install netaddr
-
-    if [[ ${INSTALLER_TYPE} == fuel ]]; then
-        options="-u root -p r00tme"
-    elif [[ ${INSTALLER_TYPE} == apex ]]; then
-        options="-u stack -k /root/.ssh/id_rsa"
-    else
-        echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently."
-    fi
-
-    deactivate
-
-    sudo rm -rf ${RELENG_REPO}/modules/venv
-    sudo rm -rf ${RELENG_REPO}/modules/opnfv.egg-info
-
-    set -e
-
-    cd ${WORKSPACE}
-
-    if [ -f ${BOTTLENECKS_CONFIG}/pod.yaml ]; then
-        echo "FILE: ${BOTTLENECKS_CONFIG}/pod.yaml:"
-        cat ${BOTTLENECKS_CONFIG}/pod.yaml
-    else
-        echo "ERROR: cannot find file ${BOTTLENECKS_CONFIG}/pod.yaml. Please check if it is existing."
-        sudo ls -al ${BOTTLENECKS_CONFIG}
-    fi
-
-    # Running test cases through Bottlenecks docker
-    if [[ $SUITE_NAME == posca_stress_traffic ]]; then
-        TEST_CASE=posca_factor_system_bandwidth
-    elif [[ $SUITE_NAME == posca_stress_ping ]]; then
-        TEST_CASE=posca_factor_ping
-    else
-        TEST_CASE=$SUITE_NAME
-    fi
-    testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
-    echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
-    ${testcase_cmd} >$redirect
-elif [[ $SUITE_NAME == *kubestone* ]]; then
-    if [[ $SUITE_NAME == kubestone_deployment_capacity ]]; then
-        TEST_CASE=${KUBESTONE_TEST_DIR}/deployment_capacity.yaml
-    fi
-    testcase_cmd="docker exec bottlenecks-load-master python ${KUBESTONE_TEST_DIR}/../stress_test.py -c $TEST_CASE"
-    echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
-    ${testcase_cmd} >$redirect
-fi
similarity index 52%
rename from jjb/ovno/ovno-views.yaml
rename to jjb/cirv/cirv-views.yaml
index 54633cc..17d4e87 100644 (file)
@@ -1,6 +1,6 @@
 ---
 - project:
-    name: ovno-view
+    name: cirv-view
     views:
       - project-view
-    project-name: ovno
+    project-name: cirv
diff --git a/jjb/cirv/cirv.yaml b/jjb/cirv/cirv.yaml
new file mode 100644 (file)
index 0000000..e628f43
--- /dev/null
@@ -0,0 +1,71 @@
+---
+- cirv-project-params: &cirv-project-params
+    name: 'cirv-project-params'
+    tag:
+      - latest:
+          branch: master
+          slave: lf-build2
+
+- builder:
+    name: cirv-run-tox
+    builders:
+      - shell: tox
+
+- trigger:
+    name: cirv-project-patchset-created
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - patchset-created-event
+            - comment-added-contains-event:
+                comment-contains-value: 'recheck'
+            - comment-added-contains-event:
+                comment-contains-value: 'reverify'
+          projects:
+            - project-compare-type: 'ANT'
+              project-pattern: 'cirv'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+          skip-vote:
+            successful: false
+            failed: false
+            unstable: false
+            notbuilt: false
+
+- parameter:
+    name: cirv-project-slave
+    parameters:
+      - label:
+          name: slave
+          default: '{slave}'
+
+- scm:
+    name: cirv-project-scm
+    scm:
+      - git:
+          url: https://gerrit.opnfv.org/gerrit/cirv
+          refspec: '+refs/changes/*:refs/changes/*'
+          branches:
+            - '{ref}'
+
+- job-template:
+    name: 'cirv-run-tox-{tag}'
+    triggers:
+      - cirv-project-patchset-created:
+          branch: '{branch}'
+    scm:
+      - cirv-project-scm:
+          ref: $GERRIT_REFSPEC
+    parameters:
+      - cirv-project-slave:
+          slave: '{slave}'
+    builders:
+      - cirv-run-tox
+
+- project:
+    name: 'cirv-run-tox'
+    <<: *cirv-project-params
+    jobs:
+      - 'cirv-run-tox-{tag}'
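
The new cirv.yaml relies on standard JJB expansion: the 'tag' axis in the cirv-project-params anchor fills the '{tag}', '{branch}' and '{slave}' placeholders of the 'cirv-run-tox-{tag}' template, so the project yields one concrete job for the latest/master combination. A tiny illustration of that name expansion with plain Python string formatting (standing in for what jenkins-job-builder does internally; the generated jobs can also be previewed locally with its 'jenkins-jobs test' subcommand, if the tool is installed):

# Hypothetical axis values copied from the cirv-project-params anchor above.
params = {'tag': 'latest', 'branch': 'master', 'slave': 'lf-build2'}
print('cirv-run-tox-{tag}'.format(**params))  # -> cirv-run-tox-latest
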
diff --git a/jjb/cperf/cirros-upload.yaml.ansible b/jjb/cperf/cirros-upload.yaml.ansible
deleted file mode 100644 (file)
index 855bb1f..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
----
-- hosts: all
-  tasks:
-    - copy:
-        src: "{{ lookup('env', 'WORKSPACE') }}/{{ item }}"
-        dest: "/home/heat-admin/{{ item }}"
-        owner: heat-admin
-        group: heat-admin
-        mode: 0775
-      with_items:
-        - cirros-0.3.5-x86_64-disk.img
-        - overcloudrc
-    - name: Upload cirros glance image
-      shell: >
-        source /home/heat-admin/overcloudrc && openstack image create
-        cirros-0.3.5-x86_64-disk --public
-        --file /home/heat-admin/cirros-0.3.5-x86_64-disk.img
-        --disk-format qcow2 --container-format bare
-    - name: Create nano flavor
-      shell: >
-        source /home/heat-admin/overcloudrc && openstack flavor create
-        --id 42 --ram 64 --disk 0 --vcpus 1 m1.nano
-    - name: Open CSIT TCP port for netcat
-      iptables:
-        chain: INPUT
-        action: insert
-        protocol: tcp
-        destination_port: 12345
-        jump: ACCEPT
-      become: yes
-    - name: Open CSIT UDP port for netcat
-      iptables:
-        chain: INPUT
-        action: insert
-        protocol: udp
-        destination_port: 12345
-        jump: ACCEPT
-      become: yes
-
diff --git a/jjb/cperf/cperf-ci-jobs.yaml b/jjb/cperf/cperf-ci-jobs.yaml
deleted file mode 100644 (file)
index 61bdebd..0000000
+++ /dev/null
@@ -1,210 +0,0 @@
----
-###################################
-# job configuration for cperf
-###################################
-- project:
-    name: cperf-ci-jobs
-    project: cperf
-
-    # -------------------------------
-    # BRANCH ANCHORS
-    # -------------------------------
-    stream: master
-    branch: '{stream}'
-    gs-pathname: ''
-    docker-tag: 'latest'
-
-    installer: apex
-
-    testsuite:
-      - csit
-      - cbench
-
-    jobs:
-      - 'cperf-{installer}-{testsuite}-{stream}'
-      - 'cperf-upload-logs-csit'
-
-################################
-# job template
-################################
-- job-template:
-    name: 'cperf-{installer}-{testsuite}-{stream}'
-
-    concurrent: true
-
-    properties:
-      - throttle:
-          enabled: true
-          max-per-node: 1
-          option: 'project'
-
-    wrappers:
-      - build-name:
-          name: '$BUILD_NUMBER Suite: $CPERF_SUITE_NAME ODL BRANCH: $ODL_BRANCH'
-      - timeout:
-          timeout: 400
-          abort: true
-
-    parameters:
-      - cperf-parameter:
-          testsuite: '{testsuite}'
-          gs-pathname: '{gs-pathname}'
-          docker-tag: '{docker-tag}'
-          stream: '{stream}'
-
-    builders:
-      - 'cperf-{testsuite}-builder'
-
-- job-template:
-    name: 'cperf-upload-logs-csit'
-
-    concurrent: true
-
-    disabled: false
-
-    parameters:
-      - cperf-parameter:
-          testsuite: 'csit'
-          gs-pathname: '{gs-pathname}'
-          docker-tag: '{docker-tag}'
-          stream: '{stream}'
-
-    # yamllint enable rule:line-length
-    properties:
-      - logrotate-default
-      - throttle:
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    builders:
-      - 'cperf-upload-logs-csit'
-
-########################
-# parameter macros
-########################
-- parameter:
-    name: cperf-parameter
-    parameters:
-      - string:
-          name: CPERF_SUITE_NAME
-          default: '{testsuite}'
-          description: "Suite name to run"
-      - string:
-          name: ODL_BRANCH
-          default: 'master'
-          description: "Branch that OpenDaylight is running"
-      - string:
-          name: OS_VERSION
-          default: 'master'
-          description: "OpenStack version (short name, no stable/ prefix)"
-      - string:
-          name: GS_PATHNAME
-          default: '{gs-pathname}'
-          description: "Version directory where the opnfv documents will be stored in gs repository"
-      - string:
-          name: CI_DEBUG
-          default: 'false'
-          description: "Show debug output information"
-      - string:
-          name: DOCKER_TAG
-          default: '{docker-tag}'
-          description: 'Tag to pull docker image'
-      - string:
-          name: RC_FILE_PATH
-          default: ''
-          description: "Path to the OS credentials file if given"
-      - string:
-          name: SSH_KEY_PATH
-          default: ''
-          description: "Path to the private SSH key to access OPNFV nodes"
-      - string:
-          name: NODE_FILE_PATH
-          default: ''
-          description: "Path to the yaml file describing overcloud nodes"
-      - string:
-          name: ODL_CONTAINERIZED
-          default: 'true'
-          description: "boolean set true if ODL on overcloud is a container"
-
-########################
-# trigger macros
-########################
-
-########################
-# builder macros
-########################
-- builder:
-    name: cperf-csit-builder
-    builders:
-      - 'cperf-cleanup'
-      - 'cperf-prepare-robot'
-      - 'cperf-robot-netvirt-csit'
-
-- builder:
-    name: cperf-cbench-builder
-    builders:
-      - 'cperf-cleanup'
-      - 'cperf-prepare-robot'
-      - 'cperf-robot-cbench'
-
-- builder:
-    name: cperf-prepare-robot
-    builders:
-      - shell:
-          !include-raw: ./cperf-prepare-robot.sh
-
-- builder:
-    name: cperf-robot-cbench
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o nounset
-          set -o pipefail
-
-          # cbench requires the openflow drop test feature to be installed.
-          sshpass -p karaf ssh -o StrictHostKeyChecking=no \
-                               -o UserKnownHostsFile=/dev/null \
-                               -o LogLevel=error \
-                               -p 8101 karaf@$SDN_CONTROLLER_IP \
-                                feature:install odl-openflowplugin-flow-services-ui odl-openflowplugin-drop-test
-
-          robot_cmd="pybot -e exclude -L TRACE -d /tmp \
-                      -v ODL_SYSTEM_1_IP:${SDN_CONTROLLER_IP} \
-                      -v ODL_SYSTEM_IP:${SDN_CONTROLLER_IP} \
-                      -v BUNDLEFOLDER:/opt/opendaylight \
-                      -v RESTCONFPORT:8081 \
-                      -v USER_HOME:/tmp \
-                      -v USER:heat-admin \
-                      -v ODL_SYSTEM_USER:heat-admin \
-                      -v TOOLS_SYSTEM_IP:localhost \
-                      -v of_port:6653"
-          robot_suite="/home/opnfv/repos/odl_test/csit/suites/openflowplugin/Performance/010_Cbench.robot"
-
-          docker run -i -v /tmp:/tmp opnfv/cperf:$DOCKER_TAG ${robot_cmd} ${robot_suite}
-
-- builder:
-    name: cperf-robot-netvirt-csit
-    builders:
-      - shell:
-          !include-raw: ./cperf-robot-netvirt-csit.sh
-
-- builder:
-    name: cperf-cleanup
-    builders:
-      - shell: |
-          #!/bin/bash
-          [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-          echo "Cleaning up docker containers/images..."
-          # Remove previous running containers if exist
-          if [[ ! -z $(docker ps -a | grep opnfv/cperf) ]]; then
-              echo "Removing existing opnfv/cperf containers..."
-              docker ps -a | grep opnfv/cperf | awk '{print $1}' | xargs docker rm -f >${redirect}
-          fi
-
-- builder:
-    name: cperf-upload-logs-csit
-    builders:
-      - shell: !include-raw: ./cperf-upload-logs-csit.sh
diff --git a/jjb/cperf/cperf-prepare-robot.sh b/jjb/cperf/cperf-prepare-robot.sh
deleted file mode 100755 (executable)
index d88c6d5..0000000
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [ -z ${RC_FILE_PATH+x} ]; then
-  undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
-                   grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
-  INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
-  sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/overcloudrc /tmp/overcloudrc
-else
-  cp -f $RC_FILE_PATH ${WORKSPACE}/overcloudrc
-fi
-
-sudo chmod 755 ${WORKSPACE}/overcloudrc
-source ${WORKSPACE}/overcloudrc
-
-# copy ssh key for robot
-
-if [ -z ${SSH_KEY_PATH+x} ]; then
-  sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/.ssh/id_rsa ${WORKSPACE}/
-  sudo chown -R jenkins-ci:jenkins-ci ${WORKSPACE}/
-  # done with sudo. jenkins-ci is the user from this point
-  chmod 0600 ${WORKSPACE}/id_rsa
-else
-  cp -f ${SSH_KEY_PATH} ${WORKSPACE}/
-fi
-
-docker pull opnfv/cperf:$DOCKER_TAG
-
-sudo mkdir -p /tmp/robot_results
diff --git a/jjb/cperf/cperf-robot-netvirt-csit.sh b/jjb/cperf/cperf-robot-netvirt-csit.sh
deleted file mode 100755 (executable)
index eac1316..0000000
+++ /dev/null
@@ -1,186 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [[ ! -z ${SKIP_CSIT+x} && "$SKIP_CSIT" == "True" ]]; then
-  echo "Skipping csit run"
-  exit 0
-fi
-
-if [ "$OS_VERSION" == 'master' ]; then
-  FULL_OS_VER='master'
-else
-  FULL_OS_VER="stable/${OS_VERSION}"
-fi
-
-if [ "$ODL_BRANCH" == 'master' ]; then
-  ODL_STREAM='neon'
-else
-  ODL_STREAM=${ODL_BRANCH#"stable/"}
-fi
-
-echo "ODL Stream set: ${ODL_STREAM} and OS Version is ${FULL_OS_VER}"
-
-sudo rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng.git
-REL_PATH='releng/jjb/cperf'
-
-# NOTE: sourcing overcloudrc unsets any variable with OS_ prefix
-source ${WORKSPACE}/overcloudrc
-# note: SDN_CONTROLLER_IP is set in overcloudrc and is the VIP
-# for the admin/public network (since we are running a single-network deployment)
-
-NUM_CONTROL_NODES=$(python ${REL_PATH}/parse-node-yaml.py num_nodes --file $NODE_FILE_PATH)
-NUM_COMPUTE_NODES=$(python ${REL_PATH}/parse-node-yaml.py num_nodes --node-type compute --file $NODE_FILE_PATH)
-
-echo "Number of Control nodes found: ${NUM_CONTROL_NODES}"
-echo "Number of Compute nodes found: ${NUM_COMPUTE_NODES}"
-
-# Only 1 combo or ctrl node is specified, even for OS HA deployments
-# Currently supported combinations are:
-# 0cmb-1ctl-2cmp
-# 1cmb-0ctl-0cmp
-# 1cmb-0ctl-1cmp
-if [ "$NUM_COMPUTE_NODES" -eq 0 ]; then
-  OPENSTACK_TOPO="1cmb-0ctl-0cmp"
-else
-  OPENSTACK_TOPO="0cmb-1ctl-2cmp"
-fi
-
-idx=1
-EXTRA_ROBOT_ARGS=""
-for idx in `seq 1 $NUM_CONTROL_NODES`; do
-  CONTROLLER_IP=$(python ${REL_PATH}/parse-node-yaml.py get_value -k address --node-number ${idx} --file $NODE_FILE_PATH)
-  EXTRA_ROBOT_ARGS+=" -v ODL_SYSTEM_${idx}_IP:${CONTROLLER_IP} \
-                      -v OS_CONTROL_NODE_${idx}_IP:${CONTROLLER_IP} \
-                      -v ODL_SYSTEM_${idx}_IP:${CONTROLLER_IP} \
-                      -v HA_PROXY_${idx}_IP:${SDN_CONTROLLER_IP}"
-done
-
-# In all-in-one deployments these Compute IPs still need to be passed to robot
-if [ "$NUM_COMPUTE_NODES" -eq 0 ]; then
-  EXTRA_ROBOT_ARGS+=" -v OS_COMPUTE_1_IP:'' -v OS_COMPUTE_2_IP:''"
-else
-  idx=1
-  for idx in `seq 1 $NUM_COMPUTE_NODES`; do
-    COMPUTE_IP=$(python ${REL_PATH}/parse-node-yaml.py get_value -k address --node-type compute --node-number ${idx} --file $NODE_FILE_PATH)
-    EXTRA_ROBOT_ARGS+=" -v OS_COMPUTE_${idx}_IP:${COMPUTE_IP}"
-  done
-fi
-
-CONTROLLER_1_IP=$(python ${REL_PATH}/parse-node-yaml.py get_value -k address --node-number 1 --file $NODE_FILE_PATH)
-
-if [ "$ODL_CONTAINERIZED" == 'false' ]; then
-  EXTRA_ROBOT_ARGS+=" -v NODE_KARAF_COUNT_COMMAND:'ps axf | grep org.apache.karaf | grep -v grep | wc -l || echo 0' \
-                      -v NODE_START_COMMAND:'sudo systemctl start opendaylight_api' \
-                      -v NODE_KILL_COMMAND:'sudo systemctl stop opendaylight_api' \
-                      -v NODE_STOP_COMMAND:'sudo systemctl stop opendaylight_api' \
-                      -v NODE_FREEZE_COMMAND:'sudo systemctl stop opendaylight_api' "
-else
-  EXTRA_ROBOT_ARGS+=" -v NODE_KARAF_COUNT_COMMAND:'sudo docker ps | grep opendaylight_api | wc -l || echo 0' \
-                      -v NODE_START_COMMAND:'sudo docker start opendaylight_api' \
-                      -v NODE_KILL_COMMAND:'sudo docker stop opendaylight_api' \
-                      -v NODE_STOP_COMMAND:'sudo docker stop opendaylight_api' \
-                      -v NODE_FREEZE_COMMAND:'sudo docker stop opendaylight_api' "
-fi
-
-# FIXME(trozet) remove this once it is fixed in csit
-# Upload glance image into openstack
-wget -O ${WORKSPACE}/cirros-0.3.5-x86_64-disk.img http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img
-export ANSIBLE_HOST_KEY_CHECKING=False
-ansible-playbook -i ${CONTROLLER_1_IP}, -u heat-admin --key-file ${WORKSPACE}/id_rsa ${REL_PATH}/cirros-upload.yaml.ansible --ssh-extra-args='-o StrictHostKeyChecking=no  -o UserKnownHostsFile=/dev/null' -vvv
-
-LOGS_LOCATION=/tmp/robot_results
-
-robot_cmd="pybot \
-  --removekeywords wuks \
-  --xunit robotxunit.xml \
-  --name 'CSIT' \
-  -e exclude \
-  -d $LOGS_LOCATION \
-  -v BUNDLEFOLDER:/opt/opendaylight \
-  -v CONTROLLER_USER:heat-admin \
-  -v CMP_INSTANCES_SHARED_PATH:/var/lib/nova/instances/ \
-  -v DEFAULT_LINUX_PROMPT:\$ \
-  -v DEFAULT_LINUX_PROMPT_STRICT:]\$ \
-  -v DEFAULT_USER:heat-admin \
-  -v DEVSTACK_DEPLOY_PATH:/tmp \
-  -v EXTERNAL_GATEWAY:$CONTROLLER_1_IP \
-  -v EXTERNAL_PNF:$CONTROLLER_1_IP \
-  -v EXTERNAL_SUBNET:192.0.2.0/24 \
-  -v EXTERNAL_SUBNET_ALLOCATION_POOL:start=192.0.2.100,end=192.0.2.200 \
-  -v EXTERNAL_INTERNET_ADDR:$CONTROLLER_1_IP  \
-  -v HA_PROXY_IP:$SDN_CONTROLLER_IP \
-  -v NUM_ODL_SYSTEM:$NUM_CONTROL_NODES \
-  -v NUM_OS_SYSTEM:$(($NUM_CONTROL_NODES + $NUM_COMPUTE_NODES)) \
-  -v NUM_TOOLS_SYSTEM:0 \
-  -v ODL_SNAT_MODE:conntrack \
-  -v ODL_STREAM:$ODL_STREAM \
-  -v ODL_SYSTEM_IP:$CONTROLLER_1_IP \
-  -v OS_CONTROL_NODE_IP:$CONTROLLER_1_IP \
-  -v OPENSTACK_BRANCH:$FULL_OS_VER \
-  -v OPENSTACK_TOPO:$OPENSTACK_TOPO \
-  -v OS_USER:heat-admin \
-  -v ODL_ENABLE_L3_FWD:yes \
-  -v ODL_SYSTEM_USER:heat-admin \
-  -v ODL_SYSTEM_PROMPT:\$ \
-  -v PRE_CLEAN_OPENSTACK_ALL:True \
-  -v PUBLIC_PHYSICAL_NETWORK:datacentre \
-  -v RESTCONFPORT:8081 \
-  -v ODL_RESTCONF_USER:admin \
-  -v ODL_RESTCONF_PASSWORD:$SDN_CONTROLLER_PASSWORD \
-  -v KARAF_PROMPT_LOGIN:'opendaylight-user' \
-  -v KARAF_PROMPT:'opendaylight-user.*root.*>' \
-  -v SECURITY_GROUP_MODE:stateful \
-  -v USER:heat-admin \
-  -v USER_HOME:\$HOME \
-  -v TOOLS_SYSTEM_IP:'' \
-  -v NODE_ROLE_INDEX_START:0 \
-  -v WORKSPACE:/tmp  \
-  $EXTRA_ROBOT_ARGS \
-  -v of_port:6653 "
-
-SUITE_HOME='/home/opnfv/repos/odl_test/csit/suites'
-
-# Disabled suites
-#
-# ${SUITE_HOME}/netvirt/vpnservice/vpn_basic_ipv6.robot
-# This suite fails with an error indicating the connection was closed
-# to the overcloud control node:
-# https://build.opnfv.org/ci/job/cperf-apex-csit-master/104/consoleFull
-#
-# Minimize HA CSIT as it does not pass all suites
-if [ "$NUM_CONTROL_NODES" -eq 3 ]; then
-  suites="${SUITE_HOME}/openstack/connectivity/l2.robot \
-          ${SUITE_HOME}/openstack/connectivity/l3.robot"
-else
-  suites="${SUITE_HOME}/openstack/connectivity/l2.robot \
-          ${SUITE_HOME}/openstack/connectivity/l3.robot \
-          ${SUITE_HOME}/openstack/connectivity/live_migration.robot \
-          ${SUITE_HOME}/openstack/connectivity/external_network.robot \
-          ${SUITE_HOME}/openstack/connectivity/security_group.robot \
-          ${SUITE_HOME}/openstack/securitygroup/neutron_security_group.robot \
-          ${SUITE_HOME}/openstack/securitygroup/security_group_l3bcast.robot \
-          ${SUITE_HOME}/netvirt/vpnservice/vpn_basic.robot \
-          ${SUITE_HOME}/netvirt/elan/elan.robot \
-          ${SUITE_HOME}/netvirt/vpnservice/arp_learning.robot \
-          ${SUITE_HOME}/netvirt/l2l3_gatewaymac_arp.robot \
-          ${SUITE_HOME}/integration/Create_JVM_Plots.robot"
-fi
-
-echo "Robot command set: ${robot_cmd}"
-echo "Running robot..."
-docker run -i --net=host \
-  -v ${LOGS_LOCATION}:${LOGS_LOCATION} \
-  -v ${WORKSPACE}/id_rsa:/tmp/id_rsa \
-  -v ${WORKSPACE}/overcloudrc:/tmp/overcloudrc \
-  opnfv/cperf:$DOCKER_TAG \
-  /bin/bash -c "source /tmp/overcloudrc; mkdir -p \$HOME/.ssh; cp /tmp/id_rsa \$HOME/.ssh; \
-  cd /home/opnfv/repos/odl_test/ && git pull origin master; \
-  pip install odltools; \
-  ${robot_cmd} ${suites};"
-
-echo "Running post CSIT clean"
-ansible-playbook -i ${CONTROLLER_1_IP}, -u heat-admin --key-file ${WORKSPACE}/id_rsa ${REL_PATH}/csit-clean.yaml.ansible --ssh-extra-args='-o StrictHostKeyChecking=no  -o UserKnownHostsFile=/dev/null' -vvv
diff --git a/jjb/cperf/cperf-upload-logs-csit.sh b/jjb/cperf/cperf-upload-logs-csit.sh
deleted file mode 100644 (file)
index bd86804..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-ODL_STREAM=${ODL_BRANCH#"stable/"}
-
-LOGS_LOCATION=/tmp/robot_results
-UPLOAD_LOCATION=artifacts.opnfv.org/cperf/cperf-apex-csit-${ODL_STREAM}/${BUILD_NUMBER}/
-echo "Uploading robot logs to ${UPLOAD_LOCATION}"
-gsutil -m cp -r -v ${LOGS_LOCATION} gs://${UPLOAD_LOCATION} > gsutil.latest_logs.log
diff --git a/jjb/cperf/cperf-views.yaml b/jjb/cperf/cperf-views.yaml
deleted file mode 100644 (file)
index ef982e8..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
-    name: cperf-view
-    views:
-      - project-view
-    project-name: cperf
diff --git a/jjb/cperf/csit-clean.yaml.ansible b/jjb/cperf/csit-clean.yaml.ansible
deleted file mode 100644 (file)
index 0151dd8..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
----
-- hosts: all
-  tasks:
-    - name: Delete cirros glance image
-      shell: >
-        source /home/heat-admin/overcloudrc && openstack image delete
-        cirros-0.3.5-x86_64-disk
-    - name: Delete nano flavor
-      shell: >
-        source /home/heat-admin/overcloudrc && openstack flavor delete
-        m1.nano
diff --git a/jjb/cperf/parse-node-yaml.py b/jjb/cperf/parse-node-yaml.py
deleted file mode 100644 (file)
index 5a75755..0000000
+++ /dev/null
@@ -1,71 +0,0 @@
-##############################################################################
-# Copyright (c) 2018 Tim Rozet (trozet@redhat.com) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import argparse
-import sys
-import yaml
-
-
-def get_node_data_by_number(node_type, node_number):
-    node_idx = 1
-    for node_name, node_data in data['servers'].items():
-        if node_type == node_data['type']:
-            if node_idx == node_number:
-                return node_name, node_data
-            else:
-                node_idx += 1
-
-
-def get_node_value(node_type, node_number, key):
-    node_name, node_data = get_node_data_by_number(node_type, node_number)
-    if not key and node_name is not None:
-        return node_name
-    elif node_data and isinstance(node_data, dict) and key in node_data:
-        return node_data[key]
-
-
-def get_number_of_nodes(node_type):
-    nodes = data['servers']
-    num_nodes = 0
-    for node_name, node_data in nodes.items():
-        if node_data['type'] == node_type:
-            num_nodes += 1
-    return num_nodes
-
-
-FUNCTION_MAP = {'num_nodes':
-                {'func': get_number_of_nodes,
-                 'args': ['node_type']},
-                'get_value':
-                    {'func': get_node_value,
-                     'args': ['node_type', 'node_number', 'key']},
-                }
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser()
-    parser.add_argument('command', choices=FUNCTION_MAP.keys())
-    parser.add_argument('-f', '--file',
-                        dest='node_file',
-                        required=True)
-    parser.add_argument('--node-type',
-                        default='controller',
-                        required=False)
-    parser.add_argument('--node-number',
-                        default=1,
-                        type=int,
-                        required=False)
-    parser.add_argument('-k', '--key',
-                        required=False)
-    args = parser.parse_args(sys.argv[1:])
-    with open(args.node_file, 'r') as fh:
-        data = yaml.safe_load(fh)
-    assert 'servers' in data
-    func = FUNCTION_MAP[args.command]['func']
-    args = [getattr(args, x) for x in FUNCTION_MAP[args.command]['args']]
-    print(func(*args))
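
For context, the helper above is only driven from cperf-robot-netvirt-csit.sh. A minimal sketch of the two call patterns it supports is shown below; nodes.yaml stands in for $NODE_FILE_PATH and is assumed to contain the top-level 'servers' map the assert checks for.

  # Count nodes of a given type (the type defaults to 'controller')
  NUM_CONTROL_NODES=$(python parse-node-yaml.py num_nodes --file nodes.yaml)
  NUM_COMPUTE_NODES=$(python parse-node-yaml.py num_nodes --node-type compute --file nodes.yaml)

  # Look up a single field, here the address of the first compute node
  COMPUTE_1_IP=$(python parse-node-yaml.py get_value -k address --node-type compute --node-number 1 --file nodes.yaml)
  echo "${NUM_CONTROL_NODES} controllers, ${NUM_COMPUTE_NODES} computes, compute 1 at ${COMPUTE_1_IP}"
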
diff --git a/jjb/daisy4nfv/daisy-daily-jobs.yaml b/jjb/daisy4nfv/daisy-daily-jobs.yaml
deleted file mode 100644 (file)
index 4a7e6e9..0000000
+++ /dev/null
@@ -1,423 +0,0 @@
----
-# jenkins job templates for Daisy
-# TODO
-# [ ] enable baremetal jobs after baremetal deployment is finished
-# [ ] enable jobs in danube
-# [ ] add more scenarios
-# [ ] integration with yardstick
-
-- project:
-
-    name: 'daisy'
-    project: '{name}'
-    installer: '{name}'
-
-    # -------------------------------
-    # BRANCH ANCHORS
-    # -------------------------------
-    master: &master
-      stream: master
-      branch: '{stream}'
-      disabled: true
-      gs-pathname: ''
-    fraser: &fraser
-      stream: fraser
-      branch: 'stable/{stream}'
-      gs-pathname: '/{stream}'
-      disabled: true
-
-    # -------------------------------
-    # POD, INSTALLER, AND BRANCH MAPPING
-    # -------------------------------
-    pod:
-      # -------------------------------
-      #        CI PODs
-      # -------------------------------
-      - baremetal:
-          slave-label: daisy-baremetal
-          <<: *master
-      - virtual:
-          slave-label: daisy-virtual
-          <<: *master
-      - baremetal:
-          slave-label: daisy-baremetal
-          <<: *fraser
-      - virtual:
-          slave-label: daisy-virtual
-          <<: *fraser
-      # -------------------------------
-      #        Non-CI PODs
-      # -------------------------------
-      - zte-pod3:
-          slave-label: zte-pod3
-          <<: *master
-      - zte-pod3:
-          slave-label: zte-pod3
-          <<: *fraser
-      - zte-pod9:
-          slave-label: zte-pod9
-          <<: *master
-      - zte-pod9:
-          slave-label: zte-pod9
-          <<: *fraser
-
-    # -------------------------------
-    #       scenarios
-    # -------------------------------
-    scenario:
-      # HA scenarios
-      - 'os-nosdn-nofeature-ha':
-          auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
-      # NOHA scenarios
-      - 'os-nosdn-nofeature-noha':
-          auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
-      # ODL_L3 scenarios
-      - 'os-odl-nofeature-ha':
-          auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
-      # ovs_dpdk scenarios
-      - 'os-nosdn-ovs_dpdk-noha':
-          auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
-
-    jobs:
-      - '{project}-{scenario}-{pod}-daily-{stream}'
-      - '{project}-deploy-{pod}-daily-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
-    name: '{project}-{scenario}-{pod}-daily-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: false
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 4
-          max-per-node: 1
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - 'daisy-os-.*?-{pod}-daily-.*?'
-            - 'daisy-daily-.*'
-            - 'daisy-kolla-build-.*'
-          blocking-level: 'NODE'
-
-    wrappers:
-      - build-name:
-          name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-    triggers:
-      - '{auto-trigger-name}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{installer}-defaults'
-      - '{slave-label}-defaults':
-          installer: '{installer}'
-      - 'testapi-parameter'
-      - 'daisy-project-parameter':
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: '{scenario}'
-
-    builders:
-      - description-setter:
-          description: "POD: $NODE_NAME"
-      - trigger-builds:
-          - project: 'daisy-deploy-{pod}-daily-{stream}'
-            current-parameters: false
-            predefined-parameters: |
-              DEPLOY_SCENARIO={scenario}
-              INSTALLER_VERSION={stream}
-              UPSTREAM_JOB_NAME=$JOB_NAME
-              UPSTREAM_BUILD_ID=$BUILD_ID
-            same-node: true
-            block: true
-      - trigger-builds:
-          - project: 'functest-daisy-{pod}-daily-{stream}'
-            current-parameters: false
-            predefined-parameters: |
-              DEPLOY_SCENARIO={scenario}
-              INSTALLER_VERSION={stream}
-              UPSTREAM_JOB_NAME=$JOB_NAME
-              UPSTREAM_BUILD_ID=$BUILD_ID
-            same-node: true
-            block: true
-            block-thresholds:
-              build-step-failure-threshold: 'never'
-              failure-threshold: 'never'
-              unstable-threshold: 'FAILURE'
-      - trigger-builds:
-          - project: 'yardstick-daisy-{pod}-daily-{stream}'
-            current-parameters: false
-            predefined-parameters: |
-              DEPLOY_SCENARIO={scenario}
-              INSTALLER_VERSION={stream}
-              UPSTREAM_JOB_NAME=$JOB_NAME
-              UPSTREAM_BUILD_ID=$BUILD_ID
-            block: true
-            same-node: true
-            block-thresholds:
-              build-step-failure-threshold: 'never'
-              failure-threshold: 'never'
-              unstable-threshold: 'FAILURE'
-
-- job-template:
-    name: '{project}-deploy-{pod}-daily-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 4
-          max-per-node: 1
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - 'daisy-kolla-build-.*'
-            - '{installer}-(build|deploy|test)-daily-(fraser|master)'
-            - '{installer}-deploy-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
-            - '(functest|yardstick)-{installer}-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
-          blocking-level: 'NODE'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{installer}-defaults'
-      - '{slave-label}-defaults':
-          installer: '{installer}'
-      - 'testapi-parameter'
-      - 'daisy-project-parameter':
-          gs-pathname: '{gs-pathname}'
-      - 'deploy-scenario'
-      - string:
-          name: DEPLOY_TIMEOUT
-          default: '150'
-          description: 'Deployment timeout in minutes'
-
-    scm:
-      - git-scm
-
-    wrappers:
-      - build-name:
-          name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-    builders:
-      - description-setter:
-          description: "POD: $NODE_NAME"
-      - 'track-begin-timestamp'
-      - shell:
-          !include-raw-escape: ./daisy4nfv-download-artifact.sh
-      - shell:
-          !include-raw-escape: ./daisy-deploy.sh
-    publishers:
-      - 'report-provision-result'
-
-########################
-# trigger macros
-########################
-# ----------------------------------------------
-# Triggers for job running on daisy-baremetal against master branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
-    triggers:
-      - timed: ''
-# Basic NOHA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
-    triggers:
-      - timed: ''
-# ODL Scenarios
-- trigger:
-    name: 'daisy-os-odl-nofeature-ha-baremetal-daily-master-trigger'
-    triggers:
-      - timed: ''
-# ovs_dpdk Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-ovs_dpdk-noha-baremetal-daily-master-trigger'
-    triggers:
-      - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on daisy-virtual against master branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-ha-virtual-daily-master-trigger'
-    triggers:
-      - timed: '0 16 * * *'
-# Basic NOHA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
-    triggers:
-      - timed: ''
-# ODL Scenarios
-- trigger:
-    name: 'daisy-os-odl-nofeature-ha-virtual-daily-master-trigger'
-    triggers:
-      - timed: '0 12 * * *'
-# ovs_dpdk Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-ovs_dpdk-noha-virtual-daily-master-trigger'
-    triggers:
-      - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on daisy-baremetal against fraser branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-fraser-trigger'
-    triggers:
-      - timed: '0 0,6 * * *'
-# Basic NOHA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-fraser-trigger'
-    triggers:
-      - timed: ''
-# ODL Scenarios
-- trigger:
-    name: 'daisy-os-odl-nofeature-ha-baremetal-daily-fraser-trigger'
-    triggers:
-      - timed: '0 12,18 * * *'
-# ovs_dpdk Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-ovs_dpdk-noha-baremetal-daily-fraser-trigger'
-    triggers:
-      - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on daisy-virtual against fraser branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-ha-virtual-daily-fraser-trigger'
-    triggers:
-      - timed: '0 0 * * *'
-# Basic NOHA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-fraser-trigger'
-    triggers:
-      - timed: ''
-# ODL Scenarios
-- trigger:
-    name: 'daisy-os-odl-nofeature-ha-virtual-daily-fraser-trigger'
-    triggers:
-      - timed: '0 20 * * *'
-# ovs_dpdk Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-ovs_dpdk-noha-virtual-daily-fraser-trigger'
-    triggers:
-      - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on zte-pod3 against master branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-ha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-# Basic NOHA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-noha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-# ODL Scenarios
-- trigger:
-    name: 'daisy-os-odl-nofeature-ha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-# ovs_dpdk Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod3-daily-master-trigger'
-    triggers:
-      - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on zte-pod3 against fraser branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-ha-zte-pod3-daily-fraser-trigger'
-    triggers:
-      - timed: ''
-# Basic NOHA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-noha-zte-pod3-daily-fraser-trigger'
-    triggers:
-      - timed: ''
-# ODL Scenarios
-- trigger:
-    name: 'daisy-os-odl-nofeature-ha-zte-pod3-daily-fraser-trigger'
-    triggers:
-      - timed: '0 16,22 * * *'
-# ovs_dpdk Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod3-daily-fraser-trigger'
-    triggers:
-      - timed: ''
-
-# ----------------------------------------------
-# ZTE POD9 Triggers running against master branch
-# ----------------------------------------------
-# ovs_dpdk Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod9-daily-master-trigger'
-    triggers:
-      - timed: '0 20 * * *'
-# Basic HA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-ha-zte-pod9-daily-master-trigger'
-    triggers:
-      - timed: ''
-# Basic NOHA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-noha-zte-pod9-daily-master-trigger'
-    triggers:
-      - timed: ''
-# ODL Scenarios
-- trigger:
-    name: 'daisy-os-odl-nofeature-ha-zte-pod9-daily-master-trigger'
-    triggers:
-      - timed: ''
-
-# ----------------------------------------------
-# ZTE POD9 Triggers running against fraser branch
-# ----------------------------------------------
-# ovs_dpdk Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod9-daily-fraser-trigger'
-    triggers:
-      - timed: '0 10 * * *'
-# Basic HA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-ha-zte-pod9-daily-fraser-trigger'
-    triggers:
-      - timed: ''
-# Basic NOHA Scenarios
-- trigger:
-    name: 'daisy-os-nosdn-nofeature-noha-zte-pod9-daily-fraser-trigger'
-    triggers:
-      - timed: ''
-# ODL Scenarios
-- trigger:
-    name: 'daisy-os-odl-nofeature-ha-zte-pod9-daily-fraser-trigger'
-    triggers:
-      - timed: ''
diff --git a/jjb/daisy4nfv/daisy-deploy.sh b/jjb/daisy4nfv/daisy-deploy.sh
deleted file mode 100755 (executable)
index 950b9be..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-set -o nounset
-set -o pipefail
-
-echo "--------------------------------------------------------"
-echo "This is $INSTALLER_TYPE deploy job!"
-echo "--------------------------------------------------------"
-
-DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-"os-nosdn-nofeature-noha"}
-BRIDGE=${BRIDGE:-pxebr}
-LAB_NAME=${NODE_NAME/-*}
-POD_NAME=${NODE_NAME/*-}
-deploy_ret=0
-
-if [[ ! "$NODE_NAME" =~ "-virtual" ]] && [[ ! "$LAB_NAME" =~ (zte) ]]; then
-    echo "Unsupported lab $LAB_NAME for now, Cannot continue!"
-    exit $deploy_ret
-fi
-
-# clone the securedlab/pharos repo
-cd $WORKSPACE
-
-# There are no PDFs in the euphrates branch of the pharos repo.
-if [[  "$BRANCH" =~ "euphrates" ]]; then
-    CONFIG_REPO_NAME=securedlab
-else
-    CONFIG_REPO_NAME=pharos
-fi
-
-if [[  "$BRANCH" =~ "master" ]]; then
-    DOCTOR_OPT="-d 1"
-else
-    DOCTOR_OPT=""
-fi
-
-LABS_DIR=/var/tmp/opnfv-${CONFIG_REPO_NAME}
-
-echo "Cloning ${CONFIG_REPO_NAME} repo $BRANCH to $LABS_DIR"
-sudo rm -rf $LABS_DIR
-git clone ssh://jenkins-zte@gerrit.opnfv.org:29418/${CONFIG_REPO_NAME} \
-    --quiet --branch $BRANCH $LABS_DIR
-
-DEPLOY_COMMAND="sudo -E ./ci/deploy/deploy.sh -L $LABS_DIR \
-                -l $LAB_NAME -p $POD_NAME -B $BRIDGE -s $DEPLOY_SCENARIO \
-                $DOCTOR_OPT"
-
-# log info to console
-echo """
-Deployment parameters
---------------------------------------------------------
-Scenario: $DEPLOY_SCENARIO
-LAB: $LAB_NAME
-POD: $POD_NAME
-BRIDGE: $BRIDGE
-
-Starting the deployment using $INSTALLER_TYPE. This could take some time...
---------------------------------------------------------
-Issuing command
-$DEPLOY_COMMAND
-"""
-
-# start the deployment
-$DEPLOY_COMMAND
-
-if [ $? -ne 0 ]; then
-    echo
-    echo "Depolyment failed!"
-    deploy_ret=1
-else
-    echo
-    echo "--------------------------------------------------------"
-    echo "Deployment done!"
-fi
-
-exit $deploy_ret
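
The LAB_NAME/POD_NAME split above relies on bash pattern substitution against $NODE_NAME. A small sketch, using zte-pod3 (one of the PODs configured in these jobs) purely as an example value:

  NODE_NAME=zte-pod3
  LAB_NAME=${NODE_NAME/-*}    # drop everything from the first '-' onwards          -> zte
  POD_NAME=${NODE_NAME/*-}    # drop everything up to and including the last '-'    -> pod3
  echo "LAB: ${LAB_NAME}, POD: ${POD_NAME}"
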
diff --git a/jjb/daisy4nfv/daisy-project-jobs.yaml b/jjb/daisy4nfv/daisy-project-jobs.yaml
deleted file mode 100644 (file)
index 7914548..0000000
+++ /dev/null
@@ -1,312 +0,0 @@
----
-######################################################################
-# Add daily jobs for building, deploying and testing
-# TODO:
-# - [ ] Add yardstick and functest for test stage
-# - [x] Use daisy-baremetal-defaults for choosing baremetal deployment
-######################################################################
-
-#############################
-# Job configuration for daisy
-#############################
-- project:
-    name: daisy-project-jobs
-
-    project: 'daisy'
-
-    installer: 'daisy'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: true
-      - fraser:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: true
-
-    phase:
-      - 'build':
-          slave-label: 'opnfv-build-centos'
-      - 'deploy':
-          slave-label: 'daisy-baremetal'
-      - 'test':
-          slave-label: 'opnfv-build-centos'
-
-    jobs:
-      - '{installer}-daily-{stream}'
-      - '{installer}-{phase}-daily-{stream}'
-      - '{installer}-kolla-build-{stream}'
-
-#############################
-# docker build job templates
-#############################
-- job-template:
-    name: '{installer}-kolla-build-{stream}'
-    disabled: false
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 4
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          # Note: Need to block all jobs which may create a daisy VM.
-          blocking-jobs:
-            - '{installer}-kolla-build-.*'
-            - 'daisy-deploy-.*'
-            - 'daisy-daily-.*'
-          blocking-level: 'NODE'
-
-    scm:
-      - git-scm
-
-    triggers:
-      - 'daisy-kolla-build-{stream}-trigger'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'daisy-virtual-defaults'
-      - '{installer}-defaults'
-      - '{installer}-project-parameter':
-          gs-pathname: '{gs-pathname}'
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 720
-          fail: true
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - shell:
-          !include-raw-escape: ./daisy4nfv-build-kolla-image.sh
-
-    publishers:
-      - '{installer}-recipients'
-      - email-jenkins-admins-on-failure
-
-- trigger:
-    name: 'daisy-kolla-build-fraser-trigger'
-    triggers:
-      - timed: '0 0 * * 0'
-
-- trigger:
-    name: 'daisy-kolla-build-master-trigger'
-    triggers:
-      - timed: '0 12 * * 0'
-
-
-########################
-# job templates
-########################
-- job-template:
-    name: '{installer}-daily-{stream}'
-
-    project-type: multijob
-
-    disabled: false
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 4
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - '{installer}-daily-.*'
-            - '{installer}-kolla-build-.*'
-            - 'daisy4nfv-merge-build-.*'
-            - 'daisy4nfv-verify-build-.*'
-          blocking-level: 'NODE'
-
-    scm:
-      - git-scm
-
-    triggers:
-      - timed: '0 8 * * *'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-centos-defaults'
-      - '{installer}-defaults'
-      - '{installer}-project-parameter':
-          gs-pathname: '{gs-pathname}'
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - multijob:
-          name: build
-          condition: SUCCESSFUL
-          projects:
-            - name: '{installer}-build-daily-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-      - multijob:
-          name: deploy
-          condition: SUCCESSFUL
-          projects:
-            - name: '{installer}-deploy-daily-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-      - multijob:
-          name: test
-          condition: SUCCESSFUL
-          projects:
-            - name: '{installer}-test-daily-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-
-    publishers:
-      - '{installer}-recipients'
-      - email-jenkins-admins-on-failure
-
-- job-template:
-    name: '{installer}-{phase}-daily-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 6
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - '{installer}-daily-(build|deploy|test)-(fraser|master)'
-            - '{installer}-.*-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
-            - '(functest|yardstick)-{installer}-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
-          blocking-level: 'NODE'
-
-    scm:
-      - git-scm
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{installer}-defaults'
-      - '{slave-label}-defaults'
-      - string:
-          name: GIT_BASE
-          default: https://gerrit.opnfv.org/gerrit/$PROJECT
-          description: 'Git URL to use on this Jenkins Slave'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-nosdn-nofeature-ha'
-      - '{installer}-project-parameter':
-          gs-pathname: '{gs-pathname}'
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - '{installer}-{phase}-daily-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
-    name: 'daisy-build-daily-macro'
-    builders:
-      - shell:
-          !include-raw: ./daisy4nfv-basic.sh
-      - shell:
-          !include-raw: ./daisy4nfv-build.sh
-      - shell:
-          !include-raw: ./daisy4nfv-upload-artifact.sh
-      - 'clean-workspace'
-
-- builder:
-    name: 'daisy-deploy-daily-macro'
-    builders:
-      - shell:
-          !include-raw: ./daisy4nfv-download-artifact.sh
-      - shell:
-          !include-raw: ./daisy-deploy.sh
-
-- builder:
-    name: 'daisy-test-daily-macro'
-    builders:
-      - shell: |
-          #!/bin/bash
-
-          echo "Not activated!"
-
-#####################################
-# parameter macros
-#####################################
-- publisher:
-    name: 'daisy-recipients'
-    publishers:
-      # yamllint disable rule:line-length
-      - email:
-          recipients: hu.zhijiang@zte.com.cn lu.yao135@zte.com.cn zhou.ya@zte.com.cn yangyang1@zte.com.cn julienjut@gmail.com
-      # yamllint enable rule:line-length
-      - email-jenkins-admins-on-failure
-
-- parameter:
-    name: 'daisy-project-parameter'
-    parameters:
-      - string:
-          name: BUILD_DIRECTORY
-          default: $WORKSPACE/build_output
-          description: "Directory where the build artifact will be located upon the completion of the build."
-      - string:
-          name: CACHE_DIRECTORY
-          default: $HOME/opnfv/cache/$INSTALLER_TYPE
-          description: "Directory where the cache to be used during the build is located."
-      - string:
-          name: GS_URL
-          default: artifacts.opnfv.org/$PROJECT{gs-pathname}
-          description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy-rtd-jobs.yaml b/jjb/daisy4nfv/daisy-rtd-jobs.yaml
deleted file mode 100644 (file)
index 2e7689a..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
----
-- project:
-    name: daisy-rtd
-    project: daisy
-    project-name: daisy
-
-    project-pattern: 'daisy'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-daisy/47361/'
-    rtd-token: '265efe14ff0bb3fa0d4ea66d6be1b7b511d5d713'
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/daisy4nfv/daisy4nfv-basic.sh b/jjb/daisy4nfv/daisy4nfv-basic.sh
deleted file mode 100755 (executable)
index 87f5482..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv basic job!"
-echo "--------------------------------------------------------"
-
diff --git a/jjb/daisy4nfv/daisy4nfv-build-kolla-image.sh b/jjb/daisy4nfv/daisy4nfv-build-kolla-image.sh
deleted file mode 100755 (executable)
index 0441ea1..0000000
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE Corporation and others.
-# hu.zhijiang@zte.com.cn
-# sun.jing22@zte.com.cn
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-upload_image_to_opnfv () {
-    image=$1
-
-    sha512sum -b $image > $image.sha512sum
-    gsutil cp $image.sha512sum gs://$GS_URL/upstream/$image.sha512sum
-
-    echo "Uploading $INSTALLER_TYPE artifact. This could take some time..."
-    echo
-    gsutil cp $image gs://$GS_URL/upstream/$image
-    gsutil -m setmeta \
-        -h "Cache-Control:private, max-age=0, no-transform" \
-        gs://$GS_URL/upstream/$image
-
-    # check if we uploaded the file successfully to see if things are fine
-    gsutil ls gs://$GS_URL/upstream/$image
-    if [[ $? -ne 0 ]]; then
-        echo "Problem while uploading artifact!"
-        exit 1
-    fi
-}
-
-
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv kolla image build job!"
-echo "--------------------------------------------------------"
-
-# start the build
-cd $WORKSPACE
-rm -rf docker_build_dir
-mkdir -p docker_build_dir
-
-# -j is for deciding which branch will be used when building,
-# only for OPNFV
-sudo -E ./ci/kolla-build.sh -j $JOB_NAME -w $WORKSPACE/docker_build_dir
-
-if [ $? -ne 0 ]; then
-    echo
-    echo "Kolla build failed!"
-    deploy_ret=1
-else
-    echo
-    echo "--------------------------------------------------------"
-    echo "Kolla build done!"
-fi
-
-image=$(ls $WORKSPACE/docker_build_dir/kolla-build-output/kolla-image-*.tgz)
-upload_image_to_opnfv $image
-
-echo
-echo "--------------------------------------------------------"
-echo "All done!"
diff --git a/jjb/daisy4nfv/daisy4nfv-build.sh b/jjb/daisy4nfv/daisy4nfv-build.sh
deleted file mode 100755 (executable)
index a081b3b..0000000
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/bin/bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv build job!"
-echo "--------------------------------------------------------"
-
-# set OPNFV_ARTIFACT_VERSION
-if [[ "$JOB_NAME" =~ "merge" ]]; then
-    echo "Building Daisy4nfv ISO for a merged change"
-    export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
-else
-    export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-fi
-
-# build output directory
-OUTPUT_DIR=$WORKSPACE/build_output
-mkdir -p $OUTPUT_DIR
-
-# start the build
-cd $WORKSPACE
-./ci/build.sh $OUTPUT_DIR $OPNFV_ARTIFACT_VERSION
-
-# save information regarding artifact into file
-(
-    echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
-    echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
-    echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
-    echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin"
-    echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $OUTPUT_DIR/opnfv-$OPNFV_ARTIFACT_VERSION.bin | cut -d' ' -f1)"
-    echo "OPNFV_ARTIFACT_URL_ISO=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-    echo "OPNFV_ARTIFACT_SHA512SUM_ISO=$(sha512sum $OUTPUT_DIR/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
-    echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh b/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
deleted file mode 100755 (executable)
index ae5ca38..0000000
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE Corporation and others.
-# hu.zhijiang@zte.com.cn
-# sun.jing22@zte.com.cn
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o pipefail
-
-# Use the proxy URL in place of the normal URL, as googleusercontent.com may be blocked at random
-[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
-
-if [[ "$JOB_NAME" =~ "merge" ]]; then
-    echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
-    # get the properties file for the Daisy4nfv BIN built for a merged change
-    curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
-else
-    # get the latest.properties file in order to get info regarding latest artifact
-    echo "Downloading http://$GS_URL/latest.properties"
-    curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
-fi
-
-# check if we got the file
-[[ -f $WORKSPACE/latest.properties ]] || exit 1
-
-# source the file so we get artifact metadata
-source $WORKSPACE/latest.properties
-
-# echo the info about artifact that is used during the deployment
-OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
-echo "Using $OPNFV_ARTIFACT for deployment"
-
-[[ "$NODE_NAME" =~ (zte) ]] && OPNFV_ARTIFACT_URL=${GS_BASE_PROXY%%/*}/$OPNFV_ARTIFACT_URL
-
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
-    # check if we already have the image to avoid redownload
-    BINSTORE="/bin_mount/opnfv_ci/${BRANCH##*/}"
-    if [[ -f "$BINSTORE/$OPNFV_ARTIFACT" && ! -z $OPNFV_ARTIFACT_SHA512SUM ]]; then
-        echo "BIN exists locally. Starting to check the sha512sum."
-        if [[ $OPNFV_ARTIFACT_SHA512SUM = $(sha512sum -b $BINSTORE/$OPNFV_ARTIFACT | cut -d' ' -f1) ]]; then
-            echo "Sha512sum is verified. Skipping the download and using the file from BIN store."
-            ln -s $BINSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.bin
-            echo "--------------------------------------------------------"
-            echo
-            ls -al $WORKSPACE/opnfv.bin
-            echo
-            echo "--------------------------------------------------------"
-            echo "Done!"
-            exit 0
-        fi
-    fi
-fi
-
-# log info to console
-echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
-echo "This could take some time... Now the time is $(date -u)"
-echo "--------------------------------------------------------"
-echo
-
-# download the file
-if [[ "$NODE_NAME" =~ (zte) ]] && [ -x "$(command -v aria2c)" ]; then
-    DOWNLOAD_CMD="aria2c -x 3 --allow-overwrite=true -d $WORKSPACE -o opnfv.bin"
-else
-    DOWNLOAD_CMD="curl -L -s -o $WORKSPACE/opnfv.bin"
-fi
-
-maxretries=3
-cnt=0
-rc=1
-while [ $cnt -lt $maxretries ] && [ $rc -ne 0 ]
-do
-    cnt=$[cnt + 1]
-    $DOWNLOAD_CMD http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
-    rc=$?
-done
-
-# list the file
-ls -al $WORKSPACE/opnfv.bin
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml
deleted file mode 100644 (file)
index c261c23..0000000
+++ /dev/null
@@ -1,226 +0,0 @@
----
-- project:
-    name: 'daisy4nfv-merge-jobs'
-
-    project: 'daisy'
-
-    installer: 'daisy'
-
-    ###########################################################
-    # use an alias to keep the existing job names unchanged
-    ###########################################################
-    alias: 'daisy4nfv'
-
-    #####################################
-    # branch definitions
-    #####################################
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: true
-      - fraser:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: true
-    #####################################
-    # patch merge phases
-    #####################################
-    phase:
-      - 'build':
-          slave-label: 'opnfv-build-centos'
-      - 'deploy-virtual':
-          slave-label: 'daisy-virtual'
-
-    #####################################
-    # jobs
-    #####################################
-    jobs:
-      - '{alias}-merge-{stream}'
-      - '{alias}-merge-{phase}-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
-    name: '{alias}-merge-{stream}'
-
-    project-type: multijob
-
-    disabled: false
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 4
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - '{alias}-merge-(master|fraser)'
-          blocking-level: 'NODE'
-
-    scm:
-      - git-scm
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - change-merged-event
-            - comment-added-contains-event:
-                comment-contains-value: 'remerge'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              file-paths:
-                - compare-type: ANT
-                  pattern: 'ci/**'
-                - compare-type: ANT
-                  pattern: 'code/**'
-                - compare-type: ANT
-                  pattern: 'deploy/**'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**'
-                - compare-type: ANT
-                  pattern: '.gitignore'
-          readable-message: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-centos-defaults'
-      - '{alias}-merge-defaults':
-          gs-pathname: '{gs-pathname}'
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - multijob:
-          name: build
-          condition: SUCCESSFUL
-          projects:
-            - name: '{alias}-merge-build-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-      - multijob:
-          name: deploy-virtual
-          condition: SUCCESSFUL
-          projects:
-            - name: '{alias}-merge-deploy-virtual-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-
-- job-template:
-    name: '{alias}-merge-{phase}-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 4
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - '{alias}-merge-{phase}-.*'
-            - '{installer}-daily-.*'
-          blocking-level: 'NODE'
-
-    scm:
-      - git-scm
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{installer}-defaults'
-      - '{slave-label}-defaults'
-      - '{alias}-merge-defaults':
-          gs-pathname: '{gs-pathname}'
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - '{project}-merge-{phase}-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
-    name: 'daisy-merge-build-macro'
-    builders:
-      - shell:
-          !include-raw: ./daisy4nfv-basic.sh
-      - shell:
-          !include-raw: ./daisy4nfv-build.sh
-      - shell:
-          !include-raw: ./daisy4nfv-upload-artifact.sh
-      - 'clean-workspace'
-
-- builder:
-    name: 'daisy-merge-deploy-virtual-macro'
-    builders:
-      - shell:
-          !include-raw: ./daisy4nfv-download-artifact.sh
-      - shell:
-          !include-raw: ./daisy-deploy.sh
-      - 'clean-workspace'
-
-#####################################
-# parameter macros
-#####################################
-- parameter:
-    name: 'daisy4nfv-merge-defaults'
-    parameters:
-      - string:
-          name: BUILD_DIRECTORY
-          default: $WORKSPACE/build_output
-          description: "Directory where the build artifact will be located upon the completion of the build."
-      - string:
-          name: CACHE_DIRECTORY
-          default: $HOME/opnfv/cache/$INSTALLER_TYPE
-          description: "Directory where the cache to be used during the build is located."
-      - string:
-          name: GS_URL
-          default: artifacts.opnfv.org/$PROJECT{gs-pathname}
-          description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy4nfv-smoke-test.sh b/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
deleted file mode 100755 (executable)
index bd6eb7e..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv smoke test job!"
-echo "--------------------------------------------------------"
-
diff --git a/jjb/daisy4nfv/daisy4nfv-upload-artifact.sh b/jjb/daisy4nfv/daisy4nfv-upload-artifact.sh
deleted file mode 100755 (executable)
index def4f6a..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE Corporation and others.
-# hu.zhijiang@zte.com.cn
-# sun.jing22@zte.com.cn
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-
-# check if we built something
-if [ -f $WORKSPACE/.noupload ]; then
-    echo "Nothing new to upload. Exiting."
-    /bin/rm -f $WORKSPACE/.noupload
-    exit 0
-fi
-
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-importkey () {
-# clone releng repository
-echo "Cloning releng repository..."
-[ -d releng ] && rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-# this is where we import the signing key
-if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
-  source $WORKSPACE/releng/utils/gpg_import_key.sh
-fi
-}
-
-signbin () {
-gpg2 -vvv --batch --yes --no-tty \
-  --default-key opnfv-helpdesk@rt.linuxfoundation.org  \
-  --passphrase besteffort \
-  --detach-sig $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.bin
-
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.bin.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin.sig
-echo "BIN signature Upload Complete!"
-}
-
-uploadbin () {
-# log info to console
-echo "Uploading $INSTALLER_TYPE artifact. This could take some time..."
-echo
-
-cd $WORKSPACE
-# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.bin \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin > gsutil.bin.log 2>&1
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso >> gsutil.bin.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
-    gsutil cp $WORKSPACE/opnfv.properties \
-    gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-elif [[ "$JOB_NAME" =~ "merge" ]]; then
-    echo "Uploaded Daisy4nfv artifacts for a merged change"
-fi
-
-gsutil -m setmeta \
-    -h "Content-Type:text/html" \
-    -h "Cache-Control:private, max-age=0, no-transform" \
-    gs://$GS_URL/latest.properties \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
-    -h "Cache-Control:private, max-age=0, no-transform" \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin > /dev/null 2>&1
-
-# disabled errexit due to gsutil setmeta complaints
-#   BadRequestException: 400 Invalid argument
-# check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
-    echo "Problem while uploading artifact!"
-    echo "Check log $WORKSPACE/gsutil.bin.log on the machine where this build is done."
-    exit 1
-fi
-
-echo "Done!"
-echo
-echo "--------------------------------------------------------"
-echo
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin"
-echo
-echo "--------------------------------------------------------"
-echo
-}
-
-importkey
-signbin
-uploadbin
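
As a rough sanity check of the signing step above, a consumer of the published artifacts could fetch the BIN and its detached signature and verify them with gpg2. This is a sketch only: the version string is a placeholder for OPNFV_ARTIFACT_VERSION and the OPNFV public key is assumed to already be in the local keyring.

  VERSION=2018-01-01_12-00-00    # placeholder for OPNFV_ARTIFACT_VERSION
  BASE_URL=http://artifacts.opnfv.org/daisy
  curl -L -s -o opnfv-$VERSION.bin     $BASE_URL/opnfv-$VERSION.bin
  curl -L -s -o opnfv-$VERSION.bin.sig $BASE_URL/opnfv-$VERSION.bin.sig
  gpg2 --verify opnfv-$VERSION.bin.sig opnfv-$VERSION.bin
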
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml
deleted file mode 100644 (file)
index c8c1db0..0000000
+++ /dev/null
@@ -1,225 +0,0 @@
----
-- project:
-    name: 'daisy4nfv-verify-jobs'
-    project: 'daisy'
-    installer: 'daisy'
-    ##########################################################
-    # use an alias to keep the existing job names unchanged
-    ##########################################################
-    alias: 'daisy4nfv'
-
-    #####################################
-    # branch definitions
-    #####################################
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: true
-      - fraser:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: true
-
-    #####################################
-    # patch verification phases
-    #####################################
-    phase:
-      - unit:
-          slave-label: 'opnfv-build'
-      - build:
-          slave-label: 'opnfv-build-centos'
-
-    #####################################
-    # jobs
-    #####################################
-    jobs:
-      - '{alias}-verify-{stream}'
-      - '{alias}-verify-{phase}-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
-    name: '{alias}-verify-{stream}'
-    project-type: multijob
-    disabled: false
-    concurrent: true
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 4
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - '{alias}-merge-build-.*'
-          blocking-level: 'NODE'
-
-    scm:
-      - git-scm-gerrit
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              file-paths:
-                - compare-type: ANT
-                  pattern: 'ci/**'
-                - compare-type: ANT
-                  pattern: 'code/**'
-                - compare-type: ANT
-                  pattern: 'deploy/**'
-                - compare-type: ANT
-                  pattern: 'tests/**'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**'
-                - compare-type: ANT
-                  pattern: '.gitignore'
-          readable-message: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-centos-defaults'
-      - '{alias}-verify-defaults':
-          gs-pathname: '{gs-pathname}'
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - multijob:
-          name: unit
-          condition: SUCCESSFUL
-          projects:
-            - name: '{alias}-verify-unit-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-      - multijob:
-          name: build
-          condition: SUCCESSFUL
-          projects:
-            - name: '{alias}-verify-build-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-
-- job-template:
-    name: '{alias}-verify-{phase}-{stream}'
-    disabled: '{obj:disabled}'
-    concurrent: true
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 6
-          option: 'project'
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - '{alias}-merge-build-.*'
-            - '{alias}-verify-build-.*'
-            - '{installer}-daily-.*'
-          blocking-level: 'NODE'
-
-    scm:
-      - git-scm-gerrit
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{installer}-defaults'
-      - '{slave-label}-defaults'
-      - '{alias}-verify-defaults':
-          gs-pathname: '{gs-pathname}'
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - '{project}-verify-{phase}-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
-    name: 'daisy-verify-build-macro'
-    builders:
-      - shell:
-          !include-raw: ./daisy4nfv-basic.sh
-      - shell:
-          !include-raw: ./daisy4nfv-build.sh
-      - 'clean-workspace'
-
-- builder:
-    name: daisy-verify-unit-macro
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o pipefail
-          set -o xtrace
-          tox -e py27
-
-#####################################
-# parameter macros
-#####################################
-- parameter:
-    name: 'daisy4nfv-verify-defaults'
-    parameters:
-      - string:
-          name: BUILD_DIRECTORY
-          default: $WORKSPACE/build_output
-          description: "Directory where the build artifact will be located upon the completion of the build."
-      - string:
-          name: CACHE_DIRECTORY
-          default: $HOME/opnfv/cache/$INSTALLER_TYPE
-          description: "Directory where the cache to be used during the build is located."
-      - string:
-          name: GS_URL
-          default: artifacts.opnfv.org/$PROJECT{gs-pathname}
-          description: "URL to Google Storage."
index 5a63d09..c1bab19 100644 (file)
@@ -50,6 +50,7 @@
       - itri-pod1:
           slave-label: itri-pod1
           <<: *master
+          disabled: true
     # -------------------------------
     #       scenarios
     # -------------------------------
diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh
deleted file mode 100755 (executable)
index 14143d2..0000000
+++ /dev/null
@@ -1,270 +0,0 @@
-#!/bin/bash
-
-set -e
-set +u
-set +o pipefail
-
-REPO=${REPO:-opnfv}
-CI_LOOP=${CI_LOOP:-daily}
-TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results
-ENERGY_RECORDER_API_URL=http://energy.opnfv.fr/resources
-DOCKER_TAG=${DOCKER_TAG:-$([[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/})}
-
-run_tiers() {
-    tiers=$1
-    cmd_opt="run_tests -r -t all"
-    [[ $BUILD_TAG =~ "suite" ]] && cmd_opt="run_tests -t all"
-    for tier in ${tiers[@]}; do
-        FUNCTEST_IMAGE=${REPO}/functest-${tier}:${DOCKER_TAG}
-        echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
-        docker pull ${FUNCTEST_IMAGE}>/dev/null
-        cmd="docker run --rm  ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
-        echo "Running Functest tier '${tier}'. CMD: ${cmd}"
-        eval ${cmd}
-        ret_value=$?
-        if [ ${ret_value} != 0 ]; then
-            echo ${ret_value} > ${ret_val_file}
-            if [ ${tier} == 'healthcheck' ]; then
-                echo "Healthcheck tier failed. Exiting Functest..."
-                skip_tests=1
-                break
-            fi
-        fi
-    done
-}
-
-run_test() {
-    test_name=$1
-    cmd_opt="run_tests -t ${test_name}"
-    # Determine which Functest image should be used for the test case
-    case ${test_name} in
-        connection_check|tenantnetwork1|tenantnetwork2|vmready1|vmready2|singlevm1|singlevm2|vping_ssh|vping_userdata|cinder_test|odl|api_check|snaps_health_check|tempest_smoke)
-            FUNCTEST_IMAGE=${REPO}/functest-healthcheck:${DOCKER_TAG} ;;
-        neutron-tempest-plugin-api|rally_sanity|refstack_defcore|tempest_full|tempest_scenario|patrole|snaps_smoke|neutron_trunk|networking-bgpvpn|networking-sfc|barbican)
-            FUNCTEST_IMAGE=${REPO}/functest-smoke:${DOCKER_TAG} ;;
-        rally_full|rally_jobs|shaker|vmtp)
-            FUNCTEST_IMAGE=${REPO}/functest-benchmarking:${DOCKER_TAG} ;;
-        cloudify|cloudify_ims|heat_ims|vyos_vrouter|juju_epc)
-            FUNCTEST_IMAGE=${REPO}/functest-vnf:${DOCKER_TAG} ;;
-        *)
-            echo "Unkown test case $test_name"
-            exit 1
-            ;;
-    esac
-    echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
-    docker pull ${FUNCTEST_IMAGE}>/dev/null
-    cmd="docker run --rm ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
-    echo "Running Functest test case '${test_name}'. CMD: ${cmd}"
-    eval ${cmd}
-    ret_value=$?
-    if [ ${ret_value} != 0 ]; then
-      echo ${ret_value} > ${ret_val_file}
-    fi
-}
-
-
-redirect="/dev/stdout"
-FUNCTEST_DIR=/home/opnfv/functest
-DEPLOY_TYPE=baremetal
-[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
-HOST_ARCH=$(uname -m)
-
-# Prepare OpenStack credentials volume
-rc_file=${HOME}/opnfv-openrc.sh
-
-if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
-    rc_file=$LAB_CONFIG/admin-openrc
-elif [[ ${INSTALLER_TYPE} == 'fuel' ]] && [[ "${DEPLOY_SCENARIO}" =~ -ha$ ]]; then
-    cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert"
-fi
-
-rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/env_file"
-
-echo "Functest: Start Docker and prepare environment"
-
-echo "Functest: Download images that will be used by test cases"
-images_dir="${HOME}/opnfv/functest/images"
-download_script=${WORKSPACE}/functest/ci/download_images.sh
-chmod +x ${download_script}
-${download_script} ${images_dir} ${DEPLOY_SCENARIO} ${HOST_ARCH} 2> ${redirect}
-
-images_vol="-v ${images_dir}:${FUNCTEST_DIR}/images"
-
-dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-results_vol="-v ${dir_result}:${FUNCTEST_DIR}/results"
-custom_params=
-test -f ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG} && custom_params=$(cat ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG})
-
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-    -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
-    -e BUILD_TAG=${BUILD_TAG} -e DEPLOY_TYPE=${DEPLOY_TYPE} -e CI_LOOP=${CI_LOOP} \
-    -e TEST_DB_URL=${TEST_DB_URL} -e ENERGY_RECORDER_API_URL=${ENERGY_RECORDER_API_URL} \
-    -e DEBUG=true"
-
-ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-
-if [ "${INSTALLER_TYPE}" == 'fuel' ]; then
-    COMPUTE_ARCH=$(ssh -l ubuntu ${INSTALLER_IP} -i ${SSH_KEY} ${ssh_options} \
-        "sudo salt 'cmp*' grains.get cpuarch --out yaml | awk '{print \$2; exit}'")
-    IMAGE_PROPERTIES="hw_disk_bus:scsi,hw_scsi_model:virtio-scsi"
-    envs="${envs} -e POD_ARCH=${COMPUTE_ARCH} -e BLOCK_MIGRATION=false"
-fi
-
-if [[ ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_SCENARIO} == 'os-nosdn-nofeature-noha' ]]; then
-    libvirt_vol="-v ${ssh_key}:${FUNCTEST_DIR}/conf/libvirt_key"
-    envs="${envs} -e LIBVIRT_USER=ubuntu -e LIBVIRT_KEY_PATH=${FUNCTEST_DIR}/conf/libvirt_key"
-fi
-
-if [[ ${DEPLOY_SCENARIO} == *"ovs"* ]] || [[ ${DEPLOY_SCENARIO} == *"fdio"* ]]; then
-    if [[ -n ${IMAGE_PROPERTIES} ]]; then
-        IMAGE_PROPERTIES="${IMAGE_PROPERTIES},hw_mem_page_size:large"
-    else
-        IMAGE_PROPERTIES="hw_mem_page_size:large"
-    fi
-    FLAVOR_EXTRA_SPECS="hw:mem_page_size:large"
-fi
-
-if [[ -n ${IMAGE_PROPERTIES} ]] || [[ -n ${FLAVOR_EXTRA_SPECS} ]]; then
-    envs="${envs} -e IMAGE_PROPERTIES=${IMAGE_PROPERTIES} -e FLAVOR_EXTRA_SPECS=${FLAVOR_EXTRA_SPECS}"
-fi
-
-tempest_conf_yaml=$(mktemp)
-case ${INSTALLER_TYPE} in
-apex)
-    cat << EOF > "${tempest_conf_yaml}"
----
-compute-feature-enabled:
-    shelve: false
-    vnc_console: true
-    block_migration_for_live_migration: false
-identity-feature-enabled:
-    api_v2: false
-    api_v2_admin: false
-image-feature-enabled:
-    api_v2: true
-    api_v1: false
-object-storage:
-    operator_role: SwiftOperator
-volume:
-    storage_protocol: ceph
-volume-feature-enabled:
-    backup: false
-EOF
-    ;;
-fuel)
-    cat << EOF > "${tempest_conf_yaml}"
----
-compute-feature-enabled:
-    shelve: false
-    vnc_console: false
-    spice_console: true
-identity-feature-enabled:
-    api_v2: false
-    api_v2_admin: false
-image-feature-enabled:
-    api_v2: true
-    api_v1: false
-volume:
-    storage_protocol: iSCSI
-volume-feature-enabled:
-    backup: false
-EOF
-    ;;
-*)
-    cat << EOF > "${tempest_conf_yaml}"
----
-compute-feature-enabled:
-    shelve: false
-    vnc_console: false
-identity-feature-enabled:
-    api_v2: false
-    api_v2_admin: false
-image-feature-enabled:
-    api_v2: true
-    api_v1: false
-volume:
-    storage_protocol: iSCSI
-volume-feature-enabled:
-    backup: false
-EOF
-    ;;
-esac
-case ${BRANCH} in
-master)
-    cat << EOF >> "${tempest_conf_yaml}"
-compute:
-    max_microversion: latest
-EOF
-    ;;
-stable/hunter)
-    cat << EOF >> "${tempest_conf_yaml}"
-compute:
-    max_microversion: 2.65
-EOF
-    ;;
-esac
-echo "tempest_conf.yaml:" && cat "${tempest_conf_yaml}"
-
-volumes="${images_vol} ${results_vol} ${sshkey_vol} ${libvirt_vol} \
-    ${userconfig_vol} ${rc_file_vol} ${cacert_file_vol} \
-    -v ${tempest_conf_yaml}:/usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml"
-
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
-    blacklist_yaml=$(mktemp)
-    cat << EOF >> "${blacklist_yaml}"
----
--
-    scenarios:
-        - os-ovn-nofeature-ha
-    tests:
-        - neutron_tempest_plugin.api.admin.test_agent_management
-        - neutron_tempest_plugin.api.admin.test_dhcp_agent_scheduler
-        - patrole_tempest_plugin.tests.api.network.test_agents_rbac
-        - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_network_type
-        - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_segmentation_id
-        - tempest.api.network.admin.test_agent_management
-        - tempest.api.network.admin.test_dhcp_agent_scheduler
-        - tempest.api.object_storage.test_crossdomain.CrossdomainTest.test_get_crossdomain_policy
--
-    scenarios:
-        - os-nosdn-nofeature-ha
-    tests:
-        - tempest.api.object_storage.test_crossdomain.CrossdomainTest.test_get_crossdomain_policy
--
-    scenarios:
-        - os-nosdn-nofeature-noha
-    tests:
-        - tempest.api.object_storage.test_crossdomain.CrossdomainTest.test_get_crossdomain_policy
-EOF
-    volumes="${volumes} -v ${blacklist_yaml}:/usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/blacklist.yaml"
-fi
-
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-echo 0 > ${ret_val_file}
-
-set +e
-
-if [ ${FUNCTEST_MODE} == 'testcase' ]; then
-    echo "FUNCTEST_MODE=testcase, FUNCTEST_SUITE_NAME=${FUNCTEST_SUITE_NAME}"
-    run_test ${FUNCTEST_SUITE_NAME}
-elif [ ${FUNCTEST_MODE} == 'tier' ]; then
-    echo "FUNCTEST_MODE=tier, FUNCTEST_TIER=${FUNCTEST_TIER}"
-    tiers=(${FUNCTEST_TIER})
-    run_tiers ${tiers}
-else
-    tests=()
-    skip_tests=0
-    if [ "${HOST_ARCH}" != "aarch64" ]; then
-        tiers=(healthcheck smoke benchmarking vnf)
-    else
-        tiers=(healthcheck smoke benchmarking)
-    fi
-    run_tiers ${tiers}
-    if [ ${skip_tests} -eq 0 ]; then
-        for test in "${tests[@]}"; do
-            run_test "$test"
-        done
-    fi
-fi
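The script above drives everything through per-tier Functest containers, so a single test case can be reproduced by hand with essentially the same docker invocation. A minimal sketch, assuming an admin openrc at $HOME/opnfv-openrc.sh and an images cache at $HOME/opnfv/functest/images (both paths mirror the defaults used above; installer and scenario values are illustrative):

    FUNCTEST_DIR=/home/opnfv/functest
    docker pull opnfv/functest-healthcheck:latest
    # Run one healthcheck test case the same way run_test() does.
    docker run --rm \
        -e INSTALLER_TYPE=fuel -e DEPLOY_SCENARIO=os-nosdn-nofeature-noha \
        -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
        -v $HOME/opnfv-openrc.sh:$FUNCTEST_DIR/conf/env_file \
        -v $HOME/opnfv/functest/images:$FUNCTEST_DIR/images \
        opnfv/functest-healthcheck:latest /bin/bash -c 'run_tests -t connection_check'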
diff --git a/jjb/functest/functest-cleanup.sh b/jjb/functest/functest-cleanup.sh
deleted file mode 100755 (executable)
index c21b543..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-echo "Cleaning up docker containers/images..."
-FUNCTEST_IMAGE=opnfv/functest
-
-# Remove containers along with image opnfv/functest*:<none>
-dangling_images=($(docker images -f "dangling=true" | grep $FUNCTEST_IMAGE | awk '{print $3}'))
-if [[ -n ${dangling_images} ]]; then
-    echo "  Removing $FUNCTEST_IMAGE:<none> images and their containers..."
-    for image_id in "${dangling_images[@]}"; do
-        echo "      Removing image_id: $image_id and its containers"
-        containers=$(docker ps -a | grep $image_id | awk '{print $1}')
-        if [[ -n "$containers" ]];then
-            docker rm -f $containers >${redirect}
-        fi
-        docker rmi $image_id >${redirect}
-    done
-fi
-
-# Remove previously running containers if they exist
-functest_containers=$(docker ps -a | grep $FUNCTEST_IMAGE | awk '{print $1}')
-if [[ -n ${functest_containers} ]]; then
-    echo "  Removing existing $FUNCTEST_IMAGE containers..."
-    docker rm -f $functest_containers >${redirect}
-fi
-
-# Remove existing images when CLEAN_DOCKER_IMAGES is set
-if [[ $CLEAN_DOCKER_IMAGES == true ]]; then
-    functest_image_tags=($(docker images | grep $FUNCTEST_IMAGE | awk '{print $2}'))
-    if [[ -n ${functest_image_tags} ]]; then
-        echo "  Docker images to be removed:" >${redirect}
-        (docker images | head -1 && docker images | grep $FUNCTEST_IMAGE) >${redirect}
-        for tag in "${functest_image_tags[@]}"; do
-            echo "      Removing docker image $FUNCTEST_IMAGE:$tag..."
-            docker rmi $FUNCTEST_IMAGE:$tag >${redirect}
-        done
-    fi
-fi
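The same cleanup can be approximated with docker's own filters instead of grepping `docker ps`/`docker images` output. This is an alternative sketch, not what the job ran:

    # Remove any leftover containers based on opnfv/functest* images.
    docker ps -a --format '{{.ID}} {{.Image}}' | awk '/opnfv\/functest/ {print $1}' | xargs -r docker rm -f
    # Drop dangling (untagged) image layers; xargs -r skips the call when the list is empty.
    docker images -qf "dangling=true" | xargs -r docker rmi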
diff --git a/jjb/functest/functest-daily-jobs.yaml b/jjb/functest/functest-daily-jobs.yaml
deleted file mode 100644 (file)
index 3cdff3d..0000000
+++ /dev/null
@@ -1,304 +0,0 @@
----
-###################################
-# job configuration for functest
-###################################
-- project:
-    name: functest-daily
-
-    project: functest
-
-    # -------------------------------
-    # BRANCH ANCHORS
-    # -------------------------------
-    master: &master
-      stream: master
-      branch: '{stream}'
-      gs-pathname: ''
-    iruya: &iruya
-      stream: iruya
-      branch: 'stable/{stream}'
-      gs-pathname: '/{stream}'
-    hunter: &hunter
-      stream: hunter
-      branch: 'stable/{stream}'
-      gs-pathname: '/{stream}'
-    # -------------------------------
-    # POD, INSTALLER, AND BRANCH MAPPING
-    # -------------------------------
-    #    Installers using labels
-    #            CI PODs
-    # This section should only contain the installers
-    # that have been switched over to slave labels
-    # -------------------------------
-    pod:
-      # fuel CI PODs
-      - baremetal:
-          slave-label: fuel-baremetal
-          installer: fuel
-          <<: *master
-      - virtual:
-          slave-label: fuel-virtual
-          installer: fuel
-          <<: *master
-      - baremetal:
-          slave-label: fuel-baremetal
-          installer: fuel
-          <<: *iruya
-      - virtual:
-          slave-label: fuel-virtual
-          installer: fuel
-          <<: *iruya
-      # apex CI PODs
-      - virtual:
-          slave-label: apex-virtual-master
-          installer: apex
-          <<: *master
-      - baremetal:
-          slave-label: apex-baremetal-master
-          installer: apex
-          <<: *master
-      - virtual:
-          slave-label: apex-virtual-master
-          installer: apex
-          <<: *iruya
-      - baremetal:
-          slave-label: apex-baremetal-master
-          installer: apex
-          <<: *iruya
-      # armband CI PODs
-      - armband-baremetal:
-          slave-label: armband-baremetal
-          installer: fuel
-          <<: *master
-      - armband-baremetal:
-          slave-label: armband-baremetal
-          installer: fuel
-          <<: *iruya
-      # fuel NONE CI PODs
-      - zte-pod1:
-          slave-label: fuel-baremetal
-          installer: fuel
-          <<: *master
-      - itri-pod1:
-          slave-label: fuel-baremetal
-          installer: fuel
-          <<: *master
-
-    testsuite:
-      - 'suite':
-          job-timeout: 60
-      - 'daily':
-          job-timeout: 600
-
-    jobs:
-      - 'functest-{installer}-{pod}-{testsuite}-{stream}'
-
-################################
-# job template
-################################
-- job-template:
-    name: 'functest-{installer}-{pod}-{testsuite}-{stream}'
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-per-node: 1
-          max-total: 10
-          option: 'project'
-
-    wrappers:
-      - build-name:
-          name: '$BUILD_NUMBER Suite: $FUNCTEST_MODE Scenario: $DEPLOY_SCENARIO'
-      - timeout:
-          timeout: '{job-timeout}'
-          abort: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{installer}-defaults'
-      - '{slave-label}-defaults'
-      - 'functest-{testsuite}-parameter'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-nosdn-nofeature-noha'
-      - string:
-          name: DOCKER_TAG
-          default: ''
-      - string:
-          name: CLEAN_DOCKER_IMAGES
-          default: 'false'
-          description: 'Remove downloaded docker images (opnfv/functest*:*)'
-      - functest-parameter:
-          gs-pathname: '{gs-pathname}'
-
-    scm:
-      - git-scm
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - 'functest-{testsuite}-builder'
-
-
-########################
-# parameter macros
-########################
-- parameter:
-    name: functest-daily-parameter
-    parameters:
-      - string:
-          name: FUNCTEST_MODE
-          default: 'daily'
-          description: "Daily suite name to run"
-
-- parameter:
-    name: functest-suite-parameter
-    parameters:
-      - choice:
-          name: FUNCTEST_MODE
-          choices:
-            - 'tier'
-            - 'testcase'
-          default: 'tier'
-          description: "Test case or Tier to be run"
-      - choice:
-          name: FUNCTEST_SUITE_NAME
-          choices:
-            - 'connection_check'
-            - 'api_check'
-            - 'snaps_health_check'
-            - 'vping_ssh'
-            - 'vping_userdata'
-            - 'cinder_test'
-            - 'tempest_smoke'
-            - 'rally_sanity'
-            - 'refstack_defcore'
-            - 'patrole'
-            - 'odl'
-            - 'snaps_smoke'
-            - 'shaker'
-            - 'vmtp'
-            - 'neutron_trunk'
-            - 'tempest_full'
-            - 'rally_full'
-            - 'cloudify_ims'
-            - 'vyos_vrouter'
-            - 'juju_epc'
-            - 'parser'
-          default: 'connection_check'
-      - choice:
-          name: FUNCTEST_TIER
-          choices:
-            - 'healthcheck'
-            - 'smoke'
-            - 'benchmarking'
-            - 'components'
-            - 'vnf'
-            - 'parser'
-          default: 'healthcheck'
-      - string:
-          name: TESTCASE_OPTIONS
-          default: ''
-          description: 'Additional parameters specific to test case(s)'
-
-- parameter:
-    name: functest-parameter
-    parameters:
-      - string:
-          name: GS_PATHNAME
-          default: '{gs-pathname}'
-          description: "Version directory where the opnfv documents will be stored in gs repository"
-      - string:
-          name: FUNCTEST_REPO_DIR
-          default: "/home/opnfv/repos/functest"
-          description: "Directory where the Functest repository is cloned"
-      - string:
-          name: PUSH_RESULTS_TO_DB
-          default: "true"
-          description: "Push the results of all the tests to the resultDB"
-      - string:
-          name: CI_DEBUG
-          default: 'false'
-          description: "Show debug output information"
-      - string:
-          name: RC_FILE_PATH
-          default: ''
-          description: "Path to the OS credentials file if given"
-      - string:
-          name: REPO
-          default: "opnfv"
-          description: "Repository name for functest images"
-########################
-# trigger macros
-########################
-- trigger:
-    name: 'functest-master'
-    triggers:
-      - pollscm:
-          cron: "H 9 * * *"
-########################
-# builder macros
-########################
-- builder:
-    name: functest-daily-builder
-    builders:
-      - 'functest-cleanup'
-      - 'functest-daily'
-      - 'functest-store-results'
-      - 'functest-exit'
-
-- builder:
-    name: functest-suite-builder
-    builders:
-      - 'functest-cleanup'
-      - 'functest-daily'
-      - 'functest-store-results'
-      - 'functest-exit'
-
-- builder:
-    name: functest-daily
-    builders:
-      # yamllint disable rule:indentation
-      - conditional-step:
-          condition-kind: regex-match
-          regex: "os-.*"
-          label: '$DEPLOY_SCENARIO'
-          steps:
-            - shell:
-                !include-raw:
-                    - ./functest-env-presetup.sh
-                    - ../../utils/fetch_os_creds.sh
-                    - ./functest-alpine.sh
-      - conditional-step:
-          condition-kind: regex-match
-          regex: "k8-.*"
-          label: '$DEPLOY_SCENARIO'
-          steps:
-            - shell:
-                !include-raw:
-                    - ../../utils/fetch_k8_conf.sh
-                    - ./functest-k8.sh
-
-# yamllint enable rule:indentation
-- builder:
-    name: functest-store-results
-    builders:
-      - shell:
-          !include-raw: ../../utils/push-test-logs.sh
-
-- builder:
-    name: functest-cleanup
-    builders:
-      - shell:
-          !include-raw: ./functest-cleanup.sh
-
-- builder:
-    name: functest-exit
-    builders:
-      - shell:
-          !include-raw: ./functest-exit.sh
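The conditional-step builders above dispatch on the DEPLOY_SCENARIO prefix and concatenate the listed scripts into one shell step. Roughly, and only as an illustration of the branching the Jenkins conditional-step plugin performed:

    # Illustrative shell equivalent of the os-*/k8-* scenario dispatch.
    case "$DEPLOY_SCENARIO" in
        os-*) ./functest-env-presetup.sh && ../../utils/fetch_os_creds.sh && ./functest-alpine.sh ;;
        k8-*) ../../utils/fetch_k8_conf.sh && ./functest-k8.sh ;;
        *)    echo "Unhandled scenario: $DEPLOY_SCENARIO" ;;
    esac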
index 69c994d..d2549ef 100644 (file)
@@ -12,6 +12,9 @@
       - master:
           branch: '{stream}'
           disabled: false
+      - kali:
+          branch: 'stable/{stream}'
+          disabled: false
       - jerma:
           branch: 'stable/{stream}'
           disabled: false
     exclude:
       - stream: 'master'
         image: 'tempest'
+      - stream: 'kali'
+        image: 'tempest'
       - stream: 'jerma'
         image: 'tempest'
       - stream: 'master'
         image: 'features'
+      - stream: 'kali'
+        image: 'features'
       - stream: 'jerma'
         image: 'features'
       - stream: 'iruya'
diff --git a/jjb/functest/functest-env-presetup.sh b/jjb/functest/functest-env-presetup.sh
deleted file mode 100755 (executable)
index 510670b..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env bash
-set -o errexit
-set -o pipefail
-
-# Fetch INSTALLER_IP for APEX deployments
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
-    if [ -n "$RC_FILE_PATH" ]; then
-        echo "RC_FILE_PATH is set: ${RC_FILE_PATH}...skipping detecting UC IP"
-    else
-        echo "Gathering IP information for Apex installer VM"
-        ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-        if sudo virsh list | grep undercloud; then
-            echo "Installer VM detected"
-            undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
-                      grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
-            export INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
-            export sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
-            sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
-            export stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
-
-            if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
-                sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
-            fi
-            if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
-                sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
-            fi
-            echo "Installer ip is ${INSTALLER_IP}"
-        else
-            echo "No available installer VM exists and no credentials provided...exiting"
-            exit 1
-        fi
-    fi
-
-elif [[ ${INSTALLER_TYPE} == 'daisy' ]]; then
-    echo "Gathering IP information for Daisy installer VM"
-    if sudo virsh list | grep daisy; then
-        echo "Installer VM detected"
-
-        bridge_name=$(sudo virsh domiflist daisy | grep vnet | awk '{print $3}')
-        echo "Bridge is $bridge_name"
-
-        installer_mac=$(sudo virsh domiflist daisy | grep vnet | \
-                      grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
-        export INSTALLER_IP=$(/usr/sbin/arp -e -i $bridge_name | grep ${installer_mac} | head -n 1 | awk {'print $1'})
-
-        echo "Installer ip is ${INSTALLER_IP}"
-    else
-        echo "No available installer VM exists...exiting"
-        exit 1
-    fi
-
-elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
-    if [[ ! "${BRANCH}" =~ "danube" ]]; then
-        echo "Map mcp ssh_key"
-        export sshkey_vol="-v ${SSH_KEY:-/var/lib/opnfv/mcp.rsa}:/root/.ssh/id_rsa"
-    fi
-fi
-
diff --git a/jjb/functest/functest-exit.sh b/jjb/functest/functest-exit.sh
deleted file mode 100644 (file)
index 925a3cf..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-if [ ! -f ${ret_val_file} ]; then
-    echo "Return value not found!"
-    exit -1
-fi
-
-ret_val=`cat ${ret_val_file}`
-
-exit ${ret_val}
diff --git a/jjb/functest/functest-k8.sh b/jjb/functest/functest-k8.sh
deleted file mode 100755 (executable)
index fb0e955..0000000
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-
-set -e
-set +u
-set +o pipefail
-
-redirect="/dev/stdout"
-FUNCTEST_DIR=/home/opnfv/functest
-
-admin_conf_file_vol="-v ${HOME}/admin.conf:/root/.kube/config"
-cat ${HOME}/admin.conf
-
-dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-results_vol="-v ${dir_result}:${FUNCTEST_DIR}/results"
-
-volumes="${results_vol} ${admin_conf_file_vol}"
-
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} \
-    -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
-    -e BUILD_TAG=${BUILD_TAG}"
-
-DOCKER_TAG=${DOCKER_TAG:-$([[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/})}
-
-set +e
-
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-echo 0 > ${ret_val_file}
-
-FUNCTEST_IMAGES="\
-opnfv/functest-kubernetes-healthcheck:${DOCKER_TAG} \
-opnfv/functest-kubernetes-smoke:${DOCKER_TAG}"
-cmd_opt="run_tests -r -t all"
-
-for image in ${FUNCTEST_IMAGES}; do
-    echo "Pulling Docker image ${image} ..."
-    docker pull "${image}" >/dev/null
-    cmd="docker run --rm ${envs} ${volumes} ${image} /bin/bash -c '${cmd_opt}'"
-    echo "Running Functest k8s test cases, CMD: ${cmd}"
-    eval ${cmd}
-    ret_value=$?
-    if [ ${ret_value} != 0 ]; then
-        echo ${ret_value} > ${ret_val_file}
-    fi
-done
index 21fbdb4..d62691a 100644 (file)
@@ -12,6 +12,9 @@
       - master:
           branch: '{stream}'
           disabled: false
+      - kali:
+          branch: 'stable/{stream}'
+          disabled: false
       - jerma:
           branch: 'stable/{stream}'
           disabled: false
@@ -33,6 +36,8 @@
       - 'core'
       - 'healthcheck'
       - 'smoke'
+      - 'cnf'
+      - 'security'
 
     # settings for jobs run in multijob phases
     build-job-settings: &build-job-settings
             - name: 'functest-kubernetes-core-docker-manifest-{stream}'
               <<: *manifest-job-settings
       - multijob:
-          name: 'build functest-kubernetes-healthcheck images'
+          name: 'build functest-kubernetes-[healthcheck,cnf,security] images'
           execution-type: PARALLEL
           projects:
             - name: 'functest-kubernetes-healthcheck-docker-build-amd64-{stream}'
               <<: *build-job-settings
             - name: 'functest-kubernetes-healthcheck-docker-build-arm64-{stream}'
               <<: *build-job-settings
+            - name: 'functest-kubernetes-cnf-docker-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'functest-kubernetes-cnf-docker-build-arm64-{stream}'
+              <<: *build-job-settings
+            - name: 'functest-kubernetes-security-docker-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'functest-kubernetes-security-docker-build-arm64-{stream}'
+              <<: *build-job-settings
       - multijob:
-          name: 'publish functest-kubernetes-healthcheck manifests'
+          name: 'publish functest-kubernetes-[healthcheck,cnf,security] manifests'
           execution-type: PARALLEL
           projects:
             - name: 'functest-kubernetes-healthcheck-docker-manifest-{stream}'
               <<: *manifest-job-settings
+            - name: 'functest-kubernetes-cnf-docker-manifest-{stream}'
+              <<: *manifest-job-settings
+            - name: 'functest-kubernetes-security-docker-manifest-{stream}'
+              <<: *manifest-job-settings
       - multijob:
-          name: 'build all functest-kubernetes images'
+          name: 'build functest-kubernetes-smoke image'
           condition: SUCCESSFUL
           execution-type: PARALLEL
           projects:
             - name: 'functest-kubernetes-smoke-docker-build-arm64-{stream}'
               <<: *build-job-settings
       - multijob:
-          name: 'publish all manifests'
+          name: 'publish functest-kubernetes-smoke manifests'
           condition: SUCCESSFUL
           execution-type: PARALLEL
           projects:
           #!/bin/bash -ex
           case "{arch_tag}" in
           "arm64")
-              sudo amd64_dirs= arm64_dirs=docker/{image} bash ./build.sh ;;
+              sudo amd64_dirs= arm_dirs= arm64_dirs=docker/{image} bash ./build.sh ;;
           *)
-              sudo amd64_dirs=docker/{image} arm64_dirs= bash ./build.sh ;;
+              sudo amd64_dirs=docker/{image} arm_dirs= arm64_dirs= bash ./build.sh ;;
           esac
           exit $?
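The build step above selects which Dockerfile directories get built per architecture by exporting the *_dirs variables consumed by build.sh. A local equivalent for one image would look roughly like this, assuming a functest-kubernetes checkout with the docker/<image> layout that build.sh expects (the image name is illustrative):

    # amd64-only build of the healthcheck image; the arm/arm64 lists are emptied on purpose.
    sudo amd64_dirs=docker/healthcheck arm_dirs= arm64_dirs= bash ./build.sh
    # arm64-only build of the same image.
    sudo amd64_dirs= arm_dirs= arm64_dirs=docker/healthcheck bash ./build.sh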
 
diff --git a/jjb/functest/functest-kubernetes-pi.yaml b/jjb/functest/functest-kubernetes-pi.yaml
new file mode 100644 (file)
index 0000000..4df9eef
--- /dev/null
@@ -0,0 +1,477 @@
+---
+- functest-kubernetes-pi-jobs: &functest-kubernetes-pi-jobs
+    name: 'functest-kubernetes-pi-jobs'
+    current-parameters: true
+
+- functest-kubernetes-pi-params: &functest-kubernetes-pi-params
+    name: 'functest-kubernetes-pi-params'
+    repo: 'ollivier'
+    port:
+    tag:
+      - latest:
+          branch: master
+          slave: lf-virtual1-5
+      - kali:
+          branch: stable/kali
+          slave: lf-virtual1-4
+      - jerma:
+          branch: stable/jerma
+          slave: lf-virtual1-3
+      - iruya:
+          branch: stable/iruya
+          slave: lf-virtual1-2
+      - hunter:
+          branch: stable/hunter
+          slave: lf-virtual1-1
+      - arm-latest:
+          branch: master
+          slave: lf-virtual1-5
+      - arm-kali:
+          branch: stable/kali
+          slave: lf-virtual1-4
+      - arm-jerma:
+          branch: stable/jerma
+          slave: lf-virtual1-3
+      - arm-iruya:
+          branch: stable/iruya
+          slave: lf-virtual1-2
+      - arm-hunter:
+          branch: stable/hunter
+          slave: lf-virtual1-1
+      - arm64-latest:
+          branch: master
+          slave: lf-virtual1-5
+      - arm64-kali:
+          branch: stable/kali
+          slave: lf-virtual1-4
+      - arm64-jerma:
+          branch: stable/jerma
+          slave: lf-virtual1-3
+      - arm64-iruya:
+          branch: stable/iruya
+          slave: lf-virtual1-2
+      - arm64-hunter:
+          branch: stable/hunter
+          slave: lf-virtual1-1
+
+- parameter:
+    name: functest-kubernetes-pi-slave
+    parameters:
+      - label:
+          name: slave
+          default: '{slave}'
+
+- parameter:
+    name: functest-kubernetes-pi-build_tag
+    parameters:
+      - random-string:
+          name: build_tag
+
+- parameter:
+    name: functest-kubernetes-pi-branch
+    parameters:
+      - string:
+          name: branch
+          default: '{branch}'
+
+- parameter:
+    name: functest-kubernetes-pi-DEPLOY_SCENARIO
+    parameters:
+      - string:
+          name: DEPLOY_SCENARIO
+          default: k8-nosdn-nofeature-noha
+- parameter:
+    name: functest-kubernetes-pi-DEBUG
+    parameters:
+      - string:
+          name: DEBUG
+          default: 'true'
+
+- functest-kubernetes-pi-containers: &functest-kubernetes-pi-containers
+    name: 'functest-kubernetes-pi-containers'
+    repo: '{repo}'
+    port: '{port}'
+    container: '{container}'
+    tag: '{tag}'
+
+- functest-kubernetes-pi-run-containers: &functest-kubernetes-pi-run-containers
+    name: 'functest-kubernetes-pi-run-containers'
+    <<: *functest-kubernetes-pi-containers
+    test: '{test}'
+    privileged: '{privileged}'
+    network: '{network}'
+
+- builder:
+    name: functest-kubernetes-pi-pull-containers
+    builders:
+      - shell: |
+          set +x
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          docker pull $image
+
+- builder:
+    name: functest-kubernetes-pi-run-containers
+    builders:
+      - shell: |
+          set +x
+          [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/* || true
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          docker run --rm \
+            --privileged={privileged} \
+            --network={network} \
+            -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+            -e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+            -e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+            -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e NODE_NAME=$slave \
+            -e BUILD_TAG=$BUILD_TAG \
+            -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+            -e DEPLOY_SCENARIO=$DEPLOY_SCENARIO \
+            -e DEBUG=$DEBUG \
+            -v /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config \
+            -v /home/opnfv/functest-kubernetes/.boto:/root/.boto \
+            $image run_tests -t {test} -p -r
+
+- builder:
+    name: functest-kubernetes-pi-remove-images
+    builders:
+      - shell: |
+          set +x
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          docker rmi $image || true
+
+
+- job-template:
+    name: 'functest-kubernetes-pi-{repo}-{container}-{tag}-pull'
+    parameters:
+      - functest-kubernetes-pi-slave:
+          slave: '{slave}'
+    builders:
+      - functest-kubernetes-pi-pull-containers:
+          <<: *functest-kubernetes-pi-containers
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-pull'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-healthcheck'
+    exclude:
+      - tag: arm-hunter
+      - tag: arm64-hunter
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-pull'
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-pull'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-smoke'
+    exclude:
+      - tag: arm-hunter
+      - tag: arm64-hunter
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-pull'
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-pull'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-security'
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-pull'
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-pull'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-cnf'
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-pull'
+
+- job-template:
+    name: 'functest-kubernetes-pi-{repo}-{container}-{tag}-rmi'
+    parameters:
+      - functest-kubernetes-pi-slave:
+          slave: '{slave}'
+    builders:
+      - functest-kubernetes-pi-remove-images:
+          <<: *functest-kubernetes-pi-containers
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-rmi'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-healthcheck'
+    exclude:
+      - tag: arm-hunter
+      - tag: arm64-hunter
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-rmi'
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-rmi'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-smoke'
+    exclude:
+      - tag: arm-hunter
+      - tag: arm64-hunter
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-rmi'
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-rmi'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-security'
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-rmi'
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-rmi'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-cnf'
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-rmi'
+
+- job-template:
+    name: 'functest-kubernetes-pi-{repo}-{container}-{tag}-{test}-run'
+    parameters:
+      - functest-kubernetes-pi-slave:
+          slave: '{slave}'
+      - functest-kubernetes-pi-build_tag:
+          build_tag: ''
+      - functest-kubernetes-pi-DEPLOY_SCENARIO:
+          DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+      - functest-kubernetes-pi-DEBUG:
+          DEBUG: 'true'
+    builders:
+      - functest-kubernetes-pi-run-containers:
+          <<: *functest-kubernetes-pi-run-containers
+
+- job-template:
+    name: 'functest-kubernetes-pi-{repo}-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+    properties:
+      - build-blocker:
+          use-build-blocker: true
+          blocking-level: 'GLOBAL'
+          blocking-jobs:
+            - '^functest-kubernetes(-pi)?-*-k8s_vims-run$'
+    parameters:
+      - functest-kubernetes-pi-slave:
+          slave: '{slave}'
+      - functest-kubernetes-pi-build_tag:
+          build_tag: ''
+      - functest-kubernetes-pi-DEPLOY_SCENARIO:
+          DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+      - functest-kubernetes-pi-DEBUG:
+          DEBUG: 'true'
+    builders:
+      - functest-kubernetes-pi-run-containers:
+          <<: *functest-kubernetes-pi-run-containers
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-healthcheck'
+    test:
+      - k8s_smoke
+    exclude:
+      - tag: arm-hunter
+      - tag: arm64-hunter
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-smoke'
+    test:
+      - xrally_kubernetes
+      - k8s_conformance
+    exclude:
+      - tag: arm-hunter
+      - tag: arm64-hunter
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-security'
+    test:
+      - kube_hunter
+      - kube_bench
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-cnf'
+    test:
+      - k8s_vims
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-kubernetes-pi-{repo}-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+
+
+- builder:
+    name: functest-kubernetes-pi-zip
+    builders:
+      - shell: |
+          set +x
+          [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/* || true
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          docker run --rm \
+            -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+            -e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes \
+            -e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes \
+            -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e BUILD_TAG=$BUILD_TAG \
+            -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+            -e DEPLOY_SCENARIO=$DEPLOY_SCENARIO \
+            -e DEBUG=$DEBUG \
+            -v /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config \
+            -v /home/opnfv/functest-kubernetes/.boto:/root/.boto \
+            $image zip_campaign
+
+- job-template:
+    name: 'functest-kubernetes-pi-{tag}-zip'
+    parameters:
+      - functest-kubernetes-pi-slave:
+          slave: '{slave}'
+      - functest-kubernetes-pi-build_tag:
+          build_tag: ''
+      - functest-kubernetes-pi-DEPLOY_SCENARIO:
+          DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+      - functest-kubernetes-pi-DEBUG:
+          DEBUG: 'true'
+    builders:
+      - functest-kubernetes-pi-zip:
+          <<: *functest-kubernetes-pi-containers
+
+- project:
+    name: 'functest-kubernetes-pi-{tag}-zip'
+    <<: *functest-kubernetes-pi-params
+    container: 'functest-kubernetes-security'
+    jobs:
+      - 'functest-kubernetes-pi-{tag}-zip'
+
+- job-template:
+    name: 'functest-kubernetes-pi-{tag}-daily'
+    project-type: multijob
+    triggers:
+      - timed: '@daily'
+    parameters:
+      - functest-kubernetes-pi-slave:
+          slave: '{slave}'
+      - functest-kubernetes-pi-build_tag:
+          build_tag: ''
+      - functest-kubernetes-pi-DEPLOY_SCENARIO:
+          DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+      - functest-kubernetes-pi-DEBUG:
+          DEBUG: 'true'
+    properties:
+      - build-blocker:
+          use-build-blocker: true
+          blocking-level: 'NODE'
+          blocking-jobs:
+            - '^functest-kubernetes(-pi)?-(arm.*-|amd64-)*[a-z]+-(daily|gate|check)$'
+    builders:
+      - multijob:
+          name: remove former images
+          projects:
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-rmi'
+              <<: *functest-kubernetes-pi-jobs
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-rmi'
+              <<: *functest-kubernetes-pi-jobs
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-rmi'
+              <<: *functest-kubernetes-pi-jobs
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-rmi'
+              <<: *functest-kubernetes-pi-jobs
+      - multijob:
+          name: pull containers
+          projects:
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-pull'
+              <<: *functest-kubernetes-pi-jobs
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-pull'
+              <<: *functest-kubernetes-pi-jobs
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-pull'
+              <<: *functest-kubernetes-pi-jobs
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-pull'
+              <<: *functest-kubernetes-pi-jobs
+      - multijob:
+          name: ollivier/functest-kubernetes-healthcheck:{tag}
+          projects:
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-k8s_smoke-run'
+              <<: *functest-kubernetes-pi-jobs
+      - multijob:
+          name: ollivier/functest-kubernetes-smoke:{tag}
+          projects:
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-xrally_kubernetes-run'
+              <<: *functest-kubernetes-pi-jobs
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
+              <<: *functest-kubernetes-pi-jobs
+      - multijob:
+          name: ollivier/functest-kubernetes-security:{tag}
+          projects:
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-kube_hunter-run'
+              <<: *functest-kubernetes-pi-jobs
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-kube_bench-run'
+              <<: *functest-kubernetes-pi-jobs
+      - multijob:
+          name: ollivier/functest-kubernetes-cnf:{tag}
+          projects:
+            - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+              <<: *functest-kubernetes-pi-jobs
+      - multijob:
+          name: dump all campaign data
+          projects:
+            - name: 'functest-kubernetes-pi-{tag}-zip'
+              <<: *functest-kubernetes-pi-jobs
+
+
+- project:
+    name: 'functest-kubernetes-pi'
+    <<: *functest-kubernetes-pi-params
+    jobs:
+      - 'functest-kubernetes-pi-{tag}-daily'
+
+- view:
+    name: functest-kubernetes-pi
+    view-type: list
+    columns:
+      - status
+      - weather
+      - job
+      - last-success
+      - last-failure
+      - last-duration
+    regex: ^functest-kubernetes-pi-(arm.*-|amd64-)*[a-z]+-daily$
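For one concrete tag, the run-containers builder above renders to roughly the following command. This expansion is illustrative only (latest/amd64 healthcheck job, k8s_smoke test); the kubeconfig and .boto paths are the ones mounted in the template:

    docker run --rm \
        --privileged=false --network=bridge \
        -e S3_ENDPOINT_URL=https://storage.googleapis.com \
        -e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
        -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
        -e NODE_NAME=$slave -e BUILD_TAG=$BUILD_TAG \
        -e DEPLOY_SCENARIO=k8-nosdn-nofeature-noha -e DEBUG=true \
        -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
        -v /home/opnfv/functest-kubernetes/config.latest:/root/.kube/config \
        -v /home/opnfv/functest-kubernetes/.boto:/root/.boto \
        ollivier/functest-kubernetes-healthcheck:latest run_tests -t k8s_smoke -p -r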
index 2e1a410..7458d8e 100644 (file)
@@ -5,6 +5,9 @@
       - latest:
           branch: master
           slave: lf-virtual1
+      - kali:
+          branch: stable/kali
+          slave: lf-virtual1
       - jerma:
           branch: stable/jerma
           slave: lf-virtual1
index fcc1eb1..db5c500 100644 (file)
     tag:
       - latest:
           branch: master
-          slave: lf-virtual8
+          slave: lf-virtual1-5
+      - kali:
+          branch: stable/kali
+          slave: lf-virtual1-4
       - jerma:
           branch: stable/jerma
-          slave: lf-virtual8
+          slave: lf-virtual1-3
       - iruya:
           branch: stable/iruya
-          slave: lf-virtual7
+          slave: lf-virtual1-2
       - hunter:
           branch: stable/hunter
-          slave: lf-virtual5
+          slave: lf-virtual1-1
+      - arm64-latest:
+          branch: master
+          slave: lf-virtual1-5
+      - arm64-kali:
+          branch: stable/kali
+          slave: lf-virtual1-4
+      - arm64-jerma:
+          branch: stable/jerma
+          slave: lf-virtual1-3
+      - arm64-iruya:
+          branch: stable/iruya
+          slave: lf-virtual1-2
+      - arm64-hunter:
+          branch: stable/hunter
+          slave: lf-virtual1-1
 
 - parameter:
     name: functest-kubernetes-slave
           name: branch
           default: '{branch}'
 
-- parameter:
-    name: functest-kubernetes-DEBUG
-    parameters:
-      - string:
-          name: DEBUG
-          default: 'true'
 - parameter:
     name: functest-kubernetes-DEPLOY_SCENARIO
     parameters:
       - string:
           name: DEPLOY_SCENARIO
           default: k8-nosdn-nofeature-noha
+- parameter:
+    name: functest-kubernetes-DEBUG
+    parameters:
+      - string:
+          name: DEBUG
+          default: 'true'
 
 - functest-kubernetes-containers: &functest-kubernetes-containers
     name: 'functest-kubernetes-containers'
@@ -65,6 +83,8 @@
     name: 'functest-kubernetes-run-containers'
     <<: *functest-kubernetes-containers
     test: '{test}'
+    privileged: '{privileged}'
+    network: '{network}'
 
 - builder:
     name: functest-kubernetes-pull-containers
             image={repo}:{port}/{container}:{tag}
           fi
           docker run --rm \
+            --privileged={privileged} \
+            --network={network} \
             -e S3_ENDPOINT_URL=https://storage.googleapis.com \
             -e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
             -e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
             -e NODE_NAME=$slave \
             -e BUILD_TAG=$BUILD_TAG \
             -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
-            -e DEBUG=$DEBUG \
             -e DEPLOY_SCENARIO=$DEPLOY_SCENARIO \
+            -e DEBUG=$DEBUG \
             -v /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config \
             -v /home/opnfv/functest-kubernetes/.boto:/root/.boto \
             $image run_tests -t {test} -p -r
     jobs:
       - 'functest-kubernetes-{repo}-{container}-{tag}-pull'
 
+- project:
+    name: 'functest-kubernetes-opnfv-functest-kubernetes-security-pull'
+    <<: *functest-kubernetes-params
+    container: 'functest-kubernetes-security'
+    jobs:
+      - 'functest-kubernetes-{repo}-{container}-{tag}-pull'
+
+- project:
+    name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-pull'
+    <<: *functest-kubernetes-params
+    container: 'functest-kubernetes-cnf'
+    jobs:
+      - 'functest-kubernetes-{repo}-{container}-{tag}-pull'
+
 - job-template:
     name: 'functest-kubernetes-{repo}-{container}-{tag}-rmi'
     parameters:
     jobs:
       - 'functest-kubernetes-{repo}-{container}-{tag}-rmi'
 
+- project:
+    name: 'functest-kubernetes-opnfv-functest-kubernetes-security-rmi'
+    <<: *functest-kubernetes-params
+    container: 'functest-kubernetes-security'
+    jobs:
+      - 'functest-kubernetes-{repo}-{container}-{tag}-rmi'
+
+- project:
+    name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-rmi'
+    <<: *functest-kubernetes-params
+    container: 'functest-kubernetes-cnf'
+    jobs:
+      - 'functest-kubernetes-{repo}-{container}-{tag}-rmi'
+
 - job-template:
     name: 'functest-kubernetes-{repo}-{container}-{tag}-{test}-run'
     parameters:
           slave: '{slave}'
       - functest-kubernetes-build_tag:
           build_tag: ''
+      - functest-kubernetes-DEPLOY_SCENARIO:
+          DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
       - functest-kubernetes-DEBUG:
           DEBUG: 'true'
+    builders:
+      - functest-kubernetes-run-containers:
+          <<: *functest-kubernetes-run-containers
+
+- job-template:
+    name: 'functest-kubernetes-{repo}-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+    properties:
+      - build-blocker:
+          use-build-blocker: true
+          blocking-level: 'GLOBAL'
+          blocking-jobs:
+            - '^functest-kubernetes(-pi)?-*-k8s_vims-run$'
+    parameters:
+      - functest-kubernetes-slave:
+          slave: '{slave}'
+      - functest-kubernetes-build_tag:
+          build_tag: ''
       - functest-kubernetes-DEPLOY_SCENARIO:
           DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+      - functest-kubernetes-DEBUG:
+          DEBUG: 'true'
     builders:
       - functest-kubernetes-run-containers:
           <<: *functest-kubernetes-run-containers
     container: 'functest-kubernetes-healthcheck'
     test:
       - k8s_smoke
+    privileged: 'false'
+    network: bridge
     jobs:
       - 'functest-kubernetes-{repo}-{container}-{tag}-{test}-run'
 
     test:
       - xrally_kubernetes
       - k8s_conformance
-    exclude:
-      - tag: iruya
-        test: xrally_kubernetes
-      - tag: hunter
-        test: xrally_kubernetes
+    privileged: 'false'
+    network: bridge
     jobs:
       - 'functest-kubernetes-{repo}-{container}-{tag}-{test}-run'
 
+- project:
+    name: 'functest-kubernetes-opnfv-functest-kubernetes-security'
+    <<: *functest-kubernetes-params
+    container: 'functest-kubernetes-security'
+    test:
+      - kube_hunter
+      - kube_bench
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-kubernetes-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf'
+    <<: *functest-kubernetes-params
+    container: 'functest-kubernetes-cnf'
+    test:
+      - k8s_vims
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-kubernetes-{repo}-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+
 
 - builder:
     name: functest-kubernetes-zip
             -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
             -e BUILD_TAG=$BUILD_TAG \
             -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
-            -e DEBUG=$DEBUG \
             -e DEPLOY_SCENARIO=$DEPLOY_SCENARIO \
+            -e DEBUG=$DEBUG \
             -v /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config \
             -v /home/opnfv/functest-kubernetes/.boto:/root/.boto \
             $image zip_campaign
           slave: '{slave}'
       - functest-kubernetes-build_tag:
           build_tag: ''
-      - functest-kubernetes-DEBUG:
-          DEBUG: 'true'
       - functest-kubernetes-DEPLOY_SCENARIO:
           DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+      - functest-kubernetes-DEBUG:
+          DEBUG: 'true'
     builders:
       - functest-kubernetes-zip:
           <<: *functest-kubernetes-containers
           slave: '{slave}'
       - functest-kubernetes-build_tag:
           build_tag: ''
-      - functest-kubernetes-DEBUG:
-          DEBUG: 'true'
       - functest-kubernetes-DEPLOY_SCENARIO:
           DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+      - functest-kubernetes-DEBUG:
+          DEBUG: 'true'
     properties:
       - build-blocker:
           use-build-blocker: true
           blocking-level: 'NODE'
           blocking-jobs:
-            - '^functest-kubernetes-{tag}-(daily|check|gate)$'
+            - '^functest-kubernetes(-pi)?-(arm.*-|amd64-)*[a-z]+-(daily|gate|check)$'
     builders:
       - multijob:
           name: remove former images
               <<: *functest-kubernetes-jobs
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
               <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+              <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+              <<: *functest-kubernetes-jobs
       - multijob:
           name: pull containers
           projects:
               <<: *functest-kubernetes-jobs
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-pull'
               <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-pull'
+              <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-pull'
+              <<: *functest-kubernetes-jobs
       - multijob:
           name: opnfv/functest-kubernetes-healthcheck:{tag}
           projects:
               <<: *functest-kubernetes-jobs
       - multijob:
           name: opnfv/functest-kubernetes-smoke:{tag}
-          execution-type: SEQUENTIALLY
           projects:
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-xrally_kubernetes-run'
               <<: *functest-kubernetes-jobs
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
               <<: *functest-kubernetes-jobs
+      - multijob:
+          name: opnfv/functest-kubernetes-security:{tag}
+          projects:
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_hunter-run'
+              <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench-run'
+              <<: *functest-kubernetes-jobs
+      - multijob:
+          name: opnfv/functest-kubernetes-cnf:{tag}
+          projects:
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+              <<: *functest-kubernetes-jobs
       - multijob:
           name: dump all campaign data
           projects:
           ref: $branch
 
 - project:
-    name: 'functest-kubernetes-_-golang-1.13-alpine3.11-rmi'
+    name: 'functest-kubernetes-_-golang-1.15-alpine3.12-rmi'
     repo: _
     port:
     container: golang
-    tag: '1.13-alpine3.11'
+    tag: '1.15-alpine3.12'
     slave: master
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-kubernetes-{repo}-{container}-{tag}-rmi'
 
 - project:
-    name: 'functest-kubernetes-_-golang-1.13-alpine3.11-pull'
+    name: 'functest-kubernetes-_-golang-1.15-alpine3.12-pull'
     repo: _
     port:
     container: golang
-    tag: '1.13-alpine3.11'
+    tag: '1.15-alpine3.12'
     slave: master
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-kubernetes-{repo}-{container}-{tag}-pull'
 
     container: functest-kubernetes-core
     ref_arg: BRANCH
     path: docker/core
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-kubernetes-{repo}-{container}-{tag}-gate'
       - 'functest-kubernetes-{repo}-{container}-{tag}-check'
     container: functest-kubernetes-healthcheck
     ref_arg:
     path: docker/healthcheck
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
+    jobs:
+      - 'functest-kubernetes-{repo}-{container}-{tag}-gate'
+      - 'functest-kubernetes-{repo}-{container}-{tag}-check'
+
+- project:
+    name: functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-build
+    <<: *functest-kubernetes-params
+    container: functest-kubernetes-cnf
+    ref_arg: BRANCH
+    path: docker/cnf
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
+    jobs:
+      - 'functest-kubernetes-{repo}-{container}-{tag}-gate'
+      - 'functest-kubernetes-{repo}-{container}-{tag}-check'
+
+- project:
+    name: functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-build
+    <<: *functest-kubernetes-params
+    container: functest-kubernetes-security
+    ref_arg: BRANCH
+    path: docker/security
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-kubernetes-{repo}-{container}-{tag}-gate'
       - 'functest-kubernetes-{repo}-{container}-{tag}-check'
     container: functest-kubernetes-smoke
     ref_arg:
     path: docker/smoke
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-kubernetes-{repo}-{container}-{tag}-gate'
       - 'functest-kubernetes-{repo}-{container}-{tag}-check'
           build_tag: ''
       - functest-kubernetes-branch:
           branch: '{branch}'
-      - functest-kubernetes-DEBUG:
-          DEBUG: 'true'
       - functest-kubernetes-DEPLOY_SCENARIO:
           DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+      - functest-kubernetes-DEBUG:
+          DEBUG: 'true'
     properties:
       - build-blocker:
           use-build-blocker: true
           blocking-level: 'NODE'
           blocking-jobs:
-            - '^functest-kubernetes-{tag}-(daily|check|gate)$'
+            - '^functest-kubernetes(-pi)?-(arm.*-|amd64-)*[a-z]+-(daily|gate|check)$'
     builders:
       - multijob:
           name: remove former images
               <<: *functest-kubernetes-jobs
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
               <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+              <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+              <<: *functest-kubernetes-jobs
       - multijob:
           name: remove dependencies
           projects:
-            - name: 'functest-kubernetes-_-golang-1.13-alpine3.11-rmi'
+            - name: 'functest-kubernetes-_-golang-1.15-alpine3.12-rmi'
               <<: *functest-kubernetes-jobs
       - multijob:
           name: pull dependencies
           projects:
-            - name: 'functest-kubernetes-_-golang-1.13-alpine3.11-pull'
+            - name: 'functest-kubernetes-_-golang-1.15-alpine3.12-pull'
               <<: *functest-kubernetes-jobs
       - multijob:
           name: build opnfv/functest-kubernetes-core
           projects:
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-check'
               <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-check'
+              <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-check'
+              <<: *functest-kubernetes-jobs
       - multijob:
           name: build containers
           projects:
               <<: *functest-kubernetes-jobs
       - multijob:
           name: opnfv/functest-kubernetes-smoke:{tag}
-          execution-type: SEQUENTIALLY
           projects:
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-xrally_kubernetes-run'
               <<: *functest-kubernetes-jobs
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
               <<: *functest-kubernetes-jobs
+      - multijob:
+          name: opnfv/functest-kubernetes-security:{tag}
+          projects:
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_hunter-run'
+              <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench-run'
+              <<: *functest-kubernetes-jobs
+      - multijob:
+          name: opnfv/functest-kubernetes-cnf:{tag}
+          projects:
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+              <<: *functest-kubernetes-jobs
 
 - trigger:
     name: functest-kubernetes-patchset-created
           slave: '{slave}'
       - functest-kubernetes-build_tag:
           build_tag: ''
-      - functest-kubernetes-DEBUG:
-          DEBUG: 'true'
       - functest-kubernetes-DEPLOY_SCENARIO:
           DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+      - functest-kubernetes-DEBUG:
+          DEBUG: 'true'
     properties:
       - build-blocker:
           use-build-blocker: true
           blocking-level: 'NODE'
           blocking-jobs:
-            - '^functest-kubernetes-{tag}-(daily|check|gate)$'
+            - '^functest-kubernetes(-pi)?-(arm.*-|amd64-)*[a-z]+-(daily|gate|check)$'
     builders:
       - multijob:
           name: remove former images
               <<: *functest-kubernetes-jobs
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
               <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+              <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+              <<: *functest-kubernetes-jobs
       - multijob:
           name: remove dependencies
           projects:
-            - name: 'functest-kubernetes-_-golang-1.13-alpine3.11-rmi'
+            - name: 'functest-kubernetes-_-golang-1.15-alpine3.12-rmi'
               <<: *functest-kubernetes-jobs
       - multijob:
           name: pull dependencies
           projects:
-            - name: 'functest-kubernetes-_-golang-1.13-alpine3.11-pull'
+            - name: 'functest-kubernetes-_-golang-1.15-alpine3.12-pull'
               <<: *functest-kubernetes-jobs
       - multijob:
           name: build opnfv/functest-kubernetes-core
           projects:
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-gate'
               <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-gate'
+              <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-gate'
+              <<: *functest-kubernetes-jobs
       - multijob:
           name: build containers
           projects:
               <<: *functest-kubernetes-jobs
       - multijob:
           name: opnfv/functest-kubernetes-smoke:{tag}
-          execution-type: SEQUENTIALLY
           projects:
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-xrally_kubernetes-run'
               <<: *functest-kubernetes-jobs
             - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
               <<: *functest-kubernetes-jobs
+      - multijob:
+          name: opnfv/functest-kubernetes-security:{tag}
+          projects:
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_hunter-run'
+              <<: *functest-kubernetes-jobs
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench-run'
+              <<: *functest-kubernetes-jobs
+      - multijob:
+          name: opnfv/functest-kubernetes-cnf:{tag}
+          projects:
+            - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+              <<: *functest-kubernetes-jobs
 
 - project:
     name: 'functest-kubernetes'
     <<: *functest-kubernetes-params
     jobs:
       - 'functest-kubernetes-{tag}-daily'
+
+- project:
+    name: 'functest-kubernetes-gate'
+    <<: *functest-kubernetes-params
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
+    jobs:
       - 'functest-kubernetes-{tag}-check'
       - 'functest-kubernetes-{tag}-gate'
 
       - last-success
       - last-failure
       - last-duration
-    regex: ^functest-kubernetes-[a-z]+-(daily|check|gate)$
+    regex: ^functest-kubernetes-(arm.*-|amd64-)*[a-z]+-daily$
+
+- view:
+    name: functest-kubernetes-gate
+    view-type: list
+    columns:
+      - status
+      - weather
+      - job
+      - last-success
+      - last-failure
+      - last-duration
+    regex: ^functest-kubernetes-(arm.*-|amd64-)*[a-z]+-gate$
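Note: the daily and gate views above now select jobs per architecture prefix instead of the former single ^functest-kubernetes-[a-z]+-(daily|check|gate)$ pattern. A minimal sanity check of the new daily regex against a few illustrative job names (the names are examples only):

    # only *-daily jobs, with or without an arch prefix, should land in the daily view
    printf '%s\n' \
      functest-kubernetes-latest-daily \
      functest-kubernetes-arm64-latest-daily \
      functest-kubernetes-latest-gate \
      | grep -E '^functest-kubernetes-(arm.*-|amd64-)*[a-z]+-daily$'
    # prints the first two names; the gate job is matched by the -gate$ view instead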
diff --git a/jjb/functest/functest-pi.yaml b/jjb/functest/functest-pi.yaml
new file mode 100644
index 0000000..51aea30
--- /dev/null
@@ -0,0 +1,890 @@
+---
+- functest-pi-jobs: &functest-pi-jobs
+    name: 'functest-pi-jobs'
+    current-parameters: true
+
+- functest-pi-params: &functest-pi-params
+    name: 'functest-pi-params'
+    repo: 'ollivier'
+    port:
+    tag:
+      - latest:
+          branch: master
+          slave: lf-virtual9
+          dashboard_url: http://172.30.13.94
+      - kali:
+          branch: stable/kali
+          slave: lf-pod4-3
+          dashboard_url: http://172.30.12.88
+      - jerma:
+          branch: stable/jerma
+          slave: lf-pod4
+          dashboard_url: http://172.30.12.83
+      - iruya:
+          branch: stable/iruya
+          slave: lf-virtual4
+          dashboard_url: http://172.30.13.89
+      - hunter:
+          branch: stable/hunter
+          slave: lf-virtual6
+          dashboard_url: http://172.30.13.91
+      - arm-latest:
+          branch: master
+          slave: lf-virtual9
+          dashboard_url: http://172.30.13.94
+      - arm-kali:
+          branch: stable/kali
+          slave: lf-pod4-3
+          dashboard_url: http://172.30.12.88
+      - arm-jerma:
+          branch: stable/jerma
+          slave: lf-pod4
+          dashboard_url: http://172.30.12.83
+      - arm-iruya:
+          branch: stable/iruya
+          slave: lf-virtual4
+          dashboard_url: http://172.30.13.89
+      - arm-hunter:
+          branch: stable/hunter
+          slave: lf-virtual6
+          dashboard_url: http://172.30.13.91
+      - arm64-latest:
+          branch: master
+          slave: lf-virtual9
+          dashboard_url: http://172.30.13.94
+      - arm64-kali:
+          branch: stable/kali
+          slave: lf-pod4-3
+          dashboard_url: http://172.30.12.88
+      - arm64-jerma:
+          branch: stable/jerma
+          slave: lf-pod4
+          dashboard_url: http://172.30.12.83
+      - arm64-iruya:
+          branch: stable/iruya
+          slave: lf-virtual4
+          dashboard_url: http://172.30.13.89
+      - arm64-hunter:
+          branch: stable/hunter
+          slave: lf-virtual6
+          dashboard_url: http://172.30.13.91
+
+- parameter:
+    name: functest-pi-slave
+    parameters:
+      - label:
+          name: slave
+          default: '{slave}'
+
+- parameter:
+    name: functest-pi-build_tag
+    parameters:
+      - random-string:
+          name: build_tag
+
+- parameter:
+    name: functest-pi-branch
+    parameters:
+      - string:
+          name: branch
+          default: '{branch}'
+
+- parameter:
+    name: functest-pi-DEBUG
+    parameters:
+      - string:
+          name: DEBUG
+          default: 'true'
+- parameter:
+    name: functest-pi-EXTERNAL_NETWORK
+    parameters:
+      - string:
+          name: EXTERNAL_NETWORK
+          default: public
+- parameter:
+    name: functest-pi-VOLUME_DEVICE_NAME
+    parameters:
+      - string:
+          name: VOLUME_DEVICE_NAME
+          default: sdb
+- parameter:
+    name: functest-pi-IMAGE_PROPERTIES
+    parameters:
+      - string:
+          name: IMAGE_PROPERTIES
+          default: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+
+- functest-pi-containers: &functest-pi-containers
+    name: 'functest-pi-containers'
+    repo: '{repo}'
+    port: '{port}'
+    container: '{container}'
+    tag: '{tag}'
+
+- functest-pi-run-containers: &functest-pi-run-containers
+    name: 'functest-pi-run-containers'
+    <<: *functest-pi-containers
+    test: '{test}'
+    dashboard_url: '{dashboard_url}'
+    privileged: '{privileged}'
+    network: '{network}'
+
+- builder:
+    name: functest-pi-pull-containers
+    builders:
+      - shell: |
+          set +x
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          sudo docker pull $image
+
+- builder:
+    name: functest-pi-run-containers
+    builders:
+      - shell: |
+          set +x
+          [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          volumes=""
+          case "{tag}" in
+            arm-hunter)
+              volumes="-v /usr/bin/qemu-arm-static:/usr/bin/qemu-arm-static" ;;
+            arm64-hunter)
+              volumes="-v /usr/bin/qemu-aarch64-static:/usr/bin/qemu-aarch64-static" ;;
+          esac
+          sudo docker run --rm \
+            --privileged={privileged} \
+            --network={network} \
+            -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+            -e S3_DST_URL=s3://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+            -e HTTP_DST_URL=http://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+            -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e NODE_NAME=$slave \
+            -e BUILD_TAG=$BUILD_TAG \
+            -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+            -e DEBUG=$DEBUG \
+            -e EXTERNAL_NETWORK=$EXTERNAL_NETWORK \
+            -e DASHBOARD_URL={dashboard_url} \
+            -e VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME \
+            -e IMAGE_PROPERTIES=$IMAGE_PROPERTIES \
+            -v /home/opnfv/functest/.boto:/root/.boto \
+            -v /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file \
+            -v /home/opnfv/functest/images:/home/opnfv/functest/images \
+            $volumes $image run_tests -t {test} -r -p
+
+- builder:
+    name: functest-pi-remove-images
+    builders:
+      - shell: |
+          set +x
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          sudo docker rmi $image || true
+
+- job-template:
+    name: 'functest-pi-{repo}-{container}-{tag}-pull'
+    parameters:
+      - functest-pi-slave:
+          slave: '{slave}'
+    builders:
+      - functest-pi-pull-containers:
+          <<: *functest-pi-containers
+
+- project:
+    name: 'functest-pi-ollivier-functest-healthcheck-pull'
+    <<: *functest-pi-params
+    container: 'functest-healthcheck'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-pull'
+
+- project:
+    name: 'functest-pi-ollivier-functest-smoke-pull'
+    <<: *functest-pi-params
+    container: 'functest-smoke'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-pull'
+
+- project:
+    name: 'functest-pi-ollivier-functest-smoke-cntt-pull'
+    <<: *functest-pi-params
+    container: 'functest-smoke-cntt'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-pull'
+
+- project:
+    name: 'functest-pi-ollivier-functest-benchmarking-pull'
+    <<: *functest-pi-params
+    container: 'functest-benchmarking'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-pull'
+
+- project:
+    name: 'functest-pi-ollivier-functest-benchmarking-cntt-pull'
+    <<: *functest-pi-params
+    container: 'functest-benchmarking-cntt'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-pull'
+
+- project:
+    name: 'functest-pi-ollivier-functest-vnf-pull'
+    <<: *functest-pi-params
+    container: 'functest-vnf'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-pull'
+
+- job-template:
+    name: 'functest-pi-{repo}-{container}-{tag}-rmi'
+    parameters:
+      - functest-pi-slave:
+          slave: '{slave}'
+    builders:
+      - functest-pi-remove-images:
+          <<: *functest-pi-containers
+
+- project:
+    name: 'functest-pi-ollivier-functest-healthcheck-rmi'
+    <<: *functest-pi-params
+    container: 'functest-healthcheck'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-rmi'
+
+- project:
+    name: 'functest-pi-ollivier-functest-smoke-rmi'
+    <<: *functest-pi-params
+    container: 'functest-smoke'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-rmi'
+
+- project:
+    name: 'functest-pi-ollivier-functest-smoke-cntt-rmi'
+    <<: *functest-pi-params
+    container: 'functest-smoke-cntt'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-rmi'
+
+- project:
+    name: 'functest-pi-ollivier-functest-benchmarking-rmi'
+    <<: *functest-pi-params
+    container: 'functest-benchmarking'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-rmi'
+
+- project:
+    name: 'functest-pi-ollivier-functest-benchmarking-cntt-rmi'
+    <<: *functest-pi-params
+    container: 'functest-benchmarking-cntt'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-rmi'
+
+- project:
+    name: 'functest-pi-ollivier-functest-vnf-rmi'
+    <<: *functest-pi-params
+    container: 'functest-vnf'
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-rmi'
+
+- job-template:
+    name: 'functest-pi-{repo}-{container}-{tag}-{test}-run'
+    parameters:
+      - functest-pi-slave:
+          slave: '{slave}'
+      - functest-pi-build_tag:
+          build_tag: ''
+      - functest-pi-DEBUG:
+          DEBUG: 'true'
+      - functest-pi-EXTERNAL_NETWORK:
+          EXTERNAL_NETWORK: public
+      - functest-pi-VOLUME_DEVICE_NAME:
+          VOLUME_DEVICE_NAME: sdb
+      - functest-pi-IMAGE_PROPERTIES:
+          IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+    builders:
+      - functest-pi-run-containers:
+          <<: *functest-pi-run-containers
+
+- project:
+    name: 'functest-pi-ollivier-functest-healthcheck'
+    <<: *functest-pi-params
+    container: 'functest-healthcheck'
+    test:
+      - connection_check
+      - tenantnetwork1
+      - tenantnetwork2
+      - vmready1
+      - vmready2
+      - singlevm1
+      - singlevm2
+      - vping_ssh
+      - vping_userdata
+      - cinder_test
+      - odl
+      - tempest_smoke
+      - tempest_horizon
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'functest-pi-ollivier-functest-smoke'
+    <<: *functest-pi-params
+    container: 'functest-smoke'
+    test:
+      - tempest_neutron
+      - tempest_cinder
+      - tempest_keystone
+      - tempest_heat
+      - tempest_telemetry
+      - rally_sanity
+      - refstack_defcore
+      - refstack_compute
+      - refstack_object
+      - refstack_platform
+      - tempest_full
+      - tempest_scenario
+      - tempest_slow
+      - patrole
+      - patrole_admin
+      - patrole_member
+      - patrole_reader
+      - networking-bgpvpn
+      - networking-sfc
+      - tempest_barbican
+      - tempest_octavia
+      - tempest_cyborg
+    exclude:
+      - tag: latest
+        test: refstack_defcore
+      - tag: latest
+        test: networking-bgpvpn
+      - tag: latest
+        test: networking-sfc
+      - tag: latest
+        test: patrole
+      - tag: kali
+        test: refstack_defcore
+      - tag: kali
+        test: networking-bgpvpn
+      - tag: kali
+        test: networking-sfc
+      - tag: kali
+        test: patrole_admin
+      - tag: kali
+        test: patrole_member
+      - tag: kali
+        test: patrole_reader
+      - tag: jerma
+        test: refstack_defcore
+      - tag: jerma
+        test: networking-bgpvpn
+      - tag: jerma
+        test: networking-sfc
+      - tag: jerma
+        test: patrole_admin
+      - tag: jerma
+        test: patrole_member
+      - tag: jerma
+        test: patrole_reader
+      - tag: jerma
+        test: tempest_cyborg
+      - tag: iruya
+        test: refstack_defcore
+      - tag: iruya
+        test: patrole_admin
+      - tag: iruya
+        test: patrole_member
+      - tag: iruya
+        test: patrole_reader
+      - tag: iruya
+        test: tempest_cyborg
+      - tag: hunter
+        test: refstack_compute
+      - tag: hunter
+        test: refstack_object
+      - tag: hunter
+        test: refstack_platform
+      - tag: hunter
+        test: tempest_octavia
+      - tag: hunter
+        test: tempest_telemetry
+      - tag: hunter
+        test: patrole_admin
+      - tag: hunter
+        test: patrole_member
+      - tag: hunter
+        test: patrole_reader
+      - tag: hunter
+        test: tempest_cyborg
+      - tag: arm-latest
+        test: refstack_defcore
+      - tag: arm-latest
+        test: networking-bgpvpn
+      - tag: arm-latest
+        test: networking-sfc
+      - tag: arm-latest
+        test: patrole
+      - tag: arm-kali
+        test: refstack_defcore
+      - tag: arm-kali
+        test: networking-bgpvpn
+      - tag: arm-kali
+        test: networking-sfc
+      - tag: arm-kali
+        test: patrole_admin
+      - tag: arm-kali
+        test: patrole_member
+      - tag: arm-kali
+        test: patrole_reader
+      - tag: arm-jerma
+        test: refstack_defcore
+      - tag: arm-jerma
+        test: networking-bgpvpn
+      - tag: arm-jerma
+        test: networking-sfc
+      - tag: arm-jerma
+        test: patrole_admin
+      - tag: arm-jerma
+        test: patrole_member
+      - tag: arm-jerma
+        test: patrole_reader
+      - tag: arm-jerma
+        test: tempest_cyborg
+      - tag: arm-iruya
+        test: refstack_defcore
+      - tag: arm-iruya
+        test: patrole_admin
+      - tag: arm-iruya
+        test: patrole_member
+      - tag: arm-iruya
+        test: patrole_reader
+      - tag: arm-iruya
+        test: tempest_cyborg
+      - tag: arm-hunter
+        test: refstack_compute
+      - tag: arm-hunter
+        test: refstack_object
+      - tag: arm-hunter
+        test: refstack_platform
+      - tag: arm-hunter
+        test: tempest_octavia
+      - tag: arm-hunter
+        test: tempest_telemetry
+      - tag: arm-hunter
+        test: patrole_admin
+      - tag: arm-hunter
+        test: patrole_member
+      - tag: arm-hunter
+        test: patrole_reader
+      - tag: arm-hunter
+        test: tempest_cyborg
+      - tag: arm64-latest
+        test: refstack_defcore
+      - tag: arm64-latest
+        test: networking-bgpvpn
+      - tag: arm64-latest
+        test: networking-sfc
+      - tag: arm64-latest
+        test: patrole
+      - tag: arm64-kali
+        test: refstack_defcore
+      - tag: arm64-kali
+        test: networking-bgpvpn
+      - tag: arm64-kali
+        test: networking-sfc
+      - tag: arm64-kali
+        test: patrole_admin
+      - tag: arm64-kali
+        test: patrole_member
+      - tag: arm64-kali
+        test: patrole_reader
+      - tag: arm64-jerma
+        test: refstack_defcore
+      - tag: arm64-jerma
+        test: networking-bgpvpn
+      - tag: arm64-jerma
+        test: networking-sfc
+      - tag: arm64-jerma
+        test: patrole_admin
+      - tag: arm64-jerma
+        test: patrole_member
+      - tag: arm64-jerma
+        test: patrole_reader
+      - tag: arm64-jerma
+        test: tempest_cyborg
+      - tag: arm64-iruya
+        test: refstack_defcore
+      - tag: arm64-iruya
+        test: patrole_admin
+      - tag: arm64-iruya
+        test: patrole_member
+      - tag: arm64-iruya
+        test: patrole_reader
+      - tag: arm64-iruya
+        test: tempest_cyborg
+      - tag: arm64-hunter
+        test: refstack_compute
+      - tag: arm64-hunter
+        test: refstack_object
+      - tag: arm64-hunter
+        test: refstack_platform
+      - tag: arm64-hunter
+        test: tempest_octavia
+      - tag: arm64-hunter
+        test: tempest_telemetry
+      - tag: arm64-hunter
+        test: patrole_admin
+      - tag: arm64-hunter
+        test: patrole_member
+      - tag: arm64-hunter
+        test: patrole_reader
+      - tag: arm64-hunter
+        test: tempest_cyborg
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'functest-pi-ollivier-functest-smoke-cntt'
+    <<: *functest-pi-params
+    container: 'functest-smoke-cntt'
+    test:
+      - tempest_neutron_cntt
+      - tempest_cinder_cntt
+      - tempest_keystone_cntt
+      - tempest_heat_cntt
+      - rally_sanity_cntt
+      - tempest_full_cntt
+      - tempest_scenario_cntt
+      - tempest_slow_cntt
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'functest-pi-ollivier-functest-benchmarking'
+    <<: *functest-pi-params
+    container: 'functest-benchmarking'
+    test:
+      - rally_full
+      - rally_jobs
+      - vmtp
+      - shaker
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'functest-pi-ollivier-functest-benchmarking-cntt'
+    <<: *functest-pi-params
+    container: 'functest-benchmarking-cntt'
+    test:
+      - rally_full_cntt
+      - rally_jobs_cntt
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'functest-pi-ollivier-functest-vnf'
+    <<: *functest-pi-params
+    container: 'functest-vnf'
+    test:
+      - cloudify
+      - cloudify_ims
+      - heat_ims
+      - vyos_vrouter
+      - juju_epc
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-pi-{repo}-{container}-{tag}-{test}-run'
+
+
+- builder:
+    name: functest-pi-zip
+    builders:
+      - shell: |
+          set +x
+          [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          volumes=""
+          case "{tag}" in
+            arm-hunter)
+              volumes="-v /usr/bin/qemu-arm-static:/usr/bin/qemu-arm-static" ;;
+            arm64-hunter)
+              volumes="-v /usr/bin/qemu-aarch64-static:/usr/bin/qemu-aarch64-static" ;;
+          esac
+          sudo docker run --rm \
+            -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+            -e S3_DST_URL=s3://artifacts.opnfv.org/functest \
+            -e HTTP_DST_URL=http://artifacts.opnfv.org/functest \
+            -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e BUILD_TAG=$BUILD_TAG \
+            -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+            -e DEBUG=$DEBUG \
+            -e EXTERNAL_NETWORK=$EXTERNAL_NETWORK \
+            -e VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME \
+            -e IMAGE_PROPERTIES=$IMAGE_PROPERTIES \
+            -v /home/opnfv/functest/.boto:/root/.boto \
+            -v /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file \
+            -v /home/opnfv/functest/images:/home/opnfv/functest/images \
+            $volumes $image zip_campaign
+
+- job-template:
+    name: 'functest-pi-{tag}-zip'
+    parameters:
+      - functest-pi-slave:
+          slave: '{slave}'
+      - functest-pi-build_tag:
+          build_tag: ''
+      - functest-pi-DEBUG:
+          DEBUG: 'true'
+      - functest-pi-EXTERNAL_NETWORK:
+          EXTERNAL_NETWORK: public
+      - functest-pi-VOLUME_DEVICE_NAME:
+          VOLUME_DEVICE_NAME: sdb
+      - functest-pi-IMAGE_PROPERTIES:
+          IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+    builders:
+      - functest-pi-zip:
+          <<: *functest-pi-containers
+
+- project:
+    name: 'functest-pi-{tag}-zip'
+    <<: *functest-pi-params
+    container: 'functest-healthcheck'
+    jobs:
+      - 'functest-pi-{tag}-zip'
+
+- job-template:
+    name: 'functest-pi-{tag}-daily'
+    project-type: multijob
+    triggers:
+      - timed: '@weekly'
+    parameters:
+      - functest-pi-slave:
+          slave: '{slave}'
+      - functest-pi-build_tag:
+          build_tag: ''
+      - functest-pi-DEBUG:
+          DEBUG: 'true'
+      - functest-pi-EXTERNAL_NETWORK:
+          EXTERNAL_NETWORK: public
+      - functest-pi-VOLUME_DEVICE_NAME:
+          VOLUME_DEVICE_NAME: sdb
+      - functest-pi-IMAGE_PROPERTIES:
+          IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+    properties:
+      - build-blocker:
+          use-build-blocker: true
+          blocking-level: 'NODE'
+          blocking-jobs:
+            - '^functest(-pi)?(-ovn)?-(arm.*-|amd64-)*[a-z]+-(daily|gate|check)$'
+    builders:
+      - multijob:
+          name: remove former images
+          projects:
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-rmi'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-rmi'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-rmi'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rmi'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rmi'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-vnf-{tag}-rmi'
+              <<: *functest-pi-jobs
+      - multijob:
+          name: pull containers
+          projects:
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-pull'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-pull'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-pull'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-pull'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-pull'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-vnf-{tag}-pull'
+              <<: *functest-pi-jobs
+      - multijob:
+          name: ollivier/functest-healthcheck:{tag}
+          projects:
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-connection_check-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tenantnetwork1-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tenantnetwork2-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vmready1-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vmready2-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-singlevm1-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-singlevm2-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vping_ssh-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vping_userdata-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-cinder_test-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-odl-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tempest_smoke-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tempest_horizon-run'
+              <<: *functest-pi-jobs
+      - multijob:
+          name: ollivier/functest-smoke:{tag}
+          projects:
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_neutron-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_cinder-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_keystone-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_heat-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_telemetry-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-rally_sanity-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-refstack_defcore-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-refstack_compute-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-refstack_object-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-refstack_platform-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_full-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_scenario-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_slow-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-patrole-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-patrole_admin-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-patrole_member-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-patrole_reader-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-networking-bgpvpn-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-networking-sfc-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_barbican-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_octavia-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_cyborg-run'
+              <<: *functest-pi-jobs
+      - multijob:
+          name: ollivier/functest-smoke-cntt:{tag}
+          projects:
+            - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
+              <<: *functest-pi-jobs
+      - multijob:
+          name: ollivier/functest-benchmarking:{tag}
+          projects:
+            - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rally_full-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rally_jobs-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-vmtp-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-shaker-run'
+              <<: *functest-pi-jobs
+      - multijob:
+          name: ollivier/functest-benchmarking-cntt:{tag}
+          projects:
+            - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+              <<: *functest-pi-jobs
+      - multijob:
+          name: ollivier/functest-vnf:{tag}
+          projects:
+            - name: 'functest-pi-ollivier-functest-vnf-{tag}-cloudify-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-vnf-{tag}-cloudify_ims-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-vnf-{tag}-heat_ims-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-vnf-{tag}-vyos_vrouter-run'
+              <<: *functest-pi-jobs
+            - name: 'functest-pi-ollivier-functest-vnf-{tag}-juju_epc-run'
+              <<: *functest-pi-jobs
+      - multijob:
+          name: dump all campaign data
+          projects:
+            - name: 'functest-pi-{tag}-zip'
+              <<: *functest-pi-jobs
+
+- project:
+    name: 'functest-pi'
+    <<: *functest-pi-params
+    jobs:
+      - 'functest-pi-{tag}-daily'
+
+- view:
+    name: functest-pi
+    view-type: list
+    columns:
+      - status
+      - weather
+      - job
+      - last-success
+      - last-failure
+      - last-duration
+    regex: ^functest-pi(-ovn)?-(arm.*-|amd64-)*[a-z]+-daily$
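Note: the run and zip builders in this new file bind-mount /usr/bin/qemu-arm-static or /usr/bin/qemu-aarch64-static into the arm-hunter and arm64-hunter containers, so those binaries must already exist on the slave; the other tags add no extra volume. On a Debian-based slave something like the following provides them (package names are an assumption, adjust for the actual distro):

    # user-mode qemu emulators plus binfmt registration for the arm*-hunter run jobs
    sudo apt-get install -y qemu-user-static binfmt-support
    ls -l /usr/bin/qemu-arm-static /usr/bin/qemu-aarch64-static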
index 306c3ca..5aa9e71 100644
@@ -5,6 +5,9 @@
       - latest:
           branch: master
           slave: lf-virtual1
+      - kali:
+          branch: stable/kali
+          slave: lf-virtual1
       - jerma:
           branch: stable/jerma
           slave: lf-virtual1
diff --git a/jjb/functest/functest-suite.sh b/jjb/functest/functest-suite.sh
deleted file mode 100755
index 469a577..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
-if [ -z $container_id ]; then
-    echo "Functest container not found"
-    exit 1
-fi
-
-global_ret_val=0
-
-tests=($(echo $FUNCTEST_SUITE_NAME | tr "," "\n"))
-for test in ${tests[@]}; do
-    cmd="run_tests -t $test"
-    docker exec $container_id $cmd
-    let global_ret_val+=$?
-done
-
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-echo ${global_ret_val}>${ret_val_file}
-
-exit 0
index 1dd48fb..2dce177 100644
@@ -8,8 +8,8 @@
     repo: 'opnfv'
     port:
     tag:
-      - jerma:
-          branch: stable/jerma
+      - latest:
+          branch: master
           slave: lf-pod4-2
           dashboard_url: http://172.30.12.85
 
           fi
           if [ "{tag}" = "latest" ]; then
             py=3.8
+          elif [ "{tag}" = "kali" ]; then
+            py=3.8
           elif [ "{tag}" = "jerma" ]; then
             py=3.7
           elif [ "{tag}" = "iruya" ]; then
             -e IMAGE_PROPERTIES=hw_scsi_model:virtio-scsi,hw_disk_bus:scsi \
             -v /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file \
             -v /home/opnfv/functest/images:/home/opnfv/functest/images \
-            -v /home/opnfv/functest/tempest_conf.yaml:/usr/lib/python$py/\
-          site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/\
+            -v /home/opnfv/functest/tempest_conf.yaml:/src/functest/\
+          functest/opnfv_tests/openstack/tempest/custom_tests/\
           tempest_conf.yaml \
             -v /home/opnfv/functest/.boto:/root/.boto \
             $image run_tests -t {test} -r -p
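Note: this hunk moves the tempest_conf.yaml override from the interpreter-specific site-packages path to the functest source tree inside the image, so this particular mount no longer depends on the $py value selected above. A quick way to confirm the target directory exists in a given image (the image and tag here are only an example):

    sudo docker run --rm opnfv/functest-smoke:latest \
      ls /src/functest/functest/opnfv_tests/openstack/tempest/custom_tests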
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-pull'
 
+- project:
+    name: 'functest-ovn-opnfv-functest-benchmarking-cntt-pull'
+    <<: *functest-ovn-params
+    container: 'functest-benchmarking-cntt'
+    jobs:
+      - 'functest-ovn-{repo}-{container}-{tag}-pull'
+
 - project:
     name: 'functest-ovn-opnfv-functest-vnf-pull'
     <<: *functest-ovn-params
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-rmi'
 
+- project:
+    name: 'functest-ovn-opnfv-functest-benchmarking-cntt-rmi'
+    <<: *functest-ovn-params
+    container: 'functest-benchmarking-cntt'
+    jobs:
+      - 'functest-ovn-{repo}-{container}-{tag}-rmi'
+
 - project:
     name: 'functest-ovn-opnfv-functest-vnf-rmi'
     <<: *functest-ovn-params
       - odl
       - tempest_smoke
       - tempest_horizon
-    exclude:
-      - tag: hunter
-        test: tempest_horizon
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-{test}-run'
 
     <<: *functest-ovn-params
     container: 'functest-smoke'
     test:
-      - neutron-tempest-plugin-api
+      - tempest_neutron
       - tempest_cinder
       - tempest_keystone
+      - tempest_heat
+      - tempest_telemetry
       - rally_sanity
       - refstack_defcore
       - refstack_compute
       - tempest_scenario
       - tempest_slow
       - patrole
-      - neutron_trunk
+      - patrole_admin
+      - patrole_member
+      - patrole_reader
       - networking-bgpvpn
       - networking-sfc
-      - barbican
-      - octavia
+      - tempest_barbican
+      - tempest_octavia
+      - tempest_cyborg
     exclude:
       - tag: latest
         test: refstack_defcore
+      - tag: latest
+        test: networking-bgpvpn
+      - tag: latest
+        test: networking-sfc
+      - tag: latest
+        test: patrole
+      - tag: kali
+        test: refstack_defcore
+      - tag: kali
+        test: networking-bgpvpn
+      - tag: kali
+        test: networking-sfc
+      - tag: kali
+        test: patrole_admin
+      - tag: kali
+        test: patrole_member
+      - tag: kali
+        test: patrole_reader
       - tag: jerma
         test: refstack_defcore
+      - tag: jerma
+        test: networking-bgpvpn
+      - tag: jerma
+        test: networking-sfc
+      - tag: jerma
+        test: patrole_admin
+      - tag: jerma
+        test: patrole_member
+      - tag: jerma
+        test: patrole_reader
+      - tag: jerma
+        test: tempest_cyborg
       - tag: iruya
         test: refstack_defcore
       - tag: iruya
-        test: octavia
+        test: patrole_admin
+      - tag: iruya
+        test: patrole_member
+      - tag: iruya
+        test: patrole_reader
+      - tag: iruya
+        test: tempest_cyborg
       - tag: hunter
         test: refstack_compute
       - tag: hunter
       - tag: hunter
         test: refstack_platform
       - tag: hunter
-        test: octavia
+        test: tempest_octavia
+      - tag: hunter
+        test: tempest_telemetry
+      - tag: hunter
+        test: patrole_admin
+      - tag: hunter
+        test: patrole_member
+      - tag: hunter
+        test: patrole_reader
+      - tag: hunter
+        test: tempest_cyborg
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-{test}-run'
 
     <<: *functest-ovn-params
     container: 'functest-smoke-cntt'
     test:
-      - neutron-tempest-plugin-api
-      - tempest_cinder
-      - tempest_keystone
-      - rally_sanity
-      - tempest_full
-      - tempest_scenario
-      - tempest_slow
+      - tempest_neutron_cntt
+      - tempest_cinder_cntt
+      - tempest_keystone_cntt
+      - tempest_heat_cntt
+      - rally_sanity_cntt
+      - tempest_full_cntt
+      - tempest_scenario_cntt
+      - tempest_slow_cntt
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-{test}-run'
 
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-{test}-run'
 
+- project:
+    name: 'functest-ovn-opnfv-functest-benchmarking-cntt'
+    <<: *functest-ovn-params
+    container: 'functest-benchmarking-cntt'
+    test:
+      - rally_full_cntt
+      - rally_jobs_cntt
+    jobs:
+      - 'functest-ovn-{repo}-{container}-{tag}-{test}-run'
+
 - project:
     name: 'functest-ovn-opnfv-functest-vnf'
     <<: *functest-ovn-params
     builders:
       - shell: |
           set +x
-          [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/* || true
+          [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
           if [ "{repo}" = "_" ]; then
             image={container}:{tag}
           elif [ "{port}" = "None" ]; then
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-benchmarking-{tag}-rmi'
               <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+              <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-vnf-{tag}-rmi'
               <<: *functest-ovn-jobs
       - multijob:
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-benchmarking-{tag}-pull'
               <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-pull'
+              <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-vnf-{tag}-pull'
               <<: *functest-ovn-jobs
       - multijob:
       - multijob:
           name: opnfv/functest-smoke:{tag}
           projects:
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_neutron-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_cinder-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_keystone-run'
               <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_heat-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_telemetry-run'
+              <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-rally_sanity-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-refstack_defcore-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-neutron_trunk-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole_admin-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole_member-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole_reader-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-networking-sfc-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-barbican-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_barbican-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_octavia-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-octavia-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_cyborg-run'
               <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-smoke-cntt:{tag}
           projects:
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
               <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-benchmarking:{tag}
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-benchmarking-{tag}-shaker-run'
               <<: *functest-ovn-jobs
+      - multijob:
+          name: opnfv/functest-benchmarking-cntt:{tag}
+          projects:
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+              <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-vnf:{tag}
-          execution-type: SEQUENTIALLY
           projects:
             - name: 'functest-ovn-opnfv-functest-vnf-{tag}-cloudify-run'
               <<: *functest-ovn-jobs
           ref: $branch
 
 - project:
-    name: 'functest-ovn-_-alpine-3.11-rmi'
+    name: 'functest-ovn-_-alpine-3.12-rmi'
     repo: _
     port:
     container: alpine
-    tag: '3.11'
+    tag: '3.12'
     slave: master
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-rmi'
 
 - project:
-    name: 'functest-ovn-_-alpine-3.11-pull'
+    name: 'functest-ovn-_-alpine-3.12-pull'
     repo: _
     port:
     container: alpine
-    tag: '3.11'
+    tag: '3.12'
     slave: master
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-pull'
 
 - project:
-    name: functest-ovn-opnfv-functest-ovn-core-{tag}-build
+    name: functest-ovn-opnfv-functest-core-{tag}-build
     <<: *functest-ovn-params
-    container: functest-ovn-core
+    container: functest-core
     ref_arg: BRANCH
     path: docker/core
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-check'
 
 - project:
-    name: functest-ovn-opnfv-functest-ovn-tempest-{tag}-build
+    name: functest-ovn-opnfv-functest-tempest-{tag}-build
     <<: *functest-ovn-params
-    container: functest-ovn-tempest
+    container: functest-tempest
     ref_arg: BRANCH
     path: docker/tempest
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-check'
     exclude:
       - tag: latest
+      - tag: kali
       - tag: jerma
 
 - project:
       - 'functest-ovn-{repo}-{container}-{tag}-gate'
       - 'functest-ovn-{repo}-{container}-{tag}-check'
 
+- project:
+    name: functest-ovn-opnfv-functest-smoke-cntt-{tag}-build
+    <<: *functest-ovn-params
+    container: functest-smoke-cntt
+    ref_arg: BRANCH
+    path: docker/smoke-cntt
+    jobs:
+      - 'functest-ovn-{repo}-{container}-{tag}-gate'
+      - 'functest-ovn-{repo}-{container}-{tag}-check'
+
 - project:
     name: functest-ovn-opnfv-functest-benchmarking-{tag}-build
     <<: *functest-ovn-params
       - 'functest-ovn-{repo}-{container}-{tag}-check'
 
 - project:
-    name: functest-ovn-opnfv-functest-vnf-{tag}-build
+    name: functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-build
     <<: *functest-ovn-params
-    container: functest-vnf
-    ref_arg:
-    path: docker/vnf
+    container: functest-benchmarking-cntt
+    ref_arg: BRANCH
+    path: docker/benchmarking-cntt
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-gate'
       - 'functest-ovn-{repo}-{container}-{tag}-check'
 
 - project:
-    name: functest-ovn-opnfv-functest-smoke-cntt-{tag}-build
+    name: functest-ovn-opnfv-functest-vnf-{tag}-build
     <<: *functest-ovn-params
-    container: functest-smoke-cntt
-    ref_arg: BRANCH
-    path: docker/smoke-cntt
+    container: functest-vnf
+    ref_arg:
+    path: docker/vnf
     jobs:
       - 'functest-ovn-{repo}-{container}-{tag}-gate'
       - 'functest-ovn-{repo}-{container}-{tag}-check'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-benchmarking-{tag}-rmi'
               <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+              <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-vnf-{tag}-rmi'
               <<: *functest-ovn-jobs
       - multijob:
           name: remove dependencies
           projects:
-            - name: 'functest-ovn-_-alpine-3.11-rmi'
+            - name: 'functest-ovn-_-alpine-3.12-rmi'
               <<: *functest-ovn-jobs
       - multijob:
           name: pull dependencies
           projects:
-            - name: 'functest-ovn-_-alpine-3.11-pull'
+            - name: 'functest-ovn-_-alpine-3.12-pull'
               <<: *functest-ovn-jobs
       - multijob:
-          name: build opnfv/functest-ovn-core
+          name: build opnfv/functest-core
           projects:
-            - name: 'functest-ovn-opnfv-functest-ovn-core-{tag}-check'
+            - name: 'functest-ovn-opnfv-functest-core-{tag}-check'
               <<: *functest-ovn-jobs
       - multijob:
-          name: build opnfv/functest-ovn-tempest
+          name: build opnfv/functest-tempest
           projects:
-            - name: 'functest-ovn-opnfv-functest-ovn-tempest-{tag}-check'
+            - name: 'functest-ovn-opnfv-functest-tempest-{tag}-check'
               <<: *functest-ovn-jobs
       - multijob:
           name: build containers
             - name: 'functest-ovn-opnfv-functest-vnf-{tag}-check'
               <<: *functest-ovn-jobs
       - multijob:
-          name: build opnfv/functest-smoke-cntt
+          name: build cntt containers
           projects:
             - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-check'
               <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-check'
+              <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-healthcheck:{tag}
           projects:
       - multijob:
           name: opnfv/functest-smoke:{tag}
           projects:
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_neutron-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_cinder-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_keystone-run'
               <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_heat-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_telemetry-run'
+              <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-rally_sanity-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-refstack_defcore-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-neutron_trunk-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole_admin-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole_member-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole_reader-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-networking-sfc-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-barbican-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_barbican-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-octavia-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_octavia-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_cyborg-run'
               <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-smoke-cntt:{tag}
           projects:
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
               <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-benchmarking:{tag}
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-benchmarking-{tag}-shaker-run'
               <<: *functest-ovn-jobs
+      - multijob:
+          name: opnfv/functest-benchmarking-cntt:{tag}
+          projects:
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+              <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-vnf:{tag}
-          execution-type: SEQUENTIALLY
           projects:
             - name: 'functest-ovn-opnfv-functest-vnf-{tag}-cloudify-run'
               <<: *functest-ovn-jobs
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-benchmarking-{tag}-rmi'
               <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+              <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-vnf-{tag}-rmi'
               <<: *functest-ovn-jobs
       - multijob:
           name: remove dependencies
           projects:
-            - name: 'functest-ovn-_-alpine-3.11-rmi'
+            - name: 'functest-ovn-_-alpine-3.12-rmi'
               <<: *functest-ovn-jobs
       - multijob:
           name: pull dependencies
           projects:
-            - name: 'functest-ovn-_-alpine-3.11-pull'
+            - name: 'functest-ovn-_-alpine-3.12-pull'
               <<: *functest-ovn-jobs
       - multijob:
-          name: build opnfv/functest-ovn-core
+          name: build opnfv/functest-core
           projects:
-            - name: 'functest-ovn-opnfv-functest-ovn-core-{tag}-gate'
+            - name: 'functest-ovn-opnfv-functest-core-{tag}-gate'
               <<: *functest-ovn-jobs
       - multijob:
-          name: build opnfv/functest-ovn-tempest
+          name: build opnfv/functest-tempest
           projects:
-            - name: 'functest-ovn-opnfv-functest-ovn-tempest-{tag}-gate'
+            - name: 'functest-ovn-opnfv-functest-tempest-{tag}-gate'
               <<: *functest-ovn-jobs
       - multijob:
           name: build containers
             - name: 'functest-ovn-opnfv-functest-vnf-{tag}-gate'
               <<: *functest-ovn-jobs
       - multijob:
-          name: build opnfv/functest-smoke-cntt
+          name: build cntt containers
           projects:
             - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-gate'
               <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-gate'
+              <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-healthcheck:{tag}
           projects:
       - multijob:
           name: opnfv/functest-smoke:{tag}
           projects:
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_neutron-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_cinder-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_keystone-run'
               <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_heat-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_telemetry-run'
+              <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-rally_sanity-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-refstack_defcore-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-neutron_trunk-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole_admin-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole_member-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-patrole_reader-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-smoke-{tag}-networking-sfc-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-barbican-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_barbican-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-octavia-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_octavia-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-{tag}-tempest_cyborg-run'
               <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-smoke-cntt:{tag}
           projects:
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
               <<: *functest-ovn-jobs
-            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
+            - name: 'functest-ovn-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
               <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-benchmarking:{tag}
               <<: *functest-ovn-jobs
             - name: 'functest-ovn-opnfv-functest-benchmarking-{tag}-shaker-run'
               <<: *functest-ovn-jobs
+      - multijob:
+          name: opnfv/functest-benchmarking-cntt:{tag}
+          projects:
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+              <<: *functest-ovn-jobs
+            - name: 'functest-ovn-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+              <<: *functest-ovn-jobs
       - multijob:
           name: opnfv/functest-vnf:{tag}
-          execution-type: SEQUENTIALLY
           projects:
             - name: 'functest-ovn-opnfv-functest-vnf-{tag}-cloudify-run'
               <<: *functest-ovn-jobs
index 663db8c..120fca8 100644 (file)
           branch: master
           slave: lf-virtual9
           dashboard_url: http://172.30.13.94
+      - kali:
+          branch: stable/kali
+          slave: lf-pod4-3
+          dashboard_url: http://172.30.12.88
       - jerma:
           branch: stable/jerma
           slave: lf-pod4
           branch: stable/hunter
           slave: lf-virtual6
           dashboard_url: http://172.30.13.91
+      - arm64-latest:
+          branch: master
+          slave: lf-virtual9
+          dashboard_url: http://172.30.13.94
+      - arm64-kali:
+          branch: stable/kali
+          slave: lf-pod4-3
+          dashboard_url: http://172.30.12.88
+      - arm64-jerma:
+          branch: stable/jerma
+          slave: lf-pod4
+          dashboard_url: http://172.30.12.83
+      - arm64-iruya:
+          branch: stable/iruya
+          slave: lf-virtual4
+          dashboard_url: http://172.30.13.89
+      - arm64-hunter:
+          branch: stable/hunter
+          slave: lf-virtual6
+          dashboard_url: http://172.30.13.91
 
 - parameter:
     name: functest-slave
           name: EXTERNAL_NETWORK
           default: public
 
+- parameter:
+    name: functest-VOLUME_DEVICE_NAME
+    parameters:
+      - string:
+          name: VOLUME_DEVICE_NAME
+          default: sdb
+
+- parameter:
+    name: functest-IMAGE_PROPERTIES
+    parameters:
+      - string:
+          name: IMAGE_PROPERTIES
+          default: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+
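Both parameters above expose values that the run builder previously hard-coded, so a lab whose storage device or image settings differ can override them per job. Judging from the default, IMAGE_PROPERTIES is a comma-separated list of key:value pairs, presumably consumed as image properties when the containers create test images; an illustrative override (example values, not a required setting):

    # example: a lab whose second disk shows up as vdb rather than sdb, keeping the virtio-scsi hints
    VOLUME_DEVICE_NAME=vdb
    IMAGE_PROPERTIES="hw_scsi_model:virtio-scsi,hw_disk_bus:scsi"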
 - functest-containers: &functest-containers
     name: 'functest-containers'
     repo: '{repo}'
     <<: *functest-containers
     test: '{test}'
     dashboard_url: '{dashboard_url}'
-
-- functest-build-containers: &functest-build-containers
-    name: 'functest-build-containers'
-    <<: *functest-containers
-    ref_arg: '{ref_arg}'
-    path: '{path}'
+    privileged: '{privileged}'
+    network: '{network}'
 
 - builder:
     name: functest-pull-containers
           fi
           sudo docker pull $image
 
-- builder:
-    name: functest-build-containers
-    builders:
-      - shell: |
-          set +x
-          if [ "{repo}" = "_" ]; then
-            image={container}:{tag}
-          elif [ "{port}" = "None" ]; then
-            image={repo}/{container}:{tag}
-          else
-            image={repo}:{port}/{container}:{tag}
-          fi
-          if [ "{ref_arg}" = "None" ]; then
-            build_arg=""
-          else
-            build_arg="--build-arg {ref_arg}={ref}"
-          fi
-          cd {path}
-          sudo docker build $build_arg \
-            --pull=false --no-cache --force-rm=true \
-            -t $image .
-
 - builder:
     name: functest-run-containers
     builders:
           else
             image={repo}:{port}/{container}:{tag}
           fi
-          if [ "{tag}" = "latest" ]; then
-            py=3.7
-          elif [ "{tag}" = "jerma" ]; then
-            py=3.7
-          elif [ "{tag}" = "iruya" ]; then
-            py=3.6
-          else
-            py=2.7
-          fi
+          volumes=""
+          case "{tag}" in
+            arm-hunter)
+              volumes="-v /usr/bin/qemu-arm-static:/usr/bin/qemu-arm-static" ;;
+            arm64-hunter)
+              volumes="-v /usr/bin/qemu-aarch64-static:/usr/bin/qemu-aarch64-static" ;;
+          esac
           sudo docker run --rm \
+            --privileged={privileged} \
+            --network={network} \
             -e S3_ENDPOINT_URL=https://storage.googleapis.com \
             -e S3_DST_URL=s3://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
             -e HTTP_DST_URL=http://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
             -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
             -e NODE_NAME=$slave \
             -e BUILD_TAG=$BUILD_TAG \
             -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
             -e DEBUG=$DEBUG \
             -e EXTERNAL_NETWORK=$EXTERNAL_NETWORK \
             -e DASHBOARD_URL={dashboard_url} \
-            -e VOLUME_DEVICE_NAME=sdb \
-            -e IMAGE_PROPERTIES=hw_scsi_model:virtio-scsi,hw_disk_bus:scsi \
+            -e VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME \
+            -e IMAGE_PROPERTIES=$IMAGE_PROPERTIES \
+            -v /home/opnfv/functest/.boto:/root/.boto \
             -v /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file \
             -v /home/opnfv/functest/images:/home/opnfv/functest/images \
-            -v /home/opnfv/functest/tempest_blacklist.yaml:/usr/lib/python$py/\
-          site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/\
-          blacklist.yaml \
-            -v /home/opnfv/functest/.boto:/root/.boto \
-            $image run_tests -t {test} -r -p
+            $volumes $image run_tests -t {test} -r -p
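
The run builder above now derives extra bind mounts from the image tag, so the arm/arm64 variants can run on an x86 slave through the statically linked QEMU user-mode binaries, and the volume/image tuning comes from the new job parameters instead of hard-coded values. A rough sketch of what the command expands to for an arm64-hunter smoke job (repo, test and parameter values are placeholders, not taken from a real run):

    # tag=arm64-hunter selects the aarch64 QEMU binary in the case statement above
    image=opnfv/functest-smoke:arm64-hunter   # placeholder image name
    volumes="-v /usr/bin/qemu-aarch64-static:/usr/bin/qemu-aarch64-static"
    sudo docker run --rm --privileged=false --network=bridge \
      -e VOLUME_DEVICE_NAME=sdb \
      -e IMAGE_PROPERTIES=hw_scsi_model:virtio-scsi,hw_disk_bus:scsi \
      $volumes $image run_tests -t tempest_cinder -r -p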
 
 - builder:
     name: functest-remove-images
           fi
           sudo docker rmi $image || true
 
+- functest-build-containers: &functest-build-containers
+    name: 'functest-build-containers'
+    <<: *functest-containers
+    ref_arg: '{ref_arg}'
+    path: '{path}'
+
+- builder:
+    name: functest-build-containers
+    builders:
+      - shell: |
+          set +x
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          if [ "{ref_arg}" = "None" ]; then
+            build_arg=""
+          else
+            build_arg="--build-arg {ref_arg}={ref}"
+          fi
+          cd {path}
+          sudo docker build $build_arg \
+            --pull=false --no-cache --force-rm=true \
+            -t $image .
+
 - scm:
     name: functest-scm
     scm:
     jobs:
       - 'functest-{repo}-{container}-{tag}-pull'
 
+- project:
+    name: 'functest-opnfv-functest-benchmarking-cntt-pull'
+    <<: *functest-params
+    container: 'functest-benchmarking-cntt'
+    jobs:
+      - 'functest-{repo}-{container}-{tag}-pull'
+
 - project:
     name: 'functest-opnfv-functest-vnf-pull'
     <<: *functest-params
     jobs:
       - 'functest-{repo}-{container}-{tag}-rmi'
 
+- project:
+    name: 'functest-opnfv-functest-benchmarking-cntt-rmi'
+    <<: *functest-params
+    container: 'functest-benchmarking-cntt'
+    jobs:
+      - 'functest-{repo}-{container}-{tag}-rmi'
+
 - project:
     name: 'functest-opnfv-functest-vnf-rmi'
     <<: *functest-params
           DEBUG: 'true'
       - functest-EXTERNAL_NETWORK:
           EXTERNAL_NETWORK: public
+      - functest-VOLUME_DEVICE_NAME:
+          VOLUME_DEVICE_NAME: sdb
+      - functest-IMAGE_PROPERTIES:
+          IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
     builders:
       - functest-run-containers:
           <<: *functest-run-containers
       - odl
       - tempest_smoke
       - tempest_horizon
-    exclude:
-      - tag: hunter
-        test: tempest_horizon
+    privileged: 'false'
+    network: bridge
     jobs:
       - 'functest-{repo}-{container}-{tag}-{test}-run'
 
     <<: *functest-params
     container: 'functest-smoke'
     test:
-      - neutron-tempest-plugin-api
+      - tempest_neutron
       - tempest_cinder
       - tempest_keystone
+      - tempest_heat
+      - tempest_telemetry
       - rally_sanity
       - refstack_defcore
       - refstack_compute
       - tempest_scenario
       - tempest_slow
       - patrole
-      - neutron_trunk
+      - patrole_admin
+      - patrole_member
+      - patrole_reader
       - networking-bgpvpn
       - networking-sfc
-      - barbican
-      - octavia
+      - tempest_barbican
+      - tempest_octavia
+      - tempest_cyborg
     exclude:
       - tag: latest
         test: refstack_defcore
+      - tag: latest
+        test: networking-bgpvpn
+      - tag: latest
+        test: networking-sfc
+      - tag: latest
+        test: patrole
+      - tag: kali
+        test: refstack_defcore
+      - tag: kali
+        test: networking-bgpvpn
+      - tag: kali
+        test: networking-sfc
+      - tag: kali
+        test: patrole_admin
+      - tag: kali
+        test: patrole_member
+      - tag: kali
+        test: patrole_reader
       - tag: jerma
         test: refstack_defcore
+      - tag: jerma
+        test: networking-bgpvpn
+      - tag: jerma
+        test: networking-sfc
+      - tag: jerma
+        test: patrole_admin
+      - tag: jerma
+        test: patrole_member
+      - tag: jerma
+        test: patrole_reader
+      - tag: jerma
+        test: tempest_cyborg
       - tag: iruya
         test: refstack_defcore
       - tag: iruya
-        test: octavia
+        test: patrole_admin
+      - tag: iruya
+        test: patrole_member
+      - tag: iruya
+        test: patrole_reader
+      - tag: iruya
+        test: tempest_cyborg
       - tag: hunter
         test: refstack_compute
       - tag: hunter
       - tag: hunter
         test: refstack_platform
       - tag: hunter
-        test: octavia
+        test: tempest_octavia
+      - tag: hunter
+        test: tempest_telemetry
+      - tag: hunter
+        test: patrole_admin
+      - tag: hunter
+        test: patrole_member
+      - tag: hunter
+        test: patrole_reader
+      - tag: hunter
+        test: tempest_cyborg
+      - tag: arm64-latest
+        test: refstack_defcore
+      - tag: arm64-latest
+        test: networking-bgpvpn
+      - tag: arm64-latest
+        test: networking-sfc
+      - tag: arm64-latest
+        test: patrole
+      - tag: arm64-kali
+        test: refstack_defcore
+      - tag: arm64-kali
+        test: networking-bgpvpn
+      - tag: arm64-kali
+        test: networking-sfc
+      - tag: arm64-kali
+        test: patrole_admin
+      - tag: arm64-kali
+        test: patrole_member
+      - tag: arm64-kali
+        test: patrole_reader
+      - tag: arm64-jerma
+        test: refstack_defcore
+      - tag: arm64-jerma
+        test: networking-bgpvpn
+      - tag: arm64-jerma
+        test: networking-sfc
+      - tag: arm64-jerma
+        test: patrole_admin
+      - tag: arm64-jerma
+        test: patrole_member
+      - tag: arm64-jerma
+        test: patrole_reader
+      - tag: arm64-jerma
+        test: tempest_cyborg
+      - tag: arm64-iruya
+        test: refstack_defcore
+      - tag: arm64-iruya
+        test: patrole_admin
+      - tag: arm64-iruya
+        test: patrole_member
+      - tag: arm64-iruya
+        test: patrole_reader
+      - tag: arm64-iruya
+        test: tempest_cyborg
+      - tag: arm64-hunter
+        test: refstack_compute
+      - tag: arm64-hunter
+        test: refstack_object
+      - tag: arm64-hunter
+        test: refstack_platform
+      - tag: arm64-hunter
+        test: tempest_octavia
+      - tag: arm64-hunter
+        test: tempest_telemetry
+      - tag: arm64-hunter
+        test: patrole_admin
+      - tag: arm64-hunter
+        test: patrole_member
+      - tag: arm64-hunter
+        test: patrole_reader
+      - tag: arm64-hunter
+        test: tempest_cyborg
+    privileged: 'false'
+    network: bridge
     jobs:
       - 'functest-{repo}-{container}-{tag}-{test}-run'
 
     <<: *functest-params
     container: 'functest-smoke-cntt'
     test:
-      - neutron-tempest-plugin-api
-      - tempest_cinder
-      - tempest_keystone
-      - rally_sanity
-      - tempest_full
-      - tempest_scenario
-      - tempest_slow
+      - tempest_neutron_cntt
+      - tempest_cinder_cntt
+      - tempest_keystone_cntt
+      - tempest_heat_cntt
+      - rally_sanity_cntt
+      - tempest_full_cntt
+      - tempest_scenario_cntt
+      - tempest_slow_cntt
+    privileged: 'false'
+    network: bridge
     jobs:
       - 'functest-{repo}-{container}-{tag}-{test}-run'
 
       - rally_jobs
       - vmtp
       - shaker
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'functest-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'functest-opnfv-functest-benchmarking-cntt'
+    <<: *functest-params
+    container: 'functest-benchmarking-cntt'
+    test:
+      - rally_full_cntt
+      - rally_jobs_cntt
+    privileged: 'false'
+    network: bridge
     jobs:
       - 'functest-{repo}-{container}-{tag}-{test}-run'
 
       - heat_ims
       - vyos_vrouter
       - juju_epc
+    privileged: 'false'
+    network: bridge
     jobs:
       - 'functest-{repo}-{container}-{tag}-{test}-run'
 
+
 - builder:
     name: functest-zip
     builders:
       - shell: |
           set +x
-          [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/* || true
+          [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
           if [ "{repo}" = "_" ]; then
             image={container}:{tag}
           elif [ "{port}" = "None" ]; then
           else
             image={repo}:{port}/{container}:{tag}
           fi
+          volumes=""
+          case "{tag}" in
+            arm-hunter)
+              volumes="-v /usr/bin/qemu-arm-static:/usr/bin/qemu-arm-static" ;;
+            arm64-hunter)
+              volumes="-v /usr/bin/qemu-aarch64-static:/usr/bin/qemu-aarch64-static" ;;
+          esac
           sudo docker run --rm \
             -e S3_ENDPOINT_URL=https://storage.googleapis.com \
             -e S3_DST_URL=s3://artifacts.opnfv.org/functest \
             -e HTTP_DST_URL=http://artifacts.opnfv.org/functest \
             -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
             -e BUILD_TAG=$BUILD_TAG \
             -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+            -e DEBUG=$DEBUG \
+            -e EXTERNAL_NETWORK=$EXTERNAL_NETWORK \
+            -e VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME \
+            -e IMAGE_PROPERTIES=$IMAGE_PROPERTIES \
             -v /home/opnfv/functest/.boto:/root/.boto \
-            $image zip_campaign
+            -v /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file \
+            -v /home/opnfv/functest/images:/home/opnfv/functest/images \
+            $volumes $image zip_campaign
 
 - job-template:
     name: 'functest-{tag}-zip'
     parameters:
+      - functest-slave:
+          slave: '{slave}'
       - functest-build_tag:
           build_tag: ''
+      - functest-DEBUG:
+          DEBUG: 'true'
+      - functest-EXTERNAL_NETWORK:
+          EXTERNAL_NETWORK: public
+      - functest-VOLUME_DEVICE_NAME:
+          VOLUME_DEVICE_NAME: sdb
+      - functest-IMAGE_PROPERTIES:
+          IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
     builders:
       - functest-zip:
           <<: *functest-containers
     name: 'functest-{tag}-daily'
     project-type: multijob
     triggers:
-      - timed: '@daily'
+      - timed: '@weekly'
     parameters:
       - functest-slave:
           slave: '{slave}'
           DEBUG: 'true'
       - functest-EXTERNAL_NETWORK:
           EXTERNAL_NETWORK: public
+      - functest-VOLUME_DEVICE_NAME:
+          VOLUME_DEVICE_NAME: sdb
+      - functest-IMAGE_PROPERTIES:
+          IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
     properties:
       - build-blocker:
           use-build-blocker: true
           blocking-level: 'NODE'
           blocking-jobs:
-            - '^functest-{tag}-(daily|check|gate)$'
+            - '^functest(-pi)?(-ovn)?-(arm.*-|amd64-)*[a-z]+-(daily|gate|check)$'
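The widened blocker pattern above keeps the ovn, pi and per-architecture variants of these jobs from running concurrently on the same node. A quick sanity check of the pattern against a few illustrative job names (a sketch, not an exhaustive list):

    for job in functest-kali-daily functest-ovn-latest-check functest-arm64-hunter-gate; do
      echo "$job" | grep -qE '^functest(-pi)?(-ovn)?-(arm.*-|amd64-)*[a-z]+-(daily|gate|check)$' \
        && echo "blocked: $job"
    done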
     builders:
       - multijob:
           name: remove former images
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-benchmarking-{tag}-rmi'
               <<: *functest-jobs
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+              <<: *functest-jobs
             - name: 'functest-opnfv-functest-vnf-{tag}-rmi'
               <<: *functest-jobs
       - multijob:
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-benchmarking-{tag}-pull'
               <<: *functest-jobs
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-pull'
+              <<: *functest-jobs
             - name: 'functest-opnfv-functest-vnf-{tag}-pull'
               <<: *functest-jobs
       - multijob:
       - multijob:
           name: opnfv/functest-smoke:{tag}
           projects:
-            - name: 'functest-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_neutron-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cinder-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-tempest_keystone-run'
               <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_heat-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_telemetry-run'
+              <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-rally_sanity-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-refstack_defcore-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-patrole-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-{tag}-neutron_trunk-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-patrole_admin-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-patrole_member-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-patrole_reader-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-networking-sfc-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-{tag}-barbican-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_barbican-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-{tag}-octavia-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_octavia-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cyborg-run'
               <<: *functest-jobs
       - multijob:
           name: opnfv/functest-smoke-cntt:{tag}
           projects:
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
               <<: *functest-jobs
       - multijob:
           name: opnfv/functest-benchmarking:{tag}
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-benchmarking-{tag}-shaker-run'
               <<: *functest-jobs
+      - multijob:
+          name: opnfv/functest-benchmarking-cntt:{tag}
+          projects:
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+              <<: *functest-jobs
       - multijob:
           name: opnfv/functest-vnf:{tag}
-          execution-type: SEQUENTIALLY
           projects:
             - name: 'functest-opnfv-functest-vnf-{tag}-cloudify-run'
               <<: *functest-jobs
           ref: $branch
 
 - project:
-    name: 'functest-_-alpine-3.11-rmi'
+    name: 'functest-_-alpine-3.12-rmi'
     repo: _
     port:
     container: alpine
-    tag: '3.11'
+    tag: '3.12'
     slave: master
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-{repo}-{container}-{tag}-rmi'
 
 - project:
-    name: 'functest-_-alpine-3.11-pull'
+    name: 'functest-_-alpine-3.12-pull'
     repo: _
     port:
     container: alpine
-    tag: '3.11'
+    tag: '3.12'
     slave: master
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-{repo}-{container}-{tag}-pull'
 
     container: functest-core
     ref_arg: BRANCH
     path: docker/core
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-{repo}-{container}-{tag}-gate'
       - 'functest-{repo}-{container}-{tag}-check'
       - 'functest-{repo}-{container}-{tag}-check'
     exclude:
       - tag: latest
+      - tag: kali
       - tag: jerma
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
 
 - project:
     name: functest-opnfv-functest-healthcheck-{tag}-build
     container: functest-healthcheck
     ref_arg: BRANCH
     path: docker/healthcheck
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-{repo}-{container}-{tag}-gate'
       - 'functest-{repo}-{container}-{tag}-check'
     container: functest-smoke
     ref_arg: BRANCH
     path: docker/smoke
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-{repo}-{container}-{tag}-gate'
       - 'functest-{repo}-{container}-{tag}-check'
     container: functest-benchmarking
     ref_arg: BRANCH
     path: docker/benchmarking
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
+    jobs:
+      - 'functest-{repo}-{container}-{tag}-gate'
+      - 'functest-{repo}-{container}-{tag}-check'
+
+- project:
+    name: functest-opnfv-functest-benchmarking-cntt-{tag}-build
+    <<: *functest-params
+    container: functest-benchmarking-cntt
+    ref_arg: BRANCH
+    path: docker/benchmarking-cntt
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-{repo}-{container}-{tag}-gate'
       - 'functest-{repo}-{container}-{tag}-check'
     container: functest-vnf
     ref_arg:
     path: docker/vnf
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-{repo}-{container}-{tag}-gate'
       - 'functest-{repo}-{container}-{tag}-check'
     container: functest-smoke-cntt
     ref_arg: BRANCH
     path: docker/smoke-cntt
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
     jobs:
       - 'functest-{repo}-{container}-{tag}-gate'
       - 'functest-{repo}-{container}-{tag}-check'
           DEBUG: 'true'
       - functest-EXTERNAL_NETWORK:
           EXTERNAL_NETWORK: public
+      - functest-VOLUME_DEVICE_NAME:
+          VOLUME_DEVICE_NAME: sdb
+      - functest-IMAGE_PROPERTIES:
+          IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
     properties:
       - build-blocker:
           use-build-blocker: true
           blocking-level: 'NODE'
           blocking-jobs:
-            - '^functest-{tag}-(daily|check|gate)$'
+            - '^functest(-pi)?(-ovn)?-(arm.*-|amd64-)*[a-z]+-(daily|gate|check)$'
     builders:
       - multijob:
           name: remove former images
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-benchmarking-{tag}-rmi'
               <<: *functest-jobs
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+              <<: *functest-jobs
             - name: 'functest-opnfv-functest-vnf-{tag}-rmi'
               <<: *functest-jobs
       - multijob:
           name: remove dependencies
           projects:
-            - name: 'functest-_-alpine-3.11-rmi'
+            - name: 'functest-_-alpine-3.12-rmi'
               <<: *functest-jobs
       - multijob:
           name: pull dependencies
           projects:
-            - name: 'functest-_-alpine-3.11-pull'
+            - name: 'functest-_-alpine-3.12-pull'
               <<: *functest-jobs
       - multijob:
           name: build opnfv/functest-core
             - name: 'functest-opnfv-functest-vnf-{tag}-check'
               <<: *functest-jobs
       - multijob:
-          name: build opnfv/functest-smoke-cntt
+          name: build cntt containers
           projects:
             - name: 'functest-opnfv-functest-smoke-cntt-{tag}-check'
               <<: *functest-jobs
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-check'
+              <<: *functest-jobs
       - multijob:
           name: opnfv/functest-healthcheck:{tag}
           projects:
       - multijob:
           name: opnfv/functest-smoke:{tag}
           projects:
-            - name: 'functest-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_neutron-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cinder-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-tempest_keystone-run'
               <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_heat-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_telemetry-run'
+              <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-rally_sanity-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-refstack_defcore-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-patrole-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-{tag}-neutron_trunk-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-patrole_admin-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-patrole_member-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-patrole_reader-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-networking-sfc-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-{tag}-barbican-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_barbican-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_octavia-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-{tag}-octavia-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cyborg-run'
               <<: *functest-jobs
       - multijob:
           name: opnfv/functest-smoke-cntt:{tag}
           projects:
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
               <<: *functest-jobs
       - multijob:
           name: opnfv/functest-benchmarking:{tag}
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-benchmarking-{tag}-shaker-run'
               <<: *functest-jobs
+      - multijob:
+          name: opnfv/functest-benchmarking-cntt:{tag}
+          projects:
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+              <<: *functest-jobs
       - multijob:
           name: opnfv/functest-vnf:{tag}
-          execution-type: SEQUENTIALLY
           projects:
             - name: 'functest-opnfv-functest-vnf-{tag}-cloudify-run'
               <<: *functest-jobs
           DEBUG: 'true'
       - functest-EXTERNAL_NETWORK:
           EXTERNAL_NETWORK: public
+      - functest-VOLUME_DEVICE_NAME:
+          VOLUME_DEVICE_NAME: sdb
+      - functest-IMAGE_PROPERTIES:
+          IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
     properties:
       - build-blocker:
           use-build-blocker: true
           blocking-level: 'NODE'
           blocking-jobs:
-            - '^functest-{tag}-(daily|check|gate)$'
+            - '^functest(-pi)?(-ovn)?-(arm.*-|amd64-)*[a-z]+-(daily|gate|check)$'
     builders:
       - multijob:
           name: remove former images
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-benchmarking-{tag}-rmi'
               <<: *functest-jobs
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+              <<: *functest-jobs
             - name: 'functest-opnfv-functest-vnf-{tag}-rmi'
               <<: *functest-jobs
       - multijob:
           name: remove dependencies
           projects:
-            - name: 'functest-_-alpine-3.11-rmi'
+            - name: 'functest-_-alpine-3.12-rmi'
               <<: *functest-jobs
       - multijob:
           name: pull dependencies
           projects:
-            - name: 'functest-_-alpine-3.11-pull'
+            - name: 'functest-_-alpine-3.12-pull'
               <<: *functest-jobs
       - multijob:
           name: build opnfv/functest-core
             - name: 'functest-opnfv-functest-vnf-{tag}-gate'
               <<: *functest-jobs
       - multijob:
-          name: build opnfv/functest-smoke-cntt
+          name: build cntt containers
           projects:
             - name: 'functest-opnfv-functest-smoke-cntt-{tag}-gate'
               <<: *functest-jobs
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-gate'
+              <<: *functest-jobs
       - multijob:
           name: opnfv/functest-healthcheck:{tag}
           projects:
       - multijob:
           name: opnfv/functest-smoke:{tag}
           projects:
-            - name: 'functest-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_neutron-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cinder-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-tempest_keystone-run'
               <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_heat-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_telemetry-run'
+              <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-rally_sanity-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-refstack_defcore-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-patrole-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-{tag}-neutron_trunk-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-patrole_admin-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-patrole_member-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-patrole_reader-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-smoke-{tag}-networking-sfc-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-{tag}-barbican-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_barbican-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_octavia-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-{tag}-octavia-run'
+            - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cyborg-run'
               <<: *functest-jobs
       - multijob:
           name: opnfv/functest-smoke-cntt:{tag}
           projects:
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
               <<: *functest-jobs
-            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
+            - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
               <<: *functest-jobs
       - multijob:
           name: opnfv/functest-benchmarking:{tag}
               <<: *functest-jobs
             - name: 'functest-opnfv-functest-benchmarking-{tag}-shaker-run'
               <<: *functest-jobs
+      - multijob:
+          name: opnfv/functest-benchmarking-cntt:{tag}
+          projects:
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+              <<: *functest-jobs
+            - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+              <<: *functest-jobs
       - multijob:
           name: opnfv/functest-vnf:{tag}
-          execution-type: SEQUENTIALLY
           projects:
             - name: 'functest-opnfv-functest-vnf-{tag}-cloudify-run'
               <<: *functest-jobs
     <<: *functest-params
     jobs:
       - 'functest-{tag}-daily'
+
+- project:
+    name: 'functest-gate'
+    <<: *functest-params
+    exclude:
+      - tag: arm64-latest
+      - tag: arm64-kali
+      - tag: arm64-jerma
+      - tag: arm64-iruya
+      - tag: arm64-hunter
+    jobs:
       - 'functest-{tag}-check'
       - 'functest-{tag}-gate'
 
       - last-success
       - last-failure
       - last-duration
-    regex: ^functest(-ovn)?-[a-z]+-(daily|check|gate)$
+    regex: ^functest(-ovn)?-(arm.*-|amd64-)*[a-z]+-daily$
+
+- view:
+    name: functest-gate
+    view-type: list
+    columns:
+      - status
+      - weather
+      - job
+      - last-success
+      - last-failure
+      - last-duration
+    regex: ^functest(-ovn)?-(arm.*-|amd64-)*[a-z]+-gate$
index 246ceef..8c9a495 100644 (file)
@@ -12,6 +12,9 @@
       - master:
           branch: '{stream}'
           disabled: false
+      - kali:
+          branch: 'stable/{stream}'
+          disabled: false
       - jerma:
           branch: 'stable/{stream}'
           disabled: false
       - 'arm64':
           slave_label: 'opnfv-build-ubuntu-arm'
 
+    image:
+      - 'core'
+      - 'mts'
+
+    exclude:
+      - stream: 'kali'
+        image: 'mts'
+      - stream: 'jerma'
+        image: 'mts'
+      - stream: 'iruya'
+        image: 'mts'
+      - stream: 'hunter'
+        image: 'mts'
+
     # settings for jobs run in multijob phases
     build-job-settings: &build-job-settings
       current-parameters: false
@@ -53,8 +70,8 @@
     # yamllint enable rule:key-duplicates
     jobs:
       - "xtesting-docker-{stream}"
-      - "xtesting-docker-build-{arch_tag}-{stream}"
-      - "xtesting-docker-manifest-{stream}"
+      - "xtesting-{image}-docker-build-{arch_tag}-{stream}"
+      - "xtesting-{image}-docker-manifest-{stream}"
 
 ########################
 # job templates
           name: 'build xtesting images'
           execution-type: PARALLEL
           projects:
-            - name: 'xtesting-docker-build-amd64-{stream}'
+            - name: 'xtesting-core-docker-build-amd64-{stream}'
               <<: *build-job-settings
-            - name: 'xtesting-docker-build-arm64-{stream}'
+            - name: 'xtesting-core-docker-build-arm64-{stream}'
               <<: *build-job-settings
       - multijob:
           name: 'publish xtesting manifests'
           execution-type: PARALLEL
           projects:
-            - name: 'xtesting-docker-manifest-{stream}'
+            - name: 'xtesting-core-docker-manifest-{stream}'
+              <<: *manifest-job-settings
+      - multijob:
+          name: 'build xtesting-mts images'
+          execution-type: PARALLEL
+          projects:
+            - name: 'xtesting-mts-docker-build-amd64-{stream}'
+              <<: *build-job-settings
+            - name: 'xtesting-mts-docker-build-arm64-{stream}'
+              <<: *build-job-settings
+      - multijob:
+          name: 'publish xtesting-mts manifests'
+          execution-type: PARALLEL
+          projects:
+            - name: 'xtesting-mts-docker-manifest-{stream}'
               <<: *manifest-job-settings
 
+
     publishers:
       - 'xtesting-amd64-recipients'
       - 'xtesting-arm64-recipients'
 
 - job-template:
-    name: 'xtesting-docker-build-{arch_tag}-{stream}'
+    name: 'xtesting-{image}-docker-build-{arch_tag}-{stream}'
     disabled: '{obj:disabled}'
     parameters:
       - xtesting-job-parameters:
     builders:
       - shell: |
           #!/bin/bash -ex
-          sudo arch={arch_tag} bash ./build.sh
+          case "{arch_tag}" in
+          "arm64")
+              sudo arch=arm64 amd64_dirs= arm64_dirs=docker/{image} arm_dirs= bash ./build.sh ;;
+          *)
+              sudo arch=amd64 amd64_dirs=docker/{image} arm64_dirs= arm_dirs= bash ./build.sh ;;
+          esac
           exit $?
 
 - job-template:
-    name: 'xtesting-docker-manifest-{stream}'
+    name: 'xtesting-{image}-docker-manifest-{stream}'
 
     parameters:
       - project-parameter:
       - string:
           name: REPO
           default: "opnfv"
-          description: "Repository name for functest images"
+          description: "Repository name for xtesting images"
 
 
     disabled: '{obj:disabled}'
           *)
               tag="{stream}" ;;
           esac
+          case "{image}" in
+          "core")
+              img="" ;;
+          *)
+              img="-{image}" ;;
+          esac
           sudo manifest-tool push from-args \
               --platforms linux/amd64,linux/arm64 \
-              --template $REPO/xtesting:ARCH-$tag \
-              --target $REPO/xtesting:$tag
+              --template $REPO/xtesting$img:ARCH-$tag \
+              --target $REPO/xtesting$img:$tag
           exit $?
 
 - parameter:
       - string:
           name: REPO
           default: "opnfv"
-          description: "Repository name for functest images"
+          description: "Repository name for xtesting images"
 
 # publisher macros
 - publisher:
diff --git a/jjb/functest/xtesting-pi.yaml b/jjb/functest/xtesting-pi.yaml
new file mode 100644 (file)
index 0000000..f27f193
--- /dev/null
@@ -0,0 +1,431 @@
+---
+- xtesting-pi-jobs: &xtesting-pi-jobs
+    name: 'xtesting-pi-jobs'
+    current-parameters: true
+
+- xtesting-pi-params: &xtesting-pi-params
+    name: 'xtesting-pi-params'
+    repo: 'ollivier'
+    port:
+    tag:
+      - latest:
+          branch: master
+          slave: lf-virtual1
+      - kali:
+          branch: stable/kali
+          slave: lf-virtual1
+      - jerma:
+          branch: stable/jerma
+          slave: lf-virtual1
+      - iruya:
+          branch: stable/iruya
+          slave: lf-virtual1
+      - hunter:
+          branch: stable/hunter
+          slave: lf-virtual1
+      - arm-latest:
+          branch: master
+          slave: lf-virtual1
+      - arm-kali:
+          branch: stable/kali
+          slave: lf-virtual1
+      - arm-jerma:
+          branch: stable/jerma
+          slave: lf-virtual1
+      - arm-iruya:
+          branch: stable/iruya
+          slave: lf-virtual1
+      - arm-hunter:
+          branch: stable/hunter
+          slave: lf-virtual1
+      - arm64-latest:
+          branch: master
+          slave: lf-virtual1
+      - arm64-kali:
+          branch: stable/kali
+          slave: lf-virtual1
+      - arm64-jerma:
+          branch: stable/jerma
+          slave: lf-virtual1
+      - arm64-iruya:
+          branch: stable/iruya
+          slave: lf-virtual1
+      - arm64-hunter:
+          branch: stable/hunter
+          slave: lf-virtual1
+
+- parameter:
+    name: xtesting-pi-slave
+    parameters:
+      - label:
+          name: slave
+          default: '{slave}'
+
+- parameter:
+    name: xtesting-pi-build_tag
+    parameters:
+      - random-string:
+          name: build_tag
+
+- parameter:
+    name: xtesting-pi-branch
+    parameters:
+      - string:
+          name: branch
+          default: '{branch}'
+
+- parameter:
+    name: xtesting-pi-DEBUG
+    parameters:
+      - string:
+          name: DEBUG
+          default: 'true'
+
+- xtesting-pi-containers: &xtesting-pi-containers
+    name: 'xtesting-pi-containers'
+    repo: '{repo}'
+    port: '{port}'
+    container: '{container}'
+    tag: '{tag}'
+
+- xtesting-pi-run-containers: &xtesting-pi-run-containers
+    name: 'xtesting-pi-run-containers'
+    <<: *xtesting-pi-containers
+    test: '{test}'
+    privileged: '{privileged}'
+    network: '{network}'
+
+- builder:
+    name: xtesting-pi-pull-containers
+    builders:
+      - shell: |
+          set +x
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          sudo docker pull $image
+
+- builder:
+    name: xtesting-pi-run-containers
+    builders:
+      - shell: |
+          set +x
+          [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          sudo docker run --rm \
+            --privileged={privileged} \
+            --network={network} \
+            -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+            -e S3_DST_URL=s3://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+            -e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+            -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e NODE_NAME=$slave \
+            -e BUILD_TAG=$BUILD_TAG \
+            -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+            -e DEBUG=$DEBUG \
+            -v /home/opnfv/xtesting/.boto:/root/.boto \
+            $image run_tests -t {test} -p -r
+
+- builder:
+    name: xtesting-pi-remove-images
+    builders:
+      - shell: |
+          set +x
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          sudo docker rmi $image || true
+
+
+- job-template:
+    name: 'xtesting-pi-{repo}-{container}-{tag}-pull'
+    parameters:
+      - xtesting-pi-slave:
+          slave: '{slave}'
+    builders:
+      - xtesting-pi-pull-containers:
+          <<: *xtesting-pi-containers
+
+- project:
+    name: 'xtesting-pi-ollivier-xtesting-pull'
+    <<: *xtesting-pi-params
+    container: 'xtesting'
+    jobs:
+      - 'xtesting-pi-{repo}-{container}-{tag}-pull'
+
+- project:
+    name: 'xtesting-pi-ollivier-xtesting-mts-pull'
+    <<: *xtesting-pi-params
+    container: 'xtesting-mts'
+    exclude:
+      - tag: hunter
+      - tag: iruya
+      - tag: jerma
+      - tag: kali
+      - tag: arm-hunter
+      - tag: arm-iruya
+      - tag: arm-jerma
+      - tag: arm-kali
+      - tag: arm-latest
+      - tag: arm64-hunter
+      - tag: arm64-iruya
+      - tag: arm64-jerma
+      - tag: arm64-kali
+    jobs:
+      - 'xtesting-pi-{repo}-{container}-{tag}-pull'
+
+- job-template:
+    name: 'xtesting-pi-{repo}-{container}-{tag}-rmi'
+    parameters:
+      - xtesting-pi-slave:
+          slave: '{slave}'
+    builders:
+      - xtesting-pi-remove-images:
+          <<: *xtesting-pi-containers
+
+- project:
+    name: 'xtesting-pi-ollivier-xtesting-rmi'
+    <<: *xtesting-pi-params
+    container: 'xtesting'
+    jobs:
+      - 'xtesting-pi-{repo}-{container}-{tag}-rmi'
+
+- project:
+    name: 'xtesting-pi-ollivier-xtesting-mts-rmi'
+    <<: *xtesting-pi-params
+    container: 'xtesting-mts'
+    exclude:
+      - tag: hunter
+      - tag: iruya
+      - tag: jerma
+      - tag: kali
+      - tag: arm-hunter
+      - tag: arm-iruya
+      - tag: arm-jerma
+      - tag: arm-kali
+      - tag: arm-latest
+      - tag: arm64-hunter
+      - tag: arm64-iruya
+      - tag: arm64-jerma
+      - tag: arm64-kali
+    jobs:
+      - 'xtesting-pi-{repo}-{container}-{tag}-rmi'
+
+- job-template:
+    name: 'xtesting-pi-{repo}-{container}-{tag}-{test}-run'
+    parameters:
+      - xtesting-pi-slave:
+          slave: '{slave}'
+      - xtesting-pi-build_tag:
+          build_tag: ''
+      - xtesting-pi-DEBUG:
+          DEBUG: 'true'
+    builders:
+      - xtesting-pi-run-containers:
+          <<: *xtesting-pi-run-containers
+
+- project:
+    name: 'xtesting-pi-ollivier-xtesting'
+    <<: *xtesting-pi-params
+    container: 'xtesting'
+    test:
+      - first
+      - second
+      - third
+      - fourth
+      - fifth
+      - sixth
+    exclude:
+      - tag: hunter
+        test: sixth
+      - tag: iruya
+        test: sixth
+      - tag: arm-hunter
+        test: sixth
+      - tag: arm-iruya
+        test: sixth
+      - tag: arm64-hunter
+        test: sixth
+      - tag: arm64-iruya
+        test: sixth
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'xtesting-pi-{repo}-{container}-{tag}-{test}-run'
+
+- project:
+    name: 'xtesting-pi-ollivier-xtesting-mts'
+    <<: *xtesting-pi-params
+    container: 'xtesting-mts'
+    test:
+      - seventh
+    exclude:
+      - tag: hunter
+        test: seventh
+      - tag: iruya
+        test: seventh
+      - tag: jerma
+        test: seventh
+      - tag: kali
+        test: seventh
+      - tag: arm-hunter
+        test: seventh
+      - tag: arm-iruya
+        test: seventh
+      - tag: arm-jerma
+        test: seventh
+      - tag: arm-kali
+        test: seventh
+      - tag: arm-latest
+        test: seventh
+      - tag: arm64-hunter
+        test: seventh
+      - tag: arm64-iruya
+        test: seventh
+      - tag: arm64-jerma
+        test: seventh
+      - tag: arm64-kali
+        test: seventh
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'xtesting-pi-{repo}-{container}-{tag}-{test}-run'
+
+
+- builder:
+    name: xtesting-pi-zip
+    builders:
+      - shell: |
+          set +x
+          [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
+          if [ "{repo}" = "_" ]; then
+            image={container}:{tag}
+          elif [ "{port}" = "None" ]; then
+            image={repo}/{container}:{tag}
+          else
+            image={repo}:{port}/{container}:{tag}
+          fi
+          sudo docker run --rm \
+            -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+            -e S3_DST_URL=s3://artifacts.opnfv.org/xtesting \
+            -e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting \
+            -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+            -e BUILD_TAG=$BUILD_TAG \
+            -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+            -e DEBUG=$DEBUG \
+            -v /home/opnfv/xtesting/.boto:/root/.boto \
+            $image zip_campaign
+
+- job-template:
+    name: 'xtesting-pi-{tag}-zip'
+    parameters:
+      - xtesting-pi-slave:
+          slave: '{slave}'
+      - xtesting-pi-build_tag:
+          build_tag: ''
+      - xtesting-pi-DEBUG:
+          DEBUG: 'true'
+    builders:
+      - xtesting-pi-zip:
+          <<: *xtesting-pi-containers
+
+- project:
+    name: 'xtesting-pi-{tag}-zip'
+    <<: *xtesting-pi-params
+    container: 'xtesting'
+    jobs:
+      - 'xtesting-pi-{tag}-zip'
+
+- job-template:
+    name: 'xtesting-pi-{tag}-daily'
+    project-type: multijob
+    triggers:
+      - timed: '@daily'
+    parameters:
+      - xtesting-pi-slave:
+          slave: '{slave}'
+      - xtesting-pi-build_tag:
+          build_tag: ''
+      - xtesting-pi-DEBUG:
+          DEBUG: 'true'
+    properties:
+      - build-blocker:
+          use-build-blocker: true
+          blocking-level: 'NODE'
+          blocking-jobs:
+            - '^xtesting-pi-{tag}-(daily|check|gate)$'
+    builders:
+      - multijob:
+          name: remove former images
+          projects:
+            - name: 'xtesting-pi-ollivier-xtesting-{tag}-rmi'
+              <<: *xtesting-pi-jobs
+            - name: 'xtesting-pi-ollivier-xtesting-mts-{tag}-rmi'
+              <<: *xtesting-pi-jobs
+      - multijob:
+          name: pull containers
+          projects:
+            - name: 'xtesting-pi-ollivier-xtesting-{tag}-pull'
+              <<: *xtesting-pi-jobs
+            - name: 'xtesting-pi-ollivier-xtesting-mts-{tag}-pull'
+              <<: *xtesting-pi-jobs
+      - multijob:
+          name: ollivier/xtesting:{tag}
+          projects:
+            - name: 'xtesting-pi-ollivier-xtesting-{tag}-first-run'
+              <<: *xtesting-pi-jobs
+            - name: 'xtesting-pi-ollivier-xtesting-{tag}-second-run'
+              <<: *xtesting-pi-jobs
+            - name: 'xtesting-pi-ollivier-xtesting-{tag}-third-run'
+              <<: *xtesting-pi-jobs
+            - name: 'xtesting-pi-ollivier-xtesting-{tag}-fourth-run'
+              <<: *xtesting-pi-jobs
+            - name: 'xtesting-pi-ollivier-xtesting-{tag}-fifth-run'
+              <<: *xtesting-pi-jobs
+            - name: 'xtesting-pi-ollivier-xtesting-{tag}-sixth-run'
+              <<: *xtesting-pi-jobs
+      - multijob:
+          name: ollivier/xtesting-mts:{tag}
+          projects:
+            - name: 'xtesting-pi-ollivier-xtesting-mts-{tag}-seventh-run'
+              <<: *xtesting-pi-jobs
+      - multijob:
+          name: dump all campaign data
+          projects:
+            - name: 'xtesting-pi-{tag}-zip'
+              <<: *xtesting-pi-jobs
+
+
+- project:
+    name: 'xtesting-pi'
+    <<: *xtesting-pi-params
+    jobs:
+      - 'xtesting-pi-{tag}-daily'
+
+- view:
+    name: xtesting-pi
+    view-type: list
+    columns:
+      - status
+      - weather
+      - job
+      - last-success
+      - last-failure
+      - last-duration
+    regex: ^xtesting-pi-(arm.*-|amd64-)*[a-z]+-daily$
index a1013fe..61ae1b0 100644 (file)
@@ -5,6 +5,9 @@
       - latest:
           branch: master
           slave: lf-virtual1
+      - kali:
+          branch: stable/kali
+          slave: lf-virtual1
       - jerma:
           branch: stable/jerma
           slave: lf-virtual1
index 44231e4..40fd3ab 100644 (file)
@@ -11,6 +11,9 @@
       - latest:
           branch: master
           slave: lf-virtual1
+      - kali:
+          branch: stable/kali
+          slave: lf-virtual1
       - jerma:
           branch: stable/jerma
           slave: lf-virtual1
       - hunter:
           branch: stable/hunter
           slave: lf-virtual1
+      - arm64-latest:
+          branch: master
+          slave: lf-virtual1
+      - arm64-kali:
+          branch: stable/kali
+          slave: lf-virtual1
+      - arm64-jerma:
+          branch: stable/jerma
+          slave: lf-virtual1
+      - arm64-iruya:
+          branch: stable/iruya
+          slave: lf-virtual1
+      - arm64-hunter:
+          branch: stable/hunter
+          slave: lf-virtual1
 
 - parameter:
     name: xtesting-slave
@@ -59,6 +77,8 @@
     name: 'xtesting-run-containers'
     <<: *xtesting-containers
     test: '{test}'
+    privileged: '{privileged}'
+    network: '{network}'
 
 - builder:
     name: xtesting-pull-containers
             image={repo}:{port}/{container}:{tag}
           fi
           sudo docker run --rm \
+            --privileged={privileged} \
+            --network={network} \
             -e S3_ENDPOINT_URL=https://storage.googleapis.com \
             -e S3_DST_URL=s3://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
             -e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
     jobs:
       - 'xtesting-{repo}-{container}-{tag}-pull'
 
+- project:
+    name: 'xtesting-opnfv-xtesting-mts-pull'
+    <<: *xtesting-params
+    container: 'xtesting-mts'
+    exclude:
+      - tag: hunter
+      - tag: iruya
+      - tag: jerma
+      - tag: kali
+      - tag: arm64-hunter
+      - tag: arm64-iruya
+      - tag: arm64-jerma
+      - tag: arm64-kali
+    jobs:
+      - 'xtesting-{repo}-{container}-{tag}-pull'
+
 - job-template:
     name: 'xtesting-{repo}-{container}-{tag}-rmi'
     parameters:
     jobs:
       - 'xtesting-{repo}-{container}-{tag}-rmi'
 
+- project:
+    name: 'xtesting-opnfv-xtesting-mts-rmi'
+    <<: *xtesting-params
+    container: 'xtesting-mts'
+    exclude:
+      - tag: hunter
+      - tag: iruya
+      - tag: jerma
+      - tag: kali
+      - tag: arm64-hunter
+      - tag: arm64-iruya
+      - tag: arm64-jerma
+      - tag: arm64-kali
+    jobs:
+      - 'xtesting-{repo}-{container}-{tag}-rmi'
+
 - job-template:
     name: 'xtesting-{repo}-{container}-{tag}-{test}-run'
     parameters:
         test: sixth
       - tag: iruya
         test: sixth
+      - tag: arm64-hunter
+        test: sixth
+      - tag: arm64-iruya
+        test: sixth
+    privileged: 'false'
+    network: bridge
     jobs:
       - 'xtesting-{repo}-{container}-{tag}-{test}-run'
 
+- project:
+    name: 'xtesting-opnfv-xtesting-mts'
+    <<: *xtesting-params
+    container: 'xtesting-mts'
+    test:
+      - seventh
+    exclude:
+      - tag: hunter
+        test: seventh
+      - tag: iruya
+        test: seventh
+      - tag: jerma
+        test: seventh
+      - tag: kali
+        test: seventh
+      - tag: arm64-hunter
+        test: seventh
+      - tag: arm64-iruya
+        test: seventh
+      - tag: arm64-jerma
+        test: seventh
+      - tag: arm64-kali
+        test: seventh
+    privileged: 'false'
+    network: bridge
+    jobs:
+      - 'xtesting-{repo}-{container}-{tag}-{test}-run'
 
 - builder:
     name: xtesting-zip
           projects:
             - name: 'xtesting-opnfv-xtesting-{tag}-rmi'
               <<: *xtesting-jobs
+            - name: 'xtesting-opnfv-xtesting-mts-{tag}-rmi'
+              <<: *xtesting-jobs
       - multijob:
           name: pull containers
           projects:
             - name: 'xtesting-opnfv-xtesting-{tag}-pull'
               <<: *xtesting-jobs
+            - name: 'xtesting-opnfv-xtesting-mts-{tag}-pull'
+              <<: *xtesting-jobs
       - multijob:
           name: opnfv/xtesting:{tag}
           projects:
               <<: *xtesting-jobs
             - name: 'xtesting-opnfv-xtesting-{tag}-sixth-run'
               <<: *xtesting-jobs
+      - multijob:
+          name: opnfv/xtesting-mts:{tag}
+          projects:
+            - name: 'xtesting-opnfv-xtesting-mts-{tag}-seventh-run'
+              <<: *xtesting-jobs
       - multijob:
           name: dump all campaign data
           projects:
           ref: $branch
 
 - project:
-    name: 'xtesting-_-alpine-3.11-rmi'
+    name: 'xtesting-_-alpine-3.12-rmi'
     repo: _
     port:
     container: alpine
-    tag: '3.11'
+    tag: '3.12'
     slave: master
     jobs:
       - 'xtesting-{repo}-{container}-{tag}-rmi'
 
 - project:
-    name: 'xtesting-_-alpine-3.11-pull'
+    name: 'xtesting-_-alpine-3.12-pull'
     repo: _
     port:
     container: alpine
-    tag: '3.11'
+    tag: '3.12'
     slave: master
+    exclude:
+      - tag: arm64-hunter
+      - tag: arm64-iruya
+      - tag: arm64-jerma
+      - tag: arm64-kali
+      - tag: arm64-latest
     jobs:
       - 'xtesting-{repo}-{container}-{tag}-pull'
 
     <<: *xtesting-params
     container: xtesting
     ref_arg: BRANCH
-    path: docker
+    path: docker/core
+    exclude:
+      - tag: arm64-hunter
+      - tag: arm64-iruya
+      - tag: arm64-jerma
+      - tag: arm64-kali
+      - tag: arm64-latest
+    jobs:
+      - 'xtesting-{repo}-{container}-{tag}-gate'
+      - 'xtesting-{repo}-{container}-{tag}-check'
+
+- project:
+    name: xtesting-opnfv-xtesting-mts-{tag}-build
+    <<: *xtesting-params
+    container: xtesting-mts
+    ref_arg: BRANCH
+    path: docker/mts
+    exclude:
+      - tag: arm64-hunter
+      - tag: arm64-iruya
+      - tag: arm64-jerma
+      - tag: arm64-kali
+      - tag: arm64-latest
     jobs:
       - 'xtesting-{repo}-{container}-{tag}-gate'
       - 'xtesting-{repo}-{container}-{tag}-check'
           projects:
             - name: 'xtesting-opnfv-xtesting-{tag}-rmi'
               <<: *xtesting-jobs
+            - name: 'xtesting-opnfv-xtesting-mts-{tag}-rmi'
+              <<: *xtesting-jobs
       - multijob:
           name: remove dependencies
           projects:
-            - name: 'xtesting-_-alpine-3.11-rmi'
+            - name: 'xtesting-_-alpine-3.12-rmi'
               <<: *xtesting-jobs
       - multijob:
           name: pull dependencies
           projects:
-            - name: 'xtesting-_-alpine-3.11-pull'
+            - name: 'xtesting-_-alpine-3.12-pull'
               <<: *xtesting-jobs
       - multijob:
           name: opnfv/xtesting
           projects:
             - name: 'xtesting-opnfv-xtesting-{tag}-check'
               <<: *xtesting-jobs
+      - multijob:
+          name: opnfv/xtesting-mts
+          projects:
+            - name: 'xtesting-opnfv-xtesting-mts-{tag}-check'
+              <<: *xtesting-jobs
       - multijob:
           name: opnfv/xtesting:{tag}
           projects:
               <<: *xtesting-jobs
             - name: 'xtesting-opnfv-xtesting-{tag}-sixth-run'
               <<: *xtesting-jobs
+      - multijob:
+          name: opnfv/xtesting-mts:{tag}
+          projects:
+            - name: 'xtesting-opnfv-xtesting-mts-{tag}-seventh-run'
+              <<: *xtesting-jobs
 
 - trigger:
     name: xtesting-patchset-created
           projects:
             - name: 'xtesting-opnfv-xtesting-{tag}-rmi'
               <<: *xtesting-jobs
+            - name: 'xtesting-opnfv-xtesting-mts-{tag}-rmi'
+              <<: *xtesting-jobs
       - multijob:
           name: remove dependencies
           projects:
-            - name: 'xtesting-_-alpine-3.11-rmi'
+            - name: 'xtesting-_-alpine-3.12-rmi'
               <<: *xtesting-jobs
       - multijob:
           name: pull dependencies
           projects:
-            - name: 'xtesting-_-alpine-3.11-pull'
+            - name: 'xtesting-_-alpine-3.12-pull'
               <<: *xtesting-jobs
       - multijob:
           name: opnfv/xtesting
           projects:
             - name: 'xtesting-opnfv-xtesting-{tag}-gate'
               <<: *xtesting-jobs
+      - multijob:
+          name: opnfv/xtesting-mts
+          projects:
+            - name: 'xtesting-opnfv-xtesting-mts-{tag}-gate'
+              <<: *xtesting-jobs
       - multijob:
           name: opnfv/xtesting:{tag}
           projects:
               <<: *xtesting-jobs
             - name: 'xtesting-opnfv-xtesting-{tag}-sixth-run'
               <<: *xtesting-jobs
+      - multijob:
+          name: opnfv/xtesting-mts:{tag}
+          projects:
+            - name: 'xtesting-opnfv-xtesting-mts-{tag}-seventh-run'
+              <<: *xtesting-jobs
 
 - project:
     name: 'xtesting'
     <<: *xtesting-params
     jobs:
       - 'xtesting-{tag}-daily'
+
+- project:
+    name: 'xtesting-gate'
+    <<: *xtesting-params
+    exclude:
+      - tag: arm64-hunter
+      - tag: arm64-iruya
+      - tag: arm64-jerma
+      - tag: arm64-kali
+      - tag: arm64-latest
+    jobs:
       - 'xtesting-{tag}-check'
       - 'xtesting-{tag}-gate'
 
       - last-success
       - last-failure
       - last-duration
-    regex: ^xtesting-[a-z]+-(daily|check|gate)$
+    regex: ^xtesting-(arm.*-|amd64-)*[a-z]+-daily$
+
+- view:
+    name: xtesting-gate
+    view-type: list
+    columns:
+      - status
+      - weather
+      - job
+      - last-success
+      - last-failure
+      - last-duration
+    regex: ^xtesting-(arm.*-|amd64-)*[a-z]+-gate$
index 0599035..ef37ef9 100644 (file)
       - email-ext:
           <<: *email_ptl_defaults
           recipients: >
-            fatih.degirmenci@ericsson.com
             feng.xiaowei@zte.com.cn
 - publisher:
     name: 'email-releng-utils-ptl'
           <<: *email_ptl_defaults
           recipients: >
             fatih.degirmenci@ericsson.com
+          disable-publisher: true
 
 - publisher:
     name: 'email-samplevnf-ptl'
diff --git a/jjb/ipv6/ipv6-rtd-jobs.yaml b/jjb/ipv6/ipv6-rtd-jobs.yaml
deleted file mode 100644 (file)
index e51173e..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
----
-- project:
-    name: ipv6-rtd
-    project: ipv6
-    project-name: ipv6
-
-    project-pattern: 'ipv6'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ipv6/47370/'
-    rtd-token: 'e7abb4e2c3f3f0dfc1a8feefe39b27f4a4f9b98a'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          disabled: false
-      - hunter: &hunter
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-      - gambia:
-          branch: 'stable/{stream}'
-          disabled: false
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/ipv6/ipv6-views.yaml b/jjb/ipv6/ipv6-views.yaml
deleted file mode 100644 (file)
index 936ca42..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
-    name: ipv6-view
-    views:
-      - project-view
-    project-name: ipv6
diff --git a/jjb/ipv6/ipv6.yaml b/jjb/ipv6/ipv6.yaml
deleted file mode 100644 (file)
index 2946ec7..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
-    name: ipv6
-
-    project: '{name}'
-
-    jobs:
-      - '{project}-verify-basic'
diff --git a/jjb/kuberef/kuberef-run-linting.sh b/jjb/kuberef/kuberef-run-linting.sh
new file mode 100755 (executable)
index 0000000..4f681ac
--- /dev/null
@@ -0,0 +1,56 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 Samsung Electronics
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -o nounset
+set -o pipefail
+set -o xtrace
+
+# shellcheck disable=SC1091
+source /etc/os-release || source /usr/lib/os-release
+
+pkgs=""
+if ! command -v shellcheck; then
+    case ${ID,,} in
+        *suse*|rhel|centos|fedora)
+            pkgs="ShellCheck"
+        ;;
+        ubuntu|debian)
+            pkgs="shellcheck"
+        ;;
+    esac
+fi
+
+if ! command -v pip; then
+    pkgs+=" python-pip"
+fi
+
+if [ -n "$pkgs" ]; then
+    case ${ID,,} in
+        *suse*)
+            sudo zypper --gpg-auto-import-keys refresh
+            sudo -H -E zypper install -y --no-recommends $pkgs
+        ;;
+        ubuntu|debian)
+            sudo apt-get update
+            sudo -H -E apt-get -y --no-install-recommends install $pkgs
+        ;;
+        rhel|centos|fedora)
+            PKG_MANAGER=$(command -v dnf || command -v yum)
+            if ! sudo "$PKG_MANAGER" repolist | grep "epel/"; then
+                sudo -H -E "$PKG_MANAGER" -q -y install epel-release
+            fi
+            sudo "$PKG_MANAGER" updateinfo --assumeyes
+            sudo -H -E "${PKG_MANAGER}" -y install "$pkgs"
+        ;;
+    esac
+fi
+
+tox -e lint
+bash -c 'shopt -s globstar; shellcheck **/*.sh'
diff --git a/jjb/kuberef/kuberef-verify-jobs.yaml b/jjb/kuberef/kuberef-verify-jobs.yaml
new file mode 100644 (file)
index 0000000..dfe6d72
--- /dev/null
@@ -0,0 +1,51 @@
+---
+- project:
+    name: kuberef-verify
+    project: kuberef
+    stream:
+      - master:
+          branch: '{stream}'
+    jobs:
+      - '{project}-verify-lint-{stream}'
+
+- job-template:
+    name: '{project}-verify-lint-{stream}'
+    scm:
+      - kuberef-project-scm
+    triggers:
+      - gerrit:
+          server-name: 'gerrit.opnfv.org'
+          trigger-on:
+            - patchset-created-event
+            - comment-added-contains-event:
+                comment-contains-value: 'recheck'
+            - comment-added-contains-event:
+                comment-contains-value: 'reverify'
+          projects:
+            - project-compare-type: 'ANT'
+              project-pattern: 'kuberef'
+              branches:
+                - branch-compare-type: 'ANT'
+                  branch-pattern: '**/{branch}'
+          skip-vote:
+            successful: false
+            failed: false
+            unstable: false
+            notbuilt: false
+    builders:
+      - kuberef-run-linting
+
+- scm:
+    name: kuberef-project-scm
+    scm:
+      - git:
+          url: https://gerrit.opnfv.org/gerrit/kuberef
+          refspec: '+refs/changes/*:refs/changes/*'
+          branches:
+            - '{ref}'
+
+- builder:
+    name: kuberef-run-linting
+    builders:
+      - shell:
+          !include-raw: ./kuberef-run-linting.sh
diff --git a/jjb/kuberef/kuberef-views.yaml b/jjb/kuberef/kuberef-views.yaml
new file mode 100644 (file)
index 0000000..7b19c2d
--- /dev/null
@@ -0,0 +1,6 @@
+---
+- project:
+    name: kuberef-view
+    views:
+      - project-view
+    project-name: kuberef
diff --git a/jjb/kvmfornfv/kvmfornfv-build.sh b/jjb/kvmfornfv/kvmfornfv-build.sh
deleted file mode 100755 (executable)
index ff91849..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-# build output directory
-OUTPUT_DIR=$WORKSPACE/build_output
-mkdir -p $OUTPUT_DIR
-
-# start the build
-cd $WORKSPACE
-./ci/build.sh $OUTPUT_DIR
diff --git a/jjb/kvmfornfv/kvmfornfv-download-artifact.sh b/jjb/kvmfornfv/kvmfornfv-download-artifact.sh
deleted file mode 100755 (executable)
index ea37eb2..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
-    JOB_TYPE=${BASH_REMATCH[0]}
-else
-    echo "Unable to determine job type!"
-    exit 1
-fi
-
-# do stuff differently based on the job type
-case "$JOB_TYPE" in
-    verify)
-        echo "Downloading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
-        GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
-        ;;
-    daily)
-        gsutil cp gs://$GS_URL/latest.properties $WORKSPACE/latest.properties
-        source $WORKSPACE/latest.properties
-        GS_UPLOAD_LOCATION=$OPNFV_ARTIFACT_URL
-        echo "Downloading artifacts from $GS_UPLOAD_LOCATION for daily run. This could take some time..."
-        ;;
-    *)
-        echo "Artifact download is not enabled for $JOB_TYPE jobs"
-        exit 1
-esac
-
-GS_GUESTIMAGE_LOCATION="gs://artifacts.opnfv.org/$PROJECT/guest-image"
-/bin/mkdir -p $WORKSPACE/build_output
-gsutil cp -r $GS_UPLOAD_LOCATION/* $WORKSPACE/build_output > $WORKSPACE/gsutil.log 2>&1
-gsutil cp $GS_GUESTIMAGE_LOCATION/guest1.sha512 $WORKSPACE/build_output > $WORKSPACE/gsutil.log 2>&1
-
-echo "--------------------------------------------------------"
-ls -al $WORKSPACE/build_output
-echo "--------------------------------------------------------"
-echo
-echo "Downloaded artifacts!"
diff --git a/jjb/kvmfornfv/kvmfornfv-rtd-jobs.yaml b/jjb/kvmfornfv/kvmfornfv-rtd-jobs.yaml
deleted file mode 100644 (file)
index 7d0b925..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
-    name: kvmfornfv-rtd
-    project: kvmfornfv
-    project-name: kvmfornfv
-
-    gerrit-skip-vote: true
-    project-pattern: 'kvmfornfv'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-kvmfornfv/47372/'
-    rtd-token: '32ae6f0ad54181a27fd38d99821a021f5087554a'
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/kvmfornfv/kvmfornfv-test.sh b/jjb/kvmfornfv/kvmfornfv-test.sh
deleted file mode 100755 (executable)
index b31d61c..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-##########################################################
-##This script includes executing cyclictest scripts.
-##########################################################
-#The latest build packages are stored in build_output
-
-ls -al $WORKSPACE/build_output
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
-    JOB_TYPE=${BASH_REMATCH[0]}
-else
-    echo "Unable to determine job type!"
-    exit 1
-fi
-
-echo $TEST_NAME
-
-# do stuff differently based on the job type
-case "$JOB_TYPE" in
-    verify)
-        #start the test
-        cd $WORKSPACE
-        ./ci/test_kvmfornfv.sh $JOB_TYPE
-        ;;
-    daily)
-        #start the test
-        cd $WORKSPACE
-        ./ci/test_kvmfornfv.sh $JOB_TYPE $TEST_NAME
-        ;;
-    *)
-        echo "Test is not enabled for $JOB_TYPE jobs"
-        exit 1
-esac
diff --git a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
deleted file mode 100755 (executable)
index 91b6f44..0000000
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/bin/bash
-set -o nounset
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
-    JOB_TYPE=${BASH_REMATCH[0]}
-else
-    echo "Unable to determine job type!"
-    exit 1
-fi
-
-case "$JOB_TYPE" in
-    verify)
-       OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
-       GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
-       echo "Removing outdated artifacts produced for the previous patch for the change $GERRIT_CHANGE_NUMBER"
-       gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1 && gsutil rm -r $GS_UPLOAD_LOCATION
-       echo "Uploading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
-       ;;
-    daily)
-        echo "Uploading daily artifacts This could take some time..."
-        OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-        GS_UPLOAD_LOCATION="gs://$GS_URL/$OPNFV_ARTIFACT_VERSION"
-        GS_LOG_LOCATION="gs://$GS_URL/logs-$(date -u +"%Y-%m-%d")"/
-        ;;
-    *)
-        echo "Artifact upload is not enabled for $JOB_TYPE jobs"
-        exit 1
-esac
-
-# save information regarding artifacts into file
-(
-    echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
-    echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
-    echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
-    echo "OPNFV_ARTIFACT_URL=$GS_UPLOAD_LOCATION"
-    echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-source $WORKSPACE/opnfv.properties
-
-# upload artifacts
-if [[ "$PHASE" == "build" ]]; then
-    gsutil cp -r $WORKSPACE/build_output/* $GS_UPLOAD_LOCATION > $WORKSPACE/gsutil.log 2>&1
-    gsutil -m setmeta -r \
-        -h "Cache-Control:private, max-age=0, no-transform" \
-        $GS_UPLOAD_LOCATION > /dev/null 2>&1
-else
-    if [[ "$JOB_TYPE" == "daily" ]]; then
-        log_dir=$WORKSPACE/build_output/log
-        if [[ -d "$log_dir" ]]; then
-            #Uploading logs to artifacts
-            echo "Uploading artifacts for future debugging needs...."
-            gsutil cp -r $WORKSPACE/build_output/log-*.tar.gz $GS_LOG_LOCATION > $WORKSPACE/gsutil.log 2>&1
-            # verifying the logs uploaded by cyclictest daily test job
-            gsutil ls $GS_LOG_LOCATION > /dev/null 2>&1
-            if [[ $? -ne 0 ]]; then
-                echo "Problem while uploading logs to artifacts!"
-                echo "Check log $WORKSPACE/gsutil.log on $NODE_NAME"
-                exit 1
-            fi
-        else
-            echo "No test logs/artifacts available for uploading"
-        fi
-    fi
-fi
-
-# upload metadata file for the artifacts built by daily job
-if [[ "$JOB_TYPE" == "daily" && "$PHASE" == "build" ]]; then
-    gsutil cp $WORKSPACE/opnfv.properties $GS_UPLOAD_LOCATION/opnfv.properties > $WORKSPACE/gsutil.log 2>&1
-    gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > $WORKSPACE/gsutil.log 2>&1
-    gsutil -m setmeta -r \
-        -h "Cache-Control:private, max-age=0, no-transform" \
-        $GS_UPLOAD_LOCATION/opnfv.properties \
-        gs://$GS_URL/latest.properties > /dev/null 2>&1
-fi
-
-# verifying the artifacts uploading by verify/daily build job
-if [[ "$PHASE" == "build" ]]; then
-    gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1
-    if [[ $? -ne 0 ]]; then
-        echo "Problem while uploading artifacts!"
-        echo "Check log $WORKSPACE/gsutil.log on $NODE_NAME"
-        exit 1
-    fi
-fi
-echo "Uploaded artifacts!"
diff --git a/jjb/kvmfornfv/kvmfornfv-views.yaml b/jjb/kvmfornfv/kvmfornfv-views.yaml
deleted file mode 100644 (file)
index a029276..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
-    name: kvmfornfv-view
-    views:
-      - project-view
-    project-name: kvmfornfv
diff --git a/jjb/kvmfornfv/kvmfornfv.yaml b/jjb/kvmfornfv/kvmfornfv.yaml
deleted file mode 100644 (file)
index ad497e9..0000000
+++ /dev/null
@@ -1,386 +0,0 @@
----
-- project:
-    name: kvmfornfv
-
-    project: '{name}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: true
-    #####################################
-    # patch verification phases
-    #####################################
-    phase:
-      - 'build':
-          slave-label: 'opnfv-build-ubuntu'
-      - 'test':
-          slave-label: 'intel-pod10'
-    #####################################
-    # patch verification phases
-    #####################################
-    testname:
-      - 'cyclictest'
-      - 'packet_forward'
-      - 'livemigration'
-    #####################################
-    # patch verification phases
-    #####################################
-    jobs:
-      - 'kvmfornfv-verify-{stream}'
-      - 'kvmfornfv-verify-{phase}-{stream}'
-      - 'kvmfornfv-merge-{stream}'
-      - 'kvmfornfv-daily-{stream}'
-      - 'kvmfornfv-daily-build-{stream}'
-      - 'kvmfornfv-{testname}-daily-test-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
-    name: 'kvmfornfv-verify-{stream}'
-
-    project-type: multijob
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-total: 3
-          option: 'project'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**'
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - multijob:
-          name: build
-          condition: SUCCESSFUL
-          projects:
-            - name: 'kvmfornfv-verify-build-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-      - multijob:
-          name: test
-          condition: SUCCESSFUL
-          projects:
-            - name: 'kvmfornfv-verify-test-{stream}'
-              current-parameters: false
-              predefined-parameters: |
-                BRANCH=$BRANCH
-                GERRIT_REFSPEC=$GERRIT_REFSPEC
-                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-              node-parameters: false
-              kill-phase-on: FAILURE
-              abort-all-job: true
-
-- job-template:
-    name: 'kvmfornfv-verify-{phase}-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    scm:
-      - git-scm-gerrit
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{slave-label}-defaults'
-      - 'kvmfornfv-defaults':
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: PHASE
-          default: '{phase}'
-          description: "Execution of kvmfornfv daily '{phase}' job ."
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - '{project}-verify-{phase}-macro'
-
-- job-template:
-    name: 'kvmfornfv-merge-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-      - 'kvmfornfv-defaults':
-          gs-pathname: '{gs-pathname}'
-
-    scm:
-      - git-scm
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - change-merged-event
-            - comment-added-contains-event:
-                comment-contains-value: 'remerge'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-
-    builders:
-      - shell:
-          !include-raw: ./kvmfornfv-build.sh
-
-- job-template:
-    name: 'kvmfornfv-daily-{stream}'
-
-    project-type: multijob
-
-    disabled: '{obj:disabled}'
-
-    concurrent: false
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-      - 'kvmfornfv-defaults':
-          gs-pathname: '{gs-pathname}'
-
-    scm:
-      - git-scm
-
-    triggers:
-      - timed: '@midnight'
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - multijob:
-          name: cyclictest-build
-          condition: SUCCESSFUL
-          projects:
-            - name: 'kvmfornfv-daily-build-{stream}'
-              current-parameters: false
-              node-parameters: false
-              git-revision: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-      - multijob:
-          name: cyclictest-test
-          condition: SUCCESSFUL
-          projects:
-            - name: 'kvmfornfv-cyclictest-daily-test-{stream}'
-              current-parameters: false
-              node-parameters: false
-              git-revision: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-      - multijob:
-          name: packetforward-test
-          condition: SUCCESSFUL
-          projects:
-            - name: 'kvmfornfv-packet_forward-daily-test-{stream}'
-              current-parameters: false
-              node-parameters: false
-              git-revision: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-      - multijob:
-          name: livemigration-test
-          condition: SUCCESSFUL
-          projects:
-            - name: 'kvmfornfv-livemigration-daily-test-{stream}'
-              current-parameters: false
-              node-parameters: false
-              git-revision: true
-              kill-phase-on: FAILURE
-              abort-all-job: true
-
-- job-template:
-    name: 'kvmfornfv-daily-build-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: false
-
-    scm:
-      - git-scm
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-      - 'kvmfornfv-defaults':
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: PHASE
-          default: 'build'
-          description: "Execution of kvmfornfv daily 'build' job ."
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - '{project}-daily-build-macro'
-
-- job-template:
-    name: 'kvmfornfv-{testname}-daily-test-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: false
-
-    scm:
-      - git-scm
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 360
-          fail: true
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'intel-pod10-defaults'
-      - 'kvmfornfv-defaults':
-          gs-pathname: '{gs-pathname}'
-      - string:
-          name: TEST_NAME
-          default: '{testname}'
-          description: "Daily job to execute kvmfornfv '{testname}' testcase."
-      - string:
-          name: PHASE
-          default: 'test'
-          description: "Execution of kvmfornfv daily 'test' job ."
-
-    builders:
-      - description-setter:
-          description: "Built on $NODE_NAME"
-      - '{project}-{testname}-daily-test-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
-    name: 'kvmfornfv-verify-build-macro'
-    builders:
-      - shell:
-          !include-raw: ./kvmfornfv-build.sh
-      - shell:
-          !include-raw: ./kvmfornfv-upload-artifact.sh
-
-- builder:
-    name: 'kvmfornfv-verify-test-macro'
-    builders:
-      - shell:
-          !include-raw: ./kvmfornfv-download-artifact.sh
-      - shell:
-          !include-raw: ./kvmfornfv-test.sh
-
-- builder:
-    name: 'kvmfornfv-daily-build-macro'
-    builders:
-      - shell:
-          !include-raw: ./kvmfornfv-build.sh
-      - shell:
-          !include-raw: ./kvmfornfv-upload-artifact.sh
-
-- builder:
-    name: 'kvmfornfv-cyclictest-daily-test-macro'
-    builders:
-      - shell:
-          !include-raw: ./kvmfornfv-download-artifact.sh
-      - shell:
-          !include-raw: ./kvmfornfv-test.sh
-      - shell:
-          !include-raw: ./kvmfornfv-upload-artifact.sh
-
-- builder:
-    name: 'kvmfornfv-packet_forward-daily-test-macro'
-    builders:
-      - shell:
-          !include-raw: ./kvmfornfv-download-artifact.sh
-      - shell:
-          !include-raw: ./kvmfornfv-test.sh
-
-- builder:
-    name: 'kvmfornfv-livemigration-daily-test-macro'
-    builders:
-      - shell:
-          !include-raw: ./kvmfornfv-download-artifact.sh
-      - shell:
-          !include-raw: ./kvmfornfv-test.sh
-
-#####################################
-# parameter macros
-#####################################
-- parameter:
-    name: 'kvmfornfv-defaults'
-    parameters:
-      - string:
-          name: GS_URL
-          default: artifacts.opnfv.org/$PROJECT{gs-pathname}
-          description: "URL to Google Storage."
index 76c475e..12b5eeb 100644 (file)
@@ -20,7 +20,6 @@
     project: 'laas'
     project-name: 'laas'
     build-days-to-keep: 7
-    tox-dir: 'dashboard'
 
     stream:
       - master
diff --git a/jjb/onosfw/onosfw-rtd-jobs.yaml b/jjb/onosfw/onosfw-rtd-jobs.yaml
deleted file mode 100644 (file)
index a7a5e73..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
-    name: onosfw-rtd
-    project: onosfw
-    project-name: onosfw
-
-    gerrit-skip-vote: true
-    project-pattern: 'onosfw'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-onosfw/47378/'
-    rtd-token: '1ad406bcdf2d627e2e18fbcd6605f3456b05bb3d'
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/onosfw/onosfw-views.yaml b/jjb/onosfw/onosfw-views.yaml
deleted file mode 100644 (file)
index 5e8920f..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
-    name: onosfw-view
-    views:
-      - project-view
-    project-name: onosfw
diff --git a/jjb/onosfw/onosfw.yaml b/jjb/onosfw/onosfw.yaml
deleted file mode 100644 (file)
index 58a50bd..0000000
+++ /dev/null
@@ -1,192 +0,0 @@
----
-- project:
-
-    name: onosfw
-
-    jobs:
-      - 'onosfw-verify-{stream}'
-      - 'onosfw-daily-{stream}'
-      - 'onosfw-build-{stream}'
-
-    # only master branch is enabled at the moment to keep no of jobs sane
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - euphrates:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-    project: 'onosfw'
-
-########################
-# job templates
-########################
-- job-template:
-    name: 'onosfw-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - 'builder-onosfw-helloworld'
-
-- job-template:
-    name: 'onosfw-daily-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm
-
-    triggers:
-      - timed: '@midnight'
-
-    builders:
-      - trigger-builds:
-          - project: 'onosfw-build-{stream}'
-            git-revision: true
-            block: true
-
-- job-template:
-    name: 'onosfw-build-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-      - string:
-          name: GS_URL
-          default: '$GS_BASE{gs-pathname}'
-          description: "Directory where the build artifact will be located upon the completion of the build."
-
-    scm:
-      - git-scm
-
-    builders:
-      - 'builder-onosfw-helloworld'
-
-########################
-# builder macros
-########################
-- builder:
-    name: 'builder-onosfw-build'
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o nounset
-          set -o pipefail
-
-          # log info to console
-          echo "Starting the build of $PROJECT. This could take some time..."
-          echo "--------------------------------------------------------"
-          echo
-
-          # create the cache directory if it doesn't exist
-          [[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
-          [[ -d $BUILD_DIRECTORY ]] || mkdir -p $BUILD_DIRECTORY
-
-          # set OPNFV_ARTIFACT_VERSION
-          export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-
-          # start the build
-          cd $WORKSPACE/
-          ./ci/build.sh  $BUILD_DIRECTORY/
-
-          # list the build artifacts
-          ls -al $BUILD_DIRECTORY
-
-          # save information regarding artifact into file
-          (
-              echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
-              echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
-              echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
-              echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-              echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/onosfw.iso | cut -d' ' -f1)"
-              echo "OPNFV_BUILD_URL=$BUILD_URL"
-          ) > $BUILD_DIRECTORY/opnfv.properties
-          echo
-          echo "--------------------------------------------------------"
-          echo "Done!"
-
-
-# yamllint disable rule:line-length
-- builder:
-    name: 'builder-onosfw-upload-artifact'
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o nounset
-          set -o pipefail
-
-          # log info to console
-          echo "Uploading the $INSTALLER artifact. This could take some time..."
-          echo "--------------------------------------------------------"
-          echo
-
-          # source the opnfv.properties to get ARTIFACT_VERSION
-          source $BUILD_DIRECTORY/opnfv.properties
-
-          # upload artifact and additional files to google storage
-          gsutil cp $BUILD_DIRECTORY/onosfw.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
-          gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-          gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-
-          echo
-          echo "--------------------------------------------------------"
-          echo "Done!"
-          echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-# yamllint enable rule:line-length
-
-
-- builder:
-    name: 'builder-onosfw-helloworld'
-    builders:
-      - shell: |
-          #!/bin/bash
-          echo "Hello world!"
diff --git a/jjb/ovno/ovno-rtd-jobs.yaml b/jjb/ovno/ovno-rtd-jobs.yaml
deleted file mode 100644 (file)
index c5d661d..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
-    name: ovno-rtd
-    project: ovno
-    project-name: ovno
-
-    gerrit-skip-vote: true
-    project-pattern: 'ovno'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ovno/47382/'
-    rtd-token: 'd393a62c6ee0b06979d0bb28f0b43e88208ce2c1'
-
-    jobs:
-      - '{project-name}-rtd-jobs'
index a413326..e8a227f 100644 (file)
           cristina.pauna@enea.com
           alexandru.avadanii@enea.com
           mbeierl@vmware.com
-    yardstick-arm-receivers: &yardstick-arm-receivers
-      receivers: >
-          cristina.pauna@enea.com
-          alexandru.avadanii@enea.com
-          delia.popescu@enea.com
     other-receivers: &other-receivers
       receivers: ''
 
           arch_tag: 'aarch64'
           <<: *master
           <<: *storperf-arm-receivers
-      - 'yardstick':
-          project: 'yardstick'
-          <<: *master
-          <<: *yardstick-arm-receivers
 
       # projects with jobs for stable/euphrates
       - 'dovetail':
           <<: *euphrates
           <<: *dovetail-arm-receivers
 
-      # projects with jobs for stable/hunter
-      - 'yardstick':
-          project: 'yardstick'
-          <<: *hunter
-          <<: *yardstick-arm-receivers
-
       # projects with jobs for stable/iruya
-      - 'yardstick':
-          project: 'yardstick'
-          <<: *iruya
-          <<: *yardstick-arm-receivers
       - 'storperf-master':
           project: 'storperf'
           dockerdir: 'docker/storperf-master'
index 24b1b85..ac1c574 100644 (file)
@@ -26,9 +26,6 @@
     storperf-receivers: &storperf-receivers
       receivers: >
           mbeierl@vmware.com
-    yardstick-receivers: &yardstick-receivers
-      receivers: >
-          rexlee8776@gmail.com
     other-receivers: &other-receivers
       receivers: ''
 
           dockerfile: 'docker/barometer-collectd-experimental/Dockerfile'
           <<: *master
           <<: *other-receivers
-      - 'barometer-collectd-master':
+      - 'barometer-collectd-latest':
           project: 'barometer'
           dockerdir: '.'
-          dockerfile: 'docker/barometer-collectd-master/Dockerfile'
+          dockerfile: 'docker/barometer-collectd-latest/Dockerfile'
           <<: *master
           <<: *other-receivers
       - 'barometer-dma':
           dockerdir: 'docker/barometer-snmp'
           <<: *master
           <<: *other-receivers
-      - 'bottlenecks':
-          project: 'bottlenecks'
-          <<: *master
-          <<: *other-receivers
       - 'clover':
           project: 'clover'
           dockerdir: '.'
           dockerfile: 'Dockerfile'
           <<: *master
           <<: *other-receivers
-      - 'cperf':
-          project: 'cperf'
-          <<: *master
-          <<: *other-receivers
       - 'dovetail':
           project: 'dovetail'
           <<: *master
           dockerfile: 'worker/Dockerfile'
           <<: *master
           <<: *laas_receivers
-      - 'qtip':
-          project: 'qtip'
-          dockerdir: '.'
-          dockerfile: 'docker/Dockerfile.local'
-          <<: *master
-          <<: *other-receivers
-      - 'qtip-nettest':
-          project: 'qtip'
-          dockerdir: 'contrib/nettest'
-          <<: *master
-          <<: *other-receivers
       - 'storperf-master':
           project: 'storperf'
           dockerdir: 'docker/storperf-master'
           arch_tag: 'x86_64'
           <<: *master
           <<: *storperf-receivers
-      - 'yardstick':
-          project: 'yardstick'
-          <<: *master
-          <<: *yardstick-receivers
-      - 'yardstick-ubuntu-18.04':
-          project: 'yardstick'
-          dockerfile: 'Dockerfile_ubuntu18'
-          <<: *master
-          <<: *yardstick-receivers
-      - 'yardstick-image-k8s':
-          project: 'yardstick'
-          dockerdir: 'docker/k8s'
-          <<: *master
-          <<: *yardstick-receivers
 
       # projects with jobs for hunter
-      - 'bottlenecks':
-          project: 'bottlenecks'
-          <<: *hunter
-          <<: *other-receivers
       - 'dovetail':
           project: 'dovetail'
           <<: *hunter
           <<: *other-receivers
-      - 'yardstick':
-          project: 'yardstick'
-          <<: *hunter
-          <<: *yardstick-receivers
 
       # projects with jobs for iruya
-      - 'yardstick':
-          project: 'yardstick'
-          <<: *iruya
-          <<: *yardstick-receivers
       - 'barometer-dma':
           project: 'barometer'
           dockerdir: 'docker/barometer-dma'
     jobs:
       - "{dockerrepo}-docker-build-push-{stream}"
 
-- project:
-
-    name: opnfv-monitor-docker        # projects which only monitor a dedicated file or path
-
-    dockerfile: "Dockerfile"
-    dockerdir: "docker"
-    arch_tag: ""
-    extra_build_args: ""
-
-    project:
-      # projects with jobs for master
-      - 'daisy':
-          dockerrepo: 'daisy'
-          <<: *master
-
-    jobs:
-      - '{project}-docker-build-push-monitor-{stream}'
-
 ########################
 # job templates
 ########################
       - email:
           recipients: '{receivers}'
       - email-jenkins-admins-on-failure
-
-- job-template:
-    name: '{project}-docker-build-push-monitor-{stream}'
-    disabled: '{obj:disabled}'
-    parameters: *parameters
-
-    scm:
-      - git-scm
-
-    builders: *builders
-
-    # trigger only matching the file name
-    triggers:
-      - gerrit:
-          trigger-on:
-            - change-merged-event
-            - comment-added-contains-event:
-                comment-contains-value: 'remerge'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              file-paths:
-                - compare-type: ANT
-                  pattern: 'docker/**'
index d23779c..cb318d1 100644 (file)
@@ -18,6 +18,7 @@
       - hunter
       - iruya
       - jerma
+      - kali
 
     jobs:
       - 'releng-release-{stream}-verify'
diff --git a/jjb/sdnvpn/sdnvpn-rtd-jobs.yaml b/jjb/sdnvpn/sdnvpn-rtd-jobs.yaml
deleted file mode 100644 (file)
index a01544f..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
-    name: sdnvpn-rtd
-    project: sdnvpn
-    project-name: sdnvpn
-
-    project-pattern: 'sdnvpn'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-sdnvpn/47391/'
-    rtd-token: '1efdc48a9819be55a28137937674f1f744d02fe0'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          disabled: false
-      - gambia:
-          branch: 'stable/{stream}'
-          disabled: false
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/sdnvpn/sdnvpn-views.yaml b/jjb/sdnvpn/sdnvpn-views.yaml
deleted file mode 100644 (file)
index dfa6dfa..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
-    name: sdnvpn-view
-    views:
-      - project-view
-    project-name: sdnvpn
diff --git a/jjb/sfc/sfc-project-jobs.yaml b/jjb/sfc/sfc-project-jobs.yaml
deleted file mode 100644 (file)
index 47a6b54..0000000
+++ /dev/null
@@ -1,110 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
-    name: sfc-project-jobs
-
-    project: 'sfc'
-
-    jobs:
-      - 'sfc-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - fraser:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: true
-      - hunter: &hunter
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-- job-template:
-    name: 'sfc-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'opnfv-build-ubuntu-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - sfc-unit-tests-and-docs
-
-    publishers:
-      - sfc-unit-tests-and-docs-publisher
-
-################################
-# job builders
-################################
-
-- builder:
-    name: sfc-unit-tests-and-docs
-    builders:
-      - shell: |
-          #!/bin/bash
-          virtualenv /var/tmp/sfc
-          source /var/tmp/sfc/bin/activate
-          pip install tox
-          cd $WORKSPACE && tox
-
-################################
-# job publishers
-################################
-- publisher:
-    name: 'sfc-unit-tests-and-docs-publisher'
-    publishers:
-      - junit:
-          results: nosetests.xml
-      - cobertura:
-          report-file: "coverage.xml"
-          only-stable: "true"
-          fail-no-reports: "true"
-          health-auto-update: "true"
-          stability-auto-update: "true"
-          zoom-coverage-chart: "true"
-          targets:
-            - files:
-                healthy: 0
-                unhealthy: 0
-                failing: 0
-            - method:
-                healthy: 0
-                unhealthy: 0
-                failing: 0
-      - email-jenkins-admins-on-failure
diff --git a/jjb/sfc/sfc-rtd-jobs.yaml b/jjb/sfc/sfc-rtd-jobs.yaml
deleted file mode 100644 (file)
index a65da54..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
-    name: sfc-rtd
-    project: sfc
-    project-name: sfc
-
-    project-pattern: 'sfc'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-sfc/47392/'
-    rtd-token: 'bc4419f4dded5c816071b042ac32c03ac6108700'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          disabled: false
-      - hunter:
-          branch: 'stable/{stream}'
-          disabled: false
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/sfc/sfc-views.yaml b/jjb/sfc/sfc-views.yaml
deleted file mode 100644 (file)
index b2884ba..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- project:
-    name: sfc-view
-    views:
-      - common-view
-    view-name: sfc
-    view-regex: ^sfc.*
diff --git a/jjb/snaps/snaps-rtd-jobs.yaml b/jjb/snaps/snaps-rtd-jobs.yaml
deleted file mode 100644 (file)
index 2159c1a..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
----
-- project:
-    name: snaps-rtd
-    project: snaps
-    project-name: snaps
-
-    project-pattern: 'snaps'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-snaps/47393/'
-    rtd-token: '8fa2d732997534df1e91a87d6dc3ee60bb56508b'
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/snaps/snaps-verify-jobs.yaml b/jjb/snaps/snaps-verify-jobs.yaml
deleted file mode 100644 (file)
index 2055bc1..0000000
+++ /dev/null
@@ -1,81 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
-    name: snaps
-
-    project: '{name}'
-
-    jobs:
-      - 'snaps-verify-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: true
-      - hunter: &hunter
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-      - fraser:
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: true
-
-- job-template:
-    name: 'snaps-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: false
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - string:
-          name: DEPLOYMENT_HOST_IP
-          default: 192.168.122.2
-          description: 'IP of the deployment node'
-      - string:
-          name: CONTROLLER_IP
-          default: 192.168.122.3
-          description: 'IP of the controller node'
-      - 'intel-virtual10-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              disable-strict-forbidden-file-verification: 'true'
-              forbidden-file-paths:
-                - compare-type: ANT
-                  pattern: 'docs/**|.gitignore'
-
-    builders:
-      - shell: |
-          #!/bin/bash
-
-          cd $WORKSPACE/ci
-          ./run_tests.sh $DEPLOYMENT_HOST_IP $CONTROLLER_IP
diff --git a/jjb/snaps/snaps-views.yaml b/jjb/snaps/snaps-views.yaml
deleted file mode 100644 (file)
index a4fc90e..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
-    name: snaps-view
-    views:
-      - project-view
-    project-name: snaps
diff --git a/jjb/ves/ves-rtd-jobs.yaml b/jjb/ves/ves-rtd-jobs.yaml
deleted file mode 100644 (file)
index 67b611b..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
-    name: ves-rtd
-    project: ves
-    project-name: ves
-
-    gerrit-skip-vote: true
-    project-pattern: 'ves'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ves/47396/'
-    rtd-token: 'ea5026fc44841e7721529b95a9ebc1b29950e2ce'
-
-    jobs:
-      - '{project-name}-rtd-jobs'
diff --git a/jjb/ves/ves-views.yaml b/jjb/ves/ves-views.yaml
deleted file mode 100644 (file)
index 6331a6d..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
-    name: ves-view
-    views:
-      - project-view
-    project-name: ves
diff --git a/jjb/ves/ves.yaml b/jjb/ves/ves.yaml
deleted file mode 100644 (file)
index f8c5da2..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
-    name: ves
-
-    project: '{name}'
-
-    jobs:
-      - '{project}-verify-basic'
diff --git a/jjb/yardstick/yardstick-cleanup.sh b/jjb/yardstick/yardstick-cleanup.sh
deleted file mode 100755 (executable)
index 47bf9bd..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-[[ ${CI_DEBUG} == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-# Remove dangling opnfv/yardstick*:<none> images along with their containers
-dangling_images=($(docker images -f "dangling=true" | awk '/opnfv[/]yardstick/ {print $3}'))
-if [[ ${#dangling_images[@]} -ne 0 ]] ; then
-    echo "Removing opnfv/yardstick:<none> images and their containers..."
-    for image_id in "${dangling_images[@]}"; do
-        echo "      Removing image_id: $image_id and its containers"
-        containers=$(docker ps -a | awk "/${image_id}/ {print \$1}")
-        if [[ -n "$containers" ]];then
-            docker rm -f "${containers}" >${redirect}
-        fi
-        docker rmi "${image_id}" >${redirect}
-    done
-fi
-
-echo "Cleaning up docker containers/images..."
-# Remove any previously running opnfv/yardstick containers if they exist
-if docker ps -a | grep -q opnfv/yardstick; then
-    echo "Removing existing opnfv/yardstick containers..."
-    docker ps -a | grep opnfv/yardstick | awk '{print $1}' | xargs docker rm -f >${redirect}
-
-fi
-
-# Remove existing opnfv/yardstick images if they exist
-if docker images | grep -q opnfv/yardstick; then
-    echo "Docker images to remove:"
-    docker images | head -1 && docker images | grep opnfv/yardstick
-    image_ids=($(docker images | awk '/opnfv[/]yardstick/ {print $3}'))
-    for id in "${image_ids[@]}"; do
-        echo "Removing docker image id $id..."
-        docker rmi "${id}" >${redirect}
-    done
-fi
-
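
For context on the deleted yardstick-cleanup.sh above: it enumerates dangling opnfv/yardstick images and their containers by parsing docker output with awk. A minimal sketch of the same cleanup written against Docker's own filter flags is shown below, assuming only the opnfv/yardstick naming used in the script; it is not the script the job actually ran.

#!/bin/bash
# Hypothetical equivalent of yardstick-cleanup.sh using docker filters.
for image in $(docker images --format '{{.Repository}}:{{.Tag}}' | grep '^opnfv/yardstick'); do
    # remove any containers created from this image, then the image itself
    containers=$(docker ps -aq --filter "ancestor=${image}")
    [ -n "${containers}" ] && docker rm -f ${containers}
    docker rmi "${image}" || true
done
# finally drop any remaining dangling layers
docker image prune -f
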
diff --git a/jjb/yardstick/yardstick-daily-jobs.yaml b/jjb/yardstick/yardstick-daily-jobs.yaml
deleted file mode 100644 (file)
index e12eee7..0000000
+++ /dev/null
@@ -1,509 +0,0 @@
----
-###################################
-# job configuration for yardstick
-###################################
-- project:
-    name: yardstick
-
-    project: '{name}'
-
-    # -------------------------------
-    # BRANCH ANCHORS
-    # -------------------------------
-    master: &master
-      stream: master
-      branch: '{stream}'
-      gs-pathname: ''
-      docker-tag: 'latest'
-    hunter: &hunter
-      stream: hunter
-      branch: 'stable/{stream}'
-      gs-pathname: '{stream}'
-      docker-tag: 'stable'
-    # -------------------------------
-    # POD, INSTALLER, AND BRANCH MAPPING
-    # -------------------------------
-    #    Installers using labels
-    #            CI PODs
-    # This section should only contain the installers
-    # that have been switched over to using slave labels
-    # -------------------------------
-    pod:
-      # apex CI PODs
-      - virtual:
-          slave-label: apex-virtual-master
-          installer: apex
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - baremetal:
-          slave-label: apex-baremetal-master
-          installer: apex
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - virtual:
-          slave-label: apex-virtual-master
-          installer: apex
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      - baremetal:
-          slave-label: apex-baremetal-master
-          installer: apex
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      # fuel CI PODs
-      - baremetal:
-          slave-label: fuel-baremetal
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - virtual:
-          slave-label: fuel-virtual
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - baremetal:
-          slave-label: fuel-baremetal
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      - virtual:
-          slave-label: fuel-virtual
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      # armband CI PODs
-      - armband-baremetal:
-          slave-label: armband-baremetal
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - armband-virtual:
-          slave-label: armband-virtual
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - armband-baremetal:
-          slave-label: armband-baremetal
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      - armband-virtual:
-          slave-label: armband-virtual
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      # joid CI PODs
-      - baremetal:
-          slave-label: joid-baremetal
-          installer: joid
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - virtual:
-          slave-label: joid-virtual
-          installer: joid
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - baremetal:
-          slave-label: joid-baremetal
-          installer: joid
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      - virtual:
-          slave-label: joid-virtual
-          installer: joid
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      # daisy CI PODs
-      - baremetal:
-          slave-label: daisy-baremetal
-          installer: daisy
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - baremetal:
-          slave-label: daisy-baremetal
-          installer: daisy
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      - virtual:
-          slave-label: daisy-virtual
-          installer: daisy
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      # -------------------------------
-      #        Non-CI PODs
-      # -------------------------------
-      - orange-pod1:
-          slave-label: '{pod}'
-          installer: joid
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - itri-pod1:
-          slave-label: '{pod}'
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - zte-pod1:
-          slave-label: '{pod}'
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - zte-pod1:
-          slave-label: '{pod}'
-          installer: fuel
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      - zte-pod2:
-          slave-label: '{pod}'
-          installer: daisy
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - zte-pod3:
-          slave-label: '{pod}'
-          installer: daisy
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - zte-pod3:
-          slave-label: '{pod}'
-          installer: daisy
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      - zte-pod9:
-          slave-label: '{pod}'
-          installer: daisy
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - zte-pod9:
-          slave-label: '{pod}'
-          installer: daisy
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *hunter
-      - orange-pod2:
-          slave-label: '{pod}'
-          installer: joid
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-      - flex-pod1:
-          slave-label: '{pod}'
-          installer: apex
-          auto-trigger-name: 'daily-trigger-disabled'
-          <<: *master
-    # -------------------------------
-    testsuite:
-      - 'daily'
-
-    jobs:
-      - 'yardstick-{installer}-{pod}-{testsuite}-{stream}'
-
-################################
-# job templates
-################################
-- job-template:
-    name: 'yardstick-{installer}-{pod}-{testsuite}-{stream}'
-
-    disabled: false
-
-    concurrent: true
-
-    properties:
-      - logrotate-default
-      - throttle:
-          enabled: true
-          max-per-node: 1
-          option: 'project'
-
-    wrappers:
-      - build-name:
-          name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-      - timeout:
-          timeout: 180
-          abort: true
-
-    triggers:
-      - '{auto-trigger-name}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - '{installer}-defaults'
-      - '{slave-label}-defaults'
-      - 'yardstick-params-{slave-label}'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-odl_l2-nofeature-ha'
-      - string:
-          name: DOCKER_TAG
-          default: '{docker-tag}'
-          description: 'Tag to pull docker image'
-      - string:
-          name: YARDSTICK_SCENARIO_SUITE_NAME
-          default: opnfv_${{DEPLOY_SCENARIO}}_{testsuite}.yaml
-          description: 'Path to test scenario suite'
-      - string:
-          name: CI_DEBUG
-          default: 'false'
-          description: "Show debut output information"
-
-    scm:
-      - git-scm
-
-    builders:
-      - description-setter:
-          description: "POD: $NODE_NAME"
-      - 'yardstick-cleanup'
-      - 'yardstick-fetch-creds'
-      - 'yardstick-{testsuite}'
-      - 'yardstick-store-results'
-
-    publishers:
-      - email:
-          recipients: jean.gaoliang@huawei.com limingjiang@huawei.com
-      - email-jenkins-admins-on-failure
-
-########################
-# builder macros
-########################
-- builder:
-    name: yardstick-daily
-    builders:
-      - shell:
-          !include-raw: ./yardstick-daily.sh
-
-- builder:
-    name: yardstick-fetch-creds
-    builders:
-      # yamllint disable rule:indentation
-      - conditional-step:
-          condition-kind: regex-match
-          regex: "os-.*"
-          label: '$DEPLOY_SCENARIO'
-          steps:
-            - shell:
-                !include-raw: ../../utils/fetch_os_creds.sh
-      - conditional-step:
-          condition-kind: regex-match
-          regex: "k8-.*"
-          label: '$DEPLOY_SCENARIO'
-          steps:
-            - shell:
-                !include-raw: ./yardstick-get-k8s-conf.sh
-
-- builder:
-    name: yardstick-store-results
-    builders:
-      - shell:
-          !include-raw: ../../utils/push-test-logs.sh
-
-- builder:
-    name: yardstick-cleanup
-    builders:
-      - shell:
-          !include-raw: ./yardstick-cleanup.sh
-########################
-# parameter macros
-########################
-- parameter:
-    name: 'yardstick-params-apex-virtual-master'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-apex-baremetal-master'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-apex-virtual-hunter'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-apex-baremetal-hunter'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-fuel-baremetal'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-fuel-virtual'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-armband-baremetal'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-armband-virtual'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-joid-baremetal'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-joid-virtual'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-intel-pod8'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-daisy-baremetal'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-daisy-virtual'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-itri-pod1'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-zte-pod1'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-zte-pod2'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-zte-pod3'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-zte-pod9'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-orange-pod1'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-orange-pod2'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-virtual'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-huawei-pod3'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-huawei-pod4'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
-    name: 'yardstick-params-flex-pod1'
-    parameters:
-      - string:
-          name: YARDSTICK_DB_BACKEND
-          default: '-i 104.197.68.199:8086'
-          description: 'Arguments to use in order to choose the backend DB'
-
-#######################
-## trigger macros
-#######################
-# trigger for PODs to only run yardstick test suites
-- trigger:
-    name: 'yardstick-daily-huawei-pod3-trigger'
-    triggers:
-      - timed: '0 1 * * *'
-
-- trigger:
-    name: 'yardstick-daily-huawei-pod4-trigger'
-    triggers:
-      - timed: ''
diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh
deleted file mode 100755 (executable)
index 91b3e8e..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-set -e
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-rc_file_vol=""
-cacert_file_vol=""
-sshkey=""
-
-rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
-
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
-    instack_mac=$(sudo virsh domiflist undercloud | grep default | \
-                  grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
-    INSTALLER_IP=$(/usr/sbin/arp -e | grep ${instack_mac} | awk {'print $1'})
-    sshkey="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
-    if [[ -n $(sudo iptables -L FORWARD |grep "REJECT"|grep "reject-with icmp-port-unreachable") ]]; then
-        #note: this happens only in opnfv-lf-pod1
-        sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
-        sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
-    fi
-fi
-
-if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
-    if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
-        rc_file_vol="-v ${HOME}/admin.conf:/etc/yardstick/admin.conf"
-    else
-        # On a production lab the creds may be retrieved dynamically;
-        # they are on the jumphost, always in the same folder
-        rc_file_vol="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
-        # On a dev lab the credentials may not be the default ones; provide a path to mount them
-        # into docker, replacing the default with the customized one from the Jenkins config
-    fi
-elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
-    if [[ "${DEPLOY_SCENARIO:0:2}" == 'k8' ]]; then
-        rc_file_vol="-v ${HOME}/admin.conf:/etc/yardstick/admin.conf"
-    else
-        cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert"
-    fi
-    sshkey="-v ${SSH_KEY}:/root/.ssh/mcp.rsa"
-fi
-
-opts="--privileged=true --rm"
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-    -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NETWORK=${EXTERNAL_NETWORK} \
-    -e YARDSTICK_BRANCH=${BRANCH} -e BRANCH=${BRANCH} \
-    -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e CI_DEBUG=true"
-
-if [[ "${INSTALLER_TYPE}" == 'fuel' ]]; then
-    envs+=" -e SSH_KEY=/root/.ssh/mcp.rsa"
-fi
-
-# Pull the image with correct tag
-DOCKER_REPO='opnfv/yardstick'
-if [ "$(uname -m)" = 'aarch64' ]; then
-    DOCKER_REPO="${DOCKER_REPO}_$(uname -m)"
-fi
-echo "Yardstick: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
-docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
-docker images
-
-# map log directory
-branch=${BRANCH##*/}
-dir_result="${HOME}/opnfv/yardstick/results/${branch}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-map_log_dir="-v ${dir_result}:/tmp/yardstick"
-
-# Run docker
-cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} \
-exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
-
-echo "Yardstick: Running docker cmd: ${cmd}"
-${cmd}
-
-echo "Yardstick: done!"
diff --git a/jjb/yardstick/yardstick-get-k8s-conf.sh b/jjb/yardstick/yardstick-get-k8s-conf.sh
deleted file mode 100755 (executable)
index 3af2dcb..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-set -e
-
-dest_path="$HOME/admin.conf"
-
-if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
-    if [[ ${INSTALLER_TYPE} == 'joid' ]];then
-        juju scp kubernetes-master/0:config "${dest_path}"
-    elif [[ ${INSTALLER_TYPE} == 'fuel' ]];then
-        echo "Getting kubernetes config ..."
-        docker cp -L fuel:/opt/kubernetes.config "${dest_path}"
-    fi
-fi
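
The deleted helper above copies the cluster kubeconfig to $HOME/admin.conf for k8 scenarios. Purely as an illustration, a quick sanity check of that file before mounting it into the yardstick container might be the one-liner below; kubectl is an assumption here and is not provided by anything in this change.

# Hypothetical sanity check of the kubeconfig fetched by yardstick-get-k8s-conf.sh
kubectl --kubeconfig "$HOME/admin.conf" get nodes -o wide
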
diff --git a/jjb/yardstick/yardstick-project-jobs.yaml b/jjb/yardstick/yardstick-project-jobs.yaml
deleted file mode 100644 (file)
index e4d0be6..0000000
+++ /dev/null
@@ -1,194 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
-    name: yardstick-project-jobs
-
-    project: 'yardstick'
-
-    jobs:
-      - 'yardstick-verify-{stream}'
-      - 'yardstick-merge-{stream}'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          gs-pathname: ''
-          disabled: false
-      - iruya: &iruya
-          branch: 'stable/{stream}'
-          gs-pathname: '/{stream}'
-          disabled: false
-
-################################
-# job templates
-################################
-
-- job-template:
-    name: 'yardstick-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'ericsson-build4-defaults'
-
-    scm:
-      - git-scm-gerrit
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 30
-          fail: true
-
-    builders:
-      - yardstick-unit-tests-python-3
-      - yardstick-functional-tests-python-3
-      - yardstick-coverage-tests
-      - yardstick-pep8-tests
-
-- job-template:
-    name: 'yardstick-merge-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-          branch: '{branch}'
-      - 'ericsson-build4-defaults'
-      - string:
-          name: GS_URL
-          default: '$GS_BASE{gs-pathname}'
-          description: "Directory where the build artifact will be located upon\
-            \ the completion of the build."
-
-    scm:
-      - git-scm
-
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - change-merged-event
-            - comment-added-contains-event:
-                comment-contains-value: 'remerge'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-
-    wrappers:
-      - ssh-agent-wrapper
-      - timeout:
-          timeout: 30
-          fail: true
-
-    builders:
-      - yardstick-unit-tests-python-3
-      - yardstick-functional-tests-python-3
-      - yardstick-coverage-tests
-      - yardstick-pep8-tests
-
-################################
-# job builders
-################################
-
-- builder:
-    name: yardstick-unit-tests-python-3
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o pipefail
-
-          sudo apt-get -y autoremove
-          sudo apt-get install -y build-essential python-dev python3-dev
-          sudo apt-get -y clean && sudo apt-get -y autoremove
-
-          echo "Running unit tests in Python 3 ..."
-          cd $WORKSPACE
-          tox -epy3
-
-- builder:
-    name: yardstick-functional-tests-python-3
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o pipefail
-
-          sudo apt-get -y autoremove
-          sudo apt-get install -y build-essential python-dev python3-dev rabbitmq-server
-          sudo apt-get -y clean && sudo apt-get -y autoremove
-
-          echo "Configure RabbitMQ service"
-          sudo service rabbitmq-server restart
-          sudo rabbitmqctl start_app
-          if [ -z "$(sudo rabbitmqctl list_users | grep yardstick)" ]; then
-            sudo rabbitmqctl add_user yardstick yardstick
-            sudo rabbitmqctl set_permissions yardstick ".*" ".*" ".*"
-          fi
-
-          echo "Running functional tests in Python 3 ..."
-          cd $WORKSPACE
-          tox -efunctional-py3
-
-- builder:
-    name: yardstick-coverage-tests
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o pipefail
-
-          sudo apt-get -y autoremove
-          sudo apt-get install -y build-essential python-dev python3-dev
-          sudo apt-get -y clean && sudo apt-get -y autoremove
-
-          echo "Running coverage tests ..."
-          cd $WORKSPACE
-          tox -ecoverage
-
-- builder:
-    name: yardstick-pep8-tests
-    builders:
-      - shell: |
-          #!/bin/bash
-          set -o errexit
-          set -o pipefail
-
-          sudo apt-get -y autoremove
-          sudo apt-get install -y build-essential python-dev python3-dev
-          sudo apt-get -y clean && sudo apt-get -y autoremove
-
-          echo "Running style guidelines (PEP8) tests ..."
-          cd $WORKSPACE
-          tox -epep8
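
The four deleted builders above install the same packages and then run one tox environment each (py3, functional-py3, coverage, pep8). Assuming those environments exist in yardstick's tox.ini as the builders imply, a local reproduction of the full verify run is sketched below; it is an illustration, not a job definition.

# Hypothetical local reproduction of the deleted yardstick verify builders.
sudo apt-get install -y build-essential python-dev python3-dev rabbitmq-server
sudo service rabbitmq-server restart
cd "$WORKSPACE"
tox -e py3,functional-py3,coverage,pep8
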
diff --git a/jjb/yardstick/yardstick-rtd-jobs.yaml b/jjb/yardstick/yardstick-rtd-jobs.yaml
deleted file mode 100644 (file)
index 213ec6a..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
-    name: yardstick-rtd
-    project: yardstick
-    project-name: yardstick
-
-    project-pattern: 'yardstick'
-    rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-yardstick/47399/'
-    rtd-token: '6aa883824f3917c7db5ffa1fe9168817fb5feb68'
-
-    stream:
-      - master:
-          branch: '{stream}'
-          disabled: false
-      - hunter:
-          branch: 'stable/{stream}'
-          disabled: false
-
-    jobs:
-      - '{project-name}-rtd-jobs'
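
Every *-rtd-jobs project removed in this change passes an rtd-build-url and rtd-token to the shared '{project-name}-rtd-jobs' template. For illustration only, triggering a Read the Docs generic webhook of that kind normally reduces to a POST such as the one below, using the URL and token from the deleted yardstick file above; the exact parameters the template sends are an assumption.

# Hypothetical manual trigger of the yardstick Read the Docs webhook.
curl -X POST \
    -d "branches=master" \
    -d "token=6aa883824f3917c7db5ffa1fe9168817fb5feb68" \
    "https://readthedocs.org/api/v2/webhook/opnfv-yardstick/47399/"
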
diff --git a/releases/kali/functest.yaml b/releases/kali/functest.yaml
new file mode 100644 (file)
index 0000000..efe0500
--- /dev/null
@@ -0,0 +1,15 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+  - name: stable/kali
+    location:
+      functest: f74088bb1ae93feaf56c7ec7d1f1e78c97d8de9a
+  - name: stable/kali
+    location:
+      functest-kubernetes: 83d3161c52c8cc8d9e4f52d7a693bbcc32508379
+  - name: stable/kali
+    location:
+      functest-xtesting: 0997d4c739baf30d13529d4408b3761e5c5e8919
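
The new releases/kali/functest.yaml pins the stable/kali branch of each functest repository to a specific commit. As a sketch of what the release tooling ultimately does with such a pin (not the actual automation in this repository), creating the branch in the functest repo amounts to:

# Hypothetical branch creation from the SHA pinned in releases/kali/functest.yaml;
# the remote name 'origin' is an assumption.
git push origin f74088bb1ae93feaf56c7ec7d1f1e78c97d8de9a:refs/heads/stable/kali
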