# -------------------------------
scenario:
- 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
deploy-scenario: 'os-nosdn-nofeature'
installer-type: 'osa'
- xci-flavor: 'mini'
- - 'os-nosdn-osm-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- deploy-scenario: 'os-nosdn-osm'
- installer-type: 'osa'
- xci-flavor: 'mini'
- - 'os-odl-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- deploy-scenario: 'os-odl-nofeature'
- installer-type: 'osa'
- xci-flavor: 'mini'
- - 'os-odl-bgpvpn-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- deploy-scenario: 'os-odl-bgpvpn'
- installer-type: 'osa'
- xci-flavor: 'mini'
- - 'os-odl-sfc-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- deploy-scenario: 'os-odl-sfc'
- installer-type: 'osa'
- xci-flavor: 'mini'
- - 'k8-nosdn-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- deploy-scenario: 'k8-nosdn-nofeature'
- installer-type: 'kubespray'
- xci-flavor: 'mini'
- - 'k8-canal-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- deploy-scenario: 'k8-canal-nofeature'
- installer-type: 'kubespray'
- xci-flavor: 'mini'
+ xci-flavor: 'noha'
- 'k8-calico-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
deploy-scenario: 'k8-calico-nofeature'
installer-type: 'kubespray'
- xci-flavor: 'mini'
- - 'k8-contiv-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- deploy-scenario: 'k8-contiv-nofeature'
- installer-type: 'kubespray'
- xci-flavor: 'mini'
- - 'k8-flannel-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- deploy-scenario: 'k8-flannel-nofeature'
- installer-type: 'kubespray'
- xci-flavor: 'mini'
- - 'k8-nosdn-istio-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- deploy-scenario: 'k8-nosdn-istio'
- installer-type: 'kubespray'
- xci-flavor: 'mini'
+ xci-flavor: 'noha'
# -------------------------------
# XCI PODs
# -------------------------------
pod:
- - virtual:
+ - baremetal:
<<: *master
# -------------------------------
# -------------------------------
distro:
- 'ubuntu':
- disabled: false
- slave-label: xci-virtual
+ disabled: true
+ slave-label: xci-baremetal
- 'centos':
disabled: true
- slave-label: xci-virtual
+ slave-label: xci-baremetal
- 'opensuse':
- disabled: false
- slave-label: xci-virtual
+ disabled: true
+ slave-label: xci-baremetal
# -------------------------------
# Phases
phase:
- 'deploy'
- 'functest'
+ - 'yardstick'
# -------------------------------
# jobs
- '^xci-functest.*'
- '^bifrost-.*periodic.*'
- '^osa-.*periodic.*'
- block-level: 'NODE'
+ blocking-level: 'NODE'
- logrotate-default
+ triggers:
+ - timed: '@midnight'
+
parameters:
- string:
name: DEPLOY_SCENARIO
- label:
name: SLAVE_LABEL
default: '{slave-label}'
+ all-nodes: false
+ node-eligibility: 'ignore-offline'
- string:
name: XCI_DISTRO
default: '{distro}'
+ - string:
+ name: FUNCTEST_VERSION
+ default: 'hunter'
- string:
name: FUNCTEST_MODE
default: 'tier'
- string:
name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
+ default: 'smoke'
- string:
name: CI_LOOP
default: 'daily'
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- triggers:
- - '{auto-trigger-name}'
-
wrappers:
- ssh-agent-wrapper
- build-timeout:
XCI_FLAVOR=$XCI_FLAVOR
CI_LOOP=$CI_LOOP
XCI_DISTRO=$XCI_DISTRO
+ FUNCTEST_VERSION=$FUNCTEST_VERSION
+ FUNCTEST_MODE=$FUNCTEST_MODE
+ FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
same-node: true
block: true
- trigger-builds:
XCI_FLAVOR=$XCI_FLAVOR
CI_LOOP=$CI_LOOP
XCI_DISTRO=$XCI_DISTRO
+ FUNCTEST_VERSION=$FUNCTEST_VERSION
FUNCTEST_MODE=$FUNCTEST_MODE
FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
same-node: true
build-step-failure-threshold: 'never'
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'xci-yardstick-{pod}-{distro}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ INSTALLER_TYPE=$INSTALLER_TYPE
+ XCI_FLAVOR=$XCI_FLAVOR
+ CI_LOOP=$CI_LOOP
+ XCI_DISTRO=$XCI_DISTRO
+ same-node: true
+ block: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
publishers:
# yamllint disable rule:line-length
- '^xci-functest.*'
- '^bifrost-.*periodic.*'
- '^osa-.*periodic.*'
- block-level: 'NODE'
+ blocking-level: 'NODE'
- throttle:
enabled: true
max-per-node: 1
default: 'osa'
- string:
name: XCI_FLAVOR
- default: 'mini'
+ default: 'noha'
- string:
name: XCI_DISTRO
default: 'ubuntu'
- label:
name: SLAVE_LABEL
default: '{slave-label}'
+ all-nodes: false
+ node-eligibility: 'ignore-offline'
+ - string:
+ name: FUNCTEST_VERSION
+ default: 'hunter'
- string:
name: FUNCTEST_MODE
default: 'tier'
- string:
name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
+ default: 'smoke'
- string:
name: CI_LOOP
default: 'daily'
# --------------------------
# builder macros
# --------------------------
-# These need to be properly fixed once the basic deployment and functest
-# jobs are working outside of clean vm.
-# One of the ugly fixes is moving functest preparation step into the
-# deployment job itself since functest preparation requires some of the
+# These need to be properly fixed once the basic deployment, functest, and
+# yardstick jobs are working outside of clean vm.
+# One of the ugly fixes is moving the test preparation step into the
+# deployment job itself, since test preparation requires some of the
# things from deployment job. With clean VM, this wasn't an issue
# since everything was on clean VM. When we move things out of clean
# VM, things are done in workspaces of the jobs that are different.
echo "Removing $vm"
sudo virsh destroy $vm > /dev/null 2>&1 || true
sudo virsh undefine $vm > /dev/null 2>&1 || true
+ sudo killall -r vbmc > /dev/null 2>&1 || true
+ sudo rm -rf /root/.vbmc > /dev/null 2>&1 || true
done
echo "---------------------------------------------------------------------------------"
- cd $WORKSPACE/xci && ./xci-deploy.sh
+ # Select the right PDF/IDF (pod/installer descriptor files) for this node
+ pdf=$WORKSPACE/xci/var/${NODE_NAME}-pdf.yml
+ idf=$WORKSPACE/xci/var/${NODE_NAME}-idf.yml
+ if [[ "$NODE_NAME" =~ "virtual" ]]; then
+ pdf=$WORKSPACE/xci/var/pdf.yml
+ idf=$WORKSPACE/xci/var/idf.yml
+ fi
- echo "Prepare OPNFV VM for Functest"
+ cd $WORKSPACE/xci && ./xci-deploy.sh -p $pdf -i $idf
+
+ echo "Prepare OPNFV VM for Tests"
echo "---------------------------------------------------------------------------------"
export XCI_PATH=$WORKSPACE
export XCI_VENV=${XCI_PATH}/venv
echo $var
done < ${XCI_PATH}/.cache/xci.env && cd ${XCI_PATH}/xci && \
ansible-playbook -i playbooks/dynamic_inventory.py playbooks/prepare-tests.yml
-
- echo "Run Functest"
+ ssh root@192.168.122.2 "/root/prepare-tests.sh"
echo "---------------------------------------------------------------------------------"
- builder:
builders:
- shell: |
#!/bin/bash
- set -o errexit
set -o pipefail
ssh root@192.168.122.2 "/root/run-functest.sh"
+ functest_exit=$?
+
+ case ${DEPLOY_SCENARIO[0]} in
+ os-*)
+ FUNCTEST_LOG=/root/functest-results/functest.log
+ ;;
+ k8-*)
+ FUNCTEST_LOG=/root/functest-results/functest-kubernetes.log
+ ;;
+ *)
+ echo "Unable to determine the installer. Exiting!"
+ exit $functest_exit
+ ;;
+ esac
+
+ echo "Functest log"
+ echo "---------------------------------------------------------------------------------"
+ ssh root@192.168.122.2 "cat $FUNCTEST_LOG"
+ echo "---------------------------------------------------------------------------------"
+ exit ${functest_exit}
+- builder:
+ name: 'xci-daily-yardstick-macro'
+ builders:
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+
+ ssh root@192.168.122.2 "/root/run-yardstick.sh"