From: Morgan Richomme
Date: Mon, 11 Sep 2017 07:31:59 +0000 (+0000)
Subject: Merge "run Functest daily in xci"
X-Git-Url: https://gerrit.opnfv.org/gerrit/gitweb?a=commitdiff_plain;h=9e686dafc73f390c13ca05bbeea7f7729aa57f38;hp=720399137357dc4eb2b1379e842c4e85c37f07a8;p=releng.git

Merge "run Functest daily in xci"
---
diff --git a/.yamllint b/.yamllint
new file mode 100644
index 000000000..4402f17fa
--- /dev/null
+++ b/.yamllint
@@ -0,0 +1,8 @@
+---
+extends: default
+
+rules:
+  # 120 chars should be enough and don't fail if a line is longer
+  line-length:
+    max: 120
+    level: warning
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
index 58d9f1a40..ad94ba3d7 100755
--- a/jjb/apex/apex-build.sh
+++ b/jjb/apex/apex-build.sh
@@ -28,8 +28,10 @@ fi
 BUILD_DIRECTORY=${WORKSPACE}/build

 # start the build
-cd $WORKSPACE/ci
-./build.sh $BUILD_ARGS
+pushd ${BUILD_DIRECTORY}
+make clean
+popd
+python3 apex/build.py $BUILD_ARGS
 RPM_VERSION=$(grep Version: $WORKSPACE/build/rpm_specs/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
 # list the contents of BUILD_OUTPUT directory
 echo "Build Directory is ${BUILD_DIRECTORY}/../.build"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index 4244f4427..a47e3a5db 100755
--- a/jjb/apex/apex-deploy.sh
+++ b/jjb/apex/apex-deploy.sh
@@ -10,6 +10,8 @@ echo "Starting the Apex deployment."
 echo "--------------------------------------------------------"
 echo

+sudo rm -rf /tmp/tmp*
+
 if [ -z "$DEPLOY_SCENARIO" ]; then
   echo "Deploy scenario not set!"
   exit 1
@@ -35,7 +37,7 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
   # Settings for deploying from git workspace
   DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
   NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
-  DEPLOY_CMD="${WORKSPACE}/ci/deploy.sh"
+  DEPLOY_CMD="opnfv-deploy --image-dir ${WORKSPACE}/.build"
   CLEAN_CMD="${WORKSPACE}/ci/clean.sh"
   RESOURCES="${WORKSPACE}/.build/"
   CONFIG="${WORKSPACE}/build"
@@ -46,6 +48,11 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then

   # Ensure artifacts were downloaded and extracted correctly
   # TODO(trozet) add verification here
+  # Install dev build
+  mkdir -p ~/tmp
+  mv -f .build ~/tmp/
+  sudo pip3 install --upgrade --force-reinstall .
+  mv -f ~/tmp/.build .
 else
   DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/"
   NETWORK_SETTINGS_DIR="/etc/opnfv-apex/"
@@ -63,7 +70,10 @@ fi

 # Install Dependencies
 # Make sure python34 dependencies are installed
-for dep_pkg in epel-release python34 python34-PyYAML python34-setuptools; do
+dependencies="epel-release python34 python34-devel libvirt-devel python34-pip \
+ansible python34-PyYAML python34-jinja2 python34-setuptools python-tox ansible"
+
+for dep_pkg in $dependencies; do
   if ! rpm -q ${dep_pkg} > /dev/null; then
     if ! sudo yum install -y ${dep_pkg}; then
       echo "Failed to install ${dep_pkg}"
@@ -72,31 +82,12 @@ for dep_pkg in epel-release python34 python34-PyYAML python34-setuptools; do
   fi
 done

-# Make sure jinja2 is installed
-for python_pkg in jinja2; do
-  if ! python3.4 -c "import $python_pkg"; then
-    echo "$python_pkg package not found for python3.4, attempting to install..."
-    if ! sudo easy_install-3.4 $python_pkg; then
-      echo -e "Failed to install $python_pkg package for python3.4"
-      exit 1
-    fi
-  fi
-done
-
 if [[ "$JOB_NAME" =~ "virtual" ]]; then
   # Make sure ipxe-roms-qemu package is updated to latest.
   # This package is needed for multi virtio nic PXE boot in virtual environment.
   sudo yum update -y ipxe-roms-qemu
-  if [ -z ${PYTHONPATH:-} ]; then
-    export PYTHONPATH=${WORKSPACE}/lib/python
-  else
-    export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python
-  fi
 fi

-# set env vars to deploy cmd
-DEPLOY_CMD="BASE=${BASE} IMAGES=${IMAGES} LIB=${LIB} ${DEPLOY_CMD}"
-
 if [ "$OPNFV_CLEAN" == 'yes' ]; then
   if sudo test -e '/root/inventory/pod_settings.yaml'; then
     clean_opts='-i /root/inventory/pod_settings.yaml'
@@ -104,7 +95,7 @@ if [ "$OPNFV_CLEAN" == 'yes' ]; then
   else
     clean_opts=''
   fi
-  sudo BASE=${BASE} LIB=${LIB} ${CLEAN_CMD} ${clean_opts}
+  sudo ${CLEAN_CMD} ${clean_opts}
 fi

 if echo ${DEPLOY_SCENARIO} | grep ipv6; then
diff --git a/jjb/apex/apex-download-artifact.sh b/jjb/apex/apex-download-artifact.sh
index 860cd60a5..a11fb65b8 100755
--- a/jjb/apex/apex-download-artifact.sh
+++ b/jjb/apex/apex-download-artifact.sh
@@ -21,7 +21,7 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
   tar -xvf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
   popd > /dev/null
 else
-  echo "Will download RPMs..."
+  echo "Will use RPMs..."

   # Must be RPMs/ISO
   echo "Downloading latest properties file"
@@ -33,13 +33,13 @@ else
   source $BUILD_DIRECTORY/opnfv.properties

   RPM_INSTALL_PATH=$(echo "http://"$OPNFV_RPM_URL | sed 's/\/'"$(basename $OPNFV_RPM_URL)"'//')
-  RPM_LIST=${RPM_INSTALL_PATH}/$(basename $OPNFV_RPM_URL)
+  RPM_LIST=$(basename $OPNFV_RPM_URL)

   # find version of RPM
   VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
   # build RPM List which already includes base Apex RPM
-  RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
-  RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"
+  RPM_LIST+=" opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
+  RPM_LIST+=" python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"

   # remove old / install new RPMs
   if rpm -q opnfv-apex > /dev/null; then
@@ -48,10 +48,20 @@ else
       sudo yum remove -y ${INSTALLED_RPMS}
     fi
   fi
+  # Create an rpms dir on slave
+  mkdir -p ~/apex_rpms
+  pushd ~/apex_rpms
+  # Remove older rpms which do not match this version
+  find . ! -name "*${VERSION_EXTENSION}.noarch.rpm" -type f -exec rm -f {} +
+  # Download RPM only if changed on server
+  for rpm in $RPM_LIST; do
+    wget -N ${RPM_INSTALL_PATH}/${rpm}
+  done
   if ! sudo yum install -y $RPM_LIST; then
     echo "Unable to install new RPMs: $RPM_LIST"
     exit 1
   fi
+  popd
 fi

 # TODO: Uncomment these lines to verify SHA512SUMs once the sums are
diff --git a/jjb/apex/apex-project-jobs.yml b/jjb/apex/apex-project-jobs.yml
new file mode 100644
index 000000000..973ad913a
--- /dev/null
+++ b/jjb/apex/apex-project-jobs.yml
@@ -0,0 +1,127 @@
+---
+- project:
+    name: 'apex-project-jobs'
+    project: 'apex'
+
+    stream:
+      - master: &master
+          branch: 'master'
+          gs-pathname: ''
+          concurrent-builds: 3
+          disabled: false
+
+      - danube: &danube
+          branch: 'stable/danube'
+          gs-pathname: '/danube'
+          concurrent-builds: 1
+          disabled: true
+
+    jobs:
+      - 'apex-build-{stream}'
+      - 'apex-verify-iso-{stream}'
+
+# Build phase
+- job-template:
+    name: 'apex-build-{stream}'
+
+    # Job template for builds
+    #
+    # Required Variables:
+    #     stream: branch with - in place of / (eg. stable)
+    #     branch: branch (eg. stable)
stable) + node: 'apex-build-{stream}' + + disabled: false + + concurrent: true + + parameters: + - '{project}-defaults' + - project-parameter: + project: '{project}' + branch: '{branch}' + - apex-parameter: + gs-pathname: '{gs-pathname}' + + scm: + - git-scm-gerrit + + wrappers: + - timeout: + timeout: 150 + fail: true + + properties: + - logrotate-default + - throttle: + max-per-node: '{concurrent-builds}' + max-total: 10 + option: 'project' + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify-iso-{stream}' + + builders: + - 'apex-build' + - inject: + properties-content: ARTIFACT_TYPE=rpm + - 'apex-upload-artifact' + +# ISO verify job +- job-template: + name: 'apex-verify-iso-{stream}' + + # Job template for builds + # + # Required Variables: + # stream: branch with - in place of / (eg. stable) + # branch: branch (eg. stable) + node: 'apex-virtual-{stream}' + + disabled: false + + concurrent: true + + parameters: + - project-parameter: + project: '{project}' + branch: '{branch}' + - apex-parameter: + gs-pathname: '{gs-pathname}' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: "Used for overriding the GIT URL coming from parameters macro." + + scm: + - git-scm + + properties: + - logrotate-default + - throttle: + max-per-node: 1 + max-total: 10 + option: 'project' + + builders: + - 'apex-iso-verify' + - inject: + properties-content: ARTIFACT_TYPE=iso + - 'apex-upload-artifact' + +######################## +# builder macros +######################## +- builder: + name: 'apex-build' + builders: + - shell: + !include-raw: ./apex-build.sh + +- builder: + name: 'apex-iso-verify' + builders: + - shell: + !include-raw: ./apex-iso-verify.sh diff --git a/jjb/apex/apex-unit-test.sh b/jjb/apex/apex-unit-test.sh index 12cb862b0..3112c9d36 100755 --- a/jjb/apex/apex-unit-test.sh +++ b/jjb/apex/apex-unit-test.sh @@ -8,10 +8,24 @@ echo "-------------------------------------------------------------------------- echo -pushd ci/ > /dev/null -sudo BASE="${WORKSPACE}/build" LIB="${WORKSPACE}/lib" ./clean.sh -./test.sh -popd +pushd build/ > /dev/null +for pkg in yamllint rpmlint iproute epel-release python34-devel python34-nose python34-PyYAML python-pep8 python34-mock python34-pip; do + if ! rpm -q ${pkg} > /dev/null; then + if ! sudo yum install -y ${pkg}; then + echo "Failed to install ${pkg} package..." + exit 1 + fi + fi +done + +# Make sure coverage is installed +if ! python3 -c "import coverage" &> /dev/null; then sudo pip3 install coverage; fi + +make rpmlint +make python-pep8-check +make yamllint +make python-tests +popd > /dev/null echo "--------------------------------------------------------" echo "Unit Tests Done!" 
diff --git a/jjb/apex/apex-workspace-cleanup.sh b/jjb/apex/apex-workspace-cleanup.sh deleted file mode 100755 index d2f71a562..000000000 --- a/jjb/apex/apex-workspace-cleanup.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash -set -o errexit -set -o nounset -set -o pipefail - -# delete everything that is in $WORKSPACE -sudo /bin/rm -rf $WORKSPACE diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml index 51f59f7b1..c71bee1d0 100644 --- a/jjb/apex/apex.yml +++ b/jjb/apex/apex.yml @@ -6,17 +6,16 @@ - 'apex-verify-gate-{stream}' - 'apex-verify-unit-tests-{stream}' - 'apex-runner-cperf-{stream}' - - 'apex-build-{stream}' - 'apex-deploy-{platform}-{stream}' - 'apex-daily-master' - 'apex-daily-danube' - 'apex-csit-promote-daily-{stream}' - 'apex-fdio-promote-daily-{stream}' - - 'apex-verify-iso-{stream}' - 'apex-{scenario}-baremetal-{scenario_stream}' - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}' - 'apex-upload-snapshot' - 'apex-create-snapshot' + - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}' # stream: branch with - in place of / (eg. stable-arno) # branch: branch (eg. stable/arno) stream: @@ -27,7 +26,6 @@ virtual-slave: 'apex-virtual-master' baremetal-slave: 'apex-baremetal-master' verify-scenario: 'os-odl-nofeature-ha' - concurrent-builds: 3 scenario_stream: 'master' - danube: &danube @@ -37,7 +35,6 @@ virtual-slave: 'apex-virtual-danube' baremetal-slave: 'apex-baremetal-danube' verify-scenario: 'os-odl_l3-nofeature-ha' - concurrent-builds: 1 scenario_stream: 'danube' disabled: true @@ -178,9 +175,37 @@ branches: - branch-compare-type: 'ANT' branch-pattern: '**/{branch}' + disable-strict-forbidden-file-verification: 'false' file-paths: - compare-type: ANT pattern: 'apex/tests/**' + forbidden-file-paths: + - compare-type: ANT + pattern: '*' + - compare-type: ANT + pattern: 'apex/*' + - compare-type: ANT + pattern: 'build/**' + - compare-type: ANT + pattern: 'lib/**' + - compare-type: ANT + pattern: 'config/**' + - compare-type: ANT + pattern: 'apex/build/**' + - compare-type: ANT + pattern: 'apex/common/**' + - compare-type: ANT + pattern: 'apex/inventory/**' + - compare-type: ANT + pattern: 'apex/network/**' + - compare-type: ANT + pattern: 'apex/overcloud/**' + - compare-type: ANT + pattern: 'apex/settings/**' + - compare-type: ANT + pattern: 'apex/undercloud/**' + - compare-type: ANT + pattern: 'apex/virtual/**' properties: - logrotate-default - throttle: @@ -234,9 +259,12 @@ branches: - branch-compare-type: 'ANT' branch-pattern: '**/{branch}' + disable-strict-forbidden-file-verification: 'true' file-paths: - compare-type: ANT - pattern: 'ci/**' + pattern: '*' + - compare-type: ANT + pattern: 'apex/*' - compare-type: ANT pattern: 'build/**' - compare-type: ANT @@ -244,12 +272,36 @@ - compare-type: ANT pattern: 'config/**' - compare-type: ANT - pattern: 'apex/**' + pattern: 'apex/build/**' + - compare-type: ANT + pattern: 'apex/common/**' + - compare-type: ANT + pattern: 'apex/inventory/**' + - compare-type: ANT + pattern: 'apex/network/**' + - compare-type: ANT + pattern: 'apex/overcloud/**' + - compare-type: ANT + pattern: 'apex/settings/**' + - compare-type: ANT + pattern: 'apex/undercloud/**' + - compare-type: ANT + pattern: 'apex/virtual/**' + forbidden-file-paths: + - compare-type: ANT + pattern: 'apex/tests/**' + - compare-type: ANT + pattern: 'docs/**' properties: - logrotate-default + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' - throttle: - max-per-node: 3 + max-per-node: 1 max-total: 10 option: 'project' @@ 
-372,11 +424,18 @@ pattern: 'lib/**' - compare-type: ANT pattern: 'config/**' + - compare-type: ANT + pattern: 'apex/**' properties: - logrotate-default + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' - throttle: - max-per-node: 3 + max-per-node: 1 max-total: 10 option: 'project' @@ -400,6 +459,10 @@ kill-phase-on: FAILURE abort-all-job: true git-revision: true + - shell: | + echo DEPLOY_SCENARIO=$(echo $GERRIT_EVENT_COMMENT_TEXT | grep start-gate-scenario | grep -Eo 'os-.*') > detected_scenario + - inject: + properties-file: detected_scenario - multijob: name: functest-smoke condition: SUCCESSFUL @@ -407,7 +470,7 @@ - name: 'functest-apex-virtual-suite-{stream}' current-parameters: false predefined-parameters: | - DEPLOY_SCENARIO={verify-scenario} + DEPLOY_SCENARIO=$DEPLOY_SCENARIO FUNCTEST_SUITE_NAME=healthcheck GERRIT_BRANCH=$GERRIT_BRANCH GERRIT_REFSPEC=$GERRIT_REFSPEC @@ -484,97 +547,6 @@ abort-all-job: false git-revision: false -# Build phase -- job-template: - name: 'apex-build-{stream}' - - # Job template for builds - # - # Required Variables: - # stream: branch with - in place of / (eg. stable) - # branch: branch (eg. stable) - node: '{build-slave}' - - disabled: false - - concurrent: true - - parameters: - - '{project}-defaults' - - project-parameter: - project: '{project}' - branch: '{branch}' - - apex-parameter: - gs-pathname: '{gs-pathname}' - - scm: - - git-scm-gerrit - - wrappers: - - timeout: - timeout: 150 - fail: true - - properties: - - logrotate-default - - throttle: - max-per-node: {concurrent-builds} - max-total: 10 - option: 'project' - - build-blocker: - use-build-blocker: true - block-level: 'NODE' - blocking-jobs: - - 'apex-verify-iso-{stream}' - - builders: - - 'apex-build' - - inject: - properties-content: ARTIFACT_TYPE=rpm - - 'apex-upload-artifact' - -# ISO verify job -- job-template: - name: 'apex-verify-iso-{stream}' - - # Job template for builds - # - # Required Variables: - # stream: branch with - in place of / (eg. stable) - # branch: branch (eg. stable) - node: '{virtual-slave}' - - disabled: false - - concurrent: true - - parameters: - - project-parameter: - project: '{project}' - branch: '{branch}' - - apex-parameter: - gs-pathname: '{gs-pathname}' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: "Used for overriding the GIT URL coming from parameters macro." 
- - scm: - - git-scm - - properties: - - logrotate-default - - throttle: - max-per-node: 1 - max-total: 10 - option: 'project' - - builders: - - 'apex-iso-verify' - - inject: - properties-content: ARTIFACT_TYPE=iso - - 'apex-upload-artifact' - # Deploy job - job-template: name: 'apex-deploy-{platform}-{stream}' @@ -590,7 +562,7 @@ wrappers: - timeout: - timeout: 120 + timeout: 140 fail: true parameters: @@ -630,7 +602,7 @@ description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO" - 'apex-download-artifact' - 'apex-deploy' - - 'apex-workspace-cleanup' + - 'clean-workspace' # Baremetal Deploy and Test @@ -790,6 +762,7 @@ current-parameters: false predefined-parameters: DEPLOY_SCENARIO=$DEPLOY_SCENARIO + enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/" kill-phase-on: NEVER abort-all-job: false git-revision: false @@ -1605,6 +1578,85 @@ abort-all-job: true git-revision: false +# Flex job +- job-template: + name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}' + + project-type: 'multijob' + + disabled: false + + node: 'flex-pod2' + + scm: + - git-scm + triggers: + - 'apex-{stream}' + parameters: + - '{project}-defaults' + - project-parameter: + project: '{project}' + branch: '{branch}' + - apex-parameter: + gs-pathname: '{gs-pathname}' + - string: + name: DEPLOY_SCENARIO + default: 'os-nosdn-nofeature-ha' + description: "Scenario to deploy with." + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + properties: + - logrotate-default + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' + - 'apex-runner.*' + - 'apex-.*-promote.*' + - 'apex-run.*' + - 'apex-.+-baremetal-.+' + - throttle: + max-per-node: 1 + max-total: 10 + option: 'project' + builders: + - description-setter: + description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO" + - multijob: + name: 'Baremetal Deploy' + condition: SUCCESSFUL + projects: + - name: 'apex-deploy-baremetal-{stream}' + node-parameters: true + current-parameters: true + predefined-parameters: | + OPNFV_CLEAN=yes + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC= + DEPLOY_SCENARIO=$DEPLOY_SCENARIO + kill-phase-on: FAILURE + abort-all-job: true + git-revision: false + - multijob: + name: Yardstick + condition: ALWAYS + projects: + - name: 'yardstick-apex-baremetal-daily-{stream}' + node-parameters: true + current-parameters: false + predefined-parameters: + DEPLOY_SCENARIO=$DEPLOY_SCENARIO + kill-phase-on: NEVER + abort-all-job: false + git-revision: false + ######################## # parameter macros ######################## @@ -1653,25 +1705,6 @@ - shell: !include-raw: ./apex-unit-test.sh -- builder: - name: 'apex-build' - builders: - - shell: - !include-raw: ./apex-build.sh - -- builder: - name: 'apex-workspace-cleanup' - builders: - - shell: - !include-raw: ./apex-workspace-cleanup.sh - -- builder: - name: 'apex-iso-verify' - builders: - - shell: - !include-raw: ./apex-iso-verify.sh - - - builder: name: 'apex-upload-artifact' builders: diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2 index 99076fbc6..356c718d5 100644 --- a/jjb/apex/apex.yml.j2 +++ b/jjb/apex/apex.yml.j2 @@ -6,17 +6,16 @@ - 'apex-verify-gate-{stream}' - 'apex-verify-unit-tests-{stream}' - 'apex-runner-cperf-{stream}' - - 'apex-build-{stream}' - 'apex-deploy-{platform}-{stream}' - 'apex-daily-master' - 
'apex-daily-danube' - 'apex-csit-promote-daily-{stream}' - 'apex-fdio-promote-daily-{stream}' - - 'apex-verify-iso-{stream}' - 'apex-{scenario}-baremetal-{scenario_stream}' - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}' - 'apex-upload-snapshot' - 'apex-create-snapshot' + - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}' # stream: branch with - in place of / (eg. stable-arno) # branch: branch (eg. stable/arno) stream: @@ -27,7 +26,6 @@ virtual-slave: 'apex-virtual-master' baremetal-slave: 'apex-baremetal-master' verify-scenario: 'os-odl-nofeature-ha' - concurrent-builds: 3 scenario_stream: 'master' - danube: &danube @@ -37,7 +35,6 @@ virtual-slave: 'apex-virtual-danube' baremetal-slave: 'apex-baremetal-danube' verify-scenario: 'os-odl_l3-nofeature-ha' - concurrent-builds: 1 scenario_stream: 'danube' disabled: true @@ -90,9 +87,37 @@ branches: - branch-compare-type: 'ANT' branch-pattern: '**/{branch}' + disable-strict-forbidden-file-verification: 'false' file-paths: - compare-type: ANT pattern: 'apex/tests/**' + forbidden-file-paths: + - compare-type: ANT + pattern: '*' + - compare-type: ANT + pattern: 'apex/*' + - compare-type: ANT + pattern: 'build/**' + - compare-type: ANT + pattern: 'lib/**' + - compare-type: ANT + pattern: 'config/**' + - compare-type: ANT + pattern: 'apex/build/**' + - compare-type: ANT + pattern: 'apex/common/**' + - compare-type: ANT + pattern: 'apex/inventory/**' + - compare-type: ANT + pattern: 'apex/network/**' + - compare-type: ANT + pattern: 'apex/overcloud/**' + - compare-type: ANT + pattern: 'apex/settings/**' + - compare-type: ANT + pattern: 'apex/undercloud/**' + - compare-type: ANT + pattern: 'apex/virtual/**' properties: - logrotate-default - throttle: @@ -146,9 +171,12 @@ branches: - branch-compare-type: 'ANT' branch-pattern: '**/{branch}' + disable-strict-forbidden-file-verification: 'true' file-paths: - compare-type: ANT - pattern: 'ci/**' + pattern: '*' + - compare-type: ANT + pattern: 'apex/*' - compare-type: ANT pattern: 'build/**' - compare-type: ANT @@ -156,12 +184,36 @@ - compare-type: ANT pattern: 'config/**' - compare-type: ANT - pattern: 'apex/**' + pattern: 'apex/build/**' + - compare-type: ANT + pattern: 'apex/common/**' + - compare-type: ANT + pattern: 'apex/inventory/**' + - compare-type: ANT + pattern: 'apex/network/**' + - compare-type: ANT + pattern: 'apex/overcloud/**' + - compare-type: ANT + pattern: 'apex/settings/**' + - compare-type: ANT + pattern: 'apex/undercloud/**' + - compare-type: ANT + pattern: 'apex/virtual/**' + forbidden-file-paths: + - compare-type: ANT + pattern: 'apex/tests/**' + - compare-type: ANT + pattern: 'docs/**' properties: - logrotate-default + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' - throttle: - max-per-node: 3 + max-per-node: 1 max-total: 10 option: 'project' @@ -284,11 +336,18 @@ pattern: 'lib/**' - compare-type: ANT pattern: 'config/**' + - compare-type: ANT + pattern: 'apex/**' properties: - logrotate-default + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' - throttle: - max-per-node: 3 + max-per-node: 1 max-total: 10 option: 'project' @@ -312,6 +371,10 @@ kill-phase-on: FAILURE abort-all-job: true git-revision: true + - shell: | + echo DEPLOY_SCENARIO=$(echo $GERRIT_EVENT_COMMENT_TEXT | grep start-gate-scenario | grep -Eo 'os-.*') > detected_scenario + - inject: + properties-file: detected_scenario - multijob: name: functest-smoke condition: SUCCESSFUL @@ -319,7 +382,7 @@ - 
name: 'functest-apex-virtual-suite-{stream}' current-parameters: false predefined-parameters: | - DEPLOY_SCENARIO={verify-scenario} + DEPLOY_SCENARIO=$DEPLOY_SCENARIO FUNCTEST_SUITE_NAME=healthcheck GERRIT_BRANCH=$GERRIT_BRANCH GERRIT_REFSPEC=$GERRIT_REFSPEC @@ -396,97 +459,6 @@ abort-all-job: false git-revision: false -# Build phase -- job-template: - name: 'apex-build-{stream}' - - # Job template for builds - # - # Required Variables: - # stream: branch with - in place of / (eg. stable) - # branch: branch (eg. stable) - node: '{build-slave}' - - disabled: false - - concurrent: true - - parameters: - - '{project}-defaults' - - project-parameter: - project: '{project}' - branch: '{branch}' - - apex-parameter: - gs-pathname: '{gs-pathname}' - - scm: - - git-scm-gerrit - - wrappers: - - timeout: - timeout: 150 - fail: true - - properties: - - logrotate-default - - throttle: - max-per-node: {concurrent-builds} - max-total: 10 - option: 'project' - - build-blocker: - use-build-blocker: true - block-level: 'NODE' - blocking-jobs: - - 'apex-verify-iso-{stream}' - - builders: - - 'apex-build' - - inject: - properties-content: ARTIFACT_TYPE=rpm - - 'apex-upload-artifact' - -# ISO verify job -- job-template: - name: 'apex-verify-iso-{stream}' - - # Job template for builds - # - # Required Variables: - # stream: branch with - in place of / (eg. stable) - # branch: branch (eg. stable) - node: '{virtual-slave}' - - disabled: false - - concurrent: true - - parameters: - - project-parameter: - project: '{project}' - branch: '{branch}' - - apex-parameter: - gs-pathname: '{gs-pathname}' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: "Used for overriding the GIT URL coming from parameters macro." - - scm: - - git-scm - - properties: - - logrotate-default - - throttle: - max-per-node: 1 - max-total: 10 - option: 'project' - - builders: - - 'apex-iso-verify' - - inject: - properties-content: ARTIFACT_TYPE=iso - - 'apex-upload-artifact' - # Deploy job - job-template: name: 'apex-deploy-{platform}-{stream}' @@ -502,7 +474,7 @@ wrappers: - timeout: - timeout: 120 + timeout: 140 fail: true parameters: @@ -542,7 +514,7 @@ description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO" - 'apex-download-artifact' - 'apex-deploy' - - 'apex-workspace-cleanup' + - 'clean-workspace' # Baremetal Deploy and Test @@ -702,6 +674,7 @@ current-parameters: false predefined-parameters: DEPLOY_SCENARIO=$DEPLOY_SCENARIO + enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/" kill-phase-on: NEVER abort-all-job: false git-revision: false @@ -1032,6 +1005,85 @@ abort-all-job: true git-revision: false +# Flex job +- job-template: + name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}' + + project-type: 'multijob' + + disabled: false + + node: 'flex-pod2' + + scm: + - git-scm + triggers: + - 'apex-{stream}' + parameters: + - '{project}-defaults' + - project-parameter: + project: '{project}' + branch: '{branch}' + - apex-parameter: + gs-pathname: '{gs-pathname}' + - string: + name: DEPLOY_SCENARIO + default: 'os-nosdn-nofeature-ha' + description: "Scenario to deploy with." 
+ - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + properties: + - logrotate-default + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' + - 'apex-runner.*' + - 'apex-.*-promote.*' + - 'apex-run.*' + - 'apex-.+-baremetal-.+' + - throttle: + max-per-node: 1 + max-total: 10 + option: 'project' + builders: + - description-setter: + description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO" + - multijob: + name: 'Baremetal Deploy' + condition: SUCCESSFUL + projects: + - name: 'apex-deploy-baremetal-{stream}' + node-parameters: true + current-parameters: true + predefined-parameters: | + OPNFV_CLEAN=yes + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC= + DEPLOY_SCENARIO=$DEPLOY_SCENARIO + kill-phase-on: FAILURE + abort-all-job: true + git-revision: false + - multijob: + name: Yardstick + condition: ALWAYS + projects: + - name: 'yardstick-apex-baremetal-daily-{stream}' + node-parameters: true + current-parameters: false + predefined-parameters: + DEPLOY_SCENARIO=$DEPLOY_SCENARIO + kill-phase-on: NEVER + abort-all-job: false + git-revision: false + ######################## # parameter macros ######################## @@ -1080,25 +1132,6 @@ - shell: !include-raw: ./apex-unit-test.sh -- builder: - name: 'apex-build' - builders: - - shell: - !include-raw: ./apex-build.sh - -- builder: - name: 'apex-workspace-cleanup' - builders: - - shell: - !include-raw: ./apex-workspace-cleanup.sh - -- builder: - name: 'apex-iso-verify' - builders: - - shell: - !include-raw: ./apex-iso-verify.sh - - - builder: name: 'apex-upload-artifact' builders: diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml index f1bff072c..faa5971e1 100644 --- a/jjb/armband/armband-ci-jobs.yml +++ b/jjb/armband/armband-ci-jobs.yml @@ -44,65 +44,17 @@ installer: fuel <<: *master #-------------------------------- -# NONE-CI POD's -#-------------------------------- -# euphrates -#-------------------------------- - - arm-pod2: - slave-label: arm-pod2 - installer: fuel - <<: *euphrates - - arm-pod5: - slave-label: arm-pod5 - installer: fuel - <<: *euphrates - - arm-pod4: - slave-label: arm-pod4 - installer: fuel - <<: *euphrates - - arm-virtual2: - slave-label: arm-virtual2 - installer: fuel - <<: *euphrates -#-------------------------------- -# master -#-------------------------------- - - arm-pod2: - slave-label: arm-pod2 - installer: fuel - <<: *master - - arm-pod5: - slave-label: arm-pod5 - installer: fuel - <<: *master - - arm-pod4: - slave-label: arm-pod4 - installer: fuel - <<: *master - - arm-virtual2: - slave-label: arm-virtual2 - installer: fuel - <<: *master -#-------------------------------- # scenarios #-------------------------------- scenario: # HA scenarios - 'os-nosdn-nofeature-ha': auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger' - - 'os-odl_l2-nofeature-ha': - auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger' - - 'os-odl_l3-nofeature-ha': - auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger' - - 'os-odl_l2-bgpvpn-ha': - auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger' - - 'os-odl_l2-sfc-ha': + - 'os-odl-nofeature-ha': auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger' # NOHA scenarios - - 'os-odl_l2-nofeature-noha': - auto-trigger-name: 
'{installer}-{scenario}-{pod}-{stream}-trigger' - - 'os-odl_l2-sfc-noha': + - 'os-nosdn-nofeature-noha': auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger' jobs: @@ -130,6 +82,7 @@ use-build-blocker: true blocking-jobs: - '{installer}-os-.*?-{pod}-daily-.*' + - 'armband-verify-.*' block-level: 'NODE' wrappers: @@ -143,14 +96,13 @@ - project-parameter: project: '{project}' branch: '{branch}' - - '{installer}-defaults' + - '{installer}-defaults': + gs-pathname: '{gs-pathname}' - '{slave-label}-defaults': installer: '{installer}' - string: name: DEPLOY_SCENARIO default: '{scenario}' - - armband-ci-parameter: - gs-pathname: '{gs-pathname}' builders: - trigger-builds: @@ -187,10 +139,10 @@ # 3.only proposed_tests testsuite here(refstack, ha, ipv6, bgpvpn) # 4.not used for release criteria or compliance, # only to debug the dovetail tool bugs with arm pods - # 5.only run against scenario os-(nosdn|odl_l2)-(nofeature-bgpvpn)-ha + # 5.only run against scenario os-(nosdn|odl)-(nofeature-bgpvpn)-ha - conditional-step: condition-kind: regex-match - regex: os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha + regex: os-(nosdn|odl)-(nofeature|bgpvpn)-ha label: '{scenario}' steps: - trigger-builds: @@ -228,14 +180,13 @@ - project-parameter: project: '{project}' branch: '{branch}' - - '{installer}-defaults' + - '{installer}-defaults': + gs-pathname: '{gs-pathname}' - '{slave-label}-defaults': installer: '{installer}' - string: name: DEPLOY_SCENARIO - default: 'os-odl_l2-nofeature-ha' - - armband-ci-parameter: - gs-pathname: '{gs-pathname}' + default: 'os-odl-nofeature-ha' scm: - git-scm @@ -246,38 +197,13 @@ builders: - shell: - !include-raw-escape: ./armband-download-artifact.sh - - shell: - !include-raw-escape: ./armband-deploy.sh + !include-raw-escape: ../fuel/fuel-deploy.sh publishers: - email: recipients: armband@enea.com - email-jenkins-admins-on-failure -######################## -# parameter macros -######################## -- parameter: - name: armband-ci-parameter - parameters: - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." - - string: - name: CACHE_DIRECTORY - default: $HOME/opnfv/cache/$INSTALLER_TYPE - description: "Directory where the cache to be used during the build is located." - - string: - name: GS_URL - default: artifacts.opnfv.org/$PROJECT{gs-pathname} - description: "URL to Google Storage." - - string: - name: SSH_KEY - default: "/tmp/mcp.rsa" - description: "Path to private SSH key to access environment nodes. For MCP deployments only." 
- ######################## # trigger macros ######################## @@ -285,376 +211,60 @@ #----------------------------------------------------------------- # Enea Armband CI Baremetal Triggers running against master branch #----------------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger' triggers: - timed: '0 1 * * *' - trigger: - name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger' - triggers: - - timed: '0 16 * * *' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-master-trigger' + name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-master-trigger' + name: 'fuel-os-odl-nofeature-ha-armband-baremetal-master-trigger' triggers: - - timed: '' - + - timed: '0 16 * * *' #---------------------------------------------------------------------- # Enea Armband CI Baremetal Triggers running against euphrates branch #---------------------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-euphrates-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-euphrates-trigger' + name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-euphrates-trigger' + name: 'fuel-os-odl-nofeature-ha-armband-baremetal-euphrates-trigger' triggers: - timed: '' #--------------------------------------------------------------- # Enea Armband CI Virtual Triggers running against master branch #--------------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-armband-virtual-master-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l3-nofeature-ha-armband-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-armband-virtual-master-trigger' + name: 'fuel-os-nosdn-nofeature-noha-armband-virtual-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-master-trigger' + name: 'fuel-os-odl-nofeature-ha-armband-virtual-master-trigger' triggers: - timed: '' #-------------------------------------------------------------------- # Enea Armband CI Virtual Triggers running against euphrates branch 
#-------------------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-armband-virtual-euphrates-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l3-nofeature-ha-armband-virtual-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-armband-virtual-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-euphrates-trigger' - triggers: - - timed: '' - -#-------------------------------------------------------------------- -# Enea Armband Non CI Virtual Triggers running against euphrates branch -#-------------------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-virtual2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-virtual2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-virtual2-euphrates-trigger' - triggers: - - timed: '' - -#-------------------------------------------------------------------- -# Enea Armband Non CI Virtual Triggers running against master branch -#-------------------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-virtual2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-virtual2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-virtual2-master-trigger' - triggers: - - timed: '' - -#---------------------------------------------------------- -# Enea Armband POD 2 Triggers running against master branch -#---------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-master-trigger' - 
triggers: - - timed: '' -#--------------------------------------------------------------- -# Enea Armband POD 2 Triggers running against euphrates branch -#--------------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-pod2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-pod2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-pod2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-euphrates-trigger' - triggers: - - timed: '' -#---------------------------------------------------------- -# Enea Armband POD 3 Triggers running against master branch -#---------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-pod5-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-pod5-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-pod5-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod5-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-pod5-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-pod5-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-pod5-master-trigger' - triggers: - - timed: '' -#--------------------------------------------------------------- -# Enea Armband POD 3 Triggers running against euphrates branch -#--------------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-pod5-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-pod5-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-pod5-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod5-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-pod5-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-pod5-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-pod5-euphrates-trigger' - triggers: - - timed: '' -#-------------------------------------------------------------------------- -# Enea Armband POD 3 Triggers running against master branch (aarch64 slave) -#-------------------------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-pod4-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-pod4-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-pod4-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod4-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-pod4-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-pod4-master-trigger' - triggers: - - timed: '' -- trigger: - name: 
'fuel-os-odl_l2-sfc-noha-arm-pod4-master-trigger' - triggers: - - timed: '' -#-------------------------------------------------------------------------- -# Enea Armband POD 3 Triggers running against euphrates branch (aarch64 slave) -#-------------------------------------------------------------------------- -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-pod4-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-pod4-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-pod4-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod4-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-pod4-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-pod4-euphrates-trigger' + name: 'fuel-os-nosdn-nofeature-noha-armband-virtual-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-pod4-euphrates-trigger' + name: 'fuel-os-odl-nofeature-ha-armband-virtual-euphrates-trigger' triggers: - timed: '' diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh deleted file mode 100755 index 05679aa27..000000000 --- a/jjb/armband/armband-deploy.sh +++ /dev/null @@ -1,127 +0,0 @@ -#!/bin/bash -# SPDX-license-identifier: Apache-2.0 -############################################################################## -# Copyright (c) 2016 Ericsson AB and others. -# (c) 2017 Enea Software AB -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -set -o nounset -set -o pipefail - -export TERM="vt220" - -if [[ "$BRANCH" != 'master' ]]; then - # source the file so we get OPNFV vars - source latest.properties - - # echo the info about artifact that is used during the deployment - echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment" -fi - -if [[ "$JOB_NAME" =~ "merge" ]]; then - # set simplest scenario for virtual deploys to run for merges - DEPLOY_SCENARIO="os-nosdn-nofeature-ha" -elif [[ "$BRANCH" != 'master' ]]; then - # for none-merge deployments - # checkout the commit that was used for building the downloaded artifact - # to make sure the ISO and deployment mechanism uses same versions - echo "Checking out $OPNFV_GIT_SHA1" - git checkout $OPNFV_GIT_SHA1 --quiet -fi - -# set deployment parameters -export TMPDIR=${WORKSPACE}/tmpdir - -LAB_NAME=${NODE_NAME/-*} -POD_NAME=${NODE_NAME/*-} - -# we currently support enea -if [[ ! $LAB_NAME =~ (arm|enea) ]]; then - echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!" - exit 1 -fi - -echo "Using configuration for $LAB_NAME" - -# create TMPDIR if it doesn't exist -mkdir -p $TMPDIR - -cd $WORKSPACE -if [[ $LAB_CONFIG_URL =~ ^(git|ssh):// ]]; then - echo "Cloning securedlab repo $BRANCH" - git clone --quiet --branch $BRANCH $LAB_CONFIG_URL lab-config - LAB_CONFIG_URL=file://${WORKSPACE}/lab-config - - # Source local_env if present, which contains POD-specific config - local_env="${WORKSPACE}/lab-config/labs/$LAB_NAME/$POD_NAME/fuel/config/local_env" - if [ -e $local_env ]; then - echo "-- Sourcing local environment file" - source $local_env - fi -fi - -# releng wants us to use nothing else but opnfv.iso for now. We comply. 
-ISO_FILE=$WORKSPACE/opnfv.iso - -# log file name -FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz" - -# Deploy Cache (to enable just create the deploy-cache subdir) -# NOTE: Only available when ISO files are cached using ISOSTORE mechanism -DEPLOY_CACHE=${ISOSTORE:-/iso_mount/opnfv_ci}/${BRANCH##*/}/deploy-cache -if [[ -d "${DEPLOY_CACHE}" ]]; then - echo "Deploy cache dir present." - echo "--------------------------------------------------------" - echo "Fuel@OPNFV deploy cache: ${DEPLOY_CACHE}" - DEPLOY_CACHE="-C ${DEPLOY_CACHE}" -else - DEPLOY_CACHE="" -fi - -# construct the command -DEPLOY_COMMAND="$WORKSPACE/ci/deploy.sh -b ${LAB_CONFIG_URL} \ - -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://${ISO_FILE} \ - -B ${DEFAULT_BRIDGE:-pxebr} -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME \ - ${DEPLOY_CACHE}" - -# log info to console -echo "Deployment parameters" -echo "--------------------------------------------------------" -echo "Scenario: $DEPLOY_SCENARIO" -echo "Lab: $LAB_NAME" -echo "POD: $POD_NAME" -[[ "$BRANCH" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}" -echo -echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..." -echo "--------------------------------------------------------" -echo - -# start the deployment -echo "Issuing command" -echo "$DEPLOY_COMMAND" -echo - -$DEPLOY_COMMAND -exit_code=$? - -echo -echo "--------------------------------------------------------" -echo "Deployment is done!" - -# upload logs for baremetal deployments -# work with virtual deployments is still going on so we skip that for the timebeing -if [[ "$JOB_NAME" =~ "baremetal-daily" ]]; then - echo "Uploading deployment logs" - gsutil cp $WORKSPACE/$FUEL_LOG_FILENAME gs://$GS_URL/logs/$FUEL_LOG_FILENAME > /dev/null 2>&1 - echo "Logs are available as http://$GS_URL/logs/$FUEL_LOG_FILENAME" -fi - -if [[ $exit_code -ne 0 ]]; then - echo "Deployment failed!" - exit $exit_code -else - echo "Deployment is successful!" -fi diff --git a/jjb/armband/armband-download-artifact.sh b/jjb/armband/armband-download-artifact.sh deleted file mode 100755 index 4f83305d4..000000000 --- a/jjb/armband/armband-download-artifact.sh +++ /dev/null @@ -1,77 +0,0 @@ -#!/bin/bash -# SPDX-license-identifier: Apache-2.0 -############################################################################## -# Copyright (c) 2016 Ericsson AB and others. -# (c) 2017 Enea AB -# All rights reserved. 
This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -set -o errexit -set -o pipefail - -# disable Fuel ISO download for master branch -[[ "$BRANCH" == 'master' ]] && exit 0 - -echo "Host info: $(hostname) $(hostname -I)" - -# Configurable environment variables: -# ISOSTORE (/iso_mount/opnfv_ci) - -if [[ "$JOB_NAME" =~ "merge" ]]; then - echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties" - # get the properties file for the Armband Fuel ISO built for a merged change - curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties -else - # get the latest.properties file in order to get info regarding latest artifact - echo "Downloading http://$GS_URL/latest.properties" - curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties -fi - -# source the file so we get artifact metadata, it will exit if it doesn't exist -source latest.properties - -# echo the info about artifact that is used during the deployment -OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/} -echo "Using $OPNFV_ARTIFACT for deployment" - -# Releng doesn't want us to use anything but opnfv.iso for now. We comply. -ISO_FILE=${WORKSPACE}/opnfv.iso - -# using ISOs for verify & merge jobs from local storage will be enabled later -if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then - # check if we already have the ISO to avoid redownload - ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}/${BRANCH##*/} - if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then - echo "ISO exists locally. Skipping the download and using the file from ISO store" - ln -s $ISOSTORE/$OPNFV_ARTIFACT ${ISO_FILE} - echo "--------------------------------------------------------" - echo - ls -al ${ISO_FILE} - echo - echo "--------------------------------------------------------" - echo "Done!" - exit 0 - fi -fi - -# Use gsutils if available -if $(which gsutil &>/dev/null); then - DOWNLOAD_URL="gs://$OPNFV_ARTIFACT_URL" - CMD="gsutil cp ${DOWNLOAD_URL} ${ISO_FILE}" -else - # download image - # -f returns error if the file was not found or on server error - DOWNLOAD_URL="http://$OPNFV_ARTIFACT_URL" - CMD="curl -f -s -o ${ISO_FILE} ${DOWNLOAD_URL}" -fi - -# log info to console -echo "Downloading the $INSTALLER_TYPE artifact using URL $DOWNLOAD_URL" -echo "This could take some time..." -echo "--------------------------------------------------------" -echo "$CMD" -$CMD -echo "--------------------------------------------------------" -echo "Done!" diff --git a/jjb/armband/armband-project-jobs.yml b/jjb/armband/armband-project-jobs.yml deleted file mode 100644 index 0623b5576..000000000 --- a/jjb/armband/armband-project-jobs.yml +++ /dev/null @@ -1,95 +0,0 @@ -################################################### -# All the jobs except verify have been removed! -# They will only be enabled on request by projects! 
-################################################### -- project: - name: armband - - project: '{name}' - - installer: 'fuel' - - jobs: - - 'armband-{installer}-build-daily-{stream}' - - stream: - - master: - branch: '{stream}' - gs-pathname: '' - disabled: false - - euphrates: - branch: 'stable/{stream}' - gs-pathname: '/{stream}' - disabled: true - -- job-template: - name: 'armband-{installer}-build-daily-{stream}' - - disabled: '{obj:disabled}' - - concurrent: false - - properties: - - logrotate-default - - throttle: - enabled: true - max-total: 1 - max-per-node: 1 - option: 'project' - - parameters: - - project-parameter: - project: '{project}' - branch: '{branch}' - - 'opnfv-build-enea-defaults' - - '{installer}-defaults' - - armband-project-parameter: - gs-pathname: '{gs-pathname}' - - scm: - - git-scm - - triggers: - - pollscm: - cron: '0 H/4 * * *' - - wrappers: - - timeout: - timeout: 360 - fail: true - - builders: - - shell: - !include-raw-escape: ./build.sh - - shell: - !include-raw-escape: ./upload-artifacts.sh - - publishers: - - email: - recipients: armband@enea.com - - email-jenkins-admins-on-failure - -######################## -# parameter macros -######################## -- parameter: - name: armband-project-parameter - parameters: - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." - - string: - name: CACHE_DIRECTORY - default: $HOME/opnfv/cache/$INSTALLER_TYPE - description: "Directory where the cache to be used during the build is located." - - string: - name: GS_URL - default: artifacts.opnfv.org/$PROJECT{gs-pathname} - description: "URL to Google Storage." - - choice: - name: FORCE_BUILD - choices: - - 'false' - - 'true' - description: 'Force build even if there are no changes in the armband repo. 
Default false' diff --git a/jjb/armband/armband-verify-jobs.yml b/jjb/armband/armband-verify-jobs.yml index c9476b1a2..c43dc7f82 100644 --- a/jjb/armband/armband-verify-jobs.yml +++ b/jjb/armband/armband-verify-jobs.yml @@ -21,13 +21,11 @@ ##################################### phase: - 'basic': - slave-label: 'opnfv-build-enea' - - 'build': - slave-label: 'opnfv-build-enea' + slave-label: 'armband-virtual' - 'deploy-virtual': - slave-label: 'opnfv-build-enea' + slave-label: 'armband-virtual' - 'smoke-test': - slave-label: 'opnfv-build-enea' + slave-label: 'armband-virtual' ##################################### # jobs ##################################### @@ -52,6 +50,11 @@ enabled: true max-total: 4 option: 'project' + - build-blocker: + use-build-blocker: true + blocking-jobs: + - 'fuel-os-.*?-virtual-daily-.*' + block-level: 'NODE' scm: - git-scm-gerrit @@ -96,8 +99,9 @@ - project-parameter: project: '{project}' branch: '{branch}' - - 'opnfv-build-enea-defaults' - - 'armband-verify-defaults': + - 'armband-virtual-defaults': + installer: '{installer}' + - '{installer}-defaults': gs-pathname: '{gs-pathname}' builders: @@ -117,20 +121,6 @@ node-parameters: false kill-phase-on: FAILURE abort-all-job: true - - multijob: - name: build - condition: SUCCESSFUL - projects: - - name: 'armband-verify-build-{stream}' - current-parameters: false - predefined-parameters: | - BRANCH=$BRANCH - GERRIT_REFSPEC=$GERRIT_REFSPEC - GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER - GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE - node-parameters: false - kill-phase-on: FAILURE - abort-all-job: true - multijob: name: deploy-virtual condition: SUCCESSFUL @@ -171,7 +161,8 @@ - logrotate-default - throttle: enabled: true - max-total: 6 + max-total: 2 + max-per-node: 1 option: 'project' - build-blocker: use-build-blocker: true @@ -193,8 +184,9 @@ project: '{project}' branch: '{branch}' - '{slave-label}-defaults' - - '{installer}-defaults' - - 'armband-verify-defaults': + - 'armband-virtual-defaults': + installer: '{installer}' + - '{installer}-defaults': gs-pathname: '{gs-pathname}' builders: @@ -212,21 +204,11 @@ echo "Not activated!" -- builder: - name: 'armband-verify-build-macro' - builders: - - shell: - !include-raw: ./build.sh - - shell: - !include-raw: ./armband-workspace-cleanup.sh - - builder: name: 'armband-verify-deploy-virtual-macro' builders: - - shell: | - #!/bin/bash - - echo "Not activated!" + - shell: + !include-raw: ../fuel/fuel-deploy.sh - builder: name: 'armband-verify-smoke-test-macro' @@ -235,21 +217,3 @@ #!/bin/bash echo "Not activated!" -##################################### -# parameter macros -##################################### -- parameter: - name: 'armband-verify-defaults' - parameters: - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." - - string: - name: CACHE_DIRECTORY - default: $HOME/opnfv/cache/$INSTALLER_TYPE - description: "Directory where the cache to be used during the build is located." - - string: - name: GS_URL - default: artifacts.opnfv.org/$PROJECT{gs-pathname} - description: "URL to Google Storage." 
diff --git a/jjb/armband/armband-workspace-cleanup.sh b/jjb/armband/armband-workspace-cleanup.sh deleted file mode 100755 index d8948c7a0..000000000 --- a/jjb/armband/armband-workspace-cleanup.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -# SPDX-license-identifier: Apache-2.0 -############################################################################## -# Copyright (c) 2016 Ericsson AB and others. -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -set -o errexit -set -o nounset -set -o pipefail - -# delete the $WORKSPACE to open some space -/bin/rm -rf $WORKSPACE diff --git a/jjb/armband/build.sh b/jjb/armband/build.sh deleted file mode 100755 index 29c01bb38..000000000 --- a/jjb/armband/build.sh +++ /dev/null @@ -1,114 +0,0 @@ -#!/bin/bash -# SPDX-license-identifier: Apache-2.0 -############################################################################## -# Copyright (c) 2016 Ericsson AB and others. -# Copyright (c) 2017 Enea AB. -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## - -# disable Armband iso build for master branch -if [[ "$BRANCH" == 'master' ]]; then - touch $WORKSPACE/.noupload - echo "--------------------------------------------------------" - echo "Done!" - exit 0 -fi - -set -o errexit -set -o nounset -set -o pipefail - -export TERM="vt220" - -echo "Host info: $(hostname) $(hostname -I)" - -cd $WORKSPACE - -# Armband requires initializing git submodules (e.g. for Fuel's clean_cache.sh) -make submodules-init - -# remove the expired items from cache -test -f $WORKSPACE/ci/clean_cache.sh && $WORKSPACE/ci/clean_cache.sh $CACHE_DIRECTORY - -LATEST_ISO_PROPERTIES=$WORKSPACE/latest.iso.properties -if [[ "$JOB_NAME" =~ "daily" ]]; then - # check to see if we already have an artifact on artifacts.opnfv.org - # for this commit during daily builds - echo "Checking to see if we already built and stored Armband Fuel ISO for this commit" - - curl -s -o $LATEST_ISO_PROPERTIES http://$GS_URL/latest.properties 2>/dev/null -fi - -# get metadata of latest ISO -if grep -q OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES 2>/dev/null; then - LATEST_ISO_SHA1=$(grep OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES | cut -d'=' -f2) - LATEST_ISO_URL=$(grep OPNFV_ARTIFACT_URL $LATEST_ISO_PROPERTIES | cut -d'=' -f2) -else - LATEST_ISO_SHA1=none -fi - -# get current SHA1 -CURRENT_SHA1=$(git rev-parse HEAD) - -# set FORCE_BUILD to false for non-daily builds -FORCE_BUILD=${FORCE_BUILD:-false} - -if [[ "$CURRENT_SHA1" == "$LATEST_ISO_SHA1" && "$FORCE_BUILD" == "false" ]]; then - echo "***************************************************" - echo " An ISO has already been built for this commit" - echo " $LATEST_ISO_URL" - echo "***************************************************" -else - echo "This commit has not been built yet or forced build! Proceeding with the build." - /bin/rm -f $LATEST_ISO_PROPERTIES - echo -fi - -# log info to console -echo "Starting the build of Armband $INSTALLER_TYPE. This could take some time..." 
-echo "-----------------------------------------------------------" -echo - -# create the cache directory if it doesn't exist -mkdir -p $CACHE_DIRECTORY - -# set OPNFV_ARTIFACT_VERSION -if [[ "$JOB_NAME" =~ "merge" ]]; then - echo "Building Fuel ISO for a merged change" - export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER" - echo "Not supported" - exit 1 -else - export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S") -fi - -NOCACHE_PATTERN="verify: no-cache" -if [[ "$JOB_NAME" =~ "verify" && "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "$NOCACHE_PATTERN" ]]; then - echo "The cache will not be used for this build!" - NOCACHE_ARG="-f P" -fi -NOCACHE_ARG=${NOCACHE_ARG:-} - -# start the build -cd $WORKSPACE/ci -./build.sh -v $OPNFV_ARTIFACT_VERSION $NOCACHE_ARG -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY - -# list the build artifacts -ls -al $BUILD_DIRECTORY - -# save information regarding artifact into file -( - echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION" - echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)" - echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)" - echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso" - echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)" - echo "OPNFV_BUILD_URL=$BUILD_URL" -) > $WORKSPACE/opnfv.properties - -echo -echo "--------------------------------------------------------" -echo "Done!" diff --git a/jjb/armband/upload-artifacts.sh b/jjb/armband/upload-artifacts.sh deleted file mode 100755 index 97987e2c5..000000000 --- a/jjb/armband/upload-artifacts.sh +++ /dev/null @@ -1,93 +0,0 @@ -#!/bin/bash -# SPDX-license-identifier: Apache-2.0 -############################################################################## -# Copyright (c) 2016 Ericsson AB and others. -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -set -o pipefail - -# configurable environment variables: -# ISOSTORE (/iso_mount/opnfv_ci) - -# check if we built something -if [ -f $WORKSPACE/.noupload ]; then - echo "Nothing new to upload. Exiting." - /bin/rm -f $WORKSPACE/.noupload - exit 0 -fi - -# source the opnfv.properties to get ARTIFACT_VERSION -source $WORKSPACE/opnfv.properties - - -# storing ISOs for verify & merge jobs will be done once we get the disk array -if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then - # store ISO locally on NFS first - ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci} - if [[ -d "$ISOSTORE" ]]; then - ISOSTORE=${ISOSTORE}/${BRANCH##*/} - mkdir -p $ISOSTORE - - # remove all but most recent 3 ISOs first to keep iso_mount clean & tidy - cd $ISOSTORE - ls -tp | grep -v '/' | tail -n +4 | xargs -d '\n' /bin/rm -f -- - - # store ISO - echo "Storing latest ISO in local storage" - touch .storing - /bin/cp -f $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \ - $ISOSTORE/opnfv-$OPNFV_ARTIFACT_VERSION.iso - rm .storing - fi -fi - -# log info to console -echo "Uploading armband artifacts. This could take some time..." 
-echo - -echo "Started at $(date)" -cd $WORKSPACE -# upload artifact and additional files to google storage -gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \ - gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1 -gsutil cp $WORKSPACE/opnfv.properties \ - gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1 -if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then - gsutil cp $WORKSPACE/opnfv.properties \ - gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1 -elif [[ "$JOB_NAME" =~ "merge" ]]; then - echo "Uploaded Armband Fuel ISO for a merged change" -fi -echo "Ended at $(date)" - -gsutil -m setmeta \ - -h "Content-Type:text/html" \ - -h "Cache-Control:private, max-age=0, no-transform" \ - gs://$GS_URL/latest.properties \ - gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1 - -gsutil -m setmeta \ - -h "Cache-Control:private, max-age=0, no-transform" \ - gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1 - -# disabled errexit due to gsutil setmeta complaints -# BadRequestException: 400 Invalid argument -# check if we uploaded the file successfully to see if things are fine -gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1 -if [[ $? -ne 0 ]]; then - echo "Problem while uploading artifact!" - echo "Check log $WORKSPACE/gsutil.iso.log on the machine where this build is done." - exit 1 -fi - -echo "Done!" -echo -echo "--------------------------------------------------------" -echo -echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso" -echo -echo "--------------------------------------------------------" -echo diff --git a/jjb/auto/auto.yml b/jjb/auto/auto.yml new file mode 100644 index 000000000..fefa37626 --- /dev/null +++ b/jjb/auto/auto.yml @@ -0,0 +1,56 @@ +--- +- project: + name: auto + + project: '{name}' + + stream: + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false + + jobs: + - 'auto-verify-{stream}' + +- job-template: + name: 'auto-verify-{stream}' + + disabled: '{obj:disabled}' + + parameters: + - project-parameter: + project: '{project}' + branch: '{branch}' + - 'opnfv-build-defaults' + + scm: + - git-scm-gerrit + + triggers: + - gerrit: + server-name: 'gerrit.opnfv.org' + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - draft-published-event + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + disable-strict-forbidden-file-verification: 'true' + forbidden-file-paths: + - compare-type: ANT + pattern: 'docs/**|.gitignore' + + builders: + - shell: | + echo "Nothing to verify!" 
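
For reference, the armband upload-artifacts.sh removed earlier in this patch (like its fuel counterpart removed further below) followed the usual releng upload-then-verify pattern: copy the ISO and properties files with gsutil, ignore the occasionally spurious setmeta failures, and treat a successful gsutil ls of the uploaded object as the real success check. The condensed sketch below restates that pattern; the bucket path and ISO filename are placeholders, not values taken from these jobs.

#!/bin/bash
# Condensed sketch of the upload-then-verify pattern used by the upload script
# removed above. GS_URL and the ISO filename are placeholders for the example.
set -o pipefail
GS_URL=${GS_URL:-artifacts.example.org/myproject}
ISO_NAME=opnfv-example.iso

gsutil cp "${ISO_NAME}" "gs://${GS_URL}/${ISO_NAME}" > gsutil.iso.log 2>&1

# gsutil setmeta occasionally fails with "BadRequestException: 400 Invalid
# argument", so its exit code is ignored and success is judged by listing
# the uploaded object instead.
gsutil -m setmeta \
    -h "Cache-Control:private, max-age=0, no-transform" \
    "gs://${GS_URL}/${ISO_NAME}" > /dev/null 2>&1 || true

if ! gsutil ls "gs://${GS_URL}/${ISO_NAME}" > /dev/null 2>&1; then
    echo "Problem while uploading artifact!"
    echo "Check gsutil.iso.log on the machine where this build is done."
    exit 1
fi

echo "Artifact is available as http://${GS_URL}/${ISO_NAME}"
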
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh index a7570431d..6d4d2d8d1 100644 --- a/jjb/bottlenecks/bottlenecks-run-suite.sh +++ b/jjb/bottlenecks/bottlenecks-run-suite.sh @@ -125,7 +125,7 @@ if [[ $SUITE_NAME == *posca* ]]; then -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \ -e BRANCH=${BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \ -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL} \ - -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}" + -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e BUILD_TAG=${BUILD_TAG}" docker_volume="-v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp" cmd="docker run ${opts} ${envs} --name bottlenecks-load-master ${docker_volume} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash" diff --git a/jjb/ci_gate_security/anteater-report-to-gerrit.sh b/jjb/ci_gate_security/anteater-report-to-gerrit.sh index 00a78ceba..a749d1db4 100644 --- a/jjb/ci_gate_security/anteater-report-to-gerrit.sh +++ b/jjb/ci_gate_security/anteater-report-to-gerrit.sh @@ -13,7 +13,7 @@ if [[ -e securityaudit.log ]] ; then EXITSTATUS=1 fi - cat securityaudit.log | awk -F"ERROR - " '{print $2}' | sed -e "s/\"/\\\\\"/g;s/\'/\\\\\'/g"> shortlog + awk -F"ERROR - " '{print $2}' securityaudit.log | sed -e "s/\"/\\\\\"/g;s/\'/\\\\/g"> shortlog ssh -p 29418 gerrit.opnfv.org \ "gerrit review -p $GERRIT_PROJECT \ diff --git a/jjb/ci_gate_security/opnfv-ci-gate-security.yml b/jjb/ci_gate_security/opnfv-ci-gate-security.yml index 55d629cb4..59479e73d 100644 --- a/jjb/ci_gate_security/opnfv-ci-gate-security.yml +++ b/jjb/ci_gate_security/opnfv-ci-gate-security.yml @@ -77,7 +77,7 @@ comment-contains-value: 'reverify' projects: - project-compare-type: 'REG_EXP' - project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|cooper|cperf|daisy|doctor|dovetail|dpacc|enfv|escalator|fds|functest|octopus|pharos|releng|sandbox|yardstick' + project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|cooper|cperf|daisy|doctor|dovetail|dpacc|enfv|escalator|fds|fuel|functest|octopus|pharos|releng|sandbox|yardstick' branches: - branch-compare-type: 'ANT' branch-pattern: '**/{branch}' diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml index 59482459e..e612ef65f 100644 --- a/jjb/compass4nfv/compass-project-jobs.yml +++ b/jjb/compass4nfv/compass-project-jobs.yml @@ -21,14 +21,14 @@ disabled: false jobs: - - 'compass-build-iso-{stream}' + - '{installer}-build-daily-{stream}' - 'compass-build-ppa-{stream}' ######################## # job templates ######################## - job-template: - name: 'compass-build-iso-{stream}' + name: '{installer}-build-daily-{stream}' disabled: '{obj:disabled}' @@ -64,8 +64,7 @@ !include-raw-escape: ./compass-build.sh - shell: !include-raw-escape: ./compass-upload-artifact.sh - - shell: - !include-raw-escape: ./compass-workspace-cleanup.sh + - 'clean-workspace' - job-template: name: 'compass-build-ppa-{stream}' diff --git a/jjb/compass4nfv/compass-workspace-cleanup.sh b/jjb/compass4nfv/compass-workspace-cleanup.sh deleted file mode 100644 index 98201af9b..000000000 --- a/jjb/compass4nfv/compass-workspace-cleanup.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash -set -o errexit -set -o nounset -set -o pipefail - -# delete everything that is in $WORKSPACE -/bin/rm -rf $WORKSPACE \ No newline at end of file diff --git a/jjb/openretriever/openretriever-project.yml b/jjb/container4nfv/container4nfv-project.yml 
similarity index 94% rename from jjb/openretriever/openretriever-project.yml rename to jjb/container4nfv/container4nfv-project.yml index 3bcfab6d3..3b29b36f8 100644 --- a/jjb/openretriever/openretriever-project.yml +++ b/jjb/container4nfv/container4nfv-project.yml @@ -3,12 +3,12 @@ # They will only be enabled on request by projects! ################################################### - project: - name: openretriever + name: container4nfv project: '{name}' jobs: - - 'openretriever-verify-{stream}' + - 'container4nfv-verify-{stream}' stream: - master: @@ -21,7 +21,7 @@ disabled: false - job-template: - name: 'openretriever-verify-{stream}' + name: 'container4nfv-verify-{stream}' disabled: '{obj:disabled}' diff --git a/jjb/daisy4nfv/daisy-project-jobs.yml b/jjb/daisy4nfv/daisy-project-jobs.yml index fd0da7928..bcb89105e 100644 --- a/jjb/daisy4nfv/daisy-project-jobs.yml +++ b/jjb/daisy4nfv/daisy-project-jobs.yml @@ -54,6 +54,8 @@ use-build-blocker: true blocking-jobs: - '{installer}-daily-.*' + - 'daisy4nfv-merge-build-.*' + - 'daisy4nfv-verify-build-.*' block-level: 'NODE' scm: @@ -183,8 +185,7 @@ !include-raw: ./daisy4nfv-build.sh - shell: !include-raw: ./daisy4nfv-upload-artifact.sh - - shell: - !include-raw: ./daisy4nfv-workspace-cleanup.sh + - 'clean-workspace' - builder: name: 'daisy-deploy-daily-macro' diff --git a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml index 561ffbe24..97d830f9f 100644 --- a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml +++ b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml @@ -154,6 +154,7 @@ use-build-blocker: true blocking-jobs: - '{alias}-merge-{phase}-.*' + - '{installer}-daily-.*' block-level: 'NODE' scm: @@ -191,8 +192,7 @@ !include-raw: ./daisy4nfv-build.sh - shell: !include-raw: ./daisy4nfv-upload-artifact.sh - - shell: - !include-raw: ./daisy4nfv-workspace-cleanup.sh + - 'clean-workspace' - builder: name: 'daisy-merge-deploy-virtual-macro' @@ -201,8 +201,7 @@ !include-raw: ./daisy4nfv-download-artifact.sh - shell: !include-raw: ./daisy-deploy.sh - - shell: - !include-raw: ./daisy4nfv-workspace-cleanup.sh + - 'clean-workspace' ##################################### # parameter macros diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml index dff0ff0a4..1828ce459 100644 --- a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml +++ b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml @@ -50,7 +50,7 @@ - build-blocker: use-build-blocker: true blocking-jobs: - - '{installer}-merge-build-.*' + - '{alias}-merge-build-.*' block-level: 'NODE' scm: @@ -147,8 +147,9 @@ - build-blocker: use-build-blocker: true blocking-jobs: - - '{installer}-merge-build-.*' + - '{alias}-merge-build-.*' - '{alias}-verify-build-.*' + - '{installer}-daily-.*' block-level: 'NODE' scm: @@ -182,8 +183,7 @@ !include-raw: ./daisy4nfv-basic.sh - shell: !include-raw: ./daisy4nfv-build.sh - - shell: - !include-raw: ./daisy4nfv-workspace-cleanup.sh + - 'clean-workspace' - builder: name: daisy-verify-unit-macro diff --git a/jjb/daisy4nfv/daisy4nfv-workspace-cleanup.sh b/jjb/daisy4nfv/daisy4nfv-workspace-cleanup.sh deleted file mode 100755 index 26f7e9a01..000000000 --- a/jjb/daisy4nfv/daisy4nfv-workspace-cleanup.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -############################################################################## -# Copyright (c) 2016 ZTE Coreporation and others. -# hu.zhijiang@zte.com.cn -# sun.jing22@zte.com.cn -# All rights reserved. 
This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -set -o errexit -set -o nounset -set -o pipefail - -# delete the $WORKSPACE to open some space -/bin/rm -rf $WORKSPACE diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml index d535d6109..b007e1432 100644 --- a/jjb/doctor/doctor.yml +++ b/jjb/doctor/doctor.yml @@ -37,9 +37,9 @@ is-python: true pod: - - arm-pod2: + - armband-baremetal: slave-label: '{pod}' - - arm-pod5: + - armband-virtual: slave-label: '{pod}' jobs: diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml index 92b1db356..938fdf915 100644 --- a/jjb/dovetail/dovetail-ci-jobs.yml +++ b/jjb/dovetail/dovetail-ci-jobs.yml @@ -25,7 +25,7 @@ branch: 'stable/{stream}' dovetail-branch: master gs-pathname: '/{stream}' - docker-tag: 'cvp.0.5.0' + docker-tag: 'cvp.0.6.0' #----------------------------------- # POD, PLATFORM, AND BRANCH MAPPING @@ -137,21 +137,6 @@ SUT: compass auto-trigger-name: 'daily-trigger-disabled' <<: *master - - arm-pod2: - slave-label: '{pod}' - SUT: fuel - auto-trigger-name: 'daily-trigger-disabled' - <<: *master - - arm-pod5: - slave-label: '{pod}' - SUT: fuel - auto-trigger-name: 'daily-trigger-disabled' - <<: *master - - arm-virtual2: - slave-label: '{pod}' - SUT: fuel - auto-trigger-name: 'daily-trigger-disabled' - <<: *master - zte-pod1: slave-label: zte-pod1 SUT: fuel @@ -177,6 +162,11 @@ SUT: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *danube + - huawei-pod4: + slave-label: huawei-pod4 + SUT: apex + auto-trigger-name: 'apex-huawei-pod4-{testsuite}-danube-trigger' + <<: *danube #-------------------------------- testsuite: - 'compliance_set' @@ -206,7 +196,7 @@ - build-name: name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO' - timeout: - timeout: 180 + timeout: 300 abort: true - fix-workspace-permissions @@ -273,3 +263,15 @@ builders: - shell: !include-raw: ./dovetail-cleanup.sh + +#-------------------------- +# trigger macros +#-------------------------- +- trigger: + name: 'apex-huawei-pod4-proposed_tests-danube-trigger' + triggers: + - timed: '0 1 * * *' +- trigger: + name: 'apex-huawei-pod4-compliance_set-danube-trigger' + triggers: + - timed: '' diff --git a/jjb/dovetail/dovetail-cleanup.sh b/jjb/dovetail/dovetail-cleanup.sh index 3ae0cbcc9..2d66fe022 100755 --- a/jjb/dovetail/dovetail-cleanup.sh +++ b/jjb/dovetail/dovetail-cleanup.sh @@ -12,16 +12,16 @@ # clean up dependent project docker images, which has no containers and image tag None clean_images=(opnfv/functest opnfv/yardstick opnfv/testapi mongo) for clean_image in "${clean_images[@]}"; do - echo "Removing image $image_id, which has no containers and image tag is None" dangling_images=($(docker images -f "dangling=true" | grep ${clean_image} | awk '{print $3}')) if [[ -n ${dangling_images} ]]; then for image_id in "${dangling_images[@]}"; do + echo "Removing image $image_id, which has no containers and image tag is None" docker rmi $image_id >${redirect} done fi done -echo "Remove containers with image opnfv/dovetail:..." +echo "Remove dovetail images with tag None and containers with these images ..." dangling_images=($(docker images -f "dangling=true" | grep opnfv/dovetail | awk '{print $3}')) if [[ -n ${dangling_images} ]]; then for image_id in "${dangling_images[@]}"; do @@ -37,13 +37,13 @@ if [[ ! 
-z $(docker ps -a | grep opnfv/dovetail) ]]; then docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >${redirect} fi -echo "Remove dovetail existing images if exist..." -if [[ ! -z $(docker images | grep opnfv/dovetail) ]]; then - echo "Docker images to remove:" - docker images | head -1 && docker images | grep opnfv/dovetail >${redirect} - image_tags=($(docker images | grep opnfv/dovetail | awk '{print $2}')) - for tag in "${image_tags[@]}"; do - echo "Removing docker image opnfv/dovetail:$tag..." - docker rmi opnfv/dovetail:$tag >${redirect} - done -fi +#echo "Remove dovetail existing images if exist..." +#if [[ ! -z $(docker images | grep opnfv/dovetail) ]]; then +# echo "Docker images to remove:" +# docker images | head -1 && docker images | grep opnfv/dovetail >${redirect} +# image_tags=($(docker images | grep opnfv/dovetail | awk '{print $2}')) +# for tag in "${image_tags[@]}"; do +# echo "Removing docker image opnfv/dovetail:$tag..." +# docker rmi opnfv/dovetail:$tag >${redirect} +# done +#fi diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh index 7dd6a2ddc..9c4e205ae 100755 --- a/jjb/dovetail/dovetail-run.sh +++ b/jjb/dovetail/dovetail-run.sh @@ -175,7 +175,7 @@ fi # Modify tempest_conf.yaml file tempest_conf_file=${DOVETAIL_CONFIG}/tempest_conf.yaml -if [ ${INSTALLER_TYPE} == 'compass' ]; then +if [[ ${INSTALLER_TYPE} == 'compass' || ${INSTALLER_TYPE} == 'apex' ]]; then volume_device='vdb' else volume_device='vdc' diff --git a/jjb/dpacc/dpacc.yml b/jjb/dpacc/dpacc.yml index 63eb044ad..39f3d563c 100644 --- a/jjb/dpacc/dpacc.yml +++ b/jjb/dpacc/dpacc.yml @@ -1,3 +1,4 @@ +--- ################################################### # All the jobs except verify have been removed! # They will only be enabled on request by projects! 
@@ -8,17 +9,17 @@ project: '{name}' jobs: - - 'dpacc-verify-{stream}' + - 'dpacc-verify-{stream}' stream: - - master: - branch: '{stream}' - gs-pathname: '' - disabled: false - - danube: - branch: 'stable/{stream}' - gs-pathname: '/{stream}' - disabled: false + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false + - danube: + branch: 'stable/{stream}' + gs-pathname: '/{stream}' + disabled: false - job-template: name: 'dpacc-verify-{stream}' @@ -26,38 +27,38 @@ disabled: '{obj:disabled}' parameters: - - project-parameter: - project: '{project}' - branch: '{branch}' - - 'opnfv-build-ubuntu-defaults' + - project-parameter: + project: '{project}' + branch: '{branch}' + - 'opnfv-build-ubuntu-defaults' scm: - - git-scm-gerrit + - git-scm-gerrit triggers: - - gerrit: - server-name: 'gerrit.opnfv.org' - trigger-on: - - patchset-created-event: - exclude-drafts: 'false' - exclude-trivial-rebase: 'false' - exclude-no-code-change: 'false' - - draft-published-event - - comment-added-contains-event: - comment-contains-value: 'recheck' - - comment-added-contains-event: - comment-contains-value: 'reverify' - projects: - - project-compare-type: 'ANT' - project-pattern: '{project}' - branches: - - branch-compare-type: 'ANT' - branch-pattern: '**/{branch}' - disable-strict-forbidden-file-verification: 'true' - forbidden-file-paths: - - compare-type: ANT - pattern: 'docs/**|.gitignore' + - gerrit: + server-name: 'gerrit.opnfv.org' + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - draft-published-event + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + disable-strict-forbidden-file-verification: 'true' + forbidden-file-paths: + - compare-type: ANT + pattern: 'docs/**|.gitignore' builders: - - shell: | - echo "Nothing to verify!" + - shell: | + echo "Nothing to verify!" diff --git a/jjb/fuel/fuel-build.sh b/jjb/fuel/fuel-build.sh deleted file mode 100755 index 2c0d12a80..000000000 --- a/jjb/fuel/fuel-build.sh +++ /dev/null @@ -1,109 +0,0 @@ -#!/bin/bash -# SPDX-license-identifier: Apache-2.0 -############################################################################## -# Copyright (c) 2016 Ericsson AB and others. -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## - -# disable Fuel iso build for master branch -if [[ "$BRANCH" == 'master' ]]; then - touch $WORKSPACE/.noupload - echo "--------------------------------------------------------" - echo "Done!" 
- exit 0 -fi - -set -o errexit -set -o nounset -set -o pipefail - -export TERM="vt220" - -cd $WORKSPACE - -# remove the expired items from cache -test -f $WORKSPACE/ci/clean_cache.sh && $WORKSPACE/ci/clean_cache.sh $CACHE_DIRECTORY - -LATEST_ISO_PROPERTIES=$WORKSPACE/latest.iso.properties -if [[ "$JOB_NAME" =~ "daily" ]]; then - # check to see if we already have an artifact on artifacts.opnfv.org - # for this commit during daily builds - echo "Checking to see if we already built and stored Fuel ISO for this commit" - - curl -s -o $LATEST_ISO_PROPERTIES http://$GS_URL/latest.properties 2>/dev/null -fi - -# get metadata of latest ISO -if grep -q OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES 2>/dev/null; then - LATEST_ISO_SHA1=$(grep OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES | cut -d'=' -f2) - LATEST_ISO_URL=$(grep OPNFV_ARTIFACT_URL $LATEST_ISO_PROPERTIES | cut -d'=' -f2) -else - LATEST_ISO_SHA1=none -fi - -# get current SHA1 -CURRENT_SHA1=$(git rev-parse HEAD) - -# set FORCE_BUILD to false for non-daily builds -FORCE_BUILD=${FORCE_BUILD:-false} - -if [[ "$CURRENT_SHA1" == "$LATEST_ISO_SHA1" && "$FORCE_BUILD" == "false" ]]; then - echo "***************************************************" - echo " An ISO has already been built for this commit" - echo " $LATEST_ISO_URL" - echo "***************************************************" -# echo "Nothing new to build. Exiting." -# touch $WORKSPACE/.noupload -# exit 0 -else - echo "This commit has not been built yet or forced build! Proceeding with the build." - /bin/rm -f $LATEST_ISO_PROPERTIES - echo -fi - -# log info to console -echo "Starting the build of $INSTALLER_TYPE. This could take some time..." -echo "--------------------------------------------------------" -echo - -# create the cache directory if it doesn't exist -mkdir -p $CACHE_DIRECTORY - -# set OPNFV_ARTIFACT_VERSION -if [[ "$JOB_NAME" =~ "merge" ]]; then - echo "Building Fuel ISO for a merged change" - export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER" -else - export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S") -fi - -NOCACHE_PATTERN="verify: no-cache" -if [[ "$JOB_NAME" =~ "verify" && "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "$NOCACHE_PATTERN" ]]; then - echo "The cache will not be used for this build!" - NOCACHE_ARG="-f P" -fi -NOCACHE_ARG=${NOCACHE_ARG:-} - -# start the build -cd $WORKSPACE/ci -./build.sh -v $OPNFV_ARTIFACT_VERSION $NOCACHE_ARG -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY - -# list the build artifacts -ls -al $BUILD_DIRECTORY - -# save information regarding artifact into file -( - echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION" - echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)" - echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)" - echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso" - echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)" - echo "OPNFV_BUILD_URL=$BUILD_URL" -) > $WORKSPACE/opnfv.properties - -echo -echo "--------------------------------------------------------" -echo "Done!" 
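
Both ISO build scripts deleted in this patch (jjb/armband/build.sh and the fuel-build.sh above) shared the same skip-rebuild check: download latest.properties from the artifact server, compare its OPNFV_GIT_SHA1 against the current HEAD, and only rebuild when the SHA1s differ or FORCE_BUILD is set. The trimmed sketch below restates that check on its own; the artifact server path is a placeholder, not one of the real GS_URL values.

#!/bin/bash
# Trimmed sketch of the skip-rebuild check shared by the removed armband and
# fuel build scripts. GS_URL is a placeholder artifact server path.
GS_URL=${GS_URL:-artifacts.example.org/myproject}
FORCE_BUILD=${FORCE_BUILD:-false}
LATEST_ISO_PROPERTIES=latest.iso.properties

# Grab the metadata of the most recently published ISO (absent on first run).
curl -s -o "${LATEST_ISO_PROPERTIES}" "http://${GS_URL}/latest.properties" 2>/dev/null || true

LATEST_ISO_SHA1=none
if grep -q OPNFV_GIT_SHA1 "${LATEST_ISO_PROPERTIES}" 2>/dev/null; then
    LATEST_ISO_SHA1=$(grep OPNFV_GIT_SHA1 "${LATEST_ISO_PROPERTIES}" | cut -d'=' -f2)
fi

CURRENT_SHA1=$(git rev-parse HEAD)

if [[ "${CURRENT_SHA1}" == "${LATEST_ISO_SHA1}" && "${FORCE_BUILD}" == "false" ]]; then
    echo "An ISO has already been built for ${CURRENT_SHA1}; skipping the build."
    exit 0
fi
echo "No published ISO for ${CURRENT_SHA1} (or forced build); proceeding."
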
diff --git a/jjb/fuel/fuel-daily-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml index cbdd3ddba..c30cfed72 100644 --- a/jjb/fuel/fuel-daily-jobs.yml +++ b/jjb/fuel/fuel-daily-jobs.yml @@ -20,6 +20,11 @@ branch: 'stable/{stream}' disabled: true gs-pathname: '/{stream}' + danube: &danube + stream: danube + branch: 'stable/{stream}' + disabled: false + gs-pathname: '/{stream}' #-------------------------------- # POD, INSTALLER, AND BRANCH MAPPING #-------------------------------- @@ -53,6 +58,9 @@ - zte-pod3: slave-label: zte-pod3 <<: *euphrates + - zte-pod1: + slave-label: zte-pod1 + <<: *danube #-------------------------------- # scenarios #-------------------------------- @@ -60,18 +68,12 @@ # HA scenarios - 'os-nosdn-nofeature-ha': auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - - 'os-odl_l2-nofeature-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - - 'os-odl_l3-nofeature-ha': + - 'os-odl-nofeature-ha': auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-onos-sfc-ha': auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-onos-nofeature-ha': auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - - 'os-odl_l2-sfc-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - - 'os-odl_l2-bgpvpn-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-nosdn-kvm-ha': auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-nosdn-ovs-ha': @@ -85,18 +87,12 @@ # NOHA scenarios - 'os-nosdn-nofeature-noha': auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - - 'os-odl_l2-nofeature-noha': - auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - - 'os-odl_l3-nofeature-noha': + - 'os-odl-nofeature-noha': auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-onos-sfc-noha': auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-onos-nofeature-noha': auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - - 'os-odl_l2-sfc-noha': - auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - - 'os-odl_l2-bgpvpn-noha': - auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-nosdn-kvm-noha': auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-nosdn-ovs-noha': @@ -132,6 +128,7 @@ blocking-jobs: - 'fuel-os-.*?-{pod}-daily-.*' - 'fuel-os-.*?-{pod}-weekly-.*' + - 'fuel-verify-.*' block-level: 'NODE' wrappers: @@ -145,14 +142,13 @@ - project-parameter: project: '{project}' branch: '{branch}' - - '{installer}-defaults' + - '{installer}-defaults': + gs-pathname: '{gs-pathname}' - '{slave-label}-defaults': installer: '{installer}' - string: name: DEPLOY_SCENARIO default: '{scenario}' - - fuel-ci-parameter: - gs-pathname: '{gs-pathname}' builders: - description-setter: @@ -190,11 +186,12 @@ # 2.here the stream means the SUT stream, dovetail stream is defined in its own job # 3.only debug testsuite here(refstack, ha, ipv6, bgpvpn) # 4.not used for release criteria or compliance, - # only to debug the dovetail tool bugs with bgpvpn - # 5,only run against scenario os-odl_l2-bgpvpn-ha(regex used here, can extend to more scenarios future) + # only to debug the dovetail tool bugs with bgpvpn and nosdn-nofeature + # 5.only run against scenario os-odl-bgpvpn-ha(regex used here, can extend to more scenarios future) + # 6.ZTE pod1, os-nosdn-nofeature-ha and os-odl-bgpvpn-ha, run against danube - conditional-step: condition-kind: regex-match - regex: os-odl_l2-bgpvpn-ha 
+ regex: os-(nosdn-nofeature|odl-bgpvpn)-ha label: '{scenario}' steps: - trigger-builds: @@ -208,33 +205,6 @@ build-step-failure-threshold: 'never' failure-threshold: 'never' unstable-threshold: 'FAILURE' - # ZTE pod1 weekly(Sunday), os-odl_l2-nofeature-ha, run against master and euphrates - - conditional-step: - condition-kind: and - condition-operands: - - condition-kind: regex-match - regex: os-odl_l2-nofeature-ha - label: '{scenario}' - - condition-kind: regex-match - regex: zte-pod1 - label: '{pod}' - - condition-kind: day-of-week - day-selector: select-days - days: - SAT: true - use-build-time: true - steps: - - trigger-builds: - - project: 'dovetail-fuel-zte-pod1-proposed_tests-{stream}' - current-parameters: false - predefined-parameters: - DEPLOY_SCENARIO={scenario} - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' publishers: - email: @@ -268,14 +238,13 @@ - project-parameter: project: '{project}' branch: '{branch}' - - '{installer}-defaults' + - '{installer}-defaults': + gs-pathname: '{gs-pathname}' - '{slave-label}-defaults': installer: '{installer}' - string: name: DEPLOY_SCENARIO - default: 'os-odl_l2-nofeature-ha' - - fuel-ci-parameter: - gs-pathname: '{gs-pathname}' + default: 'os-odl-nofeature-ha' - string: name: DEPLOY_TIMEOUT default: '150' @@ -302,28 +271,6 @@ - email-jenkins-admins-on-failure ######################## -# parameter macros -######################## -- parameter: - name: fuel-ci-parameter - parameters: - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." - - string: - name: CACHE_DIRECTORY - default: $HOME/opnfv/cache/$INSTALLER_TYPE - description: "Directory where the cache to be used during the build is located." - - string: - name: GS_URL - default: artifacts.opnfv.org/$PROJECT{gs-pathname} - description: "URL to Google Storage." - - string: - name: SSH_KEY - default: "/tmp/mcp.rsa" - description: "Path to private SSH key to access environment nodes. For MCP deployments only." 
-######################## # trigger macros ######################## #----------------------------------------------- @@ -335,11 +282,7 @@ triggers: - timed: '5 20 * * *' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger' - triggers: - - timed: '' # '5 23 * * *' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger' + name: 'fuel-os-odl-nofeature-ha-baremetal-daily-master-trigger' triggers: - timed: '5 2 * * *' - trigger: @@ -354,14 +297,6 @@ name: 'fuel-os-onos-nofeature-ha-baremetal-daily-master-trigger' triggers: - timed: '' # '5 8 * * *' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger' - triggers: - - timed: '' # '5 11 * * *' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger' - triggers: - - timed: '' # '5 14 * * *' - trigger: name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger' triggers: @@ -380,11 +315,7 @@ triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-baremetal-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-noha-baremetal-daily-master-trigger' + name: 'fuel-os-odl-nofeature-noha-baremetal-daily-master-trigger' triggers: - timed: '' - trigger: @@ -395,14 +326,6 @@ name: 'fuel-os-onos-nofeature-noha-baremetal-daily-master-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-baremetal-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-noha-baremetal-daily-master-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-master-trigger' triggers: @@ -428,11 +351,7 @@ triggers: - timed: '' # '0 20 * * *' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-euphrates-trigger' - triggers: - - timed: '' # '0 23 * * *' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-euphrates-trigger' + name: 'fuel-os-odl-nofeature-ha-baremetal-daily-euphrates-trigger' triggers: - timed: '' # '0 2 * * *' - trigger: @@ -443,14 +362,6 @@ name: 'fuel-os-onos-nofeature-ha-baremetal-daily-euphrates-trigger' triggers: - timed: '' # '0 8 * * *' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-euphrates-trigger' - triggers: - - timed: '' # '0 11 * * *' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-euphrates-trigger' - triggers: - - timed: '' # '0 14 * * *' - trigger: name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-euphrates-trigger' triggers: @@ -473,11 +384,7 @@ triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-baremetal-daily-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-noha-baremetal-daily-euphrates-trigger' + name: 'fuel-os-odl-nofeature-noha-baremetal-daily-euphrates-trigger' triggers: - timed: '' - trigger: @@ -488,14 +395,6 @@ name: 'fuel-os-onos-nofeature-noha-baremetal-daily-euphrates-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-baremetal-daily-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-noha-baremetal-daily-euphrates-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-euphrates-trigger' triggers: @@ -520,11 +419,7 @@ triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-master-trigger' + name: 'fuel-os-odl-nofeature-ha-virtual-daily-master-trigger' triggers: - timed: 
'' - trigger: @@ -535,14 +430,6 @@ name: 'fuel-os-onos-nofeature-ha-virtual-daily-master-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-master-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-kvm-ha-virtual-daily-master-trigger' triggers: @@ -565,11 +452,7 @@ triggers: - timed: '5 13 * * *' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-master-trigger' - triggers: - - timed: '35 15 * * *' -- trigger: - name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-master-trigger' + name: 'fuel-os-odl-nofeature-noha-virtual-daily-master-trigger' triggers: - timed: '5 18 * * *' - trigger: @@ -580,14 +463,6 @@ name: 'fuel-os-onos-nofeature-noha-virtual-daily-master-trigger' triggers: - timed: '' # '5 23 * * *' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-master-trigger' - triggers: - - timed: '' # '35 1 * * *' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-master-trigger' - triggers: - - timed: '' # '5 4 * * *' - trigger: name: 'fuel-os-nosdn-kvm-noha-virtual-daily-master-trigger' triggers: @@ -612,11 +487,7 @@ triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-euphrates-trigger' + name: 'fuel-os-odl-nofeature-ha-virtual-daily-euphrates-trigger' triggers: - timed: '' - trigger: @@ -627,14 +498,6 @@ name: 'fuel-os-onos-nofeature-ha-virtual-daily-euphrates-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-euphrates-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-kvm-ha-virtual-daily-euphrates-trigger' triggers: @@ -657,11 +520,7 @@ triggers: - timed: '' # '0 13 * * *' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-euphrates-trigger' - triggers: - - timed: '' # '30 15 * * *' -- trigger: - name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-euphrates-trigger' + name: 'fuel-os-odl-nofeature-noha-virtual-daily-euphrates-trigger' triggers: - timed: '' # '0 18 * * *' - trigger: @@ -672,14 +531,6 @@ name: 'fuel-os-onos-nofeature-noha-virtual-daily-euphrates-trigger' triggers: - timed: '' # '0 23 * * *' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-euphrates-trigger' - triggers: - - timed: '' # '30 1 * * *' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-euphrates-trigger' - triggers: - - timed: '' # '0 4 * * *' - trigger: name: 'fuel-os-nosdn-kvm-noha-virtual-daily-euphrates-trigger' triggers: @@ -704,11 +555,7 @@ triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-master-trigger' - triggers: - - timed: '0 10 * * *' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-master-trigger' + name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-master-trigger' triggers: - timed: '' - trigger: @@ -719,14 +566,6 @@ name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-master-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-master-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-master-trigger' triggers: @@ -749,11 +588,7 @@ triggers: - timed: '' - trigger: - name: 
'fuel-os-odl_l2-nofeature-noha-zte-pod1-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-noha-zte-pod1-daily-master-trigger' + name: 'fuel-os-odl-nofeature-noha-zte-pod1-daily-master-trigger' triggers: - timed: '' - trigger: @@ -764,14 +599,6 @@ name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-master-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-zte-pod1-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod1-daily-master-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-master-trigger' triggers: @@ -796,11 +623,7 @@ triggers: - timed: '0 10 * * *' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-zte-pod3-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-zte-pod3-daily-master-trigger' + name: 'fuel-os-odl-nofeature-ha-zte-pod3-daily-master-trigger' triggers: - timed: '' - trigger: @@ -811,14 +634,6 @@ name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-master-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod3-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-zte-pod3-daily-master-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-master-trigger' triggers: @@ -841,11 +656,7 @@ triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-zte-pod3-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-noha-zte-pod3-daily-master-trigger' + name: 'fuel-os-odl-nofeature-noha-zte-pod3-daily-master-trigger' triggers: - timed: '' - trigger: @@ -856,14 +667,6 @@ name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-master-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-zte-pod3-daily-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod3-daily-master-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-master-trigger' triggers: @@ -888,11 +691,7 @@ triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-euphrates-trigger' - triggers: - - timed: '' # '0 2 * * *' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-euphrates-trigger' + name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-euphrates-trigger' triggers: - timed: '' - trigger: @@ -903,14 +702,6 @@ name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-euphrates-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-euphrates-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-euphrates-trigger' triggers: @@ -933,11 +724,7 @@ triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-zte-pod1-daily-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-noha-zte-pod1-daily-euphrates-trigger' + name: 'fuel-os-odl-nofeature-noha-zte-pod1-daily-euphrates-trigger' triggers: - timed: '' - trigger: @@ -948,14 +735,6 @@ name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-euphrates-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-noha-zte-pod1-daily-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod1-daily-euphrates-trigger' - triggers: - - timed: '' - trigger: 
name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-euphrates-trigger' triggers: @@ -980,11 +759,7 @@ triggers: - timed: '' # '0 18 * * *' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-zte-pod3-daily-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-odl-nofeature-ha-zte-pod3-daily-euphrates-trigger' triggers: - timed: '' - trigger: @@ -995,14 +770,6 @@ name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-euphrates-trigger' triggers: - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod3-daily-euphrates-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-zte-pod3-daily-euphrates-trigger' - triggers: - - timed: '' - trigger: name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-euphrates-trigger' triggers: @@ -1025,42 +792,98 @@ triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-odl-nofeature-noha-zte-pod3-daily-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l3-nofeature-noha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-noha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-euphrates-trigger' triggers: - timed: '' +#------------------------------------------------ +# ZTE POD1 Triggers running against danube branch +#------------------------------------------------ - trigger: - name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '0 2 * * 6' +- trigger: + name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-danube-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-danube-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-euphrates-trigger' + name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +# NOHA Scenarios +- trigger: + name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 'fuel-os-odl-nofeature-noha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 
'fuel-os-onos-sfc-noha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-danube-trigger' + triggers: + - timed: '' +- trigger: + name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-danube-trigger' triggers: - timed: '' diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh index eebd8bc71..6525c7ccb 100755 --- a/jjb/fuel/fuel-deploy.sh +++ b/jjb/fuel/fuel-deploy.sh @@ -1,7 +1,7 @@ #!/bin/bash # SPDX-license-identifier: Apache-2.0 ############################################################################## -# Copyright (c) 2016 Ericsson AB and others. +# Copyright (c) 2017 Ericsson AB, Mirantis Inc., Enea Software AB and others. # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, Version 2.0 # which accompanies this distribution, and is available at @@ -12,7 +12,7 @@ set -o pipefail export TERM="vt220" -if [[ "$BRANCH" != 'master' ]]; then +if [[ "$BRANCH" =~ 'danube' ]]; then # source the file so we get OPNFV vars source latest.properties @@ -20,82 +20,99 @@ if [[ "$BRANCH" != 'master' ]]; then echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment" fi -if [[ "$JOB_NAME" =~ "merge" ]]; then - # set simplest scenario for virtual deploys to run for merges +# shellcheck disable=SC2153 +if [[ "${JOB_NAME}" =~ 'verify' ]]; then + # set simplest scenario for virtual deploys to run for verify DEPLOY_SCENARIO="os-nosdn-nofeature-ha" -elif [[ "$BRANCH" != 'master' ]]; then - # for none-merge deployments +elif [[ "${BRANCH}" =~ 'danube' ]]; then + # for Danube deployments (no artifact for current master or newer branches) # checkout the commit that was used for building the downloaded artifact # to make sure the ISO and deployment mechanism uses same versions - echo "Checking out $OPNFV_GIT_SHA1" - git checkout $OPNFV_GIT_SHA1 --quiet + echo "Checking out ${OPNFV_GIT_SHA1}" + git checkout "${OPNFV_GIT_SHA1}" --quiet fi # set deployment parameters -export TMPDIR=$HOME/tmpdir +export TMPDIR=${HOME}/tmpdir BRIDGE=${BRIDGE:-pxebr} +# shellcheck disable=SC2153 LAB_NAME=${NODE_NAME/-*} +# shellcheck disable=SC2153 POD_NAME=${NODE_NAME/*-} - -if [[ "$NODE_NAME" =~ "virtual" ]]; then - POD_NAME="virtual_kvm" -fi - -# we currently support ericsson, intel, lf and zte labs -if [[ ! "$LAB_NAME" =~ (ericsson|intel|lf|zte) ]]; then - echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!" - exit 1 +# Armband might override LAB_CONFIG_URL, all others use the default +LAB_CONFIG_URL=${LAB_CONFIG_URL:-'ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab'} + +# Fuel requires deploy script to be ran with sudo, Armband does not +SUDO='sudo -E' +if [ "${PROJECT}" = 'fuel' ]; then + # Fuel does not use any POD-specific configuration for virtual deploys + if [[ "${NODE_NAME}" =~ "virtual" ]]; then + POD_NAME="virtual_kvm" + fi + # Fuel currently supports ericsson, intel, lf and zte labs + if [[ ! "${LAB_NAME}" =~ (ericsson|intel|lf|zte) ]]; then + echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!" + exit 1 + fi else - echo "Using configuration for $LAB_NAME" + SUDO= + # Armband currently supports arm, enea labs + if [[ ! 
"${LAB_NAME}" =~ (arm|enea) ]]; then + echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!" + exit 1 + fi fi -# create TMPDIR if it doesn't exist -export TMPDIR=$HOME/tmpdir -mkdir -p $TMPDIR - -# change permissions down to TMPDIR -chmod a+x $HOME -chmod a+x $TMPDIR - -# clone the securedlab repo -cd $WORKSPACE -echo "Cloning securedlab repo $BRANCH" -git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet \ - --branch $BRANCH - -# Source local_env if present, which contains POD-specific config -local_env="${WORKSPACE}/securedlab/labs/$LAB_NAME/$POD_NAME/fuel/config/local_env" -if [ -e "${local_env}" ]; then - echo "-- Sourcing local environment file" - source "${local_env}" +echo "Using configuration for ${LAB_NAME}" + +# create TMPDIR if it doesn't exist, change permissions +mkdir -p "${TMPDIR}" +chmod a+x "${HOME}" "${TMPDIR}" + +cd "${WORKSPACE}" || exit 1 +if [[ "${LAB_CONFIG_URL}" =~ ^(git|ssh):// ]]; then + echo "Cloning securedlab repo ${BRANCH}" + git clone --quiet --branch "${BRANCH}" "${LAB_CONFIG_URL}" lab-config + LAB_CONFIG_URL=file://${WORKSPACE}/lab-config + + # Source local_env if present, which contains POD-specific config + local_env="${WORKSPACE}/lab-config/labs/${LAB_NAME}/${POD_NAME}/fuel/config/local_env" + if [ -e "${local_env}" ]; then + echo "-- Sourcing local environment file" + source "${local_env}" + fi fi +# releng wants us to use nothing else but opnfv.iso for now. We comply. +ISO_FILE=file://${WORKSPACE}/opnfv.iso + # log file name FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz" # construct the command -DEPLOY_COMMAND="sudo $WORKSPACE/ci/deploy.sh -b file://$WORKSPACE/securedlab \ - -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso \ - -B ${DEFAULT_BRIDGE:-${BRIDGE}} -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME" +DEPLOY_COMMAND="${SUDO} ${WORKSPACE}/ci/deploy.sh -b ${LAB_CONFIG_URL} \ + -l ${LAB_NAME} -p ${POD_NAME} -s ${DEPLOY_SCENARIO} -i ${ISO_FILE} \ + -B ${DEFAULT_BRIDGE:-${BRIDGE}} -S ${TMPDIR} \ + -L ${WORKSPACE}/${FUEL_LOG_FILENAME}" # log info to console echo "Deployment parameters" echo "--------------------------------------------------------" -echo "Scenario: $DEPLOY_SCENARIO" -echo "Lab: $LAB_NAME" -echo "POD: $POD_NAME" -[[ "$BRANCH" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}" +echo "Scenario: ${DEPLOY_SCENARIO}" +echo "Lab: ${LAB_NAME}" +echo "POD: ${POD_NAME}" +[[ "${BRANCH}" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}" echo -echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..." +echo "Starting the deployment using ${INSTALLER_TYPE}. This could take some time..." echo "--------------------------------------------------------" echo # start the deployment echo "Issuing command" -echo "$DEPLOY_COMMAND" +echo "${DEPLOY_COMMAND}" echo -$DEPLOY_COMMAND +${DEPLOY_COMMAND} exit_code=$? echo @@ -103,17 +120,18 @@ echo "--------------------------------------------------------" echo "Deployment is done!" 
# upload logs for baremetal deployments -# work with virtual deployments is still going on so we skip that for the timebeing -if [[ "$JOB_NAME" =~ (baremetal-daily|baremetal-weekly) ]]; then +# work with virtual deployments is still going on, so skip that for now +if [[ "${JOB_NAME}" =~ (baremetal-daily|baremetal-weekly) ]]; then echo "Uploading deployment logs" - gsutil cp $WORKSPACE/$FUEL_LOG_FILENAME gs://$GS_URL/logs/$FUEL_LOG_FILENAME > /dev/null 2>&1 - echo "Logs are available as http://$GS_URL/logs/$FUEL_LOG_FILENAME" + gsutil cp "${WORKSPACE}/${FUEL_LOG_FILENAME}" \ + "gs://${GS_URL}/logs/${FUEL_LOG_FILENAME}" > /dev/null 2>&1 + echo "Logs are available at http://${GS_URL}/logs/${FUEL_LOG_FILENAME}" fi -if [[ $exit_code -ne 0 ]]; then +if [[ "${exit_code}" -ne 0 ]]; then echo "Deployment failed!" - exit $exit_code -else - echo "Deployment is successful!" - exit 0 + exit "${exit_code}" fi + +echo "Deployment is successful!" +exit 0 diff --git a/jjb/fuel/fuel-download-artifact.sh b/jjb/fuel/fuel-download-artifact.sh index c3b8253de..02ca10305 100755 --- a/jjb/fuel/fuel-download-artifact.sh +++ b/jjb/fuel/fuel-download-artifact.sh @@ -11,20 +11,14 @@ set -o errexit set -o pipefail # disable Fuel ISO download for master branch -[[ "$BRANCH" == 'master' ]] && exit 0 +[[ ! "$BRANCH" =~ (danube) ]] && exit 0 -# use proxy url to replace the nomral URL, for googleusercontent.com will be blocked randomly +# use proxy url to replace the normal URL, or googleusercontent.com will be blocked randomly [[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL -if [[ "$JOB_NAME" =~ "merge" ]]; then - echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties" - # get the properties file for the Fuel ISO built for a merged change - curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties -else - # get the latest.properties file in order to get info regarding latest artifact - echo "Downloading http://$GS_URL/latest.properties" - curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties -fi +# get the latest.properties file in order to get info regarding latest artifact +echo "Downloading http://$GS_URL/latest.properties" +curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties # check if we got the file [[ -f $WORKSPACE/latest.properties ]] || exit 1 @@ -36,21 +30,18 @@ source $WORKSPACE/latest.properties OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/} echo "Using $OPNFV_ARTIFACT for deployment" -# using ISOs for verify & merge jobs from local storage will be enabled later -if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then - # check if we already have the ISO to avoid redownload - ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}" - if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then - echo "ISO exists locally. Skipping the download and using the file from ISO store" - ln -s $ISOSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.iso - echo "--------------------------------------------------------" - echo - ls -al $WORKSPACE/opnfv.iso - echo - echo "--------------------------------------------------------" - echo "Done!" - exit 0 - fi +# check if we already have the ISO to avoid redownload +ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}" +if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then + echo "ISO exists locally. 
Skipping the download and using the file from ISO store" + ln -s $ISOSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.iso + echo "--------------------------------------------------------" + echo + ls -al $WORKSPACE/opnfv.iso + echo + echo "--------------------------------------------------------" + echo "Done!" + exit 0 fi [[ "$NODE_NAME" =~ (zte) ]] && OPNFV_ARTIFACT_URL=${GS_BASE_PROXY%%/*}/$OPNFV_ARTIFACT_URL diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml index 6bb7e51d0..cfcbf3695 100644 --- a/jjb/fuel/fuel-project-jobs.yml +++ b/jjb/fuel/fuel-project-jobs.yml @@ -19,201 +19,11 @@ disabled: true jobs: - - 'fuel-build-daily-{stream}' - - 'fuel-merge-build-{stream}' - - 'fuel-merge-deploy-virtual-{stream}' - 'fuel-deploy-generic-daily-{stream}' ######################## # job templates ######################## -- job-template: - name: 'fuel-build-daily-{stream}' - - disabled: '{obj:disabled}' - - concurrent: false - - properties: - - logrotate-default - - throttle: - enabled: true - max-total: 1 - max-per-node: 1 - option: 'project' - - parameters: - - project-parameter: - project: '{project}' - branch: '{branch}' - - 'opnfv-build-ubuntu-defaults' - - '{installer}-defaults' - - choice: - name: FORCE_BUILD - choices: - - 'false' - - 'true' - description: "Force build even if there is no changes in fuel repo. Default false" - - fuel-project-parameter: - gs-pathname: '{gs-pathname}' - - scm: - - git-scm - - triggers: - - timed: '0 H/4 * * *' - - wrappers: - - timeout: - timeout: 360 - fail: true - - builders: - - shell: - !include-raw-escape: ./fuel-build.sh - - shell: - !include-raw-escape: ./fuel-upload-artifact.sh - - shell: - !include-raw-escape: ./fuel-workspace-cleanup.sh - - publishers: - - email: - recipients: fzhadaev@mirantis.com - - email-jenkins-admins-on-failure - -- job-template: - name: 'fuel-merge-build-{stream}' - - disabled: '{obj:disabled}' - - concurrent: true - - parameters: - - project-parameter: - project: '{project}' - branch: '{branch}' - - 'opnfv-build-ubuntu-defaults' - - '{installer}-defaults' - - fuel-project-parameter: - gs-pathname: '{gs-pathname}' - - scm: - - git-scm - - wrappers: - - ssh-agent-wrapper - - timeout: - timeout: 360 - fail: true - - triggers: - - gerrit: - server-name: 'gerrit.opnfv.org' - trigger-on: - - change-merged-event - - comment-added-contains-event: - comment-contains-value: 'remerge' - projects: - - project-compare-type: 'ANT' - project-pattern: '{project}' - branches: - - branch-compare-type: 'ANT' - branch-pattern: '**/{branch}' - file-paths: - - compare-type: ANT - pattern: 'ci/**' - - compare-type: ANT - pattern: 'build/**' - - compare-type: ANT - pattern: 'deploy/**' - disable-strict-forbidden-file-verification: 'true' - forbidden-file-paths: - - compare-type: ANT - pattern: 'docs/**' - - builders: - - shell: - !include-raw-escape: ./fuel-build.sh - - shell: - !include-raw-escape: ./fuel-upload-artifact.sh - - shell: - !include-raw-escape: ./fuel-workspace-cleanup.sh - -- job-template: - name: 'fuel-merge-deploy-virtual-{stream}' - - disabled: true - - concurrent: true - - properties: - - logrotate-default - - throttle: - enabled: true - max-total: 2 - max-per-node: 1 - option: 'project' - - build-blocker: - use-build-blocker: true - blocking-jobs: - - 'fuel-os-.*?-virtual-daily-.*' - - 'fuel-merge-deploy-virtual-.*' - block-level: 'NODE' - - parameters: - - project-parameter: - project: '{project}' - branch: '{branch}' - - 'fuel-virtual-defaults': - installer: '{installer}' - - '{installer}-defaults' - - 
fuel-project-parameter: - gs-pathname: '{gs-pathname}' - scm: - - git-scm - - wrappers: - - ssh-agent-wrapper - - triggers: - - gerrit: - server-name: 'gerrit.opnfv.org' - trigger-on: - - change-merged-event - - comment-added-contains-event: - comment-contains-value: 'remerge' - projects: - - project-compare-type: 'ANT' - project-pattern: '{project}' - branches: - - branch-compare-type: 'ANT' - branch-pattern: '**/{branch}' - file-paths: - - compare-type: ANT - pattern: 'ci/**' - - compare-type: ANT - pattern: 'build/**' - - compare-type: ANT - pattern: 'deploy/**' - disable-strict-forbidden-file-verification: 'true' - forbidden-file-paths: - - compare-type: ANT - pattern: 'docs/**' - dependency-jobs: 'fuel-merge-build-{stream}' - - builders: - - shell: - !include-raw-escape: ./fuel-download-artifact.sh - - shell: - !include-raw-escape: ./fuel-deploy.sh - - shell: - !include-raw-escape: ./fuel-workspace-cleanup.sh - - publishers: - - email: - recipients: fzhadaev@mirantis.com - - email-jenkins-admins-on-failure - - job-template: name: 'fuel-deploy-generic-daily-{stream}' @@ -238,14 +48,15 @@ - project-parameter: project: '{project}' branch: '{branch}' - - '{installer}-defaults' + - '{installer}-defaults': + gs-pathname: '{gs-pathname}' - string: name: GIT_BASE default: https://gerrit.opnfv.org/gerrit/$PROJECT description: 'Git URL to use on this Jenkins Slave' - string: name: DEPLOY_SCENARIO - default: 'os-odl_l2-nofeature-ha' + default: 'os-odl-nofeature-ha' - node: name: SLAVE_NAME description: 'Slave name on Jenkins' @@ -255,8 +66,6 @@ - ericsson-pod1 default-slaves: - ericsson-pod2 - - fuel-project-parameter: - gs-pathname: '{gs-pathname}' scm: - git-scm @@ -266,26 +75,5 @@ name: '$BUILD_NUMBER - POD: $NODE_NAME Scenario: $DEPLOY_SCENARIO' builders: - - shell: - !include-raw-escape: ./fuel-download-artifact.sh - shell: !include-raw-escape: ./fuel-deploy.sh - -######################## -# parameter macros -######################## -- parameter: - name: fuel-project-parameter - parameters: - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." - - string: - name: CACHE_DIRECTORY - default: $HOME/opnfv/cache/$INSTALLER_TYPE - description: "Directory where the cache to be used during the build is located." - - string: - name: GS_URL - default: artifacts.opnfv.org/$PROJECT{gs-pathname} - description: "URL to Google Storage." diff --git a/jjb/fuel/fuel-upload-artifact.sh b/jjb/fuel/fuel-upload-artifact.sh deleted file mode 100755 index d1ac3509b..000000000 --- a/jjb/fuel/fuel-upload-artifact.sh +++ /dev/null @@ -1,118 +0,0 @@ -#!/bin/bash -# SPDX-license-identifier: Apache-2.0 -############################################################################## -# Copyright (c) 2016 Ericsson AB and others. -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -set -o pipefail - -# check if we built something -if [ -f $WORKSPACE/.noupload ]; then - echo "Nothing new to upload. Exiting." 
- /bin/rm -f $WORKSPACE/.noupload - exit 0 -fi - -# source the opnfv.properties to get ARTIFACT_VERSION -source $WORKSPACE/opnfv.properties - -nfsstore () { -# storing ISOs for verify & merge jobs will be done once we get the disk array -if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then - # store ISO locally on NFS first - ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}" - if [[ -d "$ISOSTORE" ]]; then - # remove all but most recent 5 ISOs first to keep iso_mount clean & tidy - cd $ISOSTORE - ls -tp | grep -v '/' | tail -n +6 | xargs -d '\n' /bin/rm -f -- - - # store ISO - echo "Storing $INSTALLER_TYPE artifact on NFS..." - /bin/cp -f $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \ - $ISOSTORE/opnfv-$OPNFV_ARTIFACT_VERSION.iso - fi -fi -} - -importkey () { -# clone releng repository -echo "Cloning releng repository..." -[ -d releng ] && rm -rf releng -git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null -#this is where we import the siging key -if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then - source $WORKSPACE/releng/utils/gpg_import_key.sh -fi -} - -signiso () { -gpg2 -vvv --batch --yes --no-tty \ - --default-key opnfv-helpdesk@rt.linuxfoundation.org \ - --passphrase besteffort \ - --detach-sig $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso - -gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig -echo "ISO signature Upload Complete!" -} - -uploadiso () { -# log info to console -echo "Uploading $INSTALLER_TYPE artifact. This could take some time..." -echo - -cd $WORKSPACE -# upload artifact and additional files to google storage -gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \ - gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1 -gsutil cp $WORKSPACE/opnfv.properties \ - gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1 -if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then - gsutil cp $WORKSPACE/opnfv.properties \ - gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1 -elif [[ "$JOB_NAME" =~ "merge" ]]; then - echo "Uploaded Fuel ISO for a merged change" -fi - -gsutil -m setmeta \ - -h "Content-Type:text/html" \ - -h "Cache-Control:private, max-age=0, no-transform" \ - gs://$GS_URL/latest.properties \ - gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1 - -gsutil -m setmeta \ - -h "Cache-Control:private, max-age=0, no-transform" \ - gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1 - -# disabled errexit due to gsutil setmeta complaints -# BadRequestException: 400 Invalid argument -# check if we uploaded the file successfully to see if things are fine -gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1 -if [[ $? -ne 0 ]]; then - echo "Problem while uploading artifact!" - echo "Check log $WORKSPACE/gsutil.iso.log on the machine where this build is done." - exit 1 -fi - -echo "Done!" 
-echo -echo "--------------------------------------------------------" -echo -echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso" -echo -echo "--------------------------------------------------------" -echo -} - -nfsstore - -if [[ "$JOB_NAME" =~ merge ]]; then - uploadiso -elif [[ "$JOB_NAME" =~ build ]]; then - importkey - signiso - uploadiso -fi - diff --git a/jjb/fuel/fuel-verify-jobs.yml b/jjb/fuel/fuel-verify-jobs.yml index 469ca9258..45197fc4e 100644 --- a/jjb/fuel/fuel-verify-jobs.yml +++ b/jjb/fuel/fuel-verify-jobs.yml @@ -21,13 +21,11 @@ ##################################### phase: - 'basic': - slave-label: 'opnfv-build-ubuntu' - - 'build': - slave-label: 'opnfv-build-ubuntu' + slave-label: 'fuel-virtual' - 'deploy-virtual': - slave-label: 'opnfv-build-ubuntu' + slave-label: 'fuel-virtual' - 'smoke-test': - slave-label: 'opnfv-build-ubuntu' + slave-label: 'fuel-virtual' ##################################### # jobs ##################################### @@ -52,6 +50,11 @@ enabled: true max-total: 4 option: 'project' + - build-blocker: + use-build-blocker: true + blocking-jobs: + - 'fuel-os-.*?-virtual-daily-.*' + block-level: 'NODE' scm: - git-scm-gerrit @@ -85,9 +88,7 @@ - compare-type: ANT pattern: 'ci/**' - compare-type: ANT - pattern: 'build/**' - - compare-type: ANT - pattern: 'deploy/**' + pattern: 'mcp/**' disable-strict-forbidden-file-verification: 'true' forbidden-file-paths: - compare-type: ANT @@ -98,8 +99,9 @@ - project-parameter: project: '{project}' branch: '{branch}' - - 'opnfv-build-ubuntu-defaults' - - 'fuel-verify-defaults': + - 'fuel-virtual-defaults': + installer: '{installer}' + - '{installer}-defaults': gs-pathname: '{gs-pathname}' builders: @@ -119,20 +121,6 @@ node-parameters: false kill-phase-on: FAILURE abort-all-job: true - - multijob: - name: build - condition: SUCCESSFUL - projects: - - name: 'fuel-verify-build-{stream}' - current-parameters: false - predefined-parameters: | - BRANCH=$BRANCH - GERRIT_REFSPEC=$GERRIT_REFSPEC - GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER - GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE - node-parameters: false - kill-phase-on: FAILURE - abort-all-job: true - multijob: name: deploy-virtual condition: SUCCESSFUL @@ -173,7 +161,8 @@ - logrotate-default - throttle: enabled: true - max-total: 6 + max-total: 2 + max-per-node: 1 option: 'project' - build-blocker: use-build-blocker: true @@ -195,8 +184,9 @@ project: '{project}' branch: '{branch}' - '{slave-label}-defaults' - - '{installer}-defaults' - - 'fuel-verify-defaults': + - 'fuel-virtual-defaults': + installer: '{installer}' + - '{installer}-defaults': gs-pathname: '{gs-pathname}' builders: @@ -214,21 +204,11 @@ echo "Not activated!" -- builder: - name: 'fuel-verify-build-macro' - builders: - - shell: - !include-raw: ./fuel-build.sh - - shell: - !include-raw: ./fuel-workspace-cleanup.sh - - builder: name: 'fuel-verify-deploy-virtual-macro' builders: - - shell: | - #!/bin/bash - - echo "Not activated!" + - shell: + !include-raw: ./fuel-deploy.sh - builder: name: 'fuel-verify-smoke-test-macro' @@ -237,21 +217,3 @@ #!/bin/bash echo "Not activated!" -##################################### -# parameter macros -##################################### -- parameter: - name: 'fuel-verify-defaults' - parameters: - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." 
- - string: - name: CACHE_DIRECTORY - default: $HOME/opnfv/cache/$INSTALLER_TYPE - description: "Directory where the cache to be used during the build is located." - - string: - name: GS_URL - default: artifacts.opnfv.org/$PROJECT{gs-pathname} - description: "URL to Google Storage." diff --git a/jjb/fuel/fuel-weekly-jobs.yml b/jjb/fuel/fuel-weekly-jobs.yml index 57e36e164..e1563ea38 100644 --- a/jjb/fuel/fuel-weekly-jobs.yml +++ b/jjb/fuel/fuel-weekly-jobs.yml @@ -72,6 +72,7 @@ blocking-jobs: - 'fuel-os-.*?-{pod}-daily-.*' - 'fuel-os-.*?-{pod}-weekly-.*' + - 'fuel-verify-.*' block-level: 'NODE' wrappers: @@ -85,14 +86,13 @@ - project-parameter: project: '{project}' branch: '{branch}' - - '{installer}-defaults' + - '{installer}-defaults': + gs-pathname: '{gs-pathname}' - '{slave-label}-defaults': installer: '{installer}' - string: name: DEPLOY_SCENARIO default: '{scenario}' - - fuel-weekly-parameter: - gs-pathname: '{gs-pathname}' builders: - description-setter: @@ -148,14 +148,13 @@ - project-parameter: project: '{project}' branch: '{branch}' - - '{installer}-defaults' + - '{installer}-defaults': + gs-pathname: '{gs-pathname}' - '{slave-label}-defaults': installer: '{installer}' - string: name: DEPLOY_SCENARIO - default: 'os-odl_l2-nofeature-ha' - - fuel-weekly-parameter: - gs-pathname: '{gs-pathname}' + default: 'os-odl-nofeature-ha' - string: name: DEPLOY_TIMEOUT default: '150' @@ -171,8 +170,6 @@ builders: - description-setter: description: "Built on $NODE_NAME" - - shell: - !include-raw-escape: ./fuel-download-artifact.sh - shell: !include-raw-escape: ./fuel-deploy.sh @@ -182,24 +179,6 @@ - email-jenkins-admins-on-failure ######################## -# parameter macros -######################## -- parameter: - name: fuel-weekly-parameter - parameters: - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." - - string: - name: CACHE_DIRECTORY - default: $HOME/opnfv/cache/$INSTALLER_TYPE - description: "Directory where the cache to be used during the build is located." - - string: - name: GS_URL - default: artifacts.opnfv.org/$PROJECT{gs-pathname} - description: "URL to Google Storage." -######################## # trigger macros ######################## #----------------------------------------------- diff --git a/jjb/fuel/fuel-workspace-cleanup.sh b/jjb/fuel/fuel-workspace-cleanup.sh deleted file mode 100755 index d8948c7a0..000000000 --- a/jjb/fuel/fuel-workspace-cleanup.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -# SPDX-license-identifier: Apache-2.0 -############################################################################## -# Copyright (c) 2016 Ericsson AB and others. -# All rights reserved. 
This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -set -o errexit -set -o nounset -set -o pipefail - -# delete the $WORKSPACE to open some space -/bin/rm -rf $WORKSPACE diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh old mode 100644 new mode 100755 index f0e08e171..9be9fe57f --- a/jjb/functest/functest-alpine.sh +++ b/jjb/functest/functest-alpine.sh @@ -68,9 +68,14 @@ volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol} ${cacert_file set +e -tiers=(healthcheck smoke features vnf) +if ${FUNCTEST_SUITE_NAME} == 'healthcheck'; then + tiers=(healthcheck) +else + tiers=(healthcheck smoke features vnf) +fi + for tier in ${tiers[@]}; do - FUNCTEST_IMAGE=ollivier/functest-${tier} + FUNCTEST_IMAGE=opnfv/functest-${tier} echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..." docker pull ${FUNCTEST_IMAGE}>/dev/null cmd="docker run ${envs} ${volumes} ${FUNCTEST_IMAGE}" diff --git a/jjb/functest/functest-daily-jobs.yml b/jjb/functest/functest-daily-jobs.yml index 23649fc08..d34437265 100644 --- a/jjb/functest/functest-daily-jobs.yml +++ b/jjb/functest/functest-daily-jobs.yml @@ -150,22 +150,6 @@ slave-label: '{pod}' installer: apex <<: *master - - arm-pod2: - slave-label: '{pod}' - installer: fuel - <<: *master - - arm-pod5: - slave-label: '{pod}' - installer: fuel - <<: *master - - arm-pod4: - slave-label: '{pod}' - installer: fuel - <<: *master - - arm-virtual2: - slave-label: '{pod}' - installer: fuel - <<: *master - zte-pod1: slave-label: '{pod}' installer: fuel @@ -186,22 +170,6 @@ slave-label: '{pod}' installer: fuel <<: *danube - - arm-pod2: - slave-label: '{pod}' - installer: fuel - <<: *danube - - arm-pod5: - slave-label: '{pod}' - installer: fuel - <<: *danube - - arm-pod4: - slave-label: '{pod}' - installer: fuel - <<: *danube - - arm-virtual2: - slave-label: '{pod}' - installer: fuel - <<: *danube # PODs for verify jobs triggered by each patch upload # - ool-virtual1: # slave-label: '{pod}' diff --git a/jjb/functest/functest-project-jobs.yml b/jjb/functest/functest-project-jobs.yml index 07d5df46e..c25e4ab4a 100644 --- a/jjb/functest/functest-project-jobs.yml +++ b/jjb/functest/functest-project-jobs.yml @@ -9,6 +9,7 @@ jobs: - 'functest-verify-{stream}' + - 'functest-verify-{phase}-{stream}' - 'functest-docs-upload-{stream}' stream: @@ -21,11 +22,21 @@ gs-pathname: '/{stream}' disabled: true + phase: + - 'unit-tests-and-docs': + slave-label: 'opnfv-build-ubuntu' + - 'build-x86_64': + slave-label: 'opnfv-build-ubuntu' + - 'build-aarch64': + slave-label: 'opnfv-build-ubuntu-arm' + - job-template: name: 'functest-verify-{stream}' disabled: '{obj:disabled}' + project-type: 'multijob' + parameters: - project-parameter: project: '{project}' @@ -35,6 +46,109 @@ scm: - git-scm-gerrit + triggers: + - 'functest-verify-triggers-macro': + project: '{project}' + branch: '{branch}' + + builders: + - shell: | + #!/bin/bash + # we do nothing here as the main stuff will be done + # in phase jobs + echo "Triggering phase jobs!" 
+ - multijob: + name: 'functest-build-and-unittest' + execution-type: PARALLEL + projects: + - name: 'functest-verify-unit-tests-and-docs-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE + git-revision: true + node-parameters: false + kill-phase-on: FAILURE + abort-all-job: false + - name: 'functest-verify-build-x86_64-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE + ARCH=x86_64 + git-revision: true + node-parameters: false + kill-phase-on: FAILURE + abort-all-job: false + - name: 'functest-verify-build-aarch64-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE + ARCH=aarch64 + git-revision: true + node-parameters: false + kill-phase-on: FAILURE + abort-all-job: false + +- job-template: + name: 'functest-verify-{phase}-{stream}' + + disabled: '{obj:disabled}' + + wrappers: + - ssh-agent-wrapper + - build-timeout: + timeout: 30 + + parameters: + - project-parameter: + project: '{project}' + branch: '{branch}' + - '{slave-label}-defaults' + + scm: + - git-scm-gerrit + + builders: + - 'functest-verify-{phase}-builders-macro' + + publishers: + - 'functest-verify-{phase}-publishers-macro' + +- job-template: + name: 'functest-docs-upload-{stream}' + + disabled: '{obj:disabled}' + + parameters: + - project-parameter: + project: '{project}' + branch: '{branch}' + - 'opnfv-build-ubuntu-defaults' + + scm: + - git-scm + + triggers: + - 'functest-docs-upload-triggers-macro': + project: '{project}' + branch: '{branch}' + + builders: + - functest-upload-doc-artifact + +################################ +# job triggers +################################ +- trigger: + name: 'functest-verify-triggers-macro' triggers: - gerrit: server-name: 'gerrit.opnfv.org' @@ -58,44 +172,8 @@ forbidden-file-paths: - compare-type: ANT pattern: 'docs/**|.gitignore' - - builders: - - functest-unit-tests-and-docs-build - - publishers: - - junit: - results: nosetests.xml - - cobertura: - report-file: "coverage.xml" - only-stable: "true" - health-auto-update: "true" - stability-auto-update: "true" - zoom-coverage-chart: "true" - targets: - - files: - healthy: 10 - unhealthy: 20 - failing: 30 - - method: - healthy: 50 - unhealthy: 40 - failing: 30 - - email-jenkins-admins-on-failure - -- job-template: - name: 'functest-docs-upload-{stream}' - - disabled: '{obj:disabled}' - - parameters: - - project-parameter: - project: '{project}' - branch: '{branch}' - - 'opnfv-build-ubuntu-defaults' - - scm: - - git-scm - +- trigger: + name: 'functest-docs-upload-triggers-macro' triggers: - gerrit: server-name: 'gerrit.opnfv.org' @@ -113,23 +191,60 @@ forbidden-file-paths: - compare-type: ANT pattern: 'docs/**|.gitignore' - - builders: - - functest-upload-doc-artifact - ################################ # job builders ################################ - builder: - name: functest-unit-tests-and-docs-build + name: 'functest-verify-unit-tests-and-docs-builders-macro' builders: - shell: | cd $WORKSPACE && tox - - builder: - name: functest-upload-doc-artifact + name: 
'functest-verify-build-x86_64-builders-macro' + builders: + - shell: | + echo "Not activated!" +- builder: + name: 'functest-verify-build-aarch64-builders-macro' + builders: + - shell: | + echo "Not activated!" +- builder: + name: 'functest-upload-doc-artifact' builders: - shell: | cd $WORKSPACE && tox -edocs wget -O - https://git.opnfv.org/releng/plain/utils/upload-artifact.sh | bash -s "api/_build" "docs" +################################ +# job publishers +################################ +- publisher: + name: 'functest-verify-unit-tests-and-docs-publishers-macro' + publishers: + - junit: + results: nosetests.xml + - cobertura: + report-file: "coverage.xml" + only-stable: "true" + health-auto-update: "true" + stability-auto-update: "true" + zoom-coverage-chart: "true" + targets: + - files: + healthy: 10 + unhealthy: 20 + failing: 30 + - method: + healthy: 50 + unhealthy: 40 + failing: 30 + - email-jenkins-admins-on-failure +- publisher: + name: 'functest-verify-build-x86_64-publishers-macro' + publishers: + - email-jenkins-admins-on-failure +- publisher: + name: 'functest-verify-build-aarch64-publishers-macro' + publishers: + - email-jenkins-admins-on-failure diff --git a/jjb/functest/set-functest-env.sh b/jjb/functest/set-functest-env.sh index e54c3bf13..f18f054a5 100755 --- a/jjb/functest/set-functest-env.sh +++ b/jjb/functest/set-functest-env.sh @@ -6,15 +6,20 @@ set +o pipefail [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null" +DEPLOY_TYPE=baremetal +[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt +HOST_ARCH=$(uname -m) + # Prepare OpenStack credentials volume +rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds" + if [[ ${INSTALLER_TYPE} == 'joid' ]]; then rc_file_vol="-v $LAB_CONFIG/admin-openrc:/home/opnfv/functest/conf/openstack.creds" elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then cacert_file_vol="-v ${HOME}/os_cacert:/home/opnfv/functest/conf/os_cacert" echo "export OS_CACERT=/home/opnfv/functest/conf/os_cacert" >> ${HOME}/opnfv-openrc.sh - rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds" -else - rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds" +elif [[ ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_TYPE} == 'baremetal' ]]; then + cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert" fi @@ -23,17 +28,13 @@ if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! 
sudo iptables -L FOR sudo iptables -I FORWARD -j RETURN fi -DEPLOY_TYPE=baremetal -[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt -HOST_ARCH=$(uname -m) - echo "Functest: Start Docker and prepare environment" if [ "$BRANCH" != 'stable/danube' ]; then echo "Functest: Download images that will be used by test cases" images_dir="${HOME}/opnfv/functest/images" chmod +x ${WORKSPACE}/functest/ci/download_images.sh - ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} > ${redirect} 2>&1 + ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} > ${redirect} images_vol="-v ${images_dir}:/home/opnfv/functest/images" echo "Functest: Images successfully downloaded" fi diff --git a/jjb/global/installer-params.yml b/jjb/global/installer-params.yml index a81dce3fe..b64a41e91 100644 --- a/jjb/global/installer-params.yml +++ b/jjb/global/installer-params.yml @@ -1,142 +1,148 @@ +--- - parameter: name: 'apex-defaults' parameters: - - string: - name: INSTALLER_IP - default: '192.168.X.X' - description: 'IP of the installer' - - string: - name: INSTALLER_TYPE - default: apex - description: 'Installer used for deploying OPNFV on this POD' - - string: - name: EXTERNAL_NETWORK - default: 'external' - description: 'external network for test' + - string: + name: INSTALLER_IP + default: '192.168.X.X' + description: 'IP of the installer' + - string: + name: INSTALLER_TYPE + default: apex + description: 'Installer used for deploying OPNFV on this POD' + - string: + name: EXTERNAL_NETWORK + default: 'external' + description: 'external network for test' - parameter: name: 'compass-defaults' parameters: - - string: - name: INSTALLER_IP - default: '192.168.200.2' - description: 'IP of the installer' - - string: - name: INSTALLER_TYPE - default: compass - description: 'Installer used for deploying OPNFV on this POD' - - string: - name: EXTERNAL_NETWORK - default: 'ext-net' - description: 'external network for test' + - string: + name: INSTALLER_IP + default: '192.168.200.2' + description: 'IP of the installer' + - string: + name: INSTALLER_TYPE + default: compass + description: 'Installer used for deploying OPNFV on this POD' + - string: + name: EXTERNAL_NETWORK + default: 'ext-net' + description: 'external network for test' - parameter: name: 'fuel-defaults' parameters: - - string: - name: INSTALLER_IP - default: '10.20.0.2' - description: 'IP of the installer' - - string: - name: SALT_MASTER_IP - default: '192.168.10.100' - description: 'IP of the salt master (for mcp deployments)' - - string: - name: SSH_KEY - default: '/tmp/mcp.rsa' - description: 'Path to private SSH key to access environment nodes' - - string: - name: INSTALLER_TYPE - default: fuel - description: 'Installer used for deploying OPNFV on this POD' - - string: - name: EXTERNAL_NETWORK - default: 'admin_floating_net' - description: 'external network for test' - - string: - name: BRIDGE - default: 'pxebr' - description: 'Bridge(s) to be used by salt master' + - string: + name: INSTALLER_IP + default: '10.20.0.2' + description: 'IP of the installer' + - string: + name: SALT_MASTER_IP + default: '192.168.10.100' + description: 'IP of the salt master (for mcp deployments)' + - string: + name: SSH_KEY + default: "/var/lib/opnfv/mcp.rsa" + description: 'Path to private SSH key to access environment nodes' + - string: + name: INSTALLER_TYPE + default: fuel + description: 'Installer used for deploying OPNFV on this POD' + - string: + name: EXTERNAL_NETWORK + default: 'admin_floating_net' + description: 'external network for test' + - string: + name: 
BRIDGE + default: 'pxebr' + description: 'Bridge(s) to be used by salt master' + - string: + name: GS_URL + default: '$GS_BASE{gs-pathname}' + description: "URL to Google Storage." - parameter: name: 'joid-defaults' parameters: - - string: - name: INSTALLER_IP - default: '192.168.122.5' - description: 'IP of the installer' - - string: - name: INSTALLER_TYPE - default: joid - description: 'Installer used for deploying OPNFV on this POD' - - string: - name: MODEL - default: 'os' - description: 'Model to deploy (os|k8)' - - string: - name: OS_RELEASE - default: 'ocata' - description: 'OpenStack release (mitaka|newton|ocata)' - - string: - name: EXTERNAL_NETWORK - default: ext-net - description: "External network used for Floating ips." - - string: - name: LAB_CONFIG - default: "$HOME/joid_config" - description: "Local lab config and Openstack openrc location" - - string: - name: MAAS_REINSTALL - default: 'false' - description: "Reinstall MAAS and Bootstrap before deploy [true/false]" - - string: - name: UBUNTU_DISTRO - default: 'xenial' - description: "Ubuntu distribution to use for Openstack (xenial)" - - string: - name: CPU_ARCHITECTURE - default: 'amd64' - description: "CPU Architecture to use for Ubuntu distro " + - string: + name: INSTALLER_IP + default: '192.168.122.5' + description: 'IP of the installer' + - string: + name: INSTALLER_TYPE + default: joid + description: 'Installer used for deploying OPNFV on this POD' + - string: + name: MODEL + default: 'os' + description: 'Model to deploy (os|k8)' + - string: + name: OS_RELEASE + default: 'ocata' + description: 'OpenStack release (mitaka|newton|ocata)' + - string: + name: EXTERNAL_NETWORK + default: ext-net + description: "External network used for Floating ips." + - string: + name: LAB_CONFIG + default: "$HOME/joid_config" + description: "Local lab config and Openstack openrc location" + - string: + name: MAAS_REINSTALL + default: 'false' + description: "Reinstall MAAS and Bootstrap before deploy [true/false]" + - string: + name: UBUNTU_DISTRO + default: 'xenial' + description: "Ubuntu distribution to use for Openstack (xenial)" + - string: + name: CPU_ARCHITECTURE + default: 'amd64' + description: "CPU Architecture to use for Ubuntu distro " - parameter: name: 'daisy-defaults' parameters: - - string: - name: INSTALLER_IP - default: '10.20.7.3' - description: 'IP of the installer' - - string: - name: INSTALLER_TYPE - default: daisy - description: 'Installer used for deploying OPNFV on this POD' - - string: - name: BRIDGE - default: 'br7' - description: 'pxe bridge for booting of Daisy master' + - string: + name: INSTALLER_IP + default: '10.20.7.3' + description: 'IP of the installer' + - string: + name: INSTALLER_TYPE + default: daisy + description: 'Installer used for deploying OPNFV on this POD' + - string: + name: BRIDGE + default: 'br7' + description: 'pxe bridge for booting of Daisy master' - parameter: name: 'infra-defaults' parameters: - - string: - name: INSTALLER_IP - default: '192.168.122.2' - description: 'IP of the installer' - - string: - name: INSTALLER_TYPE - default: infra - description: 'Installer used for deploying OPNFV on this POD' + - string: + name: INSTALLER_IP + default: '192.168.122.2' + description: 'IP of the installer' + - string: + name: INSTALLER_TYPE + default: infra + description: 'Installer used for deploying OPNFV on this POD' + - parameter: name: 'netvirt-defaults' parameters: - - string: - name: INSTALLER_IP - default: '192.168.X.X' - description: 'IP of the installer' - - string: - name: 
INSTALLER_TYPE - default: apex - description: 'Installer used for deploying OPNFV on this POD' - - string: - name: EXTERNAL_NETWORK - default: 'external' - description: 'external network for test' + - string: + name: INSTALLER_IP + default: '192.168.X.X' + description: 'IP of the installer' + - string: + name: INSTALLER_TYPE + default: apex + description: 'Installer used for deploying OPNFV on this POD' + - string: + name: EXTERNAL_NETWORK + default: 'external' + description: 'external network for test' diff --git a/jjb/global/releng-defaults.yml b/jjb/global/releng-defaults.yml index 75e00f983..2e94767e8 100644 --- a/jjb/global/releng-defaults.yml +++ b/jjb/global/releng-defaults.yml @@ -1,17 +1,18 @@ +--- # jjb defaults - defaults: name: global wrappers: - - ssh-agent-wrapper + - ssh-agent-wrapper project-type: freestyle node: master properties: - - logrotate-default + - logrotate-default publishers: # Any project that has a publisher will not have this macro diff --git a/jjb/global/releng-macros.yml b/jjb/global/releng-macros.yml index 75fe8b3c3..59415f5ca 100644 --- a/jjb/global/releng-macros.yml +++ b/jjb/global/releng-macros.yml @@ -1,3 +1,4 @@ +--- # Releng macros # # NOTE: make sure macros are listed in execution ordered. @@ -14,155 +15,155 @@ - parameter: name: project-parameter parameters: - - string: - name: PROJECT - default: '{project}' - description: "JJB configured PROJECT parameter to identify an opnfv Gerrit project" - - string: - name: GS_BASE - default: artifacts.opnfv.org/$PROJECT - description: "URL to Google Storage." - - string: - name: GS_BASE_PROXY - default: build.opnfv.org/artifacts.opnfv.org/$PROJECT - description: "URL to Google Storage proxy" - - string: - name: BRANCH - default: '{branch}' - description: "JJB configured BRANCH parameter (e.g. master, stable/danube)" - - string: - name: GERRIT_BRANCH - default: '{branch}' - description: "JJB configured GERRIT_BRANCH parameter (deprecated)" + - string: + name: PROJECT + default: '{project}' + description: "JJB configured PROJECT parameter to identify an opnfv Gerrit project" + - string: + name: GS_BASE + default: artifacts.opnfv.org/$PROJECT + description: "URL to Google Storage." + - string: + name: GS_BASE_PROXY + default: build.opnfv.org/artifacts.opnfv.org/$PROJECT + description: "URL to Google Storage proxy" + - string: + name: BRANCH + default: '{branch}' + description: "JJB configured BRANCH parameter (e.g. 
master, stable/danube)" + - string: + name: GERRIT_BRANCH + default: '{branch}' + description: "JJB configured GERRIT_BRANCH parameter (deprecated)" - property: name: logrotate-default properties: - - build-discarder: - days-to-keep: 60 - num-to-keep: 200 - artifact-days-to-keep: 60 - artifact-num-to-keep: 200 + - build-discarder: + days-to-keep: 60 + num-to-keep: 200 + artifact-days-to-keep: 60 + artifact-num-to-keep: 200 - scm: name: git-scm scm: - - git: &git-scm-defaults - credentials-id: '$SSH_CREDENTIAL_ID' - url: '$GIT_BASE' - branches: - - 'origin/$BRANCH' - timeout: 15 + - git: &git-scm-defaults + credentials-id: '$SSH_CREDENTIAL_ID' + url: '$GIT_BASE' + branches: + - 'origin/$BRANCH' + timeout: 15 - scm: name: git-scm-gerrit scm: - - git: - choosing-strategy: 'gerrit' - refspec: '$GERRIT_REFSPEC' - <<: *git-scm-defaults + - git: + choosing-strategy: 'gerrit' + refspec: '$GERRIT_REFSPEC' + <<: *git-scm-defaults - scm: name: git-scm-with-submodules scm: - - git: - credentials-id: '$SSH_CREDENTIAL_ID' - url: '$GIT_BASE' - refspec: '' - branches: - - 'refs/heads/{branch}' - skip-tag: true - wipe-workspace: true - submodule: - recursive: true - timeout: 20 + - git: + credentials-id: '$SSH_CREDENTIAL_ID' + url: '$GIT_BASE' + refspec: '' + branches: + - 'refs/heads/{branch}' + skip-tag: true + wipe-workspace: true + submodule: + recursive: true + timeout: 20 - trigger: name: 'daily-trigger-disabled' triggers: - - timed: '' + - timed: '' - trigger: name: 'weekly-trigger-disabled' triggers: - - timed: '' + - timed: '' - trigger: name: gerrit-trigger-patchset-created triggers: - - gerrit: - server-name: 'gerrit.opnfv.org' - trigger-on: - - patchset-created-event: - exclude-drafts: 'false' - exclude-trivial-rebase: 'false' - exclude-no-code-change: 'false' - - draft-published-event - - comment-added-contains-event: - comment-contains-value: 'recheck' - - comment-added-contains-event: - comment-contains-value: 'reverify' - projects: - - project-compare-type: 'ANT' - project-pattern: '{project}' - branches: - - branch-compare-type: 'ANT' - branch-pattern: '**/{branch}' - file-paths: - - compare-type: 'ANT' - pattern: '{files}' - skip-vote: - successful: false - failed: false - unstable: false - notbuilt: false + - gerrit: + server-name: 'gerrit.opnfv.org' + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - draft-published-event + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + file-paths: + - compare-type: 'ANT' + pattern: '{files}' + skip-vote: + successful: false + failed: false + unstable: false + notbuilt: false - trigger: name: gerrit-trigger-change-merged triggers: - - gerrit: - server-name: 'gerrit.opnfv.org' - trigger-on: - - change-merged-event - - comment-added-contains-event: - comment-contains-value: 'remerge' - projects: - - project-compare-type: 'ANT' - project-pattern: '{project}' - branches: - - branch-compare-type: 'ANT' - branch-pattern: '**/{branch}' - file-paths: - - compare-type: 'ANT' - pattern: '{files}' + - gerrit: + server-name: 'gerrit.opnfv.org' + trigger-on: + - change-merged-event + - comment-added-contains-event: + comment-contains-value: 'remerge' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - 
branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + file-paths: + - compare-type: 'ANT' + pattern: '{files}' - trigger: name: 'experimental' triggers: - - gerrit: - server-name: 'gerrit.opnfv.org' - trigger-on: - - comment-added-contains-event: - comment-contains-value: 'check-experimental' - projects: - - project-compare-type: 'ANT' - project-pattern: '{project}' - branches: - - branch-compare-type: 'ANT' - branch-pattern: '**/{branch}' - file-paths: - - compare-type: 'ANT' - pattern: '{files}' - skip-vote: - successful: true - failed: true - unstable: true - notbuilt: true + - gerrit: + server-name: 'gerrit.opnfv.org' + trigger-on: + - comment-added-contains-event: + comment-contains-value: 'check-experimental' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + file-paths: + - compare-type: 'ANT' + pattern: '{files}' + skip-vote: + successful: true + failed: true + unstable: true + notbuilt: true - wrapper: name: ssh-agent-wrapper wrappers: - - ssh-agent-credentials: - users: - - 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e' + - ssh-agent-credentials: + users: + - 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e' - wrapper: name: build-timeout @@ -175,97 +176,97 @@ - wrapper: name: fix-workspace-permissions wrappers: - - pre-scm-buildstep: + - pre-scm-buildstep: - shell: | - #!/bin/bash - sudo chown -R $USER:$USER $WORKSPACE || exit 1 + #!/bin/bash + sudo chown -R $USER:$USER $WORKSPACE || exit 1 - builder: name: build-html-and-pdf-docs-output builders: - - shell: | - #!/bin/bash - set -o errexit - set -o xtrace - export PATH=$PATH:/usr/local/bin/ - git clone ssh://gerrit.opnfv.org:29418/opnfvdocs docs_build/_opnfvdocs - GERRIT_COMMENT=gerrit_comment.txt ./docs_build/_opnfvdocs/scripts/docs-build.sh + - shell: | + #!/bin/bash + set -o errexit + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + git clone ssh://gerrit.opnfv.org:29418/opnfvdocs docs_build/_opnfvdocs + GERRIT_COMMENT=gerrit_comment.txt ./docs_build/_opnfvdocs/scripts/docs-build.sh - builder: name: upload-under-review-docs-to-opnfv-artifacts builders: - - shell: | - #!/bin/bash - set -o errexit - set -o pipefail - set -o xtrace - export PATH=$PATH:/usr/local/bin/ - - [[ $GERRIT_CHANGE_NUMBER =~ .+ ]] - [[ -d docs_output ]] || exit 0 - - echo - echo "###########################" - echo "UPLOADING DOCS UNDER REVIEW" - echo "###########################" - echo - - gs_base="artifacts.opnfv.org/$PROJECT/review" - gs_path="$gs_base/$GERRIT_CHANGE_NUMBER" - local_path="upload/$GERRIT_CHANGE_NUMBER" - - mkdir -p upload - mv docs_output "$local_path" - gsutil -m cp -r "$local_path" "gs://$gs_base" - - gsutil -m setmeta \ - -h "Content-Type:text/html" \ - -h "Cache-Control:private, max-age=0, no-transform" \ - "gs://$gs_path"/**.html > /dev/null 2>&1 - - echo "Document link(s):" >> gerrit_comment.txt - find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \ - sed -e "s|^$local_path| http://$gs_path|" >> gerrit_comment.txt + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + + [[ $GERRIT_CHANGE_NUMBER =~ .+ ]] + [[ -d docs_output ]] || exit 0 + + echo + echo "###########################" + echo "UPLOADING DOCS UNDER REVIEW" + echo "###########################" + echo + + gs_base="artifacts.opnfv.org/$PROJECT/review" + gs_path="$gs_base/$GERRIT_CHANGE_NUMBER" + local_path="upload/$GERRIT_CHANGE_NUMBER" + + mkdir -p upload + mv docs_output "$local_path" + gsutil -m cp -r 
"$local_path" "gs://$gs_base" + + gsutil -m setmeta \ + -h "Content-Type:text/html" \ + -h "Cache-Control:private, max-age=0, no-transform" \ + "gs://$gs_path"/**.html > /dev/null 2>&1 + + echo "Document link(s):" >> gerrit_comment.txt + find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \ + sed -e "s|^$local_path| http://$gs_path|" >> gerrit_comment.txt - builder: name: upload-generated-docs-to-opnfv-artifacts builders: - - shell: | - #!/bin/bash - set -o errexit - set -o pipefail - set -o xtrace - export PATH=$PATH:/usr/local/bin/ + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ - [[ -d docs_output ]] || exit 0 + [[ -d docs_output ]] || exit 0 - echo - echo "########################" - echo "UPLOADING GENERATED DOCS" - echo "########################" - echo + echo + echo "########################" + echo "UPLOADING GENERATED DOCS" + echo "########################" + echo - echo "gs_path="$GS_URL/docs"" - echo "local_path="upload/docs"" + echo "gs_path="$GS_URL/docs"" + echo "local_path="upload/docs"" - gs_path="$GS_URL/docs" - local_path="upload/docs" + gs_path="$GS_URL/docs" + local_path="upload/docs" - mkdir -p upload - mv docs_output "$local_path" - ls "$local_path" + mkdir -p upload + mv docs_output "$local_path" + ls "$local_path" - echo "gsutil -m cp -r "$local_path"/* "gs://$gs_path"" - gsutil -m cp -r "$local_path"/* "gs://$gs_path" + echo "gsutil -m cp -r "$local_path"/* "gs://$gs_path"" + gsutil -m cp -r "$local_path"/* "gs://$gs_path" - gsutil -m setmeta \ - -h "Content-Type:text/html" \ - -h "Cache-Control:private, max-age=0, no-transform" \ - "gs://$gs_path"/**.html > /dev/null 2>&1 + gsutil -m setmeta \ + -h "Content-Type:text/html" \ + -h "Cache-Control:private, max-age=0, no-transform" \ + "gs://$gs_path"/**.html > /dev/null 2>&1 - echo "Document link(s):" >> gerrit_comment.txt - find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \ - sed -e "s|^$local_path| http://$gs_path|" >> gerrit_comment.txt + echo "Document link(s):" >> gerrit_comment.txt + find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \ + sed -e "s|^$local_path| http://$gs_path|" >> gerrit_comment.txt # To take advantage of this macro, have your build write # out the file 'gerrit_comment.txt' with information to post @@ -273,213 +274,223 @@ - builder: name: report-build-result-to-gerrit builders: - - shell: | - #!/bin/bash - set -o errexit - set -o pipefail - set -o xtrace - export PATH=$PATH:/usr/local/bin/ - if [[ -e gerrit_comment.txt ]] ; then - echo - echo "posting review comment to gerrit..." - echo - cat gerrit_comment.txt - echo - ssh -p 29418 gerrit.opnfv.org \ - "gerrit review -p $GERRIT_PROJECT \ - -m '$(cat gerrit_comment.txt)' \ - $GERRIT_PATCHSET_REVISION \ - --notify NONE" - fi + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + if [[ -e gerrit_comment.txt ]] ; then + echo + echo "posting review comment to gerrit..." 
+ echo + cat gerrit_comment.txt + echo + ssh -p 29418 gerrit.opnfv.org \ + "gerrit review -p $GERRIT_PROJECT \ + -m '$(cat gerrit_comment.txt)' \ + $GERRIT_PATCHSET_REVISION \ + --notify NONE" + fi - builder: name: remove-old-docs-from-opnfv-artifacts builders: - - shell: | - #!/bin/bash - set -o errexit - set -o pipefail - set -o xtrace - export PATH=$PATH:/usr/local/bin/ - - [[ $GERRIT_CHANGE_NUMBER =~ .+ ]] - - gs_path="artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER" - - if gsutil ls "gs://$gs_path" > /dev/null 2>&1 ; then - echo - echo "Deleting Out-of-dated Documents..." - gsutil -m rm -r "gs://$gs_path" - fi - gs_path="artifacts.opnfv.org/review/$GERRIT_CHANGE_NUMBER" - - if gsutil ls "gs://$gs_path" > /dev/null 2>&1 ; then - echo - echo "Deleting Out-of-dated Documents..." - gsutil -m rm -r "gs://$gs_path" - fi + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + + [[ $GERRIT_CHANGE_NUMBER =~ .+ ]] + + gs_path="artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER" + + if gsutil ls "gs://$gs_path" > /dev/null 2>&1 ; then + echo + echo "Deleting Out-of-dated Documents..." + gsutil -m rm -r "gs://$gs_path" + fi + gs_path="artifacts.opnfv.org/review/$GERRIT_CHANGE_NUMBER" + + if gsutil ls "gs://$gs_path" > /dev/null 2>&1 ; then + echo + echo "Deleting Out-of-dated Documents..." + gsutil -m rm -r "gs://$gs_path" + fi - builder: name: build-and-upload-artifacts-json-api builders: - - shell: | - #!/bin/bash - set -o errexit - set -o pipefail - export PATH=$PATH:/usr/local/bin/ + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + export PATH=$PATH:/usr/local/bin/ - virtualenv -p python2.7 $WORKSPACE/releng_artifacts - source $WORKSPACE/releng_artifacts/bin/activate + virtualenv -p python2.7 $WORKSPACE/releng_artifacts + source $WORKSPACE/releng_artifacts/bin/activate - # install python packages - pip install google-api-python-client + # install python packages + pip install google-api-python-client - # generate and upload index file - echo "Generating Artifacts API ..." - python $WORKSPACE/utils/opnfv-artifacts.py > index.json - gsutil cp index.json gs://artifacts.opnfv.org/index.json + # generate and upload index file + echo "Generating Artifacts API ..." + python $WORKSPACE/utils/opnfv-artifacts.py > index.json + gsutil cp index.json gs://artifacts.opnfv.org/index.json - deactivate + deactivate - builder: name: lint-python-code builders: - - shell: | - #!/bin/bash - set -o errexit - set -o pipefail - set -o xtrace - export PATH=$PATH:/usr/local/bin/ - - virtualenv -p python2.7 $WORKSPACE/releng_flake8 - source $WORKSPACE/releng_flake8/bin/activate - - # install python packages - pip install "flake8==2.6.2" - - # generate and upload lint log - echo "Running flake8 code on $PROJECT ..." - - # Get number of flake8 violations. If none, this will be an - # empty string: "" - FLAKE_COUNT="$(find . \ + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + + virtualenv -p python2.7 $WORKSPACE/releng_flake8 + source $WORKSPACE/releng_flake8/bin/activate + + # install python packages + pip install "flake8==2.6.2" + + # generate and upload lint log + echo "Running flake8 code on $PROJECT ..." + + # Get number of flake8 violations. If none, this will be an + # empty string: "" + FLAKE_COUNT="$(find . 
\ + -path './releng_flake8' -prune -o \ + -path './.tox' -prune -o \ + -type f -name "*.py" -print | \ + xargs flake8 --exit-zero -qq --count 2>&1)" + + # Ensure we start with a clean environment + rm -f lint.log + + if [ ! -z $FLAKE_COUNT ]; then + echo "Flake8 Violations: $FLAKE_COUNT" > lint.log + find . \ -path './releng_flake8' -prune -o \ -path './.tox' -prune -o \ -type f -name "*.py" -print | \ - xargs flake8 --exit-zero -qq --count 2>&1)" - - # Ensure we start with a clean environment - rm -f lint.log - - if [ ! -z $FLAKE_COUNT ]; then - echo "Flake8 Violations: $FLAKE_COUNT" > lint.log - find . \ - -path './releng_flake8' -prune -o \ - -path './.tox' -prune -o \ - -type f -name "*.py" -print | \ - xargs flake8 --exit-zero --first >> violation.log - SHOWN=$(wc -l violation.log | cut -d' ' -f1) - echo -e "First $SHOWN shown\n---" >> lint.log - cat violation.log >> lint.log - sed -r -i '4,$s/^/ /g' lint.log - rm violation.log - fi - - deactivate + xargs flake8 --exit-zero --first >> violation.log + SHOWN=$(wc -l violation.log | cut -d' ' -f1) + echo -e "First $SHOWN shown\n---" >> lint.log + cat violation.log >> lint.log + sed -r -i '4,$s/^/ /g' lint.log + rm violation.log + fi + + deactivate - builder: name: report-lint-result-to-gerrit builders: - - shell: | - #!/bin/bash - set -o errexit - set -o pipefail - set -o xtrace - export PATH=$PATH:/usr/local/bin/ + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ - # If no violations were found, no lint log will exist. - if [[ -e lint.log ]] ; then - echo -e "\nposting linting report to gerrit...\n" + # If no violations were found, no lint log will exist. + if [[ -e lint.log ]] ; then + echo -e "\nposting linting report to gerrit...\n" - cat lint.log - echo + cat lint.log + echo - ssh -p 29418 gerrit.opnfv.org \ - "gerrit review -p $GERRIT_PROJECT \ - -m \"$(cat lint.log)\" \ - $GERRIT_PATCHSET_REVISION \ - --notify NONE" + ssh -p 29418 gerrit.opnfv.org \ + "gerrit review -p $GERRIT_PROJECT \ + -m \"$(cat lint.log)\" \ + $GERRIT_PATCHSET_REVISION \ + --notify NONE" - exit 1 - fi + exit 1 + fi - builder: name: upload-review-docs builders: - - build-html-and-pdf-docs-output - - upload-under-review-docs-to-opnfv-artifacts - - report-build-result-to-gerrit + - build-html-and-pdf-docs-output + - upload-under-review-docs-to-opnfv-artifacts + - report-build-result-to-gerrit - builder: name: upload-merged-docs builders: - - build-html-and-pdf-docs-output - - upload-generated-docs-to-opnfv-artifacts - - report-build-result-to-gerrit - - remove-old-docs-from-opnfv-artifacts + - build-html-and-pdf-docs-output + - upload-generated-docs-to-opnfv-artifacts + - report-build-result-to-gerrit + - remove-old-docs-from-opnfv-artifacts - builder: name: check-bash-syntax builders: - - shell: "find . -name '*.sh' | xargs bash -n" + - shell: "find . -name '*.sh' | xargs bash -n" - builder: name: lint-yaml-code builders: - - shell: | - #!/bin/bash - set -o errexit - set -o pipefail - set -o xtrace - export PATH=$PATH:/usr/local/bin/ - - # install python packages - pip install "yamllint==1.6.0" + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + + # install python packages + pip install "yamllint==1.6.0" + + # generate and upload lint log + echo "Running yaml code on $PROJECT ..." + + # Ensure we start with a clean environment + rm -f yaml-violation.log lint.log + + # Get number of yaml violations. 
If none, this will be an + # empty string: "" + find . \ + -type f -name "*.yml" -print \ + -o -name "*.yaml" -print | \ + xargs yamllint > yaml-violation.log || true + + if [ -s "yaml-violation.log" ]; then + SHOWN=$(cat yaml-violation.log| grep -v "^$" |wc -l) + echo -e "First $SHOWN shown\n---" > lint.log + cat yaml-violation.log >> lint.log + sed -r -i '4,$s/^/ /g' lint.log + fi - # generate and upload lint log - echo "Running yaml code on $PROJECT ..." - - # Ensure we start with a clean environment - rm -f yaml-violation.log lint.log - - # Get number of yaml violations. If none, this will be an - # empty string: "" - find . \ - -type f -name "*.yml" -print \ - -o -name "*.yaml" -print | \ - xargs yamllint > yaml-violation.log || true - - if [ -s "yaml-violation.log" ]; then - SHOWN=$(cat yaml-violation.log| grep -v "^$" |wc -l) - echo -e "First $SHOWN shown\n---" > lint.log - cat yaml-violation.log >> lint.log - sed -r -i '4,$s/^/ /g' lint.log - fi +- builder: + name: clean-workspace + builders: + - shell: | + #!/bin/bash + set -o errexit + set -o nounset + set -o pipefail + sudo /bin/rm -rf "$WORKSPACE" - builder: name: clean-workspace-log builders: - - shell: | - find $WORKSPACE -type f -name '*.log' | xargs rm -f + - shell: | + find $WORKSPACE -type f -name '*.log' | xargs rm -f - publisher: name: archive-artifacts publishers: - - archive: - artifacts: '{artifacts}' - allow-empty: true - fingerprint: true - latest-only: true + - archive: + artifacts: '{artifacts}' + allow-empty: true + fingerprint: true + latest-only: true - publisher: name: publish-coverage diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml index 9234206a5..a4ee4d9f1 100644 --- a/jjb/global/slave-params.yml +++ b/jjb/global/slave-params.yml @@ -1,3 +1,4 @@ +--- ##################################################### # Parameters for slaves using old labels # This will be cleaned up once the new job structure and @@ -6,1046 +7,1024 @@ - parameter: name: 'apex-baremetal-master-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'apex-baremetal-master' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to use for Apex' - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - lf-pod1 - default-slaves: - - lf-pod1 + - label: + name: SLAVE_LABEL + default: 'apex-baremetal-master' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - lf-pod1 + default-slaves: + - lf-pod1 + - parameter: name: 'apex-baremetal-danube-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'apex-baremetal-danube' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to use for Apex' - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - lf-pod1 - default-slaves: - - lf-pod1 + - label: + name: SLAVE_LABEL + default: 'apex-baremetal-danube' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on 
this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - lf-pod1 + default-slaves: + - lf-pod1 + - parameter: name: 'apex-virtual-master-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'apex-virtual-master' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to use for Apex' - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - lf-virtual2 - - lf-virtual3 - default-slaves: - - lf-virtual2 - - lf-virtual3 + - label: + name: SLAVE_LABEL + default: 'apex-virtual-master' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - lf-virtual2 + - lf-virtual3 + default-slaves: + - lf-virtual2 + - lf-virtual3 - parameter: name: 'apex-virtual-danube-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'apex-virtual-danube' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to use for Apex' - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - lf-pod3 - default-slaves: - - lf-pod3 + - label: + name: SLAVE_LABEL + default: 'apex-virtual-danube' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - lf-pod3 + default-slaves: + - lf-pod3 + - parameter: name: 'lf-pod1-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - lf-pod1 - default-slaves: - - lf-pod1 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to use for Apex' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - lf-pod1 + default-slaves: + - lf-pod1 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + - parameter: name: 'lf-pod3-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - lf-pod3 - default-slaves: - - lf-pod3 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to use for Apex' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - lf-pod3 + default-slaves: + - lf-pod3 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + 
description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + ##################################################### # Parameters for CI baremetal PODs ##################################################### - parameter: name: 'apex-baremetal-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'apex-baremetal' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to use for Apex' + - label: + name: SLAVE_LABEL + default: 'apex-baremetal' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' - parameter: name: 'compass-baremetal-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'compass-baremetal' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'compass-baremetal' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'compass-baremetal-master-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'compass-baremetal-master' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'compass-baremetal-master' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'compass-baremetal-branch-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'compass-baremetal-branch' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'compass-baremetal-branch' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'fuel-baremetal-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'fuel-baremetal' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'fuel-baremetal' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'armband-baremetal-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'armband-baremetal' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: LAB_CONFIG_URL - default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab - description: 'Base URI to the configuration directory' + - label: + name: SLAVE_LABEL + default: 'armband-baremetal' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: LAB_CONFIG_URL + default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab + description: 'Base URI to the configuration 
directory' + - parameter: name: 'joid-baremetal-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'joid-baremetal' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: EXTERNAL_NETWORK - default: ext-net - description: "External network floating ips" + - label: + name: SLAVE_LABEL + default: 'joid-baremetal' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: EXTERNAL_NETWORK + default: ext-net + description: "External network floating ips" + - parameter: name: 'daisy-baremetal-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - zte-pod2 - default-slaves: - - zte-pod2 - - label: - name: SLAVE_LABEL - default: 'daisy-baremetal' - - string: - name: INSTALLER_IP - default: '10.20.7.3' - description: 'IP of the installer' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - zte-pod2 + default-slaves: + - zte-pod2 + - label: + name: SLAVE_LABEL + default: 'daisy-baremetal' + - string: + name: INSTALLER_IP + default: '10.20.7.3' + description: 'IP of the installer' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + ##################################################### # Parameters for CI virtual PODs ##################################################### - parameter: name: 'apex-virtual-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'apex-virtual' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to use for Apex' + - label: + name: SLAVE_LABEL + default: 'apex-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + - parameter: name: 'compass-virtual-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'compass-virtual' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'compass-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'compass-virtual-master-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'compass-virtual-master' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'compass-virtual-master' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'compass-virtual-branch-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'compass-virtual-branch' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: 
SLAVE_LABEL + default: 'compass-virtual-branch' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'fuel-virtual-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'fuel-virtual' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'fuel-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'armband-virtual-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'armband-virtual' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: LAB_CONFIG_URL - default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab - description: 'Base URI to the configuration directory' + - label: + name: SLAVE_LABEL + default: 'armband-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: LAB_CONFIG_URL + default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab + description: 'Base URI to the configuration directory' + - parameter: name: 'joid-virtual-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'joid-virtual' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'joid-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'daisy-virtual-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - zte-virtual1 - - zte-virtual2 - default-slaves: - - zte-virtual1 - - label: - name: SLAVE_LABEL - default: 'daisy-virtual' - - string: - name: INSTALLER_IP - default: '10.20.11.2' - description: 'IP of the installer' - - string: - name: BRIDGE - default: 'daisy1' - description: 'pxe bridge for booting of Daisy master' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - zte-virtual1 + - zte-virtual2 + default-slaves: + - zte-virtual1 + - label: + name: SLAVE_LABEL + default: 'daisy-virtual' + - string: + name: INSTALLER_IP + default: '10.20.11.2' + description: 'IP of the installer' + - string: + name: BRIDGE + default: 'daisy1' + description: 'pxe bridge for booting of Daisy master' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + ##################################################### # Parameters for build slaves ##################################################### -- parameter: - name: 'opnfv-build-enea-defaults' - parameters: - - label: - name: SLAVE_LABEL - default: 'opnfv-build-enea' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." 
- parameter: name: 'opnfv-build-centos-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'opnfv-build-centos' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." + - label: + name: SLAVE_LABEL + default: 'opnfv-build-centos' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: BUILD_DIRECTORY + default: $WORKSPACE/build_output + description: "Directory where the build artifact will be located upon the completion of the build." + - parameter: name: 'opnfv-build-ubuntu-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'opnfv-build-ubuntu' - description: 'Slave label on Jenkins' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." + - label: + name: SLAVE_LABEL + default: 'opnfv-build-ubuntu' + description: 'Slave label on Jenkins' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: BUILD_DIRECTORY + default: $WORKSPACE/build_output + description: "Directory where the build artifact will be located upon the completion of the build." + - parameter: name: 'opnfv-build-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'opnfv-build' - description: 'Slave label on Jenkins' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." + - label: + name: SLAVE_LABEL + default: 'opnfv-build' + description: 'Slave label on Jenkins' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: BUILD_DIRECTORY + default: $WORKSPACE/build_output + description: "Directory where the build artifact will be located upon the completion of the build." 
+ - parameter: name: 'huawei-build-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - huawei-build - default-slaves: - - huawei-build - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - huawei-build + default-slaves: + - huawei-build + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'opnfv-build-ubuntu-arm-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'opnfv-build-ubuntu-arm' - description: 'Slave label on Jenkins' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." + - label: + name: SLAVE_LABEL + default: 'opnfv-build-ubuntu-arm' + description: 'Slave label on Jenkins' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: BUILD_DIRECTORY + default: $WORKSPACE/build_output + description: "Directory where the build artifact will be located upon the completion of the build." + ##################################################### # Parameters for none-CI PODs ##################################################### - parameter: name: 'ericsson-pod1-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - ericsson-pod1 - default-slaves: - - ericsson-pod1 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - ericsson-pod1 + default-slaves: + - ericsson-pod1 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'cengn-pod1-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - cengn-pod1 - default-slaves: - - cengn-pod1 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - cengn-pod1 + default-slaves: + - cengn-pod1 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'intel-pod1-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - intel-pod1 - default-slaves: - - intel-pod1 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - intel-pod1 + default-slaves: + - intel-pod1 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'intel-pod2-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' 
- allowed-slaves: - - intel-pod2 - default-slaves: - - intel-pod2 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to use for Apex' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - intel-pod2 + default-slaves: + - intel-pod2 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + - parameter: name: 'intel-pod9-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - intel-pod9 - default-slaves: - - intel-pod9 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - intel-pod9 + default-slaves: + - intel-pod9 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'intel-pod10-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - intel-pod10 - default-slaves: - - intel-pod10 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - intel-pod10 + default-slaves: + - intel-pod10 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'intel-pod12-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - intel-pod12 - default-slaves: - - intel-pod12 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - intel-pod12 + default-slaves: + - intel-pod12 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'huawei-pod3-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - huawei-pod3 - default-slaves: - - huawei-pod3 - - label: - name: SLAVE_LABEL - default: 'huawei-test' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - huawei-pod3 + default-slaves: + - huawei-pod3 + - label: + name: SLAVE_LABEL + default: 'huawei-test' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'huawei-pod4-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - huawei-pod4 - default-slaves: - - huawei-pod4 - - label: - name: SLAVE_LABEL - default: 'huawei-test' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - 
node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - huawei-pod4 + default-slaves: + - huawei-pod4 + - label: + name: SLAVE_LABEL + default: 'huawei-test' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'intel-pod8-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - intel-pod8 - default-slaves: - - intel-pod8 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - intel-pod8 + default-slaves: + - intel-pod8 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + - parameter: name: 'huawei-virtual5-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'huawei-virtual5' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT + - label: + name: SLAVE_LABEL + default: 'huawei-virtual5' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + - parameter: name: 'huawei-virtual7-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - huawei-virtual7 - default-slaves: - - huawei-virtual7 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - huawei-virtual7 + default-slaves: + - huawei-virtual7 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + - parameter: name: 'huawei-pod7-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - huawei-pod7 - default-slaves: - - huawei-pod7 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - huawei-pod7 + default-slaves: + - huawei-pod7 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + - parameter: name: 'zte-pod1-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - zte-pod1 - default-slaves: - - zte-pod1 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: INSTALLER_IP - default: '10.20.6.2' - description: 'IP of the installer' - - string: - name: BRIDGE - default: 'br6' - description: 'pxe bridge for booting of Fuel master' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - zte-pod1 + default-slaves: + - zte-pod1 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: INSTALLER_IP + default: '10.20.6.2' + description: 'IP of the installer' + - string: + name: BRIDGE + default: 'br6' + description: 'pxe bridge for booting of Fuel master' + - parameter: name: 'zte-pod2-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - zte-pod2 - default-slaves: - - zte-pod2 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: INSTALLER_IP - default: '10.20.7.3' - description: 'IP of the installer' - - string: - name: BRIDGE 
- default: 'br7' - description: 'pxe bridge for booting of Fuel master' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - zte-pod2 + default-slaves: + - zte-pod2 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: INSTALLER_IP + default: '10.20.7.3' + description: 'IP of the installer' + - string: + name: BRIDGE + default: 'br7' + description: 'pxe bridge for booting of Fuel master' + - parameter: name: 'zte-pod3-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - zte-pod3 - default-slaves: - - zte-pod3 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: BRIDGE - default: 'br0' - description: 'pxe bridge for booting of Fuel master' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - zte-pod3 + default-slaves: + - zte-pod3 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: BRIDGE + default: 'br0' + description: 'pxe bridge for booting of Fuel master' + - parameter: name: zte-pod4-defaults parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - zte-pod4 - default-slaves: - - zte-pod4 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - zte-pod4 + default-slaves: + - zte-pod4 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'juniper-pod1-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - juniper-pod1 - default-slaves: - - juniper-pod1 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: CEPH_DISKS - default: /srv - description: "Disks to use by ceph (comma separated list)" + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - juniper-pod1 + default-slaves: + - juniper-pod1 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: CEPH_DISKS + default: /srv + description: "Disks to use by ceph (comma separated list)" + - parameter: name: 'orange-pod1-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - orange-pod1 - default-slaves: - - orange-pod1 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - orange-pod1 + default-slaves: + - orange-pod1 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'orange-pod2-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - orange-pod2 - default-slaves: - - orange-pod2 - - string: - name: GIT_BASE - default: 
https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - orange-pod2 + default-slaves: + - orange-pod2 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'orange-pod5-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - orange-pod5 - default-slaves: - - orange-pod5 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - orange-pod5 + default-slaves: + - orange-pod5 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'dell-pod1-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - dell-pod1 - default-slaves: - - dell-pod1 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - dell-pod1 + default-slaves: + - dell-pod1 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'dell-pod2-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - dell-pod2 - default-slaves: - - dell-pod2 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - dell-pod2 + default-slaves: + - dell-pod2 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'nokia-pod1-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - nokia-pod1 - default-slaves: - - nokia-pod1 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to use for Apex' -- parameter: - name: 'arm-pod2-defaults' - parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - arm-pod2 - default-slaves: - - arm-pod2 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: LAB_CONFIG_URL - default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab - description: 'Base URI to the configuration directory' -- parameter: - name: 'arm-pod5-defaults' - parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - arm-pod5 - default-slaves: - - arm-pod5 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: LAB_CONFIG_URL - default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab - description: 'Base URI to the configuration directory' -- parameter: - name: 
'arm-pod4-defaults' - parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - arm-pod4 - default-slaves: - - arm-pod4 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: LAB_CONFIG_URL - default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab - description: 'Base URI to the configuration directory' -- parameter: - name: 'arm-virtual2-defaults' - parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - arm-virtual2 - default-slaves: - - arm-virtual2 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: LAB_CONFIG_URL - default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab - description: 'Base URI to the configuration directory' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - nokia-pod1 + default-slaves: + - nokia-pod1 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to use for Apex' + - parameter: name: 'intel-virtual6-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - intel-virtual6 - default-slaves: - - intel-virtual6 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - intel-virtual6 + default-slaves: + - intel-virtual6 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'intel-virtual10-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - intel-virtual10 - default-slaves: - - intel-virtual10 - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - intel-virtual10 + default-slaves: + - intel-virtual10 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'doctor-defaults' parameters: - - node: - name: SLAVE_NAME - description: 'Slave name on Jenkins' - allowed-slaves: - - '{default-slave}' - default-slaves: - - '{default-slave}' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: SSH_KEY - default: /root/.ssh/id_rsa - description: 'SSH key to be used' + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - '{default-slave}' + default-slaves: + - '{default-slave}' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: SSH_KEY + default: /root/.ssh/id_rsa + description: 'SSH key to be used' + - parameter: name: 'doctor-apex-verify-defaults' parameters: - - 'doctor-defaults': - default-slave: 'doctor-apex-verify' + - 'doctor-defaults': + default-slave: 'doctor-apex-verify' + - parameter: name: 
'doctor-fuel-verify-defaults' parameters: - - 'doctor-defaults': - default-slave: 'doctor-fuel-verify' + - 'doctor-defaults': + default-slave: 'doctor-fuel-verify' + - parameter: name: 'doctor-joid-verify-defaults' parameters: - - 'doctor-defaults': - default-slave: 'doctor-joid-verify' + - 'doctor-defaults': + default-slave: 'doctor-joid-verify' + - parameter: name: 'multisite-virtual-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'multisite-virtual' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'multisite-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'ericsson-virtual5-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'ericsson-virtual5' - - string: - name: GIT_BASE - default: https://git.opendaylight.org/gerrit/p/$PROJECT.git - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'ericsson-virtual5' + - string: + name: GIT_BASE + default: https://git.opendaylight.org/gerrit/p/$PROJECT.git + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'ericsson-virtual12-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'ericsson-virtual12' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'ericsson-virtual12' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'ericsson-virtual13-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'ericsson-virtual13' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'ericsson-virtual13' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'ericsson-virtual-pod1bl01-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'ericsson-virtual-pod1bl01' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'ericsson-virtual-pod1bl01' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'odl-netvirt-virtual-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'odl-netvirt-virtual' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'odl-netvirt-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - parameter: name: 'odl-netvirt-virtual-intel-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'odl-netvirt-virtual-intel' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' + - label: + name: SLAVE_LABEL + default: 'odl-netvirt-virtual-intel' + - string: + name: GIT_BASE + default: 
https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + ##################################################### # These slaves are just dummy slaves for sandbox jobs ##################################################### - parameter: name: 'sandbox-baremetal-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'sandbox-baremetal' - description: 'Slave label on Jenkins' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." + - label: + name: SLAVE_LABEL + default: 'sandbox-baremetal' + description: 'Slave label on Jenkins' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: BUILD_DIRECTORY + default: $WORKSPACE/build_output + description: "Directory where the build artifact will be located upon the completion of the build." + - parameter: name: 'sandbox-virtual-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'sandbox-virtual' - description: 'Slave label on Jenkins' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." + - label: + name: SLAVE_LABEL + default: 'sandbox-virtual' + description: 'Slave label on Jenkins' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: BUILD_DIRECTORY + default: $WORKSPACE/build_output + description: "Directory where the build artifact will be located upon the completion of the build." + - parameter: name: 'dummy-pod1-defaults' parameters: - - label: - name: SLAVE_LABEL - default: 'dummy-pod1' - description: 'Slave label on Jenkins' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - - string: - name: BUILD_DIRECTORY - default: $WORKSPACE/build_output - description: "Directory where the build artifact will be located upon the completion of the build." + - label: + name: SLAVE_LABEL + default: 'dummy-pod1' + description: 'Slave label on Jenkins' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' + - string: + name: BUILD_DIRECTORY + default: $WORKSPACE/build_output + description: "Directory where the build artifact will be located upon the completion of the build." 
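The parameter blocks above are Jenkins Job Builder macros: a job template pulls in SLAVE_LABEL, GIT_BASE, SSH_KEY or BUILD_DIRECTORY simply by listing the macro name under its own parameters, as the sfc and releng templates later in this change do. A minimal sketch of that usage, assuming a hypothetical template name (the macro name matches the definition above):

- job-template:
    name: 'example-verify-{stream}'  # hypothetical template, for illustration only
    parameters:
      - project-parameter:
          project: '{project}'
          branch: '{branch}'
      # expands to the SLAVE_LABEL, GIT_BASE and BUILD_DIRECTORY strings defined above
      - 'opnfv-build-ubuntu-defaults'
    scm:
      - git-scm-gerrit
    builders:
      - shell: |
          echo "Runs on a slave labelled opnfv-build-ubuntu"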
diff --git a/jjb/opnfvdocs/project.cfg b/jjb/opnfvdocs/project.cfg index 1ea05c1d4..0722b4036 100644 --- a/jjb/opnfvdocs/project.cfg +++ b/jjb/opnfvdocs/project.cfg @@ -5,6 +5,7 @@ bottlenecks compass4nfv copper conductor +container4nfv daisy doctor domino @@ -24,7 +25,6 @@ movie multisite octopus onosfw -openretriever ovno ovsnfv parser diff --git a/jjb/releng/generate-job-list.sh b/jjb/releng/generate-job-list.sh new file mode 100755 index 000000000..0e35eaad4 --- /dev/null +++ b/jjb/releng/generate-job-list.sh @@ -0,0 +1,68 @@ +#!/bin/bash +# SPDX-license-identifier: Apache-2.0 +############################################################################## +# Copyright (c) 2016 Linux Foundation and others. +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +set -o errexit +set -o nounset +set -o pipefail + +# Job Number Formatter +function JOBS { + local NUMS=$1 + if [ $NUMS == 1 ]; then + echo -n "Job" + else + echo -n "Jobs" + fi +} + +# We expect job_output to exist prior to this being run and contain the +# output from jenkins-jobs test + +echo "> Generating list of previous JJB jobs..." +git checkout -q -b previous-commit HEAD^ +jenkins-jobs -l ERROR test -r jjb -o job_output_prev +git checkout -q - && git branch -q -d previous-commit + +echo "> Finding job changes ..." +diff -r -q job_output job_output_prev &> job_diff.txt || true + +# Only in (job_output) = NEW +# Only in (job_output_prev) = DELETED +# Files ... differ = MODIFIED + +declare -a JOBS_ADDED=($(grep 'job_output:' job_diff.txt | cut -d':' -f2- | sed 's/^[ \t]*//;s/[ \t]*$//')) +declare -a JOBS_MODIFIED=($(grep 'differ$' job_diff.txt | sed "s/Files job_output\/\(.*\) and.*/\1/g")) +declare -a JOBS_REMOVED=($(grep 'job_output_prev:' job_diff.txt | cut -d ':' -f2- | sed 's/^[ \t]*//;s/[ \t]*$//')) + +NUM_JOBS_ADDED=${#JOBS_ADDED[@]} +NUM_JOBS_MODIFIED=${#JOBS_MODIFIED[@]} +NUM_JOBS_REMOVED=${#JOBS_REMOVED[@]} + +echo "> Writing gerrit comment ..." 
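+# Example of the classification above (hypothetical job names): diff -r -q prints
+#   "Only in job_output: apex-build-master"        -> counted as an added job
+#   "Only in job_output_prev: apex-build-danube"   -> counted as a removed job
+#   "Files job_output/releng-verify-jjb and job_output_prev/releng-verify-jjb differ" -> counted as modified
+# and the three counts drive the summary sections written below.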
+if [ $NUM_JOBS_ADDED -gt 0 ]; then + JOB_STRING="$(JOBS $NUM_JOBS_ADDED)" + { printf "Added %s %s:\n\n" "${NUM_JOBS_ADDED}" "$JOB_STRING"; + printf '* %s\n' "${JOBS_ADDED[@]}"; + printf "\n"; } >> gerrit_comment.txt +fi + +if [ $NUM_JOBS_MODIFIED -gt 0 ]; then + JOB_STRING="$(JOBS $NUM_JOBS_MODIFIED)" + { printf "Modified %s %s:\n\n" "${NUM_JOBS_MODIFIED}" "$JOB_STRING"; + printf '* %s\n' "${JOBS_MODIFIED[@]}"; + printf "\n"; } >> gerrit_comment.txt +fi + +if [ $NUM_JOBS_REMOVED -gt 0 ]; then + JOB_STRING="$(JOBS $NUM_JOBS_REMOVED)" + { printf "Removed %s %s:\n\n" "${NUM_JOBS_REMOVED}" "$JOB_STRING"; + printf '* %s\n' "${JOBS_REMOVED[@]}"; + printf "\n"; } >> gerrit_comment.txt +fi diff --git a/jjb/releng/opnfv-docker-arm.yml b/jjb/releng/opnfv-docker-arm.yml index d70640a7c..842d4967d 100644 --- a/jjb/releng/opnfv-docker-arm.yml +++ b/jjb/releng/opnfv-docker-arm.yml @@ -23,6 +23,12 @@ cristina.pauna@enea.com alexandru.avadanii@enea.com alexandru.nemes@enea.com + yardstick-arm-receivers: &yardstick-arm-receivers + receivers: > + cristina.pauna@enea.com + alexandru.avadanii@enea.com + alexandru.nemes@enea.com + catalina.focsa@enea.com other-receivers: &other-receivers receivers: '' @@ -34,6 +40,9 @@ - 'dovetail': <<: *master <<: *dovetail-arm-receivers + - 'yardstick': + <<: *master + <<: *yardstick-arm-receivers # projects with jobs for stable jobs: diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml index 9d27329ed..414eba255 100644 --- a/jjb/releng/opnfv-docker.yml +++ b/jjb/releng/opnfv-docker.yml @@ -60,6 +60,11 @@ dockerdir: 'docker/storperf-master' <<: *master <<: *other-receivers + - 'storperf-graphite': + project: 'storperf' + dockerdir: 'docker/storperf-graphite' + <<: *master + <<: *other-receivers - 'storperf-httpfrontend': project: 'storperf' dockerdir: 'docker/storperf-httpfrontend' diff --git a/jjb/releng/opnfv-repo-archiver.sh b/jjb/releng/opnfv-repo-archiver.sh new file mode 100644 index 000000000..c9fdba379 --- /dev/null +++ b/jjb/releng/opnfv-repo-archiver.sh @@ -0,0 +1,66 @@ +#!/bin/bash +# SPDX-license-identifier: Apache-2.0 +############################################################################## +# Copyright (c) 2016 Linux Foundation and others. +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## +set -o errexit +set -o pipefail +export PATH=$PATH:/usr/local/bin/ + +DATE="$(date +%Y%m%d)" + +declare -a PROJECT_LIST +EXCLUDE_PROJECTS="All-Projects|All-Users|securedlab" +CLONE_PATH="$WORKSPACE/opnfv-repos" + +# Generate project list from gerrit +PROJECT_LIST=($(ssh -p 29418 jenkins-ci@gerrit.opnfv.org gerrit ls-projects | egrep -v $EXCLUDE_PROJECTS)) + +echo "Cloning all OPNFV repositories" +echo "------------------------------" + +for PROJECT in "${PROJECT_LIST[@]}"; do + echo "> Cloning $PROJECT" + if [ ! 
-d "$CLONE_PATH/$PROJECT" ]; then + git clone "https://gerrit.opnfv.org/gerrit/$PROJECT.git" $CLONE_PATH/$PROJECT + else + pushd "$CLONE_PATH/$PROJECT" &>/dev/null + git pull -f + popd &> /dev/null + fi + + # Don't license scan kernel or qemu in kvmfornfv + if [ "$PROJECT" == "kvmfornfv" ]; then + rm -rf "$CLONE_PATH/$PROJECT/{kernel,qemu}" + fi +done + +echo "Finished cloning OPNFV repositories" +echo "-----------------------------------" + +# Copy repos and clear git data +echo "Copying repos to $WORKSPACE/opnfv-archive and removing .git files" +cp -R $CLONE_PATH $WORKSPACE/opnfv-archive +find $WORKSPACE/opnfv-archive -type d -iname '.git' -exec rm -rf {} + +find $WORKSPACE/opnfv-archive -type f -iname '.git*' -exec rm -rf {} + + +# Create archive +echo "Creating archive: opnfv-archive-$DATE.tar.gz" +echo "--------------------------------------" +cd $WORKSPACE +tar -czf "opnfv-archive-$DATE.tar.gz" opnfv-archive && rm -rf opnfv-archive +echo "Archiving Complete." + +echo "Uploading artifacts" +echo "--------------------------------------" + +gsutil cp "$WORKSPACE/opnfv-archive-$DATE.tar.gz" \ + "gs://opnfv-archive/opnfv-archive-$DATE.tar.gz" 2>&1 + +rm -f opnfv-archive-$DATE.tar.gz + +echo "Finished" diff --git a/jjb/releng/opnfv-utils.yml b/jjb/releng/opnfv-utils.yml index 717bb3cbc..721b5dede 100644 --- a/jjb/releng/opnfv-utils.yml +++ b/jjb/releng/opnfv-utils.yml @@ -4,6 +4,9 @@ jobs: - 'prune-docker-images' + - 'archive-repositories' + - 'check-status-of-slaves' + ######################## # job templates ######################## @@ -37,3 +40,50 @@ triggers: - timed: '@midnight' + +- job-template: + name: 'archive-repositories' + + disabled: false + + concurrent: true + + parameters: + - node: + name: SLAVE_NAME + description: Where to create the archive + default-slaves: + - master + allowed-multiselect: false + ignore-offline-nodes: true + + triggers: + - timed: '@monthly' + + builders: + - shell: + !include-raw-escape: opnfv-repo-archiver.sh + +- job-template: + name: 'check-status-of-slaves' + + disabled: false + + concurrent: true + + parameters: + - node: + name: SLAVE_NAME + description: We don't want workspace wiped. 
so I just threw the script on the master + default-slaves: + - master + allowed-multiselect: false + ignore-offline-nodes: true + + triggers: + - timed: '@midnight' + + builders: + - shell: | + cd /opt/jenkins-ci/slavemonitor + bash slave-monitor-0.1.sh | sort diff --git a/jjb/releng/releng-ci-jobs.yml b/jjb/releng/releng-ci-jobs.yml index dc9bfd5dc..42d88fb7a 100644 --- a/jjb/releng/releng-ci-jobs.yml +++ b/jjb/releng/releng-ci-jobs.yml @@ -3,6 +3,7 @@ jobs: - 'releng-verify-jjb' - 'releng-merge-jjb' + - 'releng-comment-jjb' - 'releng-generate-artifacts-api' project: 'releng' @@ -54,6 +55,35 @@ artifacts: 'job_output/*' - email-jenkins-admins-on-failure +- job-template: + name: releng-comment-jjb + + parameters: + - project-parameter: + project: '{project}' + branch: 'master' + scm: + - git-scm-gerrit + + triggers: + - experimental: + project: '{project}' + branch: 'master' + files: 'jjb/**' + + builders: + - copyartifact: + project: releng-merge-jjb + filter: "job_output/*" + which-build: last-successful + stable: true + optional: false + flatten: false + do-not-fingerprint: true + - shell: + !include-raw-escape: generate-job-list.sh + - report-build-result-to-gerrit + - job-template: name: 'releng-merge-jjb' diff --git a/jjb/securedlab/check-jinja2.yml b/jjb/securedlab/check-jinja2.yml index 1e85536e7..430ced560 100644 --- a/jjb/securedlab/check-jinja2.yml +++ b/jjb/securedlab/check-jinja2.yml @@ -70,6 +70,12 @@ pattern: '**/*.jinja2' - compare-type: ANT pattern: '**/*.yaml' + skip-vote: + successful: true + failed: true + unstable: true + notbuilt: true + builders: - check-jinja diff --git a/jjb/sfc/sfc-project-jobs.yml b/jjb/sfc/sfc-project-jobs.yml new file mode 100644 index 000000000..379fe793a --- /dev/null +++ b/jjb/sfc/sfc-project-jobs.yml @@ -0,0 +1,70 @@ +################################################### +# All the jobs except verify have been removed! +# They will only be enabled on request by projects! 
+################################################### +- project: + name: sfc-project-jobs + + project: 'sfc' + + jobs: + - 'sfc-verify-{stream}' + stream: + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false + - danube: + branch: 'stable/{stream}' + gs-pathname: '/{stream}' + disabled: true + +- job-template: + name: 'sfc-verify-{stream}' + + disabled: '{obj:disabled}' + + parameters: + - project-parameter: + project: '{project}' + branch: '{branch}' + - 'opnfv-build-ubuntu-defaults' + scm: + - git-scm-gerrit + + triggers: + - gerrit: + server-name: 'gerrit.opnfv.org' + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - draft-published-event + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + disable-strict-forbidden-file-verification: 'true' + forbidden-file-paths: + - compare-type: ANT + pattern: 'docs/**|.gitignore' + + builders: + - sfc-unit-tests + +################################ +# job builders +################################ + +- builder: + name: sfc-unit-tests + builders: + - shell: | + cd $WORKSPACE && yamllint $(git ls-tree -r HEAD --name-only | egrep 'yml$|yaml$') diff --git a/jjb/storperf/storperf-verify-jobs.yml b/jjb/storperf/storperf-verify-jobs.yml index 55c4e4ce0..f99ceeacb 100644 --- a/jjb/storperf/storperf-verify-jobs.yml +++ b/jjb/storperf/storperf-verify-jobs.yml @@ -122,7 +122,7 @@ wrappers: - ssh-agent-wrapper - build-timeout: - timeout: 30 + timeout: 60 parameters: - project-parameter: diff --git a/jjb/xci/bifrost-periodic-jobs.yml b/jjb/xci/bifrost-periodic-jobs.yml index 7ef11a4cf..a1892fc5f 100644 --- a/jjb/xci/bifrost-periodic-jobs.yml +++ b/jjb/xci/bifrost-periodic-jobs.yml @@ -49,7 +49,7 @@ - 'suse': disabled: true slave-label: xci-suse-virtual - dib-os-release: '42.2' + dib-os-release: '42.3' dib-os-element: 'opensuse-minimal' dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl' extra-dib-elements: 'openssh-server' diff --git a/jjb/xci/bifrost-verify-jobs.yml b/jjb/xci/bifrost-verify-jobs.yml index ef604fcfc..af51c2b79 100644 --- a/jjb/xci/bifrost-verify-jobs.yml +++ b/jjb/xci/bifrost-verify-jobs.yml @@ -34,7 +34,7 @@ extra-dib-elements: 'openssh-server' - 'suse': disabled: false - dib-os-release: '42.2' + dib-os-release: '42.3' dib-os-element: 'opensuse-minimal' dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl' extra-dib-elements: 'openssh-server' diff --git a/jjb/xci/osa-periodic-jobs.yml b/jjb/xci/osa-periodic-jobs.yml index 350ee766b..7311baab4 100644 --- a/jjb/xci/osa-periodic-jobs.yml +++ b/jjb/xci/osa-periodic-jobs.yml @@ -1,149 +1,227 @@ - project: - project: 'releng-xci' + name: 'opnfv-osa-periodic' - name: 'os-periodic' -#-------------------------------- -# Branch Anchors -#-------------------------------- -# the versions stated here default to branches which then later -# on used for checking out the branches, pulling in head of the branch. 
- master: &master - stream: master - openstack-osa-version: '{stream}' - opnfv-releng-version: 'master' - gs-pathname: '' - ocata: &ocata - stream: ocata - openstack-osa-version: 'stable/{stream}' - opnfv-releng-version: 'master' - gs-pathname: '/{stream}' + project: 'releng-xci' #-------------------------------- -# XCI PODs +# branches #-------------------------------- - pod: - - virtual: - <<: *master - - virtual: - <<: *ocata + stream: + - master: + branch: '{stream}' #-------------------------------- -# Supported Distros +# distros #-------------------------------- distro: - 'xenial': disabled: false - slave-label: xci-xenial-virtual - dib-os-release: 'xenial' - dib-os-element: 'ubuntu-minimal' - dib-os-packages: 'vlan,vim,less,bridge-utils,sudo,language-pack-en,iputils-ping,rsyslog,curl,python,debootstrap,ifenslave,ifenslave-2.6,lsof,lvm2,tcpdump,nfs-kernel-server,chrony,iptables' - extra-dib-elements: 'openssh-server' - 'centos7': disabled: true - slave-label: xci-centos7-virtual - dib-os-release: '7' - dib-os-element: 'centos7' - dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl' - extra-dib-elements: 'openssh-server' - 'suse': disabled: true - slave-label: xci-suse-virtual - dib-os-release: '42.2' - dib-os-element: 'opensuse-minimal' - dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl' - extra-dib-elements: 'openssh-server' - +#-------------------------------- +# type +#-------------------------------- + type: + - virtual +#-------------------------------- +# phases +#-------------------------------- + phase: + - 'deploy' + - 'healthcheck' #-------------------------------- # jobs #-------------------------------- jobs: - - 'osa-deploy-{pod}-{distro}-periodic-{stream}' - + - 'osa-periodic-{distro}-{type}-{stream}' + - 'osa-periodic-{phase}-{type}-{stream}' #-------------------------------- # job templates #-------------------------------- - job-template: - name: 'osa-deploy-{pod}-{distro}-periodic-{stream}' + name: 'osa-periodic-{distro}-{type}-{stream}' + + project-type: multijob disabled: '{obj:disabled}' concurrent: false properties: + - logrotate-default - build-blocker: use-build-blocker: true blocking-jobs: - - '^xci-os.*' - - '^xci-deploy.*' - - '^xci-functest.*' - - '^bifrost-.*periodic.*' - - '^osa-.*periodic.*' + - 'xci-verify-.*' + - 'bifrost-verify-.*' + - 'bifrost-periodic-.*' + - 'osa-verify-.*' + - 'osa-periodic-.*' block-level: 'NODE' + + wrappers: + - ssh-agent-wrapper + - build-timeout: + timeout: 240 + - fix-workspace-permissions + + scm: + - git-scm-osa + + triggers: + - pollscm: + cron: "@midnight" + ignore-post-commit-hooks: True + + parameters: + - project-parameter: + project: '{project}' + branch: '{branch}' + - label: + name: SLAVE_LABEL + default: 'xci-virtual-{distro}' + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - multijob: + name: deploy + condition: SUCCESSFUL + projects: + - name: 'osa-periodic-deploy-{type}-{stream}' + current-parameters: true + predefined-parameters: | + DISTRO={distro} + DEPLOY_SCENARIO=os-nosdn-nofeature-noha + git-revision: true + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: true + - multijob: + name: healthcheck + condition: SUCCESSFUL + projects: + - name: 'osa-periodic-healthcheck-{type}-{stream}' + current-parameters: true + predefined-parameters: | + DISTRO={distro} + DEPLOY_SCENARIO=os-nosdn-nofeature-noha + FUNCTEST_SUITE_NAME=healthcheck + node-parameters: true + kill-phase-on: NEVER + abort-all-job: false + +- job-template: + name: 
'osa-periodic-{phase}-{type}-{stream}' + + disabled: false + + concurrent: true + + properties: - logrotate-default + - build-blocker: + use-build-blocker: true + blocking-jobs: + - 'xci-verify-deploy-.*' + - 'xci-verify-healthcheck-.*' + - 'bifrost-verify-.*' + - 'bifrost-periodic-.*' + - 'osa-verify-deploy-.*' + - 'osa-verify-halthcheck-.*' + - 'osa-periodic-deploy-.*' + - 'osa-periodic-healthcheck-.*' + block-level: 'NODE' parameters: - project-parameter: project: '{project}' - branch: '{opnfv-releng-version}' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - - string: - name: XCI_FLAVOR - default: 'ha' + branch: '{branch}' + - label: + name: SLAVE_LABEL + default: 'xci-virtual-{distro}' - string: name: OPENSTACK_OSA_VERSION - default: '{openstack-osa-version}' - - string: - name: OPNFV_RELENG_VERSION - default: '{opnfv-releng-version}' + default: 'master' - string: name: DISTRO - default: '{distro}' + default: 'xenial' - string: - name: DIB_OS_RELEASE - default: '{dib-os-release}' + name: DEPLOY_SCENARIO + default: 'os-nosdn-nofeature-noha' - string: - name: DIB_OS_ELEMENT - default: '{dib-os-element}' + name: XCI_FLAVOR + default: 'mini' - string: - name: DIB_OS_PACKAGES - default: '{dib-os-packages}' + name: XCI_LOOP + default: 'periodic' - string: - name: EXTRA_DIB_ELEMENTS - default: '{extra-dib-elements}' + name: OPNFV_RELENG_DEV_PATH + default: $WORKSPACE/releng-xci - string: - name: CLEAN_DIB_IMAGES - default: 'true' - - label: - name: SLAVE_LABEL - default: '{slave-label}' + name: FUNCTEST_SUITE_NAME + default: 'healthcheck' - string: name: ANSIBLE_VERBOSITY - default: '' + default: '-vvvv' - string: - name: XCI_LOOP - default: 'periodic' - - wrappers: - - fix-workspace-permissions + name: FORCE_MASTER + default: 'true' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT scm: - - git-scm + - git-scm-osa - # trigger is disabled until we know which jobs we will have - # and adjust stuff accordingly - triggers: - - timed: '' # '@midnight' + wrappers: + - ssh-agent-wrapper + - build-timeout: + timeout: 240 + - fix-workspace-permissions builders: - description-setter: - description: "Built on $NODE_NAME - Scenario: $DEPLOY_SCENARIO" - - 'osa-deploy-builder' + description: "Built on $NODE_NAME" + - 'osa-periodic-{phase}-macro' -#--------------------------- +#-------------------------------- # builder macros -#--------------------------- +#-------------------------------- - builder: - name: osa-deploy-builder + name: 'osa-periodic-deploy-macro' builders: - - shell: - !include-raw: ./xci-deploy.sh + - shell: | + #!/bin/bash + + # here we will + # - clone releng-xci repo as the jobs are running against openstack gerrit + # and we need to clone releng-xci ourselves to $OPNFV_RELENG_DEV_PATH + # - run sources-branch-updater.sh from osa to update/pin the role versions + # at the time this job gets triggered against osa master in case if the + # deployment succeeds and we decide to bump version used by xci + # - copy generated role versions into $OPNFV_RELENG_DEV_PATH/xci/file + # - start the deployment by executing xci-deploy.sh as usual + # + # we might also need to pin versions of openstack services as well. + + echo "Hello World!" + +- builder: + name: 'osa-periodic-healthcheck-macro' + builders: + - shell: | + #!/bin/bash + + echo "Hello World!" 
+#-------------------------------- +# scm macro +#-------------------------------- +- scm: + name: git-scm-osa + scm: + - git: + url: https://review.openstack.org/p/openstack/openstack-ansible.git + branches: + - master + timeout: 15 diff --git a/jjb/xci/xci-daily-jobs.yml b/jjb/xci/xci-daily-jobs.yml index c7d59d5bb..441acf5f0 100644 --- a/jjb/xci/xci-daily-jobs.yml +++ b/jjb/xci/xci-daily-jobs.yml @@ -16,10 +16,6 @@ stream: master opnfv-releng-version: master gs-pathname: '' - ocata: &ocata - stream: ocata - opnfv-releng-version: master - gs-pathname: '/{stream}' #-------------------------------- # Scenarios #-------------------------------- @@ -30,14 +26,18 @@ - 'os-nosdn-nofeature-noha': auto-trigger-name: 'daily-trigger-disabled' xci-flavor: 'noha' + - 'os-odl-sfc-ha': + auto-trigger-name: 'daily-trigger-disabled' + xci-flavor: 'ha' + - 'os-odl-sfc-noha': + auto-trigger-name: 'daily-trigger-disabled' + xci-flavor: 'noha' #-------------------------------- # XCI PODs #-------------------------------- pod: - virtual: <<: *master - - virtual: - <<: *ocata #-------------------------------- # Supported Distros #-------------------------------- @@ -59,7 +59,7 @@ - 'suse': disabled: true slave-label: xci-suse-virtual - dib-os-release: '42.2' + dib-os-release: '42.3' dib-os-element: 'opensuse-minimal' dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl' extra-dib-elements: 'openssh-server' diff --git a/jjb/xci/xci-verify-jobs.yml b/jjb/xci/xci-verify-jobs.yml index 8d1ee55a4..5fca9bdbb 100644 --- a/jjb/xci/xci-verify-jobs.yml +++ b/jjb/xci/xci-verify-jobs.yml @@ -75,7 +75,7 @@ - patchset-created-event: exclude-drafts: 'false' exclude-trivial-rebase: 'false' - exclude-no-code-change: 'false' + exclude-no-code-change: 'true' - draft-published-event - comment-added-contains-event: comment-contains-value: 'recheck' @@ -87,19 +87,16 @@ branches: - branch-compare-type: 'ANT' branch-pattern: '**/{branch}' - file-paths: - - compare-type: ANT - pattern: 'xci/**' - disable-strict-forbidden-file-verification: 'true' + disable-strict-forbidden-file-verification: 'false' forbidden-file-paths: - compare-type: ANT pattern: 'bifrost/**' - compare-type: ANT - pattern: 'openstack-ansible/**' + pattern: 'prototypes/**' - compare-type: ANT - pattern: 'puppet-infracloud/**' + pattern: 'upstream/**' - compare-type: ANT - pattern: 'README.rst' + pattern: '**/README.rst' readable-message: true parameters: @@ -220,7 +217,7 @@ - builder: name: 'xci-verify-healthcheck-macro' builders: - - shell: | - #!/bin/bash - - echo "Hello World!" 
+ - shell: + !include-raw: ../../utils/fetch_os_creds.sh + - shell: + !include-raw: ../functest/functest-alpine.sh diff --git a/jjb/yardstick/yardstick-daily-jobs.yml b/jjb/yardstick/yardstick-daily-jobs.yml index 39935abc2..937e83fb5 100644 --- a/jjb/yardstick/yardstick-daily-jobs.yml +++ b/jjb/yardstick/yardstick-daily-jobs.yml @@ -166,36 +166,6 @@ installer: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *danube - - arm-pod2: - slave-label: '{pod}' - installer: fuel - auto-trigger-name: 'daily-trigger-disabled' - <<: *master - - arm-pod2: - slave-label: '{pod}' - installer: fuel - auto-trigger-name: 'daily-trigger-disabled' - <<: *danube - - arm-pod5: - slave-label: '{pod}' - installer: fuel - auto-trigger-name: 'daily-trigger-disabled' - <<: *master - - arm-pod5: - slave-label: '{pod}' - installer: fuel - auto-trigger-name: 'daily-trigger-disabled' - <<: *danube - - arm-virtual2: - slave-label: '{pod}' - installer: fuel - auto-trigger-name: 'daily-trigger-disabled' - <<: *master - - arm-virtual2: - slave-label: '{pod}' - installer: fuel - auto-trigger-name: 'daily-trigger-disabled' - <<: *danube - orange-pod2: slave-label: '{pod}' installer: joid @@ -281,6 +251,7 @@ description: "POD: $NODE_NAME" - 'yardstick-cleanup' - 'yardstick-fetch-os-creds' + - 'yardstick-fetch-k8s-conf' - 'yardstick-{testsuite}' - 'yardstick-store-results' @@ -310,6 +281,12 @@ - shell: !include-raw: ../../utils/fetch_os_creds.sh +- builder: + name: yardstick-fetch-k8s-conf + builders: + - shell: + !include-raw: ./yardstick-get-k8s-conf.sh + - builder: name: yardstick-store-results builders: @@ -380,13 +357,6 @@ name: YARDSTICK_DB_BACKEND default: '-i 104.197.68.199:8086' description: 'Arguments to use in order to choose the backend DB' -- parameter: - name: 'yardstick-params-arm-virtual2' - parameters: - - string: - name: YARDSTICK_DB_BACKEND - default: '-i 104.197.68.199:8086' - description: 'Arguments to use in order to choose the backend DB' - parameter: name: 'yardstick-params-joid-baremetal' parameters: @@ -455,22 +425,6 @@ default: '-i 104.197.68.199:8086' description: 'Arguments to use in order to choose the backend DB' -- parameter: - name: 'yardstick-params-arm-pod2' - parameters: - - string: - name: YARDSTICK_DB_BACKEND - default: '-i 104.197.68.199:8086' - description: 'Arguments to use in order to choose the backend DB' - -- parameter: - name: 'yardstick-params-arm-pod5' - parameters: - - string: - name: YARDSTICK_DB_BACKEND - default: '-i 104.197.68.199:8086' - description: 'Arguments to use in order to choose the backend DB' - - parameter: name: 'yardstick-params-virtual' parameters: @@ -515,4 +469,4 @@ - trigger: name: 'yardstick-daily-huawei-pod4-trigger' triggers: - - timed: '' \ No newline at end of file + - timed: '' diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh index 56d087473..aaefba26e 100755 --- a/jjb/yardstick/yardstick-daily.sh +++ b/jjb/yardstick/yardstick-daily.sh @@ -19,11 +19,15 @@ if [[ ${INSTALLER_TYPE} == 'apex' ]]; then fi if [[ ${INSTALLER_TYPE} == 'joid' ]]; then - # If production lab then creds may be retrieved dynamically - # creds are on the jumphost, always in the same folder - rc_file_vol="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds" - # If dev lab, credentials may not be the default ones, just provide a path to put them into docker - # replace the default one by the customized one provided by jenkins config + if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then + rc_file_vol="-v 
/home/ubuntu/config:/etc/yardstick/admin.conf" + else + # If production lab then creds may be retrieved dynamically + # creds are on the jumphost, always in the same folder + rc_file_vol="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds" + # If dev lab, credentials may not be the default ones, just provide a path to put them into docker + # replace the default one by the customized one provided by jenkins config + fi elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then cacert_file_vol="-v ${HOME}/os_cacert:/etc/yardstick/os_cacert" echo "export OS_CACERT=/etc/yardstick/os_cacert" >> ${HOME}/opnfv-openrc.sh @@ -55,8 +59,14 @@ sudo rm -rf ${dir_result}/* map_log_dir="-v ${dir_result}:/tmp/yardstick" # Run docker -cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \ +if [[ ${INSTALLER_TYPE} == "joid" && "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then + juju ssh kubernetes-master/0 sudo apt-get install -y docker.io + cmd="juju ssh kubernetes-master/0 sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}" +else + cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \ exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}" +fi + echo "Yardstick: Running docker cmd: ${cmd}" ${cmd} diff --git a/jjb/yardstick/yardstick-get-k8s-conf.sh b/jjb/yardstick/yardstick-get-k8s-conf.sh new file mode 100755 index 000000000..e93367f9a --- /dev/null +++ b/jjb/yardstick/yardstick-get-k8s-conf.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -e + +dest_path="$HOME/admin.conf" + +if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then + juju scp kubernetes-master/0:config "${dest_path}" +fi diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh index 312e1ac5c..3bc66f942 100755 --- a/utils/fetch_os_creds.sh +++ b/utils/fetch_os_creds.sh @@ -95,6 +95,7 @@ fi ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" # Start fetching the files +info "Fetching rc file..." if [ "$installer_type" == "fuel" ]; then verify_connectivity $installer_ip if [ "${BRANCH}" == "master" ]; then @@ -110,8 +111,12 @@ if [ "$installer_type" == "fuel" ]; then "sudo salt --out txt 'ctl*' pillar.get _param:openstack_control_address | awk '{print \$2; exit}'" | \ sed 's/ //g') &> /dev/null - info "Fetching rc file from controller $controller_ip..." + info "... from controller $controller_ip..." ssh ${ssh_options} ubuntu@${controller_ip} "sudo cat /root/keystonercv3" > $dest_path + + if [[ $BUILD_TAG =~ "baremetal" ]]; then + ssh ${ssh_options} ubuntu@${installer_ip} "cat /etc/ssl/certs/os_cacert" > $os_cacert + fi else #ip_fuel="10.20.0.2" env=$(sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \ @@ -130,7 +135,7 @@ if [ "$installer_type" == "fuel" ]; then error "The controller $controller_ip is not up. Please check that the POD is correctly deployed." fi - info "Fetching rc file from controller $controller_ip..." + info "... from controller $controller_ip..." sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \ "scp ${ssh_options} ${controller_ip}:/root/openrc ." 
&> /dev/null sshpass -p r00tme scp 2>/dev/null ${ssh_options} root@${installer_ip}:~/openrc $dest_path &> /dev/null @@ -144,11 +149,18 @@ if [ "$installer_type" == "fuel" ]; then echo $auth_url >> $dest_path elif [ "$installer_type" == "apex" ]; then + if ! ipcalc -c $installer_ip; then + installer_ip=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}') + if [ -z "$installer_ip" ] || ! $(ipcalc -c $installer_ip); then + echo "Unable to find valid IP for Apex undercloud: ${installer_ip}" + exit 1 + fi + fi verify_connectivity $installer_ip # The credentials file is located in the Instack VM (192.0.2.1) # NOTE: This might change for bare metal deployments - info "Fetching rc file from Instack VM $installer_ip..." + info "... from Instack VM $installer_ip..." if [ -f /root/.ssh/id_rsa ]; then chmod 600 /root/.ssh/id_rsa fi @@ -158,7 +170,7 @@ elif [ "$installer_type" == "compass" ]; then if [ "${BRANCH}" == "master" ]; then sudo docker cp compass-tasks:/opt/openrc $dest_path &> /dev/null sudo chown $(whoami):$(whoami) $dest_path - sudo docker cp compass-tasks:/opt/os_cacert $os_cacert &> /dev/null + sudo docker cp compass-tasks:/opt/os_cacert $os_cacert else verify_connectivity $installer_ip controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \ @@ -170,7 +182,7 @@ elif [ "$installer_type" == "compass" ]; then error "The controller $controller_ip is not up. Please check that the POD is correctly deployed." fi - info "Fetching rc file from controller $controller_ip..." + info "... from controller $controller_ip..." sshpass -p root ssh 2>/dev/null $ssh_options root@${installer_ip} \ "scp $ssh_options ${controller_ip}:/opt/admin-openrc.sh ." &> /dev/null sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null @@ -194,7 +206,7 @@ elif [ "$installer_type" == "compass" ]; then elif [ "$installer_type" == "joid" ]; then # do nothing...for the moment # we can either do a scp from the jumphost or use the -v option to transmit the param to the docker file - echo "Do nothing, creds will be provided through volume option at docker creation for joid" + info "Do nothing, creds will be provided through volume option at docker creation for joid" elif [ "$installer_type" == "foreman" ]; then #ip_foreman="172.30.10.73" @@ -231,6 +243,10 @@ elif [ "$installer_type" == "daisy" ]; then sshpass -p r00tme scp 2>/dev/null $ssh_options root@${installer_ip}:/etc/kolla/admin-openrc.sh $dest_path &> /dev/null +elif ["$installer_type" == "osa"]; then + # Get RC file from control server + filename=$(ssh -o StrictHostKeyChecking=no root@${controller_ip} find /var/lib/lxc/controller00_nova_api_placement_container-* -name openrc) + scp root@${controller_ip}:${filename} ${destpath} else error "Installer $installer is not supported by this script" fi diff --git a/utils/slave-monitor-0.1.sh b/utils/slave-monitor-0.1.sh new file mode 100644 index 000000000..161aaef21 --- /dev/null +++ b/utils/slave-monitor-0.1.sh @@ -0,0 +1,98 @@ +#!/bin/bash +# SPDX-license-identifier: Apache-2.0 +############################################################################## +# Copyright (c) 2016 Linux Foundation and others. +# All rights reserved. 
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +#This will put a bunch of files in the pwd. you have been warned. +#Counts how long slaves have been online or offline + + +#Yes I know about jq +curlcommand() { +curl -s "https://build.opnfv.org/ci/computer/api/json?tree=computer\[displayName,offline\]" \ + | awk -v k=":" '{n=split($0,a,","); for (i=1; i<=n; i++) print a[i]}' \ + | grep -v "_class" \ + | awk 'NR%2{printf "%s ",$0;next;}1' \ + | awk -F":" '{print $2,$3}' \ + | awk '{print $1,$3}' \ + | sed s,\},,g \ + | sed s,],,g \ + | sed s,\",,g +} + +if [ -f podoutput-current ]; then + cp podoutput-current podoutput-lastiteration +fi + +curlcommand > podoutput-current + +declare -A slavescurrent slaveslastiteration + +while read -r name status ; do + slavescurrent["$name"]="$status" +done < <(cat podoutput-current) + +while read -r name status ; do + slaveslastiteration["$name"]=$status +done < <(cat podoutput-lastiteration) + +main () { +for slavename in "${!slavescurrent[@]}"; do + #Slave is online. Mark it down. + if [ "${slavescurrent[$slavename]}" == "false" ]; then + + if [ -f "$slavename"-offline ]; then + echo "removing offline status from $slavename slave was offline for $(cat "$slavename"-offline ) iterations" + rm "$slavename"-offline + fi + + if ! [ -f "$slavename"-online ]; then + echo "1" > "$slavename"-online + elif [ -f "$slavename"-online ]; then + #read and increment slavename + read -r -d $'\x04' var < "$slavename"-online + ((var++)) + echo -n "ONLINE $slavename " + echo "for $var iterations" + echo "$var" > "$slavename"-online + fi + fi + + #went offline since last iteration. + if [ "${slavescurrent[$slavename]}" == "false" ] && [ "${slaveslastiteration[$slavename]}" == "true" ]; then + echo "JUST WENT OFFLINE $slavename " + if [ -f "$slavename"-online ]; then + echo "removing online status from $slavename. slave was online for $(cat "$slavename"-online ) iterations" + rm "$slavename"-online + fi + + fi + + #slave is offline + if [ "${slavescurrent[$slavename]}" == "true" ]; then + if ! 
[ -f "$slavename"-offline ]; then + echo "1" > "$slavename"-offline + fi + + if [ -f "$slavename"-offline ]; then + #read and increment slavename + read -r -d $'\x04' var < "$slavename"-offline + ((var++)) + echo "$var" > "$slavename"-offline + if [ "$var" -gt "30" ]; then + echo "OFFLINE FOR $var ITERATIONS REMOVE $slavename " + else + echo "OFFLINE $slavename FOR $var ITERATIONS " + fi + fi + fi + +done +} + +main diff --git a/utils/test/reporting/docker/reporting.sh b/utils/test/reporting/docker/reporting.sh index 076dc4719..6cc7a7c9e 100755 --- a/utils/test/reporting/docker/reporting.sh +++ b/utils/test/reporting/docker/reporting.sh @@ -4,7 +4,7 @@ export PYTHONPATH="${PYTHONPATH}:./reporting" export CONFIG_REPORTING_YAML=./reporting/reporting.yaml declare -a versions=(danube master) -declare -a projects=(functest storperf yardstick qtip) +declare -a projects=(functest storperf yardstick qtip vsperf bottlenecks) project=$1 reporting_type=$2 @@ -32,6 +32,7 @@ cp -Rf js display # yardstick | status # storperf | status # qtip | status +# vsperf | status function report_project() { diff --git a/utils/test/reporting/reporting/bottlenecks/__init__.py b/utils/test/reporting/reporting/bottlenecks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/utils/test/reporting/reporting/bottlenecks/reporting-status.py b/utils/test/reporting/reporting/bottlenecks/reporting-status.py new file mode 100644 index 000000000..8966d0690 --- /dev/null +++ b/utils/test/reporting/reporting/bottlenecks/reporting-status.py @@ -0,0 +1,145 @@ +#!/usr/bin/python +# +# This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +import datetime +import os + +import jinja2 + +import reporting.utils.reporting_utils as rp_utils +import reporting.utils.scenarioResult as sr + +INSTALLERS = rp_utils.get_config('general.installers') +VERSIONS = rp_utils.get_config('general.versions') +PERIOD = rp_utils.get_config('general.period') + +# Logger +LOGGER = rp_utils.getLogger("Bottlenecks-Status") +reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M") + +LOGGER.info("*******************************************") +LOGGER.info("* Generating reporting scenario status *") +LOGGER.info("* Data retention = %s days *", PERIOD) +LOGGER.info("* *") +LOGGER.info("*******************************************") + +# retrieve the list of bottlenecks tests +BOTTLENECKS_TESTS = rp_utils.get_config('bottlenecks.test_list') +LOGGER.info("Bottlenecks tests: %s", BOTTLENECKS_TESTS) + +# For all the versions +for version in VERSIONS: + # For all the installers + for installer in INSTALLERS: + # get scenarios results data + scenario_results = rp_utils.getScenarios("bottlenecks", + "posca_factor_ping", + installer, + version) + LOGGER.info("scenario_results: %s", scenario_results) + + scenario_stats = rp_utils.getScenarioStats(scenario_results) + LOGGER.info("scenario_stats: %s", scenario_stats) + items = {} + scenario_result_criteria = {} + + # From each scenarios get results list + for s, s_result in scenario_results.items(): + LOGGER.info("---------------------------------") + LOGGER.info("installer %s, version %s, scenario %s", installer, + version, s) + ten_criteria = len(s_result) + + ten_score = 0 + for v in s_result: + if "PASS" in v['criteria']: + ten_score += 1 + + LOGGER.info("ten_score: %s / %s", (ten_score, ten_criteria)) + + four_score = 0 
+ try: + LASTEST_TESTS = rp_utils.get_config( + 'general.nb_iteration_tests_success_criteria') + s_result.sort(key=lambda x: x['start_date']) + four_result = s_result[-LASTEST_TESTS:] + LOGGER.debug("four_result: {}".format(four_result)) + LOGGER.debug("LASTEST_TESTS: {}".format(LASTEST_TESTS)) + # logger.debug("four_result: {}".format(four_result)) + four_criteria = len(four_result) + for v in four_result: + if "PASS" in v['criteria']: + four_score += 1 + LOGGER.info("4 Score: %s / %s ", (four_score, + four_criteria)) + except Exception: + LOGGER.error("Impossible to retrieve the four_score") + + try: + s_status = (four_score * 100) / four_criteria + except Exception: + s_status = 0 + LOGGER.info("Score percent = %s", str(s_status)) + s_four_score = str(four_score) + '/' + str(four_criteria) + s_ten_score = str(ten_score) + '/' + str(ten_criteria) + s_score_percent = str(s_status) + + LOGGER.debug(" s_status: %s", s_status) + if s_status == 100: + LOGGER.info(">>>>> scenario OK, save the information") + else: + LOGGER.info(">>>> scenario not OK, last 4 iterations = %s, \ + last 10 days = %s", (s_four_score, s_ten_score)) + + s_url = "" + if len(s_result) > 0: + build_tag = s_result[len(s_result)-1]['build_tag'] + LOGGER.debug("Build tag: %s", build_tag) + s_url = s_url = rp_utils.getJenkinsUrl(build_tag) + LOGGER.info("last jenkins url: %s", s_url) + + # Save daily results in a file + path_validation_file = ("./display/" + version + + "/bottlenecks/scenario_history.txt") + + if not os.path.exists(path_validation_file): + with open(path_validation_file, 'w') as f: + info = 'date,scenario,installer,details,score\n' + f.write(info) + + with open(path_validation_file, "a") as f: + info = (reportingDate + "," + s + "," + installer + + "," + s_ten_score + "," + + str(s_score_percent) + "\n") + f.write(info) + + scenario_result_criteria[s] = sr.ScenarioResult(s_status, + s_four_score, + s_ten_score, + s_score_percent, + s_url) + + LOGGER.info("--------------------------") + + templateLoader = jinja2.FileSystemLoader(".") + templateEnv = jinja2.Environment(loader=templateLoader, + autoescape=True) + + TEMPLATE_FILE = ("./reporting/bottlenecks/template" + "/index-status-tmpl.html") + template = templateEnv.get_template(TEMPLATE_FILE) + + outputText = template.render(scenario_results=scenario_result_criteria, + installer=installer, + period=PERIOD, + version=version, + date=reportingDate) + + with open("./display/" + version + + "/bottlenecks/status-" + installer + ".html", "wb") as fh: + fh.write(outputText) diff --git a/utils/test/reporting/reporting/bottlenecks/template/index-status-tmpl.html b/utils/test/reporting/reporting/bottlenecks/template/index-status-tmpl.html new file mode 100644 index 000000000..c4497ac1b --- /dev/null +++ b/utils/test/reporting/reporting/bottlenecks/template/index-status-tmpl.html @@ -0,0 +1,114 @@ + + + + + + + + + + + + + + + +
[new 114-line HTML template; the markup did not survive extraction. Recoverable content: a header "Bottlenecks status page ({{version}}, {{date}})"; a note that "Reported values represent the percentage of completed CI tests (posca_factor_ping) during the reporting period, where results were communicated to the Test Database"; and a "List of last scenarios ({{version}}) run over the last {{period}} days" table with columns Scenario, Status, Trend, Last 4 Iterations, Last 10 Days, iterating scenario_results and rendering getFourDaysScore() / getTenDaysScore() per scenario.]
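For reference, the scoring performed by the new Bottlenecks status script above (and by the vsperf one further down, which follows the same pattern) can be summarised with the sketch below. This is an illustrative sketch only, not taken from the patch: it assumes stdlib logging in place of rp_utils.getLogger and results given as plain dicts carrying 'criteria' and 'start_date' keys. Note that %-style lazy logging takes the values as separate arguments rather than a single tuple.

import logging

LOGGER = logging.getLogger("Bottlenecks-Status")

def score_scenario(s_result, lastest_tests=4):
    # success count over the whole retention period ("last 10 days" score)
    ten_criteria = len(s_result)
    ten_score = sum(1 for r in s_result if "PASS" in r['criteria'])
    LOGGER.info("ten_score: %s / %s", ten_score, ten_criteria)

    # success count over the last N iterations ("last 4 iterations" score)
    latest = sorted(s_result, key=lambda r: r['start_date'])[-lastest_tests:]
    four_criteria = len(latest)
    four_score = sum(1 for r in latest if "PASS" in r['criteria'])
    LOGGER.info("4 Score: %s / %s", four_score, four_criteria)

    # percentage driving the OK / not-OK decision on the status page
    try:
        s_status = (four_score * 100) / four_criteria
    except ZeroDivisionError:
        s_status = 0
    return s_status, "%s/%s" % (four_score, four_criteria), \
        "%s/%s" % (ten_score, ten_criteria)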
diff --git a/utils/test/reporting/reporting/functest/reporting-status.py b/utils/test/reporting/reporting/functest/reporting-status.py index c7c2051a3..02bf67d0e 100755 --- a/utils/test/reporting/reporting/functest/reporting-status.py +++ b/utils/test/reporting/reporting/functest/reporting-status.py @@ -7,18 +7,19 @@ # http://www.apache.org/licenses/LICENSE-2.0 # import datetime -import jinja2 import os import sys import time +import jinja2 + import testCase as tc import scenarioResult as sr +import reporting.utils.reporting_utils as rp_utils -# manage conf -import utils.reporting_utils as rp_utils - -"""Functest reporting status""" +""" +Functest reporting status +""" # Logger logger = rp_utils.getLogger("Functest-Status") @@ -106,7 +107,8 @@ for version in versions: for installer in installers: # get scenarios - scenario_results = rp_utils.getScenarios(healthcheck, + scenario_results = rp_utils.getScenarios("functest", + "connection_check", installer, version) # get nb of supported architecture (x86, aarch64) @@ -219,7 +221,7 @@ for version in versions: logger.debug("No results found") items[s] = testCases2BeDisplayed - except: + except Exception: logger.error("Error: installer %s, version %s, scenario %s" % (installer, version, s)) logger.error("No data available: %s" % (sys.exc_info()[0])) @@ -279,13 +281,13 @@ for version in versions: template = templateEnv.get_template(TEMPLATE_FILE) outputText = template.render( - scenario_stats=scenario_stats, - scenario_results=scenario_result_criteria, - items=items, - installer=installer_display, - period=period, - version=version, - date=reportingDate) + scenario_stats=scenario_stats, + scenario_results=scenario_result_criteria, + items=items, + installer=installer_display, + period=period, + version=version, + date=reportingDate) with open("./display/" + version + "/functest/status-" + @@ -298,8 +300,6 @@ for version in versions: # Generate outputs for export # pdf - # TODO Change once web site updated...use the current one - # to test pdf production url_pdf = rp_utils.get_config('general.url') pdf_path = ("./display/" + version + "/functest/status-" + installer_display + ".html") diff --git a/utils/test/reporting/reporting/functest/reporting-tempest.py b/utils/test/reporting/reporting/functest/reporting-tempest.py index bc2885639..d78d9a19d 100755 --- a/utils/test/reporting/reporting/functest/reporting-tempest.py +++ b/utils/test/reporting/reporting/functest/reporting-tempest.py @@ -8,58 +8,57 @@ # http://www.apache.org/licenses/LICENSE-2.0 # SPDX-license-identifier: Apache-2.0 -from urllib2 import Request, urlopen, URLError from datetime import datetime import json -import jinja2 import os -# manage conf -import utils.reporting_utils as rp_utils +from urllib2 import Request, urlopen, URLError +import jinja2 + +import reporting.utils.reporting_utils as rp_utils -installers = rp_utils.get_config('general.installers') -items = ["tests", "Success rate", "duration"] +INSTALLERS = rp_utils.get_config('general.installers') +ITEMS = ["tests", "Success rate", "duration"] CURRENT_DIR = os.getcwd() PERIOD = rp_utils.get_config('general.period') -criteria_nb_test = 165 -criteria_duration = 1800 -criteria_success_rate = 90 +CRITERIA_NB_TEST = 100 +CRITERIA_DURATION = 1800 +CRITERIA_SUCCESS_RATE = 100 logger = rp_utils.getLogger("Tempest") logger.info("************************************************") logger.info("* Generating reporting Tempest_smoke_serial *") -logger.info("* Data retention = %s days *" % PERIOD) +logger.info("* Data retention = %s days 
*", PERIOD) logger.info("* *") logger.info("************************************************") logger.info("Success criteria:") -logger.info("nb tests executed > %s s " % criteria_nb_test) -logger.info("test duration < %s s " % criteria_duration) -logger.info("success rate > %s " % criteria_success_rate) +logger.info("nb tests executed > %s s ", CRITERIA_NB_TEST) +logger.info("test duration < %s s ", CRITERIA_DURATION) +logger.info("success rate > %s ", CRITERIA_SUCCESS_RATE) # For all the versions for version in rp_utils.get_config('general.versions'): - for installer in installers: + for installer in INSTALLERS: # we consider the Tempest results of the last PERIOD days url = ("http://" + rp_utils.get_config('testapi.url') + - "?case=tempest_smoke_serial") - request = Request(url + '&period=' + str(PERIOD) + - '&installer=' + installer + - '&version=' + version) - logger.info("Search tempest_smoke_serial results for installer %s" - " for version %s" - % (installer, version)) + "?case=tempest_smoke_serial&period=" + str(PERIOD) + + "&installer=" + installer + "&version=" + version) + request = Request(url) + logger.info(("Search tempest_smoke_serial results for installer %s" + " for version %s"), installer, version) try: response = urlopen(request) k = response.read() results = json.loads(k) - except URLError as e: - logger.error("Error code: %s" % e) - + except URLError as err: + logger.error("Error code: %s", err) + logger.debug("request sent: %s", url) + logger.debug("Results from API: %s", results) test_results = results['results'] - + logger.debug("Test results: %s", test_results) scenario_results = {} criteria = {} errors = {} @@ -72,27 +71,37 @@ for version in rp_utils.get_config('general.versions'): scenario_results[r['scenario']] = [] scenario_results[r['scenario']].append(r) + logger.debug("Scenario results: %s", scenario_results) + for s, s_result in scenario_results.items(): scenario_results[s] = s_result[0:5] # For each scenario, we build a result object to deal with # results, criteria and error handling for result in scenario_results[s]: result["start_date"] = result["start_date"].split(".")[0] + logger.debug("start_date= %s", result["start_date"]) # retrieve results # **************** nb_tests_run = result['details']['tests'] nb_tests_failed = result['details']['failures'] - if nb_tests_run != 0: - success_rate = 100 * ((int(nb_tests_run) - + logger.debug("nb_tests_run= %s", nb_tests_run) + logger.debug("nb_tests_failed= %s", nb_tests_failed) + + try: + success_rate = (100 * (int(nb_tests_run) - int(nb_tests_failed)) / - int(nb_tests_run)) - else: + int(nb_tests_run)) + except ZeroDivisionError: success_rate = 0 result['details']["tests"] = nb_tests_run result['details']["Success rate"] = str(success_rate) + "%" + logger.info("nb_tests_run= %s", result['details']["tests"]) + logger.info("test rate = %s", + result['details']["Success rate"]) + # Criteria management # ******************* crit_tests = False @@ -100,11 +109,11 @@ for version in rp_utils.get_config('general.versions'): crit_time = False # Expect that at least 165 tests are run - if nb_tests_run >= criteria_nb_test: + if nb_tests_run >= CRITERIA_NB_TEST: crit_tests = True # Expect that at least 90% of success - if success_rate >= criteria_success_rate: + if success_rate >= CRITERIA_SUCCESS_RATE: crit_rate = True # Expect that the suite duration is inferior to 30m @@ -114,28 +123,27 @@ for version in rp_utils.get_config('general.versions'): '%Y-%m-%d %H:%M:%S') delta = stop_date - start_date - if 
(delta.total_seconds() < criteria_duration): + + if delta.total_seconds() < CRITERIA_DURATION: crit_time = True result['criteria'] = {'tests': crit_tests, 'Success rate': crit_rate, 'duration': crit_time} try: - logger.debug("Scenario %s, Installer %s" - % (s_result[1]['scenario'], installer)) - logger.debug("Nb Test run: %s" % nb_tests_run) - logger.debug("Test duration: %s" - % result['details']['duration']) - logger.debug("Success rate: %s" % success_rate) - except: + logger.debug("Nb Test run: %s", nb_tests_run) + logger.debug("Test duration: %s", delta) + logger.debug("Success rate: %s", success_rate) + except Exception: # pylint: disable=broad-except logger.error("Data format error") # Error management # **************** try: errors = result['details']['errors'] - result['errors'] = errors.replace('{0}', '') - except: + logger.info("errors: %s", errors) + result['errors'] = errors + except Exception: # pylint: disable=broad-except logger.error("Error field not present (Brahamputra runs?)") templateLoader = jinja2.FileSystemLoader(".") @@ -146,7 +154,7 @@ for version in rp_utils.get_config('general.versions'): template = templateEnv.get_template(TEMPLATE_FILE) outputText = template.render(scenario_results=scenario_results, - items=items, + items=ITEMS, installer=installer) with open("./display/" + version + diff --git a/utils/test/reporting/reporting/qtip/reporting-status.py b/utils/test/reporting/reporting/qtip/reporting-status.py index f0127b50f..56f9e0aee 100644 --- a/utils/test/reporting/reporting/qtip/reporting-status.py +++ b/utils/test/reporting/reporting/qtip/reporting-status.py @@ -33,8 +33,7 @@ def prepare_profile_file(version): if not os.path.exists(profile_dir): os.makedirs(profile_dir) - profile_file = "{}/{}/scenario_history.txt".format(profile_dir, - version) + profile_file = "{}/scenario_history.txt".format(profile_dir) if not os.path.exists(profile_file): with open(profile_file, 'w') as f: info = 'date,scenario,installer,details,score\n' diff --git a/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html b/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html index 26da36ceb..92f3395dc 100644 --- a/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html +++ b/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html @@ -46,10 +46,11 @@
diff --git a/utils/test/reporting/reporting/reporting.yaml b/utils/test/reporting/reporting/reporting.yaml index 26feb31d3..9bb90b806 100644 --- a/utils/test/reporting/reporting/reporting.yaml +++ b/utils/test/reporting/reporting/reporting.yaml @@ -64,6 +64,10 @@ qtip: log_level: ERROR period: 1 -bottleneck: +bottlenecks: + test_list: + - posca_factor_ping + - posca_factor_system_bandwidth + log_level: ERROR vsperf: diff --git a/utils/test/reporting/reporting/storperf/reporting-status.py b/utils/test/reporting/reporting/storperf/reporting-status.py index 0c188a338..103b80fd9 100644 --- a/utils/test/reporting/reporting/storperf/reporting-status.py +++ b/utils/test/reporting/reporting/storperf/reporting-status.py @@ -7,13 +7,12 @@ # http://www.apache.org/licenses/LICENSE-2.0 # import datetime -import jinja2 import os -# manage conf -import utils.reporting_utils as rp_utils +import jinja2 -import utils.scenarioResult as sr +import reporting.utils.reporting_utils as rp_utils +import reporting.utils.scenarioResult as sr installers = rp_utils.get_config('general.installers') versions = rp_utils.get_config('general.versions') @@ -39,7 +38,8 @@ for version in versions: for installer in installers: # get scenarios results data # for the moment we consider only 1 case snia_steady_state - scenario_results = rp_utils.getScenarios("snia_steady_state", + scenario_results = rp_utils.getScenarios("storperf", + "snia_steady_state", installer, version) # logger.info("scenario_results: %s" % scenario_results) diff --git a/utils/test/reporting/reporting/utils/reporting_utils.py b/utils/test/reporting/reporting/utils/reporting_utils.py index 62820914a..235bd6ef9 100644 --- a/utils/test/reporting/reporting/utils/reporting_utils.py +++ b/utils/test/reporting/reporting/utils/reporting_utils.py @@ -20,15 +20,15 @@ import yaml # YAML UTILS # # ----------------------------------------------------------- -def get_parameter_from_yaml(parameter, file): +def get_parameter_from_yaml(parameter, config_file): """ Returns the value of a given parameter in file.yaml parameter must be given in string format with dots Example: general.openstack.image_name """ - with open(file) as f: - file_yaml = yaml.safe_load(f) - f.close() + with open(config_file) as my_file: + file_yaml = yaml.safe_load(my_file) + my_file.close() value = file_yaml for element in parameter.split("."): value = value.get(element) @@ -39,6 +39,9 @@ def get_parameter_from_yaml(parameter, file): def get_config(parameter): + """ + Get configuration parameter from yaml configuration file + """ yaml_ = os.environ["CONFIG_REPORTING_YAML"] return get_parameter_from_yaml(parameter, yaml_) @@ -49,20 +52,23 @@ def get_config(parameter): # # ----------------------------------------------------------- def getLogger(module): - logFormatter = logging.Formatter("%(asctime)s [" + - module + - "] [%(levelname)-5.5s] %(message)s") + """ + Get Logger + """ + log_formatter = logging.Formatter("%(asctime)s [" + + module + + "] [%(levelname)-5.5s] %(message)s") logger = logging.getLogger() log_file = get_config('general.log.log_file') log_level = get_config('general.log.log_level') - fileHandler = logging.FileHandler("{0}/{1}".format('.', log_file)) - fileHandler.setFormatter(logFormatter) - logger.addHandler(fileHandler) + file_handler = logging.FileHandler("{0}/{1}".format('.', log_file)) + file_handler.setFormatter(log_formatter) + logger.addHandler(file_handler) - consoleHandler = logging.StreamHandler() - consoleHandler.setFormatter(logFormatter) - 
logger.addHandler(consoleHandler) + console_handler = logging.StreamHandler() + console_handler.setFormatter(log_formatter) + logger.addHandler(console_handler) logger.setLevel(log_level) return logger @@ -73,6 +79,9 @@ def getLogger(module): # # ----------------------------------------------------------- def getApiResults(case, installer, scenario, version): + """ + Get Results by calling the API + """ results = json.dumps([]) # to remove proxy (to be removed at the end for local test only) # proxy_handler = urllib2.ProxyHandler({}) @@ -94,29 +103,32 @@ def getApiResults(case, installer, scenario, version): response = urlopen(request) k = response.read() results = json.loads(k) - except URLError as e: - print 'No kittez. Got an error code:'.format(e) + except URLError: + print "Error when retrieving results form API" return results -def getScenarios(case, installer, version): - - try: - case = case.getName() - except: - # if case is not an object test case, try the string - if type(case) == str: - case = case - else: - raise ValueError("Case cannot be evaluated") +def getScenarios(project, case, installer, version): + """ + Get the list of Scenarios + """ period = get_config('general.period') url_base = get_config('testapi.url') - url = ("http://" + url_base + "?case=" + case + - "&period=" + str(period) + "&installer=" + installer + - "&version=" + version) + url = ("http://" + url_base + + "?installer=" + installer + + "&period=" + str(period)) + + if version is not None: + url += "&version=" + version + + if project is not None: + url += "&project=" + project + + if case is not None: + url += "&case=" + case try: request = Request(url) @@ -136,7 +148,7 @@ def getScenarios(case, installer, version): results = json.loads(k) test_results += results['results'] except KeyError: - print ('No pagination detected') + print "No pagination detected" except URLError as err: print 'Got an error code: {}'.format(err) @@ -144,32 +156,38 @@ def getScenarios(case, installer, version): test_results.reverse() scenario_results = {} - for r in test_results: + for my_result in test_results: # Retrieve all the scenarios per installer - if not r['scenario'] in scenario_results.keys(): - scenario_results[r['scenario']] = [] + if not my_result['scenario'] in scenario_results.keys(): + scenario_results[my_result['scenario']] = [] # Do we consider results from virtual pods ... # Do we consider results for non HA scenarios... exclude_virtual_pod = get_config('functest.exclude_virtual') exclude_noha = get_config('functest.exclude_noha') - if ((exclude_virtual_pod and "virtual" in r['pod_name']) or - (exclude_noha and "noha" in r['scenario'])): + if ((exclude_virtual_pod and "virtual" in my_result['pod_name']) or + (exclude_noha and "noha" in my_result['scenario'])): print "exclude virtual pod results..." 
else: - scenario_results[r['scenario']].append(r) + scenario_results[my_result['scenario']].append(my_result) return scenario_results def getScenarioStats(scenario_results): + """ + Get the number of occurence of scenarios over the defined PERIOD + """ scenario_stats = {} - for k, v in scenario_results.iteritems(): - scenario_stats[k] = len(v) - + for res_k, res_v in scenario_results.iteritems(): + scenario_stats[res_k] = len(res_v) return scenario_stats def getScenarioStatus(installer, version): + """ + Get the status of a scenariofor Yardstick + they used criteria SUCCESS (default: PASS) + """ period = get_config('general.period') url_base = get_config('testapi.url') @@ -184,33 +202,37 @@ def getScenarioStatus(installer, version): response.close() results = json.loads(k) test_results = results['results'] - except URLError as e: - print 'Got an error code: {}'.format(e) + except URLError: + print "GetScenarioStatus: error when calling the API" scenario_results = {} result_dict = {} if test_results is not None: - for r in test_results: - if r['stop_date'] != 'None' and r['criteria'] is not None: - if not r['scenario'] in scenario_results.keys(): - scenario_results[r['scenario']] = [] - scenario_results[r['scenario']].append(r) - - for k, v in scenario_results.items(): + for test_r in test_results: + if (test_r['stop_date'] != 'None' and + test_r['criteria'] is not None): + if not test_r['scenario'] in scenario_results.keys(): + scenario_results[test_r['scenario']] = [] + scenario_results[test_r['scenario']].append(test_r) + + for scen_k, scen_v in scenario_results.items(): # scenario_results[k] = v[:LASTEST_TESTS] s_list = [] - for element in v: + for element in scen_v: if element['criteria'] == 'SUCCESS': s_list.append(1) else: s_list.append(0) - result_dict[k] = s_list + result_dict[scen_k] = s_list # return scenario_results return result_dict def getQtipResults(version, installer): + """ + Get QTIP results + """ period = get_config('qtip.period') url_base = get_config('testapi.url') @@ -240,19 +262,24 @@ def getQtipResults(version, installer): def getNbtestOk(results): + """ + based on default value (PASS) count the number of test OK + """ nb_test_ok = 0 - for r in results: - for k, v in r.iteritems(): + for my_result in results: + for res_k, res_v in my_result.iteritems(): try: - if "PASS" in v: + if "PASS" in res_v: nb_test_ok += 1 - except: + except Exception: print "Cannot retrieve test status" return nb_test_ok def getResult(testCase, installer, scenario, version): - + """ + Get Result for a given Functest Testcase + """ # retrieve raw results results = getApiResults(testCase, installer, scenario, version) # let's concentrate on test results only @@ -269,10 +296,10 @@ def getResult(testCase, installer, scenario, version): # print " ---------------- " # print "nb of results:" + str(len(test_results)) - for r in test_results: + for res_r in test_results: # print r["start_date"] # print r["criteria"] - scenario_results.append({r["start_date"]: r["criteria"]}) + scenario_results.append({res_r["start_date"]: res_r["criteria"]}) # sort results scenario_results.sort() # 4 levels for the results @@ -295,7 +322,7 @@ def getResult(testCase, installer, scenario, version): test_result_indicator = 1 else: # Test the last 4 run - if (len(scenario_results) > 3): + if len(scenario_results) > 3: last4runResults = scenario_results[-4:] nbTestOkLast4 = getNbtestOk(last4runResults) # print "Nb test OK (last 4 run):"+ str(nbTestOkLast4) @@ -309,19 +336,22 @@ def getResult(testCase, installer, scenario, 
version): def getJenkinsUrl(build_tag): - # e.g. jenkins-functest-apex-apex-daily-colorado-daily-colorado-246 - # id = 246 - # jenkins-functest-compass-huawei-pod5-daily-master-136 - # id = 136 - # note it is linked to jenkins format - # if this format changes...function to be adapted.... + """ + Get Jenkins url_base corespoding to the last test CI run + e.g. jenkins-functest-apex-apex-daily-colorado-daily-colorado-246 + id = 246 + jenkins-functest-compass-huawei-pod5-daily-master-136 + id = 136 + note it is linked to jenkins format + if this format changes...function to be adapted.... + """ url_base = get_config('functest.jenkins_url') try: build_id = [int(s) for s in build_tag.split("-") if s.isdigit()] url_id = (build_tag[8:-(len(str(build_id[0])) + 1)] + "/" + str(build_id[0])) jenkins_url = url_base + url_id + "/console" - except: + except Exception: print 'Impossible to get jenkins url:' if "jenkins-" not in build_tag: @@ -331,10 +361,13 @@ def getJenkinsUrl(build_tag): def getScenarioPercent(scenario_score, scenario_criteria): + """ + Get success rate of the scenario (in %) + """ score = 0.0 try: score = float(scenario_score) / float(scenario_criteria) * 100 - except: + except Exception: print 'Impossible to calculate the percentage score' return score @@ -343,32 +376,41 @@ def getScenarioPercent(scenario_score, scenario_criteria): # Functest # ********* def getFunctestConfig(version=""): + """ + Get Functest configuration + """ config_file = get_config('functest.test_conf') + version response = requests.get(config_file) return yaml.safe_load(response.text) def getArchitectures(scenario_results): + """ + Get software architecture (x86 or Aarch64) + """ supported_arch = ['x86'] - if (len(scenario_results) > 0): + if len(scenario_results) > 0: for scenario_result in scenario_results.values(): for value in scenario_result: - if ("armband" in value['build_tag']): + if "armband" in value['build_tag']: supported_arch.append('aarch64') return supported_arch return supported_arch def filterArchitecture(results, architecture): + """ + Restrict the list of results based on given architecture + """ filtered_results = {} - for name, results in results.items(): + for name, res in results.items(): filtered_values = [] - for value in results: - if (architecture is "x86"): + for value in res: + if architecture is "x86": # drop aarch64 results if ("armband" not in value['build_tag']): filtered_values.append(value) - elif(architecture is "aarch64"): + elif architecture is "aarch64": # drop x86 results if ("armband" in value['build_tag']): filtered_values.append(value) @@ -381,6 +423,9 @@ def filterArchitecture(results, architecture): # Yardstick # ********* def subfind(given_list, pattern_list): + """ + Yardstick util function + """ LASTEST_TESTS = get_config('general.nb_iteration_tests_success_criteria') for i in range(len(given_list)): if given_list[i] == pattern_list[0] and \ @@ -390,7 +435,9 @@ def subfind(given_list, pattern_list): def _get_percent(status): - + """ + Yardstick util function to calculate success rate + """ if status * 100 % 6: return round(float(status) * 100 / 6, 1) else: @@ -398,13 +445,16 @@ def _get_percent(status): def get_percent(four_list, ten_list): + """ + Yardstick util function to calculate success rate + """ four_score = 0 ten_score = 0 - for v in four_list: - four_score += v - for v in ten_list: - ten_score += v + for res_v in four_list: + four_score += res_v + for res_v in ten_list: + ten_score += res_v LASTEST_TESTS = 
get_config('general.nb_iteration_tests_success_criteria') if four_score == LASTEST_TESTS: @@ -420,9 +470,12 @@ def get_percent(four_list, ten_list): def _test(): + """ + Yardstick util function (test) + """ status = getScenarioStatus("compass", "master") print "status:++++++++++++++++++++++++" - print(json.dumps(status, indent=4)) + print json.dumps(status, indent=4) # ---------------------------------------------------------- @@ -432,8 +485,9 @@ def _test(): # ----------------------------------------------------------- def export_csv(scenario_file_name, installer, version): - # csv - # generate sub files based on scenario_history.txt + """ + Generate sub files based on scenario_history.txt + """ scenario_installer_file_name = ("./display/" + version + "/functest/scenario_history_" + installer + ".csv") @@ -443,21 +497,25 @@ def export_csv(scenario_file_name, installer, version): for line in scenario_file: if installer in line: scenario_installer_file.write(line) - scenario_installer_file.close + scenario_installer_file.close def generate_csv(scenario_file): + """ + Generate sub files based on scenario_history.txt + """ import shutil - # csv - # generate sub files based on scenario_history.txt csv_file = scenario_file.replace('txt', 'csv') shutil.copy2(scenario_file, csv_file) def export_pdf(pdf_path, pdf_doc_name): + """ + Export results to pdf + """ try: pdfkit.from_file(pdf_path, pdf_doc_name) except IOError: print "Error but pdf generated anyway..." - except: + except Exception: print "impossible to generate PDF" diff --git a/utils/test/reporting/reporting/vsperf/__init__.py b/utils/test/reporting/reporting/vsperf/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/utils/test/reporting/reporting/vsperf/reporting-status.py b/utils/test/reporting/reporting/vsperf/reporting-status.py new file mode 100644 index 000000000..fc4cc677d --- /dev/null +++ b/utils/test/reporting/reporting/vsperf/reporting-status.py @@ -0,0 +1,138 @@ +#!/usr/bin/python +# +# This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +import datetime +import os + +import jinja2 + +import reporting.utils.reporting_utils as rp_utils +import reporting.utils.scenarioResult as sr + +installers = rp_utils.get_config('general.installers') +PERIOD = rp_utils.get_config('general.period') + +# Logger +logger = rp_utils.getLogger("Storperf-Status") +reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M") + +logger.info("*******************************************") +logger.info("* Generating reporting scenario status *") +logger.info("* Data retention = %s days *" % PERIOD) +logger.info("* *") +logger.info("*******************************************") + +# retrieve the list of storperf tests +versions = {'master'} + +# For all the versions +for version in versions: + # For all the installers + for installer in installers: + scenario_results = rp_utils.getScenarios("vsperf", + None, + installer, + None) + items = {} + scenario_result_criteria = {} + logger.info("installer %s, version %s, scenario ", installer, version) + + # From each scenarios get results list + for s, s_result in scenario_results.items(): + logger.info("---------------------------------") + logger.info("installer %s, version %s, scenario %s", installer, + version, s) + ten_criteria = len(s_result) + + ten_score = 0 + for v in s_result: + if "PASS" in 
v['criteria']: + ten_score += 1 + + logger.info("ten_score: %s / %s" % (ten_score, ten_criteria)) + + four_score = 0 + try: + LASTEST_TESTS = rp_utils.get_config( + 'general.nb_iteration_tests_success_criteria') + s_result.sort(key=lambda x: x['start_date']) + four_result = s_result[-LASTEST_TESTS:] + logger.debug("four_result: {}".format(four_result)) + logger.debug("LASTEST_TESTS: {}".format(LASTEST_TESTS)) + # logger.debug("four_result: {}".format(four_result)) + four_criteria = len(four_result) + for v in four_result: + if "PASS" in v['criteria']: + four_score += 1 + logger.info("4 Score: %s / %s " % (four_score, + four_criteria)) + except Exception: + logger.error("Impossible to retrieve the four_score") + + try: + s_status = (four_score * 100) / four_criteria + except ZeroDivisionError: + s_status = 0 + logger.info("Score percent = %s" % str(s_status)) + s_four_score = str(four_score) + '/' + str(four_criteria) + s_ten_score = str(ten_score) + '/' + str(ten_criteria) + s_score_percent = str(s_status) + + logger.debug(" s_status: {}".format(s_status)) + if s_status == 100: + logger.info(">>>>> scenario OK, save the information") + else: + logger.info(">>>> scenario not OK, last 4 iterations = %s, \ + last 10 days = %s" % (s_four_score, s_ten_score)) + + s_url = "" + if len(s_result) > 0: + build_tag = s_result[len(s_result)-1]['build_tag'] + logger.debug("Build tag: %s" % build_tag) + s_url = s_url = rp_utils.getJenkinsUrl(build_tag) + logger.info("last jenkins url: %s" % s_url) + + # Save daily results in a file + path_validation_file = ("./display/" + version + + "/vsperf/scenario_history.txt") + + if not os.path.exists(path_validation_file): + with open(path_validation_file, 'w') as f: + info = 'date,scenario,installer,details,score\n' + f.write(info) + + with open(path_validation_file, "a") as f: + info = (reportingDate + "," + s + "," + installer + + "," + s_ten_score + "," + + str(s_score_percent) + "\n") + f.write(info) + + scenario_result_criteria[s] = sr.ScenarioResult(s_status, + s_four_score, + s_ten_score, + s_score_percent, + s_url) + + logger.info("--------------------------") + + templateLoader = jinja2.FileSystemLoader(".") + templateEnv = jinja2.Environment(loader=templateLoader, + autoescape=True) + + TEMPLATE_FILE = "./reporting/vsperf/template/index-status-tmpl.html" + template = templateEnv.get_template(TEMPLATE_FILE) + + outputText = template.render(scenario_results=scenario_result_criteria, + installer=installer, + period=PERIOD, + version=version, + date=reportingDate) + + with open("./display/" + version + + "/vsperf/status-" + installer + ".html", "wb") as fh: + fh.write(outputText) diff --git a/utils/test/reporting/reporting/vsperf/template/index-status-tmpl.html b/utils/test/reporting/reporting/vsperf/template/index-status-tmpl.html new file mode 100644 index 000000000..7e06ef66b --- /dev/null +++ b/utils/test/reporting/reporting/vsperf/template/index-status-tmpl.html @@ -0,0 +1,114 @@ + + + + + + + + + + + + + + + +
[new 114-line HTML template; the markup did not survive extraction. Recoverable content: a header "Vsperf status page ({{version}}, {{date}})"; a note that "Reported values represent the percentage of completed CI tests during the reporting period, where results were communicated to the Test Database"; and the same "List of last scenarios ({{version}}) run over the last {{period}} days" table (Scenario, Status, Trend, Last 4 Iterations, Last 10 Days) as the Bottlenecks page.]
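Both of these per-project reports depend on the reworked getScenarios() helper in the reporting_utils.py hunk above, whose project and case filters are now explicit parameters and may be None. The calls below are illustrative only; the installer and version values are examples, while in the scripts they come from the reporting configuration.

import reporting.utils.reporting_utils as rp_utils

# Bottlenecks: a single named test case, pinned to one version
bottlenecks_results = rp_utils.getScenarios("bottlenecks", "posca_factor_ping",
                                            "apex", "master")

# VSPERF: every case of the project, no version filter
vsperf_results = rp_utils.getScenarios("vsperf", None, "fuel", None)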
diff --git a/utils/test/reporting/reporting/yardstick/reporting-status.py b/utils/test/reporting/reporting/yardstick/reporting-status.py index 85c386bf1..6584f4e8d 100644 --- a/utils/test/reporting/reporting/yardstick/reporting-status.py +++ b/utils/test/reporting/reporting/yardstick/reporting-status.py @@ -7,14 +7,13 @@ # http://www.apache.org/licenses/LICENSE-2.0 # import datetime -import jinja2 import os -import utils.scenarioResult as sr -from scenarios import config as cf +import jinja2 -# manage conf -import utils.reporting_utils as rp_utils +import reporting.utils.scenarioResult as sr +import reporting.utils.reporting_utils as rp_utils +from scenarios import config as cf installers = rp_utils.get_config('general.installers') versions = rp_utils.get_config('general.versions') diff --git a/utils/test/testapi/.gitignore b/utils/test/testapi/.gitignore index c7b63b5b1..86ec0d2d5 100644 --- a/utils/test/testapi/.gitignore +++ b/utils/test/testapi/.gitignore @@ -1,4 +1,7 @@ AUTHORS ChangeLog setup.cfg-e - +opnfv_testapi/static +build +*.egg-info +3rd_party/static/static diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html index cdfcfaf36..e366670a9 100644 --- a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html +++ b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html @@ -1,8 +1,12 @@ -

[HTML markup lost in extraction. The first hunk replaces the {{ctrl.pageHeader}} and {{ctrl.pageParagraph}} bindings with a fixed "Pods" header and the paragraph "This page is used to create or query pods. Querying pods is open to everybody. But only login users are granted the privilege to create the new pod.", ahead of the existing "Create" section. A second hunk (@@ -63,7 +67,7 @@) follows but is truncated.]