From: Fatih Degirmenci Date: Mon, 28 Aug 2017 23:04:59 +0000 (+0000) Subject: Merge "xci: Structure osa periodic jobs" X-Git-Url: https://gerrit.opnfv.org/gerrit/gitweb?a=commitdiff_plain;h=2356471ea955bea6e724fff354636f7963527963;hp=218d9a3e62a37af50a84631a7a68190987d197de;p=releng.git Merge "xci: Structure osa periodic jobs" --- diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh index 3a2ca606b..ce9544b28 100755 --- a/jjb/apex/apex-deploy.sh +++ b/jjb/apex/apex-deploy.sh @@ -3,7 +3,6 @@ set -o errexit set -o nounset set -o pipefail -APEX_PKGS="common undercloud onos" IPV6_FLAG=False # log info to console @@ -11,6 +10,8 @@ echo "Starting the Apex deployment." echo "--------------------------------------------------------" echo +sudo rm -rf /tmp/tmp* + if [ -z "$DEPLOY_SCENARIO" ]; then echo "Deploy scenario not set!" exit 1 @@ -57,7 +58,9 @@ else BASE=$CONFIG IMAGES=$RESOURCES LIB="/var/opt/opnfv/lib" - + sudo mkdir -p /var/log/apex + sudo chmod 777 /var/log/apex + cd /var/log/apex fi # Install Dependencies diff --git a/jjb/apex/apex-download-artifact.sh b/jjb/apex/apex-download-artifact.sh index 52c3c67ec..860cd60a5 100755 --- a/jjb/apex/apex-download-artifact.sh +++ b/jjb/apex/apex-download-artifact.sh @@ -3,8 +3,6 @@ set -o errexit set -o nounset set -o pipefail -APEX_PKGS="common undercloud onos" - # log info to console echo "Downloading the Apex artifact. This could take some time..." echo "--------------------------------------------------------" @@ -40,9 +38,8 @@ else # find version of RPM VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)') # build RPM List which already includes base Apex RPM - for pkg in ${APEX_PKGS}; do - RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm" - done + RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm" + RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm" # remove old / install new RPMs if rpm -q opnfv-apex > /dev/null; then diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh index f53451d41..4037d25ad 100755 --- a/jjb/apex/apex-upload-artifact.sh +++ b/jjb/apex/apex-upload-artifact.sh @@ -126,15 +126,13 @@ elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//') - for pkg in common undercloud onos; do - RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}" - done + RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}" + RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}" SRPM_INSTALL_PATH=$BUILD_DIRECTORY SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL) VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//') - for pkg in common undercloud onos; do - SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}" - done + SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}" + SRPM_LIST+=" ${SRPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}" if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then signrpm diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml index f0e0535ea..82062c6f9 100644 --- a/jjb/apex/apex.yml +++ b/jjb/apex/apex.yml @@ -180,7 +180,7 @@ branch-pattern: '**/{branch}' file-paths: - compare-type: ANT - pattern: 'tests/**' + pattern: 
'apex/tests/**' properties: - logrotate-default - throttle: @@ -243,11 +243,18 @@ pattern: 'lib/**' - compare-type: ANT pattern: 'config/**' + - compare-type: ANT + pattern: 'apex/**' properties: - logrotate-default + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' - throttle: - max-per-node: 3 + max-per-node: 1 max-total: 10 option: 'project' @@ -373,8 +380,13 @@ properties: - logrotate-default + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' - throttle: - max-per-node: 3 + max-per-node: 1 max-total: 10 option: 'project' @@ -779,18 +791,18 @@ enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/" abort-all-job: false git-revision: false - - multijob: - name: StorPerf - condition: ALWAYS - projects: - - name: 'storperf-apex-baremetal-daily-{scenario_stream}' - node-parameters: true - current-parameters: false - predefined-parameters: - DEPLOY_SCENARIO=$DEPLOY_SCENARIO - kill-phase-on: NEVER - abort-all-job: false - git-revision: false +# - multijob: +# name: StorPerf +# condition: ALWAYS +# projects: +# - name: 'storperf-apex-baremetal-daily-{scenario_stream}' +# node-parameters: true +# current-parameters: false +# predefined-parameters: +# DEPLOY_SCENARIO=$DEPLOY_SCENARIO +# kill-phase-on: NEVER +# abort-all-job: false +# git-revision: false # Build status is always success due conditional plugin prefetching # build status before multijob phases execute # - conditional-step: diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2 index 5a44dbc00..16b025500 100644 --- a/jjb/apex/apex.yml.j2 +++ b/jjb/apex/apex.yml.j2 @@ -92,7 +92,7 @@ branch-pattern: '**/{branch}' file-paths: - compare-type: ANT - pattern: 'tests/**' + pattern: 'apex/tests/**' properties: - logrotate-default - throttle: @@ -155,11 +155,18 @@ pattern: 'lib/**' - compare-type: ANT pattern: 'config/**' + - compare-type: ANT + pattern: 'apex/**' properties: - logrotate-default + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' - throttle: - max-per-node: 3 + max-per-node: 1 max-total: 10 option: 'project' @@ -285,8 +292,13 @@ properties: - logrotate-default + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' - throttle: - max-per-node: 3 + max-per-node: 1 max-total: 10 option: 'project' @@ -691,18 +703,18 @@ enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/" abort-all-job: false git-revision: false - - multijob: - name: StorPerf - condition: ALWAYS - projects: - - name: 'storperf-apex-baremetal-daily-{scenario_stream}' - node-parameters: true - current-parameters: false - predefined-parameters: - DEPLOY_SCENARIO=$DEPLOY_SCENARIO - kill-phase-on: NEVER - abort-all-job: false - git-revision: false +# - multijob: +# name: StorPerf +# condition: ALWAYS +# projects: +# - name: 'storperf-apex-baremetal-daily-{scenario_stream}' +# node-parameters: true +# current-parameters: false +# predefined-parameters: +# DEPLOY_SCENARIO=$DEPLOY_SCENARIO +# kill-phase-on: NEVER +# abort-all-job: false +# git-revision: false # Build status is always success due conditional plugin prefetching # build status before multijob phases execute # - conditional-step: diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml index cdc14e4e0..f1bff072c 100644 --- a/jjb/armband/armband-ci-jobs.yml +++ b/jjb/armband/armband-ci-jobs.yml 
@@ -52,16 +52,16 @@ slave-label: arm-pod2 installer: fuel <<: *euphrates - - arm-pod3: - slave-label: arm-pod3 + - arm-pod5: + slave-label: arm-pod5 installer: fuel <<: *euphrates - arm-pod4: slave-label: arm-pod4 installer: fuel <<: *euphrates - - arm-virtual1: - slave-label: arm-virtual1 + - arm-virtual2: + slave-label: arm-virtual2 installer: fuel <<: *euphrates #-------------------------------- @@ -71,16 +71,16 @@ slave-label: arm-pod2 installer: fuel <<: *master - - arm-pod3: - slave-label: arm-pod3 + - arm-pod5: + slave-label: arm-pod5 installer: fuel <<: *master - arm-pod4: slave-label: arm-pod4 installer: fuel <<: *master - - arm-virtual1: - slave-label: arm-virtual1 + - arm-virtual2: + slave-label: arm-virtual2 installer: fuel <<: *master #-------------------------------- @@ -412,31 +412,31 @@ # Enea Armband Non CI Virtual Triggers running against euphrates branch #-------------------------------------------------------------------- - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual1-euphrates-trigger' + name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual2-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-virtual1-euphrates-trigger' + name: 'fuel-os-nosdn-nofeature-ha-arm-virtual2-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual1-euphrates-trigger' + name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual2-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual1-euphrates-trigger' + name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual2-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual1-euphrates-trigger' + name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual2-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-virtual1-euphrates-trigger' + name: 'fuel-os-odl_l2-sfc-ha-arm-virtual2-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-virtual1-euphrates-trigger' + name: 'fuel-os-odl_l2-sfc-noha-arm-virtual2-euphrates-trigger' triggers: - timed: '' @@ -444,31 +444,31 @@ # Enea Armband Non CI Virtual Triggers running against master branch #-------------------------------------------------------------------- - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual1-master-trigger' + name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual2-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-virtual1-master-trigger' + name: 'fuel-os-nosdn-nofeature-ha-arm-virtual2-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual1-master-trigger' + name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual2-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual1-master-trigger' + name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual2-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual1-master-trigger' + name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual2-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-virtual1-master-trigger' + name: 'fuel-os-odl_l2-sfc-ha-arm-virtual2-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-virtual1-master-trigger' + name: 'fuel-os-odl_l2-sfc-noha-arm-virtual2-master-trigger' triggers: - timed: '' @@ -538,62 +538,62 @@ # Enea Armband POD 3 Triggers running against master branch 
#---------------------------------------------------------- - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-master-trigger' + name: 'fuel-os-odl_l2-nofeature-ha-arm-pod5-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-master-trigger' + name: 'fuel-os-nosdn-nofeature-ha-arm-pod5-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-master-trigger' + name: 'fuel-os-odl_l3-nofeature-ha-arm-pod5-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-master-trigger' + name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod5-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-master-trigger' + name: 'fuel-os-odl_l2-nofeature-noha-arm-pod5-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-master-trigger' + name: 'fuel-os-odl_l2-sfc-ha-arm-pod5-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-master-trigger' + name: 'fuel-os-odl_l2-sfc-noha-arm-pod5-master-trigger' triggers: - timed: '' #--------------------------------------------------------------- # Enea Armband POD 3 Triggers running against euphrates branch #--------------------------------------------------------------- - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-euphrates-trigger' + name: 'fuel-os-odl_l2-nofeature-ha-arm-pod5-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-euphrates-trigger' + name: 'fuel-os-nosdn-nofeature-ha-arm-pod5-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-euphrates-trigger' + name: 'fuel-os-odl_l3-nofeature-ha-arm-pod5-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-euphrates-trigger' + name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod5-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-euphrates-trigger' + name: 'fuel-os-odl_l2-nofeature-noha-arm-pod5-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-euphrates-trigger' + name: 'fuel-os-odl_l2-sfc-ha-arm-pod5-euphrates-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-euphrates-trigger' + name: 'fuel-os-odl_l2-sfc-noha-arm-pod5-euphrates-trigger' triggers: - timed: '' #-------------------------------------------------------------------------- diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh index 341aab590..a7570431d 100644 --- a/jjb/bottlenecks/bottlenecks-run-suite.sh +++ b/jjb/bottlenecks/bottlenecks-run-suite.sh @@ -16,6 +16,10 @@ RELENG_REPO=${WORKSPACE}/releng [ -d ${RELENG_REPO} ] && rm -rf ${RELENG_REPO} git clone https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO} >${redirect} +YARDSTICK_REPO=${WORKSPACE}/yardstick +[ -d ${YARDSTICK_REPO} ] && rm -rf ${YARDSTICK_REPO} +git clone https://gerrit.opnfv.org/gerrit/yardstick ${YARDSTICK_REPO} >${redirect} + OPENRC=/tmp/admin_rc.sh OS_CACERT=/tmp/os_cacert @@ -85,11 +89,18 @@ if [[ $SUITE_NAME == *posca* ]]; then echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently." 
fi - cmd="sudo python ${RELENG_REPO}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \ + if [[ ${INSTALLER_TYPE} != compass ]]; then + cmd="sudo python ${RELENG_REPO}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \ -i ${INSTALLER_IP} ${options} -f ${BOTTLENECKS_CONFIG}/pod.yaml \ -s ${BOTTLENECKS_CONFIG}/id_rsa" - echo ${cmd} - ${cmd} + echo ${cmd} + ${cmd} + else + cmd="sudo cp ${YARDSTICK_REPO}/etc/yardstick/nodes/compass_sclab_virtual/pod.yaml \ + ${BOTTLENECKS_CONFIG}" + echo ${cmd} + ${cmd} + fi deactivate diff --git a/jjb/compass4nfv/compass-dovetail-jobs.yml b/jjb/compass4nfv/compass-dovetail-jobs.yml index 67d1e4eee..101db8241 100644 --- a/jjb/compass4nfv/compass-dovetail-jobs.yml +++ b/jjb/compass4nfv/compass-dovetail-jobs.yml @@ -19,7 +19,7 @@ #------------------------------------ pod: - baremetal: - slave-label: compass-baremetal + slave-label: compass-baremetal-branch os-version: 'xenial' <<: *danube #----------------------------------- diff --git a/jjb/daisy4nfv/daisy-daily-jobs.yml b/jjb/daisy4nfv/daisy-daily-jobs.yml index 84cc2a4c2..9a680e7b8 100644 --- a/jjb/daisy4nfv/daisy-daily-jobs.yml +++ b/jjb/daisy4nfv/daisy-daily-jobs.yml @@ -181,7 +181,7 @@ - trigger: name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-master-trigger' triggers: - - timed: '0 12 * * *' + - timed: '0 16 * * *' # Basic NOHA Scenarios - trigger: name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger' @@ -191,7 +191,7 @@ - trigger: name: 'daisy-os-odl-nofeature-ha-baremetal-daily-master-trigger' triggers: - - timed: '0 16 * * *' + - timed: '0 12 * * *' #----------------------------------------------- # Triggers for job running on daisy-virtual against master branch #----------------------------------------------- diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml index 23d12def4..d535d6109 100644 --- a/jjb/doctor/doctor.yml +++ b/jjb/doctor/doctor.yml @@ -39,7 +39,7 @@ pod: - arm-pod2: slave-label: '{pod}' - - arm-pod3: + - arm-pod5: slave-label: '{pod}' jobs: diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml index 42e1ad585..92b1db356 100644 --- a/jjb/dovetail/dovetail-ci-jobs.yml +++ b/jjb/dovetail/dovetail-ci-jobs.yml @@ -142,12 +142,12 @@ SUT: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *master - - arm-pod3: + - arm-pod5: slave-label: '{pod}' SUT: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *master - - arm-virtual1: + - arm-virtual2: slave-label: '{pod}' SUT: fuel auto-trigger-name: 'daily-trigger-disabled' diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh index 346a1ef08..7dd6a2ddc 100755 --- a/jjb/dovetail/dovetail-run.sh +++ b/jjb/dovetail/dovetail-run.sh @@ -122,13 +122,26 @@ if [ "$INSTALLER_TYPE" == "apex" ]; then sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa fi +image_path=${HOME}/opnfv/dovetail/images +if [[ ! -d ${image_path} ]]; then + mkdir -p ${image_path} +fi # sdnvpn test case needs to download this image first before running -echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..." -wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_CONFIG} +ubuntu_image=${image_path}/ubuntu-16.04-server-cloudimg-amd64-disk1.img +if [[ ! -f ${ubuntu_image} ]]; then + echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..." 
+ wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${image_path} +fi +sudo cp ${ubuntu_image} ${DOVETAIL_CONFIG} # functest needs to download this image first before running -echo "Download image cirros-0.3.5-x86_64-disk.img ..." -wget -q -nc http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img -P ${DOVETAIL_CONFIG} +cirros_image=${image_path}/cirros-0.3.5-x86_64-disk.img +if [[ ! -f ${cirros_image} ]]; then + echo "Download image cirros-0.3.5-x86_64-disk.img ..." + wget -q -nc http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img -P ${image_path} +fi +sudo cp ${cirros_image} ${DOVETAIL_CONFIG} + opts="--privileged=true -id" diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh index da098862a..f0e08e171 100644 --- a/jjb/functest/functest-alpine.sh +++ b/jjb/functest/functest-alpine.sh @@ -66,6 +66,7 @@ fi volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol} ${cacert_file_vol}" +set +e tiers=(healthcheck smoke features vnf) for tier in ${tiers[@]}; do diff --git a/jjb/functest/functest-daily-jobs.yml b/jjb/functest/functest-daily-jobs.yml index f14ca758f..23649fc08 100644 --- a/jjb/functest/functest-daily-jobs.yml +++ b/jjb/functest/functest-daily-jobs.yml @@ -154,7 +154,7 @@ slave-label: '{pod}' installer: fuel <<: *master - - arm-pod3: + - arm-pod5: slave-label: '{pod}' installer: fuel <<: *master @@ -162,7 +162,7 @@ slave-label: '{pod}' installer: fuel <<: *master - - arm-virtual1: + - arm-virtual2: slave-label: '{pod}' installer: fuel <<: *master @@ -190,7 +190,7 @@ slave-label: '{pod}' installer: fuel <<: *danube - - arm-pod3: + - arm-pod5: slave-label: '{pod}' installer: fuel <<: *danube @@ -198,7 +198,7 @@ slave-label: '{pod}' installer: fuel <<: *danube - - arm-virtual1: + - arm-virtual2: slave-label: '{pod}' installer: fuel <<: *danube @@ -418,6 +418,7 @@ - ./functest-env-presetup.sh - ../../utils/fetch_os_creds.sh - ./functest-alpine.sh + - ../../utils/push-test-logs.sh - builder: name: functest-daily diff --git a/jjb/functest/set-functest-env.sh b/jjb/functest/set-functest-env.sh index 7d9e737e7..e54c3bf13 100755 --- a/jjb/functest/set-functest-env.sh +++ b/jjb/functest/set-functest-env.sh @@ -33,7 +33,7 @@ if [ "$BRANCH" != 'stable/danube' ]; then echo "Functest: Download images that will be used by test cases" images_dir="${HOME}/opnfv/functest/images" chmod +x ${WORKSPACE}/functest/ci/download_images.sh - ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} + ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} > ${redirect} 2>&1 images_vol="-v ${images_dir}:/home/opnfv/functest/images" echo "Functest: Images successfully downloaded" fi diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml index f5de021a6..9234206a5 100644 --- a/jjb/global/slave-params.yml +++ b/jjb/global/slave-params.yml @@ -817,15 +817,15 @@ default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab description: 'Base URI to the configuration directory' - parameter: - name: 'arm-pod3-defaults' + name: 'arm-pod5-defaults' parameters: - node: name: SLAVE_NAME description: 'Slave name on Jenkins' allowed-slaves: - - arm-pod3 + - arm-pod5 default-slaves: - - arm-pod3 + - arm-pod5 - string: name: GIT_BASE default: https://gerrit.opnfv.org/gerrit/$PROJECT @@ -853,15 +853,15 @@ default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab description: 'Base URI to the configuration directory' - parameter: - name: 'arm-virtual1-defaults' + name: 'arm-virtual2-defaults' 
parameters: - node: name: SLAVE_NAME description: 'Slave name on Jenkins' allowed-slaves: - - arm-virtual1 + - arm-virtual2 default-slaves: - - arm-virtual1 + - arm-virtual2 - string: name: GIT_BASE default: https://gerrit.opnfv.org/gerrit/$PROJECT diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml index 9d27329ed..414eba255 100644 --- a/jjb/releng/opnfv-docker.yml +++ b/jjb/releng/opnfv-docker.yml @@ -60,6 +60,11 @@ dockerdir: 'docker/storperf-master' <<: *master <<: *other-receivers + - 'storperf-graphite': + project: 'storperf' + dockerdir: 'docker/storperf-graphite' + <<: *master + <<: *other-receivers - 'storperf-httpfrontend': project: 'storperf' dockerdir: 'docker/storperf-httpfrontend' diff --git a/jjb/securedlab/check-jinja2.yml b/jjb/securedlab/check-jinja2.yml index 1e85536e7..430ced560 100644 --- a/jjb/securedlab/check-jinja2.yml +++ b/jjb/securedlab/check-jinja2.yml @@ -70,6 +70,12 @@ pattern: '**/*.jinja2' - compare-type: ANT pattern: '**/*.yaml' + skip-vote: + successful: true + failed: true + unstable: true + notbuilt: true + builders: - check-jinja diff --git a/jjb/yardstick/yardstick-daily-jobs.yml b/jjb/yardstick/yardstick-daily-jobs.yml index 007384b7a..39935abc2 100644 --- a/jjb/yardstick/yardstick-daily-jobs.yml +++ b/jjb/yardstick/yardstick-daily-jobs.yml @@ -176,22 +176,22 @@ installer: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *danube - - arm-pod3: + - arm-pod5: slave-label: '{pod}' installer: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *master - - arm-pod3: + - arm-pod5: slave-label: '{pod}' installer: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *danube - - arm-virtual1: + - arm-virtual2: slave-label: '{pod}' installer: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *master - - arm-virtual1: + - arm-virtual2: slave-label: '{pod}' installer: fuel auto-trigger-name: 'daily-trigger-disabled' @@ -381,7 +381,7 @@ default: '-i 104.197.68.199:8086' description: 'Arguments to use in order to choose the backend DB' - parameter: - name: 'yardstick-params-arm-virtual1' + name: 'yardstick-params-arm-virtual2' parameters: - string: name: YARDSTICK_DB_BACKEND @@ -464,7 +464,7 @@ description: 'Arguments to use in order to choose the backend DB' - parameter: - name: 'yardstick-params-arm-pod3' + name: 'yardstick-params-arm-pod5' parameters: - string: name: YARDSTICK_DB_BACKEND @@ -515,4 +515,4 @@ - trigger: name: 'yardstick-daily-huawei-pod4-trigger' triggers: - - timed: '0 1 * * *' + - timed: '' \ No newline at end of file diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh index 79190ec2f..518d20ae5 100644 --- a/utils/push-test-logs.sh +++ b/utils/push-test-logs.sh @@ -27,7 +27,7 @@ node_list=(\ 'ericsson-pod1' 'ericsson-pod2' \ 'ericsson-virtual1' 'ericsson-virtual2' 'ericsson-virtual3' \ 'ericsson-virtual4' 'ericsson-virtual5' 'ericsson-virtual12' \ -'arm-pod1' 'arm-pod3' \ +'arm-pod1' 'arm-pod5' \ 'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' 'huawei-pod5' \ 'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \ 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4' \ diff --git a/utils/test/reporting/docker/Dockerfile b/utils/test/reporting/docker/Dockerfile index f5168d1ae..f2357909d 100644 --- a/utils/test/reporting/docker/Dockerfile +++ b/utils/test/reporting/docker/Dockerfile @@ -27,19 +27,28 @@ ENV CONFIG_REPORTING_YAML ${working_dir}/reporting.yaml WORKDIR ${HOME} # Packaged dependencies RUN apt-get update && apt-get install -y \ +build-essential \ ssh \ +curl \ +gnupg \ python-pip \ 
+python-dev \ +python-setuptools \ git-core \ -nodejs \ -npm \ supervisor \ --no-install-recommends -RUN pip install --upgrade pip +RUN pip install --upgrade pip && easy_install -U setuptools==30.0.0 -RUN git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng ${HOME}/releng +RUN git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng /home/opnfv/releng RUN pip install -r ${working_dir}/requirements.txt +RUN sh -c 'curl -sL https://deb.nodesource.com/setup_8.x | bash -' \ + && apt-get install -y nodejs \ + && npm install -g bower \ + && npm install -g grunt \ + && npm install -g grunt-cli + WORKDIR ${working_dir} RUN python setup.py install RUN docker/reporting.sh diff --git a/utils/test/reporting/docker/web_server.sh b/utils/test/reporting/docker/web_server.sh index a34c11dd7..0dd8df73d 100755 --- a/utils/test/reporting/docker/web_server.sh +++ b/utils/test/reporting/docker/web_server.sh @@ -9,8 +9,6 @@ echo "daemon off;" >> /etc/nginx/nginx.conf # supervisor config cp /home/opnfv/releng/utils/test/reporting/docker/supervisor.conf /etc/supervisor/conf.d/ -ln -s /usr/bin/nodejs /usr/bin/node - # Manage Angular front end cd pages && /bin/bash angular.sh diff --git a/utils/test/reporting/reporting/functest/template/index-status-tmpl.html b/utils/test/reporting/reporting/functest/template/index-status-tmpl.html index 74d410e96..50fc648aa 100644 --- a/utils/test/reporting/reporting/functest/template/index-status-tmpl.html +++ b/utils/test/reporting/reporting/functest/template/index-status-tmpl.html @@ -90,7 +90,7 @@ $(document).ready(function (){

[The body of this hunk and of the following hunk (@@ -98,14 +98,39 @@) lost their HTML table markup in extraction. Recoverable content: within the "List of last scenarios ({{version}}) run over the last {{period}} days" panel, the table header "Scenario" is renamed to "HA Scenario" (remaining columns: Status, Trend, Score), the existing rows are wrapped in a {% if '-ha' in scenario -%} ... {%- endif %} guard, and a second table is added for {% if '-noha' in scenario -%} entries with columns NOHA Scenario, Status, Trend, Score and Iteration; both tables iterate scenario_stats.iteritems() and render {{scenario}}, {{scenario_results[scenario].getScore()}} and {{iteration}}.]
+ + diff --git a/utils/test/reporting/setup.py b/utils/test/reporting/setup.py index a52d90555..17849f67b 100644 --- a/utils/test/reporting/setup.py +++ b/utils/test/reporting/setup.py @@ -8,7 +8,6 @@ # http://www.apache.org/licenses/LICENSE-2.0 # pylint: disable=missing-docstring - import setuptools # In python < 2.7.4, a lazy loading of package `pbr` will break diff --git a/utils/test/testapi/opnfv_testapi/resources/models.py b/utils/test/testapi/opnfv_testapi/resources/models.py index 6f04cc236..e70a6ed23 100644 --- a/utils/test/testapi/opnfv_testapi/resources/models.py +++ b/utils/test/testapi/opnfv_testapi/resources/models.py @@ -61,11 +61,11 @@ class ModelBase(object): '{} has no attribute {}'.format(cls.__name__, k)) value = v if isinstance(v, dict) and k in attr_parser: - value = attr_parser[k].from_dict(v) + value = attr_parser[k].from_dict_with_raise(v) elif isinstance(v, list) and k in attr_parser: value = [] for item in v: - value.append(attr_parser[k].from_dict(item)) + value.append(attr_parser[k].from_dict_with_raise(item)) t.__setattr__(k, value) diff --git a/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py b/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py index bd06400b4..e9c19a7a4 100644 --- a/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py +++ b/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py @@ -114,8 +114,21 @@ class ScenarioGURHandler(GenericScenarioHandler): self._get_one(query={'name': name}) pass + @swagger.operation(nickname="updateScenarioName") def put(self, name): - pass + """ + @description: update scenario, only rename is supported currently + @param body: fields to be updated + @type body: L{ScenarioUpdateRequest} + @in body: body + @rtype: L{Scenario} + @return 200: update success + @raise 404: scenario not exist + @raise 403: nothing to update + """ + query = {'name': name} + db_keys = ['name'] + self._update(query=query, db_keys=db_keys) @swagger.operation(nickname="deleteScenarioByName") def delete(self, name): @@ -147,6 +160,12 @@ class ScenarioUpdater(object): ('projects', 'put'): self._update_requests_update_projects, ('projects', 'delete'): self._update_requests_delete_projects, ('owner', 'put'): self._update_requests_change_owner, + ('versions', 'post'): self._update_requests_add_versions, + ('versions', 'put'): self._update_requests_update_versions, + ('versions', 'delete'): self._update_requests_delete_versions, + ('installers', 'post'): self._update_requests_add_installers, + ('installers', 'put'): self._update_requests_update_installers, + ('installers', 'delete'): self._update_requests_delete_installers, } updates[(item, action)](self.data) @@ -210,42 +229,16 @@ class ScenarioUpdater(object): @iter_installers @iter_versions def _update_requests_add_projects(self, version): - exists = list() - malformat = list() - for n in self.body: - try: - f_n = models.ScenarioProject.from_dict_with_raise(n) - if not any(o.project == f_n.project for o in version.projects): - version.projects.append(f_n) - else: - exists.append(n['project']) - except Exception as e: - malformat.append(e.message) - if malformat: - raises.BadRequest(message.bad_format(malformat)) - elif exists: - raises.Conflict(message.exist('projects', exists)) + version.projects = self._update_with_body(models.ScenarioProject, + 'project', + version.projects) @iter_installers @iter_versions def _update_requests_update_projects(self, version): - exists = list() - malformat = list() - projects = list() - for n in self.body: - try: - 
f_n = models.ScenarioProject.from_dict_with_raise(n) - if not any(o.project == f_n.project for o in projects): - projects.append(models.ScenarioProject.from_dict(n)) - else: - exists.append(n['project']) - except: - malformat.append(n) - if malformat: - raises.BadRequest(message.bad_format(malformat)) - elif exists: - raises.Forbidden(message.exist('projects', exists)) - version.projects = projects + version.projects = self._update_with_body(models.ScenarioProject, + 'project', + list()) @iter_installers @iter_versions @@ -255,14 +248,68 @@ class ScenarioUpdater(object): @iter_installers @iter_versions def _update_requests_change_owner(self, version): - version.owner = self.body + version.owner = self.body.get('owner') + + @iter_installers + def _update_requests_add_versions(self, installer): + installer.versions = self._update_with_body(models.ScenarioVersion, + 'version', + installer.versions) + + @iter_installers + def _update_requests_update_versions(self, installer): + installer.versions = self._update_with_body(models.ScenarioVersion, + 'version', + list()) + + @iter_installers + def _update_requests_delete_versions(self, installer): + installer.versions = self._remove_versions(installer.versions) + + def _update_requests_add_installers(self, scenario): + scenario.installers = self._update_with_body(models.ScenarioInstaller, + 'installer', + scenario.installers) + + def _update_requests_update_installers(self, scenario): + scenario.installers = self._update_with_body(models.ScenarioInstaller, + 'installer', + list()) + + def _update_requests_delete_installers(self, scenario): + scenario.installers = self._remove_installers(scenario.installers) + + def _update_with_body(self, clazz, field, withs): + exists = list() + malformat = list() + for new in self.body: + try: + format_new = clazz.from_dict_with_raise(new) + new_name = getattr(format_new, field) + if not any(getattr(o, field) == new_name for o in withs): + withs.append(format_new) + else: + exists.append(new_name) + except Exception as error: + malformat.append(error.message) + if malformat: + raises.BadRequest(message.bad_format(malformat)) + elif exists: + raises.Conflict(message.exist('{}s'.format(field), exists)) + return withs def _filter_installers(self, installers): return self._filter('installer', installers) + def _remove_installers(self, installers): + return self._remove('installer', installers) + def _filter_versions(self, versions): return self._filter('version', versions) + def _remove_versions(self, versions): + return self._remove('version', versions) + def _filter_projects(self, projects): return self._filter('project', projects) @@ -584,7 +631,7 @@ class ScenarioOwnerHandler(GenericScenarioUpdateHandler): installer=& \ version= @param body: new owner - @type body: L{string} + @type body: L{ScenarioChangeOwnerRequest} @in body: body @param installer: installer type @type installer: L{string} @@ -602,3 +649,127 @@ class ScenarioOwnerHandler(GenericScenarioUpdateHandler): locators={'scenario': scenario, 'installer': None, 'version': None}) + + +class ScenarioVersionsHandler(GenericScenarioUpdateHandler): + @swagger.operation(nickname="addVersionsUnderScenario") + def post(self, scenario): + """ + @description: add versions to scenario + @notes: add one or multiple versions + POST /api/v1/scenarios//versions? 
\ + installer= + @param body: versions to be added + @type body: C{list} of L{ScenarioVersion} + @in body: body + @param installer: installer type + @type installer: L{string} + @in installer: query + @required installer: True + @return 200: versions are added. + @raise 400: bad schema + @raise 409: conflict, version already exists + @raise 404: scenario/installer not exist + """ + self.do_update('versions', + 'post', + locators={'scenario': scenario, + 'installer': None}) + + @swagger.operation(nickname="updateVersionsUnderScenario") + def put(self, scenario): + """ + @description: replace all versions + @notes: substitute all versions as a totality + PUT /api/v1/scenarios//versions? \ + installer= + @param body: new versions + @type body: C{list} of L{ScenarioVersion} + @in body: body + @param installer: installer type + @type installer: L{string} + @in installer: query + @required installer: True + @return 200: replace versions success. + @raise 400: bad schema + @raise 404: scenario/installer not exist + """ + self.do_update('versions', + 'put', + locators={'scenario': scenario, + 'installer': None}) + + @swagger.operation(nickname="deleteVersionsUnderScenario") + def delete(self, scenario): + """ + @description: delete one or multiple versions + @notes: delete one or multiple versions + DELETE /api/v1/scenarios//versions? \ + installer= + @param body: versions(names) to be deleted + @type body: C{list} of L{string} + @in body: body + @param installer: installer type + @type installer: L{string} + @in installer: query + @required installer: True + @return 200: delete versions success. + @raise 404: scenario/installer not exist + """ + self.do_update('versions', + 'delete', + locators={'scenario': scenario, + 'installer': None}) + + +class ScenarioInstallersHandler(GenericScenarioUpdateHandler): + @swagger.operation(nickname="addInstallersUnderScenario") + def post(self, scenario): + """ + @description: add installers to scenario + @notes: add one or multiple installers + POST /api/v1/scenarios//installers + @param body: installers to be added + @type body: C{list} of L{ScenarioInstaller} + @in body: body + @return 200: installers are added. + @raise 400: bad schema + @raise 409: conflict, installer already exists + @raise 404: scenario not exist + """ + self.do_update('installers', + 'post', + locators={'scenario': scenario}) + + @swagger.operation(nickname="updateInstallersUnderScenario") + def put(self, scenario): + """ + @description: replace all installers + @notes: substitute all installers as a totality + PUT /api/v1/scenarios//installers + @param body: new installers + @type body: C{list} of L{ScenarioInstaller} + @in body: body + @return 200: replace versions success. + @raise 400: bad schema + @raise 404: scenario/installer not exist + """ + self.do_update('installers', + 'put', + locators={'scenario': scenario}) + + @swagger.operation(nickname="deleteInstallersUnderScenario") + def delete(self, scenario): + """ + @description: delete one or multiple installers + @notes: delete one or multiple installers + DELETE /api/v1/scenarios//installers + @param body: installers(names) to be deleted + @type body: C{list} of L{string} + @in body: body + @return 200: delete versions success. 
+ @raise 404: scenario/installer not exist + """ + self.do_update('installers', + 'delete', + locators={'scenario': scenario}) diff --git a/utils/test/testapi/opnfv_testapi/resources/scenario_models.py b/utils/test/testapi/opnfv_testapi/resources/scenario_models.py index ec262aa2f..d950ed1d7 100644 --- a/utils/test/testapi/opnfv_testapi/resources/scenario_models.py +++ b/utils/test/testapi/opnfv_testapi/resources/scenario_models.py @@ -160,6 +160,18 @@ class ScenarioCreateRequest(models.ModelBase): return {'installers': ScenarioInstaller} +@swagger.model() +class ScenarioChangeOwnerRequest(models.ModelBase): + def __init__(self, owner=None): + self.owner = owner + + +@swagger.model() +class ScenarioUpdateRequest(models.ModelBase): + def __init__(self, name=None): + self.name = name + + @swagger.model() class Scenario(models.ModelBase): """ diff --git a/utils/test/testapi/opnfv_testapi/router/url_mappings.py b/utils/test/testapi/opnfv_testapi/router/url_mappings.py index 9c9556c6b..3e3ab87aa 100644 --- a/utils/test/testapi/opnfv_testapi/router/url_mappings.py +++ b/utils/test/testapi/opnfv_testapi/router/url_mappings.py @@ -64,6 +64,10 @@ mappings = [ scenario_handlers.ScenarioProjectsHandler), (r"/api/v1/scenarios/([^/]+)/owner", scenario_handlers.ScenarioOwnerHandler), + (r"/api/v1/scenarios/([^/]+)/versions", + scenario_handlers.ScenarioVersionsHandler), + (r"/api/v1/scenarios/([^/]+)/installers", + scenario_handlers.ScenarioInstallersHandler), # static path (r'/(.*\.(css|png|gif|js|html|json|map|woff2|woff|ttf))', diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py index f9bb58c6b..1367fc669 100644 --- a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py +++ b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py @@ -2,7 +2,7 @@ import functools import httplib import json import os -from copy import deepcopy + from datetime import datetime from opnfv_testapi.common import message @@ -50,15 +50,15 @@ class TestScenarioBase(base.TestBase): self.assertEqual(scenario, models.Scenario.from_dict(req)) @staticmethod - def _set_query(*args): + def set_query(*args): uri = '' for arg in args: uri += arg + '&' return uri[0: -1] - def _get_and_assert(self, name, req=None): + def get_and_assert(self, name): code, body = self.get(name) - self.assert_res(code, body, req) + self.assert_res(code, body, self.req_d) class TestScenarioCreate(TestScenarioBase): @@ -97,25 +97,25 @@ class TestScenarioGet(TestScenarioBase): self.scenario_2 = self.create_return_name(self.req_2) def test_getByName(self): - self._get_and_assert(self.scenario_1, self.req_d) + self.get_and_assert(self.scenario_1) def test_getAll(self): self._query_and_assert(query=None, reqs=[self.req_d, self.req_2]) def test_queryName(self): - query = self._set_query('name=nosdn-nofeature-ha') + query = self.set_query('name=nosdn-nofeature-ha') self._query_and_assert(query, reqs=[self.req_d]) def test_queryInstaller(self): - query = self._set_query('installer=apex') + query = self.set_query('installer=apex') self._query_and_assert(query, reqs=[self.req_d]) def test_queryVersion(self): - query = self._set_query('version=master') + query = self.set_query('version=master') self._query_and_assert(query, reqs=[self.req_d]) def test_queryProject(self): - query = self._set_query('project=functest') + query = self.set_query('project=functest') self._query_and_assert(query, reqs=[self.req_d, self.req_2]) # close due 
to random fail, open again after solve it in another patch @@ -170,14 +170,21 @@ class TestScenarioUpdate(TestScenarioBase): def update_url_fixture(item): def _update_url_fixture(xstep): def wrapper(self, *args, **kwargs): + self.update_url = '{}/{}'.format(self.scenario_url, item) locator = None if item in ['projects', 'owner']: locator = 'installer={}&version={}'.format( self.installer, self.version) - self.update_url = '{}/{}?{}'.format(self.scenario_url, - item, - locator) + elif item in ['versions']: + locator = 'installer={}'.format( + self.installer) + elif item in ['rename']: + self.update_url = self.scenario_url + + if locator: + self.update_url = '{}?{}'.format(self.update_url, locator) + xstep(self, *args, **kwargs) return wrapper return _update_url_fixture @@ -186,138 +193,257 @@ class TestScenarioUpdate(TestScenarioBase): def _update_partial(set_update): @functools.wraps(set_update) def wrapper(self): - update, scenario = set_update(self, deepcopy(self.req_d)) - code, body = getattr(self, operate)(update, self.scenario) - getattr(self, expected)(code, scenario) + update = set_update(self) + code, body = getattr(self, operate)(update) + getattr(self, expected)(code) return wrapper return _update_partial @update_partial('_add', '_success') - def test_addScore(self, scenario): + def test_addScore(self): add = models.ScenarioScore(date=str(datetime.now()), score='11/12') - projects = scenario['installers'][0]['versions'][0]['projects'] + projects = self.req_d['installers'][0]['versions'][0]['projects'] functest = filter(lambda f: f['project'] == 'functest', projects)[0] functest['scores'].append(add.format()) self.update_url = '{}/scores?{}'.format(self.scenario_url, self.locate_project) - return add, scenario + return add @update_partial('_add', '_success') - def test_addTrustIndicator(self, scenario): + def test_addTrustIndicator(self): add = models.ScenarioTI(date=str(datetime.now()), status='gold') - projects = scenario['installers'][0]['versions'][0]['projects'] + projects = self.req_d['installers'][0]['versions'][0]['projects'] functest = filter(lambda f: f['project'] == 'functest', projects)[0] functest['trust_indicators'].append(add.format()) self.update_url = '{}/trust_indicators?{}'.format(self.scenario_url, self.locate_project) - return add, scenario + return add @update_partial('_add', '_success') - def test_addCustoms(self, scenario): - add = ['odl', 'parser', 'vping_ssh'] - projects = scenario['installers'][0]['versions'][0]['projects'] + def test_addCustoms(self): + adds = ['odl', 'parser', 'vping_ssh'] + projects = self.req_d['installers'][0]['versions'][0]['projects'] functest = filter(lambda f: f['project'] == 'functest', projects)[0] - functest['customs'] = list(set(functest['customs'] + add)) + functest['customs'] = list(set(functest['customs'] + adds)) self.update_url = '{}/customs?{}'.format(self.scenario_url, self.locate_project) - return add, scenario + return adds @update_partial('_update', '_success') - def test_updateCustoms(self, scenario): - news = ['odl', 'parser', 'vping_ssh'] - projects = scenario['installers'][0]['versions'][0]['projects'] + def test_updateCustoms(self): + updates = ['odl', 'parser', 'vping_ssh'] + projects = self.req_d['installers'][0]['versions'][0]['projects'] functest = filter(lambda f: f['project'] == 'functest', projects)[0] - functest['customs'] = news + functest['customs'] = updates self.update_url = '{}/customs?{}'.format(self.scenario_url, self.locate_project) - return news, scenario + return updates 
@update_partial('_delete', '_success') - def test_deleteCustoms(self, scenario): - obsoletes = ['vping_ssh'] - projects = scenario['installers'][0]['versions'][0]['projects'] + def test_deleteCustoms(self): + deletes = ['vping_ssh'] + projects = self.req_d['installers'][0]['versions'][0]['projects'] functest = filter(lambda f: f['project'] == 'functest', projects)[0] functest['customs'] = ['healthcheck'] self.update_url = '{}/customs?{}'.format(self.scenario_url, self.locate_project) - return obsoletes, scenario + return deletes @update_url_fixture('projects') @update_partial('_add', '_success') - def test_addProjects_succ(self, scenario): + def test_addProjects_succ(self): add = models.ScenarioProject(project='qtip').format() - scenario['installers'][0]['versions'][0]['projects'].append(add) - return [add], scenario + self.req_d['installers'][0]['versions'][0]['projects'].append(add) + return [add] @update_url_fixture('projects') @update_partial('_add', '_conflict') - def test_addProjects_already_exist(self, scenario): + def test_addProjects_already_exist(self): add = models.ScenarioProject(project='functest').format() - scenario['installers'][0]['versions'][0]['projects'].append(add) - return [add], scenario + return [add] @update_url_fixture('projects') @update_partial('_add', '_bad_request') - def test_addProjects_bad_schema(self, scenario): + def test_addProjects_bad_schema(self): add = models.ScenarioProject(project='functest').format() add['score'] = None - scenario['installers'][0]['versions'][0]['projects'].append(add) - return [add], scenario + return [add] @update_url_fixture('projects') @update_partial('_update', '_success') - def test_updateProjects_succ(self, scenario): + def test_updateProjects_succ(self): update = models.ScenarioProject(project='qtip').format() - scenario['installers'][0]['versions'][0]['projects'] = [update] - return [update], scenario + self.req_d['installers'][0]['versions'][0]['projects'] = [update] + return [update] + + @update_url_fixture('projects') + @update_partial('_update', '_conflict') + def test_updateProjects_duplicated(self): + update = models.ScenarioProject(project='qtip').format() + return [update, update] @update_url_fixture('projects') @update_partial('_update', '_bad_request') - def test_updateProjects_bad_schema(self, scenario): + def test_updateProjects_bad_schema(self): update = models.ScenarioProject(project='functest').format() update['score'] = None - scenario['installers'][0]['versions'][0]['projects'] = [update] - return [update], scenario + return [update] @update_url_fixture('projects') @update_partial('_delete', '_success') - def test_deleteProjects(self, scenario): + def test_deleteProjects(self): deletes = ['functest'] - projects = scenario['installers'][0]['versions'][0]['projects'] - scenario['installers'][0]['versions'][0]['projects'] = filter( + projects = self.req_d['installers'][0]['versions'][0]['projects'] + self.req_d['installers'][0]['versions'][0]['projects'] = filter( lambda f: f['project'] != 'functest', projects) - return deletes, scenario + return deletes @update_url_fixture('owner') @update_partial('_update', '_success') - def test_changeOwner(self, scenario): + def test_changeOwner(self): new_owner = 'new_owner' - scenario['installers'][0]['versions'][0]['owner'] = new_owner - return new_owner, scenario + update = models.ScenarioChangeOwnerRequest(new_owner).format() + self.req_d['installers'][0]['versions'][0]['owner'] = new_owner + return update + + @update_url_fixture('versions') + 
@update_partial('_add', '_success') + def test_addVersions_succ(self): + add = models.ScenarioVersion(version='Euphrates').format() + self.req_d['installers'][0]['versions'].append(add) + return [add] + + @update_url_fixture('versions') + @update_partial('_add', '_conflict') + def test_addVersions_already_exist(self): + add = models.ScenarioVersion(version='master').format() + return [add] - def _add(self, update_req, new_scenario): + @update_url_fixture('versions') + @update_partial('_add', '_bad_request') + def test_addVersions_bad_schema(self): + add = models.ScenarioVersion(version='euphrates').format() + add['notexist'] = None + return [add] + + @update_url_fixture('versions') + @update_partial('_update', '_success') + def test_updateVersions_succ(self): + update = models.ScenarioVersion(version='euphrates').format() + self.req_d['installers'][0]['versions'] = [update] + return [update] + + @update_url_fixture('versions') + @update_partial('_update', '_conflict') + def test_updateVersions_duplicated(self): + update = models.ScenarioVersion(version='euphrates').format() + return [update, update] + + @update_url_fixture('versions') + @update_partial('_update', '_bad_request') + def test_updateVersions_bad_schema(self): + update = models.ScenarioVersion(version='euphrates').format() + update['not_owner'] = 'Iam' + return [update] + + @update_url_fixture('versions') + @update_partial('_delete', '_success') + def test_deleteVersions(self): + deletes = ['master'] + versions = self.req_d['installers'][0]['versions'] + self.req_d['installers'][0]['versions'] = filter( + lambda f: f['version'] != 'master', + versions) + return deletes + + @update_url_fixture('installers') + @update_partial('_add', '_success') + def test_addInstallers_succ(self): + add = models.ScenarioInstaller(installer='daisy').format() + self.req_d['installers'].append(add) + return [add] + + @update_url_fixture('installers') + @update_partial('_add', '_conflict') + def test_addInstallers_already_exist(self): + add = models.ScenarioInstaller(installer='apex').format() + return [add] + + @update_url_fixture('installers') + @update_partial('_add', '_bad_request') + def test_addInstallers_bad_schema(self): + add = models.ScenarioInstaller(installer='daisy').format() + add['not_exist'] = 'not_exist' + return [add] + + @update_url_fixture('installers') + @update_partial('_update', '_success') + def test_updateInstallers_succ(self): + update = models.ScenarioInstaller(installer='daisy').format() + self.req_d['installers'] = [update] + return [update] + + @update_url_fixture('installers') + @update_partial('_update', '_conflict') + def test_updateInstallers_duplicated(self): + update = models.ScenarioInstaller(installer='daisy').format() + return [update, update] + + @update_url_fixture('installers') + @update_partial('_update', '_bad_request') + def test_updateInstallers_bad_schema(self): + update = models.ScenarioInstaller(installer='daisy').format() + update['not_exist'] = 'not_exist' + return [update] + + @update_url_fixture('installers') + @update_partial('_delete', '_success') + def test_deleteInstallers(self): + deletes = ['apex'] + installers = self.req_d['installers'] + self.req_d['installers'] = filter( + lambda f: f['installer'] != 'apex', + installers) + return deletes + + @update_url_fixture('rename') + @update_partial('_update', '_success') + def test_renameScenario(self): + new_name = 'new_scenario_name' + update = models.ScenarioUpdateRequest(name=new_name) + self.req_d['name'] = new_name + return update + + 
@update_url_fixture('rename') + @update_partial('_update', '_forbidden') + def test_renameScenario_exist(self): + new_name = self.req_d['name'] + update = models.ScenarioUpdateRequest(name=new_name) + return update + + def _add(self, update_req): return self.post_direct_url(self.update_url, update_req) - def _update(self, update_req, new_scenario): + def _update(self, update_req): return self.update_direct_url(self.update_url, update_req) - def _delete(self, update_req, new_scenario): + def _delete(self, update_req): return self.delete_direct_url(self.update_url, update_req) - def _success(self, status, new_scenario): + def _success(self, status): self.assertEqual(status, httplib.OK) - self._get_and_assert(new_scenario.get('name'), new_scenario) + self.get_and_assert(self.req_d['name']) - def _forbidden(self, status, new_scenario): + def _forbidden(self, status): self.assertEqual(status, httplib.FORBIDDEN) - def _bad_request(self, status, new_scenario): + def _bad_request(self, status): self.assertEqual(status, httplib.BAD_REQUEST) - def _conflict(self, status, new_scenario): + def _conflict(self, status): self.assertEqual(status, httplib.CONFLICT)