Merge "Enable Daisy Euphrates merge jobs"
author    Serena Feng <feng.xiaowei@zte.com.cn>
          Sat, 30 Sep 2017 01:23:32 +0000 (01:23 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
          Sat, 30 Sep 2017 01:23:32 +0000 (01:23 +0000)
25 files changed:
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/apex/scenarios.yaml.hidden
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-deploy.sh
jjb/dovetail/dovetail-ci-jobs.yml
jjb/dovetail/dovetail-run.sh
jjb/fuel/fuel-daily-jobs.yml
jjb/functest/functest-alpine.sh
jjb/functest/functest-loop.sh
jjb/functest/functest-suite.sh
jjb/functest/set-functest-env.sh
jjb/releng/opnfv-docker-arm.yml
jjb/releng/opnfv-docker.yml
jjb/xci/xci-verify-jobs.yml
jjb/yardstick/yardstick-daily.sh
modules/opnfv/utils/ovs_logger.py
utils/fetch_os_creds.sh
utils/test/reporting/docker/Dockerfile
utils/test/reporting/docker/reporting.sh
utils/test/reporting/html/euphrates.html
utils/test/reporting/img/euphrates.jpg
utils/test/reporting/reporting/bottlenecks/reporting-status.py
utils/test/reporting/reporting/functest/reporting-status.py
utils/test/reporting/reporting/yardstick/scenarios.py

index 058f18a..c858b7f 100644 (file)
@@ -28,6 +28,7 @@
             baremetal-slave: 'apex-baremetal-master'
             verify-scenario: 'os-odl-nofeature-ha'
             scenario_stream: 'master'
+            disable_daily: true
         - euphrates: &euphrates
             branch: 'stable/euphrates'
             gs-pathname: '/euphrates'
@@ -36,6 +37,7 @@
             baremetal-slave: 'apex-baremetal-master'
             verify-scenario: 'os-odl-nofeature-ha'
             scenario_stream: 'euphrates'
+            disable_daily: false
         - danube: &danube
             branch: 'stable/danube'
             gs-pathname: '/danube'
@@ -45,6 +47,7 @@
             verify-scenario: 'os-odl_l3-nofeature-ha'
             scenario_stream: 'danube'
             disabled: true
+            disable_daily: true
 
     scenario:
         - 'os-nosdn-nofeature-noha':
               <<: *master
         - 'os-odl-sfc-ha':
               <<: *master
-        - 'os-odl-fdio-dvr-noha':
+        - 'os-odl-fdio_dvr-noha':
               <<: *master
-        - 'os-odl-fdio-dvr-ha':
+        - 'os-odl-fdio_dvr-ha':
               <<: *master
         - 'os-nosdn-nofeature-noha':
               <<: *euphrates
               <<: *euphrates
         - 'os-odl-sfc-ha':
               <<: *euphrates
-        - 'os-odl-fdio-dvr-noha':
+        - 'os-odl-fdio_dvr-noha':
               <<: *euphrates
-        - 'os-odl-fdio-dvr-ha':
+        - 'os-odl-fdio_dvr-ha':
               <<: *euphrates
 
     platform:
 
     node: '{baremetal-slave}'
 
-    disabled: false
+    disabled: '{obj:disable_daily}'
 
     scm:
         - git-scm
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-os-odl-fdio-dvr-noha-baremetal-master'
+                - name: 'apex-os-odl-fdio_dvr-noha-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-os-odl-fdio-dvr-ha-baremetal-master'
+                - name: 'apex-os-odl-fdio_dvr-ha-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-os-odl-fdio-dvr-noha-baremetal-euphrates'
+                - name: 'apex-os-odl-fdio_dvr-noha-baremetal-euphrates'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-os-odl-fdio-dvr-ha-baremetal-euphrates'
+                - name: 'apex-os-odl-fdio_dvr-ha-baremetal-euphrates'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
index 09c2f8c..6bebf3b 100644 (file)
@@ -28,6 +28,7 @@
             baremetal-slave: 'apex-baremetal-master'
             verify-scenario: 'os-odl-nofeature-ha'
             scenario_stream: 'master'
+            disable_daily: true
         - euphrates: &euphrates
             branch: 'stable/euphrates'
             gs-pathname: '/euphrates'
@@ -36,6 +37,7 @@
             baremetal-slave: 'apex-baremetal-master'
             verify-scenario: 'os-odl-nofeature-ha'
             scenario_stream: 'euphrates'
+            disable_daily: false
         - danube: &danube
             branch: 'stable/danube'
             gs-pathname: '/danube'
@@ -45,6 +47,7 @@
             verify-scenario: 'os-odl_l3-nofeature-ha'
             scenario_stream: 'danube'
             disabled: true
+            disable_daily: true
 
     scenario:
         {%- for stream in scenarios %}
 
     node: '{baremetal-slave}'
 
-    disabled: false
+    disabled: '{obj:disable_daily}'
 
     scm:
         - git-scm
index e7fe923..a89aa5d 100644 (file)
@@ -18,8 +18,8 @@ master:
   - 'os-nosdn-kvm_ovs_dpdk-ha'
   - 'os-odl-sfc-noha'
   - 'os-odl-sfc-ha'
-  - 'os-odl-fdio-dvr-noha'
-  - 'os-odl-fdio-dvr-ha'
+  - 'os-odl-fdio_dvr-noha'
+  - 'os-odl-fdio_dvr-ha'
 euphrates:
   - 'os-nosdn-nofeature-noha'
   - 'os-nosdn-nofeature-ha'
@@ -40,8 +40,8 @@ euphrates:
   - 'os-nosdn-kvm_ovs_dpdk-ha'
   - 'os-odl-sfc-noha'
   - 'os-odl-sfc-ha'
-  - 'os-odl-fdio-dvr-noha'
-  - 'os-odl-fdio-dvr-ha'
+  - 'os-odl-fdio_dvr-noha'
+  - 'os-odl-fdio_dvr-ha'
 danube:
   - 'os-nosdn-nofeature-noha'
   - 'os-nosdn-nofeature-ha'
index 74a134e..b107c65 100644 (file)
@@ -95,7 +95,7 @@
         - 'os-odl-sfc-ha':
             disabled: false
             auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
-        - 'os-nosdn-dpdk-ha':
+        - 'os-nosdn-ovs_dpdk-ha':
             disabled: false
             auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
         - 'k8-nosdn-nofeature-ha':
         - 'os-odl-sfc-noha':
             disabled: false
             auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
-        - 'os-nosdn-dpdk-noha':
+        - 'os-nosdn-ovs_dpdk-noha':
             disabled: false
             auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
 
                         build-step-failure-threshold: 'never'
                         failure-threshold: 'never'
                         unstable-threshold: 'FAILURE'
+        - conditional-step:
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: os-nosdn-nofeature-ha
+                  label: '{scenario}'
+            steps:
+                - trigger-builds:
+                    - project: 'bottlenecks-compass-posca_stress_ping-{pod}-daily-{stream}'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
 
 - job-template:
     name: 'compass-deploy-{pod}-daily-{stream}'
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-nosdn-dpdk-ha-baremetal-centos-master-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-ha-baremetal-centos-master-trigger'
     triggers:
         - timed: ''
 - trigger:
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-nosdn-dpdk-noha-baremetal-centos-master-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-noha-baremetal-centos-master-trigger'
     triggers:
         - timed: ''
 
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-nosdn-dpdk-ha-huawei-pod7-danube-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-ha-huawei-pod7-danube-trigger'
     triggers:
         - timed: ''
 - trigger:
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-nosdn-dpdk-noha-huawei-pod7-danube-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-noha-huawei-pod7-danube-trigger'
     triggers:
         - timed: ''
 #--------------------
     triggers:
         - timed: '0 14 * * *'
 - trigger:
-    name: 'compass-os-nosdn-dpdk-ha-baremetal-master-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-ha-baremetal-master-trigger'
     triggers:
         - timed: '0 16 * * *'
 - trigger:
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-nosdn-dpdk-noha-baremetal-master-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-noha-baremetal-master-trigger'
     triggers:
         - timed: ''
 
     triggers:
         - timed: '0 13 * * *'
 - trigger:
-    name: 'compass-os-nosdn-dpdk-ha-baremetal-euphrates-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-ha-baremetal-euphrates-trigger'
     triggers:
         - timed: '0 9 * * *'
 - trigger:
     triggers:
         - timed: ''
 - trigger:
-    name: 'compass-os-nosdn-dpdk-noha-baremetal-euphrates-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-noha-baremetal-euphrates-trigger'
     triggers:
         - timed: ''
 
     triggers:
         - timed: '0 13 * * *'
 - trigger:
-    name: 'compass-os-nosdn-dpdk-ha-virtual-master-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-ha-virtual-master-trigger'
     triggers:
         - timed: '0 17 * * *'
 - trigger:
     triggers:
         - timed: '0 20 * * *'
 - trigger:
-    name: 'compass-os-nosdn-dpdk-noha-virtual-master-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-noha-virtual-master-trigger'
     triggers:
         - timed: '0 11 * * *'
 
     triggers:
         - timed: '0 16 * * *'
 - trigger:
-    name: 'compass-os-nosdn-dpdk-ha-virtual-euphrates-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-ha-virtual-euphrates-trigger'
     triggers:
         - timed: '0 14 * * *'
 - trigger:
     triggers:
         - timed: '0 19 * * *'
 - trigger:
-    name: 'compass-os-nosdn-dpdk-noha-virtual-euphrates-trigger'
+    name: 'compass-os-nosdn-ovs_dpdk-noha-virtual-euphrates-trigger'
     triggers:
         - timed: '0 12 * * *'
index 9d4ae51..ad069a5 100644 (file)
@@ -39,7 +39,7 @@ elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then
     export NETWORK_CONF_FILE=network_onos.yml
 elif [[ "${DEPLOY_SCENARIO}" =~ "-openo" ]]; then
     export NETWORK_CONF_FILE=network_openo.yml
-elif [[ "${DEPLOY_SCENARIO}" =~ "-dpdk" ]]; then
+elif [[ "${DEPLOY_SCENARIO}" =~ "-ovs_dpdk" ]]; then
     export NETWORK_CONF_FILE=network_dpdk.yml
 else
     export NETWORK_CONF_FILE=network.yml
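For context, the compass-deploy.sh change above keys the network config file on the renamed '-ovs_dpdk' scenario token. A minimal standalone sketch of that selection logic, where the default DEPLOY_SCENARIO value is illustrative only and not taken from the commit:

#!/bin/bash
# Illustrative sketch of the scenario-to-network-config mapping shown above;
# the default DEPLOY_SCENARIO below is hypothetical, for demonstration only.
DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-os-nosdn-ovs_dpdk-ha}

if [[ "${DEPLOY_SCENARIO}" =~ "-ovs_dpdk" ]]; then
    export NETWORK_CONF_FILE=network_dpdk.yml
else
    export NETWORK_CONF_FILE=network.yml
fi
echo "Selected ${NETWORK_CONF_FILE} for ${DEPLOY_SCENARIO}"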
index 1474803..94f38b1 100644 (file)
@@ -25,7 +25,7 @@
         branch: 'stable/{stream}'
         dovetail-branch: master
         gs-pathname: '/{stream}'
-        docker-tag: 'cvp.0.6.0'
+        docker-tag: 'cvp.0.7.0'
 
 #-----------------------------------
 # POD, PLATFORM, AND BRANCH MAPPING
index 9c4e205..2cbb947 100755 (executable)
@@ -152,8 +152,10 @@ dovetail_home_volume="-v ${DOVETAIL_HOME}:${DOVETAIL_HOME}"
 echo "Dovetail: Pulling image opnfv/dovetail:${DOCKER_TAG}"
 docker pull opnfv/dovetail:$DOCKER_TAG >$redirect
 
+env4bgpvpn="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}"
+
 cmd="docker run ${opts} -e DOVETAIL_HOME=${DOVETAIL_HOME} ${docker_volume} ${dovetail_home_volume} \
-     ${sshkey} opnfv/dovetail:${DOCKER_TAG} /bin/bash"
+     ${sshkey} ${env4bgpvpn} opnfv/dovetail:${DOCKER_TAG} /bin/bash"
 echo "Dovetail: running docker run command: ${cmd}"
 ${cmd} >${redirect}
 sleep 5
@@ -186,14 +188,6 @@ cat << EOF >$tempest_conf_file
 compute:
     min_compute_nodes: 2
     volume_device_name: ${volume_device}
-    min_microversion: 2.2
-    max_microversion: latest
-
-compute-feature-enabled:
-    live_migration: True
-    block_migration_for_live_migration: True
-    block_migrate_cinder_iscsi: True
-    attach_encrypted_volume: True
 
 EOF
 
index baf44c5..3ca6d05 100644 (file)
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 10 * * *'
 - trigger:
     name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-danube-trigger'
     triggers:
-        - timed: '0 2 * * 6'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-danube-trigger'
     triggers:
-        - timed: '0 2 * * 1,3,5'
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-danube-trigger'
     triggers:
index 8ede529..797f660 100755 (executable)
@@ -5,13 +5,14 @@ set +u
 set +o pipefail
 
 run_tiers() {
-    cmd_opt='prepare_env start && run_tests -r -t all'
-    [[ $BUILD_TAG =~ "suite" ]] && cmd_opt='prepare_env start && run_tests -t all'
+    tiers=$1
+    cmd_opt="prepare_env start && run_tests -r -t all"
+    [[ $BUILD_TAG =~ "suite" ]] && cmd_opt="prepare_env start && run_tests -t all"
     ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
     echo 0 > ${ret_val_file}
 
     for tier in ${tiers[@]}; do
-        FUNCTEST_IMAGE=opnfv/functest-${tier}
+        FUNCTEST_IMAGE=opnfv/functest-${tier}:${DOCKER_TAG}
         echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
         docker pull ${FUNCTEST_IMAGE}>/dev/null
         cmd="docker run --privileged=true ${envs} ${volumes} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
@@ -19,31 +20,35 @@ run_tiers() {
         eval ${cmd}
         ret_value=$?
         if [ ${ret_value} != 0 ]; then
-          echo ${ret_value} > ${ret_val_file}
+            echo ${ret_value} > ${ret_val_file}
+            if [ ${tier} == 'healthcheck' ]; then
+                echo "Healthcheck tier failed. Exiting Functest..."
+                exit 1
+            fi
         fi
     done
 }
 
 run_test() {
     test_name=$1
-    cmd_opt='prepare_env start && run_tests -r -t $test_name'
-    [[ $BUILD_TAG =~ "suite" ]] && cmd_opt='prepare_env start && run_tests -t $test_name'
+    cmd_opt="prepare_env start && run_tests -r -t ${test_name}"
+    [[ $BUILD_TAG =~ "suite" ]] && cmd_opt="prepare_env start && run_tests -t ${test_name}"
     ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
     echo 0 > ${ret_val_file}
     # Determine which Functest image should be used for the test case
     case ${test_name} in
         connection_check|api_check|snaps_health_check)
-            FUNCTEST_IMAGE=opnfv/functest-healthcheck ;;
+            FUNCTEST_IMAGE=opnfv/functest-healthcheck:${DOCKER_TAG} ;;
         vping_ssh|vping_userdata|tempest_smoke_serial|rally_sanity|refstack_defcore|odl|odl_netvirt|fds|snaps_smoke)
-            FUNCTEST_IMAGE=opnfv/functest-smoke ;;
+            FUNCTEST_IMAGE=opnfv/functest-smoke:${DOCKER_TAG} ;;
         tempest_full_parallel|tempest_custom|rally_full)
-            FUNCTEST_IMAGE=opnfv/functest-components ;;
+            FUNCTEST_IMAGE=opnfv/functest-components:${DOCKER_TAG} ;;
         cloudify_ims|orchestra_openims|orchestra_clearwaterims|vyos_vrouter)
-            FUNCTEST_IMAGE=opnfv/functest-vnf ;;
+            FUNCTEST_IMAGE=opnfv/functest-vnf:${DOCKER_TAG} ;;
         promise|doctor-notification|bgpvpn|functest-odl-sfc|domino-multinode|barometercollectd)
-            FUNCTEST_IMAGE=opnfv/functest-features ;;
-        parser)
-            FUNCTEST_IMAGE=opnfv/functest-parser ;;
+            FUNCTEST_IMAGE=opnfv/functest-features:${DOCKER_TAG} ;;
+        parser-basics)
+            FUNCTEST_IMAGE=opnfv/functest-parser:${DOCKER_TAG} ;;
         *)
             echo "Unkown test case $test_name"
             exit 1
@@ -66,6 +71,7 @@ FUNCTEST_DIR=/home/opnfv/functest
 DEPLOY_TYPE=baremetal
 [[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
 HOST_ARCH=$(uname -m)
+DOCKER_TAG=${DOCKER_TAG:-latest}
 
 # Prepare OpenStack credentials volume
 rc_file=${HOME}/opnfv-openrc.sh
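The functest-alpine.sh hunks above pin each per-tier Functest image to ${DOCKER_TAG}, falling back to 'latest' when the job does not set it. A reduced sketch of that tagging pattern, with the tier list shortened for illustration:

#!/bin/bash
# Sketch of the DOCKER_TAG defaulting and per-tier image naming shown above;
# the pull is skipped when no Docker daemon is available so the sketch runs anywhere.
DOCKER_TAG=${DOCKER_TAG:-latest}

for tier in healthcheck smoke features; do
    FUNCTEST_IMAGE="opnfv/functest-${tier}:${DOCKER_TAG}"
    echo "Functest: would pull ${FUNCTEST_IMAGE}"
    command -v docker >/dev/null && docker pull "${FUNCTEST_IMAGE}" >/dev/null
done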
index 00a5f13..a590d9f 100755 (executable)
@@ -2,11 +2,7 @@
 set +e
 
 [[ "$PUSH_RESULTS_TO_DB" == "true" ]] && flags+="-r"
-if [ "$BRANCH" == 'master' ]; then
-    cmd="run_tests -t all ${flags}"
-else
-    cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t all ${flags}"
-fi
+cmd="run_tests -t all ${flags}"
 
 container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
 docker exec $container_id $cmd
index 9b7f135..469a577 100755 (executable)
@@ -10,11 +10,7 @@ global_ret_val=0
 
 tests=($(echo $FUNCTEST_SUITE_NAME | tr "," "\n"))
 for test in ${tests[@]}; do
-    if [ "$BRANCH" == 'master' ]; then
-        cmd="run_tests -t $test"
-    else
-        cmd="python /home/opnfv/repos/functest/functest/ci/run_tests.py -t $test"
-    fi
+    cmd="run_tests -t $test"
     docker exec $container_id $cmd
     let global_ret_val+=$?
 done
index bb79dac..b116a85 100755 (executable)
@@ -106,12 +106,6 @@ if [ $(docker ps | grep "${FUNCTEST_IMAGE}:${DOCKER_TAG}" | wc -l) == 0 ]; then
     exit 1
 fi
 
-if [ "$BRANCH" == 'master' ]; then
-    cmd="prepare_env start"
-else
-    cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/prepare_env.py start"
-fi
-
-
+cmd="prepare_env start"
 echo "Executing command inside the docker: ${cmd}"
 docker exec ${container_id} ${cmd}
index 9315a4d..7f2574e 100644 (file)
         stream: master
         branch: '{stream}'
         disabled: false
-    danube: &danube
-        stream: danube
-        branch: 'stable/{stream}'
-        disabled: false
     euphrates: &euphrates
         stream: euphrates
         branch: 'stable/{stream}'
@@ -22,6 +18,7 @@
         receivers: >
             cristina.pauna@enea.com
             alexandru.avadanii@enea.com
+            delia.popescu@enea.com
     dovetail-arm-receivers: &dovetail-arm-receivers
         receivers: >
             cristina.pauna@enea.com
@@ -38,6 +35,7 @@
             alexandru.avadanii@enea.com
             alexandru.nemes@enea.com
             catalina.focsa@enea.com
+            delia.popescu@enea.com
     other-receivers: &other-receivers
         receivers: ''
 
             <<: *master
             <<: *yardstick-arm-receivers
         # projects with jobs for stable/euphrates
+        - 'functest':
+            project: 'functest'
+            <<: *euphrates
+            <<: *functest-arm-receivers
+        - 'dovetail':
+            project: 'dovetail'
+            <<: *euphrates
+            <<: *dovetail-arm-receivers
         - 'storperf-master':
             project: 'storperf'
             dockerdir: 'docker/storperf-master'
index 373c386..01736c2 100644 (file)
             <<: *master
             <<: *other-receivers
         # projects with jobs for Danube
-        - 'bottlenecks':
-            project: 'bottlenecks'
-            <<: *danube
-            <<: *other-receivers
         - 'qtip':
             project: 'qtip'
             <<: *euphrates
             <<: *euphrates
             <<: *other-receivers
         # projects with jobs for euphrates
+        - 'bottlenecks':
+            project: 'bottlenecks'
+            <<: *euphrates
+            <<: *other-receivers
         - 'functest':
             project: 'functest'
             <<: *euphrates
index 0e79bd9..17519ac 100644 (file)
 # distros
 #--------------------------------
     distro:
-        - 'ubuntu':
+        - ubuntu:
             disabled: false
-        - 'centos':
-            disabled: true
-        - 'opensuse':
-            disabled: true
+            successful: false
+            failed: false
+            unstable: false
+            notbuilt: false
+        - centos:
+            disabled: false
+            successful: true
+            failed: true
+            unstable: true
+            notbuilt: true
+        - opensuse:
+            disabled: false
+            successful: false
+            failed: false
+            unstable: false
+            notbuilt: false
 #--------------------------------
 # type
 #--------------------------------
@@ -34,7 +46,7 @@
 #--------------------------------
     jobs:
         - 'xci-verify-{distro}-{type}-{stream}'
-        - 'xci-verify-{phase}-{type}-{stream}'
+        - 'xci-verify-{distro}-{phase}-{type}-{stream}'
 #--------------------------------
 # job templates
 #--------------------------------
                 - 'osa-verify-{distro}-.*'
                 - 'osa-periodic-{distro}-.*'
             block-level: 'NODE'
+        - throttle:
+            max-per-node: 2
+            max-total: 10
+            categories:
+                - xci-verify-virtual
+            option: category
 
     wrappers:
         - ssh-agent-wrapper
                   - compare-type: ANT
                     pattern: 'docs/**'
             readable-message: true
+            skip-vote:
+                successful: '{obj:successful}'
+                failed: '{obj:failed}'
+                unstable: '{obj:unstable}'
+                notbuilt: '{obj:notbuilt}'
 
     parameters:
         - project-parameter:
             name: deploy
             condition: SUCCESSFUL
             projects:
-                - name: 'xci-verify-deploy-{type}-{stream}'
+                - name: 'xci-verify-{distro}-deploy-{type}-{stream}'
                   current-parameters: true
                   predefined-parameters: |
                     DISTRO={distro}
                     DEPLOY_SCENARIO=os-nosdn-nofeature-noha
                     CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
             name: healthcheck
             condition: SUCCESSFUL
             projects:
-                - name: 'xci-verify-healthcheck-{type}-{stream}'
+                - name: 'xci-verify-{distro}-healthcheck-{type}-{stream}'
                   current-parameters: true
                   predefined-parameters: |
                     DISTRO={distro}
                     CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
                     FUNCTEST_MODE=tier
                     FUNCTEST_TIER=healthcheck
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                   node-parameters: true
                   kill-phase-on: NEVER
                   abort-all-job: true
 
 - job-template:
-    name: 'xci-verify-{phase}-{type}-{stream}'
+    name: 'xci-verify-{distro}-{phase}-{type}-{stream}'
 
     disabled: false
 
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
-                - 'xci-verify-deploy-.*'
-                - 'xci-verify-healthcheck-.*'
                 - 'bifrost-verify-.*'
                 - 'bifrost-periodic-.*'
                 - 'osa-verify-.*'
             block-level: 'NODE'
 
     parameters:
+        - string:
+            name: PROJECT
+            default: 'releng-xci'
         - string:
             name: DISTRO
             default: 'ubuntu'
 
     builders:
         - description-setter:
-            description: "Built on $NODE_NAME for $DISTRO"
+            description: "Built on $NODE_NAME"
         - 'xci-verify-{phase}-macro'
 
 #--------------------------------
index 8b3980f..5398a34 100755 (executable)
@@ -18,6 +18,10 @@ if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
     fi
 fi
 
+if [[ "${INSTALLER_TYPE}" == 'fuel' ]]; then
+    sshkey="-v ${SSH_KEY}:/root/.ssh/mcp.rsa"
+fi
+
 if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
     if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
         rc_file_vol="-v /home/ubuntu/config:/etc/yardstick/admin.conf"
@@ -47,6 +51,10 @@ envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
     -e YARDSTICK_BRANCH=${BRANCH} -e BRANCH=${BRANCH} \
     -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
 
+if [[ "${INSTALLER_TYPE}" == 'fuel' ]]; then
+    envs+=" -e SSH_KEY=/root/.ssh/mcp.rsa"
+fi
+
 # Pull the image with correct tag
 DOCKER_REPO='opnfv/yardstick'
 if [ "$(uname -m)" = 'aarch64' ]; then
index 7777a9a..eb070e3 100644 (file)
@@ -27,11 +27,10 @@ class OVSLogger(object):
         if not os.path.exists(dirpath):
             os.makedirs(dirpath)
 
-    def __ssh_host(self, ssh_conn, host_prefix='10.20.0'):
+    def __ssh_host(self, ssh_conn):
         try:
-            _, stdout, _ = ssh_conn.exec_command('hostname -I')
-            hosts = stdout.readline().strip().split(' ')
-            found_host = [h for h in hosts if h.startswith(host_prefix)][0]
+            _, stdout, _ = ssh_conn.exec_command('hostname')
+            found_host = stdout.readline()
             return found_host
         except Exception as e:
             logger.error(e)
@@ -98,10 +97,18 @@ class OVSLogger(object):
 
     def dump_ovs_logs(self, controller_clients, compute_clients,
                       related_error=None, timestamp=None):
+        """
+        delete controller_clients argument because
+        that was producing an error in XCI installer.
+
+        For more information see:
+        TODO: https://jira.opnfv.org/browse/RELENG-314
+        """
+        del controller_clients
         if timestamp is None:
             timestamp = time.strftime("%Y%m%d-%H%M%S")
-
-        clients = controller_clients + compute_clients
+        # clients = controller_clients + compute_clients
+        clients = compute_clients
         for client in clients:
             self.ofctl_dump_flows(client, timestamp=timestamp)
             self.vsctl_show(client, timestamp=timestamp)
index 0873a68..ac75950 100755 (executable)
@@ -167,11 +167,7 @@ elif [ "$installer_type" == "apex" ]; then
     sudo scp $ssh_options root@$installer_ip:/home/stack/overcloudrc.v3 $dest_path
 
 elif [ "$installer_type" == "compass" ]; then
-    if [ "${BRANCH}" == "master" ]; then
-        sudo docker cp compass-tasks:/opt/openrc $dest_path &> /dev/null
-        sudo chown $(whoami):$(whoami) $dest_path
-        sudo docker cp compass-tasks:/opt/os_cacert $os_cacert
-    else
+    if [ "${BRANCH}" == "stable/danube" ]; then
         verify_connectivity $installer_ip
         controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
             'mysql -ucompass -pcompass -Dcompass -e"select *  from cluster;"' \
@@ -201,6 +197,10 @@ elif [ "$installer_type" == "compass" ]; then
         fi
         info "public_ip: $public_ip"
         swap_to_public $public_ip
+    else
+        sudo docker cp compass-tasks:/opt/openrc $dest_path &> /dev/null
+        sudo chown $(whoami):$(whoami) $dest_path
+        sudo docker cp compass-tasks:/opt/os_cacert $os_cacert
     fi
 
 elif [ "$installer_type" == "joid" ]; then
index f235790..07440ad 100644 (file)
@@ -22,7 +22,7 @@ ARG BRANCH=master
 
 ENV HOME /home/opnfv
 ENV working_dir ${HOME}/releng/utils/test/reporting
-ENV CONFIG_REPORTING_YAML ${working_dir}/reporting.yaml
+ENV CONFIG_REPORTING_YAML ${working_dir}/reporting/reporting.yaml
 
 WORKDIR ${HOME}
 # Packaged dependencies
index 6cc7a7c..25d3ef1 100755 (executable)
@@ -3,7 +3,7 @@
 export PYTHONPATH="${PYTHONPATH}:./reporting"
 export CONFIG_REPORTING_YAML=./reporting/reporting.yaml
 
-declare -a versions=(danube master)
+declare -a versions=(euphrates danube master)
 declare -a projects=(functest storperf yardstick qtip vsperf bottlenecks)
 
 project=$1
index 86258ac..ff7061b 100644 (file)
@@ -35,7 +35,7 @@
                                        <div id="main">
                                                <div class="inner">
                                                        <header>
-                                                               <h1>Danube reporting</h1>
+                                                               <h1>Euphrates reporting</h1>
                                                        </header>
                                                        <section class="tiles">
                                                                <article class="style3">
index 3625b50..3eb490d 100644 (file)
Binary files a/utils/test/reporting/img/euphrates.jpg and b/utils/test/reporting/img/euphrates.jpg differ
index 8966d06..225227a 100644 (file)
@@ -37,10 +37,14 @@ for version in VERSIONS:
     # For all the installers
     for installer in INSTALLERS:
         # get scenarios results data
+        if version != 'master':
+            new_version = "stable/{}".format(version)
+        else:
+            new_version = version
         scenario_results = rp_utils.getScenarios("bottlenecks",
                                                  "posca_factor_ping",
                                                  installer,
-                                                 version)
+                                                 new_version)
         LOGGER.info("scenario_results: %s", scenario_results)
 
         scenario_stats = rp_utils.getScenarioStats(scenario_results)
index 808c841..267803e 100755 (executable)
@@ -13,8 +13,8 @@ import time
 
 import jinja2
 
-import testCase as tc
-import scenarioResult as sr
+import reporting.functest.testCase as tc
+import reporting.functest.scenarioResult as sr
 import reporting.utils.reporting_utils as rp_utils
 
 """
@@ -221,7 +221,7 @@ for version in versions:
                                 LOGGER.debug("No results found")
 
                         items[s] = testCases2BeDisplayed
-                except Exception:
+                except Exception:  # pylint: disable=broad-except
                     LOGGER.error("Error installer %s, version %s, scenario %s",
                                  installer, version, s)
                     LOGGER.error("No data available: %s", sys.exc_info()[0])
index 26e8c8b..7504493 100644 (file)
@@ -9,7 +9,7 @@
 import requests
 import yaml
 
-import utils.reporting_utils as rp_utils
+import reporting.utils.reporting_utils as rp_utils
 
 yardstick_conf = rp_utils.get_config('yardstick.test_conf')
 response = requests.get(yardstick_conf)