Merge "Refactoring daisy CI Jobs"
author    Serena Feng <feng.xiaowei@zte.com.cn>
          Wed, 8 Feb 2017 10:03:48 +0000 (10:03 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
          Wed, 8 Feb 2017 10:03:48 +0000 (10:03 +0000)
27 files changed:
jjb/apex/apex-snapshot-create.sh
jjb/apex/apex.yml
jjb/bottlenecks/bottlenecks-ci-jobs.yml
jjb/bottlenecks/bottlenecks-project-jobs.yml
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-deploy.sh
jjb/compass4nfv/compass-dovetail-jobs.yml
jjb/compass4nfv/compass-verify-jobs.yml
jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
jjb/fuel/fuel-daily-jobs.yml
jjb/global/releng-macros.yml
jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
jjb/kvmfornfv/kvmfornfv.yml
jjb/qtip/helpers/cleanup-deploy.sh [moved from jjb/qtip/qtip-cleanup.sh with 100% similarity]
jjb/qtip/helpers/validate-deploy.sh [moved from jjb/qtip/qtip-daily-ci.sh with 86% similarity]
jjb/qtip/helpers/validate-setup.sh [new file with mode: 0644]
jjb/qtip/qtip-ci-jobs.yml [deleted file]
jjb/qtip/qtip-validate-jobs.yml [new file with mode: 0644]
jjb/qtip/qtip-verify-jobs.yml [moved from jjb/qtip/qtip-project-jobs.yml with 93% similarity]
jjb/releng/opnfv-docker.yml
jjb/releng/testapi-automate.yml
jjb/yardstick/yardstick-project-jobs.yml
prototypes/bifrost/scripts/destroy-env.sh
utils/test/reporting/functest/reporting-status.py
utils/test/reporting/reporting.yaml
utils/test/reporting/utils/reporting_utils.py
utils/test/testapi/htmlize/htmlize.py

diff --git a/jjb/apex/apex-snapshot-create.sh b/jjb/apex/apex-snapshot-create.sh
index 09c6a11..5725ac6 100644
@@ -26,10 +26,10 @@ mkdir -p ${tmp_dir}
 pushd ${tmp_dir} > /dev/null
 echo "Copying overcloudrc and ssh key from Undercloud..."
 # Store overcloudrc
-UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]')
-scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:overcloudrc ./
+UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
+sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:overcloudrc ./
 # Copy out ssh key of stack from undercloud
-scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:.ssh/id_rsa ./
+sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:.ssh/id_rsa ./
 popd > /dev/null
 
 echo "Gathering introspection information"
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index fcf08ed..512112e 100644
                 build-step-failure-threshold: 'never'
                 failure-threshold: 'never'
                 unstable-threshold: 'FAILURE'
-
-
+        - trigger-builds:
+          - project: 'apex-deploy-baremetal-os-odl-bgpvpn-ha-{stream}'
+            predefined-parameters: |
+              BUILD_DIRECTORY=apex-build-{stream}/.build
+              OPNFV_CLEAN=yes
+            git-revision: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+            block: true
+        - trigger-builds:
+          - project: 'functest-apex-{daily-slave}-daily-{stream}'
+            predefined-parameters:
+              DEPLOY_SCENARIO=os-odl-bgpvpn-ha
+            block: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+        - trigger-builds:
+          - project: 'yardstick-apex-{slave}-daily-{stream}'
+            predefined-parameters:
+              DEPLOY_SCENARIO=os-odl-bgpvpn-ha
+            block: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
 # Colorado Build
 - job-template:
     name: 'apex-build-colorado'
diff --git a/jjb/bottlenecks/bottlenecks-ci-jobs.yml b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
index 582f40c..a9ccd69 100644
@@ -72,6 +72,7 @@
     suite:
         - 'rubbos'
         - 'vstf'
+        - 'posca'
 
     jobs:
         - 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yml b/jjb/bottlenecks/bottlenecks-project-jobs.yml
index 535edaf..12ea31b 100644
@@ -29,6 +29,7 @@
     suite:
         - 'rubbos'
         - 'vstf'
+        - 'posca'
 
 ################################
 # job templates
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 6c9686a..b749ea6 100644
         - shell:
             !include-raw-escape: ./compass-deploy.sh
 
-    publishers:
-        - archive:
-            artifacts: 'ansible.log'
-            allow-empty: 'true'
-            fingerprint: true
-
 ########################
 # parameter macros
 ########################
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
index f89d04e..6696e4b 100644
@@ -65,7 +65,4 @@ echo
 echo "--------------------------------------------------------"
 echo "Done!"
 
-ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-sshpass -p root scp 2>/dev/null $ssh_options root@${INSTALLER_IP}:/var/ansible/run/openstack_${OPENSTACK_VERSION}-opnfv2/ansible.log ./  &> /dev/null
-
 exit $deploy_ret
diff --git a/jjb/compass4nfv/compass-dovetail-jobs.yml b/jjb/compass4nfv/compass-dovetail-jobs.yml
index 3337cd0..d49d0ec 100644
@@ -28,7 +28,7 @@
     scenario:
         - 'os-nosdn-nofeature-ha':
             disabled: false
-            auto-trigger-name: 'weekly-trigger-disabled'
+            auto-trigger-name: 'compass-{scenario}-{pod}-weekly-{stream}-trigger'
 
     jobs:
         - 'compass-{scenario}-{pod}-weekly-{stream}'
@@ -63,7 +63,7 @@
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - compass-ci-parameter:
+        - compass-dovetail-parameter:
             installer: '{installer}'
             gs-pathname: '{gs-pathname}'
         - string:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - compass-ci-parameter:
+        - compass-dovetail-parameter:
             installer: '{installer}'
             gs-pathname: '{gs-pathname}'
         - '{slave-label}-defaults'
         - shell:
             !include-raw-escape: ./compass-deploy.sh
 
-    publishers:
-        - archive:
-            artifacts: 'ansible.log'
-            allow-empty: 'true'
-            fingerprint: true
-
 ########################
 # parameter macros
 ########################
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-weekly-colorado-trigger'
     triggers:
-        - timed: ''
+        - timed: 'H H * * 0'
+
+- trigger:
+    name: 'dovetail-weekly-trigger'
+    triggers:
+        - timed: 'H H * * 0'
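For reference, 'H H * * 0' is Jenkins cron syntax: the H fields hash the job
name into a stable minute and hour, so weekly jobs spread across the day
instead of all firing at the same moment. Field order is standard cron:

    # minute  hour  day-of-month  month  day-of-week
    #   H      H         *          *         0
    # -> once a week on Sunday, at a minute/hour derived
    #    from a hash of the job name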
diff --git a/jjb/compass4nfv/compass-verify-jobs.yml b/jjb/compass4nfv/compass-verify-jobs.yml
index f4df670..d581380 100644
             description: "Built on $NODE_NAME"
         - '{project}-verify-{phase}-macro'
 
-    publishers:
-        - archive:
-            artifacts: 'ansible.log'
-            allow-empty: 'true'
-            fingerprint: true
 #####################################
 # builder macros
 #####################################
diff --git a/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh b/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
index 4aa7b0b..ef4a07b 100755
@@ -15,7 +15,7 @@ else
     exit 0
 fi
 
-./ci/deploy/deploy.sh ${DHA_CONF} ${NETWORK_CONF}
+sudo ./ci/deploy/deploy.sh -d ${DHA_CONF} -n ${NETWORK_CONF} -p ${NODE_NAME:-"zte-virtual1"}
 
 if [ $? -ne 0 ]; then
     echo "depolyment failed!"
diff --git a/jjb/fuel/fuel-daily-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml
index a9af1bc..02267bd 100644
@@ -83,6 +83,8 @@
             auto-trigger-name: 'daily-trigger-disabled'
         - 'os-nosdn-kvm_ovs_dpdk-ha':
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+        - 'os-nosdn-kvm_ovs_dpdk_bar-ha':
+            auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         # NOHA scenarios
         - 'os-nosdn-nofeature-noha':
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-master-trigger'
     triggers:
         - timed: '30 12 * * *'
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-master-trigger'
+    triggers:
+        - timed: '30 8 * * *'
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-danube-trigger'
+    triggers:
+        - timed: ''
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-danube-trigger'
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-master-trigger'
+    triggers:
+        - timed: ''
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-danube-trigger'
+    triggers:
+        - timed: ''
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-danube-trigger'
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-master-trigger'
+    triggers:
+        - timed: ''
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-master-trigger'
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod2-daily-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod2-daily-master-trigger'
+    triggers:
+        - timed: ''
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-master-trigger'
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-master-trigger'
+    triggers:
+        - timed: ''
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-master-trigger'
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-danube-trigger'
+    triggers:
+        - timed: ''
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-danube-trigger'
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod2-daily-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod2-daily-danube-trigger'
+    triggers:
+        - timed: ''
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-danube-trigger'
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-danube-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-danube-trigger'
+    triggers:
+        - timed: ''
 # NOHA Scenarios
 - trigger:
     name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-danube-trigger'
diff --git a/jjb/global/releng-macros.yml b/jjb/global/releng-macros.yml
index 7e11d92..23c2435 100644
@@ -93,7 +93,6 @@
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
 
-# NOTE: unused macro, but we may use this for some jobs.
 - trigger:
     name: gerrit-trigger-patch-merged
     triggers:
                     comment-contains-value: 'remerge'
             projects:
               - project-compare-type: 'ANT'
-                project-pattern: '{name}'
+                project-pattern: '{project}'
                 branches:
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
diff --git a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
index 6f8fff3..c6b8005 100755
@@ -11,16 +11,17 @@ fi
 
 case "$JOB_TYPE" in
     verify)
-        OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
-        GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
-        echo "Removing outdated artifacts produced for the previous patch for the change $GERRIT_CHANGE_NUMBER"
-        gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1 && gsutil rm -r $GS_UPLOAD_LOCATION
-        echo "Uploading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
-        ;;
+       OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
+       GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
+       echo "Removing outdated artifacts produced for the previous patch for the change $GERRIT_CHANGE_NUMBER"
+       gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1 && gsutil rm -r $GS_UPLOAD_LOCATION
+       echo "Uploading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
+       ;;
     daily)
         echo "Uploading daily artifacts This could take some time..."
         OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
         GS_UPLOAD_LOCATION="gs://$GS_URL/$OPNFV_ARTIFACT_VERSION"
+        GS_LOG_LOCATION="gs://$GS_URL/logs-$(date -u +"%Y-%m-%d")/"
         ;;
     *)
         echo "Artifact upload is not enabled for $JOB_TYPE jobs"
@@ -38,10 +39,23 @@ esac
 source $WORKSPACE/opnfv.properties
 
 # upload artifacts
-gsutil cp -r $WORKSPACE/build_output/* $GS_UPLOAD_LOCATION > $WORKSPACE/gsutil.log 2>&1
-gsutil -m setmeta -r \
-    -h "Cache-Control:private, max-age=0, no-transform" \
-    $GS_UPLOAD_LOCATION > /dev/null 2>&1
+if [[ "$PHASE" == "build" ]]; then
+    gsutil cp -r $WORKSPACE/build_output/* $GS_UPLOAD_LOCATION > $WORKSPACE/gsutil.log 2>&1
+    gsutil -m setmeta -r \
+        -h "Cache-Control:private, max-age=0, no-transform" \
+        $GS_UPLOAD_LOCATION > /dev/null 2>&1
+else
+    if [[ "$JOB_TYPE" == "daily" ]]; then
+        log_dir=$WORKSPACE/build_output/log
+        if [[ -d "$log_dir" ]]; then
+            #Uploading logs to artifacts
+            echo "Uploading artifacts for future debugging needs...."
+            gsutil cp -r $WORKSPACE/build_output/log-*.tar.gz $GS_LOG_LOCATION > $WORKSPACE/gsutil.log 2>&1
+        else
+            echo "No test logs/artifacts available for uploading"
+        fi
+    fi
+fi
 
 # upload metadata file for the artifacts built by daily job
 if [[ "$JOB_TYPE" == "daily" ]]; then
diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml
index 157f2dc..522e971 100644
             name: TEST_NAME
             default: '{testname}'
             description: "Daily job to execute kvmfornfv '{testname}' testcase."
+        - string:
+            name: PHASE
+            default: '{phase}'
+            description: "Execution of kvmfornfv daily '{phase}' job ."
 
     builders:
         - description-setter:
             !include-raw: ./kvmfornfv-download-artifact.sh
         - shell:
             !include-raw: ./kvmfornfv-test.sh
+        - shell:
+            !include-raw: ./kvmfornfv-upload-artifact.sh
 - builder:
     name: 'kvmfornfv-packet_forward-daily-build-macro'
     builders:
diff --git a/jjb/qtip/qtip-daily-ci.sh b/jjb/qtip/helpers/validate-deploy.sh
similarity index 86%
rename from jjb/qtip/qtip-daily-ci.sh
rename to jjb/qtip/helpers/validate-deploy.sh
index 4fdc043..1645537 100644
@@ -27,12 +27,7 @@ if [ $(docker ps | grep 'opnfv/qtip' | wc -l) == 0 ]; then
 else
     echo "The container ID is: ${container_id}"
     QTIP_REPO=/home/opnfv/repos/qtip
-
-    echo "Run Qtip test"
-    docker exec -t ${container_id} $QTIP_REPO/docker/run_qtip.sh
-
-    echo "Pushing available results to DB"
-    docker exec -t ${container_id} $QTIP_REPO/docker/push_db.sh
+# TODO(yujunz): execute benchmark plan for compute-qpi
 fi
 
 echo "Qtip done!"
diff --git a/jjb/qtip/helpers/validate-setup.sh b/jjb/qtip/helpers/validate-setup.sh
new file mode 100644
index 0000000..8d84e12
--- /dev/null
+++ b/jjb/qtip/helpers/validate-setup.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+##############################################################################
+# Copyright (c) 2017 ZTE and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -e
+
+# setup virtualenv
+sudo pip install -U virtualenv virtualenvwrapper
+export WORKON_HOME=$HOME/.virtualenvs
+source /usr/local/bin/virtualenvwrapper.sh
+mkvirtualenv qtip
+workon qtip
+
+# setup qtip
+sudo pip install $HOME/repos/qtip
+
+# testing
+qtip --version
+qtip --help
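Note on this script: mkvirtualenv and workon come from virtualenvwrapper, but
"sudo pip install $HOME/repos/qtip" runs with a reset environment, so it most
likely installs into the system Python rather than the freshly activated qtip
virtualenv. A sketch of the variant that stays inside the env:

    $ export WORKON_HOME=$HOME/.virtualenvs
    $ source /usr/local/bin/virtualenvwrapper.sh
    $ mkvirtualenv qtip && workon qtip
    (qtip)$ pip install $HOME/repos/qtip   # no sudo: installs into the venv
    (qtip)$ qtip --version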
diff --git a/jjb/qtip/qtip-ci-jobs.yml b/jjb/qtip/qtip-ci-jobs.yml
deleted file mode 100644
index 38f9955..0000000
--- a/jjb/qtip/qtip-ci-jobs.yml
+++ /dev/null
@@ -1,100 +0,0 @@
-####################################
-# job configuration for qtip
-####################################
-- project:
-    name: qtip
-
-    project: 'qtip'
-
-#--------------------------------
-# BRANCH ANCHORS
-#--------------------------------
-    master: &master
-        stream: master
-        branch: '{stream}'
-        gs-pathname: ''
-        docker-tag: 'latest'
-#--------------------------------
-# POD, INSTALLER, AND BRANCH MAPPING
-#--------------------------------
-#        master
-#--------------------------------
-    pod:
-        - zte-pod2:
-            installer: fuel
-            auto-trigger-name: 'qtip-daily-zte-pod2-trigger'
-            <<: *master
-        - zte-pod3:
-            installer: fuel
-            auto-trigger-name: 'qtip-daily-zte-pod3-trigger'
-            <<: *master
-
-#--------------------------------
-    jobs:
-        - 'qtip-{installer}-{pod}-daily-{stream}'
-
-################################
-# job templates
-################################
-- job-template:
-    name: 'qtip-{installer}-{pod}-daily-{stream}'
-
-    disabled: false
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-            branch: '{branch}'
-        - '{installer}-defaults'
-        - '{pod}-defaults'
-        - string:
-            name: DEPLOY_SCENARIO
-            default: 'os-nosdn-nofeature-ha'
-        - string:
-            name: DOCKER_TAG
-            default: '{docker-tag}'
-            description: 'Tag to pull docker image'
-
-    scm:
-        - git-scm
-
-    triggers:
-        - '{auto-trigger-name}'
-
-    builders:
-        - description-setter:
-            description: "POD: $NODE_NAME"
-        - 'qtip-cleanup'
-        - 'qtip-daily-ci'
-
-    publishers:
-        - email:
-            recipients: wu.zhihui1@zte.com.cn, zhang.yujunz@zte.com.cn
-
-###########################
-#builder macros
-###########################
-- builder:
-    name: qtip-daily-ci
-    builders:
-        - shell:
-            !include-raw: ./qtip-daily-ci.sh
-
-- builder:
-    name: qtip-cleanup
-    builders:
-        - shell:
-            !include-raw: ./qtip-cleanup.sh
-
-#################
-#trigger macros
-#################
-- trigger:
-    name: 'qtip-daily-zte-pod2-trigger'
-    triggers:
-        - timed: '0 7 * * *'
-
-- trigger:
-    name: 'qtip-daily-zte-pod3-trigger'
-    triggers:
-        - timed: '0 1 * * *'
diff --git a/jjb/qtip/qtip-validate-jobs.yml b/jjb/qtip/qtip-validate-jobs.yml
new file mode 100644
index 0000000..161acb1
--- /dev/null
+++ b/jjb/qtip/qtip-validate-jobs.yml
@@ -0,0 +1,136 @@
+#######################
+# validate after MERGE
+#######################
+- project:
+    name: qtip
+    project: qtip
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+    master: &master
+        stream: master
+        branch: '{stream}'
+        gs-pathname: ''
+        docker-tag: latest
+#--------------------------------
+# JOB VARIABLES
+#--------------------------------
+    pod:
+        - zte-pod2:
+            installer: fuel
+            <<: *master
+        - zte-pod3:
+            installer: fuel
+            <<: *master
+    task:
+        - daily:
+            auto-builder-name: qtip-validate-deploy
+            auto-trigger-name: 'qtip-daily-{pod}-trigger'
+        - validate:
+            auto-builder-name: qtip-validate-setup
+            auto-trigger-name: qtip-validate-trigger
+#--------------------------------
+# JOB LIST
+#--------------------------------
+    jobs:
+        - 'qtip-{task}-{installer}-{pod}-{stream}'
+
+################################
+# job templates
+################################
+- job-template:
+    name: 'qtip-{task}-{installer}-{pod}-{stream}'
+    disabled: false
+    parameters:
+        - qtip-common-parameters:
+            project: '{project}'
+            <<: *master
+        - '{installer}-defaults'
+        - '{pod}-defaults'
+    scm:
+        - git-scm
+    triggers:
+        - '{auto-trigger-name}'
+    builders:
+        - qtip-common-builders
+        - '{auto-builder-name}'
+    publishers:
+        - qtip-common-publishers
+
+################
+# MACROS
+################
+
+#---------
+# builder
+#---------
+
+- builder:
+    name: qtip-common-builders
+    builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
+
+- builder:
+    name: qtip-validate-deploy
+    builders:
+        - shell:
+            !include-raw: ./helpers/validate-deploy.sh
+        - shell:
+            !include-raw: ./helpers/cleanup-deploy.sh
+
+- builder:
+    name: qtip-validate-setup
+    builders:
+        - shell:
+            !include-raw: ./helpers/validate-setup.sh
+
+#-----------
+# parameter
+#-----------
+
+- parameter:
+    name: qtip-common-parameters
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-nosdn-nofeature-ha'
+        - string:
+            name: DOCKER_TAG
+            default: '{docker-tag}'
+            description: 'Tag to pull docker image'
+
+#-----------
+# publisher
+#-----------
+
+- publisher:
+    name: qtip-common-publishers
+    publishers:
+        - email:
+            recipients: wu.zhihui1@zte.com.cn, zhang.yujunz@zte.com.cn
+
+#---------
+# trigger
+#---------
+
+- trigger:
+    name: qtip-daily-zte-pod2-trigger
+    triggers:
+        - timed: '0 7 * * *'
+
+- trigger:
+    name: qtip-daily-zte-pod3-trigger
+    triggers:
+        - timed: '0 1 * * *'
+
+- trigger:
+    name: qtip-validate-trigger
+    triggers:
+        - gerrit-trigger-patch-merged:
+            project: '{project}'
+            branch: '{branch}'
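The 'qtip-{task}-{installer}-{pod}-{stream}' template expands against the pod
and task axes above, so this file should yield four jobs. One way to
sanity-check the expansion locally with jenkins-job-builder before pushing
(the listed names are what the axes should produce):

    $ jenkins-jobs test jjb/qtip -o /tmp/jjb-out
    $ ls /tmp/jjb-out | grep '^qtip-'
    qtip-daily-fuel-zte-pod2-master
    qtip-daily-fuel-zte-pod3-master
    qtip-validate-fuel-zte-pod2-master
    qtip-validate-fuel-zte-pod3-master
    qtip-verify-master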
diff --git a/jjb/qtip/qtip-project-jobs.yml b/jjb/qtip/qtip-verify-jobs.yml
similarity index 93%
rename from jjb/qtip/qtip-project-jobs.yml
rename to jjb/qtip/qtip-verify-jobs.yml
index 00455f8..d1fc34d 100644
@@ -1,11 +1,12 @@
-- project:
-    name: qtip-project-jobs
-
-    project: 'qtip'
+######################
+# verify before MERGE
+######################
 
+- project:
+    name: qtip-verify-jobs
+    project: qtip
     jobs:
         - 'qtip-verify-{stream}'
-
     stream:
         - master:
             branch: '{stream}'
diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml
index cb9b4d4..70d38f2 100644
@@ -72,6 +72,8 @@
         # projects with jobs for master
         - 'daisy':
             <<: *master
+        - 'escalator':
+            <<: *master
 
     jobs:
         - '{project}-docker-build-push-monitor-{stream}'
diff --git a/jjb/releng/testapi-automate.yml b/jjb/releng/testapi-automate.yml
index 4d450f3..53e074b 100644
@@ -6,12 +6,10 @@
             gs-pathname: ''
 
     phase:
-        - 'docker-update':
-            slave-label: 'opnfv-build'
+        - 'docker-update'
         - 'docker-deploy':
             slave-label: 'testresults'
-        - 'generate-doc':
-            slave-label: 'opnfv-build'
+        - 'generate-doc'
 
     jobs:
         - 'testapi-automate-{stream}'
                     pattern: 'utils/test/testapi/**'
 
     builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
         - multijob:
             name: docker-update
             condition: SUCCESSFUL
             condition: SUCCESSFUL
             projects:
                 - name: 'testapi-automate-docker-deploy-{stream}'
-                  current-parameters: true
+                  current-parameters: false
+                  predefined-parameters: |
+                    GIT_BASE=$GIT_BASE
+                  node-label-name: SLAVE_LABEL
+                  node-label: testresults
                   kill-phase-on: FAILURE
                   abort-all-job: true
         - multijob:
             max-per-node: 1
             option: 'project'
 
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - string:
+            name: DOCKER_TAG
+            default: "latest"
+            description: "Tag name for testapi docker image"
+
     wrappers:
         - ssh-agent-wrapper
         - timeout:
         - git-scm
 
     builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
         - 'testapi-automate-{phase}-macro'
 
 ################################
     name: 'email-publisher'
     publishers:
         - email:
-            recipients: rohitsakala@gmail.com serena.feng.711@gmail.com
+            recipients: rohitsakala@gmail.com feng.xiaowei@zte.com.cn
             notify-every-unstable-build: false
-            send-to-individuals: true
\ No newline at end of file
+            send-to-individuals: true
diff --git a/jjb/yardstick/yardstick-project-jobs.yml b/jjb/yardstick/yardstick-project-jobs.yml
index 4b7ff6f..bbfa152 100644
             set -o errexit
             set -o pipefail
 
+            sudo apt-get install -y build-essential python-dev python3-dev
+
             echo "Running unit tests..."
             cd $WORKSPACE
-            virtualenv $WORKSPACE/yardstick_venv
-            source $WORKSPACE/yardstick_venv/bin/activate
-
-            # install python packages
-            sudo apt-get install -y build-essential python-dev python-pip python-pkg-resources
-            easy_install -U setuptools==33.1.1
-            easy_install -U pip
-            pip install -r requirements.txt || pip install -r tests/ci/requirements.txt
-            pip install -e .
-
-            # unit tests
-            ./run_tests.sh
-
-            deactivate
+            tox
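Delegating to tox replaces the hand-rolled virtualenv: tox reads the
repository's tox.ini, builds an isolated environment per listed target,
installs the declared dependencies, and runs the configured test command.
The builder then only needs the compilers and Python headers installed
above, plus tox itself:

    $ pip install tox       # if not already on the build slave
    $ cd $WORKSPACE
    $ tox                   # runs every env in tox.ini's envlist
    $ tox -e py27           # or a single environment by name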
diff --git a/prototypes/bifrost/scripts/destroy-env.sh b/prototypes/bifrost/scripts/destroy-env.sh
index f6c66a8..b73092b 100755
@@ -14,21 +14,18 @@ if [[ $(whoami) != "root" ]]; then
     exit 1
 fi
 
-virsh destroy jumphost.opnfvlocal || true
-virsh destroy controller00.opnfvlocal || true
-virsh destroy compute00.opnfvlocal || true
-virsh undefine jumphost.opnfvlocal || true
-virsh undefine controller00.opnfvlocal || true
-virsh undefine compute00.opnfvlocal || true
+# Delete all VMs on the slave since proposed patchsets
+# may leave undesired VM leftovers
+for vm in $(virsh list --all --name); do
+    virsh destroy $vm || true
+    virsh undefine $vm || true
+done
 
 service ironic-conductor stop || true
 
-echo "removing from database"
+echo "removing ironic database"
 if $(which mysql &> /dev/null); then
-    mysql -u root ironic --execute "truncate table ports;"
-    mysql -u root ironic --execute "delete from node_tags;"
-    mysql -u root ironic --execute "delete from nodes;"
-    mysql -u root ironic --execute "delete from conductors;"
+    mysql -u root ironic --execute "drop database ironic;"
 fi
 echo "removing leases"
 [[ -e /var/lib/misc/dnsmasq/dnsmasq.leases ]] && > /var/lib/misc/dnsmasq/dnsmasq.leases
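The leases cleanup uses bare-redirection truncation: "> file" empties the
file in place, keeping its inode and permissions, which matters if dnsmasq
still holds the file open. For example:

    $ echo stale-lease > /tmp/dnsmasq.leases
    $ [[ -e /tmp/dnsmasq.leases ]] && > /tmp/dnsmasq.leases
    $ wc -c < /tmp/dnsmasq.leases
    0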
diff --git a/utils/test/reporting/functest/reporting-status.py b/utils/test/reporting/functest/reporting-status.py
index 66bdd57..158ee59 100755
@@ -40,6 +40,9 @@ versions = rp_utils.get_config('general.versions')
 installers = rp_utils.get_config('general.installers')
 blacklist = rp_utils.get_config('functest.blacklist')
 log_level = rp_utils.get_config('general.log.log_level')
+exclude_noha = rp_utils.get_config('functest.exclude_noha')
+exclude_virtual = rp_utils.get_config('functest.exclude_virtual')
+
 response = requests.get(cf)
 
 functest_yaml_config = yaml.safe_load(response.text)
@@ -48,7 +51,10 @@ logger.info("*******************************************")
 logger.info("*                                         *")
 logger.info("*   Generating reporting scenario status  *")
 logger.info("*   Data retention: %s days               *" % period)
-logger.info("*   Log level: %s                       *" % log_level)
+logger.info("*   Log level: %s                         *" % log_level)
+logger.info("*                                         *")
+logger.info("*   Virtual PODs exluded: %s              *" % exclude_virtual)
+logger.info("*   NOHA scenarios excluded: %s           *" % exclude_noha)
 logger.info("*                                         *")
 logger.info("*******************************************")
 
@@ -90,7 +96,6 @@ for version in versions:
         scenario_stats = rp_utils.getScenarioStats(scenario_results)
         items = {}
         scenario_result_criteria = {}
-
         scenario_file_name = ("./display/" + version +
                               "/functest/scenario_history.txt")
         # initiate scenario file if it does not exist
diff --git a/utils/test/reporting/reporting.yaml b/utils/test/reporting/reporting.yaml
index fa98626..9db0890 100644
@@ -2,13 +2,13 @@ general:
     installers:
         - apex
         - compass
+        - daisy
         - fuel
         - joid
-        - daisy
 
     versions:
         - master
-        - colorado
+
     log:
         log_file: reporting.log
         log_level: ERROR
@@ -30,17 +30,19 @@ general:
 
 testapi:
     url: testresults.opnfv.org/test/api/v1/results
-    
+
 functest:
     blacklist:
         - ovno
         - security_scan
+        - rally_sanity
     max_scenario_criteria: 50
     test_conf: https://git.opnfv.org/cgit/functest/plain/functest/ci/testcases.yaml
     log_level: ERROR
-    jenkins_url: https://build.opnfv.org/ci/view/functest/job
-    
-    
+    jenkins_url: https://build.opnfv.org/ci/view/functest/job/
+    exclude_noha: False
+    exclude_virtual: True
+
 yardstick:
     test_conf: https://git.opnfv.org/cgit/yardstick/plain/tests/ci/report_config.yaml
     log_level: ERROR
diff --git a/utils/test/reporting/utils/reporting_utils.py b/utils/test/reporting/utils/reporting_utils.py
index 0af60c7..da97953 100644
@@ -127,7 +127,15 @@ def getScenarios(case, installer, version):
             # Retrieve all the scenarios per installer
             if not r['scenario'] in scenario_results.keys():
                 scenario_results[r['scenario']] = []
-            scenario_results[r['scenario']].append(r)
+            # Do we consider results from virtual pods ...
+            # Do we consider results for non HA scenarios...
+            exclude_virtual_pod = get_config('functest.exclude_virtual')
+            exclude_noha = get_config('functest.exclude_noha')
+            if ((exclude_virtual_pod and "virtual" in r['pod_name']) or
+               (exclude_noha and "noha" in r['scenario'])):
+                    print "exclude virtual pod results..."
+            else:
+                scenario_results[r['scenario']].append(r)
 
     return scenario_results
 
@@ -254,13 +262,14 @@ def getResult(testCase, installer, scenario, version):
 def getJenkinsUrl(build_tag):
     # e.g. jenkins-functest-apex-apex-daily-colorado-daily-colorado-246
     # id = 246
+    # jenkins-functest-compass-huawei-pod5-daily-master-136
+    # id = 136
     # note it is linked to jenkins format
     # if this format changes...function to be adapted....
     url_base = get_config('functest.jenkins_url')
     try:
         build_id = [int(s) for s in build_tag.split("-") if s.isdigit()]
-        jenkins_path = filter(lambda c: not c.isdigit(), build_tag)
-        url_id = jenkins_path[8:-1] + "/" + str(build_id[0])
+        url_id = build_tag[8:-(len(build_id)+3)] + "/" + str(build_id[0])
         jenkins_url = url_base + url_id + "/console"
     except:
         print 'Impossible to get jenkins url:'
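The new slicing drops the fragile filter() approach: it strips the "jenkins-"
prefix (8 characters) and the trailing "-<id>" from the build tag, then
re-appends the id as a path segment; the fixed "+3" appears to assume a
single three-digit build id. The same transformation in shell, using the tag
from the comment above:

    $ build_tag=jenkins-functest-compass-huawei-pod5-daily-master-136
    $ id=${build_tag##*-}           # -> 136
    $ path=${build_tag#jenkins-}    # drop the "jenkins-" prefix
    $ path=${path%-$id}             # drop the trailing "-136"
    $ echo "${path}/${id}/console"
    functest-compass-huawei-pod5-daily-master/136/console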
diff --git a/utils/test/testapi/htmlize/htmlize.py b/utils/test/testapi/htmlize/htmlize.py
index 075e31f..70976d2 100644
@@ -39,12 +39,12 @@ if __name__ == '__main__':
     parser.add_argument('-ru', '--resource-listing-url',
                         type=str,
                         required=False,
-                        default='http://testresults.opnfv.org/test/swagger/spec.json',
+                        default='http://testresults.opnfv.org/auto/swagger/spec.json',
                         help='Resource Listing Spec File')
     parser.add_argument('-au', '--api-declaration-url',
                         type=str,
                         required=False,
-                        default='http://testresults.opnfv.org/test/swagger/spec',
+                        default='http://testresults.opnfv.org/auto/swagger/spec',
                         help='API Declaration Spec File')
     parser.add_argument('-o', '--output-directory',
                         required=True,
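The defaults now point at the /auto/ deployment of TestAPI instead of /test/.
A quick probe of the new spec endpoint, assuming the service is reachable
from the machine running htmlize:

    $ curl -sf http://testresults.opnfv.org/auto/swagger/spec.json | python -m json.tool | head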