Merge "jjb: xci: Make use of alternative ssh configuration file"
author Markos Chandras <mchandras@suse.de>
Tue, 10 Oct 2017 08:40:25 +0000 (08:40 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Tue, 10 Oct 2017 08:40:25 +0000 (08:40 +0000)
12 files changed:
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/bottlenecks/bottlenecks-run-suite.sh
jjb/compass4nfv/compass-ci-jobs.yml
jjb/fuel/fuel-deploy.sh
jjb/global/installer-params.yml
jjb/releng/opnfv-docker-custom.yml [new file with mode: 0644]
jjb/xci/bifrost-cleanup-job.yml
jjb/xci/bifrost-verify-jobs.yml
jjb/xci/bifrost-verify.sh
jjb/xci/xci-verify-jobs.yml
utils/test/reporting/reporting/functest/reporting-status.py

diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index 2ec1af6..2e11592 100644 (file)
             - 'apex-run.*'
 
     triggers:
-      - timed: '0 1 * * *'
+      - timed: '' #'0 1 * * *'
 
     builders:
       - description-setter:
diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2
index d2adb91..d49b6ea 100644 (file)
             - 'apex-run.*'
 
     triggers:
-      - timed: '0 1 * * *'
+      - timed: '' #'0 1 * * *'
 
     builders:
       - description-setter:
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
index 811f9d5..f0b8a40 100644 (file)
@@ -27,6 +27,7 @@ BOTTLENECKS_CONFIG=/tmp
 
 if [[ $SUITE_NAME == *posca* ]]; then
     POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
+    sudo rm -f ${OPENRC}
 
     # Preparing OpenStack RC and Cacert files
     echo "BOTTLENECKS INFO: fetching os credentials from $INSTALLER_TYPE"
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index b107c65..74236e3 100644 (file)
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-huawei-pod7-danube-trigger'
     triggers:
-        - timed: '0 19 * * *'
+        - timed: '' #'0 19 * * *'
 - trigger:
     name: 'compass-os-nosdn-openo-ha-huawei-pod7-danube-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l2-nofeature-ha-huawei-pod7-danube-trigger'
     triggers:
-        - timed: '0 21 * * *'
+        - timed: '' #'0 21 * * *'
 - trigger:
     name: 'compass-os-odl_l3-nofeature-ha-huawei-pod7-danube-trigger'
     triggers:
-        - timed: '0 15 * * *'
+        - timed: '' #'0 15 * * *'
 - trigger:
     name: 'compass-os-onos-nofeature-ha-huawei-pod7-danube-trigger'
     triggers:
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index be45604..e818d90 100755 (executable)
@@ -76,13 +76,6 @@ if [[ "${LAB_CONFIG_URL}" =~ ^(git|ssh):// ]]; then
     rm -rf "${LOCAL_CFG}"
     git clone --quiet --branch "${BRANCH}" "${LAB_CONFIG_URL}" "${LOCAL_CFG}"
     LAB_CONFIG_URL="file://${LOCAL_CFG}"
-
-    # Source local_env if present, which contains POD-specific config
-    local_env="${LOCAL_CFG}/labs/${LAB_NAME}/${POD_NAME}/fuel/config/local_env"
-    if [ -e "${local_env}" ]; then
-        echo "-- Sourcing local environment file"
-        source "${local_env}"
-    fi
 fi
 
 # releng wants us to use nothing else but opnfv.iso for now. We comply.
diff --git a/jjb/global/installer-params.yml b/jjb/global/installer-params.yml
index 8c01a9f..454cee8 100644 (file)
@@ -48,7 +48,7 @@
           description: 'Installer used for deploying OPNFV on this POD'
       - string:
           name: EXTERNAL_NETWORK
-          default: 'admin_floating_net'
+          default: 'floating_net'
           description: 'external network for test'
       - string:
           name: BRIDGE
diff --git a/jjb/releng/opnfv-docker-custom.yml b/jjb/releng/opnfv-docker-custom.yml
new file mode 100644 (file)
index 0000000..3e4a40d
--- /dev/null
@@ -0,0 +1,111 @@
+########################
+# Job configuration for opnfv-docker-custom
+########################
+- project:
+
+    name: opnfv-docker-custom
+
+    project: '{name}'
+
+    jobs:
+        - 'opnfv-docker-custom-verify-{stream}'
+        - 'opnfv-docker-custom-merge-{stream}'
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+        - danube:
+            branch: 'stable/{stream}'
+            gs-pathname: '/{stream}'
+            disabled: false
+
+########################
+# job templates
+########################
+
+- job-template:
+    name: 'opnfv-docker-custom-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: $GERRIT_PROJECT
+            branch: '{branch}'
+
+    scm:
+        - git-scm-gerrit
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'REG_EXP'
+                project-pattern: 'disabled'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                file-paths:
+                  - compare-type: ANT
+                    pattern: 'docker/**'
+
+    builders:
+        - verify-docker
+
+- job-template:
+    name: 'opnfv-docker-custom-merge-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: $GERRIT_PROJECT
+            branch: '{branch}'
+
+    scm:
+        - git-scm
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - change-merged-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'remerge'
+            projects:
+              - project-compare-type: 'REG_EXP'
+                project-pattern: 'disabled'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                file-paths:
+                  - compare-type: ANT
+                    pattern: 'docker/**'
+
+    builders:
+        - merge-docker
+
+- builder:
+    name: verify-docker
+    builders:
+        - shell: |
+            /bin/bash $WORKSPACE/verify-docker.sh
+
+- builder:
+    name: merge-docker
+    builders:
+        - shell: |
+            /bin/bash $WORKSPACE/merge-docker.sh
+
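Note: the new job templates delegate the actual work to verify-docker.sh and merge-docker.sh in the triggering project's workspace. To sanity-check that the YAML above expands cleanly, a local jenkins-job-builder run along these lines could be used; the job name assumes the master stream and the path assumes a releng checkout:

    # Expand the templates locally without touching a Jenkins instance
    pip install --user jenkins-job-builder
    jenkins-jobs test jjb/ opnfv-docker-custom-verify-master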
diff --git a/jjb/xci/bifrost-cleanup-job.yml b/jjb/xci/bifrost-cleanup-job.yml
index f901afe..d517761 100644 (file)
@@ -28,6 +28,7 @@
 - job-template:
     name: '{project}-bifrost-cleanup-{stream}'
 
+    disabled: true
     concurrent: false
 
     node: bifrost-verify-virtual
diff --git a/jjb/xci/bifrost-verify-jobs.yml b/jjb/xci/bifrost-verify-jobs.yml
index 137787d..5f0b657 100644 (file)
@@ -8,16 +8,6 @@
       - master:
           branch: '{stream}'
     # -------------------------------
-    # projects
-    # -------------------------------
-    project:
-      - 'openstack':
-          project-repo: 'https://git.openstack.org/openstack/bifrost'
-          clone-location: '$WORKSPACE/bifrost'
-      - 'opnfv':
-          project-repo: 'https://gerrit.opnfv.org/gerrit/releng-xci'
-          clone-location: '$WORKSPACE/releng-xci'
-    # -------------------------------
     # distros
     # -------------------------------
     distro:
@@ -48,7 +38,7 @@
     # jobs
     # -------------------------------
     jobs:
-      - '{project}-bifrost-verify-{distro}-{type}-{stream}'
+      - 'openstack-bifrost-verify-{distro}-{type}-{stream}'
 
 # -------------------------------
 # VM defaults
@@ -67,7 +57,7 @@
 # job templates
 # -------------------------------
 - job-template:
-    name: '{project}-bifrost-verify-{distro}-{type}-{stream}'
+    name: 'openstack-bifrost-verify-{distro}-{type}-{stream}'
 
     disabled: '{obj:disabled}'
 
           block-level: 'NODE'
 
     parameters:
-      - string:
-          name: PROJECT
-          default: '{project}'
       - string:
           name: PROJECT_REPO
-          default: '{project-repo}'
+          default: 'https://git.openstack.org/openstack/bifrost'
       - string:
           name: CLONE_LOCATION
-          default: '{clone-location}'
+          default: '$WORKSPACE/bifrost'
       - string:
           name: DISTRO
           default: '{distro}'
           default: 'true'
       - label:
           name: SLAVE_LABEL
-          default: 'infra-{type}-{distro}'
-      # yamllint disable rule:line-length
-      - string:
-          name: BIFROST_LOG_URL
-          default: 'http://artifacts.opnfv.org/cross-community-ci/openstack/bifrost/$GERRIT_NAME/$GERRIT_CHANGE_NUMBER/$GERRIT_PATCHSET_NUMBER/$JOB_NAME'
-      # yamllint enable rule:line-length
+          default: 'xci-virtual'
       - string:
           name: XCI_LOOP
           default: 'verify'
           wipe-workspace: true
 
     triggers:
-      - '{project}-gerrit-trigger':
+      - 'openstack-gerrit-trigger':
           branch: '{branch}'
 
     builders:
                 exclude-no-code-change: 'false'
             - comment-added-contains-event:
                 comment-contains-value: 'recheck'
-          custom-url: '* $JOB_NAME $BIFROST_LOG_URL/index.html'
           silent-start: true
           projects:
             - project-compare-type: 'PLAIN'
                   pattern: 'releasenotes/**'
           readable-message: true
 
-- trigger:
-    name: 'opnfv-gerrit-trigger'
-    triggers:
-      - gerrit:
-          server-name: 'gerrit.opnfv.org'
-          trigger-on:
-            - patchset-created-event:
-                exclude-drafts: 'false'
-                exclude-trivial-rebase: 'false'
-                exclude-no-code-change: 'false'
-            - draft-published-event
-            - comment-added-contains-event:
-                comment-contains-value: 'recheck'
-            - comment-added-contains-event:
-                comment-contains-value: 'reverify'
-          custom-url: '* $JOB_NAME $BIFROST_LOG_URL/index.html'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: 'releng-xci'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-              file-paths:
-                - compare-type: ANT
-                  pattern: 'bifrost/**'
-          readable-message: true
-
 # --------------------------
 # builder macros
 # --------------------------
diff --git a/jjb/xci/bifrost-verify.sh b/jjb/xci/bifrost-verify.sh
index 03d9afc..ef07309 100755 (executable)
@@ -11,100 +11,6 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-trap cleanup_and_upload EXIT
-
-function upload_logs() {
-    BIFROST_CONSOLE_LOG="${BUILD_URL}/consoleText"
-    BIFROST_GS_URL=${BIFROST_LOG_URL/http:/gs:}
-
-    # Make sure the old landing page is gone in case
-    # we break later on. We don't want to publish
-    # stale information.
-    # TODO: Maybe cleanup the entire $BIFROST_GS_URL directory
-    # before we upload the new data.
-    gsutil -q rm ${BIFROST_GS_URL}/index.html || true
-
-    echo "Uploading collected bifrost build logs to ${BIFROST_LOG_URL}"
-
-    if [[ -d ${WORKSPACE}/logs ]]; then
-        pushd ${WORKSPACE}/logs &> /dev/null
-        for x in *.log; do
-            echo "Compressing and uploading $x"
-            gsutil -q cp -Z ${x} ${BIFROST_GS_URL}/${x}
-        done
-        popd &> /dev/null
-    fi
-
-    echo "Generating the ${BIFROST_LOG_URL}/index.html landing page"
-    cat > ${WORKSPACE}/index.html <<EOF
-<html>
-<h1>Build results for <a href=https://$GERRIT_NAME/#/c/$GERRIT_CHANGE_NUMBER/$GERRIT_PATCHSET_NUMBER>$GERRIT_NAME/$GERRIT_CHANGE_NUMBER/$GERRIT_PATCHSET_NUMBER</a></h1>
-<h2>Job: <a href=${BUILD_URL}>$JOB_NAME</a></h2>
-<ul>
-<li><a href=${BIFROST_LOG_URL}/build_log.txt>build_log.txt</a></li>
-EOF
-
-    if [[ -d ${WORKSPACE}/logs ]]; then
-        pushd ${WORKSPACE}/logs &> /dev/null
-        for x in *.log; do
-            echo "<li><a href=${BIFROST_LOG_URL}/${x}>${x}</a></li>" >> ${WORKSPACE}/index.html
-        done
-        popd &> /dev/null
-    fi
-
-    cat >> ${WORKSPACE}/index.html << EOF
-</ul>
-</html>
-EOF
-
-    # Upload landing page
-    echo "Uploading the landing page"
-    gsutil -q cp ${WORKSPACE}/index.html ${BIFROST_GS_URL}/index.html
-    rm -f ${WORKSPACE}/index.html
-
-    # Finally, download and upload the entire build log so we can retain
-    # as much build information as possible
-    echo "Uploading the final console output"
-    curl -s -L ${BIFROST_CONSOLE_LOG} > ${WORKSPACE}/build_log.txt
-    gsutil -q cp -Z ${WORKSPACE}/build_log.txt ${BIFROST_GS_URL}/build_log.txt
-    rm -f ${WORKSPACE}/build_log.txt
-}
-
-function fix_ownership() {
-    if [ -z "${JOB_URL+x}" ]; then
-        echo "Not running as part of Jenkins. Handle the logs manually."
-    else
-        # Make sure cache exists
-        [[ ! -d ${HOME}/.cache ]] && mkdir ${HOME}/.cache
-
-        sudo chown -R jenkins:jenkins $WORKSPACE
-        sudo chown -R jenkins:jenkins ${HOME}/.cache
-    fi
-}
-
-function cleanup_and_upload() {
-    original_exit=$?
-    echo "Job exit code: $original_exit"
-    # Turn off errexit
-    set +o errexit
-    fix_ownership
-    upload_logs
-    exit $original_exit
-}
-
-# check distro to see if we support it
-if [[ ! "$DISTRO" =~ (xenial|centos7|suse) ]]; then
-    echo "Distro $DISTRO is not supported!"
-    exit 1
-fi
-
-# remove previously cloned repos
-/bin/rm -rf $WORKSPACE/bifrost $WORKSPACE/releng-xci
-
-# Fix up permissions
-fix_ownership
-
-# clone all the repos first and checkout the patch afterwards
 git clone https://git.openstack.org/openstack/bifrost $WORKSPACE/bifrost
 git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE/releng-xci
 
@@ -115,10 +21,9 @@ git fetch $PROJECT_REPO $GERRIT_REFSPEC && sudo git checkout FETCH_HEAD
 # combine opnfv and upstream scripts/playbooks
 /bin/cp -rf $WORKSPACE/releng-xci/bifrost/* $WORKSPACE/bifrost/
 
-# cleanup remnants of previous deployment
-cd $WORKSPACE/bifrost
-sudo -H -E ./scripts/destroy-env.sh
-
+cd $WORKSPACE/releng-xci
+cat > bifrost_test.sh<<EOF
+cd ~/bifrost
 # provision 3 VMs; xcimaster, controller, and compute
 cd $WORKSPACE/bifrost
 ./scripts/bifrost-provision.sh
@@ -128,3 +33,11 @@ cd $WORKSPACE/bifrost
 source env-vars
 ironic node-list
 sudo -H -E virsh list
+EOF
+chmod a+x bifrost_test.sh
+
+./xci/scripts/vm/start-new-vm.sh $DISTRO
+
+rsync -a $WORKSPACE/bifrost ${DISTRO,,}_xci_vm:~/bifrost
+
+ssh ${DISTRO,,}_xci_vm "cd ~/bifrost && ./bifrost_test.sh"
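Note: the reworked script provisions a per-distro VM and drives bifrost over ssh through the ${DISTRO,,}_xci_vm host alias which, per the merge subject, is expected to be supplied by an alternative ssh configuration file rather than ~/.ssh/config. The sketch below only illustrates that idea; the config path, host name, address and key are assumptions, not part of this change:

    # hypothetical alternative ssh configuration file, e.g. ~/.ssh/xci-vm-config
    Host opensuse_xci_vm
        HostName 192.168.122.2
        User devuser
        IdentityFile ~/.ssh/id_rsa_for_dib
        StrictHostKeyChecking no

    # consumers would then select it explicitly instead of the default config
    rsync -a -e "ssh -F ~/.ssh/xci-vm-config" $WORKSPACE/bifrost opensuse_xci_vm:~/bifrost
    ssh -F ~/.ssh/xci-vm-config opensuse_xci_vm "cd ~/bifrost && ./bifrost_test.sh"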
diff --git a/jjb/xci/xci-verify-jobs.yml b/jjb/xci/xci-verify-jobs.yml
index 1fde17a..93ca187 100644 (file)
           #!/bin/bash
 
           # skip the deployment if the patch doesn't impact the deployment
-          if [[ "$GERRIT_TOPIC" =~ 'skip-verify' ]]; then
+          if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
               echo "Skipping the deployment!"
               exit 0
           fi
           #!/bin/bash
 
           # skip the healthcheck if the patch doesn't impact the deployment
-          if [[ "$GERRIT_TOPIC" =~ 'skip-verify' ]]; then
+          if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
               echo "Skipping the healthcheck!"
               exit 0
           fi
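Note: dropping the quotes makes the pattern a plain extended regular expression, so the alternation matches any topic containing either substring, whereas the old quoted form only matched topics containing the literal string skip-verify. A minimal, self-contained check of the new behaviour (topic values invented for illustration):

    #!/bin/bash
    for GERRIT_TOPIC in skip-verify skip-deployment-rework docs-update; do
        if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
            echo "$GERRIT_TOPIC -> deployment skipped"
        else
            echo "$GERRIT_TOPIC -> deployment runs"
        fi
    done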
diff --git a/utils/test/reporting/reporting/functest/reporting-status.py b/utils/test/reporting/reporting/functest/reporting-status.py
index 267803e..c71e00f 100755 (executable)
@@ -230,12 +230,26 @@ for version in versions:
                 # Evaluate the results for scenario validation
                 # **********************************************
                 # the validation criteria = nb runnable tests x 3
-                # because each test case = 0,1,2 or 3
-                scenario_criteria = nb_test_runnable_for_this_scenario * 3
-                # if 0 runnable tests set criteria at a high value
-                if scenario_criteria < 1:
-                    scenario_criteria = 50  # conf.MAX_SCENARIO_CRITERIA
+                # because each test case can get
+                # 0 point (never PASS)
+                # 1 point at least (PASS once over the time window)
+                # 2 points (PASS more than once but 1 FAIL on the last 4)
+                # 3 points PASS on the last 4 iterations
+                # e.g. 1 scenario = 10 cases
+                # 1 iteration : max score = 10 (10x1)
+                # 2 iterations : max score = 20 (10x2)
+                # 3 iterations : max score = 20
+                # 4 or more iterations : max score = 30 (1x30)
+                if len(s_result) > 3:
+                    k_score = 3
+                elif len(s_result) < 2:
+                    k_score = 1
+                else:
+                    k_score = 2
+
+                scenario_criteria = nb_test_runnable_for_this_scenario*k_score
 
+                # score for reporting
                 s_score = str(scenario_score) + "/" + str(scenario_criteria)
                 s_score_percent = rp_utils.getScenarioPercent(
                     scenario_score,
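Note: the new criteria derivation caps what a scenario can score by how many iterations are available in the reporting window (len(s_result)): fewer than 2 iterations allow 1 point per runnable test, 2 or 3 iterations allow 2, and 4 or more allow the full 3. A standalone sketch of the same rule, with illustrative names that are not part of the reporting module:

    def max_scenario_criteria(nb_runnable_tests, nb_iterations):
        """Upper bound of the scenario score for a given window size."""
        if nb_iterations > 3:
            k_score = 3      # enough history to check PASS on the last 4 runs
        elif nb_iterations < 2:
            k_score = 1      # a single run can only prove one PASS
        else:
            k_score = 2
        return nb_runnable_tests * k_score

    # A scenario with 10 runnable cases:
    # 1 iteration -> 10, 2 or 3 iterations -> 20, 4 or more -> 30
    assert max_scenario_criteria(10, 4) == 30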