Merge "xci: Log functest output before failure"
author Fatih Degirmenci <fdegir@gmail.com>
Sat, 17 Mar 2018 11:02:15 +0000 (11:02 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Sat, 17 Mar 2018 11:02:15 +0000 (11:02 +0000)
19 files changed:
.gitmodules [new file with mode: 0644]
jjb/apex/apex-deploy.sh
jjb/apex/apex-download-artifact.sh
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/bottlenecks/bottlenecks-ci-jobs.yml
jjb/bottlenecks/bottlenecks-run-suite.sh
jjb/compass4nfv/compass-ci-jobs.yml
jjb/container4nfv/yardstick-arm64.yml
jjb/daisy4nfv/daisy-daily-jobs.yml
jjb/dovetail/dovetail-run.sh
jjb/functest/functest-alpine.sh
jjb/functest/functest-k8.sh
jjb/global-jjb [new submodule]
jjb/xci/xci-merge-jobs.yml
jjb/xci/xci-set-scenario.sh
jjb/xci/xci-verify-jobs.yml
modules/opnfv/deployment/compass/adapter_container.py [new file with mode: 0644]
modules/opnfv/deployment/factory.py

diff --git a/.gitmodules b/.gitmodules
new file mode 100644 (file)
index 0000000..07b28be
--- /dev/null
@@ -0,0 +1,3 @@
+[submodule "jjb/global-jjb"]
+       path = jjb/global-jjb
+       url = https://github.com/lfit/releng-global-jjb
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index b8ae75a..123db3e 100755 (executable)
@@ -31,7 +31,9 @@ elif [[ "$DEPLOY_SCENARIO" == *gate* ]]; then
 fi
 
 # Dev or RPM/ISO build
-if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
+# For upstream deployments we currently only use git repo and not RPM
+# Need to decide after Fraser if we want to use RPM or not for upstream
+if [[ "$ARTIFACT_VERSION" =~ dev || "$DEPLOY_SCENARIO" =~ "upstream" ]]; then
   # Settings for deploying from git workspace
   DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
   NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
@@ -134,6 +136,11 @@ else
   DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
 fi
 
+if [[ "$DEPLOY_SCENARIO" =~ "upstream" ]]; then
+  echo "Upstream deployment detected"
+  DEPLOY_CMD="${DEPLOY_CMD} --upstream"
+fi
+
 if [ "$IPV6_FLAG" == "True" ]; then
   NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
 elif echo ${DEPLOY_SCENARIO} | grep fdio; then
diff --git a/jjb/apex/apex-download-artifact.sh b/jjb/apex/apex-download-artifact.sh
index 68baf59..c12406c 100755 (executable)
@@ -10,7 +10,10 @@ echo
 
 [[ -d $BUILD_DIRECTORY ]] || mkdir -p $BUILD_DIRECTORY
 
-if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
+# if upstream we do not need to download anything
+if [[ "$DEPLOY_SCENARIO" =~ upstream ]]; then
+  echo "Upstream deployment detected, skipping download artifact"
+elif [[ "$ARTIFACT_VERSION" =~ dev ]]; then
   # dev build
   GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
   export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index cf29b92..e19a90e 100644 (file)
               abort-all-job: false
               git-revision: false
       - multijob:
-          name: Dovetail
+          name: Dovetail-proposed_tests
           condition: ALWAYS
           projects:
             - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
               predefined-parameters:
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
               kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
+              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/
+                                 && $BUILD_NUMBER % 2 == 1"
+              abort-all-job: false
+              git-revision: false
+      - multijob:
+          name: Dovetail-default
+          condition: ALWAYS
+          projects:
+            - name: 'dovetail-apex-baremetal-default-{scenario_stream}'
+              node-parameters: true
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+              kill-phase-on: NEVER
+              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/
+                                 && $BUILD_NUMBER % 2 == 0"
               abort-all-job: false
               git-revision: false
       - multijob:
diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2
index ab65c4e..ecc6f27 100644 (file)
               abort-all-job: false
               git-revision: false
       - multijob:
-          name: Dovetail
+          name: Dovetail-proposed_tests
           condition: ALWAYS
           projects:
             - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
               predefined-parameters:
                 DEPLOY_SCENARIO=$DEPLOY_SCENARIO
               kill-phase-on: NEVER
-              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
+              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/
+                                 && $BUILD_NUMBER % 2 == 1"
+              abort-all-job: false
+              git-revision: false
+      - multijob:
+          name: Dovetail-default
+          condition: ALWAYS
+          projects:
+            - name: 'dovetail-apex-baremetal-default-{scenario_stream}'
+              node-parameters: true
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+              kill-phase-on: NEVER
+              enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/
+                                 && $BUILD_NUMBER % 2 == 0"
               abort-all-job: false
               git-revision: false
       - multijob:
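
The new enable-conditions in apex.yml and apex.yml.j2 alternate the two Dovetail phases by build number: only os-nosdn-nofeature-ha and os-odl-bgpvpn-ha scenarios run Dovetail at all, odd builds run the proposed_tests suite and even builds run the default suite. A minimal sketch of that selection logic, assuming illustrative names (the real check is a Groovy enable-condition inside the multijob, not this code):

    import re

    def select_dovetail_suite(build_number, deploy_scenario):
        # Mirrors the enable-conditions above: non-matching scenarios skip Dovetail,
        # matching scenarios alternate between the two suites per build.
        if not re.fullmatch(r'os-(nosdn-nofeature|odl-bgpvpn)-ha', deploy_scenario):
            return None
        return 'proposed_tests' if build_number % 2 == 1 else 'default'
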
diff --git a/jjb/bottlenecks/bottlenecks-ci-jobs.yml b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
index 1d31f27..072117a 100644 (file)
     suite:
       - 'posca_stress_traffic'
       - 'posca_stress_ping'
+      - 'posca_factor_multistack_storage'
+      - 'posca_factor_multistack_storage_parallel'
+      - 'posca_feature_moon_resources'
+      - 'posca_feature_moon_tenants'
+      - 'posca_feature_vnf_scale_out'
+      - 'posca_factor_soak_throughputs'
 
     jobs:
       - 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
index ebd905e..7a3db00 100644 (file)
@@ -136,13 +136,12 @@ if [[ $SUITE_NAME == *posca* ]]; then
     # Running test cases through Bottlenecks docker
     if [[ $SUITE_NAME == posca_stress_traffic ]]; then
         TEST_CASE=posca_factor_system_bandwidth
-        testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
-        echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
-        ${testcase_cmd} >$redirect
     elif [[ $SUITE_NAME == posca_stress_ping ]]; then
         TEST_CASE=posca_factor_ping
-        testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
-        echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
-        ${testcase_cmd} >$redirect
+    else
+        TEST_CASE=$SUITE_NAME
     fi
+    testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
+    echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
+    ${testcase_cmd} >$redirect
 fi
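
The refactor above collapses the per-suite branches into a single docker exec invocation: the two legacy suite names keep their historical test-case mapping, and any other posca suite name (including the newly added ones) is passed through unchanged. A rough Python equivalent of that dispatch, with the mapping taken from the script and everything else illustrative:

    LEGACY_SUITE_MAP = {
        'posca_stress_traffic': 'posca_factor_system_bandwidth',
        'posca_stress_ping': 'posca_factor_ping',
    }

    def resolve_test_case(suite_name):
        # New suites such as posca_factor_soak_throughputs fall through as-is.
        return LEGACY_SUITE_MAP.get(suite_name, suite_name)
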
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 357f24e..44cb84d 100644 (file)
                     build-step-failure-threshold: 'never'
                     failure-threshold: 'never'
                     unstable-threshold: 'FAILURE'
+                - project: 'bottlenecks-compass-posca_factor_multistack_storage_parallel-{pod}-daily-{stream}'
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO={scenario}
+                  block: true
+                  same-node: true
+                  block-thresholds:
+                    build-step-failure-threshold: 'never'
+                    failure-threshold: 'never'
+                    unstable-threshold: 'FAILURE'
+                - project: 'bottlenecks-compass-posca_factor_soak_throughputs-{pod}-daily-{stream}'
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO={scenario}
+                  block: true
+                  same-node: true
+                  block-thresholds:
+                    build-step-failure-threshold: 'never'
+                    failure-threshold: 'never'
+                    unstable-threshold: 'FAILURE'
       - conditional-step:
           condition-kind: and
           condition-operands:
diff --git a/jjb/container4nfv/yardstick-arm64.yml b/jjb/container4nfv/yardstick-arm64.yml
index bd1d8aa..9b8b271 100644 (file)
@@ -5,7 +5,7 @@
 - project:
     name: yardstick-arm64
 
-    project: '{name}'
+    project: 'yardstick'
 
     # -------------------------------
     # BRANCH ANCHORS
           name: CI_DEBUG
           default: 'false'
           description: "Show debut output information"
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: "Used for overriding the GIT URL coming from Global Jenkins configuration."
 
     scm:
       - git-scm
diff --git a/jjb/daisy4nfv/daisy-daily-jobs.yml b/jjb/daisy4nfv/daisy-daily-jobs.yml
index 043e40f..cc2dc5a 100644 (file)
           use-build-blocker: true
           blocking-jobs:
             - 'daisy-kolla-build-.*'
-            - '{installer}-daily-(build|deploy|test)-(euphrates|master)'
-            - '{installer}-.*-(baremetal|virtual|zte-pod9)-daily-(euphrates|master)'
+            - '{installer}-(build|deploy|test)-daily-(euphrates|master)'
+            - '{installer}-deploy-(baremetal|virtual|zte-pod9)-daily-(euphrates|master)'
             - '(functest|yardstick)-{installer}-(baremetal|virtual|zte-pod9)-daily-(euphrates|master)'
           block-level: 'NODE'
 
 - trigger:
     name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod9-daily-master-trigger'
     triggers:
-      - timed: '0 10 * * *'
+      - timed: '0 10,20 * * *'
 # Basic HA Scenarios
 - trigger:
     name: 'daisy-os-nosdn-nofeature-ha-zte-pod9-daily-master-trigger'
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
index ec879e3..a5a95f4 100755 (executable)
@@ -24,6 +24,9 @@ mkdir -p ${DOVETAIL_HOME}
 DOVETAIL_CONFIG=${DOVETAIL_HOME}/pre_config
 mkdir -p ${DOVETAIL_CONFIG}
 
+DOVETAIL_IMAGES=${DOVETAIL_HOME}/images
+mkdir -p ${DOVETAIL_IMAGES}
+
 ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
 
 sshkey=""
@@ -189,7 +192,7 @@ if [[ ! -f ${ubuntu_image} ]]; then
     echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
     wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${image_path}
 fi
-sudo cp ${ubuntu_image} ${DOVETAIL_CONFIG}
+sudo cp ${ubuntu_image} ${DOVETAIL_IMAGES}
 
 # functest needs to download this image first before running
 cirros_image=${image_path}/cirros-0.3.5-x86_64-disk.img
@@ -197,7 +200,7 @@ if [[ ! -f ${cirros_image} ]]; then
     echo "Download image cirros-0.3.5-x86_64-disk.img ..."
     wget -q -nc http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img -P ${image_path}
 fi
-sudo cp ${cirros_image} ${DOVETAIL_CONFIG}
+sudo cp ${cirros_image} ${DOVETAIL_IMAGES}
 
 # snaps_smoke test case needs to download this image first before running
 ubuntu14_image=${image_path}/ubuntu-14.04-server-cloudimg-amd64-disk1.img
@@ -205,7 +208,7 @@ if [[ ! -f ${ubuntu14_image} ]]; then
     echo "Download image ubuntu-14.04-server-cloudimg-amd64-disk1.img ..."
     wget -q -nc https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img -P ${image_path}
 fi
-sudo cp ${ubuntu14_image} ${DOVETAIL_CONFIG}
+sudo cp ${ubuntu14_image} ${DOVETAIL_IMAGES}
 
 # cloudify_ims test case needs to download these 2 images first before running
 cloudify_image=${image_path}/cloudify-manager-premium-4.0.1.qcow2
@@ -213,13 +216,13 @@ if [[ ! -f ${cloudify_image} ]]; then
     echo "Download image cloudify-manager-premium-4.0.1.qcow2 ..."
     wget -q -nc http://repository.cloudifysource.org/cloudify/4.0.1/sp-release/cloudify-manager-premium-4.0.1.qcow2 -P ${image_path}
 fi
-sudo cp ${cloudify_image} ${DOVETAIL_CONFIG}
+sudo cp ${cloudify_image} ${DOVETAIL_IMAGES}
 trusty_image=${image_path}/trusty-server-cloudimg-amd64-disk1.img
 if [[ ! -f ${trusty_image} ]]; then
     echo "Download image trusty-server-cloudimg-amd64-disk1.img ..."
     wget -q -nc http://cloud-images.ubuntu.com/trusty/current/trusty-server-cloudimg-amd64-disk1.img -P ${image_path}
 fi
-sudo cp ${trusty_image} ${DOVETAIL_CONFIG}
+sudo cp ${trusty_image} ${DOVETAIL_IMAGES}
 
 opts="--privileged=true -id"
 
@@ -236,10 +239,8 @@ fi
 echo "Dovetail: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
 docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
 
-env4bgpvpn="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}"
-
 cmd="docker run ${opts} -e DOVETAIL_HOME=${DOVETAIL_HOME} ${docker_volume} ${dovetail_home_volume} \
-     ${sshkey} ${env4bgpvpn} ${DOCKER_REPO}:${DOCKER_TAG} /bin/bash"
+     ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} /bin/bash"
 echo "Dovetail: running docker run command: ${cmd}"
 ${cmd} >${redirect}
 sleep 5
diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh
index 432bbbb..788a4cd 100755 (executable)
@@ -165,24 +165,19 @@ volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol} ${cacert_file
 
 set +e
 
-
-if [[ ${DEPLOY_SCENARIO} =~ ^os-.* ]]; then
-    [[ ${BRANCH##*/} == "master" ]] && check_os_deployment
-    if [ ${FUNCTEST_MODE} == 'testcase' ]; then
-        echo "FUNCTEST_MODE=testcase, FUNCTEST_SUITE_NAME=${FUNCTEST_SUITE_NAME}"
-        run_test ${FUNCTEST_SUITE_NAME}
-    elif [ ${FUNCTEST_MODE} == 'tier' ]; then
-        echo "FUNCTEST_MODE=tier, FUNCTEST_TIER=${FUNCTEST_TIER}"
-        tiers=(${FUNCTEST_TIER})
-        run_tiers ${tiers}
+[[ ${BRANCH##*/} == "master" ]] && check_os_deployment
+if [ ${FUNCTEST_MODE} == 'testcase' ]; then
+    echo "FUNCTEST_MODE=testcase, FUNCTEST_SUITE_NAME=${FUNCTEST_SUITE_NAME}"
+    run_test ${FUNCTEST_SUITE_NAME}
+elif [ ${FUNCTEST_MODE} == 'tier' ]; then
+    echo "FUNCTEST_MODE=tier, FUNCTEST_TIER=${FUNCTEST_TIER}"
+    tiers=(${FUNCTEST_TIER})
+    run_tiers ${tiers}
+else
+    if [ ${DEPLOY_TYPE} == 'baremetal' ]; then
+        tiers=(healthcheck smoke features vnf parser)
     else
-        if [ ${DEPLOY_TYPE} == 'baremetal' ]; then
-            tiers=(healthcheck smoke features vnf parser)
-        else
-            tiers=(healthcheck smoke features)
-        fi
-        run_tiers ${tiers}
+        tiers=(healthcheck smoke features)
     fi
-else
-    echo "k8 deployment has not been supported by functest yet"
+    run_tiers ${tiers}
 fi
diff --git a/jjb/functest/functest-k8.sh b/jjb/functest/functest-k8.sh
index 3f08e4a..115ff42 100755 (executable)
@@ -46,6 +46,8 @@ envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} \
 
 DOCKER_TAG=`[[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/}`
 
+set +e
+
 FUNCTEST_IMAGE=opnfv/functest-kubernetes:${DOCKER_TAG}
 echo "Pulling Docker image ${FUNCTEST_IMAGE} ..."
 docker pull ${FUNCTEST_IMAGE}>/dev/null
diff --git a/jjb/global-jjb b/jjb/global-jjb
new file mode 160000 (submodule)
index 0000000..779110b
--- /dev/null
@@ -0,0 +1 @@
+Subproject commit 779110b5cd63f3eabb63598a1be79d9b9ba85464
diff --git a/jjb/xci/xci-merge-jobs.yml b/jjb/xci/xci-merge-jobs.yml
index 92eea19..8a3a7cf 100644 (file)
@@ -2,6 +2,7 @@
 - project:
     name: 'opnfv-xci-merge'
 
+    project: releng-xci
     # -------------------------------
     # branches
     # -------------------------------
       - opensuse:
           disabled: false
     # -------------------------------
+    # scenarios
+    # -------------------------------
+    scenario:
+      - 'os-odl-sfc':
+          project: sfc
+    # -------------------------------
     # type
     # -------------------------------
     type:
       - virtual
     # -------------------------------
-    # patch verification phases
+    # postmerge promotion phases
     # -------------------------------
     phase:
       - 'deploy'
       - 'healthcheck'
-      - 'promote'
-    # -------------------------------
-    # scenarios
-    # -------------------------------
-    scenario:
-      - 'os-odl-sfc':
-          project: sfc
     # -------------------------------
     # jobs
     # -------------------------------
     jobs:
-      - 'xci-{scenario}-{type}-{distro}-merge-{stream}'
-      - 'xci-{phase}-{type}-{distro}-merge-{stream}'
+      - 'xci-{scenario}-{type}-merge-{stream}'
+      - 'xci-{scenario}-{type}-promote-merge-{stream}'
+      - 'xci-merge-{distro}-{type}-{stream}'
+      - 'xci-merge-{distro}-{phase}-{type}-{stream}'
 # -------------------------------
 # job templates
 # -------------------------------
 - job-template:
-    name: 'xci-{scenario}-{type}-{distro}-merge-{stream}'
+    name: 'xci-{scenario}-{type}-merge-{stream}'
 
     project-type: multijob
 
 
     properties:
       - logrotate-default
-      - build-blocker:
-          use-build-blocker: true
-          blocking-jobs:
-            - 'xci-verify-{distro}-.*'
-            - 'xci-.*-{distro}-merge-.*'
-            - 'openstack-bifrost-verify-{distro}-.*'
-            - 'xci-osa-verify-{distro}-.*'
-            - 'xci-osa-periodic-{distro}-.*'
-            - 'xci-(os|k8s).*?-virtual-{distro}-.*'
-          block-level: 'NODE'
-      - throttle:
-          max-per-node: 2
-          max-total: 10
-          categories:
-            - xci-verify-virtual
-          option: category
 
     wrappers:
       - ssh-agent-wrapper
       - fix-workspace-permissions
 
     scm:
-      - git:
-          credentials-id: '$SSH_CREDENTIAL_ID'
-          url: 'https://gerrit.opnfv.org/gerrit/releng-xci'
-          branches:
-            - 'origin/master'
-          timeout: 15
+      - git-scm-gerrit
 
     triggers:
       - gerrit:
 
     parameters:
       - project-parameter:
-          project: '{obj:project}'
+          project: $GERRIT_PROJECT
           branch: '{branch}'
       - label:
           name: SLAVE_LABEL
           default: 'xci-virtual'
       - string:
-          name: DEPLOY_SCENARIO
-          default: '{scenario}'
+          name: DISTRO
+          default: 'all'
       - string:
           name: CLEAN_DIB_IMAGES
           default: 'true'
 
     builders:
       - description-setter:
-          description: "Built on $NODE_NAME"
+          description: "Node: $NODE_NAME"
+      - 'xci-merge-set-scenario-macro'
+      - multijob:
+          name: deploy and test
+          condition: SUCCESSFUL
+          projects:
+            - name: 'xci-merge-opensuse-{type}-{stream}'
+              current-parameters: false
+              predefined-parameters: |
+                DISTRO=opensuse
+                CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
+                GERRIT_PROJECT=$GERRIT_PROJECT
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              node-parameters: false
+              kill-phase-on: FAILURE
+              abort-all-job: true
+            - name: 'xci-merge-ubuntu-{type}-{stream}'
+              current-parameters: false
+              predefined-parameters: |
+                DISTRO=ubuntu
+                CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
+                GERRIT_PROJECT=$GERRIT_PROJECT
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              node-parameters: false
+              kill-phase-on: FAILURE
+              abort-all-job: true
+            - name: 'xci-merge-centos-{type}-{stream}'
+              current-parameters: false
+              predefined-parameters: |
+                DISTRO=centos
+                CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
+                GERRIT_PROJECT=$GERRIT_PROJECT
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              node-parameters: false
+              kill-phase-on: FAILURE
+              abort-all-job: true
+      - inject:
+          properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
+      - multijob:
+          name: promote
+          condition: SUCCESSFUL
+          projects:
+            - name: 'xci-{scenario}-{type}-promote-merge-{stream}'
+              current-parameters: true
+              predefined-parameters: |
+                GERRIT_PROJECT=$GERRIT_PROJECT
+                GERRIT_BRANCH=$GERRIT_BRANCH
+                GERRIT_REFSPEC=$GERRIT_REFSPEC
+                GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+              node-parameters: true
+              kill-phase-on: NEVER
+              abort-all-job: true
+
+- job-template:
+    name: 'xci-merge-{distro}-{type}-{stream}'
+
+    project-type: multijob
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+      - logrotate-default
+      - build-blocker:
+          use-build-blocker: true
+          blocking-jobs:
+            - 'xci-verify-{distro}-.*'
+            - 'xci-merge-{distro}-.*'
+            - 'openstack-bifrost-verify-{distro}-.*'
+            - 'xci-osa-verify-{distro}-.*'
+            - 'xci-osa-periodic-{distro}-.*'
+          block-level: 'NODE'
+      - throttle:
+          max-per-node: 1
+          max-total: 3
+          categories:
+            - xci-verify-virtual
+          option: category
+
+    wrappers:
+      - ssh-agent-wrapper
+      - build-timeout:
+          timeout: 240
+      - fix-workspace-permissions
+
+    scm:
+      - git-scm-gerrit
+
+    parameters:
+      - project-parameter:
+          project: $GERRIT_PROJECT
+          branch: '{branch}'
+      - label:
+          name: SLAVE_LABEL
+          default: 'xci-virtual'
+      - string:
+          name: DISTRO
+          default: '{distro}'
+      - string:
+          name: CLEAN_DIB_IMAGES
+          default: 'true'
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+
+    builders:
+      - 'xci-merge-set-scenario-macro'
+      - inject:
+          properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
+      - description-setter:
+          description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
       - multijob:
           name: deploy
           condition: SUCCESSFUL
           projects:
-            - name: 'xci-deploy-{type}-{distro}-merge-{stream}'
+            - name: 'xci-merge-{distro}-deploy-{type}-{stream}'
               current-parameters: true
               predefined-parameters: |
-                PROJECT=$PROJECT
                 DISTRO={distro}
-                DEPLOY_SCENARIO={scenario}
                 CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
+                GERRIT_PROJECT=$GERRIT_PROJECT
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
           name: healthcheck
           condition: SUCCESSFUL
           projects:
-            - name: 'xci-healthcheck-{type}-{distro}-merge-{stream}'
+            - name: 'xci-merge-{distro}-healthcheck-{type}-{stream}'
               current-parameters: true
               predefined-parameters: |
-                PROJECT=$PROJECT
                 DISTRO={distro}
-                DEPLOY_SCENARIO={scenario}
                 CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
-                FUNCTEST_MODE=tier
-                FUNCTEST_TIER=healthcheck
+                GERRIT_PROJECT=$GERRIT_PROJECT
                 GERRIT_BRANCH=$GERRIT_BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                FUNCTEST_MODE=tier
+                FUNCTEST_TIER=healthcheck
               node-parameters: true
               kill-phase-on: NEVER
               abort-all-job: true
                 !include-raw: ./xci-cleanup.sh
 
 - job-template:
-    name: 'xci-{phase}-{type}-{distro}-merge-{stream}'
+    name: 'xci-merge-{distro}-{phase}-{type}-{stream}'
 
     disabled: false
 
     parameters:
       - string:
           name: PROJECT
-          default: 'releng-xci'
+          default: $GERRIT_PROJECT
       - string:
           name: DISTRO
           default: 'ubuntu'
-      - string:
-          name: DEPLOY_SCENARIO
-          default: 'os-nosdn-nofeature-noha'
       - string:
           name: FUNCTEST_MODE
           default: 'tier'
       - string:
           name: OPNFV_RELENG_DEV_PATH
           default: $WORKSPACE/
-      - string:
-          name: INSTALLER_TYPE
-          default: 'osa'
       - string:
           name: GIT_BASE
           default: https://gerrit.opnfv.org/gerrit/$PROJECT
           description: 'Git URL to use on this Jenkins Slave'
 
     wrappers:
+      - inject:
+          properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
       - ssh-agent-wrapper
       - build-timeout:
           timeout: 240
       - fix-workspace-permissions
 
     scm:
-      - git:
-          credentials-id: '$SSH_CREDENTIAL_ID'
-          url: 'https://gerrit.opnfv.org/gerrit/releng-xci'
-          branches:
-            - 'origin/master'
-          timeout: 15
+      - git-scm-gerrit
 
     builders:
       - description-setter:
-          description: "Built on $NODE_NAME"
+          description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
       - 'xci-merge-{phase}-macro'
 
+- job-template:
+    name: 'xci-{scenario}-{type}-promote-merge-{stream}'
+
+    disabled: false
+
+    concurrent: false
+
+    properties:
+      - logrotate-default
+
+    parameters:
+      - string:
+          name: PROJECT
+          default: $GERRIT_PROJECT
+      - string:
+          name: DISTRO
+          default: 'all'
+      - string:
+          name: OPNFV_RELENG_DEV_PATH
+          default: $WORKSPACE/
+      - string:
+          name: GIT_BASE
+          default: https://gerrit.opnfv.org/gerrit/$PROJECT
+          description: 'Git URL to use on this Jenkins Slave'
+
+    wrappers:
+      - inject:
+          properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
+      - ssh-agent-wrapper
+      - build-timeout:
+          timeout: 240
+      - fix-workspace-permissions
+
+    builders:
+      - description-setter:
+          description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
+      - 'xci-merge-promote-macro'
+
 # -------------------------------
 # builder macros
 # -------------------------------
 - builder:
-    name: 'xci-merge-deploy-macro'
+    name: 'xci-merge-set-scenario-macro'
     builders:
       - shell:
-          !include-raw: ./xci-start-new-vm.sh
-      - shell:
-          !include-raw: ./xci-start-deployment.sh
+          !include-raw: ./xci-set-scenario.sh
+
+- builder:
+    name: 'xci-merge-deploy-macro'
+    builders:
+      - shell: |
+          #!/bin/bash
+          echo "Hello World"
 
 - builder:
     name: 'xci-merge-healthcheck-macro'
     builders:
-      - shell:
-          !include-raw: ./xci-run-functest.sh
+      - shell: |
+          #!/bin/bash
+          echo "Hello World"
 
 - builder:
     name: 'xci-merge-promote-macro'
     builders:
-      - shell:
-          !include-raw: ./xci-promote.sh
+      - shell: |
+          #!/bin/bash
+          echo "Hello World"
diff --git a/jjb/xci/xci-set-scenario.sh b/jjb/xci/xci-set-scenario.sh
index 62b26c0..c602957 100755 (executable)
@@ -108,6 +108,12 @@ function determine_generic_scenario() {
                 ;;
             esac
     done
+
+    # extract releng-xci sha
+    RELENG_XCI_SHA=$(cd $WORKSPACE && git rev-parse HEAD)
+
+    # extract scenario sha which is same as releng-xci sha for generic scenarios
+    SCENARIO_SHA=$RELENG_XCI_SHA
 }
 
 # This function determines the impacted external scenario by processing the Gerrit
@@ -134,6 +140,12 @@ function determine_external_scenario() {
 
     # process the diff to find out what scenario(s) are impacted - there should only be 1
     DEPLOY_SCENARIO+=$(git diff HEAD^..HEAD --name-only | grep scenarios | awk -F '[/|/]' '{print $2}' | uniq)
+
+    # extract releng-xci sha
+    RELENG_XCI_SHA=$(cd $WORKSPACE && git rev-parse HEAD)
+
+    # extract scenario sha
+    SCENARIO_SHA=$(cd $WORK_DIRECTORY/$GERRIT_PROJECT && git rev-parse HEAD)
 }
 
 echo "Determining the impacted scenario"
@@ -179,9 +191,16 @@ esac
 
 # save the installer and scenario names into java properties file
 # so they can be injected to downstream jobs via envInject
-echo "Recording the installer '$INSTALLER_TYPE' and scenario '${DEPLOY_SCENARIO[0]}' for downstream jobs"
+echo "Recording the installer '$INSTALLER_TYPE' and scenario '${DEPLOY_SCENARIO[0]}' and SHAs for downstream jobs"
 echo "INSTALLER_TYPE=$INSTALLER_TYPE" > $WORK_DIRECTORY/scenario.properties
 echo "DEPLOY_SCENARIO=$DEPLOY_SCENARIO" >> $WORK_DIRECTORY/scenario.properties
+echo "RELENG_XCI_SHA=$RELENG_XCI_SHA" >> $WORK_DIRECTORY/scenario.properties
+echo "SCENARIO_SHA=$SCENARIO_SHA" >> $WORK_DIRECTORY/scenario.properties
+
+# skip scenario support check if the job is promotion job
+if [[ "$JOB_NAME" =~ (os|k8) ]]; then
+    exit 0
+fi
 
 # skip the deployment if the scenario is not supported on this distro
 OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
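
xci-set-scenario.sh now also records RELENG_XCI_SHA and SCENARIO_SHA in scenario.properties, and the merge jobs pick the file up through the envInject step pointed at /tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties. A minimal sketch of reading such a KEY=VALUE file, purely illustrative (in the jobs this is done by the Jenkins inject wrapper, not custom code):

    def read_properties(path):
        # Parse simple KEY=VALUE lines as written by xci-set-scenario.sh.
        props = {}
        with open(path) as handle:
            for line in handle:
                line = line.strip()
                if line and '=' in line:
                    key, _, value = line.partition('=')
                    props[key] = value
        return props  # e.g. props['DEPLOY_SCENARIO'], props['SCENARIO_SHA']
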
diff --git a/jjb/xci/xci-verify-jobs.yml b/jjb/xci/xci-verify-jobs.yml
index 4c9ff9e..d78dc82 100644 (file)
@@ -66,7 +66,7 @@
           use-build-blocker: true
           blocking-jobs:
             - 'xci-verify-{distro}-.*'
-            - 'xci-.*-{distro}-merge-.*'
+            - 'xci-merge-{distro}-.*'
             - 'openstack-bifrost-verify-{distro}-.*'
             - 'xci-osa-verify-{distro}-.*'
             - 'xci-osa-periodic-{distro}-.*'
               forbidden-file-paths:
                 - compare-type: ANT
                   pattern: 'xci/scripts/vm/**'
+                - compare-type: ANT
+                  pattern: 'docs/**'
+                - compare-type: ANT
+                  pattern: 'prototypes/**'
+                - compare-type: ANT
+                  pattern: 'upstream/**'
             - project-compare-type: 'REG_EXP'
               project-pattern: 'sfc|sdnvpn'
               branches:
diff --git a/modules/opnfv/deployment/compass/adapter_container.py b/modules/opnfv/deployment/compass/adapter_container.py
new file mode 100644 (file)
index 0000000..1713fe2
--- /dev/null
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2018 HUAWEI TECHNOLOGIES CO.,LTD and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+from opnfv.deployment import manager
+from opnfv.utils import opnfv_logger as logger
+from opnfv.utils import ssh_utils
+
+import yaml
+import os
+
+logger = logger.Logger(__name__).getLogger()
+
+
+class ContainerizedCompassAdapter():
+
+    def __init__(self, installer_ip, installer_user, pkey_file):
+
+        self.installer = 'compass'
+        self.installer_ip = installer_ip
+        self.installer_user = installer_user
+        self.pkey_file = pkey_file
+        self.DST_PATH_UC = "/tmp/openstack_user_config.yml"
+        self.nodes = []
+        self.ROLES = {}
+
+        if pkey_file is not None and not os.path.isfile(pkey_file):
+            raise Exception(
+                'The private key file %s does not exist!' % pkey_file)
+
+    def _find_nodes(self, file):
+        nodes = file['compute_hosts']
+        for compute in nodes:
+            self.ROLES[compute] = 'compute'
+        controllers = file['haproxy_hosts']
+        for controller in controllers:
+            nodes[controller] = controllers[controller]
+            self.ROLES[controller] = 'controller'
+        return nodes
+
+    def _process_nodes(self, raw_nodes):
+        nodes = []
+
+        for node in raw_nodes:
+            name = node
+            ip = raw_nodes[node]['ip']
+            status = 'active'
+            id = None
+            if self.ROLES[node] == 'controller':
+                roles = 'controller'
+            elif self.ROLES[node] == 'compute':
+                roles = 'compute'
+            ssh_client = ssh_utils.get_ssh_client(hostname=ip,
+                                                  username=self.installer_user,
+                                                  pkey_file=self.pkey_file)
+            node = manager.Node(id, ip, name, status, roles, ssh_client)
+            nodes.append(node)
+
+        return nodes
+
+    def get_nodes(self, options=None):
+        try:
+            # if we have retrieved previously all the nodes, don't do it again
+            # This fails the first time when the constructor calls this method
+            # therefore the try/except
+            if len(self.nodes) > 0:
+                return self.nodes
+        except:
+            pass
+
+        with open(self.DST_PATH_UC, 'r') as stream:
+            try:
+                file = yaml.load(stream)
+                raw_nodes = self._find_nodes(file)
+            except yaml.YAMLError as exc:
+                logger.error(exc)
+        self.nodes = self._process_nodes(raw_nodes)
+        return self.nodes
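
For context on the new adapter: _find_nodes() expects /tmp/openstack_user_config.yml to contain compute_hosts and haproxy_hosts maps keyed by host name, each entry carrying an ip field; haproxy hosts are treated as controllers. A minimal usage sketch, where the IPs, user and key path are assumptions and get_nodes() opens an SSH session per node:

    # Assumed shape of /tmp/openstack_user_config.yml:
    #   compute_hosts:
    #     compute00: {ip: 172.29.236.11}
    #   haproxy_hosts:
    #     controller00: {ip: 172.29.236.10}
    from opnfv.deployment.compass import adapter_container

    adapter = adapter_container.ContainerizedCompassAdapter(
        installer_ip='192.168.200.2',    # assumed
        installer_user='root',           # assumed
        pkey_file='/root/.ssh/id_rsa')   # constructor raises if the file is missing
    for node in adapter.get_nodes():     # returns manager.Node objects
        print(node)
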
diff --git a/modules/opnfv/deployment/factory.py b/modules/opnfv/deployment/factory.py
index 2788e5e..1fd8d44 100644 (file)
@@ -9,7 +9,7 @@
 
 
 from opnfv.deployment.apex import adapter as apex_adapter
-from opnfv.deployment.compass import adapter as compass_adapter
+from opnfv.deployment.compass import adapter_container as compass_adapter
 from opnfv.deployment.fuel import adapter as fuel_adapter
 from opnfv.deployment.osa import adapter as osa_adapter
 from opnfv.deployment.daisy import adapter as daisy_adapter
@@ -44,10 +44,10 @@ class Factory(object):
                                             installer_user=installer_user,
                                             installer_pwd=installer_pwd)
         elif installer.lower() == "compass":
-            return compass_adapter.CompassAdapter(
+            return compass_adapter.ContainerizedCompassAdapter(
                 installer_ip=installer_ip,
                 installer_user=installer_user,
-                installer_pwd=installer_pwd)
+                pkey_file=pkey_file)
         elif installer.lower() == "osa":
             return osa_adapter.OSAAdapter(installer_ip=installer_ip,
                                           installer_user=installer_user,