Merge "Fix project name"
author Trevor Bramwell <tbramwell@linuxfoundation.org>
Fri, 6 Apr 2018 18:19:00 +0000 (18:19 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Fri, 6 Apr 2018 18:19:00 +0000 (18:19 +0000)
jjb/apex/apex-fetch-logs.sh
jjb/parser/parser.yml
jjb/xci/xci-run-functest.sh
jjb/xci/xci-set-scenario.sh
jjb/yardstick/yardstick-daily-jobs.yml
jjb/yardstick/yardstick-project-jobs.yml

index f25f456..bdb2252 100755 (executable)
@@ -14,9 +14,10 @@ if sudo opnfv-pyutil --fetch-logs; then
       echo "WARNING: Unable to determine log location.  Logs will not be uploaded"
       exit 0
   else
+    sudo chmod 777 ${LOG_LOCATION}
     UPLOAD_LOCATION="${GS_URL}/logs/${JOB_NAME}/${BUILD_NUMBER}/"
-    gsutil cp -r ${LOG_LOCATION} gs://${UPLOAD_LOCATION} > gsutil.latest_logs.log
-    echo "Logs available at https://${UPLOAD_LOCATION}/$(basename $LOG_LOCATION)"
+    gsutil -m cp -r ${LOG_LOCATION} gs://${UPLOAD_LOCATION} > gsutil.latest_logs.log
+    echo -e "Logs available at: \n$(find ${LOG_LOCATION} -type f | sed -n 's#^/tmp/#http://'$UPLOAD_LOCATION'#p')"
   fi
 else
   echo "WARNING: Log retrieval failed.  No logs will be uploaded"
index 8754419..b711985 100644 (file)
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
+      - fraser: &fraser
+          branch: 'stable/{stream}'
+          gs-pathname: '/{stream}'
+          disabled: false
       - euphrates:
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
index aa98e07..78b7d44 100755 (executable)
@@ -55,12 +55,10 @@ export XCI_VENV=/home/devuser/releng-xci/venv
 ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm "source $XCI_VENV/bin/activate; cd releng-xci/xci && ansible-playbook -i installer/osa/files/$XCI_FLAVOR/inventory playbooks/prepare-functest.yml"
 echo "Running functest"
 ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm_opnfv "/root/run-functest.sh"
+# Record exit code
+functest_exit=$?
 echo "Functest log"
 echo "---------------------------------------------------------------------------------"
 ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm_opnfv "cat /root/results/functest.log"
 echo "---------------------------------------------------------------------------------"
-# check the log to see if we have any error
-if ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm_opnfv "grep -q 'FAIL' /root/results/functest.log"; then
-    echo "Error: Functest failed!"
-    exit 1
-fi
+exit ${functest_exit}
index c3012cd..3e64ab1 100755 (executable)
@@ -96,22 +96,22 @@ function determine_generic_scenario() {
 
     # get the changeset
     cd $WORKSPACE
-    CHANGESET=$(git diff HEAD^..HEAD --name-only)
-    for CHANGED_FILE in $CHANGESET; do
+    SCENARIOS=$(git diff HEAD^..HEAD --name-only -- 'xci/scenarios' | cut -d "/" -f 3 | uniq)
+    # We need to set default scenario for changes that do not mess with scenarios
+    NO_SCENARIOS=$(git diff HEAD^..HEAD --name-only | grep -v 'xci/scenarios' | cut -d "/" -f 3 | uniq)
+    for CHANGED_SCENARIO in $SCENARIOS; do
+        [[ ${DEPLOY_SCENARIO[@]} =~ $CHANGED_SCENARIO ]] || DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]=$CHANGED_SCENARIO
+    done
+    for CHANGED_FILE in $NO_SCENARIOS; do
         case $CHANGED_FILE in
-            *k8-nosdn*|*kubespray*)
+            kubespray)
                 [[ ${DEPLOY_SCENARIO[@]} =~ "k8-nosdn-nofeature" ]] || DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]='k8-nosdn-nofeature'
                 ;;
-            *os-odl*)
-                [[ ${DEPLOY_SCENARIO[@]} =~ "os-odl-nofeature" ]] || DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]='os-odl-nofeature'
-                ;;
-            *os-nosdn*|*osa*)
-                [[ ${DEPLOY_SCENARIO[@]} =~ "os-nosdn-nofeature" ]] || DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]='os-nosdn-nofeature'
-                ;;
+            # Default case (including OSA changes)
             *)
                 [[ ${DEPLOY_SCENARIO[@]} =~ "os-nosdn-nofeature" ]] || DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]='os-nosdn-nofeature'
                 ;;
-            esac
+        esac
     done
 
     # extract releng-xci sha
index 24c7f31..2eaa0fe 100644 (file)
@@ -15,8 +15,8 @@
       branch: '{stream}'
       gs-pathname: ''
       docker-tag: 'latest'
-    euphrates: &euphrates
-      stream: euphrates
+    fraser: &fraser
+      stream: fraser
       branch: 'stable/{stream}'
       gs-pathname: '{stream}'
       docker-tag: 'stable'
           auto-trigger-name: 'daily-trigger-disabled'
           <<: *master
       - virtual:
-          slave-label: apex-virtual-euphrates
+          slave-label: apex-virtual-fraser
           installer: apex
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       - baremetal:
-          slave-label: apex-baremetal-euphrates
+          slave-label: apex-baremetal-fraser
           installer: apex
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       # fuel CI PODs
       - baremetal:
           slave-label: fuel-baremetal
           slave-label: fuel-baremetal
           installer: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       - virtual:
           slave-label: fuel-virtual
           installer: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       # armband CI PODs
       - armband-baremetal:
           slave-label: armband-baremetal
           slave-label: armband-baremetal
           installer: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       - armband-virtual:
           slave-label: armband-virtual
           installer: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       # joid CI PODs
       - baremetal:
           slave-label: joid-baremetal
           slave-label: joid-baremetal
           installer: joid
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       - virtual:
           slave-label: joid-virtual
           installer: joid
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       # compass CI PODs
       - baremetal:
           slave-label: compass-baremetal
           slave-label: compass-baremetal
           installer: compass
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       - virtual:
           slave-label: compass-virtual
           installer: compass
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       # daisy CI PODs
       - baremetal:
           slave-label: daisy-baremetal
           slave-label: '{pod}'
           installer: fuel
           auto-trigger-name: 'daily-trigger-disabled'
-          <<: *euphrates
+          <<: *fraser
       - zte-pod2:
           slave-label: '{pod}'
           installer: daisy
           description: 'Arguments to use in order to choose the backend DB'
 
 - parameter:
-    name: 'yardstick-params-apex-virtual-euphrates'
+    name: 'yardstick-params-apex-virtual-fraser'
     parameters:
       - string:
           name: YARDSTICK_DB_BACKEND
           description: 'Arguments to use in order to choose the backend DB'
 
 - parameter:
-    name: 'yardstick-params-apex-baremetal-euphrates'
+    name: 'yardstick-params-apex-baremetal-fraser'
     parameters:
       - string:
           name: YARDSTICK_DB_BACKEND
index 63ce298..56e825e 100644 (file)
@@ -17,7 +17,7 @@
           branch: '{stream}'
           gs-pathname: ''
           disabled: false
-      - euphrates:
+      - fraser: &fraser
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
           disabled: false
@@ -87,7 +87,8 @@
       - string:
           name: GS_URL
           default: '$GS_BASE{gs-pathname}'
-          description: "Directory where the build artifact will be located upon the completion     of the build."
+          description: "Directory where the build artifact will be located upon\
+            \ the completion of the build."
 
     scm:
       - git-scm
           sudo apt-get -y clean && sudo apt-get -y autoremove
 
           echo "Configure RabbitMQ service"
-          service rabbitmq-server restart
-          rabbitmqctl start_app
-          if [ -z "$(rabbitmqctl list_users | grep yardstick)" ]; then
-            rabbitmqctl add_user yardstick yardstick
-            rabbitmqctl set_permissions yardstick ".*" ".*" ".*"
+          sudo service rabbitmq-server restart
+          sudo rabbitmqctl start_app
+          if [ -z "$(sudo rabbitmqctl list_users | grep yardstick)" ]; then
+            sudo rabbitmqctl add_user yardstick yardstick
+            sudo rabbitmqctl set_permissions yardstick ".*" ".*" ".*"
           fi
 
           echo "Running functional tests in Python 2.7 ..."
           sudo apt-get -y clean && sudo apt-get -y autoremove
 
           echo "Configure RabbitMQ service"
-          service rabbitmq-server restart
-          rabbitmqctl start_app
-          if [ -z "$(rabbitmqctl list_users | grep yardstick)" ]; then
-            rabbitmqctl add_user yardstick yardstick
-            rabbitmqctl set_permissions yardstick ".*" ".*" ".*"
+          sudo service rabbitmq-server restart
+          sudo rabbitmqctl start_app
+          if [ -z "$(sudo rabbitmqctl list_users | grep yardstick)" ]; then
+            sudo rabbitmqctl add_user yardstick yardstick
+            sudo rabbitmqctl set_permissions yardstick ".*" ".*" ".*"
           fi
 
           echo "Running functional tests in Python 3 ..."