Merge "dovetail: Create jenkins jobs to deploy compass colorado and run dovetail...
authorJun Li <matthew.lijun@huawei.com>
Thu, 26 Jan 2017 03:04:21 +0000 (03:04 +0000)
committerGerrit Code Review <gerrit@opnfv.org>
Thu, 26 Jan 2017 03:04:21 +0000 (03:04 +0000)
jjb/fuel/fuel-daily-jobs.yml
jjb/fuel/fuel-project-jobs.yml
jjb/infra/bifrost-verify.sh
jjb/joid/joid-deploy.sh
jjb/releng/testapi-automate.yml
jjb/releng/testapi-backup-mongodb.sh [new file with mode: 0644]
utils/fetch_os_creds.sh
utils/test/testapi/run_test.sh

index 02fa9fb..b0dee73 100644 (file)
 
     publishers:
         - email:
-            recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com peter.barabas@ericsson.com fzhadaev@mirantis.com
+            recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
 
 ########################
 # parameter macros
index deab3ab..390094e 100644 (file)
@@ -79,7 +79,7 @@
 
     publishers:
         - email:
-            recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com fzhadaev@mirantis.com
+            recipients: fzhadaev@mirantis.com
 
 - job-template:
     name: 'fuel-merge-build-{stream}'
 
     publishers:
         - email:
-            recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com fzhadaev@mirantis.com
+            recipients: fzhadaev@mirantis.com
 
 - job-template:
     name: 'fuel-deploy-generic-daily-{stream}'
index 48f916e..a7ef9c4 100755 (executable)
@@ -24,16 +24,17 @@ function upload_logs() {
     gsutil -q cp -Z ${WORKSPACE}/build_log.txt ${BIFROST_GS_URL}/build_log.txt
     rm ${WORKSPACE}/build_log.txt
 
-    [[ ! -d ${WORKSPACE}/logs ]] && return 0
-
-    pushd ${WORKSPACE}/logs/ &> /dev/null
-    for x in *.log; do
-        echo "Compressing and uploading $x"
-        gsutil -q cp -Z ${x} ${BIFROST_GS_URL}/${x}
-    done
+    if [[ -d ${WORKSPACE}/logs ]]; then
+        pushd ${WORKSPACE}/logs &> /dev/null
+        for x in *.log; do
+            echo "Compressing and uploading $x"
+            gsutil -q cp -Z ${x} ${BIFROST_GS_URL}/${x}
+        done
+        popd &> /dev/null
+    fi
 
     echo "Generating the landing page"
-    cat > index.html <<EOF
+    cat > ${WORKSPACE}/index.html <<EOF
 <html>
 <h1>Build results for <a href=https://$GERRIT_NAME/#/c/$GERRIT_CHANGE_NUMBER/$GERRIT_PATCHSET_NUMBER>$GERRIT_NAME/$GERRIT_CHANGE_NUMBER/$GERRIT_PATCHSET_NUMBER</a></h1>
 <h2>Job: $JOB_NAME</h2>
@@ -41,20 +42,22 @@ function upload_logs() {
 <li><a href=${BIFROST_LOG_URL}/build_log.txt>build_log.txt</a></li>
 EOF
 
-    for x in *.log; do
-        echo "<li><a href=${BIFROST_LOG_URL}/${x}>${x}</a></li>" >> index.html
-    done
+    if [[ -d ${WORKSPACE}/logs ]]; then
+        pushd ${WORKSPACE}/logs &> /dev/null
+        for x in *.log; do
+            echo "<li><a href=${BIFROST_LOG_URL}/${x}>${x}</a></li>" >> ${WORKSPACE}/index.html
+        done
+        popd &> /dev/null
+    fi
 
-    cat >> index.html << EOF
+    cat >> ${WORKSPACE}/index.html << EOF
 </ul>
 </html>
 EOF
 
-    gsutil -q cp index.html ${BIFROST_GS_URL}/index.html
-
-    rm index.html
+    gsutil -q cp ${WORKSPACE}/index.html ${BIFROST_GS_URL}/index.html
 
-    popd &> /dev/null
+    rm ${WORKSPACE}/index.html
 }
 
 function fix_ownership() {
index 88dbe22..da5dcf0 100644 (file)
@@ -45,17 +45,24 @@ export POD_NAME=${POD/-}
 ##
 
 cd $WORKSPACE/ci
-if [ -e "$LAB_CONFIG/environments.yaml" ] && [ "$MAAS_REINSTALL" == "false" ]; then
+
+if [ -e "$LAB_CONFIG/deployconfig.yaml" ] && [ "$MAAS_REINSTALL" == "false" ]; then
     echo "------ Recover Juju environment to use MAAS ------"
-    cp $LAB_CONFIG/environments.yaml .
-    cp $LAB_CONFIG/deployment.yaml .
-    if [ -e $LAB_CONFIG/deployconfig.yaml ]; then
+    if [ -e "$LAB_CONFIG/deployconfig.yaml" ]; then
         cp $LAB_CONFIG/deployconfig.yaml .
+        cp $LAB_CONFIG/deployment.yaml .
+        cp $LAB_CONFIG/labconfig.yaml .
     fi
 else
-    echo "------ Redeploy MAAS ------"
-    ./00-maasdeploy.sh $POD_NAME
-    exit_on_error $? "MAAS Deploy FAILED"
+    if [ "$NODE_NAME" == "default" ]; then
+        echo "------ Redeploy MAAS ------"
+        ./03-maasdeploy.sh default
+        exit_on_error $? "MAAS Deploy FAILED"
+    else
+        echo "------ Redeploy MAAS ------"
+        ./03-maasdeploy.sh custom $LAB_CONFIG/labconfig.yaml
+        exit_on_error $? "MAAS Deploy FAILED"
+    fi
 fi
 
 ##
@@ -142,7 +149,7 @@ if [ "$JOID_MODEL" == 'os' ]; then
 
   # export the openrc file by getting the one generated by joid and add SDN
   # controller for Functest
-  cp ./cloud/admin-openrc $JOID_ADMIN_OPENRC
+  cp ./cloud/admin-openrc $JOID_ADMIN_OPENRC
   cat << EOF >> $JOID_ADMIN_OPENRC
   export SDN_CONTROLLER=$SDN_CONTROLLER_IP
   export SDN_PASSWORD=$SDN_PASSWORD
index abb3481..8332f5b 100644 (file)
 
     project: 'releng'
 
+- job:
+    name: 'testapi-mongodb-backup'
+
+    slave-label: 'testresults'
+
+    scm:
+        - git-scm
+
+    triggers:
+        - timed: '@weekly'
+
+    builders:
+        - mongodb-backup
+
 - job-template:
     name: 'testapi-verify-{stream}'
 
@@ -55,8 +69,8 @@
         - cobertura:
             report-file: "coverage.xml"
             only-stable: "true"
-            health-auto-update: "true"
-            stability-auto-update: "true"
+            health-auto-update: "false"
+            stability-auto-update: "false"
             zoom-coverage-chart: "true"
             targets:
                 - files:
 ################################
 # job builders
 ################################
+- builder:
+    name: mongodb-backup
+    builders:
+        - shell: |
+            bash ./jjb/releng/testapi-backup-mongodb.sh
 
 - builder:
     name: run-unit-tests
diff --git a/jjb/releng/testapi-backup-mongodb.sh b/jjb/releng/testapi-backup-mongodb.sh
new file mode 100644 (file)
index 0000000..52957ab
--- /dev/null
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+set -e
+
+# Run MongoDB backup
+python $WORKSPACE/utils/test/testapi/update/templates/backup_mongodb.py -o $WORKSPACE/
+
+# Compressing the dump
+now=$(date +"%m_%d_%Y_%H_%M_%S")
+echo $now
+
+file_name="testapi_mongodb_"$now".tar.gz"
+echo $file_name
+
+tar cvfz "$file_name" test_results_collection*
+
+rm -rf test_results_collection*
+
+artifact_dir="testapibackup"
+workspace="$WORKSPACE"
+
+set +e
+/usr/local/bin/gsutil &>/dev/null
+if [ $? != 0 ]; then
+    echo "Not possible to push results to artifact: gsutil not installed"
+    exit 1
+else
+    echo "Uploading mongodump to artifact $artifact_dir"
+    /usr/local/bin/gsutil cp -r "$workspace"/"$file_name" gs://testingrohit/"$artifact_dir"/
+    echo "MongoDump can be found at http://artifacts.opnfv.org/$artifact_dir"
+fi
index 856f69a..c1e21f3 100755 (executable)
@@ -121,6 +121,14 @@ if [ "$installer_type" == "fuel" ]; then
     # but sometimes the output of endpoint-list is like this: http://172.30.9.70:8004/v1/%(tenant_id)s
     # Fuel virtual need a fix
 
+    #convert to v3 URL
+    auth_url=$(grep AUTH_URL "$dest_path")
+    if [[ "$auth_url" != *v3* ]]; then
+        auth_url=$(echo $auth_url |sed "s|'$|v3&|")
+    fi
+    sed -i '/AUTH_URL/d' $dest_path
+    echo $auth_url >> $dest_path
+
 elif [ "$installer_type" == "apex" ]; then
     verify_connectivity $installer_ip
 
index d1f05f2..51db09f 100755 (executable)
@@ -5,20 +5,16 @@ set -o errexit
 # Get script directory
 SCRIPTDIR=`dirname $0`
 
-# Either Workspace is set (CI)
-if [ -z $WORKSPACE ]
-then
-    WORKSPACE="."
-fi
-
 echo "Running unit tests..."
 
 # Creating virtual environment
-virtualenv $WORKSPACE/testapi_venv
-source $WORKSPACE/testapi_venv/bin/activate
+virtualenv $SCRIPTDIR/testapi_venv
+source $SCRIPTDIR/testapi_venv/bin/activate
 
 # Install requirements
 pip install -r $SCRIPTDIR/requirements.txt
+pip install coverage
+pip install "nose>=1.3.1"
 
 find . -type f -name "*.pyc" -delete
 
@@ -26,7 +22,7 @@ nosetests --with-xunit \
     --with-coverage \
     --cover-erase \
     --cover-package=$SCRIPTDIR/opnfv_testapi/cmd \
-    --cover-package=$SCRIPTDIR/opnfv_testapi/commonn \
+    --cover-package=$SCRIPTDIR/opnfv_testapi/common \
     --cover-package=$SCRIPTDIR/opnfv_testapi/resources \
     --cover-package=$SCRIPTDIR/opnfv_testapi/router \
     --cover-xml \