Merge "Add scenarios jobs"
author Serena Feng <feng.xiaowei@zte.com.cn>
Fri, 24 Feb 2017 06:15:50 +0000 (06:15 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Fri, 24 Feb 2017 06:15:50 +0000 (06:15 +0000)
18 files changed:
jjb/apex/apex-deploy.sh
jjb/apex/apex-snapshot-create.sh
jjb/apex/apex-upload-artifact.sh
jjb/apex/apex.yml
jjb/daisy4nfv/daisy-deploy.sh
jjb/daisy4nfv/daisy4nfv-build.sh
jjb/functest/functest-ci-jobs.yml
jjb/functest/functest-suite.sh
jjb/global/releng-macros.yml
jjb/infra/bifrost-cleanup-job.yml [new file with mode: 0644]
jjb/joid/joid-daily-jobs.yml
jjb/opnfvdocs/docs-post-rtd.sh
jjb/opnfvdocs/docs-rtd.yaml
jjb/qtip/qtip-verify-jobs.yml
jjb/releng/opnfv-docker.sh
modules/opnfv/deployment/factory.py
modules/opnfv/deployment/manager.py
utils/test/reporting/functest/testCase.py

index 63baa57..d39217a 100755 (executable)
@@ -15,7 +15,7 @@ if ! rpm -q wget > /dev/null; then
   sudo yum -y install wget
 fi
 
-if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
+if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *promote* ]]; then
     # Build is from a verify, use local build artifacts (not RPMs)
     cd $WORKSPACE/../${BUILD_DIRECTORY}
     WORKSPACE=$(pwd)
@@ -79,8 +79,8 @@ elif [[ "$DEPLOY_SCENARIO" == *gate* ]]; then
   fi
 fi
 
-# use local build for verify and csit promote
-if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
+# use local build for verify and promote
+if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *promote* ]]; then
     if [ ! -e "${WORKSPACE}/build/lib" ]; then
       ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib
     fi
@@ -159,7 +159,7 @@ if [ "$OPNFV_CLEAN" == 'yes' ]; then
   else
     clean_opts=''
   fi
-  if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
+  if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *promote* ]]; then
     sudo CONFIG=${CONFIG} LIB=${LIB} ./clean.sh ${clean_opts}
   else
     sudo CONFIG=${CONFIG} LIB=${LIB} opnfv-clean ${clean_opts}
@@ -183,10 +183,13 @@ if [[ "$JOB_NAME" == *virtual* ]]; then
   # settings for virtual deployment
   DEPLOY_CMD="${DEPLOY_CMD} -v"
   if [[ "${DEPLOY_SCENARIO}" =~ fdio|ovs ]]; then
-    DEPLOY_CMD="${DEPLOY_CMD} --virtual-ram 14"
+    DEPLOY_CMD="${DEPLOY_CMD} --virtual-default-ram 14 --virtual-compute-ram 8"
   fi
   if [[ "$JOB_NAME" == *csit* ]]; then
-    DEPLOY_CMD="${DEPLOY_CMD} -e csit-environment.yaml --virtual-computes 2"
+    DEPLOY_CMD="${DEPLOY_CMD} -e csit-environment.yaml"
+  fi
+  if [[ "$JOB_NAME" == *promote* ]]; then
+    DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 2"
   fi
 else
   # settings for bare metal deployment
index f146dd8..b2a3944 100644 (file)
@@ -13,6 +13,7 @@ set -o nounset
 set -o pipefail
 
 SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
+SNAP_TYPE=$(echo ${JOB_NAME} | sed -n 's/^apex-\(.\+\)-promote.*$/\1/p')
 
 echo "Creating Apex snapshot..."
 echo "-------------------------"
@@ -81,17 +82,19 @@ sudo chown jenkins-ci:jenkins-ci *
 
 # tar up artifacts
 DATE=`date +%Y-%m-%d`
-tar czf ../apex-csit-snap-${DATE}.tar.gz .
+tar czf ../apex-${SNAP_TYPE}-snap-${DATE}.tar.gz .
 popd > /dev/null
 sudo rm -rf ${tmp_dir}
-echo "Snapshot saved as apex-csit-snap-${DATE}.tar.gz"
+echo "Snapshot saved as apex-${SNAP_TYPE}-snap-${DATE}.tar.gz"
 
 # update opnfv properties file
-curl -O -L http://$GS_URL/snapshot.properties
-sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
-snap_sha=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)
-sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
-echo "OPNFV_SNAP_URL=$GS_URL/apex-csit-snap-${DATE}.tar.gz"
-echo "OPNFV_SNAP_SHA512SUM=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)"
-echo "Updated properties file: "
-cat snapshot.properties
+if [ "$SNAP_TYPE" == 'csit' ]; then
+  curl -O -L http://$GS_URL/snapshot.properties
+  sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
+  snap_sha=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)
+  sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
+  echo "OPNFV_SNAP_URL=$GS_URL/apex-csit-snap-${DATE}.tar.gz"
+  echo "OPNFV_SNAP_SHA512SUM=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)"
+  echo "Updated properties file: "
+  cat snapshot.properties
+fi
index ef8ad53..15aa67a 100755 (executable)
@@ -76,12 +76,15 @@ gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.la
 uploadsnap () {
   # Uploads snapshot artifact and updated properties file
   echo "Uploading snapshot artifacts"
-  gsutil cp $WORKSPACE/apex-csit-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
-  gsutil cp $WORKSPACE/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
+  SNAP_TYPE=$(echo ${JOB_NAME} | sed -n 's/^apex-\(.\+\)-promote.*$/\1/p')
+  gsutil cp $WORKSPACE/apex-${SNAP_TYPE}-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
+  if [ "$SNAP_TYPE" == 'csit' ]; then
+    gsutil cp $WORKSPACE/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
+  fi
   echo "Upload complete for Snapshot"
 }
 
-if echo $WORKSPACE | grep csit > /dev/null; then
+if echo $WORKSPACE | grep promote > /dev/null; then
   uploadsnap
 elif gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
   echo "Signing Key avaliable"
index 3938f15..126651e 100644 (file)
@@ -14,6 +14,7 @@
         - 'apex-build-colorado'
         - 'apex-deploy-baremetal-os-odl_l2-fdio-ha-colorado'
         - 'apex-csit-promote-daily-{stream}'
+        - 'apex-fdio-promote-daily-{stream}'
 
     # stream:    branch with - in place of / (eg. stable-arno)
     # branch:    branch (eg. stable/arno)
             blocking-jobs:
                 - 'apex-daily.*'
                 - 'apex-verify.*'
-                - 'apex-csit.*'
+                - 'apex-.*-promote.*'
 
     builders:
         - trigger-builds:
                 - 'apex-deploy.*'
                 - 'apex-build.*'
                 - 'apex-runner.*'
-                - 'apex-csit.*'
+                - 'apex-.*-promote.*'
 
     triggers:
         - 'apex-{stream}'
         - shell:
             !include-raw-escape: ./apex-upload-artifact.sh
 
+# FDIO promote
+- job-template:
+    name: 'apex-fdio-promote-daily-{stream}'
+
+    # Job template for promoting FDIO Snapshots
+    #
+    # Required Variables:
+    #     stream:    branch with - in place of / (eg. stable)
+    #     branch:    branch (eg. stable)
+    node: '{daily-slave}'
+
+    disabled: false
+
+    scm:
+        - git-scm
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - apex-parameter:
+            gs-pathname: '{gs-pathname}'
+
+    properties:
+        - build-blocker:
+            use-build-blocker: true
+            block-level: 'NODE'
+            blocking-jobs:
+                - 'apex-verify.*'
+                - 'apex-deploy.*'
+                - 'apex-build.*'
+                - 'apex-runner.*'
+                - 'apex-daily.*'
+
+    builders:
+        - 'apex-build'
+        - trigger-builds:
+          - project: 'apex-deploy-virtual-os-odl_l2-fdio-noha-{stream}'
+            predefined-parameters: |
+              BUILD_DIRECTORY=apex-fdio-promote-daily-{stream}
+              OPNFV_CLEAN=yes
+            git-revision: false
+            block: true
+            same-node: true
+        - shell:
+            !include-raw-escape: ./apex-snapshot-create.sh
+        - shell:
+            !include-raw-escape: ./apex-upload-artifact.sh
+
 - job-template:
     name: 'apex-gs-clean-{stream}'
 
index b303c2c..b512e3f 100755 (executable)
@@ -22,9 +22,12 @@ cd $WORKSPACE
 BASE_DIR=$(cd ./;pwd)
 
 echo "Cloning securedlab repo $BRANCH"
-git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet \
+git clone ssh://jenkins-zte@gerrit.opnfv.org:29418/securedlab --quiet \
     --branch $BRANCH
 
+# daisy ci/deploy/deploy.sh use $BASE_DIR/labs dir
+cp -r securedlab/labs .
+
 DEPLOY_COMMAND="sudo ./ci/deploy/deploy.sh -b $BASE_DIR \
                 -l $LAB_NAME -p $POD_NAME -B $BRIDGE"
 
@@ -45,7 +48,7 @@ $DEPLOY_COMMAND
 """
 
 # start the deployment
-#$DEPLOY_COMMAND
+$DEPLOY_COMMAND
 
 if [ $? -ne 0 ]; then
     echo
index eb29fed..375d807 100755 (executable)
@@ -26,6 +26,7 @@ cd $WORKSPACE
     echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
     echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
     echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin"
+    echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $OUTPUT_DIR/opnfv-$OPNFV_ARTIFACT_VERSION.bin | cut -d' ' -f1)"
     echo "OPNFV_BUILD_URL=$BUILD_URL"
 ) > $WORKSPACE/opnfv.properties
 
index 0f53ebc..e85144c 100644 (file)
         - 'functest-cleanup'
         - 'set-functest-env'
         - 'functest-suite'
-        - 'functest-store-results'
-        - 'functest-exit'
 
 - builder:
     name: functest-daily
index f28d3d0..228cc3d 100755 (executable)
@@ -1,19 +1,18 @@
 #!/bin/bash
-set -e
 
-echo "Functest: run $FUNCTEST_SUITE_NAME on branch $BRANCH"
-if [[ "$BRANCH" =~ 'brahmaputra' ]]; then
-    cmd="${FUNCTEST_REPO_DIR}/docker/run_tests.sh --test $FUNCTEST_SUITE_NAME"
-elif [[ "$BRANCH" =~ 'colorado' ]]; then
-    cmd="python ${FUNCTEST_REPO_DIR}/ci/run_tests.py -t $FUNCTEST_SUITE_NAME"
-else
-    cmd="functest testcase run $FUNCTEST_SUITE_NAME"
-fi
 container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
-docker exec $container_id $cmd
+if [ -z $container_id ]; then
+    echo "Functest container not found"
+    exit 1
+fi
+
+global_ret_val=0
 
-ret_value=$?
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-echo ${ret_value}>${ret_val_file}
+tests=($(echo $FUNCTEST_SUITE_NAME | tr "," "\n"))
+for test in ${tests[@]}; do
+    cmd="python /home/opnfv/repos/functest/functest/ci/run_tests.py -t $test"
+    docker exec $container_id $cmd
+    let global_ret_val+=$?
+done
 
-exit 0
+exit $global_ret_val
index d5eb0c9..c245ee8 100644 (file)
             allow-empty: true
             fingerprint: true
             latest-only: true
+
+- publisher:
+    name: publish-coverage
+    publishers:
+      - cobertura:
+          report-file: "coverage.xml"
+          only-stable: "true"
+          health-auto-update: "true"
+          stability-auto-update: "true"
+          zoom-coverage-chart: "true"
+          targets:
+            - files:
+                healthy: 10
+                unhealthy: 20
+                failing: 30
+            - method:
+                healthy: 50
+                unhealthy: 40
+                failing: 30
+
diff --git a/jjb/infra/bifrost-cleanup-job.yml b/jjb/infra/bifrost-cleanup-job.yml
new file mode 100644 (file)
index 0000000..ba283ff
--- /dev/null
@@ -0,0 +1,148 @@
+- project:
+    name: 'openstack-bifrost-cleanup'
+#--------------------------------
+# branches
+#--------------------------------
+    stream:
+        - master:
+            branch: '{stream}'
+
+#--------------------------------
+# projects
+#--------------------------------
+    project:
+        - 'openstack':
+            project-repo: 'https://git.openstack.org/openstack/bifrost'
+            clone-location: '/opt/bifrost'
+        - 'opnfv':
+            project-repo: 'https://gerrit.opnfv.org/gerrit/releng'
+            clone-location: '/opt/releng'
+
+#--------------------------------
+# jobs
+#--------------------------------
+    jobs:
+        - '{project}-bifrost-cleanup-{stream}'
+
+- job-template:
+    name: '{project}-bifrost-cleanup-{stream}'
+
+    concurrent: false
+
+    node: bifrost-verify-virtual
+
+    # Make sure no verify job is running on any of the slaves since that would
+    # produce build logs after we wipe the destination directory.
+    properties:
+        - build-blocker:
+            blocking-jobs:
+                - '{project}-bifrost-verify-*'
+
+    parameters:
+        - string:
+            name: PROJECT
+            default: '{project}'
+
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            set -eu
+
+            # DO NOT change this unless you know what you are doing.
+            BIFROST_GS_URL="gs://artifacts.opnfv.org/cross-community-ci/openstack/bifrost/$GERRIT_NAME/$GERRIT_CHANGE_NUMBER/"
+
+            # This should never happen... even 'recheck' uses the last jobs'
+            # gerrit information. Better exit with error so we can investigate
+            [[ ! -n $GERRIT_NAME ]] || [[ ! -n $GERRIT_CHANGE_NUMBER ]] && exit 1
+
+            echo "Removing build artifacts for $GERRIT_NAME/$GERRIT_CHANGE_NUMBER"
+
+            if ! [[ "$BIFROST_GS_URL" =~ "/cross-community-ci/openstack/bifrost/" ]]; then
+                echo "Oops! BIFROST_GS_URL=$BIFROST_GS_URL does not seem like a valid"
+                echo "bifrost location on the Google storage server. Please double-check"
+                echo "that it's set properly or fix this line if necessary."
+                echo "gsutil will not be executed until this is fixed!"
+                exit 1
+            fi
+            # No force (-f). We always verify upstream jobs so if there are no logs
+            # something else went wrong and we need to break immediately and investigate
+            gsutil rm -r $BIFROST_GS_URL
+
+    triggers:
+        - '{project}-gerrit-trigger-cleanup':
+            branch: '{branch}'
+
+    publishers:
+        - email:
+            recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
+#--------------------------------
+# trigger macros
+#--------------------------------
+- trigger:
+    name: 'openstack-gerrit-trigger-cleanup'
+    triggers:
+        - gerrit:
+            server-name: 'review.openstack.org'
+            escape-quotes: true
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - patchset-uploaded-event: 'false'
+                # We only run this when the change is merged since
+                # we don't need the logs anymore
+                - change-merged-event: 'true'
+                - change-abandoned-event: 'true'
+                - change-restored-event: 'false'
+                - draft-published-event: 'false'
+            # This is an OPNFV maintenance job. We don't want to provide
+            # feedback on Gerrit
+            silent: true
+            silent-start: true
+            projects:
+              - project-compare-type: 'PLAIN'
+                project-pattern: 'openstack/bifrost'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'doc/**'
+                  - compare-type: ANT
+                    pattern: 'releasenotes/**'
+            readable-message: true
+- trigger:
+    name: 'opnfv-gerrit-trigger-cleanup'
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - patchset-uploaded-event: 'false'
+                # We only run this when the change is merged since
+                # we don't need the logs anymore
+                - change-merged-event: 'true'
+                - change-abandoned-event: 'true'
+                - change-restored-event: 'false'
+                - draft-published-event: 'false'
+            # This is an OPNFV maintenance job. We don't want to provide
+            # feedback on Gerrit
+            silent: true
+            silent-start: true
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: 'releng'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                file-paths:
+                  - compare-type: ANT
+                    pattern: 'prototypes/bifrost/**'
+                  - compare-type: ANT
+                    pattern: 'jjb/infra/**'
+            readable-message: true
index 88269d3..e615244 100644 (file)
 - trigger:
     name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-master-trigger'
     triggers:
-        - timed: '5 2 * * *'
+        - timed: ''
 # os-nosdn-nofeature-ha trigger - branch: danube
 - trigger:
     name: 'joid-os-nosdn-nofeature-ha-baremetal-danube-trigger'
 - trigger:
     name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-master-trigger'
     triggers:
-        - timed: '5 7 * * *'
+        - timed: ''
 # os-odl_l2-nofeature-ha trigger - branch: danube
 - trigger:
     name: 'joid-os-odl_l2-nofeature-ha-baremetal-danube-trigger'
 - trigger:
     name: 'joid-os-onos-nofeature-ha-cengn-pod1-master-trigger'
     triggers:
-        - timed: '5 12 * * *'
+        - timed: ''
 # os-onos-nofeature-ha trigger - branch: danube
 - trigger:
     name: 'joid-os-onos-nofeature-ha-baremetal-danube-trigger'
 - trigger:
     name: 'joid-os-onos-sfc-ha-cengn-pod1-master-trigger'
     triggers:
-        - timed: '5 17 * * *'
+        - timed: ''
 # os-onos-sfc-ha trigger - branch: danube
 - trigger:
     name: 'joid-os-onos-sfc-ha-baremetal-danube-trigger'
 - trigger:
     name: 'joid-os-nosdn-lxd-noha-cengn-pod1-master-trigger'
     triggers:
-        - timed: '5 22 * * *'
+        - timed: ''
 # os-nosdn-lxd-noha trigger - branch: danube
 - trigger:
     name: 'joid-os-nosdn-lxd-noha-baremetal-danube-trigger'
 - trigger:
     name: 'joid-os-nosdn-lxd-ha-cengn-pod1-master-trigger'
     triggers:
-        - timed: '5 10 * * *'
+        - timed: ''
 # os-nosdn-lxd-ha trigger - branch: danube
 - trigger:
     name: 'joid-os-nosdn-lxd-ha-baremetal-danube-trigger'
 - trigger:
     name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-master-trigger'
     triggers:
-        - timed: '5 4 * * *'
+        - timed: ''
 # os-nosdn-nofeature-noha trigger - branch: danube
 - trigger:
     name: 'joid-os-nosdn-nofeature-noha-baremetal-danube-trigger'
 - trigger:
     name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-master-trigger'
     triggers:
-        - timed: '5 15 * * *'
+        - timed: ''
 # k8-nosdn-nofeature-noha trigger - branch: danube
 - trigger:
     name: 'joid-k8-nosdn-nofeature-noha-baremetal-danube-trigger'
 - trigger:
     name: 'joid-k8-nosdn-lb-noha-cengn-pod1-master-trigger'
     triggers:
-        - timed: '5 20 * * *'
+        - timed: ''
 # k8-nosdn-lb-noha trigger - branch: danube
 - trigger:
     name: 'joid-k8-nosdn-lb-noha-baremetal-danube-trigger'
index 7faa26f..e3dc9b5 100644 (file)
@@ -4,4 +4,4 @@ if [ $GERRIT_BRANCH == "master" ]; then
 else
     RTD_BUILD_VERSION=${{GERRIT_BRANCH/\//-}}
 fi
-curl -X POST --data "version_slug=$RTD_BUILD_VERSION" https://readthedocs.org/build/{rtdproject}
+curl -X POST --data "version_slug=$RTD_BUILD_VERSION" https://readthedocs.org/build/opnfvdocsdemo
index 151b535..01b2820 100644 (file)
@@ -46,7 +46,7 @@
     parameters:
         - label:
             name: SLAVE_LABEL
-            default: 'lf-build1'
+            default: 'lf-build2'
             description: 'Slave label on Jenkins'
         - project-parameter:
             project: '{project}'
index d1fc34d..3cb331c 100644 (file)
@@ -55,6 +55,8 @@
 
     builders:
         - qtip-unit-tests-and-docs-build
+    publisher:
+        - 'publish-coverage'
 
 ################################
 ## job builders
index 7b93d50..ded743d 100644 (file)
@@ -77,8 +77,12 @@ fi
 echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_TAG"
 echo "--------------------------------------------------------"
 echo
-cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BRANCH
-    -f $DOCKERFILE ."
+if [[ $DOCKER_REPO_NAME == *"dovetail"* ]]; then
+    cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG -f $DOCKERFILE ."
+else
+    cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BRANCH
+        -f $DOCKERFILE ."
+fi
 
 echo ${cmd}
 ${cmd}
index e48a751..1ccee4e 100644 (file)
@@ -41,4 +41,5 @@ class Factory(object):
                                             installer_user=installer_user,
                                             installer_pwd=installer_pwd)
         else:
-            raise Exception("Installer adapter is not implemented.")
+            raise Exception("Installer adapter is not implemented for "
+                            "the given installer.")
index 7047a4d..df735f1 100644 (file)
@@ -95,6 +95,7 @@ class Deployment(object):
 
 
 class Role():
+    INSTALLER = 'installer'
     CONTROLLER = 'controller'
     COMPUTE = 'compute'
     ODL = 'opendaylight'
@@ -131,7 +132,7 @@ class Node(object):
         self.memory = 'unknown'
         self.ovs = 'unknown'
 
-        if ssh_client:
+        if ssh_client and Role.INSTALLER not in self.roles:
             sys_info = self.get_system_info()
             self.cpu_info = sys_info['cpu_info']
             self.memory = sys_info['memory']
@@ -318,7 +319,7 @@ class DeploymentHandler(object):
                                        name=installer,
                                        status=NodeStatus.STATUS_OK,
                                        ssh_client=self.installer_connection,
-                                       roles='installer node')
+                                       roles=Role.INSTALLER)
         else:
             raise Exception(
                 'Cannot establish connection to the installer node!')
index 22196c8..e40aa7f 100644 (file)
@@ -44,7 +44,9 @@ class TestCase(object):
                                'connection_check': 'Health (connection)',
                                'api_check': 'Health (api)',
                                'snaps_smoke': 'SNAPS',
-                               'snaps_health_check': 'Health (dhcp)'}
+                               'snaps_health_check': 'Health (dhcp)',
+                               'gluon_vping': 'Netready',
+                               'barometercollectd': 'Barometer'}
         try:
             self.displayName = display_name_matrix[self.name]
         except:
@@ -140,8 +142,9 @@ class TestCase(object):
                              'connection_check': 'connection_check',
                              'api_check': 'api_check',
                              'snaps_smoke': 'snaps_smoke',
-                             'snaps_health_check': 'snaps_health_check'
-                             }
+                             'snaps_health_check': 'snaps_health_check',
+                             'gluon_vping': 'gluon_vping',
+                             'barometercollectd': 'barometercollectd'}
         try:
             return test_match_matrix[self.name]
         except: