Merge "doc: fix logo setting for sphinx-build"
authorRyota MIBU <r-mibu@cq.jp.nec.com>
Mon, 23 Nov 2015 15:28:23 +0000 (15:28 +0000)
committerGerrit Code Review <gerrit@172.30.200.206>
Mon, 23 Nov 2015 15:28:23 +0000 (15:28 +0000)
21 files changed:
jjb/fuel/fuel-build.sh
jjb/fuel/fuel-deploy-virtual.sh [new file with mode: 0755]
jjb/fuel/fuel-deploy.sh
jjb/fuel/fuel-download-artifact.sh
jjb/fuel/fuel-upload-artifact.sh
jjb/fuel/fuel-virtual-deploy.sh [deleted file]
jjb/fuel/fuel.yml
jjb/functest/functest-docker.sh
jjb/functest/functest.yml
jjb/genesis/genesis-opensteak.yml [deleted file]
jjb/kvmfornfv/kvmfornfv-build.sh [new file with mode: 0755]
jjb/kvmfornfv/kvmfornfv.yml
jjb/opnfv/opnfv-docs.yml
jjb/releng-defaults.yaml
jjb/releng-macros.yaml
utils/docu-build-new.sh [deleted file]
utils/fetch_os_creds.sh
utils/jenkins-jnlp-connect.sh
utils/test/result_collection_api/dashboard/functest2Dashboard.py
utils/test/result_collection_api/dashboard/vsperf2Dashboard.py [new file with mode: 0755]
utils/test/result_collection_api/resources/handlers.py

index 178a50c..cffd867 100755 (executable)
@@ -12,7 +12,12 @@ echo
 [[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
 
 # set OPNFV_ARTIFACT_VERSION
-export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+if [[ "$JOB_NAME" =~ "merge" ]]; then
+    echo "Building Fuel ISO for a merged change"
+    export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
+else
+    export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+fi
 
 # start the build
 cd $WORKSPACE/$INSTALLER/ci
diff --git a/jjb/fuel/fuel-deploy-virtual.sh b/jjb/fuel/fuel-deploy-virtual.sh
new file mode 100755 (executable)
index 0000000..ff4f8c3
--- /dev/null
@@ -0,0 +1,41 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# source the file so we get OPNFV vars
+source latest.properties
+
+# echo the info about artifact that is used during the deployment
+echo "Using $(echo $OPNFV_ARTIFACT_URL | cut -d'/' -f3) for deployment"
+
+# create TMPDIR if it doesn't exist
+export TMPDIR=$HOME/tmpdir
+[[ -d $TMPDIR ]] || mkdir -p $TMPDIR
+
+# change permissions down to TMPDIR
+chmod a+x $HOME
+chmod a+x $TMPDIR
+
+# set CONFDIR, BRIDGE
+CONFDIR=$WORKSPACE/fuel/deploy/templates/virtual_environment/conf
+BRIDGE=pxebr
+
+# log info to console
+echo "Starting the deployment for a merged change using $INSTALLER. This could take some time..."
+echo "--------------------------------------------------------"
+echo
+
+# start the deployment
+echo "Issuing command"
+echo "sudo $WORKSPACE/fuel/ci/deploy.sh -iso $WORKSPACE/opnfv.iso -dea $CONFDIR/dea.yaml -dha $CONFDIR/dha.yaml -s $TMPDIR -b $BRIDGE -nh"
+
+sudo $WORKSPACE/fuel/ci/deploy.sh -iso $WORKSPACE/opnfv.iso -dea $CONFDIR/dea.yaml -dha $CONFDIR/dha.yaml -s $TMPDIR -b $BRIDGE -nh
+
+echo
+echo "--------------------------------------------------------"
+echo "Virtual deployment is done! Removing the intermediate files from artifact repo"
+
+PROPERTIES_FILE=$(echo $OPNFV_ARTIFACT_URL | sed 's/iso/properties/')
+gsutil rm gs://$OPNFV_ARTIFACT_URL
+gsutil rm gs://$PROPERTIES_FILE
index bb0e2b1..2ec5195 100755 (executable)
@@ -18,8 +18,8 @@ chmod a+x $HOME
 chmod a+x $TMPDIR
 
 # set CONFDIR, BRIDGE
-export CONFDIR=$WORKSPACE/fuel/deploy/templates/hardware_environment/conf/linux_foundation_lab/pod2
-export BRIDGE=pxebr
+CONFDIR=$WORKSPACE/fuel/deploy/templates/hardware_environment/conf/linux_foundation_lab/pod2
+BRIDGE=pxebr
 
 # clone genesis repo and checkout the SR1 tag
 echo "Cloning genesis repo"
@@ -42,6 +42,7 @@ echo
 # start the deployment
 echo "Issuing command"
 echo "sudo $WORKSPACE/fuel/ci/deploy.sh -iso $WORKSPACE/opnfv.iso -dea $CONFDIR/dea.yaml -dha $CONFDIR/dha.yaml -s $TMPDIR -b $BRIDGE -nh"
+
 sudo $WORKSPACE/fuel/ci/deploy.sh -iso $WORKSPACE/opnfv.iso -dea $CONFDIR/dea.yaml -dha $CONFDIR/dha.yaml -s $TMPDIR -b $BRIDGE -nh
 
 echo
index 6eb1ba4..05dc05e 100755 (executable)
@@ -3,8 +3,13 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-# get the latest.properties file in order to get info regarding latest artifact
-curl -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
+if [[ "$JOB_NAME" =~ "merge" ]]; then
+    # get the properties file for the Fuel ISO built for a merged change
+    curl -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
+else
+    # get the latest.properties file in order to get info regarding latest artifact
+    curl -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
+fi
 
 # check if we got the file
 [[ -f latest.properties ]] || exit 1
index 3b700c6..2783f2c 100755 (executable)
@@ -14,7 +14,11 @@ source $WORKSPACE/opnfv.properties
 # upload artifact and additional files to google storage
 gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
 gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
+if [[ "$JOB_NAME" =~ "daily" ]]; then
+    gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
+elif [[ "$JOB_NAME" =~ "merge" ]]; then
+    echo "Uploaded Fuel ISO for a merged change"
+fi
 
 echo
 echo "--------------------------------------------------------"
diff --git a/jjb/fuel/fuel-virtual-deploy.sh b/jjb/fuel/fuel-virtual-deploy.sh
deleted file mode 100755 (executable)
index 4e0b2dc..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-echo "Placeholder for Fuel virtual deploy"
index 1f53454..60a69c6 100644 (file)
@@ -11,9 +11,8 @@
 
     jobs:
         - 'fuel-verify-build-{stream}'
-        - 'fuel-verify-virtual-deploy-{stream}'
         - 'fuel-merge-build-{stream}'
-        - 'fuel-merge-virtual-deploy-{stream}'
+        - 'fuel-merge-deploy-virtual-{stream}'
         - 'fuel-daily-{stream}'
         - 'fuel-build-{stream}'
         - 'fuel-deploy-{stream}'
     builders:
         - shell:
             !include-raw ./fuel-build.sh
-#        - shell:
-#            !include-raw ./fuel-upload-artifact.sh
-#        - shell:
-#            !include-raw ./fuel-workspace-cleanup.sh
-
-- job-template:
-    name: 'fuel-verify-virtual-deploy-{stream}'
-
-    project-type: freestyle
-
-    node: ericsson-build
-
-    concurrent: true
-
-    properties:
-        - throttle:
-            enabled: true
-            max-total: 2
-            max-per-node: 1
-
-    logrotate:
-        daysToKeep: 30
-        numToKeep: 10
-        artifactDaysToKeep: -1
-        artifactNumToKeep: -1
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-        - gerrit-parameter:
-            branch: '{branch}'
-        - fuel-parameter:
-            installer: '{installer}'
-            gs-pathname: '{gs-pathname}'
-
-    scm:
-        - gerrit-trigger-scm:
-            credentials-id: '{ssh-credentials}'
-            refspec: '$GERRIT_REFSPEC'
-            choosing-strategy: 'gerrit'
-
-    wrappers:
-        - ssh-agent-credentials:
-            user: '{ssh-credentials}'
-
-    triggers:
-        - gerrit:
-            trigger-on:
-                - patchset-created-event:
-                    exclude-drafts: 'false'
-                    exclude-trivial-rebase: 'false'
-                    exclude-no-code-change: 'false'
-                - draft-published-event
-                - comment-added-contains-event:
-                    comment-contains-value: 'recheck'
-                - comment-added-contains-event:
-                    comment-contains-value: 'reverify'
-            projects:
-              - project-compare-type: 'ANT'
-                project-pattern: '{project}'
-                branches:
-                  - branch-compare-type: 'ANT'
-                    branch-pattern: '**/{branch}'
-            dependency-jobs: 'fuel-verify-build-{stream}'
-
-    builders:
-#        - shell:
-#            !include-raw ./fuel-download-artifact.sh
         - shell:
-            !include-raw ./fuel-virtual-deploy.sh
-#        - shell:
-#            !include-raw ./fuel-workspace-cleanup.sh
+            !include-raw ./fuel-workspace-cleanup.sh
 
 - job-template:
     name: 'fuel-merge-build-{stream}'
     builders:
         - shell:
             !include-raw ./fuel-build.sh
-#        - shell:
-#            !include-raw ./fuel-upload-artifact.sh
-#        - shell:
-#            !include-raw ./fuel-workspace-cleanup.sh
+        - shell:
+            !include-raw ./fuel-upload-artifact.sh
+        - shell:
+            !include-raw ./fuel-workspace-cleanup.sh
 
 - job-template:
-    name: 'fuel-merge-virtual-deploy-{stream}'
+    name: 'fuel-merge-deploy-virtual-{stream}'
 
     project-type: freestyle
 
-    node: ericsson-build
+    node: fuel-deploy-virtual
 
     concurrent: true
 
             dependency-jobs: 'fuel-merge-build-{stream}'
 
     builders:
-#        - shell:
-#            !include-raw ./fuel-download-artifact.sh
         - shell:
-            !include-raw ./fuel-virtual-deploy.sh
-#        - shell:
-#            !include-raw ./fuel-workspace-cleanup.sh
+            !include-raw ./fuel-download-artifact.sh
+        - shell:
+            !include-raw ./fuel-deploy-virtual.sh
+        - shell:
+            !include-raw ./fuel-workspace-cleanup.sh
+
+    publishers:
+        - email:
+            recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com
 
 - job-template:
     name: 'fuel-daily-{stream}'
index 54315ca..c73bcf9 100644 (file)
@@ -26,6 +26,7 @@ else
     echo "Tag version to be build and pushed: $DOCKER_TAG"
 fi
 
+
 # Remove previous running containers if exist
 if [[ ! -z $(docker ps -a | grep $DOCKER_IMAGE_NAME) ]]; then
     echo "Removing existing $DOCKER_IMAGE_NAME containers..."
@@ -33,20 +34,21 @@ if [[ ! -z $(docker ps -a | grep $DOCKER_IMAGE_NAME) ]]; then
     docker ps -a | grep $DOCKER_IMAGE_NAME | awk '{{print $1}}' | xargs docker rm
 fi
 
-# list the images
-echo "Available images are:"
-docker images
 
 # Remove existing images if exist
 if [[ ! -z $(docker images | grep $DOCKER_IMAGE_NAME) ]]; then
-    echo "Removing existing $DOCKER_IMAGE_NAME images..."
-    docker images | grep $DOCKER_IMAGE_NAME | awk '{{print $3}}' \
-        | xargs docker rmi -f
+    echo "Docker images to remove:"
+    docker images | head -1 && docker images | grep $DOCKER_IMAGE_NAME
+    image_tags=($(docker images | grep $DOCKER_IMAGE_NAME | awk '{{print $2}}'))
+    for tag in "${{image_tags[@]}}"; do
+        echo "Removing docker image $DOCKER_IMAGE_NAME:$tag..."
+        docker rmi $DOCKER_IMAGE_NAME:$tag
+    done
 fi
 
 
 # Start the build
-echo "Building of $DOCKER_IMAGE_NAME:$DOCKER_TAG..."
+echo "Building docker image: $DOCKER_IMAGE_NAME:$DOCKER_TAG..."
 cd $WORKSPACE/docker
 docker build -t $DOCKER_IMAGE_NAME:$DOCKER_TAG .
 echo "Creating tag 'latest'..."
index 598bcf2..4df779d 100644 (file)
         artifactNumToKeep: -1
 
     builders:
+        - 'functest-cleanup'
         - 'set-functest-env'
         - 'functest-all'
         - 'functest-store-results'
-        - 'functest-cleanup'
 
 - job-template:
     name: functest-{installer}-{stream}
         artifactNumToKeep: -1
 
     builders:
+        - 'functest-cleanup'
         - 'set-functest-env'
         - 'functest-all'
         - 'functest-store-results'
-        - 'functest-cleanup'
 
 - job-template:
     name: functest-vims-{installer}-{stream}
 
             echo "Functest: Start Docker and prepare environment"
             envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}"
-            docker ps -a | grep opnfv/functest | awk '{print $1}' | xargs docker rm -f &>/dev/null
-            docker pull opnfv/functest
+            docker pull opnfv/functest:latest_stable
             echo "Functest: Running docker run command: docker run -i -e $envs opnfv/functest /bin/bash &"
             docker run -i -e $envs opnfv/functest /bin/bash &
             docker ps -a
             set +e
 
             # cleanup: remove any docker containers leftovers
-            echo "Removing the docker container..."
+            echo "Removing existing Functest Docker containers..."
             docker rm -f $(docker ps | grep opnfv/functest | awk '{print $1}')
-            echo "Removing the docker image..."
+            echo "Removing existing Functest Docker image..."
             docker rmi -f $(docker images | grep opnfv/functest | awk '{print $3}')
-
diff --git a/jjb/genesis/genesis-opensteak.yml b/jjb/genesis/genesis-opensteak.yml
deleted file mode 100644 (file)
index f232235..0000000
+++ /dev/null
@@ -1,219 +0,0 @@
-# this is the job configuration for bgs
-- project:
-
-    name: genesis-opensteak
-
-    installer:
-        - opensteak
-    jobs:
-        - 'genesis-opensteak-verify'
-        - 'genesis-opensteak-merge'
-        - 'genesis-opensteak-daily-{stream}'
-
-    # stream:    branch with - in place of / (eg. stable-helium)
-    # branch:    branch (eg. stable/helium)
-    stream:
-        - master:
-            branch: 'master'
-
-    project: 'genesis'
-
-########################
-# job templates
-########################
-
-- job-template:
-    name: 'genesis-opensteak-verify'
-
-    project-type: freestyle
-
-    node: ericsson-build
-
-    logrotate:
-        daysToKeep: 30
-        numToKeep: 10
-        artifactDaysToKeep: -1
-        artifactNumToKeep: -1
-
-    parameters:
-        - string:
-            name: BUILD_DIRECTORY
-            default: $WORKSPACE/build_output
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/genesis
-        - project-parameter:
-            project: '{project}'
-        - gerrit-parameter:
-            branch: 'master'
-
-    scm:
-        - gerrit-trigger-scm:
-            credentials-id: '{ssh-credentials}'
-            refspec: '$GERRIT_REFSPEC'
-            choosing-strategy: 'gerrit'
-
-    wrappers:
-        - ssh-agent-credentials:
-            user: '{ssh-credentials}'
-
-    triggers:
-        - gerrit:
-            trigger-on:
-                - patchset-created-event:
-                    exclude-drafts: 'false'
-                    exclude-trivial-rebase: 'false'
-                    exclude-no-code-change: 'false'
-                - draft-published-event
-                - comment-added-contains-event:
-                    comment-contains-value: 'recheck'
-                - comment-added-contains-event:
-                    comment-contains-value: 'reverify'
-            projects:
-              - project-compare-type: 'ANT'
-                project-pattern: 'genesis'
-                branches:
-                  - branch-compare-type: 'ANT'
-                    branch-pattern: '**/master'
-                file-paths:
-                  - compare-type: ANT
-                    pattern: 'common/**'
-                  - compare-type: ANT
-                    pattern: 'opensteak/**'
-
-
-    builders:
-        - 'opensteak-verify'
-
-- job-template:
-    name: 'genesis-opensteak-merge'
-
-    # builder-merge job to run JJB update
-    #
-    # This job's purpose is to update all the JJB
-
-    project-type: freestyle
-
-    node: ericsson-build
-
-    logrotate:
-        daysToKeep: 30
-        numToKeep: 40
-        artifactDaysToKeep: -1
-        artifactNumToKeep: 5
-
-    parameters:
-        - string:
-            name: BUILD_DIRECTORY
-            default: $WORKSPACE/build_output
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/genesis
-        - project-parameter:
-            project: '{project}'
-        - gerrit-parameter:
-            branch: 'master'
-
-    scm:
-        - gerrit-trigger-scm:
-            credentials-id: '{ssh-credentials}'
-            refspec: ''
-            choosing-strategy: 'default'
-
-    wrappers:
-        - ssh-agent-credentials:
-            user: '{ssh-credentials}'
-
-    triggers:
-        - gerrit:
-            trigger-on:
-                - change-merged-event
-                - comment-added-contains-event:
-                    comment-contains-value: 'remerge'
-            projects:
-              - project-compare-type: 'ANT'
-                project-pattern: 'genesis'
-                branches:
-                    - branch-compare-type: 'ANT'
-                      branch-pattern: '**/master'
-                file-paths:
-                  - compare-type: ANT
-                    pattern: 'common/**'
-                  - compare-type: ANT
-                    pattern: 'opensteak/**'
-
-    builders:
-        - 'opensteak-merge'
-
-- job-template:
-    name: 'genesis-opensteak-daily-{stream}'
-
-    project-type: freestyle
-
-    node: ericsson-build
-
-    parameters:
-        - string:
-            name: BUILD_DIRECTORY
-            default: $WORKSPACE/build_output
-            description: "Directory where the build artifact will be located upon the completion of the build."
-        - string:
-            name: GS_URL
-            default: 'artifacts.opnfv.org/genesis/opensteak'
-            description: "URL to Google Storage."
-        - string:
-            name: INSTALLER
-            default: 'opensteak'
-            description: "Installer to use."
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/genesis
-        - string:
-            name: GERRIT_BRANCH
-            default: origin/master
-            description: "Branch to build, deploy and test."
-        - string:
-            name: GERRIT_REFSPEC
-            default: refs/heads/master
-            description: "Refspec to retrieve."
-
-    scm:
-        - git:
-            skip-tag: true
-            url: $GIT_BASE
-            branches:
-                - $GERRIT_BRANCH
-            refspec: $GERRIT_REFSPEC
-
-    triggers:
-        - pollscm: '@midnight'
-
-    logrotate:
-        daysToKeep: 30
-        numToKeep: 10
-        artifactDaysToKeep: -1
-        artifactNumToKeep: -1
-
-    builders:
-        - 'opensteak-daily-master'
-
-- builder:
-    name: opensteak-verify
-    builders:
-        - shell: |
-            #!/bin/bash
-            echo "Hello World!"
-
-- builder:
-    name: opensteak-merge
-    builders:
-        - shell: |
-            #!/bin/bash
-            echo "Hello World!"
-
-- builder:
-    name: opensteak-daily-master
-    builders:
-        - shell: |
-            #!/bin/bash
-            echo "Hello World!"
diff --git a/jjb/kvmfornfv/kvmfornfv-build.sh b/jjb/kvmfornfv/kvmfornfv-build.sh
new file mode 100755 (executable)
index 0000000..4e00a9d
--- /dev/null
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+# build output directory
+OUTPUT_DIR=$WORKSPACE/build_output
+mkdir -p $OUTPUT_DIR
+
+# start the build
+cd $WORKSPACE
+./ci/build.sh $OUTPUT_DIR
index 17f7cfe..345edca 100644 (file)
@@ -1,9 +1,8 @@
 - project:
     name: kvmfornfv
     jobs:
-        - 'kvmfornfv-verify'
-        - 'kvmfornfv-merge'
-        - 'kvmfornfv-daily-{stream}'
+        - 'kvmfornfv-verify-{stream}'
+        - 'kvmfornfv-merge-{stream}'
 
     # stream:    branch with - in place of / (eg. stable-arno)
     # branch:    branch (eg. stable/arno)
@@ -14,7 +13,7 @@
     project: 'kvmfornfv'
 
 - job-template:
-    name: 'kvmfornfv-verify'
+    name: 'kvmfornfv-verify-{stream}'
 
     node: ericsson-build
 
 
     builders:
         - shell:
-            echo "Hello World"
+            !include-raw ./kvmfornfv-build.sh
 
 - job-template:
-    name: 'kvmfornfv-merge'
+    name: 'kvmfornfv-merge-{stream}'
 
     # builder-merge job to run JJB update
     #
     # This job's purpose is to update all the JJB
 
-    node: master
+    node: ericsson-build
 
     project-type: freestyle
 
 
     builders:
         - shell:
-            echo "Hello World"
-
-
-- job-template:
-    name: 'kvmfornfv-daily-{stream}'
-
-    # Job template for daily builders
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
-    node: master
-
-    disabled: true
-
-    project-type: freestyle
-
-    logrotate:
-        daysToKeep: '{build-days-to-keep}'
-        numToKeep: '{build-num-to-keep}'
-        artifactDaysToKeep: '{build-artifact-days-to-keep}'
-        artifactNumToKeep: '{build-artifact-num-to-keep}'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-
-    scm:
-        - git-scm:
-            credentials-id: '{ssh-credentials}'
-            refspec: ''
-            branch: '{branch}'
-
-    wrappers:
-        - ssh-agent-credentials:
-            user: '{ssh-credentials}'
-
-    triggers:
-        - timed: 'H H * * *'
-
-    builders:
-        - shell:
-             echo "Hello World"
+            !include-raw ./kvmfornfv-build.sh
index efb6b5b..b761740 100644 (file)
@@ -74,8 +74,8 @@
                     pattern: 'docs/**'
 
     builders:
-       - shell:
-           !include-raw ../../utils/docu-build-new.sh
+        - build-html-and-pdf-docs-output
+        - upload-under-review-docs-to-opnfv-artifacts
 
 - job-template:
     name: 'opnfv-docs-merge'
                     pattern: 'docs/**'
 
     builders:
-       - shell:
-           !include-raw ../../utils/docu-build-new.sh
+        - build-html-and-pdf-docs-output
+        - upload-merged-docs-to-opnfv-artifacts
+        - remove-old-docs-from-opnfv-artifacts
index a78a37e..0292f5f 100644 (file)
@@ -9,4 +9,3 @@
     build-artifact-num-to-keep: 5
 
     ssh-credentials: 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e'
-
index 2c694c8..86680f2 100644 (file)
     name: jacoco-nojava-workaround
     builders:
         - shell: 'mkdir -p $WORKSPACE/target/classes'
+
+
+# New Releng macros
+
+- builder:
+    name: build-html-and-pdf-docs-output
+    builders:
+        - shell: |
+            #!/bin/bash -e
+            set -o pipefail
+            export PATH=$PATH:/usr/local/bin/
+
+            [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
+
+            git_sha1="$(git rev-parse HEAD)"
+
+            find docs/ -type f -iname '*.rst' -print0 | while read file
+            do
+                sed -i "s/_sha1_/$git_sha1/g" "$file"
+            done
+
+            find docs/ -name 'index.rst' -printf '%h\n' | while read dir
+            do
+                _name="${dir##*/}"
+                _build="$dir/build"
+                _output="docs/output/$_name"
+
+                echo
+                echo "#################${dir//?/#}"
+                echo "Building DOCS in $dir"
+                echo "#################${dir//?/#}"
+                echo
+
+                mkdir -p "$_output"
+
+                sphinx-build -b html -E -c docs/etc "$dir" "$_output"
+
+                sphinx-build -b latex -E -c docs/etc -D project=$_name "$dir" "$_build"
+                make -C "$_build" LATEXOPTS='--interaction=nonstopmode' all-pdf
+                mv "$_build"/*.pdf "$_output"
+            done
+
+#TODO(r-mibu): change this to publisher
+- builder:
+    name: upload-under-review-docs-to-opnfv-artifacts
+    builders:
+        - shell: |
+            #!/bin/bash -e
+            set -o pipefail
+            export PATH=$PATH:/usr/local/bin/
+
+            [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
+            [[ -d docs/output ]]
+
+            echo
+            echo "###########################"
+            echo "UPLOADING DOCS UNDER REVIEW"
+            echo "###########################"
+            echo
+
+            gs_path="artifacts.opnfv.org/review/$GERRIT_CHANGE_NUMBER"
+
+            gsutil -m cp -r docs/output/* "gs://$gs_path"
+
+            if gsutil ls "gs://$gs_path" | grep -e 'html$' > /dev/null 2>&1 ; then
+                gsutil -m setmeta \
+                    -h "Content-Type:text/html" \
+                    -h "Cache-Control:private, max-age=0, no-transform" \
+                    "gs://$gs_path"/**.html
+            fi
+
+            files=$(find docs/output | grep -e 'index.html$' -e 'pdf$' | \
+                    sed -e "s|^docs/output|    http://$gs_path|")
+            gerrit_comment="Document link(s):
+            $files"
+            echo
+            echo "$gerrit_comment"
+            echo
+            ssh -p 29418 gerrit.opnfv.org "gerrit review -p $GERRIT_PROJECT -m '$gerrit_comment' $GERRIT_PATCHSET_REVISION"
+
+#TODO(r-mibu): change this to publisher
+- builder:
+    name: upload-merged-docs-to-opnfv-artifacts
+    builders:
+        - shell: |
+            #!/bin/bash -e
+            set -o pipefail
+            export PATH=$PATH:/usr/local/bin/
+
+            [[ -d docs/output ]]
+
+            echo
+            echo "#####################"
+            echo "UPLOADING MERGED DOCS"
+            echo "#####################"
+            echo
+
+            if [[ "$GERRIT_BRANCH" == "master" ]] ; then
+                gs_path="artifacts.opnfv.org/$GERRIT_PROJECT/docs"
+            else
+                gs_path="artifacts.opnfv.org/$GERRIT_PROJECT/$GERRIT_BRANCH/docs"
+            fi
+
+            gsutil -m cp -r docs/output/* "gs://$gs_path"
+
+            if gsutil ls "gs://$gs_path" | grep -e 'html$' > /dev/null 2>&1 ; then
+                gsutil -m setmeta \
+                    -h "Content-Type:text/html" \
+                    -h "Cache-Control:private, max-age=0, no-transform" \
+                    "gs://$gs_path"/**.html
+            fi
+
+            files=$(find docs/output | grep -e 'index.html$' -e 'pdf$' | \
+                    sed -e "s|^docs/output|    http://$gs_path|")
+            gerrit_comment="Document link(s):
+            $files"
+            echo
+            echo "$gerrit_comment"
+            echo
+            ssh -p 29418 gerrit.opnfv.org "gerrit review -p $GERRIT_PROJECT -m '$gerrit_comment' $GERRIT_PATCHSET_REVISION"
+
+#TODO(r-mibu): change this to publisher
+- builder:
+    name: remove-old-docs-from-opnfv-artifacts
+    builders:
+        - shell: |
+            #!/bin/bash -e
+            export PATH=$PATH:/usr/local/bin/
+
+            [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
+
+            gs_path="artifacts.opnfv.org/review/$GERRIT_CHANGE_NUMBER"
+
+            if gsutil ls "gs://$gs_path" > /dev/null 2>&1 ; then
+                echo
+                echo "Deleting Out-of-dated Documents..."
+                gsutil -m rm -r "gs://$gs_path"
+            fi
diff --git a/utils/docu-build-new.sh b/utils/docu-build-new.sh
deleted file mode 100755 (executable)
index 67a62e3..0000000
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/bin/bash
-set -e
-set -o pipefail
-
-export PATH=$PATH:/usr/local/bin/
-git_sha1="$(git rev-parse HEAD)"
-
-clean() {{
-if [[ -d docs/output ]]; then
-rm -rf docs/output
-echo "cleaning up output directory"
-fi
-}}
-
-trap clean EXIT TERM INT SIGTERM SIGHUP
-
-#set git_sha1
-files=()
-while read -r -d ''; do
-  files+=("$REPLY")
-done < <(find docs/ -type f -iname '*.rst' -print0)
-for file in "${{files[@]}}"; do
-  sed -i "s/_sha1_/$git_sha1/g" $file
-done
-
-directories=()
-while read -d $'\n'; do
-  directories+=("$REPLY")
-done < <(find docs/ -name 'index.rst' -printf '%h\n' | sort -u )
-
-for dir in "${{directories[@]}}"; do
-  _name="${{dir##*/}}"
-  _build="${{dir}}/build"
-  _output="docs/output/${{_name}}"
-  echo
-  echo "#################${{_name//?/#}}"
-  echo "Building DOCS in ${{_name}}"
-  echo "#################${{_name//?/#}}"
-  echo
-
-  mkdir -p "${{_output}}"
-
-  sphinx-build -b html -E -c docs/etc "${{dir}}" "${{_output}}"
-
-  sphinx-build -b latex -E -c docs/etc "${{dir}}" "${{_build}}"
-  make -C "${{_build}}" LATEXOPTS='--interaction=nonstopmode' all-pdf
-  mv "${{_build}}"/*.pdf "${{_output}}"
-
-done
-
-# NOTE: make sure source parameters for GS paths are not empty.
-[[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
-[[ $GERRIT_PROJECT =~ .+ ]]
-[[ $GERRIT_BRANCH =~ .+ ]]
-
-gs_path_review="artifacts.opnfv.org/review/$GERRIT_CHANGE_NUMBER"
-
-if [[ $GERRIT_BRANCH = "master" ]] ; then
-  gs_path_branch="artifacts.opnfv.org/$GERRIT_PROJECT"
-else
-  gs_path_branch="artifacts.opnfv.org/$GERRIT_PROJECT/${{GERRIT_BRANCH##*/}}"
-fi
-
-for dir in "${{directories[@]}}"; do
-  echo
-  echo "#############################"
-  echo "UPLOADING DOCS in ${{dir##*/}}"
-  echo "#############################"
-  echo
-
-
-  if [[ $JOB_NAME =~ "verify" ]] ; then
-
-    #upload artifacts for verify job
-    gsutil cp -r docs/output/"${{dir##*/}}/" "gs://$gs_path_review/"
-
-    # post link to gerrit as comment
-    gerrit_comment="$(echo '"Document is available at 'http://$gs_path_review/"${{dir##*/}}"/index.html' for review"')"
-    echo "$gerrit_comment"
-    ssh -p 29418 gerrit.opnfv.org gerrit review -p $GERRIT_PROJECT -m \
-    "$gerrit_comment" $GERRIT_PATCHSET_REVISION
-
-    #set cache to 0
-    for x in $(gsutil ls gs://$gs_path_review/"${{dir##*/}}" | grep html);
-    do
-      gsutil setmeta -h "Content-Type:text/html" \
-      -h "Cache-Control:private, max-age=0, no-transform" \
-      "$x"
-    done
-
-  else
-
-    #upload artifacts for merge job
-    gsutil cp -r docs/output/"${{dir##*/}}" "gs://$gs_path_branch/docs/"
-    echo "Latest document is available at http://$gs_path_branch/docs/"${{dir##*/}}"/index.html"
-
-    #set cache to 0
-    for x in $(gsutil ls gs://$gs_path_branch/"${{dir}}" | grep html);
-    do
-      gsutil setmeta -h "Content-Type:text/html" \
-      -h "Cache-Control:private, max-age=0, no-transform" \
-      "$x"
-    done
-
-    #Clean up review when merging
-    if gsutil ls "gs://$gs_path_review" > /dev/null 2>&1 ; then
-      echo
-      echo "Deleting Out-of-dated Documents..."
-      gsutil rm -r "gs://$gs_path_review"
-    fi
-
-  fi
-
-done
index cefc857..7a5f812 100755 (executable)
@@ -126,14 +126,12 @@ elif [ "$installer_type" == "foreman" ]; then
         | grep $admin_ip | sed 's/ /\n/g' | grep ^http | head -1) &> /dev/null
 
 elif [ "$installer_type" == "compass" ]; then
-    #ip_compass="10.1.0.12"
     verify_connectivity $installer_ip
-
-    # controller_ip='10.1.0.222'
-    controller_ip=$(sshpass -p'root' ssh 2>/dev/null -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no root@10.1.0.12 \
-        'mysql -ucompass -pcompass -Dcompass -e"select package_config  from cluster;"' \
-        | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"ha_proxy\": {\"vip\":/)print $i}' \
+    controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
+        'mysql -ucompass -pcompass -Dcompass -e"select *  from cluster;"' \
+        | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"host1\"/) {print $(i+1);break;}}'  \
         | grep -oP "\d+.\d+.\d+.\d+")
+
     if [ -z $controller_ip ]; then
         error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
     fi
index 03e47b8..d263b19 100755 (executable)
@@ -78,7 +78,7 @@ makemonit () {
 echo "Writing the following as monit config:"
 cat << EOF | tee $monitconfdir/jenkins
 check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = "/bin/bash -c 'cd $dir; export started_monit=true; $0 $@'" as uid "$jenkinsuser" and gid "$jenkinsuser"
+start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'"
 stop program = "/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'"
 EOF
 }
@@ -87,7 +87,7 @@ if [[ -f $monitconfdir/jenkins ]]; then
   #test for diff
   if [[ "$(diff $monitconfdir/jenkins <(echo "\
 check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = \"/bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\" as uid \"$jenkinsuser\" and gid \"$jenkinsuser\"
+start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\"
 stop program = \" /bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'\"\
 ") )" ]]; then
     echo "Updating monit config..."
@@ -169,7 +169,7 @@ do
                 s ) slave_secret="$OPTARG";;
                 h ) usage; exit;;
                 t ) started_monit=true
-                    skip_monit=true 
+                    skip_monit=true
                     run_in_foreground=true ;;
                 f ) test_firewall ;;
                 \? ) echo "Unknown option: -$OPTARG" >&2; exit 1;;
index 688f0c2..bfb7c87 100644 (file)
@@ -21,7 +21,7 @@ def get_functest_cases():
     get the list of the supported test cases
     TODO: update the list when adding a new test case for the dashboard
     """
-    return ["vPing", "Tempest", "odl", "Rally"]
+    return ["status", "vPing", "vIMS", "Tempest", "odl", "Rally"]
 
 
 def format_functest_for_dashboard(case, results):
@@ -53,6 +53,113 @@ def check_functest_case_exist(case):
         return True
 
 
+def format_status_for_dashboard(results):
+    test_data = [{'description': 'Functest status'}]
+
+    # define magic equation for the status....
+    # 5 suites: vPing, odl, Tempest, vIMS, Rally
+    # Which overall KPI make sense...
+
+    # TODO to be done and discussed
+    testcases = get_functest_cases()
+    test_data.append({'nb test suite(s) run': len(testcases)-1})
+    # test_data.append({'nb test suite(s) failed':1})
+    # test_data.append({'test suite run': ['vPing', 'tempest', 'vIMS' ]})
+    # test_data.append({'average Openstack Tempest failure rate (%)': 10})
+    # test_data.append({'average odl failure rate (%)': 10})
+
+    return test_data
+
+
+def format_vIMS_for_dashboard(results):
+    """
+    Post processing for the vIMS test case
+    """
+    test_data = [{'description': 'vIMS results for Dashboard'}]
+
+    # Graph 1: (duration_deployment_orchestrator,
+    #            duration_deployment_vnf,
+    #             duration_test) = f(time)
+    # ********************************
+    new_element = []
+
+    for data in results:
+        new_element.append({'x': data['creation_date'],
+                            'y1': data['details']['orchestrator']['duration'],
+                            'y2': data['details']['vIMS']['duration'],
+                            'y3': data['details']['sig_test']['duration']})
+
+    test_data.append({'name': "vIMS orchestrator/vnf/test durations",
+                      'info': {'type': "graph",
+                               'xlabel': 'time',
+                               'y1label': 'orchestation deployment duration',
+                               'y2label': 'vIMS deployment duration',
+                               'y3label': 'vIMS test duration'},
+                      'data_set': new_element})
+
+    # Graph 2: (Nb test, nb failure, nb skipped)=f(time)
+    # **************************************************
+    new_element = []
+
+    for data in results:
+        # Retrieve all the tests
+        nbTests = 0
+        nbFailures = 0
+        nbSkipped = 0
+        vIMS_test = data['details']['sig_test']['result']
+
+        for data_test in vIMS_test:
+            # Calculate nb of tests run and nb of tests failed
+            # vIMS_results = get_vIMSresults(vIMS_test)
+            # print vIMS_results
+            if data_test['result'] == "Passed":
+                nbTests += 1
+            elif data_test['result'] == "Failed":
+                nbFailures += 1
+            elif data_test['result'] == "Skipped":
+                nbSkipped += 1
+
+        new_element.append({'x': data['creation_date'],
+                            'y1': nbTests,
+                            'y2': nbFailures,
+                            'y3': nbSkipped})
+
+    test_data.append({'name': "vIMS nb tests passed/failed/skipped",
+                      'info': {'type': "graph",
+                               'xlabel': 'time',
+                               'y1label': 'Number of tests passed',
+                               'y2label': 'Number of tests failed',
+                               'y3label': 'Number of tests skipped'},
+                      'data_set': new_element})
+
+    # Graph 3: bar graph Summ(nb tests run), Sum (nb tests failed)
+    # ********************************************************
+    nbTests = 0
+    nbFailures = 0
+
+    for data in results:
+        vIMS_test = data['details']['sig_test']['result']
+
+        for data_test in vIMS_test:
+            nbTestsOK = 0
+            nbTestsKO = 0
+
+            if data_test['result'] == "Passed":
+                nbTestsOK += 1
+            elif data_test['result'] == "Failed":
+                nbTestsKO += 1
+
+            nbTests += nbTestsOK + nbTestsKO
+            nbFailures += nbTestsKO
+
+    test_data.append({'name': "Total number of tests run/failure tests",
+                      'info': {"type": "bar"},
+                      'data_set': [{'Run': nbTests,
+                                    'Failed': nbFailures}]})
+
+    return test_data
+
+
 def format_Tempest_for_dashboard(results):
     """
     Post processing for the Tempest test case
diff --git a/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py b/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py
new file mode 100755 (executable)
index 0000000..323d391
--- /dev/null
@@ -0,0 +1,121 @@
+#!/usr/bin/python
+
+# Copyright 2015 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def get_vsperf_cases():
+    """
+    get the list of the supported test cases
+    TODO: update the list when adding a new test case for the dashboard
+    """
+    return ["tput_ovsdpdk", "tput_ovs",
+            "b2b_ovsdpdk", "b2b_ovs",
+            "tput_mod_vlan_ovsdpdk", "tput_mod_vlan_ovs",
+            "cont_ovsdpdk", "cont_ovs",
+            "pvp_cont_ovsdpdkuser", "pvp_cont_ovsdpdkcuse", "pvp_cont_ovsvirtio",
+            "pvvp_cont_ovsdpdkuser", "pvvp_cont_ovsdpdkcuse", "pvvp_cont_ovsvirtio",
+            "scalability_ovsdpdk", "scalability_ovs",
+            "pvp_tput_ovsdpdkuser", "pvp_tput_ovsdpdkcuse", "pvp_tput_ovsvirtio",
+            "pvp_b2b_ovsdpdkuser", "pvp_b2b_ovsdpdkcuse", "pvp_b2b_ovsvirtio",
+            "pvvp_tput_ovsdpdkuser", "pvvp_tput_ovsdpdkcuse", "pvvp_tput_ovsvirtio",
+            "pvvp_b2b_ovsdpdkuser", "pvvp_b2b_ovsdpdkcuse", "pvvp_b2b_ovsvirtio",
+            "cpu_load_ovsdpdk", "cpu_load_ovs",
+            "mem_load_ovsdpdk", "mem_load_ovs"]
+
+
+def check_vsperf_case_exist(case):
+    """
+    check if the testcase exists
+    if the test case is not defined or not declared in the list
+    return False
+    """
+    vsperf_cases = get_vsperf_cases()
+    if (case is None or case not in vsperf_cases):
+        return False
+    else:
+        return True
+
+
+def format_vsperf_for_dashboard(case, results):
+    """
+    generic method calling the method corresponding to the test case
+    check that the testcase is properly declared first
+    then build the call to the specific method
+    """
+    if check_vsperf_case_exist(case):
+        res = format_common_for_dashboard(case, results)
+    else:
+        res = []
+        print "Test cases not declared"
+    return res
+
+
+def format_common_for_dashboard(case, results):
+    """
+    Common post processing
+    """
+    test_data_description = case + " results for Dashboard"
+    test_data = [{'description': test_data_description}]
+
+    graph_name = ''
+    if "b2b" in case:
+        graph_name = "B2B frames"
+    else:
+        graph_name = "Rx frames per second"
+
+    # Graph 1: Rx fps = f(time)
+    # ********************************
+    new_element = []
+    for data in results:
+        new_element.append({'x': data['creation_date'],
+                            'y1': data['details']['64'],
+                            'y2': data['details']['128'],
+                            'y3': data['details']['512'],
+                            'y4': data['details']['1024'],
+                            'y5': data['details']['1518']})
+
+    test_data.append({'name': graph_name,
+                      'info': {'type': "graph",
+                               'xlabel': 'time',
+                               'y1label': 'frame size 64B',
+                               'y2label': 'frame size 128B',
+                               'y3label': 'frame size 512B',
+                               'y4label': 'frame size 1024B',
+                               'y5label': 'frame size 1518B'},
+                      'data_set': new_element})
+
+    return test_data
+
+
+
+
+############################  For local test  ################################
+import os
+
+def _test():
+    ans = [{'creation_date': '2015-09-12', 'project_name': 'vsperf', 'version': 'ovs_master', 'pod_name': 'pod1-vsperf', 'case_name': 'tput_ovsdpdk', 'installer': 'build_sie', 'details': {'64': '26.804', '1024': '1097.284', '512': '178.137', '1518': '12635.860', '128': '100.564'}},
+           {'creation_date': '2015-09-13', 'project_name': 'vsperf', 'version': 'ovs_master', 'pod_name': 'pod1-vsperf', 'case_name': 'tput_ovsdpdk', 'installer': 'build_sie', 'details': {'64': '16.804', '1024': '1087.284', '512': '168.137', '1518': '12625.860', '128': '99.564'}}]
+
+    result = format_vsperf_for_dashboard("pvp_cont_ovsdpdkcuse", ans)
+    print result
+
+    result = format_vsperf_for_dashboard("b2b_ovsdpdk", ans)
+    print result
+
+    result = format_vsperf_for_dashboard("non_existing", ans)
+    print result
+
+if __name__ == '__main__':
+    _test()
index 85c6172..be08c97 100644 (file)
@@ -719,6 +719,21 @@ class DashboardHandler(GenericApiHandler):
                             "error:Project name missing")
         elif check_dashboard_ready_project(project_arg, "./dashboard"):
             res = []
+
+            if case_arg is None:
+                raise HTTPError(
+                    HTTP_NOT_FOUND,
+                    "error:Test case missing for project " + project_arg)
+
+            # special case of status for project
+            if case_arg == "status":
+                del get_request["case_name"]
+                # retention time to be agreed
+                # last five days by default?
+                # TODO move to DB
+                period = datetime.now() - timedelta(days=5)
+                get_request["creation_date"] = {"$gte": period}
+
             # fetching results
             cursor = self.db.test_results.find(get_request)
             while (yield cursor.fetch_next):
@@ -726,11 +741,7 @@ class DashboardHandler(GenericApiHandler):
                     cursor.next_object())
                 res.append(test_result.format_http())
 
-            if case_arg is None:
-                raise HTTPError(
-                    HTTP_NOT_FOUND,
-                    "error:Test case missing for project " + project_arg)
-            elif check_dashboard_ready_case(project_arg, case_arg):
+            if check_dashboard_ready_case(project_arg, case_arg):
                 dashboard = get_dashboard_result(project_arg, case_arg, res)
             else:
                 raise HTTPError(