Merge "improve: discard venv support for official product"
author: Serena Feng <feng.xiaowei@zte.com.cn>
Tue, 5 Sep 2017 02:00:33 +0000 (02:00 +0000)
committer: Gerrit Code Review <gerrit@opnfv.org>
Tue, 5 Sep 2017 02:00:33 +0000 (02:00 +0000)
31 files changed:
jjb/armband/armband-ci-jobs.yml
jjb/armband/armband-deploy.sh [deleted file]
jjb/armband/armband-download-artifact.sh [deleted file]
jjb/armband/armband-project-jobs.yml [deleted file]
jjb/armband/armband-verify-jobs.yml
jjb/armband/armband-workspace-cleanup.sh [deleted file]
jjb/armband/build.sh [deleted file]
jjb/armband/upload-artifacts.sh [deleted file]
jjb/fuel/fuel-build.sh [deleted file]
jjb/fuel/fuel-daily-jobs.yml
jjb/fuel/fuel-deploy.sh
jjb/fuel/fuel-download-artifact.sh
jjb/fuel/fuel-project-jobs.yml
jjb/fuel/fuel-upload-artifact.sh [deleted file]
jjb/fuel/fuel-verify-jobs.yml
jjb/fuel/fuel-weekly-jobs.yml
utils/test/reporting/docker/reporting.sh
utils/test/reporting/reporting/functest/reporting-status.py
utils/test/reporting/reporting/storperf/reporting-status.py
utils/test/reporting/reporting/utils/reporting_utils.py
utils/test/reporting/reporting/vsperf/__init__.py [new file with mode: 0644]
utils/test/reporting/reporting/vsperf/reporting-status.py [new file with mode: 0644]
utils/test/reporting/reporting/vsperf/template/index-status-tmpl.html [new file with mode: 0644]
utils/test/reporting/reporting/yardstick/reporting-status.py
utils/test/testapi/opnfv_testapi/tests/unit/common/noparam.ini [deleted file]
utils/test/testapi/opnfv_testapi/tests/unit/common/normal.ini [deleted file]
utils/test/testapi/opnfv_testapi/tests/unit/common/nosection.ini [deleted file]
utils/test/testapi/opnfv_testapi/tests/unit/common/notboolean.ini [deleted file]
utils/test/testapi/opnfv_testapi/tests/unit/common/notint.ini [deleted file]
utils/test/testapi/opnfv_testapi/tests/unit/conftest.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py

index a6b781e..b27b053 100644 (file)
 
     builders:
         - shell:
-            !include-raw-escape: ./armband-download-artifact.sh
-        - shell:
-            !include-raw-escape: ./armband-deploy.sh
+            !include-raw-escape: ../fuel/fuel-deploy.sh
 
     publishers:
         - email:
 - parameter:
     name: armband-ci-parameter
     parameters:
-        - string:
-            name: BUILD_DIRECTORY
-            default: $WORKSPACE/build_output
-            description: "Directory where the build artifact will be located upon the completion of the build."
-        - string:
-            name: CACHE_DIRECTORY
-            default: $HOME/opnfv/cache/$INSTALLER_TYPE
-            description: "Directory where the cache to be used during the build is located."
         - string:
             name: GS_URL
             default: artifacts.opnfv.org/$PROJECT{gs-pathname}
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
deleted file mode 100755 (executable)
index 05679aa..0000000
+++ /dev/null
@@ -1,127 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-#           (c) 2017 Enea Software AB
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o nounset
-set -o pipefail
-
-export TERM="vt220"
-
-if [[ "$BRANCH" != 'master' ]]; then
-    # source the file so we get OPNFV vars
-    source latest.properties
-
-    # echo the info about artifact that is used during the deployment
-    echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
-fi
-
-if [[ "$JOB_NAME" =~ "merge" ]]; then
-    # set simplest scenario for virtual deploys to run for merges
-    DEPLOY_SCENARIO="os-nosdn-nofeature-ha"
-elif [[ "$BRANCH" != 'master' ]]; then
-    # for none-merge deployments
-    # checkout the commit that was used for building the downloaded artifact
-    # to make sure the ISO and deployment mechanism uses same versions
-    echo "Checking out $OPNFV_GIT_SHA1"
-    git checkout $OPNFV_GIT_SHA1 --quiet
-fi
-
-# set deployment parameters
-export TMPDIR=${WORKSPACE}/tmpdir
-
-LAB_NAME=${NODE_NAME/-*}
-POD_NAME=${NODE_NAME/*-}
-
-# we currently support enea
-if [[ ! $LAB_NAME =~ (arm|enea) ]]; then
-    echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
-    exit 1
-fi
-
-echo "Using configuration for $LAB_NAME"
-
-# create TMPDIR if it doesn't exist
-mkdir -p $TMPDIR
-
-cd $WORKSPACE
-if [[ $LAB_CONFIG_URL =~ ^(git|ssh):// ]]; then
-    echo "Cloning securedlab repo $BRANCH"
-    git clone --quiet --branch $BRANCH $LAB_CONFIG_URL lab-config
-    LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
-
-    # Source local_env if present, which contains POD-specific config
-    local_env="${WORKSPACE}/lab-config/labs/$LAB_NAME/$POD_NAME/fuel/config/local_env"
-    if [ -e $local_env ]; then
-        echo "-- Sourcing local environment file"
-        source $local_env
-    fi
-fi
-
-# releng wants us to use nothing else but opnfv.iso for now. We comply.
-ISO_FILE=$WORKSPACE/opnfv.iso
-
-# log file name
-FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
-
-# Deploy Cache (to enable just create the deploy-cache subdir)
-# NOTE: Only available when ISO files are cached using ISOSTORE mechanism
-DEPLOY_CACHE=${ISOSTORE:-/iso_mount/opnfv_ci}/${BRANCH##*/}/deploy-cache
-if [[ -d "${DEPLOY_CACHE}" ]]; then
-    echo "Deploy cache dir present."
-    echo "--------------------------------------------------------"
-    echo "Fuel@OPNFV deploy cache: ${DEPLOY_CACHE}"
-    DEPLOY_CACHE="-C ${DEPLOY_CACHE}"
-else
-    DEPLOY_CACHE=""
-fi
-
-# construct the command
-DEPLOY_COMMAND="$WORKSPACE/ci/deploy.sh -b ${LAB_CONFIG_URL} \
-    -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://${ISO_FILE} \
-    -B ${DEFAULT_BRIDGE:-pxebr} -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME \
-    ${DEPLOY_CACHE}"
-
-# log info to console
-echo "Deployment parameters"
-echo "--------------------------------------------------------"
-echo "Scenario: $DEPLOY_SCENARIO"
-echo "Lab: $LAB_NAME"
-echo "POD: $POD_NAME"
-[[ "$BRANCH" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
-echo
-echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# start the deployment
-echo "Issuing command"
-echo "$DEPLOY_COMMAND"
-echo
-
-$DEPLOY_COMMAND
-exit_code=$?
-
-echo
-echo "--------------------------------------------------------"
-echo "Deployment is done!"
-
-# upload logs for baremetal deployments
-# work with virtual deployments is still going on so we skip that for the timebeing
-if [[ "$JOB_NAME" =~ "baremetal-daily" ]]; then
-    echo "Uploading deployment logs"
-    gsutil cp $WORKSPACE/$FUEL_LOG_FILENAME gs://$GS_URL/logs/$FUEL_LOG_FILENAME > /dev/null 2>&1
-    echo "Logs are available as http://$GS_URL/logs/$FUEL_LOG_FILENAME"
-fi
-
-if [[ $exit_code -ne 0 ]]; then
-    echo "Deployment failed!"
-    exit $exit_code
-else
-    echo "Deployment is successful!"
-fi
diff --git a/jjb/armband/armband-download-artifact.sh b/jjb/armband/armband-download-artifact.sh
deleted file mode 100755 (executable)
index 4f83305..0000000
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-#           (c) 2017 Enea AB
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o pipefail
-
-# disable Fuel ISO download for master branch
-[[ "$BRANCH" == 'master' ]] && exit 0
-
-echo "Host info: $(hostname) $(hostname -I)"
-
-# Configurable environment variables:
-# ISOSTORE (/iso_mount/opnfv_ci)
-
-if [[ "$JOB_NAME" =~ "merge" ]]; then
-    echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
-    # get the properties file for the Armband Fuel ISO built for a merged change
-    curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
-else
-    # get the latest.properties file in order to get info regarding latest artifact
-    echo "Downloading http://$GS_URL/latest.properties"
-    curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
-fi
-
-# source the file so we get artifact metadata, it will exit if it doesn't exist
-source latest.properties
-
-# echo the info about artifact that is used during the deployment
-OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
-echo "Using $OPNFV_ARTIFACT for deployment"
-
-# Releng doesn't want us to use anything but opnfv.iso for now. We comply.
-ISO_FILE=${WORKSPACE}/opnfv.iso
-
-# using ISOs for verify & merge jobs from local storage will be enabled later
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
-    # check if we already have the ISO to avoid redownload
-    ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}/${BRANCH##*/}
-    if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
-        echo "ISO exists locally. Skipping the download and using the file from ISO store"
-        ln -s $ISOSTORE/$OPNFV_ARTIFACT ${ISO_FILE}
-        echo "--------------------------------------------------------"
-        echo
-        ls -al ${ISO_FILE}
-        echo
-        echo "--------------------------------------------------------"
-        echo "Done!"
-        exit 0
-    fi
-fi
-
-# Use gsutils if available
-if $(which gsutil &>/dev/null); then
-    DOWNLOAD_URL="gs://$OPNFV_ARTIFACT_URL"
-    CMD="gsutil cp ${DOWNLOAD_URL} ${ISO_FILE}"
-else
-    # download image
-    # -f returns error if the file was not found or on server error
-    DOWNLOAD_URL="http://$OPNFV_ARTIFACT_URL"
-    CMD="curl -f -s -o ${ISO_FILE} ${DOWNLOAD_URL}"
-fi
-
-# log info to console
-echo "Downloading the $INSTALLER_TYPE artifact using URL $DOWNLOAD_URL"
-echo "This could take some time..."
-echo "--------------------------------------------------------"
-echo "$CMD"
-$CMD
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/armband/armband-project-jobs.yml b/jjb/armband/armband-project-jobs.yml
deleted file mode 100644 (file)
index 0623b55..0000000
+++ /dev/null
@@ -1,95 +0,0 @@
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
-    name: armband
-
-    project: '{name}'
-
-    installer: 'fuel'
-
-    jobs:
-        - 'armband-{installer}-build-daily-{stream}'
-
-    stream:
-        - master:
-            branch: '{stream}'
-            gs-pathname: ''
-            disabled: false
-        - euphrates:
-            branch: 'stable/{stream}'
-            gs-pathname: '/{stream}'
-            disabled: true
-
-- job-template:
-    name: 'armband-{installer}-build-daily-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: false
-
-    properties:
-        - logrotate-default
-        - throttle:
-            enabled: true
-            max-total: 1
-            max-per-node: 1
-            option: 'project'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-            branch: '{branch}'
-        - 'opnfv-build-enea-defaults'
-        - '{installer}-defaults'
-        - armband-project-parameter:
-            gs-pathname: '{gs-pathname}'
-
-    scm:
-        - git-scm
-
-    triggers:
-        - pollscm:
-            cron: '0 H/4 * * *'
-
-    wrappers:
-        - timeout:
-            timeout: 360
-            fail: true
-
-    builders:
-        - shell:
-            !include-raw-escape: ./build.sh
-        - shell:
-            !include-raw-escape: ./upload-artifacts.sh
-
-    publishers:
-        - email:
-            recipients: armband@enea.com
-        - email-jenkins-admins-on-failure
-
-########################
-# parameter macros
-########################
-- parameter:
-    name: armband-project-parameter
-    parameters:
-        - string:
-            name: BUILD_DIRECTORY
-            default: $WORKSPACE/build_output
-            description: "Directory where the build artifact will be located upon the completion of the build."
-        - string:
-            name: CACHE_DIRECTORY
-            default: $HOME/opnfv/cache/$INSTALLER_TYPE
-            description: "Directory where the cache to be used during the build is located."
-        - string:
-            name: GS_URL
-            default: artifacts.opnfv.org/$PROJECT{gs-pathname}
-            description: "URL to Google Storage."
-        - choice:
-            name: FORCE_BUILD
-            choices:
-                - 'false'
-                - 'true'
-            description: 'Force build even if there are no changes in the armband repo. Default false'
index c9476b1..56f70d6 100644 (file)
@@ -22,8 +22,6 @@
     phase:
         - 'basic':
             slave-label: 'opnfv-build-enea'
-        - 'build':
-            slave-label: 'opnfv-build-enea'
         - 'deploy-virtual':
             slave-label: 'opnfv-build-enea'
         - 'smoke-test':
                   node-parameters: false
                   kill-phase-on: FAILURE
                   abort-all-job: true
-        - multijob:
-            name: build
-            condition: SUCCESSFUL
-            projects:
-                - name: 'armband-verify-build-{stream}'
-                  current-parameters: false
-                  predefined-parameters: |
-                    BRANCH=$BRANCH
-                    GERRIT_REFSPEC=$GERRIT_REFSPEC
-                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-                  node-parameters: false
-                  kill-phase-on: FAILURE
-                  abort-all-job: true
         - multijob:
             name: deploy-virtual
             condition: SUCCESSFUL
 
             echo "Not activated!"
 
-- builder:
-    name: 'armband-verify-build-macro'
-    builders:
-        - shell:
-            !include-raw: ./build.sh
-        - shell:
-            !include-raw: ./armband-workspace-cleanup.sh
-
 - builder:
     name: 'armband-verify-deploy-virtual-macro'
     builders:
 - parameter:
     name: 'armband-verify-defaults'
     parameters:
-        - string:
-            name: BUILD_DIRECTORY
-            default: $WORKSPACE/build_output
-            description: "Directory where the build artifact will be located upon the completion of the build."
-        - string:
-            name: CACHE_DIRECTORY
-            default: $HOME/opnfv/cache/$INSTALLER_TYPE
-            description: "Directory where the cache to be used during the build is located."
         - string:
             name: GS_URL
             default: artifacts.opnfv.org/$PROJECT{gs-pathname}
diff --git a/jjb/armband/armband-workspace-cleanup.sh b/jjb/armband/armband-workspace-cleanup.sh
deleted file mode 100755 (executable)
index d8948c7..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# delete the $WORKSPACE to open some space
-/bin/rm -rf $WORKSPACE
diff --git a/jjb/armband/build.sh b/jjb/armband/build.sh
deleted file mode 100755 (executable)
index 29c01bb..0000000
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# Copyright (c) 2017 Enea AB.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-# disable Armband iso build for master branch
-if [[ "$BRANCH" == 'master' ]]; then
-    touch $WORKSPACE/.noupload
-    echo "--------------------------------------------------------"
-    echo "Done!"
-    exit 0
-fi
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-export TERM="vt220"
-
-echo "Host info: $(hostname) $(hostname -I)"
-
-cd $WORKSPACE
-
-# Armband requires initializing git submodules (e.g. for Fuel's clean_cache.sh)
-make submodules-init
-
-# remove the expired items from cache
-test -f $WORKSPACE/ci/clean_cache.sh && $WORKSPACE/ci/clean_cache.sh $CACHE_DIRECTORY
-
-LATEST_ISO_PROPERTIES=$WORKSPACE/latest.iso.properties
-if [[ "$JOB_NAME" =~ "daily" ]]; then
-    # check to see if we already have an artifact on artifacts.opnfv.org
-    # for this commit during daily builds
-    echo "Checking to see if we already built and stored Armband Fuel ISO for this commit"
-
-    curl -s -o $LATEST_ISO_PROPERTIES http://$GS_URL/latest.properties 2>/dev/null
-fi
-
-# get metadata of latest ISO
-if grep -q OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES 2>/dev/null; then
-    LATEST_ISO_SHA1=$(grep OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
-    LATEST_ISO_URL=$(grep OPNFV_ARTIFACT_URL $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
-else
-    LATEST_ISO_SHA1=none
-fi
-
-# get current SHA1
-CURRENT_SHA1=$(git rev-parse HEAD)
-
-# set FORCE_BUILD to false for non-daily builds
-FORCE_BUILD=${FORCE_BUILD:-false}
-
-if [[ "$CURRENT_SHA1" == "$LATEST_ISO_SHA1" && "$FORCE_BUILD" == "false" ]]; then
-    echo "***************************************************"
-    echo "   An ISO has already been built for this commit"
-    echo "   $LATEST_ISO_URL"
-    echo "***************************************************"
-else
-    echo "This commit has not been built yet or forced build! Proceeding with the build."
-    /bin/rm -f $LATEST_ISO_PROPERTIES
-    echo
-fi
-
-# log info to console
-echo "Starting the build of Armband $INSTALLER_TYPE. This could take some time..."
-echo "-----------------------------------------------------------"
-echo
-
-# create the cache directory if it doesn't exist
-mkdir -p $CACHE_DIRECTORY
-
-# set OPNFV_ARTIFACT_VERSION
-if [[ "$JOB_NAME" =~ "merge" ]]; then
-    echo "Building Fuel ISO for a merged change"
-    export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
-    echo "Not supported"
-    exit 1
-else
-    export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-fi
-
-NOCACHE_PATTERN="verify: no-cache"
-if [[ "$JOB_NAME" =~ "verify" && "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "$NOCACHE_PATTERN" ]]; then
-    echo "The cache will not be used for this build!"
-    NOCACHE_ARG="-f P"
-fi
-NOCACHE_ARG=${NOCACHE_ARG:-}
-
-# start the build
-cd $WORKSPACE/ci
-./build.sh -v $OPNFV_ARTIFACT_VERSION $NOCACHE_ARG -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY
-
-# list the build artifacts
-ls -al $BUILD_DIRECTORY
-
-# save information regarding artifact into file
-(
-    echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
-    echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
-    echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
-    echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-    echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
-    echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/armband/upload-artifacts.sh b/jjb/armband/upload-artifacts.sh
deleted file mode 100755 (executable)
index 97987e2..0000000
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-
-# configurable environment variables:
-# ISOSTORE (/iso_mount/opnfv_ci)
-
-# check if we built something
-if [ -f $WORKSPACE/.noupload ]; then
-    echo "Nothing new to upload. Exiting."
-    /bin/rm -f $WORKSPACE/.noupload
-    exit 0
-fi
-
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-
-# storing ISOs for verify & merge jobs will be done once we get the disk array
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
-    # store ISO locally on NFS first
-    ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}
-    if [[ -d "$ISOSTORE" ]]; then
-        ISOSTORE=${ISOSTORE}/${BRANCH##*/}
-        mkdir -p $ISOSTORE
-
-        # remove all but most recent 3 ISOs first to keep iso_mount clean & tidy
-        cd $ISOSTORE
-        ls -tp | grep -v '/' | tail -n +4 | xargs -d '\n' /bin/rm -f --
-
-        # store ISO
-        echo "Storing latest ISO in local storage"
-        touch .storing
-        /bin/cp -f $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
-            $ISOSTORE/opnfv-$OPNFV_ARTIFACT_VERSION.iso
-        rm .storing
-    fi
-fi
-
-# log info to console
-echo "Uploading armband artifacts. This could take some time..."
-echo
-
-echo "Started at $(date)"
-cd $WORKSPACE
-# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
-    gsutil cp $WORKSPACE/opnfv.properties \
-    gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-elif [[ "$JOB_NAME" =~ "merge" ]]; then
-    echo "Uploaded Armband Fuel ISO for a merged change"
-fi
-echo "Ended at $(date)"
-
-gsutil -m setmeta \
-    -h "Content-Type:text/html" \
-    -h "Cache-Control:private, max-age=0, no-transform" \
-    gs://$GS_URL/latest.properties \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
-    -h "Cache-Control:private, max-age=0, no-transform" \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
-
-# disabled errexit due to gsutil setmeta complaints
-#   BadRequestException: 400 Invalid argument
-# check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
-    echo "Problem while uploading artifact!"
-    echo "Check log $WORKSPACE/gsutil.iso.log on the machine where this build is done."
-    exit 1
-fi
-
-echo "Done!"
-echo
-echo "--------------------------------------------------------"
-echo
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-echo
-echo "--------------------------------------------------------"
-echo
diff --git a/jjb/fuel/fuel-build.sh b/jjb/fuel/fuel-build.sh
deleted file mode 100755 (executable)
index 2c0d12a..0000000
+++ /dev/null
@@ -1,109 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-# disable Fuel iso build for master branch
-if [[ "$BRANCH" == 'master' ]]; then
-    touch $WORKSPACE/.noupload
-    echo "--------------------------------------------------------"
-    echo "Done!"
-    exit 0
-fi
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-export TERM="vt220"
-
-cd $WORKSPACE
-
-# remove the expired items from cache
-test -f $WORKSPACE/ci/clean_cache.sh && $WORKSPACE/ci/clean_cache.sh $CACHE_DIRECTORY
-
-LATEST_ISO_PROPERTIES=$WORKSPACE/latest.iso.properties
-if [[ "$JOB_NAME" =~ "daily" ]]; then
-    # check to see if we already have an artifact on artifacts.opnfv.org
-    # for this commit during daily builds
-    echo "Checking to see if we already built and stored Fuel ISO for this commit"
-
-    curl -s -o $LATEST_ISO_PROPERTIES http://$GS_URL/latest.properties 2>/dev/null
-fi
-
-# get metadata of latest ISO
-if grep -q OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES 2>/dev/null; then
-    LATEST_ISO_SHA1=$(grep OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
-    LATEST_ISO_URL=$(grep OPNFV_ARTIFACT_URL $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
-else
-    LATEST_ISO_SHA1=none
-fi
-
-# get current SHA1
-CURRENT_SHA1=$(git rev-parse HEAD)
-
-# set FORCE_BUILD to false for non-daily builds
-FORCE_BUILD=${FORCE_BUILD:-false}
-
-if [[ "$CURRENT_SHA1" == "$LATEST_ISO_SHA1" && "$FORCE_BUILD" == "false" ]]; then
-    echo "***************************************************"
-    echo "   An ISO has already been built for this commit"
-    echo "   $LATEST_ISO_URL"
-    echo "***************************************************"
-#    echo "Nothing new to build. Exiting."
-#    touch $WORKSPACE/.noupload
-#    exit 0
-else
-    echo "This commit has not been built yet or forced build! Proceeding with the build."
-    /bin/rm -f $LATEST_ISO_PROPERTIES
-    echo
-fi
-
-# log info to console
-echo "Starting the build of $INSTALLER_TYPE. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# create the cache directory if it doesn't exist
-mkdir -p $CACHE_DIRECTORY
-
-# set OPNFV_ARTIFACT_VERSION
-if [[ "$JOB_NAME" =~ "merge" ]]; then
-    echo "Building Fuel ISO for a merged change"
-    export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
-else
-    export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-fi
-
-NOCACHE_PATTERN="verify: no-cache"
-if [[ "$JOB_NAME" =~ "verify" && "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "$NOCACHE_PATTERN" ]]; then
-    echo "The cache will not be used for this build!"
-    NOCACHE_ARG="-f P"
-fi
-NOCACHE_ARG=${NOCACHE_ARG:-}
-
-# start the build
-cd $WORKSPACE/ci
-./build.sh -v $OPNFV_ARTIFACT_VERSION $NOCACHE_ARG -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY
-
-# list the build artifacts
-ls -al $BUILD_DIRECTORY
-
-# save information regarding artifact into file
-(
-    echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
-    echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
-    echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
-    echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-    echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
-    echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
index e5bb409..488505e 100644 (file)
 - parameter:
     name: fuel-ci-parameter
     parameters:
-        - string:
-            name: BUILD_DIRECTORY
-            default: $WORKSPACE/build_output
-            description: "Directory where the build artifact will be located upon the completion of the build."
-        - string:
-            name: CACHE_DIRECTORY
-            default: $HOME/opnfv/cache/$INSTALLER_TYPE
-            description: "Directory where the cache to be used during the build is located."
         - string:
             name: GS_URL
             default: artifacts.opnfv.org/$PROJECT{gs-pathname}
index eebd8bc..ddaebc9 100755 (executable)
@@ -1,7 +1,7 @@
 #!/bin/bash
 # SPDX-license-identifier: Apache-2.0
 ##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
+# Copyright (c) 2017 Ericsson AB, Mirantis Inc., Enea Software AB and others.
 # All rights reserved. This program and the accompanying materials
 # are made available under the terms of the Apache License, Version 2.0
 # which accompanies this distribution, and is available at
@@ -20,82 +20,99 @@ if [[ "$BRANCH" != 'master' ]]; then
     echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
 fi
 
-if [[ "$JOB_NAME" =~ "merge" ]]; then
+# shellcheck disable=SC2153
+if [[ "${JOB_NAME}" =~ "merge" ]]; then
     # set simplest scenario for virtual deploys to run for merges
     DEPLOY_SCENARIO="os-nosdn-nofeature-ha"
-elif [[ "$BRANCH" != 'master' ]]; then
+elif [[ "${BRANCH}" != 'master' ]]; then
     # for none-merge deployments
     # checkout the commit that was used for building the downloaded artifact
     # to make sure the ISO and deployment mechanism uses same versions
-    echo "Checking out $OPNFV_GIT_SHA1"
-    git checkout $OPNFV_GIT_SHA1 --quiet
+    echo "Checking out ${OPNFV_GIT_SHA1}"
+    git checkout "${OPNFV_GIT_SHA1}" --quiet
 fi
 
 # set deployment parameters
-export TMPDIR=$HOME/tmpdir
+export TMPDIR=${HOME}/tmpdir
 BRIDGE=${BRIDGE:-pxebr}
+# shellcheck disable=SC2153
 LAB_NAME=${NODE_NAME/-*}
+# shellcheck disable=SC2153
 POD_NAME=${NODE_NAME/*-}
-
-if [[ "$NODE_NAME" =~ "virtual" ]]; then
-    POD_NAME="virtual_kvm"
-fi
-
-# we currently support ericsson, intel, lf and zte labs
-if [[ ! "$LAB_NAME" =~ (ericsson|intel|lf|zte) ]]; then
-    echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
-    exit 1
+# Armband might override LAB_CONFIG_URL, all others use the default
+LAB_CONFIG_URL=${LAB_CONFIG_URL:-'ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab'}
+
+# Fuel requires deploy script to be ran with sudo, Armband does not
+SUDO=sudo
+if [ "${PROJECT}" = 'fuel' ]; then
+    # Fuel does not use any POD-specific configuration for virtual deploys
+    if [[ "${NODE_NAME}" =~ "virtual" ]]; then
+        POD_NAME="virtual_kvm"
+    fi
+    # Fuel currently supports ericsson, intel, lf and zte labs
+    if [[ ! "${LAB_NAME}" =~ (ericsson|intel|lf|zte) ]]; then
+        echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
+        exit 1
+    fi
 else
-    echo "Using configuration for $LAB_NAME"
+    SUDO=
+    # Armband currently supports arm, enea labs
+    if [[ ! "${LAB_NAME}" =~ (arm|enea) ]]; then
+        echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
+        exit 1
+    fi
 fi
 
-# create TMPDIR if it doesn't exist
-export TMPDIR=$HOME/tmpdir
-mkdir -p $TMPDIR
-
-# change permissions down to TMPDIR
-chmod a+x $HOME
-chmod a+x $TMPDIR
-
-# clone the securedlab repo
-cd $WORKSPACE
-echo "Cloning securedlab repo $BRANCH"
-git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet \
-    --branch $BRANCH
-
-# Source local_env if present, which contains POD-specific config
-local_env="${WORKSPACE}/securedlab/labs/$LAB_NAME/$POD_NAME/fuel/config/local_env"
-if [ -e "${local_env}" ]; then
-    echo "-- Sourcing local environment file"
-    source "${local_env}"
+echo "Using configuration for ${LAB_NAME}"
+
+# create TMPDIR if it doesn't exist, change permissions
+mkdir -p "${TMPDIR}"
+chmod a+x "${HOME}" "${TMPDIR}"
+
+cd "${WORKSPACE}" || exit 1
+if [[ "${LAB_CONFIG_URL}" =~ ^(git|ssh):// ]]; then
+    echo "Cloning securedlab repo ${BRANCH}"
+    git clone --quiet --branch "${BRANCH}" "${LAB_CONFIG_URL}" lab-config
+    LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
+
+    # Source local_env if present, which contains POD-specific config
+    local_env="${WORKSPACE}/lab-config/labs/${LAB_NAME}/${POD_NAME}/fuel/config/local_env"
+    if [ -e "${local_env}" ]; then
+        echo "-- Sourcing local environment file"
+        source "${local_env}"
+    fi
 fi
 
+# releng wants us to use nothing else but opnfv.iso for now. We comply.
+ISO_FILE=file://${WORKSPACE}/opnfv.iso
+
 # log file name
 FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
 
 # construct the command
-DEPLOY_COMMAND="sudo $WORKSPACE/ci/deploy.sh -b file://$WORKSPACE/securedlab \
-    -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso \
-    -B ${DEFAULT_BRIDGE:-${BRIDGE}} -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME"
+DEPLOY_COMMAND="${SUDO} ${WORKSPACE}/ci/deploy.sh -b ${LAB_CONFIG_URL} \
+    -l ${LAB_NAME} -p ${POD_NAME} -s ${DEPLOY_SCENARIO} -i ${ISO_FILE} \
+    -B ${DEFAULT_BRIDGE:-${BRIDGE}} -S ${TMPDIR} \
+    -L ${WORKSPACE}/${FUEL_LOG_FILENAME}"
 
 # log info to console
 echo "Deployment parameters"
 echo "--------------------------------------------------------"
-echo "Scenario: $DEPLOY_SCENARIO"
-echo "Lab: $LAB_NAME"
-echo "POD: $POD_NAME"
-[[ "$BRANCH" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
+echo "Scenario: ${DEPLOY_SCENARIO}"
+echo "Lab: ${LAB_NAME}"
+echo "POD: ${POD_NAME}"
+[[ "${BRANCH}" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
 echo
-echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
+echo "Starting the deployment using ${INSTALLER_TYPE}. This could take some time..."
 echo "--------------------------------------------------------"
 echo
 
 # start the deployment
 echo "Issuing command"
-echo "$DEPLOY_COMMAND"
+echo "${DEPLOY_COMMAND}"
 echo
 
-$DEPLOY_COMMAND
+${DEPLOY_COMMAND}
 exit_code=$?
 
 echo
@@ -103,17 +120,18 @@ echo "--------------------------------------------------------"
 echo "Deployment is done!"
 
 # upload logs for baremetal deployments
-# work with virtual deployments is still going on so we skip that for the timebeing
-if [[ "$JOB_NAME" =~ (baremetal-daily|baremetal-weekly) ]]; then
+# work with virtual deployments is still going on, so skip that for now
+if [[ "${JOB_NAME}" =~ (baremetal-daily|baremetal-weekly) ]]; then
     echo "Uploading deployment logs"
-    gsutil cp $WORKSPACE/$FUEL_LOG_FILENAME gs://$GS_URL/logs/$FUEL_LOG_FILENAME > /dev/null 2>&1
-    echo "Logs are available as http://$GS_URL/logs/$FUEL_LOG_FILENAME"
+    gsutil cp "${WORKSPACE}/${FUEL_LOG_FILENAME}" \
+        "gs://${GS_URL}/logs/${FUEL_LOG_FILENAME}" > /dev/null 2>&1
+    echo "Logs are available at http://${GS_URL}/logs/${FUEL_LOG_FILENAME}"
 fi
 
-if [[ $exit_code -ne 0 ]]; then
+if [[ "${exit_code}" -ne 0 ]]; then
     echo "Deployment failed!"
-    exit $exit_code
-else
-    echo "Deployment is successful!"
-    exit 0
+    exit "${exit_code}"
 fi
+
+echo "Deployment is successful!"
+exit 0
index c3b8253..fa0c88b 100755 (executable)
@@ -11,9 +11,9 @@ set -o errexit
 set -o pipefail
 
 # disable Fuel ISO download for master branch
-[[ "$BRANCH" == 'master' ]] && exit 0
+[[ ! "$BRANCH" =~ (danube) ]] && exit 0
 
-# use proxy url to replace the nomral URL, for googleusercontent.com will be blocked randomly
+# use proxy url to replace the normal URL, or googleusercontent.com will be blocked randomly
 [[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
 
 if [[ "$JOB_NAME" =~ "merge" ]]; then
index 6bb7e51..e850a0a 100644 (file)
             disabled: true
 
     jobs:
-        - 'fuel-build-daily-{stream}'
-        - 'fuel-merge-build-{stream}'
         - 'fuel-merge-deploy-virtual-{stream}'
         - 'fuel-deploy-generic-daily-{stream}'
 
 ########################
 # job templates
 ########################
-- job-template:
-    name: 'fuel-build-daily-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: false
-
-    properties:
-        - logrotate-default
-        - throttle:
-            enabled: true
-            max-total: 1
-            max-per-node: 1
-            option: 'project'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-            branch: '{branch}'
-        - 'opnfv-build-ubuntu-defaults'
-        - '{installer}-defaults'
-        - choice:
-            name: FORCE_BUILD
-            choices:
-              - 'false'
-              - 'true'
-            description: "Force build even if there is no changes in fuel repo. Default false"
-        - fuel-project-parameter:
-            gs-pathname: '{gs-pathname}'
-
-    scm:
-        - git-scm
-
-    triggers:
-        - timed: '0 H/4 * * *'
-
-    wrappers:
-        - timeout:
-            timeout: 360
-            fail: true
-
-    builders:
-        - shell:
-            !include-raw-escape: ./fuel-build.sh
-        - shell:
-            !include-raw-escape: ./fuel-upload-artifact.sh
-        - shell:
-            !include-raw-escape: ./fuel-workspace-cleanup.sh
-
-    publishers:
-        - email:
-            recipients: fzhadaev@mirantis.com
-        - email-jenkins-admins-on-failure
-
-- job-template:
-    name: 'fuel-merge-build-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: true
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-            branch: '{branch}'
-        - 'opnfv-build-ubuntu-defaults'
-        - '{installer}-defaults'
-        - fuel-project-parameter:
-            gs-pathname: '{gs-pathname}'
-
-    scm:
-        - git-scm
-
-    wrappers:
-        - ssh-agent-wrapper
-        - timeout:
-            timeout: 360
-            fail: true
-
-    triggers:
-        - gerrit:
-            server-name: 'gerrit.opnfv.org'
-            trigger-on:
-                - change-merged-event
-                - comment-added-contains-event:
-                    comment-contains-value: 'remerge'
-            projects:
-              - project-compare-type: 'ANT'
-                project-pattern: '{project}'
-                branches:
-                    - branch-compare-type: 'ANT'
-                      branch-pattern: '**/{branch}'
-                file-paths:
-                  - compare-type: ANT
-                    pattern: 'ci/**'
-                  - compare-type: ANT
-                    pattern: 'build/**'
-                  - compare-type: ANT
-                    pattern: 'deploy/**'
-                disable-strict-forbidden-file-verification: 'true'
-                forbidden-file-paths:
-                  - compare-type: ANT
-                    pattern: 'docs/**'
-
-    builders:
-        - shell:
-            !include-raw-escape: ./fuel-build.sh
-        - shell:
-            !include-raw-escape: ./fuel-upload-artifact.sh
-        - shell:
-            !include-raw-escape: ./fuel-workspace-cleanup.sh
-
 - job-template:
     name: 'fuel-merge-deploy-virtual-{stream}'
 
                   - compare-type: ANT
                     pattern: 'ci/**'
                   - compare-type: ANT
-                    pattern: 'build/**'
-                  - compare-type: ANT
-                    pattern: 'deploy/**'
+                    pattern: 'mcp/**'
                 disable-strict-forbidden-file-verification: 'true'
                 forbidden-file-paths:
                   - compare-type: ANT
             dependency-jobs: 'fuel-merge-build-{stream}'
 
     builders:
-        - shell:
-            !include-raw-escape: ./fuel-download-artifact.sh
         - shell:
             !include-raw-escape: ./fuel-deploy.sh
         - shell:
             name: '$BUILD_NUMBER - POD: $NODE_NAME Scenario: $DEPLOY_SCENARIO'
 
     builders:
-        - shell:
-            !include-raw-escape: ./fuel-download-artifact.sh
         - shell:
             !include-raw-escape: ./fuel-deploy.sh
 
 - parameter:
     name: fuel-project-parameter
     parameters:
-        - string:
-            name: BUILD_DIRECTORY
-            default: $WORKSPACE/build_output
-            description: "Directory where the build artifact will be located upon the completion of the build."
-        - string:
-            name: CACHE_DIRECTORY
-            default: $HOME/opnfv/cache/$INSTALLER_TYPE
-            description: "Directory where the cache to be used during the build is located."
         - string:
             name: GS_URL
             default: artifacts.opnfv.org/$PROJECT{gs-pathname}
diff --git a/jjb/fuel/fuel-upload-artifact.sh b/jjb/fuel/fuel-upload-artifact.sh
deleted file mode 100755 (executable)
index d1ac350..0000000
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-
-# check if we built something
-if [ -f $WORKSPACE/.noupload ]; then
-    echo "Nothing new to upload. Exiting."
-    /bin/rm -f $WORKSPACE/.noupload
-    exit 0
-fi
-
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-nfsstore () {
-# storing ISOs for verify & merge jobs will be done once we get the disk array
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
-    # store ISO locally on NFS first
-    ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}"
-    if [[ -d "$ISOSTORE" ]]; then
-        # remove all but most recent 5 ISOs first to keep iso_mount clean & tidy
-        cd $ISOSTORE
-        ls -tp | grep -v '/' | tail -n +6 | xargs -d '\n' /bin/rm -f --
-
-        # store ISO
-        echo "Storing $INSTALLER_TYPE artifact on NFS..."
-        /bin/cp -f $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
-            $ISOSTORE/opnfv-$OPNFV_ARTIFACT_VERSION.iso
-    fi
-fi
-}
-
-importkey () {
-# clone releng repository
-echo "Cloning releng repository..."
-[ -d releng ] && rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-#this is where we import the siging key
-if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
-  source $WORKSPACE/releng/utils/gpg_import_key.sh
-fi
-}
-
-signiso () {
-gpg2 -vvv --batch --yes --no-tty \
-  --default-key opnfv-helpdesk@rt.linuxfoundation.org  \
-  --passphrase besteffort \
-  --detach-sig $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso
-
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
-echo "ISO signature Upload Complete!"
-}
-
-uploadiso () {
-# log info to console
-echo "Uploading $INSTALLER_TYPE artifact. This could take some time..."
-echo
-
-cd $WORKSPACE
-# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
-    gsutil cp $WORKSPACE/opnfv.properties \
-    gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-elif [[ "$JOB_NAME" =~ "merge" ]]; then
-    echo "Uploaded Fuel ISO for a merged change"
-fi
-
-gsutil -m setmeta \
-    -h "Content-Type:text/html" \
-    -h "Cache-Control:private, max-age=0, no-transform" \
-    gs://$GS_URL/latest.properties \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
-    -h "Cache-Control:private, max-age=0, no-transform" \
-    gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
-
-# disabled errexit due to gsutil setmeta complaints
-#   BadRequestException: 400 Invalid argument
-# check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
-    echo "Problem while uploading artifact!"
-    echo "Check log $WORKSPACE/gsutil.iso.log on the machine where this build is done."
-    exit 1
-fi
-
-echo "Done!"
-echo
-echo "--------------------------------------------------------"
-echo
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-echo
-echo "--------------------------------------------------------"
-echo
-}
-
-nfsstore
-
-if [[ "$JOB_NAME" =~ merge ]]; then
-    uploadiso
-elif [[ "$JOB_NAME" =~ build ]]; then
-    importkey
-    signiso
-    uploadiso
-fi
-
index 469ca92..899be9a 100644 (file)
@@ -22,8 +22,6 @@
     phase:
         - 'basic':
             slave-label: 'opnfv-build-ubuntu'
-        - 'build':
-            slave-label: 'opnfv-build-ubuntu'
         - 'deploy-virtual':
             slave-label: 'opnfv-build-ubuntu'
         - 'smoke-test':
@@ -85,9 +83,7 @@
                   - compare-type: ANT
                     pattern: 'ci/**'
                   - compare-type: ANT
-                    pattern: 'build/**'
-                  - compare-type: ANT
-                    pattern: 'deploy/**'
+                    pattern: 'mcp/**'
                 disable-strict-forbidden-file-verification: 'true'
                 forbidden-file-paths:
                   - compare-type: ANT
                   node-parameters: false
                   kill-phase-on: FAILURE
                   abort-all-job: true
-        - multijob:
-            name: build
-            condition: SUCCESSFUL
-            projects:
-                - name: 'fuel-verify-build-{stream}'
-                  current-parameters: false
-                  predefined-parameters: |
-                    BRANCH=$BRANCH
-                    GERRIT_REFSPEC=$GERRIT_REFSPEC
-                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-                  node-parameters: false
-                  kill-phase-on: FAILURE
-                  abort-all-job: true
         - multijob:
             name: deploy-virtual
             condition: SUCCESSFUL
 
             echo "Not activated!"
 
-- builder:
-    name: 'fuel-verify-build-macro'
-    builders:
-        - shell:
-            !include-raw: ./fuel-build.sh
-        - shell:
-            !include-raw: ./fuel-workspace-cleanup.sh
-
 - builder:
     name: 'fuel-verify-deploy-virtual-macro'
     builders:
 - parameter:
     name: 'fuel-verify-defaults'
     parameters:
-        - string:
-            name: BUILD_DIRECTORY
-            default: $WORKSPACE/build_output
-            description: "Directory where the build artifact will be located upon the completion of the build."
-        - string:
-            name: CACHE_DIRECTORY
-            default: $HOME/opnfv/cache/$INSTALLER_TYPE
-            description: "Directory where the cache to be used during the build is located."
         - string:
             name: GS_URL
             default: artifacts.opnfv.org/$PROJECT{gs-pathname}
index 57e36e1..c681c62 100644 (file)
     builders:
         - description-setter:
             description: "Built on $NODE_NAME"
-        - shell:
-            !include-raw-escape: ./fuel-download-artifact.sh
         - shell:
             !include-raw-escape: ./fuel-deploy.sh
 
 - parameter:
     name: fuel-weekly-parameter
     parameters:
-        - string:
-            name: BUILD_DIRECTORY
-            default: $WORKSPACE/build_output
-            description: "Directory where the build artifact will be located upon the completion of the build."
-        - string:
-            name: CACHE_DIRECTORY
-            default: $HOME/opnfv/cache/$INSTALLER_TYPE
-            description: "Directory where the cache to be used during the build is located."
         - string:
             name: GS_URL
             default: artifacts.opnfv.org/$PROJECT{gs-pathname}
index 076dc47..d8db620 100755 (executable)
@@ -4,7 +4,7 @@ export PYTHONPATH="${PYTHONPATH}:./reporting"
 export CONFIG_REPORTING_YAML=./reporting/reporting.yaml
 
 declare -a versions=(danube master)
-declare -a projects=(functest storperf yardstick qtip)
+declare -a projects=(functest storperf yardstick qtip vsperf)
 
 project=$1
 reporting_type=$2
@@ -32,6 +32,7 @@ cp -Rf js display
 # yardstick | status
 # storperf  | status
 # qtip      | status
+# vsperf    | status
 
 function report_project()
 {
index c7c2051..02bf67d 100755 (executable)
@@ -7,18 +7,19 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 #
 import datetime
-import jinja2
 import os
 import sys
 import time
 
+import jinja2
+
 import testCase as tc
 import scenarioResult as sr
+import reporting.utils.reporting_utils as rp_utils
 
-# manage conf
-import utils.reporting_utils as rp_utils
-
-"""Functest reporting status"""
+"""
+Functest reporting status
+"""
 
 # Logger
 logger = rp_utils.getLogger("Functest-Status")
@@ -106,7 +107,8 @@ for version in versions:
     for installer in installers:
 
         # get scenarios
-        scenario_results = rp_utils.getScenarios(healthcheck,
+        scenario_results = rp_utils.getScenarios("functest",
+                                                 "connection_check",
                                                  installer,
                                                  version)
         # get nb of supported architecture (x86, aarch64)
@@ -219,7 +221,7 @@ for version in versions:
                                 logger.debug("No results found")
 
                         items[s] = testCases2BeDisplayed
-                except:
+                except Exception:
                     logger.error("Error: installer %s, version %s, scenario %s"
                                  % (installer, version, s))
                     logger.error("No data available: %s" % (sys.exc_info()[0]))
@@ -279,13 +281,13 @@ for version in versions:
             template = templateEnv.get_template(TEMPLATE_FILE)
 
             outputText = template.render(
-                            scenario_stats=scenario_stats,
-                            scenario_results=scenario_result_criteria,
-                            items=items,
-                            installer=installer_display,
-                            period=period,
-                            version=version,
-                            date=reportingDate)
+                scenario_stats=scenario_stats,
+                scenario_results=scenario_result_criteria,
+                items=items,
+                installer=installer_display,
+                period=period,
+                version=version,
+                date=reportingDate)
 
             with open("./display/" + version +
                       "/functest/status-" +
@@ -298,8 +300,6 @@ for version in versions:
 
             # Generate outputs for export
             # pdf
-            # TODO Change once web site updated...use the current one
-            # to test pdf production
             url_pdf = rp_utils.get_config('general.url')
             pdf_path = ("./display/" + version +
                         "/functest/status-" + installer_display + ".html")
index 0c188a3..103b80f 100644 (file)
@@ -7,13 +7,12 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 #
 import datetime
-import jinja2
 import os
 
-# manage conf
-import utils.reporting_utils as rp_utils
+import jinja2
 
-import utils.scenarioResult as sr
+import reporting.utils.reporting_utils as rp_utils
+import reporting.utils.scenarioResult as sr
 
 installers = rp_utils.get_config('general.installers')
 versions = rp_utils.get_config('general.versions')
@@ -39,7 +38,8 @@ for version in versions:
     for installer in installers:
         # get scenarios results data
         # for the moment we consider only 1 case snia_steady_state
-        scenario_results = rp_utils.getScenarios("snia_steady_state",
+        scenario_results = rp_utils.getScenarios("storperf",
+                                                 "snia_steady_state",
                                                  installer,
                                                  version)
         # logger.info("scenario_results: %s" % scenario_results)
index 6282091..235bd6e 100644 (file)
@@ -20,15 +20,15 @@ import yaml
 #               YAML UTILS
 #
 # -----------------------------------------------------------
-def get_parameter_from_yaml(parameter, file):
+def get_parameter_from_yaml(parameter, config_file):
     """
     Returns the value of a given parameter in file.yaml
     parameter must be given in string format with dots
     Example: general.openstack.image_name
     """
-    with open(file) as f:
-        file_yaml = yaml.safe_load(f)
-    f.close()
+    with open(config_file) as my_file:
+        file_yaml = yaml.safe_load(my_file)
+    my_file.close()
     value = file_yaml
     for element in parameter.split("."):
         value = value.get(element)
@@ -39,6 +39,9 @@ def get_parameter_from_yaml(parameter, file):
 
 
 def get_config(parameter):
+    """
+    Get configuration parameter from yaml configuration file
+    """
     yaml_ = os.environ["CONFIG_REPORTING_YAML"]
     return get_parameter_from_yaml(parameter, yaml_)
 
@@ -49,20 +52,23 @@ def get_config(parameter):
 #
 # -----------------------------------------------------------
 def getLogger(module):
-    logFormatter = logging.Formatter("%(asctime)s [" +
-                                     module +
-                                     "] [%(levelname)-5.5s]  %(message)s")
+    """
+    Get Logger
+    """
+    log_formatter = logging.Formatter("%(asctime)s [" +
+                                      module +
+                                      "] [%(levelname)-5.5s]  %(message)s")
     logger = logging.getLogger()
     log_file = get_config('general.log.log_file')
     log_level = get_config('general.log.log_level')
 
-    fileHandler = logging.FileHandler("{0}/{1}".format('.', log_file))
-    fileHandler.setFormatter(logFormatter)
-    logger.addHandler(fileHandler)
+    file_handler = logging.FileHandler("{0}/{1}".format('.', log_file))
+    file_handler.setFormatter(log_formatter)
+    logger.addHandler(file_handler)
 
-    consoleHandler = logging.StreamHandler()
-    consoleHandler.setFormatter(logFormatter)
-    logger.addHandler(consoleHandler)
+    console_handler = logging.StreamHandler()
+    console_handler.setFormatter(log_formatter)
+    logger.addHandler(console_handler)
     logger.setLevel(log_level)
     return logger
 
@@ -73,6 +79,9 @@ def getLogger(module):
 #
 # -----------------------------------------------------------
 def getApiResults(case, installer, scenario, version):
+    """
+    Get Results by calling the API
+    """
     results = json.dumps([])
     # to remove proxy (to be removed at the end for local test only)
     # proxy_handler = urllib2.ProxyHandler({})
@@ -94,29 +103,32 @@ def getApiResults(case, installer, scenario, version):
         response = urlopen(request)
         k = response.read()
         results = json.loads(k)
-    except URLError as e:
-        print 'No kittez. Got an error code:'.format(e)
+    except URLError:
+        print "Error when retrieving results form API"
 
     return results
 
 
-def getScenarios(case, installer, version):
-
-    try:
-        case = case.getName()
-    except:
-        # if case is not an object test case, try the string
-        if type(case) == str:
-            case = case
-        else:
-            raise ValueError("Case cannot be evaluated")
+def getScenarios(project, case, installer, version):
+    """
+    Get the list of Scenarios
+    """
 
     period = get_config('general.period')
     url_base = get_config('testapi.url')
 
-    url = ("http://" + url_base + "?case=" + case +
-           "&period=" + str(period) + "&installer=" + installer +
-           "&version=" + version)
+    url = ("http://" + url_base +
+           "?installer=" + installer +
+           "&period=" + str(period))
+
+    if version is not None:
+        url += "&version=" + version
+
+    if project is not None:
+        url += "&project=" + project
+
+    if case is not None:
+        url += "&case=" + case
 
     try:
         request = Request(url)
@@ -136,7 +148,7 @@ def getScenarios(case, installer, version):
                     results = json.loads(k)
                     test_results += results['results']
         except KeyError:
-            print ('No pagination detected')
+            print "No pagination detected"
     except URLError as err:
         print 'Got an error code: {}'.format(err)
 
@@ -144,32 +156,38 @@ def getScenarios(case, installer, version):
         test_results.reverse()
         scenario_results = {}
 
-        for r in test_results:
+        for my_result in test_results:
             # Retrieve all the scenarios per installer
-            if not r['scenario'] in scenario_results.keys():
-                scenario_results[r['scenario']] = []
+            if not my_result['scenario'] in scenario_results.keys():
+                scenario_results[my_result['scenario']] = []
             # Do we consider results from virtual pods ...
             # Do we consider results for non HA scenarios...
             exclude_virtual_pod = get_config('functest.exclude_virtual')
             exclude_noha = get_config('functest.exclude_noha')
-            if ((exclude_virtual_pod and "virtual" in r['pod_name']) or
-                    (exclude_noha and "noha" in r['scenario'])):
+            if ((exclude_virtual_pod and "virtual" in my_result['pod_name']) or
+                    (exclude_noha and "noha" in my_result['scenario'])):
                 print "exclude virtual pod results..."
             else:
-                scenario_results[r['scenario']].append(r)
+                scenario_results[my_result['scenario']].append(my_result)
 
     return scenario_results
 
 
 def getScenarioStats(scenario_results):
+    """
+    Get the number of occurrences of scenarios over the defined PERIOD
+    """
     scenario_stats = {}
-    for k, v in scenario_results.iteritems():
-        scenario_stats[k] = len(v)
-
+    for res_k, res_v in scenario_results.iteritems():
+        scenario_stats[res_k] = len(res_v)
     return scenario_stats
 
 
 def getScenarioStatus(installer, version):
+    """
+    Get the status of a scenario for Yardstick
+    they used criteria SUCCESS (default: PASS)
+    """
     period = get_config('general.period')
     url_base = get_config('testapi.url')
 
@@ -184,33 +202,37 @@ def getScenarioStatus(installer, version):
         response.close()
         results = json.loads(k)
         test_results = results['results']
-    except URLError as e:
-        print 'Got an error code: {}'.format(e)
+    except URLError:
+        print "GetScenarioStatus: error when calling the API"
 
     scenario_results = {}
     result_dict = {}
     if test_results is not None:
-        for r in test_results:
-            if r['stop_date'] != 'None' and r['criteria'] is not None:
-                if not r['scenario'] in scenario_results.keys():
-                    scenario_results[r['scenario']] = []
-                scenario_results[r['scenario']].append(r)
-
-        for k, v in scenario_results.items():
+        for test_r in test_results:
+            if (test_r['stop_date'] != 'None' and
+                    test_r['criteria'] is not None):
+                if not test_r['scenario'] in scenario_results.keys():
+                    scenario_results[test_r['scenario']] = []
+                scenario_results[test_r['scenario']].append(test_r)
+
+        for scen_k, scen_v in scenario_results.items():
             # scenario_results[k] = v[:LASTEST_TESTS]
             s_list = []
-            for element in v:
+            for element in scen_v:
                 if element['criteria'] == 'SUCCESS':
                     s_list.append(1)
                 else:
                     s_list.append(0)
-            result_dict[k] = s_list
+            result_dict[scen_k] = s_list
 
     # return scenario_results
     return result_dict
 
 
 def getQtipResults(version, installer):
+    """
+    Get QTIP results
+    """
     period = get_config('qtip.period')
     url_base = get_config('testapi.url')
 
@@ -240,19 +262,24 @@ def getQtipResults(version, installer):
 
 
 def getNbtestOk(results):
+    """
+    based on default value (PASS) count the number of test OK
+    """
     nb_test_ok = 0
-    for r in results:
-        for k, v in r.iteritems():
+    for my_result in results:
+        for res_k, res_v in my_result.iteritems():
             try:
-                if "PASS" in v:
+                if "PASS" in res_v:
                     nb_test_ok += 1
-            except:
+            except Exception:
                 print "Cannot retrieve test status"
     return nb_test_ok
 
 
 def getResult(testCase, installer, scenario, version):
-
+    """
    Get Result for a given Functest Testcase
+    """
     # retrieve raw results
     results = getApiResults(testCase, installer, scenario, version)
     # let's concentrate on test results only
@@ -269,10 +296,10 @@ def getResult(testCase, installer, scenario, version):
         # print " ---------------- "
         # print "nb of results:" + str(len(test_results))
 
-        for r in test_results:
+        for res_r in test_results:
             # print r["start_date"]
             # print r["criteria"]
-            scenario_results.append({r["start_date"]: r["criteria"]})
+            scenario_results.append({res_r["start_date"]: res_r["criteria"]})
         # sort results
         scenario_results.sort()
         # 4 levels for the results
@@ -295,7 +322,7 @@ def getResult(testCase, installer, scenario, version):
             test_result_indicator = 1
         else:
             # Test the last 4 run
-            if (len(scenario_results) > 3):
+            if len(scenario_results) > 3:
                 last4runResults = scenario_results[-4:]
                 nbTestOkLast4 = getNbtestOk(last4runResults)
                 # print "Nb test OK (last 4 run):"+ str(nbTestOkLast4)
@@ -309,19 +336,22 @@ def getResult(testCase, installer, scenario, version):
 
 
 def getJenkinsUrl(build_tag):
-    # e.g. jenkins-functest-apex-apex-daily-colorado-daily-colorado-246
-    # id = 246
-    # jenkins-functest-compass-huawei-pod5-daily-master-136
-    # id = 136
-    # note it is linked to jenkins format
-    # if this format changes...function to be adapted....
+    """
+    Get Jenkins url_base corresponding to the last test CI run
+    e.g. jenkins-functest-apex-apex-daily-colorado-daily-colorado-246
+    id = 246
+    jenkins-functest-compass-huawei-pod5-daily-master-136
+    id = 136
+    note it is linked to jenkins format
+    if this format changes...function to be adapted....
+    """
     url_base = get_config('functest.jenkins_url')
     try:
         build_id = [int(s) for s in build_tag.split("-") if s.isdigit()]
         url_id = (build_tag[8:-(len(str(build_id[0])) + 1)] +
                   "/" + str(build_id[0]))
         jenkins_url = url_base + url_id + "/console"
-    except:
+    except Exception:
         print 'Impossible to get jenkins url:'
 
     if "jenkins-" not in build_tag:
@@ -331,10 +361,13 @@ def getJenkinsUrl(build_tag):
 
 
 def getScenarioPercent(scenario_score, scenario_criteria):
+    """
+    Get success rate of the scenario (in %)
+    """
     score = 0.0
     try:
         score = float(scenario_score) / float(scenario_criteria) * 100
-    except:
+    except Exception:
         print 'Impossible to calculate the percentage score'
     return score
 
@@ -343,32 +376,41 @@ def getScenarioPercent(scenario_score, scenario_criteria):
 # Functest
 # *********
 def getFunctestConfig(version=""):
+    """
+    Get Functest configuration
+    """
     config_file = get_config('functest.test_conf') + version
     response = requests.get(config_file)
     return yaml.safe_load(response.text)
 
 
 def getArchitectures(scenario_results):
+    """
+    Get software architecture (x86 or Aarch64)
+    """
     supported_arch = ['x86']
-    if (len(scenario_results) > 0):
+    if len(scenario_results) > 0:
         for scenario_result in scenario_results.values():
             for value in scenario_result:
-                if ("armband" in value['build_tag']):
+                if "armband" in value['build_tag']:
                     supported_arch.append('aarch64')
                     return supported_arch
     return supported_arch
 
 
 def filterArchitecture(results, architecture):
+    """
+    Restrict the list of results based on given architecture
+    """
     filtered_results = {}
-    for name, results in results.items():
+    for name, res in results.items():
         filtered_values = []
-        for value in results:
-            if (architecture is "x86"):
+        for value in res:
+            if architecture is "x86":
                 # drop aarch64 results
                 if ("armband" not in value['build_tag']):
                     filtered_values.append(value)
-            elif(architecture is "aarch64"):
+            elif architecture is "aarch64":
                 # drop x86 results
                 if ("armband" in value['build_tag']):
                     filtered_values.append(value)
@@ -381,6 +423,9 @@ def filterArchitecture(results, architecture):
 # Yardstick
 # *********
 def subfind(given_list, pattern_list):
+    """
+    Yardstick util function
+    """
     LASTEST_TESTS = get_config('general.nb_iteration_tests_success_criteria')
     for i in range(len(given_list)):
         if given_list[i] == pattern_list[0] and \
@@ -390,7 +435,9 @@ def subfind(given_list, pattern_list):
 
 
 def _get_percent(status):
-
+    """
+    Yardstick util function to calculate success rate
+    """
     if status * 100 % 6:
         return round(float(status) * 100 / 6, 1)
     else:
@@ -398,13 +445,16 @@ def _get_percent(status):
 
 
 def get_percent(four_list, ten_list):
+    """
+    Yardstick util function to calculate success rate
+    """
     four_score = 0
     ten_score = 0
 
-    for v in four_list:
-        four_score += v
-    for v in ten_list:
-        ten_score += v
+    for res_v in four_list:
+        four_score += res_v
+    for res_v in ten_list:
+        ten_score += res_v
 
     LASTEST_TESTS = get_config('general.nb_iteration_tests_success_criteria')
     if four_score == LASTEST_TESTS:
@@ -420,9 +470,12 @@ def get_percent(four_list, ten_list):
 
 
 def _test():
+    """
+    Yardstick util function (test)
+    """
     status = getScenarioStatus("compass", "master")
     print "status:++++++++++++++++++++++++"
-    print(json.dumps(status, indent=4))
+    print json.dumps(status, indent=4)
 
 
 # ----------------------------------------------------------
@@ -432,8 +485,9 @@ def _test():
 # -----------------------------------------------------------
 
 def export_csv(scenario_file_name, installer, version):
-    # csv
-    # generate sub files based on scenario_history.txt
+    """
+    Generate sub files based on scenario_history.txt
+    """
     scenario_installer_file_name = ("./display/" + version +
                                     "/functest/scenario_history_" +
                                     installer + ".csv")
@@ -443,21 +497,25 @@ def export_csv(scenario_file_name, installer, version):
         for line in scenario_file:
             if installer in line:
                 scenario_installer_file.write(line)
-        scenario_installer_file.close
+    scenario_installer_file.close()
 
 
 def generate_csv(scenario_file):
+    """
+    Generate sub files based on scenario_history.txt
+    """
     import shutil
-    # csv
-    # generate sub files based on scenario_history.txt
     csv_file = scenario_file.replace('txt', 'csv')
     shutil.copy2(scenario_file, csv_file)
 
 
 def export_pdf(pdf_path, pdf_doc_name):
+    """
+    Export results to pdf
+    """
     try:
         pdfkit.from_file(pdf_path, pdf_doc_name)
     except IOError:
         print "Error but pdf generated anyway..."
-    except:
+    except Exception:
         print "impossible to generate PDF"
diff --git a/utils/test/reporting/reporting/vsperf/__init__.py b/utils/test/reporting/reporting/vsperf/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/utils/test/reporting/reporting/vsperf/reporting-status.py b/utils/test/reporting/reporting/vsperf/reporting-status.py
new file mode 100644 (file)
index 0000000..fc4cc67
--- /dev/null
@@ -0,0 +1,138 @@
+#!/usr/bin/python
+#
+# This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+import datetime
+import os
+
+import jinja2
+
+import reporting.utils.reporting_utils as rp_utils
+import reporting.utils.scenarioResult as sr
+
+installers = rp_utils.get_config('general.installers')
+PERIOD = rp_utils.get_config('general.period')
+
+# Logger
+logger = rp_utils.getLogger("Vsperf-Status")
+reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
+
+logger.info("*******************************************")
+logger.info("*   Generating reporting scenario status  *")
+logger.info("*   Data retention = %s days              *" % PERIOD)
+logger.info("*                                         *")
+logger.info("*******************************************")
+
+# retrieve the list of storperf tests
+versions = {'master'}
+
+# For all the versions
+for version in versions:
+    # For all the installers
+    for installer in installers:
+        scenario_results = rp_utils.getScenarios("vsperf",
+                                                 None,
+                                                 installer,
+                                                 None)
+        items = {}
+        scenario_result_criteria = {}
+        logger.info("installer %s, version %s, scenario ", installer, version)
+
+        # From each scenarios get results list
+        for s, s_result in scenario_results.items():
+            logger.info("---------------------------------")
+            logger.info("installer %s, version %s, scenario %s", installer,
+                        version, s)
+            ten_criteria = len(s_result)
+
+            ten_score = 0
+            for v in s_result:
+                if "PASS" in v['criteria']:
+                    ten_score += 1
+
+            logger.info("ten_score: %s / %s" % (ten_score, ten_criteria))
+
+            four_score = four_criteria = 0
+            try:
+                LASTEST_TESTS = rp_utils.get_config(
+                    'general.nb_iteration_tests_success_criteria')
+                s_result.sort(key=lambda x: x['start_date'])
+                four_result = s_result[-LASTEST_TESTS:]
+                logger.debug("four_result: {}".format(four_result))
+                logger.debug("LASTEST_TESTS: {}".format(LASTEST_TESTS))
+                # logger.debug("four_result: {}".format(four_result))
+                four_criteria = len(four_result)
+                for v in four_result:
+                    if "PASS" in v['criteria']:
+                        four_score += 1
+                logger.info("4 Score: %s / %s " % (four_score,
+                                                   four_criteria))
+            except Exception:
+                logger.error("Impossible to retrieve the four_score")
+
+            try:
+                s_status = (four_score * 100) / four_criteria
+            except ZeroDivisionError:
+                s_status = 0
+            logger.info("Score percent = %s" % str(s_status))
+            s_four_score = str(four_score) + '/' + str(four_criteria)
+            s_ten_score = str(ten_score) + '/' + str(ten_criteria)
+            s_score_percent = str(s_status)
+
+            logger.debug(" s_status: {}".format(s_status))
+            if s_status == 100:
+                logger.info(">>>>> scenario OK, save the information")
+            else:
+                logger.info(">>>> scenario not OK, last 4 iterations = %s, "
+                            "last 10 days = %s" % (s_four_score, s_ten_score))
+
+            s_url = ""
+            if len(s_result) > 0:
+                build_tag = s_result[len(s_result)-1]['build_tag']
+                logger.debug("Build tag: %s" % build_tag)
+                s_url = rp_utils.getJenkinsUrl(build_tag)
+                logger.info("last jenkins url: %s" % s_url)
+
+            # Save daily results in a file
+            path_validation_file = ("./display/" + version +
+                                    "/vsperf/scenario_history.txt")
+
+            if not os.path.exists(path_validation_file):
+                with open(path_validation_file, 'w') as f:
+                    info = 'date,scenario,installer,details,score\n'
+                    f.write(info)
+
+            with open(path_validation_file, "a") as f:
+                info = (reportingDate + "," + s + "," + installer +
+                        "," + s_ten_score + "," +
+                        str(s_score_percent) + "\n")
+                f.write(info)
+
+            scenario_result_criteria[s] = sr.ScenarioResult(s_status,
+                                                            s_four_score,
+                                                            s_ten_score,
+                                                            s_score_percent,
+                                                            s_url)
+
+            logger.info("--------------------------")
+
+        templateLoader = jinja2.FileSystemLoader(".")
+        templateEnv = jinja2.Environment(loader=templateLoader,
+                                         autoescape=True)
+
+        TEMPLATE_FILE = "./reporting/vsperf/template/index-status-tmpl.html"
+        template = templateEnv.get_template(TEMPLATE_FILE)
+
+        outputText = template.render(scenario_results=scenario_result_criteria,
+                                     installer=installer,
+                                     period=PERIOD,
+                                     version=version,
+                                     date=reportingDate)
+
+        with open("./display/" + version +
+                  "/vsperf/status-" + installer + ".html", "wb") as fh:
+            fh.write(outputText)
diff --git a/utils/test/reporting/reporting/vsperf/template/index-status-tmpl.html b/utils/test/reporting/reporting/vsperf/template/index-status-tmpl.html
new file mode 100644 (file)
index 0000000..7e06ef6
--- /dev/null
@@ -0,0 +1,114 @@
+ <html>
+  <head>
+    <meta charset="utf-8">
+    <!-- Bootstrap core CSS -->
+    <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet">
+    <link href="../../css/default.css" rel="stylesheet">
+    <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
+    <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
+    <script type="text/javascript" src="http://d3js.org/d3.v2.min.js"></script>
+    <script type="text/javascript" src="../../js/gauge.js"></script>
+    <script type="text/javascript" src="../../js/trend.js"></script>
+    <script>
+        function onDocumentReady() {
+            // Gauge management
+            {% for scenario in scenario_results.keys() -%}
+            var gaugeScenario{{loop.index}} = gauge('#gaugeScenario{{loop.index}}');
+            {%- endfor %}
+            // assign success rate to the gauge
+            function updateReadings() {
+                {% for scenario in scenario_results.keys() -%}
+                 gaugeScenario{{loop.index}}.update({{scenario_results[scenario].getScorePercent()}});
+                 {%- endfor %}
+            }
+            updateReadings();
+        }
+
+        // trend line management
+        d3.csv("./scenario_history.txt", function(data) {
+            // ***************************************
+            // Create the trend line
+            {% for scenario in scenario_results.keys() -%}
+            // for scenario {{scenario}}
+            // Filter results
+                var trend{{loop.index}} = data.filter(function(row) {
+                    return row["scenario"]=="{{scenario}}" && row["installer"]=="{{installer}}";
+                })
+            // Parse the date
+            trend{{loop.index}}.forEach(function(d) {
+                d.date = parseDate(d.date);
+                d.score = +d.score
+            });
+            // Draw the trend line
+            var mytrend = trend("#trend_svg{{loop.index}}",trend{{loop.index}})
+            // ****************************************
+            {%- endfor %}
+        });
+        if ( !window.isLoaded ) {
+            window.addEventListener("load", function() {
+            onDocumentReady();
+            }, false);
+        } else {
+            onDocumentReady();
+        }
+    </script>
+    <script type="text/javascript">
+    $(document).ready(function (){
+        $(".btn-more").click(function() {
+            $(this).hide();
+            $(this).parent().find(".panel-default").show();
+        });
+    })
+    </script>
+  </head>
+    <body>
+    <div class="container">
+      <div class="masthead">
+          <h3 class="text-muted">Vsperf status page ({{version}}, {{date}})</h3>
+        <nav>
+          <ul class="nav nav-justified">
+            <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
+            <li><a href="status-apex.html">Apex</a></li>
+            <li><a href="status-compass.html">Compass</a></li>
+            <li><a href="status-fuel.html">Fuel</a></li>
+            <li><a href="status-joid.html">Joid</a></li>
+          </ul>
+        </nav>
+      </div>
+<div class="row">
+    <div class="col-md-1"></div>
+    <div class="col-md-10">
+        <div class="page-header">
+            <h2>{{installer}}</h2>
+        </div>
+        <div><h1>Reported values represent the percentage of completed
+
+          CI tests during the reporting period, where results
+
+          were communicated to the Test Database.</h1></div>
+        <div class="scenario-overview">
+            <div class="panel-heading"><h4><b>List of last scenarios ({{version}}) run over the last {{period}} days </b></h4></div>
+                <table class="table">
+                    <tr>
+                        <th width="40%">Scenario</th>
+                        <th width="20%">Status</th>
+                        <th width="20%">Trend</th>
+                        <th width="10%">Last 4 Iterations</th>
+                        <th width="10%">Last 10 Days</th>
+                    </tr>
+                        {% for scenario,result in scenario_results.iteritems() -%}
+                            <tr class="tr-ok">
+                                <td><a href="{{scenario_results[scenario].getLastUrl()}}">{{scenario}}</a></td>
+                                <td><div id="gaugeScenario{{loop.index}}"></div></td>
+                                <td><div id="trend_svg{{loop.index}}"></div></td>
+                                <td>{{scenario_results[scenario].getFourDaysScore()}}</td>
+                                <td>{{scenario_results[scenario].getTenDaysScore()}}</td>
+                            </tr>
+                        {%- endfor %}
+                </table>
+        </div>
+
+
+    </div>
+    <div class="col-md-1"></div>
+</div>
index 85c386b..6584f4e 100644 (file)
@@ -7,14 +7,13 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 #
 import datetime
-import jinja2
 import os
 
-import utils.scenarioResult as sr
-from scenarios import config as cf
+import jinja2
 
-# manage conf
-import utils.reporting_utils as rp_utils
+import reporting.utils.scenarioResult as sr
+import reporting.utils.reporting_utils as rp_utils
+from scenarios import config as cf
 
 installers = rp_utils.get_config('general.installers')
 versions = rp_utils.get_config('general.versions')
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/noparam.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/noparam.ini
deleted file mode 100644 (file)
index be7f2b9..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-# to add a new parameter in the config file,
-# the CONF object in config.ini must be updated
-[mongo]
-# URL of the mongo DB
-# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
-url = mongodb://127.0.0.1:27017/
-
-[api]
-# Listening port
-port = 8000
-# With debug_on set to true, error traces will be shown in HTTP responses
-debug = True
-authenticate = False
-
-[ui]
-url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/normal.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/normal.ini
deleted file mode 100644 (file)
index c81c6c5..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-# to add a new parameter in the config file,
-# the CONF object in config.ini must be updated
-[mongo]
-# URL of the mongo DB
-# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
-url = mongodb://127.0.0.1:27017/
-dbname = test_results_collection
-
-[api]
-# Listening port
-port = 8000
-# With debug_on set to true, error traces will be shown in HTTP responses
-debug = True
-authenticate = False
-
-[ui]
-url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/nosection.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/nosection.ini
deleted file mode 100644 (file)
index a9ed49c..0000000
+++ /dev/null
@@ -1,11 +0,0 @@
-# to add a new parameter in the config file,
-# the CONF object in config.ini must be updated
-[api]
-# Listening port
-port = 8000
-# With debug_on set to true, error traces will be shown in HTTP responses
-debug = True
-authenticate = False
-
-[ui]
-url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/notboolean.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/notboolean.ini
deleted file mode 100644 (file)
index 3a11f9d..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-# to add a new parameter in the config file,
-# the CONF object in config.ini must be updated
-[mongo]
-# URL of the mongo DB
-# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
-url = mongodb://127.0.0.1:27017/
-dbname = test_results_collection
-
-[api]
-# Listening port
-port = 8000
-# With debug_on set to true, error traces will be shown in HTTP responses
-debug = True
-authenticate = notboolean
-
-[ui]
-url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/notint.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/notint.ini
deleted file mode 100644 (file)
index 8180719..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-# to add a new parameter in the config file,
-# the CONF object in config.ini must be updated
-[mongo]
-# URL of the mongo DB
-# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
-url = mongodb://127.0.0.1:27017/
-dbname = test_results_collection
-
-[api]
-# Listening port
-port = notint
-# With debug_on set to true, error traces will be shown in HTTP responses
-debug = True
-authenticate = False
-
-[ui]
-url = http://localhost:8000
index feff1da..75e621d 100644 (file)
@@ -5,4 +5,4 @@ import pytest
 
 @pytest.fixture
 def config_normal():
-    return path.join(path.dirname(__file__), 'common/normal.ini')
+    return path.join(path.dirname(__file__), '../../../etc/config.ini')
index 77a8d18..39633e5 100644 (file)
@@ -37,7 +37,8 @@ class TestBase(testing.AsyncHTTPTestCase):
 
     def _patch_server(self):
         import argparse
-        config = path.join(path.dirname(__file__), '../common/normal.ini')
+        config = path.join(path.dirname(__file__),
+                           '../../../../etc/config.ini')
         self.config_patcher = mock.patch(
             'argparse.ArgumentParser.parse_known_args',
             return_value=(argparse.Namespace(config_file=config), None))
@@ -46,9 +47,6 @@ class TestBase(testing.AsyncHTTPTestCase):
         self.config_patcher.start()
         self.db_patcher.start()
 
-    def set_config_file(self):
-        self.config_file = 'normal.ini'
-
     def get_app(self):
         from opnfv_testapi.cmd import server
         return server.make_app()