Merge "JJB 2.0.X does not allow scenario to be passed here"
author    mei mei <meimei@huawei.com>
Tue, 8 May 2018 01:32:33 +0000 (01:32 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Tue, 8 May 2018 01:32:33 +0000 (01:32 +0000)
17 files changed:
.gitignore
docs/conf.py [new file with mode: 0644]
docs/conf.yaml [new file with mode: 0644]
docs/requirements.txt [new file with mode: 0644]
jjb/apex/apex-build.sh
jjb/apex/apex-deploy.sh
jjb/apex/apex-download-artifact.sh
jjb/apex/apex-iso-verify.sh
jjb/apex/apex-upload-artifact.sh
jjb/apex/scenarios.yaml.hidden
jjb/armband/armband-verify-jobs.yml
jjb/fuel/fuel-verify-jobs.yml
jjb/functest/functest-alpine.sh
jjb/releng/opnfv-docker.yml
jjb/snaps/snaps-verify-jobs.yml
jjb/yardstick/yardstick-daily.sh
tox.ini

diff --git a/.gitignore b/.gitignore
index 4309e8e..7790d46 100644 (file)
@@ -5,6 +5,7 @@
 /releng/
 .idea
 *.py[cod]
+docs/_build/
 
 .Python
 env/
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644 (file)
index 0000000..86ab8c5
--- /dev/null
@@ -0,0 +1 @@
+from docs_conf.conf import *  # flake8: noqa
diff --git a/docs/conf.yaml b/docs/conf.yaml
new file mode 100644 (file)
index 0000000..749a4b1
--- /dev/null
@@ -0,0 +1,3 @@
+---
+project_cfg: opnfv
+project: releng
diff --git a/docs/requirements.txt b/docs/requirements.txt
new file mode 100644 (file)
index 0000000..f26b041
--- /dev/null
@@ -0,0 +1,3 @@
+lfdocs-conf
+sphinxcontrib-httpdomain
+sphinx-opnfv-theme
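These three files hook the repository into the shared LF documentation tooling: docs/conf.py pulls its Sphinx configuration from lfdocs-conf, and docs/requirements.txt pins the packages needed to build it. A local build outside of tox would look roughly like the following sketch (assumes pip and a Python 3 environment; output lands under the docs/_build/ path now ignored in .gitignore above):

    pip install -r docs/requirements.txt
    sphinx-build -b html -n -d docs/_build/doctrees docs docs/_build/html
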
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
index cf59998..aabd20e 100755 (executable)
@@ -18,10 +18,18 @@ elif echo $BUILD_TAG | grep "csit" 1> /dev/null; then
   export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
 elif [ "$ARTIFACT_VERSION" == "daily" ]; then
   export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
-  export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
+  if [ "$BRANCH" == 'master' ]; then
+    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
+  else
+    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
+  fi
 else
   export OPNFV_ARTIFACT_VERSION=${ARTIFACT_VERSION}
-  export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
+  if [ "$BRANCH" == 'master' ]; then
+    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
+  else
+    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
+  fi
 fi
 
 # Temporary hack until we fix apex build script
@@ -46,7 +54,7 @@ echo "Cache Directory Contents:"
 echo "-------------------------"
 ls -al $CACHE_DIRECTORY
 
-if [[ "$BUILD_ARGS" =~ '--iso' ]]; then
+if [[ "$BUILD_ARGS" =~ '--iso' && "$BRANCH" != 'master' ]]; then
   mkdir -p /tmp/apex-iso/
   rm -f /tmp/apex-iso/*.iso
   cp -f $BUILD_DIRECTORY/../.build/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso /tmp/apex-iso/
@@ -54,18 +62,32 @@ fi
 
 if ! echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
   echo "Writing opnfv.properties file"
-  # save information regarding artifact into file
-  (
-    echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
-    echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
-    echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
-    echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-    echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
-    echo "OPNFV_SRPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.src.rpm"
-    echo "OPNFV_RPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.noarch.rpm"
-    echo "OPNFV_RPM_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
-    echo "OPNFV_BUILD_URL=$BUILD_URL"
-  ) > $WORKSPACE/opnfv.properties
+  if [ "$BRANCH" != master ]; then
+    # save information regarding artifact into file
+    (
+      echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
+      echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
+      echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
+      echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
+      echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
+      echo "OPNFV_SRPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.src.rpm"
+      echo "OPNFV_RPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.noarch.rpm"
+      echo "OPNFV_RPM_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
+      echo "OPNFV_BUILD_URL=$BUILD_URL"
+    ) > $WORKSPACE/opnfv.properties
+  else
+    # save information regarding artifact into file
+    # we only generate the python package for master
+    (
+      echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
+      echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
+      echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
+      echo "OPNFV_SRPM_URL=$GS_URL/python34-opnfv-apex-$RPM_VERSION.src.rpm"
+      echo "OPNFV_RPM_URL=$GS_URL/python34-opnfv-apex-$RPM_VERSION.noarch.rpm"
+      echo "OPNFV_RPM_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/noarch/python34-opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
+      echo "OPNFV_BUILD_URL=$BUILD_URL"
+    ) > $WORKSPACE/opnfv.properties
+  fi
 fi
 echo "--------------------------------------------------------"
 echo "Done!"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index 958e4bf..35c2b85 100755 (executable)
@@ -18,20 +18,26 @@ else
 fi
 
 # Dev or RPM/ISO build
-# For upstream deployments we currently only use git repo and not RPM
-# Need to decide after Fraser if we want to use RPM or not for upstream
-if [[ "$ARTIFACT_VERSION" =~ dev || "$DEPLOY_SCENARIO" =~ "upstream" ]]; then
+if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
   # Settings for deploying from git workspace
   DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
   NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
-  DEPLOY_CMD="opnfv-deploy --image-dir ${WORKSPACE}/.build"
   CLEAN_CMD="opnfv-clean"
-  RESOURCES="${WORKSPACE}/.build/"
+  # if we are using master, then we are downloading/caching upstream images
+  # we want to use that built in mechanism to avoid re-downloading every job
+  # so we use a dedicated folder to hold the upstream cache
+  UPSTREAM_CACHE=$HOME/upstream_cache
+  if [ "$BRANCH" == 'master' ]; then
+    mkdir -p ${UPSTREAM_CACHE}
+    RESOURCES=$UPSTREAM_CACHE
+  else
+    RESOURCES="${WORKSPACE}/.build/"
+  fi
   CONFIG="${WORKSPACE}/build"
   BASE=$CONFIG
   IMAGES=$RESOURCES
   LIB="${WORKSPACE}/lib"
-
+  DEPLOY_CMD="opnfv-deploy --image-dir ${RESOURCES}"
   # Ensure artifacts were downloaded and extracted correctly
   # TODO(trozet) add verification here
 
@@ -43,9 +49,17 @@ if [[ "$ARTIFACT_VERSION" =~ dev || "$DEPLOY_SCENARIO" =~ "upstream" ]]; then
 else
   DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/"
   NETWORK_SETTINGS_DIR="/etc/opnfv-apex/"
-  DEPLOY_CMD="opnfv-deploy"
   CLEAN_CMD="opnfv-clean"
-  RESOURCES="/var/opt/opnfv/images"
+  # set to use different directory here because upon RPM removal this
+  # directory will be wiped in daily
+  UPSTREAM_CACHE=$HOME/upstream_cache
+  if [ "$BRANCH" == 'master' ]; then
+    mkdir -p ${UPSTREAM_CACHE}
+    RESOURCES=$UPSTREAM_CACHE
+  else
+    RESOURCES="/var/opt/opnfv/images"
+  fi
+  DEPLOY_CMD="opnfv-deploy --image-dir ${RESOURCES}"
   CONFIG="/var/opt/opnfv"
   BASE=$CONFIG
   IMAGES=$RESOURCES
@@ -123,7 +137,7 @@ else
   DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
 fi
 
-if [[ "$DEPLOY_SCENARIO" =~ "upstream" ]]; then
+if [[ "$BRANCH" == "master" ]]; then
   echo "Upstream deployment detected"
   DEPLOY_CMD="${DEPLOY_CMD} --upstream"
 fi
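Net effect of the hunks above for a master-branch run (sketch only, combining the changed lines): images are served from a per-node upstream cache rather than the workspace .build directory or the RPM-installed /var/opt/opnfv/images, and the deploy command later gains the --upstream flag:

    UPSTREAM_CACHE=$HOME/upstream_cache
    mkdir -p ${UPSTREAM_CACHE}
    RESOURCES=$UPSTREAM_CACHE
    DEPLOY_CMD="opnfv-deploy --image-dir ${RESOURCES}"
    DEPLOY_CMD="${DEPLOY_CMD} --upstream"   # appended once BRANCH == master is detected
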
diff --git a/jjb/apex/apex-download-artifact.sh b/jjb/apex/apex-download-artifact.sh
index 6c0f8fe..3efe1cb 100755 (executable)
@@ -17,19 +17,20 @@ else
   echo "Deploy scenario: ${DEPLOY_SCENARIO}"
 fi
 
-# if upstream we do not need to download anything
-if [[ "$DEPLOY_SCENARIO" =~ upstream ]]; then
-  echo "Upstream deployment detected, skipping download artifact"
-elif [[ "$ARTIFACT_VERSION" =~ dev ]]; then
-  # dev build
-  GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
-  export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
-  # get build artifact
-  pushd ${BUILD_DIRECTORY} > /dev/null
-  echo "Downloading packaged dev build: apex-${OPNFV_ARTIFACT_VERSION}.tar.gz"
-  curl --fail -s -o $BUILD_DIRECTORY/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz http://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
-  tar -xvf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
-  popd > /dev/null
+if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
+  if [ "$BRANCH" == 'master' ]; then
+    echo "Skipping download of artifacts for master branch"
+  else
+    # dev build
+    GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
+    export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
+    # get build artifact
+    pushd ${BUILD_DIRECTORY} > /dev/null
+    echo "Downloading packaged dev build: apex-${OPNFV_ARTIFACT_VERSION}.tar.gz"
+    curl --fail -s -o $BUILD_DIRECTORY/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz http://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
+    tar -xvf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
+    popd > /dev/null
+  fi
 else
   echo "Will use RPMs..."
 
@@ -45,20 +46,16 @@ else
   RPM_INSTALL_PATH=$(echo "http://"$OPNFV_RPM_URL | sed 's/\/'"$(basename $OPNFV_RPM_URL)"'//')
   RPM_LIST=$(basename $OPNFV_RPM_URL)
 
-  # find version of RPM
-  VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
-  # build RPM List which already includes base Apex RPM
-  RPM_LIST+=" opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
-
-  # add back legacy support for danube
-  if [ "$BRANCH" == 'stable/danube' ]; then
-    RPM_LIST+=" opnfv-apex-common-${VERSION_EXTENSION}.noarch.rpm"
-  else
+  if [ "$BRANCH" != 'master' ]; then
+    # find version of RPM
+    VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
+    # build RPM List which already includes base Apex RPM
+    RPM_LIST+=" opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
     RPM_LIST+=" python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"
   fi
 
   # remove old / install new RPMs
-  if rpm -q opnfv-apex > /dev/null; then
+  if rpm -q python34-opnfv-apex > /dev/null; then
     INSTALLED_RPMS=$(rpm -qa | grep apex)
     if [ -n "$INSTALLED_RPMS" ]; then
       sudo yum remove -y ${INSTALLED_RPMS}
diff --git a/jjb/apex/apex-iso-verify.sh b/jjb/apex/apex-iso-verify.sh
index 4faeb60..f349376 100755 (executable)
@@ -8,6 +8,11 @@ echo "Starting the Apex iso verify."
 echo "--------------------------------------------------------"
 echo
 
+if [ "$BRANCH" == 'master' ]; then
+  echo "Skipping Apex iso verify for master branch"
+  exit 0
+fi
+
 # Must be RPMs/ISO
 echo "Downloading latest properties file"
 
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
index 4037d25..8743368 100755 (executable)
@@ -109,8 +109,8 @@ fi
 if [ "$ARTIFACT_TYPE" == 'snapshot' ]; then
   uploadsnap
 elif [ "$ARTIFACT_TYPE" == 'iso' ]; then
-  if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
-    echo "Skipping artifact upload for ${ARTIFACT_TYPE} due to dev build"
+  if [[ "$ARTIFACT_VERSION" =~ dev || "$BRANCH" == 'master' ]]; then
+    echo "Skipping ISO artifact upload for ${ARTIFACT_TYPE} due to dev/master build"
     exit 0
   fi
   if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
@@ -119,20 +119,28 @@ elif [ "$ARTIFACT_TYPE" == 'iso' ]; then
   uploadiso
 elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
   if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
-    echo "dev build detected, will upload image tarball"
-    ARTIFACT_TYPE=tarball
-    uploadimages
+    if [ "$BRANCH" == 'master' ]; then
+      echo "will not upload artifacts, master uses upstream"
+      ARTIFACT_TYPE=none
+    else
+      echo "dev build detected, will upload image tarball"
+      ARTIFACT_TYPE=tarball
+      uploadimages
+    fi
   else
     RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
+    # RPM URL should be python package for master, and is only package we need
     RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
-    VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
-    RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
-    RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
     SRPM_INSTALL_PATH=$BUILD_DIRECTORY
     SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
-    VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
-    SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
-    SRPM_LIST+=" ${SRPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
+    if [ "$BRANCH" != 'master' ]; then
+      VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
+      RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
+      RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
+      VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
+      SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
+      SRPM_LIST+=" ${SRPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
+    fi
 
     if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
       signrpm
diff --git a/jjb/apex/scenarios.yaml.hidden b/jjb/apex/scenarios.yaml.hidden
index 91cd789..2650eaf 100644 (file)
@@ -1,6 +1,6 @@
 master:
-  - 'os-odl-master_upstream-noha'
-  - 'os-odl-queens_upstream-noha'
+  - 'os-odl-nofeature-noha'
+  - 'os-odl-queens-noha'
 fraser:
   - 'os-nosdn-nofeature-noha'
   - 'os-nosdn-nofeature-ha'
diff --git a/jjb/armband/armband-verify-jobs.yml b/jjb/armband/armband-verify-jobs.yml
index c800872..08cf3c0 100644 (file)
     # patch verification phases
     #####################################
     phase:
-      - 'basic':
-          slave-label: 'armband-virtual'
       - 'deploy-virtual':
           slave-label: 'armband-virtual'
-      - 'smoke-test':
-          slave-label: 'armband-virtual'
     #####################################
     # jobs
     #####################################
           installer: '{installer}'
       - '{installer}-defaults':
           gs-pathname: '{gs-pathname}'
+      - string:
+          name: DEPLOY_SCENARIO
+          default: 'os-nosdn-nofeature-ha'
 
     builders:
       - description-setter:
           description: "Built on $NODE_NAME"
       - multijob:
-          name: basic
+          name: deploy-virtual
           condition: SUCCESSFUL
           projects:
-            - name: 'armband-verify-basic-{stream}'
+            - name: 'armband-verify-deploy-virtual-{stream}'
               current-parameters: false
               predefined-parameters: |
                 BRANCH=$BRANCH
               node-parameters: false
               kill-phase-on: FAILURE
               abort-all-job: true
+
       - multijob:
-          name: deploy-virtual
+          name: smoke-test
           condition: SUCCESSFUL
           projects:
-            - name: 'armband-verify-deploy-virtual-{stream}'
+            # Use Functest job definition from jjb/functest/functest-daily-jobs
+            - name: 'functest-fuel-armband-virtual-suite-{stream}'
               current-parameters: false
               predefined-parameters: |
+                FUNCTEST_MODE=tier
+                FUNCTEST_TIER=healthcheck
+                # Should be in sync with fuel-deploy.sh default scenario
+                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
                 BRANCH=$BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
+              node-parameters: true
+              kill-phase-on: NEVER
               abort-all-job: true
-      - multijob:
-          name: smoke-test
-          condition: SUCCESSFUL
-          projects:
-            - name: 'armband-verify-smoke-test-{stream}'
+            - name: 'functest-fuel-armband-virtual-suite-{stream}'
               current-parameters: false
               predefined-parameters: |
+                FUNCTEST_MODE=testcase
+                FUNCTEST_SUITE_NAME=vping_ssh
+                # Should be in sync with fuel-deploy.sh default scenario
+                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
                 BRANCH=$BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
+              node-parameters: true
+              kill-phase-on: NEVER
               abort-all-job: true
 
 - job-template:
           use-build-blocker: true
           blocking-jobs:
             - 'armband-verify-deploy-.*'
-            - 'armband-verify-test-.*'
           block-level: 'NODE'
 
     scm:
 #####################################
 # builder macros
 #####################################
-- builder:
-    name: 'armband-verify-basic-macro'
-    builders:
-      - shell: |
-          #!/bin/bash
-
-          echo "Not activated!"
-
 - builder:
     name: 'armband-verify-deploy-virtual-macro'
     builders:
       - shell:
           !include-raw: ../fuel/fuel-deploy.sh
-
-- builder:
-    name: 'armband-verify-smoke-test-macro'
-    builders:
-      - shell: |
-          #!/bin/bash
-
-          echo "Not activated!"
diff --git a/jjb/fuel/fuel-verify-jobs.yml b/jjb/fuel/fuel-verify-jobs.yml
index d81b0ce..bef6a50 100644 (file)
     # patch verification phases
     #####################################
     phase:
-      - 'basic':
-          slave-label: 'fuel-virtual'
       - 'deploy-virtual':
           slave-label: 'fuel-virtual'
-      - 'smoke-test':
-          slave-label: 'fuel-virtual'
     #####################################
     # jobs
     #####################################
           installer: '{installer}'
       - '{installer}-defaults':
           gs-pathname: '{gs-pathname}'
+      - string:
+          name: DEPLOY_SCENARIO
+          default: 'os-nosdn-nofeature-ha'
 
     builders:
       - description-setter:
           description: "Built on $NODE_NAME"
       - multijob:
-          name: basic
+          name: deploy-virtual
           condition: SUCCESSFUL
           projects:
-            - name: 'fuel-verify-basic-{stream}'
+            - name: 'fuel-verify-deploy-virtual-{stream}'
               current-parameters: false
               predefined-parameters: |
                 BRANCH=$BRANCH
               node-parameters: false
               kill-phase-on: FAILURE
               abort-all-job: true
+
       - multijob:
-          name: deploy-virtual
+          name: smoke-test
           condition: SUCCESSFUL
           projects:
-            - name: 'fuel-verify-deploy-virtual-{stream}'
+            # Use Functest job definition from jjb/functest/functest-daily-jobs
+            - name: 'functest-fuel-virtual-suite-{stream}'
               current-parameters: false
               predefined-parameters: |
+                FUNCTEST_MODE=tier
+                FUNCTEST_TIER=healthcheck
+                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
                 BRANCH=$BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
+              node-parameters: true
+              kill-phase-on: NEVER
               abort-all-job: true
-      - multijob:
-          name: smoke-test
-          condition: SUCCESSFUL
-          projects:
-            - name: 'fuel-verify-smoke-test-{stream}'
+            - name: 'functest-fuel-virtual-suite-{stream}'
               current-parameters: false
               predefined-parameters: |
+                FUNCTEST_MODE=testcase
+                FUNCTEST_SUITE_NAME=vping_ssh
+                DEPLOY_SCENARIO=$DEPLOY_SCENARIO
                 BRANCH=$BRANCH
                 GERRIT_REFSPEC=$GERRIT_REFSPEC
                 GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
                 GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
-              node-parameters: false
-              kill-phase-on: FAILURE
+              node-parameters: true
+              kill-phase-on: NEVER
               abort-all-job: true
 
 - job-template:
           use-build-blocker: true
           blocking-jobs:
             - 'fuel-verify-deploy-.*'
-            - 'fuel-verify-test-.*'
           block-level: 'NODE'
 
     scm:
 #####################################
 # builder macros
 #####################################
-- builder:
-    name: 'fuel-verify-basic-macro'
-    builders:
-      - shell: |
-          #!/bin/bash
-
-          echo "Not activated!"
-
 - builder:
     name: 'fuel-verify-deploy-virtual-macro'
     builders:
       - shell:
           !include-raw: ./fuel-deploy.sh
-
-- builder:
-    name: 'fuel-verify-smoke-test-macro'
-    builders:
-      - shell: |
-          #!/bin/bash
-
-          echo "Not activated!"
diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh
index 440b00a..1df9a55 100755 (executable)
@@ -172,10 +172,10 @@ elif [ ${FUNCTEST_MODE} == 'tier' ]; then
     tiers=(${FUNCTEST_TIER})
     run_tiers ${tiers}
 else
-    if [ ${DEPLOY_TYPE} == 'baremetal' ]; then
+    if [ ${DEPLOY_TYPE} == 'baremetal' ] && [ "${HOST_ARCH}" != "aarch64" ]; then
         tiers=(healthcheck smoke features vnf parser)
     else
-        tiers=(healthcheck smoke features)
+        tiers=(healthcheck smoke features parser)
     fi
     run_tiers ${tiers}
 fi
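Resolved tier selection after this change (sketch of the resulting branch of the script, for readability): the full set including vnf is now limited to x86_64 baremetal deploys, and every other deploy picks up the parser tier:

    if [ ${DEPLOY_TYPE} == 'baremetal' ] && [ "${HOST_ARCH}" != "aarch64" ]; then
        tiers=(healthcheck smoke features vnf parser)
    else
        tiers=(healthcheck smoke features parser)
    fi
    run_tiers ${tiers}
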
diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml
index 0fe76f0..b7d1ce6 100644 (file)
           <<: *danube
           <<: *other-receivers
       # projects with jobs for fraser
+      - 'bottlenecks':
+          project: 'bottlenecks'
+          <<: *fraser
+          <<: *other-receivers
       - 'clover':
           project: 'clover'
           dockerdir: '.'
diff --git a/jjb/snaps/snaps-verify-jobs.yml b/jjb/snaps/snaps-verify-jobs.yml
index 44a1fbf..10514c3 100644 (file)
@@ -19,7 +19,7 @@
       - fraser: &fraser
           branch: 'stable/{stream}'
           gs-pathname: '/{stream}'
-          disabled: false
+          disabled: true
 
 - job-template:
     name: 'snaps-verify-{stream}'
diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh
index 2fd6848..783c64e 100755 (executable)
@@ -63,6 +63,7 @@ if [ "$(uname -m)" = 'aarch64' ]; then
 fi
 echo "Yardstick: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
 docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
+docker images
 
 # map log directory
 branch=${BRANCH##*/}
diff --git a/tox.ini b/tox.ini
index 6618a81..abdffc5 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -4,7 +4,7 @@
 # and then run "tox" from this directory.
 
 [tox]
-envlist = py27
+envlist = jjb,docs,docs-linkcheck
 skipsdist = True
 
 [testenv]
@@ -18,3 +18,14 @@ deps =
   jenkins-job-builder==1.6.1
 commands=
   jenkins-jobs test -o job_output -r jjb/global:{posargs:"jjb/"}
+
+[testenv:docs]
+deps = -r{toxinidir}/docs/requirements.txt
+commands =
+    sphinx-build -b html -n -d {envtmpdir}/doctrees ./docs {toxinidir}/docs/_build/html
+    echo "Generated docs available in {toxinidir}/docs/_build/html"
+whitelist_externals = echo
+
+[testenv:docs-linkcheck]
+deps = -r{toxinidir}/docs/requirements.txt
+commands = sphinx-build -b linkcheck -d {envtmpdir}/doctrees ./docs {toxinidir}/docs/_build/linkcheck
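With the new environments in place the documentation can be built and link-checked locally through tox, for example (run from the repository root):

    tox -e docs              # HTML build into docs/_build/html
    tox -e docs-linkcheck    # verify external links referenced by the docs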