Merge "bug fix: installation of node, bower and grunt for reporting docker"
author Morgan Richomme <morgan.richomme@orange.com>
Fri, 25 Aug 2017 08:06:18 +0000 (08:06 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Fri, 25 Aug 2017 08:06:18 +0000 (08:06 +0000)
29 files changed:
jjb/apex/apex-deploy.sh
jjb/apex/apex-download-artifact.sh
jjb/apex/apex-upload-artifact.sh
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/armband/armband-ci-jobs.yml
jjb/armband/armband-deploy.sh
jjb/bottlenecks/bottlenecks-run-suite.sh
jjb/calipso/calipso.yml
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-deploy.sh
jjb/daisy4nfv/daisy-daily-jobs.yml
jjb/dovetail/dovetail-run.sh
jjb/fuel/fuel-daily-jobs.yml
jjb/functest/functest-alpine.sh
jjb/functest/set-functest-env.sh
jjb/global/slave-params.yml
jjb/releng/opnfv-docker.yml
jjb/snaps/snaps-verify-jobs.yml [moved from jjb/snaps/snaps.yml with 78% similarity]
jjb/storperf/storperf-daily-jobs.yml [new file with mode: 0644]
jjb/storperf/storperf-verify-jobs.yml [new file with mode: 0644]
jjb/storperf/storperf.yml
utils/create_pod_file.py
utils/fetch_os_creds.sh
utils/push-test-logs.sh
utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
utils/test/testapi/opnfv_testapi/resources/scenario_models.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/scenario-c2.json
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py

index 3a2ca60..ed02714 100755 (executable)
@@ -3,7 +3,6 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-APEX_PKGS="common undercloud onos"
 IPV6_FLAG=False
 
 # log info to console
index 52c3c67..860cd60 100755 (executable)
@@ -3,8 +3,6 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-APEX_PKGS="common undercloud onos"
-
 # log info to console
 echo "Downloading the Apex artifact. This could take some time..."
 echo "--------------------------------------------------------"
@@ -40,9 +38,8 @@ else
   # find version of RPM
   VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
   # build RPM List which already includes base Apex RPM
-  for pkg in ${APEX_PKGS}; do
-    RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm"
-  done
+  RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
+  RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"
 
   # remove old / install new RPMs
   if rpm -q opnfv-apex > /dev/null; then
index f53451d..4037d25 100755 (executable)
@@ -126,15 +126,13 @@ elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
     RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
     RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
     VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
-    for pkg in common undercloud onos; do
-      RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
-    done
+    RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
+    RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
     SRPM_INSTALL_PATH=$BUILD_DIRECTORY
     SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
     VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
-    for pkg in common undercloud onos; do
-      SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
-    done
+    SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
+    SRPM_LIST+=" ${SRPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
 
     if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
       signrpm
index 07181fc..f0e0535 100644 (file)
     concurrent: true
 
     disabled: false
-
+    quiet-period: 30
     scm:
         - git-scm-gerrit
 
                 - 'functest.*'
                 - 'yardstick.*'
                 - 'dovetail.*'
+                - 'storperf.*'
         - throttle:
             max-per-node: 1
             max-total: 10
                   enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
                   abort-all-job: false
                   git-revision: false
+        - multijob:
+            name: StorPerf
+            condition: ALWAYS
+            projects:
+                - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  abort-all-job: false
+                  git-revision: false
 # Build status is always success due to the conditional plugin prefetching
 # build status before multijob phases execute
 #        - conditional-step:
index 2d760f8..5a44dbc 100644 (file)
     concurrent: true
 
     disabled: false
-
+    quiet-period: 30
     scm:
         - git-scm-gerrit
 
                 - 'functest.*'
                 - 'yardstick.*'
                 - 'dovetail.*'
+                - 'storperf.*'
         - throttle:
             max-per-node: 1
             max-total: 10
                   enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
                   abort-all-job: false
                   git-revision: false
+        - multijob:
+            name: StorPerf
+            condition: ALWAYS
+            projects:
+                - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  abort-all-job: false
+                  git-revision: false
 # Build status is always success due to the conditional plugin prefetching
 # build status before multijob phases execute
 #        - conditional-step:
index a5d75bd..cdc14e4 100644 (file)
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 1 * * *'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 16 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
     triggers:
index fc59120..05679aa 100755 (executable)
@@ -35,13 +35,6 @@ fi
 # set deployment parameters
 export TMPDIR=${WORKSPACE}/tmpdir
 
-# arm-pod4 is an aarch64 jenkins slave for the same POD as the
-# x86 jenkins slave arm-pod3; therefore we use the same pod name
-# to deploy the pod from both jenkins slaves
-if [[ "${NODE_NAME}" == "arm-pod4" ]]; then
-    NODE_NAME="arm-pod3"
-fi
-
 LAB_NAME=${NODE_NAME/-*}
 POD_NAME=${NODE_NAME/*-}
 
index 6bab0e4..a757043 100644 (file)
@@ -16,12 +16,19 @@ RELENG_REPO=${WORKSPACE}/releng
 [ -d ${RELENG_REPO} ] && rm -rf ${RELENG_REPO}
 git clone https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO} >${redirect}
 
+YARDSTICK_REPO=${WORKSPACE}/yardstick
+[ -d ${YARDSTICK_REPO} ] && rm -rf ${YARDSTICK_REPO}
+git clone https://gerrit.opnfv.org/gerrit/yardstick ${YARDSTICK_REPO} >${redirect}
+
 OPENRC=/tmp/admin_rc.sh
 OS_CACERT=/tmp/os_cacert
 
+BOTTLENECKS_CONFIG=/tmp
+
 if [[ $SUITE_NAME == *posca* ]]; then
     POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
 
+    # Preparing OpenStack RC and Cacert files
     echo "BOTTLENECKS INFO: fetching os credentials from $INSTALLER_TYPE"
     if [[ $INSTALLER_TYPE == 'compass' ]]; then
         if [[ ${BRANCH} == 'master' ]]; then
@@ -49,6 +56,67 @@ if [[ $SUITE_NAME == *posca* ]]; then
         exit 1
     fi
 
+    # Finding and creating POD description files from different deployments
+    ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+
+    if [ "$INSTALLER_TYPE" == "fuel" ]; then
+        echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
+        sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${BOTTLENECKS_CONFIG}/id_rsa
+    fi
+
+    if [ "$INSTALLER_TYPE" == "apex" ]; then
+        echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
+        sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${BOTTLENECKS_CONFIG}/id_rsa
+    fi
+
+    set +e
+
+    sudo pip install virtualenv
+
+    cd ${RELENG_REPO}/modules
+    sudo virtualenv venv
+    source venv/bin/activate
+    sudo pip install -e ./ >/dev/null
+    sudo pip install netaddr
+
+    if [[ ${INSTALLER_TYPE} == compass ]]; then
+        options="-u root -p root"
+    elif [[ ${INSTALLER_TYPE} == fuel ]]; then
+        options="-u root -p r00tme"
+    elif [[ ${INSTALLER_TYPE} == apex ]]; then
+        options="-u stack -k /root/.ssh/id_rsa"
+    else
+        echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently."
+    fi
+
+    if [[ ${INSTALLER_TYPE} != compass ]]; then
+        cmd="sudo python ${RELENG_REPO}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
+         -i ${INSTALLER_IP} ${options} -f ${BOTTLENECKS_CONFIG}/pod.yaml \
+         -s ${BOTTLENECKS_CONFIG}/id_rsa"
+        echo ${cmd}
+        ${cmd}
+    else
+        cmd="sudo cp ${YARDSTICK_REPO}/etc/yardstick/nodes/compass_sclab_virtual/pod.yaml \
+        ${BOTTLENECKS_CONFIG}"
+        echo ${cmd}
+        ${cmd}
+    fi
+
+    deactivate
+
+    set -e
+
+    cd ${WORKSPACE}
+
+    if [ -f ${BOTTLENECKS_CONFIG}/pod.yaml ]; then
+        echo "FILE: ${BOTTLENECKS_CONFIG}/pod.yaml:"
+        cat ${BOTTLENECKS_CONFIG}/pod.yaml
+    else
+        echo "ERROR: cannot find file ${BOTTLENECKS_CONFIG}/pod.yaml. Please check if it is existing."
+        sudo ls -al ${BOTTLENECKS_CONFIG}
+    fi
+
+    # Pulling Bottlenecks docker and passing environment variables
     echo "INFO: pulling Bottlenecks docker ${DOCKER_TAG}"
     docker pull opnfv/bottlenecks:${DOCKER_TAG} >$redirect
 
@@ -65,6 +133,7 @@ if [[ $SUITE_NAME == *posca* ]]; then
     ${cmd} >$redirect
     sleep 5
 
+    # Running test cases through Bottlenecks docker
     if [[ $SUITE_NAME == posca_stress_traffic ]]; then
         TEST_CASE=posca_factor_system_bandwidth
         testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
index b8d10eb..c5ba8eb 100644 (file)
@@ -20,6 +20,8 @@
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
+        - 'opnfv-build-defaults'
+
 
     scm:
         - git-scm-gerrit
@@ -56,4 +58,4 @@
             set -o nounset
             set -o pipefail
             cd $WORKSPACE
-            PYTHONPATH=$PWD/app python3 -m unittest discover -s app/test/fetch
+            PYTHONPATH=$PWD/app app/test/verify.sh
index 4c12f19..8b4a74b 100644 (file)
         - 'k8-nosdn-nofeature-ha':
             disabled: false
             auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-nosdn-nofeature-noha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-odl_l3-nofeature-noha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-odl_l2-moon-noha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-nosdn-kvm-noha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-odl-sfc-noha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+        - 'os-nosdn-dpdk-noha':
+            disabled: false
+            auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
 
 
     jobs:
 ########################
 # trigger macros
 ########################
+
+#---------------------------
+# ha-baremetal-centos-master
+#---------------------------
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-centos-master-trigger'
     triggers:
     triggers:
         - timed: ''
 
+#-----------------------------
+# noha-baremetal-centos-master
+#-----------------------------
+- trigger:
+    name: 'compass-os-nosdn-nofeature-noha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl_l3-nofeature-noha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl_l2-moon-noha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-kvm-noha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-noha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-dpdk-noha-baremetal-centos-master-trigger'
+    triggers:
+        - timed: ''
+
+#--------------------
+# ha-baremetal-master
+#--------------------
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 2 * * *'
+        - timed: '0 20 * * *'
 - trigger:
     name: 'compass-os-nosdn-openo-ha-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl-sfc-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 4 * * *'
+        - timed: '0 10 * * *'
 
+#----------------------
+# noha-baremetal-master
+#----------------------
+- trigger:
+    name: 'compass-os-nosdn-kvm-noha-baremetal-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-nofeature-noha-baremetal-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl_l3-nofeature-noha-baremetal-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl_l2-moon-noha-baremetal-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-noha-baremetal-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-dpdk-noha-baremetal-master-trigger'
+    triggers:
+        - timed: ''
 
+#--------------------
+# ha-baremetal-danube
+#--------------------
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-danube-trigger'
     triggers:
     triggers:
         - timed: ''
 
+#----------------------
+# noha-baremetal-danube
+#----------------------
+- trigger:
+    name: 'compass-os-nosdn-kvm-noha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-nofeature-noha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl_l3-nofeature-noha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl_l2-moon-noha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-noha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-dpdk-noha-baremetal-danube-trigger'
+    triggers:
+        - timed: ''
 
+#------------------
+# ha-virtual-master
+#------------------
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-virtual-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 22 * * *'
+        - timed: '0 12 * * *'
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 23 * * *'
+        - timed: '0 13 * * *'
 - trigger:
     name: 'compass-os-nosdn-dpdk-ha-virtual-master-trigger'
     triggers:
     triggers:
         - timed: '0 16 * * *'
 
+#--------------------
+# noha-virtual-master
+#--------------------
+- trigger:
+    name: 'compass-os-nosdn-kvm-noha-virtual-master-trigger'
+    triggers:
+        - timed: '0 13 * * *'
+- trigger:
+    name: 'compass-os-nosdn-nofeature-noha-virtual-master-trigger'
+    triggers:
+        - timed: '0 14 * * *'
+- trigger:
+    name: 'compass-os-odl_l3-nofeature-noha-virtual-master-trigger'
+    triggers:
+        - timed: '0 15 * * *'
+- trigger:
+    name: 'compass-os-odl_l2-moon-noha-virtual-master-trigger'
+    triggers:
+        - timed: '0 18 * * *'
+- trigger:
+    name: 'compass-os-odl-sfc-noha-virtual-master-trigger'
+    triggers:
+        - timed: '0 20 * * *'
+- trigger:
+    name: 'compass-os-nosdn-dpdk-noha-virtual-master-trigger'
+    triggers:
+        - timed: '0 11 * * *'
+
+#------------------
+# ha-virtual-danube
+#------------------
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-virtual-danube-trigger'
     triggers:
     name: 'compass-k8-nosdn-nofeature-ha-virtual-danube-trigger'
     triggers:
         - timed: ''
+
+#--------------------
+# noha-virtual-danube
+#--------------------
+- trigger:
+    name: 'compass-os-nosdn-kvm-noha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-nofeature-noha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl_l3-nofeature-noha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl_l2-moon-noha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-odl-sfc-noha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'compass-os-nosdn-dpdk-noha-virtual-danube-trigger'
+    triggers:
+        - timed: ''
index 7a5af5f..9d4ae51 100644 (file)
@@ -52,6 +52,11 @@ fi
 if [[ "$NODE_NAME" =~ "-virtual" ]]; then
     export NETWORK_CONF=$CONFDIR/vm_environment/$NODE_NAME/${NETWORK_CONF_FILE}
     export DHA_CONF=$CONFDIR/vm_environment/${DEPLOY_SCENARIO}.yml
+    if [[ "${DEPLOY_SCENARIO}" =~ "-moon-noha" ]]; then
+        export VIRT_NUMBER=3
+    elif [[ "${DEPLOY_SCENARIO}" =~ "-noha" ]]; then
+        export VIRT_NUMBER=2
+    fi
 else
     export INSTALL_NIC=eth1
     export NETWORK_CONF=$CONFDIR/hardware_environment/$NODE_NAME/${NETWORK_CONF_FILE}
index d7c1acb..9a680e7 100644 (file)
 - trigger:
     name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 12 * * *'
+        - timed: '0 16 * * *'
 # Basic NOHA Scenarios
 - trigger:
     name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
 - trigger:
     name: 'daisy-os-odl-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 16 * * *'
+        - timed: '0 12 * * *'
 #-----------------------------------------------
 # Triggers for job running on daisy-virtual against master branch
 #-----------------------------------------------
 - trigger:
     name: 'daisy-os-nosdn-nofeature-ha-virtual-daily-master-trigger'
     triggers:
-        - timed: '0 12 * * *'
+        - timed: '0 16 * * *'
 # Basic NOHA Scenarios
 - trigger:
     name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
 - trigger:
     name: 'daisy-os-odl-nofeature-ha-virtual-daily-master-trigger'
     triggers:
-        - timed: '0 16 * * *'
+        - timed: '0 12 * * *'
index d05b309..346a1ef 100755 (executable)
@@ -173,7 +173,7 @@ cat << EOF >$tempest_conf_file
 compute:
     min_compute_nodes: 2
     volume_device_name: ${volume_device}
-    min_microversion: 2.0
+    min_microversion: 2.2
     max_microversion: latest
 
 compute-feature-enabled:
index 4b2f8e9..cbdd3dd 100644 (file)
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '' # '5 20 * * *'
+        - timed: '5 20 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '' # '5 2 * * *'
+        - timed: '5 2 * * *'
 - trigger:
     name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '' # '5 5 * * *'
+        - timed: '5 5 * * *'
 - trigger:
     name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
     triggers:
index 9084cca..da09886 100644 (file)
@@ -67,9 +67,9 @@ fi
 volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol} ${cacert_file_vol}"
 
 
-tiers=(healthcheck smoke)
+tiers=(healthcheck smoke features vnf)
 for tier in ${tiers[@]}; do
-    FUNCTEST_IMAGE=opnfv/functest-${tier}
+    FUNCTEST_IMAGE=ollivier/functest-${tier}
     echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
     docker pull ${FUNCTEST_IMAGE}>/dev/null
     cmd="docker run ${envs} ${volumes} ${FUNCTEST_IMAGE}"
index f6071e3..7d9e737 100755 (executable)
@@ -33,8 +33,9 @@ if [ "$BRANCH" != 'stable/danube' ]; then
   echo "Functest: Download images that will be used by test cases"
   images_dir="${HOME}/opnfv/functest/images"
   chmod +x ${WORKSPACE}/functest/ci/download_images.sh
-  ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} ${DEPLOY_SCENARIO} ${HOST_ARCH} 2> ${redirect}
+  ${WORKSPACE}/functest/ci/download_images.sh ${images_dir}
   images_vol="-v ${images_dir}:/home/opnfv/functest/images"
+  echo "Functest: Images successfully downloaded"
 fi
 
 dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
@@ -43,11 +44,18 @@ sudo rm -rf ${dir_result}/*
 results_vol="-v ${dir_result}:/home/opnfv/functest/results"
 custom_params=
 test -f ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG} && custom_params=$(cat ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG})
+echo "Functest: custom parameters successfully retrieved: ${custom_params}"
 
 envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
     -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
     -e BUILD_TAG=${BUILD_TAG} -e CI_DEBUG=${CI_DEBUG} -e DEPLOY_TYPE=${DEPLOY_TYPE}"
 
+if [[ ${INSTALLER_TYPE} == 'fuel' && ! -z ${SALT_MASTER_IP} ]]; then
+  HOST_ARCH=$(ssh -l ubuntu ${SALT_MASTER_IP} -i ${SSH_KEY} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no \
+  "sudo salt 'cmp*' grains.get cpuarch --out yaml |awk '{print \$2; exit}'")
+  envs="${envs} -e POD_ARCH=${HOST_ARCH}"
+fi
+
 if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-ha'* ]]; then
     ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
     openo_msb_port=${openo_msb_port:-80}
@@ -64,6 +72,8 @@ else
   volumes="${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
 fi
 
+echo "Functest: volumes defined"
+
 FUNCTEST_IMAGE="opnfv/functest"
 if [ "$HOST_ARCH" = "aarch64" ]; then
     FUNCTEST_IMAGE="${FUNCTEST_IMAGE}_${HOST_ARCH}"
index 0e23380..f5de021 100644 (file)
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'intel-virtual10-defaults'
+    parameters:
+        - node:
+            name: SLAVE_NAME
+            description: 'Slave name on Jenkins'
+            allowed-slaves:
+                - intel-virtual10
+            default-slaves:
+                - intel-virtual10
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
     name: 'doctor-defaults'
     parameters:
index 7e605b9..9d27329 100644 (file)
             dockerdir: 'docker/storperf-reporting'
             <<: *master
             <<: *other-receivers
+        - 'storperf-swaggerui':
+            project: 'storperf'
+            dockerdir: 'docker/storperf-swaggerui'
+            <<: *master
+            <<: *other-receivers
         - 'yardstick':
             project: 'yardstick'
             <<: *master
similarity index 78%
rename from jjb/snaps/snaps.yml
rename to jjb/snaps/snaps-verify-jobs.yml
index 50b7c30..01ea3e4 100644 (file)
             branch: '{stream}'
             gs-pathname: ''
             disabled: false
-        - danube:
-            branch: 'stable/{stream}'
-            gs-pathname: '/{stream}'
-            disabled: false
 
 - job-template:
     name: 'snaps-verify-{stream}'
 
     disabled: '{obj:disabled}'
 
+    concurrent: false
+
     parameters:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - 'opnfv-build-ubuntu-defaults'
+        - string:
+            name: DEPLOYMENT_HOST_IP
+            default: 192.168.122.2
+            description: 'IP of the deployment node'
+        - string:
+            name: CONTROLLER_IP
+            default: 192.168.122.3
+            description: 'IP of the controller node'
+        - 'intel-virtual10-defaults'
 
     scm:
         - git-scm-gerrit
@@ -60,4 +66,7 @@
 
     builders:
         - shell: |
-            echo "Nothing to verify!"
+            #!/bin/bash
+
+            cd $WORKSPACE/ci
+            ./run_tests.sh $DEPLOYMENT_HOST_IP $CONTROLLER_IP
diff --git a/jjb/storperf/storperf-daily-jobs.yml b/jjb/storperf/storperf-daily-jobs.yml
new file mode 100644 (file)
index 0000000..e849e29
--- /dev/null
@@ -0,0 +1,175 @@
+###################################
+# job configuration for storperf
+###################################
+- project:
+    name: storperf-daily
+
+    project: storperf
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+    master: &master
+        stream: master
+        branch: '{stream}'
+        gs-pathname: ''
+        docker-tag: 'latest'
+#--------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#--------------------------------
+#    Installers using labels
+#            CI PODs
+# This section should only contain the installers
+# that have been switched using labels for slaves
+#--------------------------------
+    pod:
+## fuel CI PODs
+#        - baremetal:
+#            slave-label: fuel-baremetal
+#            installer: fuel
+#            <<: *master
+#        - virtual:
+#            slave-label: fuel-virtual
+#            installer: fuel
+#            <<: *master
+## joid CI PODs
+#        - baremetal:
+#            slave-label: joid-baremetal
+#            installer: joid
+#            <<: *master
+#        - virtual:
+#            slave-label: joid-virtual
+#            installer: joid
+#            <<: *master
+## compass CI PODs
+#        - baremetal:
+#            slave-label: compass-baremetal
+#            installer: compass
+#            <<: *master
+#        - virtual:
+#            slave-label: compass-virtual
+#            installer: compass
+#            <<: *master
+## apex CI PODs
+#        - virtual:
+#            slave-label: apex-virtual-master
+#            installer: apex
+#            <<: *master
+        - baremetal:
+            slave-label: apex-baremetal-master
+            installer: apex
+            <<: *master
+## armband CI PODs
+#        - armband-baremetal:
+#            slave-label: armband-baremetal
+#            installer: fuel
+#            <<: *master
+#        - armband-virtual:
+#            slave-label: armband-virtual
+#            installer: fuel
+#            <<: *master
+## daisy CI PODs
+#        - baremetal:
+#            slave-label: daisy-baremetal
+#            installer: daisy
+#            <<: *master
+#        - virtual:
+#            slave-label: daisy-virtual
+#            installer: daisy
+#            <<: *master
+
+    jobs:
+        - 'storperf-{installer}-{pod}-daily-{stream}'
+
+################################
+# job template
+################################
+- job-template:
+    name: 'storperf-{installer}-{pod}-daily-{stream}'
+
+    concurrent: true
+
+    properties:
+        - logrotate-default
+        - throttle:
+            enabled: true
+            max-per-node: 1
+            option: 'project'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER Scenario: $DEPLOY_SCENARIO'
+        - timeout:
+            timeout: '30'
+            abort: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-odl_l2-nofeature-ha'
+        - string:
+            name: DOCKER_TAG
+            default: '{docker-tag}'
+            description: 'Tag to pull docker image'
+        - string:
+            name: CLEAN_DOCKER_IMAGES
+            default: 'false'
+            description: 'Remove downloaded docker images (opnfv/storperf*:*)'
+        - string:
+            name: GS_PATHNAME
+            default: '{gs-pathname}'
+            description: "Version directory where the opnfv documents will be stored in gs repository"
+        - string:
+            name: DISK_TYPE
+            default: 'HDD'
+            description: 'The type of hard disk that Cinder uses'
+        - string:
+            name: VOLUME_SIZE
+            default: '2'
+            description: 'Size of Cinder volume (in GB)'
+        - string:
+            name: WORKLOADS
+            default: 'rw'
+            description: 'Workloads to run'
+        - string:
+            name: BLOCK_SIZES
+            default: '16384'
+            description: 'Block sizes for VM I/O operations'
+        - string:
+            name: QUEUE_DEPTHS
+            default: '4'
+            description: 'Number of simultaneous I/O operations to keep active'
+        - string:
+            name: STEADY_STATE_SAMPLES
+            default: '10'
+            description: 'Number of samples to use (1 per minute) to measure steady state'
+        - string:
+            name: TEST_CASE
+            choices:
+                - 'snia_steady_state'
+            description: 'The test case to run'
+
+    scm:
+        - git-scm
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - 'storperf-daily-builder'
+
+########################
+# builder macros
+########################
+- builder:
+    name: storperf-daily-builder
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            cd $WORKSPACE
+            ./ci/daily.sh
diff --git a/jjb/storperf/storperf-verify-jobs.yml b/jjb/storperf/storperf-verify-jobs.yml
new file mode 100644 (file)
index 0000000..55c4e4c
--- /dev/null
@@ -0,0 +1,190 @@
+- project:
+    name: storperf-verify
+
+    project: 'storperf'
+
+#--------------------------------
+# branches
+#--------------------------------
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+            docker-tag: 'latest'
+#--------------------------------
+# patch verification phases
+#--------------------------------
+    phase:
+        - 'unit-test':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'build-x86_64':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'build-aarch64':
+            slave-label: 'opnfv-build-ubuntu-arm'
+#--------------------------------
+# jobs
+#--------------------------------
+    jobs:
+        - 'storperf-verify-{stream}'
+        - 'storperf-verify-{phase}-{stream}'
+#--------------------------------
+# job templates
+#--------------------------------
+- job-template:
+    name: 'storperf-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    project-type: 'multijob'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - 'opnfv-build-defaults'
+
+    scm:
+        - git-scm-gerrit
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            # we do nothing here as the main work is done
+            # in the phase jobs
+            echo "Triggering phase jobs!"
+        - multijob:
+            name: 'storperf-build-and-unittest'
+            execution-type: PARALLEL
+            projects:
+                - name: 'storperf-verify-unit-test-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  git-revision: true
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: false
+                - name: 'storperf-verify-build-x86_64-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                    ARCH=x86_64
+                  git-revision: true
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: false
+                - name: 'storperf-verify-build-aarch64-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                    ARCH=aarch64
+                  git-revision: true
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: false
+
+- job-template:
+    name: 'storperf-verify-{phase}-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    wrappers:
+        - ssh-agent-wrapper
+        - build-timeout:
+            timeout: 30
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - '{slave-label}-defaults'
+
+    scm:
+        - git-scm-gerrit
+
+    builders:
+        - 'storperf-verify-{phase}-builders-macro'
+
+    publishers:
+        - 'storperf-verify-{phase}-publishers-macro'
+#--------------------------------
+# builder macros
+#--------------------------------
+- builder:
+    name: 'storperf-verify-unit-test-builders-macro'
+    builders:
+        - shell: |
+            $WORKSPACE/ci/verify.sh
+- builder:
+    name: 'storperf-verify-build-x86_64-builders-macro'
+    builders:
+        - shell: |
+            $WORKSPACE/ci/verify-build.sh
+- builder:
+    name: 'storperf-verify-build-aarch64-builders-macro'
+    builders:
+        - shell: |
+            $WORKSPACE/ci/verify-build.sh
+#--------------------------------
+# publisher macros
+#--------------------------------
+- publisher:
+    name: 'storperf-verify-unit-test-publishers-macro'
+    publishers:
+        - junit:
+            results: nosetests.xml
+        - cobertura:
+            report-file: "coverage.xml"
+            only-stable: "true"
+            health-auto-update: "true"
+            stability-auto-update: "true"
+            zoom-coverage-chart: "true"
+            targets:
+                - files:
+                    healthy: 10
+                    unhealthy: 20
+                    failing: 30
+                - method:
+                    healthy: 50
+                    unhealthy: 40
+                    failing: 30
+        - email-jenkins-admins-on-failure
+- publisher:
+    name: 'storperf-verify-build-x86_64-publishers-macro'
+    publishers:
+        - email-jenkins-admins-on-failure
+- publisher:
+    name: 'storperf-verify-build-aarch64-publishers-macro'
+    publishers:
+        - email-jenkins-admins-on-failure
index f675cbb..307becf 100644 (file)
@@ -4,9 +4,7 @@
     project: '{name}'
 
     jobs:
-        - 'storperf-verify-{stream}'
         - 'storperf-merge-{stream}'
-        - 'storperf-daily-{stream}'
 
     stream:
         - master:
             disabled: false
             docker-tag: 'stable'
 
-- job-template:
-    name: 'storperf-verify-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    node: opnfv-build-ubuntu
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-            branch: '{branch}'
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
-
-    scm:
-        - git-scm-gerrit
-
-    triggers:
-        - gerrit:
-            server-name: 'gerrit.opnfv.org'
-            trigger-on:
-                - patchset-created-event:
-                    exclude-drafts: 'false'
-                    exclude-trivial-rebase: 'false'
-                    exclude-no-code-change: 'false'
-                - draft-published-event
-                - comment-added-contains-event:
-                    comment-contains-value: 'recheck'
-                - comment-added-contains-event:
-                    comment-contains-value: 'reverify'
-            projects:
-              - project-compare-type: 'ANT'
-                project-pattern: '{project}'
-                branches:
-                  - branch-compare-type: 'ANT'
-                    branch-pattern: '**/{branch}'
-
-    builders:
-        - shell: |
-            $WORKSPACE/ci/verify.sh
-
-    publishers:
-        - junit:
-            results: nosetests.xml
-        - cobertura:
-            report-file: "coverage.xml"
-            only-stable: "true"
-            health-auto-update: "true"
-            stability-auto-update: "true"
-            zoom-coverage-chart: "true"
-            targets:
-                - files:
-                    healthy: 10
-                    unhealthy: 20
-                    failing: 30
-                - method:
-                    healthy: 50
-                    unhealthy: 40
-                    failing: 30
-        - email-jenkins-admins-on-failure
-
 - job-template:
     name: 'storperf-merge-{stream}'
 
                     failing: 30
         - email-jenkins-admins-on-failure
 
-- job-template:
-    name: 'storperf-daily-{stream}'
-
-    # Job template for daily builders
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
-    disabled: '{obj:disabled}'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-            branch: '{branch}'
-        - 'intel-pod9-defaults'
-        - string:
-            name: DEPLOY_SCENARIO
-            default: 'os-nosdn-nofeature-noha'
-        - string:
-            name: DOCKER_TAG
-            default: '{docker-tag}'
-            description: 'Tag to pull docker image'
-        - choice:
-            name: DISK_TYPE
-            choices:
-                - 'SSD'
-                - 'HDD'
-            default: 'HDD'
-            description: 'The type of hard disk that Cinder uses'
-        - string:
-            name: AGENT_COUNT
-            description: 'The number of slave agents to start. Defaults to the cinder node count'
-        - string:
-            name: VOLUME_SIZE
-            default: '4'
-            description: 'Size of Cinder volume (in GB)'
-        - string:
-            name: WORKLOADS
-            default: 'wr,rr,rw'
-            description: 'Workloads to run'
-        - string:
-            name: BLOCK_SIZES
-            default: '2048,16384'
-            description: 'Block sizes for VM I/O operations'
-        - string:
-            name: QUEUE_DEPTHS
-            default: '1,4'
-            description: 'Number of simultaneous I/O operations to keep active'
-        - string:
-            name: STEADY_STATE_SAMPLES
-            default: '10'
-            description: 'Number of samples to use (1 per minute) to measure steady state'
-        - string:
-            name: DEADLINE
-            description: 'Maximum run time in minutes if steady state cannot be found. Defaults to 3 times steady state samples'
-        - choice:
-            name: TEST_CASE
-            choices:
-                - 'snia_steady_state'
-            description: 'The test case to run'
-
-    scm:
-        - git-scm
-
-    triggers:
-        - timed: '0 22 * * *'
-
-    builders:
-        - shell: |
-            $WORKSPACE/ci/daily.sh
-
index e2c57d2..def5ecc 100644 (file)
@@ -12,6 +12,8 @@ parser.add_argument("-u", "--user", help="Give username of this pod")
 parser.add_argument("-k", "--key", help="Give key file of the user")
 parser.add_argument("-p", "--password", help="Give password of the user")
 parser.add_argument("-f", "--filepath", help="Give dest path of output file")
+parser.add_argument("-s", "--sshkey", default="/root/.ssh/id_rsa",
+                    help="Give the path for ssh key")
 args = parser.parse_args()
 
 
@@ -92,7 +94,7 @@ def create_file(handler, INSTALLER_TYPE):
             item['password'] = 'root'
     else:
         for item in node_list:
-            item['key_filename'] = '/root/.ssh/id_rsa'
+            item['key_filename'] = args.sshkey
     data = {'nodes': node_list}
     with open(args.filepath, "w") as fw:
         yaml.dump(data, fw)
index 8374edb..312e1ac 100755 (executable)
@@ -107,7 +107,7 @@ if [ "$installer_type" == "fuel" ]; then
 
         # retrieving controller vip
         controller_ip=$(ssh 2>/dev/null ${ssh_options} ubuntu@${installer_ip} \
-            "sudo salt --out txt 'ctl01*' pillar.get _param:openstack_control_address | awk '{print \$2}'" | \
+            "sudo salt --out txt 'ctl*' pillar.get _param:openstack_control_address | awk '{print \$2; exit}'" | \
             sed 's/ //g') &> /dev/null
 
         info "Fetching rc file from controller $controller_ip..."
index eb57deb..79190ec 100644 (file)
@@ -31,6 +31,7 @@ node_list=(\
 'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' 'huawei-pod5' \
 'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \
 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4' \
+'huawei-virtual5' 'huawei-virtual8' 'huawei-virtual9' \
 'zte-pod2' \
 'zte-virtual1')
 
index bd06400..d215d18 100644 (file)
@@ -255,7 +255,7 @@ class ScenarioUpdater(object):
     @iter_installers
     @iter_versions
     def _update_requests_change_owner(self, version):
-        version.owner = self.body
+        version.owner = self.body.get('owner')
 
     def _filter_installers(self, installers):
         return self._filter('installer', installers)
@@ -584,7 +584,7 @@ class ScenarioOwnerHandler(GenericScenarioUpdateHandler):
                 installer=<installer_name>& \
                 version=<version_name>
         @param body: new owner
-        @type body: L{string}
+        @type body: L{ScenarioChangeOwnerRequest}
         @in body: body
         @param installer: installer type
         @type installer: L{string}
index 7d07707..c6da76b 100644 (file)
@@ -16,6 +16,13 @@ class ScenarioTI(models.ModelBase):
         self.date = date
         self.status = status
 
+    def __eq__(self, other):
+        return (self.date == other.date and
+                self.status == other.status)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
 
 @swagger.model()
 class ScenarioScore(models.ModelBase):
@@ -23,6 +30,13 @@ class ScenarioScore(models.ModelBase):
         self.date = date
         self.score = score
 
+    def __eq__(self, other):
+        return (self.date == other.date and
+                self.score == other.score)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
 
 @swagger.model()
 class ScenarioProject(models.ModelBase):
@@ -50,10 +64,10 @@ class ScenarioProject(models.ModelBase):
                 'trust_indicators': ScenarioTI}
 
     def __eq__(self, other):
-        return [self.project == other.project and
+        return (self.project == other.project and
                 self._customs_eq(other) and
                 self._scores_eq(other) and
-                self._ti_eq(other)]
+                self._ti_eq(other))
 
     def __ne__(self, other):
         return not self.__eq__(other)
@@ -62,10 +76,10 @@ class ScenarioProject(models.ModelBase):
         return set(self.customs) == set(other.customs)
 
     def _scores_eq(self, other):
-        return set(self.scores) == set(other.scores)
+        return self.scores == other.scores
 
     def _ti_eq(self, other):
-        return set(self.trust_indicators) == set(other.trust_indicators)
+        return self.trust_indicators == other.trust_indicators
 
 
 @swagger.model()
@@ -84,9 +98,9 @@ class ScenarioVersion(models.ModelBase):
         return {'projects': ScenarioProject}
 
     def __eq__(self, other):
-        return [self.version == other.version and
+        return (self.version == other.version and
                 self.owner == other.owner and
-                self._projects_eq(other)]
+                self._projects_eq(other))
 
     def __ne__(self, other):
         return not self.__eq__(other)
@@ -116,7 +130,7 @@ class ScenarioInstaller(models.ModelBase):
         return {'versions': ScenarioVersion}
 
     def __eq__(self, other):
-        return [self.installer == other.installer and self._versions_eq(other)]
+        return (self.installer == other.installer and self._versions_eq(other))
 
     def __ne__(self, other):
         return not self.__eq__(other)
@@ -146,6 +160,12 @@ class ScenarioCreateRequest(models.ModelBase):
         return {'installers': ScenarioInstaller}
 
 
+@swagger.model()
+class ScenarioChangeOwnerRequest(models.ModelBase):
+    def __init__(self, owner=None):
+        self.owner = owner
+
+
 @swagger.model()
 class Scenario(models.ModelBase):
     """
@@ -166,7 +186,7 @@ class Scenario(models.ModelBase):
         return not self.__eq__(other)
 
     def __eq__(self, other):
-        return [self.name == other.name and self._installers_eq(other)]
+        return (self.name == other.name and self._installers_eq(other))
 
     def _installers_eq(self, other):
         for s_install in self.installers:
index b6a3b83..980051c 100644 (file)
@@ -8,7 +8,7 @@
       [
         {
           "owner": "Lucky",
-          "version": "colorado",
+          "version": "danube",
           "projects":
           [
             {
@@ -29,7 +29,7 @@
               "scores": [
                 {
                   "date": "2017-01-08 22:46:44",
-                  "score": "0"
+                  "score": "0/1"
                 }
               ],
               "trust_indicators": [
index 466caaf..50a8c8d 100644 (file)
@@ -47,8 +47,7 @@ class TestScenarioBase(base.TestBase):
             req = self.req_d
         self.assertIsNotNone(scenario._id)
         self.assertIsNotNone(scenario.creation_date)
-
-        scenario == models.Scenario.from_dict(req)
+        self.assertEqual(scenario, models.Scenario.from_dict(req))
 
     @staticmethod
     def _set_query(*args):
@@ -298,8 +297,9 @@ class TestScenarioUpdate(TestScenarioBase):
     @update_partial('_update', '_success')
     def test_changeOwner(self, scenario):
         new_owner = 'new_owner'
-        scenario['installers'][0]['versions'][0]['owner'] = 'www'
-        return new_owner, scenario
+        update = models.ScenarioChangeOwnerRequest(new_owner).format()
+        scenario['installers'][0]['versions'][0]['owner'] = new_owner
+        return update, scenario
 
     def _add(self, update_req, new_scenario):
         return self.post_direct_url(self.update_url, update_req)