--- /dev/null
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
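+# abort on any error, on use of unset variables, and on failures within pipelines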
+# log info to console
+echo "Starting unit tests for Apex..."
+echo "---------------------------------------------------------------------------------------"
+echo
+
+
+pushd ci/ > /dev/null
+sudo CONFIG="${WORKSPACE}/build" LIB="${WORKSPACE}/lib" ./clean.sh
+./test.sh
+popd
+
+echo "--------------------------------------------------------"
+echo "Unit Tests Done!"
- 'apex-verify-{stream2}'
builders:
+ - 'apex-unit-test'
- 'apex-build'
- trigger-builds:
- project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream2}'
max-total: 10
builders:
+ - 'apex-unit-test'
- 'apex-build'
- trigger-builds:
- project: 'apex-deploy-virtual-os-nosdn-nofeature-ha-{stream1}'
########################
# builder macros
########################
+- builder:
+ name: 'apex-unit-test'
+ builders:
+ - shell:
+ !include-raw: ./apex-unit-test.sh
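+            # !include-raw inlines the script contents verbatim into the generated job definition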
+
- builder:
name: 'apex-build'
builders:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
- master:
branch: '{stream}'
gs-pathname: ''
+ disabled: false
- brahmaputra:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: true
- job-template:
name: 'fastpathmetrics-verify-{stream}'
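+    # '{obj:disabled}' passes the per-stream value through as a boolean (master: false, brahmaputra: true) rather than as a string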
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project-type: freestyle
+ disabled: '{obj:disabled}'
+
concurrent: true
properties:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project-type: freestyle
+ disabled: '{obj:disabled}'
+
concurrent: true
properties:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
cd $WORKSPACE
# remove the expired items from cache
-./ci/clean_cache.sh $CACHE_DIRECTORY
+test -f $WORKSPACE/ci/clean_cache.sh && $WORKSPACE/ci/clean_cache.sh $CACHE_DIRECTORY
LATEST_ISO_PROPERTIES=$WORKSPACE/latest.iso.properties
if [[ "$JOB_NAME" =~ "daily" ]]; then
- virtual:
slave-label: fuel-virtual
<<: *brahmaputra
-
-# just in case if things go wrong
- - lf-pod2:
- slave-label: '{pod}'
- <<: *master
#--------------------------------
# Non-CI PODs
#--------------------------------
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-odl_l3-nofeature-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-onos-nofeature-ha':
+ - 'os-onos-sfc-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl_l2-bgpvpn-ha':
+ - 'os-onos-nofeature-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-odl_l2-sfc-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+ - 'os-odl_l2-bgpvpn-ha':
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-kvm-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-ovs-ha':
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 19 * * *'
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 0 * * *'
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 5 * * *'
- trigger:
- name: 'fuel-os-onos-nofeature-ha-baremetal-daily-master-trigger'
+ name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 10 * * *'
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger'
+ name: 'fuel-os-onos-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- timed: ''
- trigger:
name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger'
+ triggers:
+ - timed: '0 15 * * *'
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger'
triggers:
- timed: ''
- trigger:
triggers:
- timed: ''
#-----------------------------------------------
-# Triggers for job running on lf-pod2 against master branch
-#-----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-lf-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-lf-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-lf-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-lf-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-lf-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-lf-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-lf-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-lf-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-noha-lf-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-#-----------------------------------------------
# Triggers for job running on fuel-virtual against master branch
#-----------------------------------------------
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-ha-virtual-daily-master-trigger'
+ triggers:
+ - timed: ''
- trigger:
name: 'fuel-os-onos-nofeature-ha-virtual-daily-master-trigger'
triggers:
name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-master-trigger'
+ triggers:
+ - timed: ''
- trigger:
name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-master-trigger'
triggers:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
fi
    INSTALLER_IP=$(/usr/sbin/arp -e | grep "${instack_mac}" | awk '{print $1}')
sshkey="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
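+    # copy the stackrc credentials file from the installer VM so it can be mounted into the functest container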
+ sudo scp root@${INSTALLER_IP}:/home/stack/stackrc .
+    stackrc="-v $(pwd)/stackrc:/home/opnfv/functest/conf/stackrc"
+
if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
fi
echo "Functest: Pulling image opnfv/functest:${DOCKER_TAG}"
docker pull opnfv/functest:$DOCKER_TAG >/dev/null
-cmd="sudo docker run --privileged=true -id ${envs} ${labconfig} ${sshkey} ${res_volume} ${custom_params} opnfv/functest:${DOCKER_TAG} /bin/bash"
+cmd="sudo docker run --privileged=true -id ${envs} ${labconfig} ${sshkey} ${res_volume} ${custom_params} ${stackrc} opnfv/functest:${DOCKER_TAG} /bin/bash"
echo "Functest: Running docker run command: ${cmd}"
${cmd} >${redirect}
sleep 5
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
auto-trigger-name: 'daily-trigger-disabled'
- 'os-nosdn-nofeature-ha':
auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-lxd-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'os-nosdn-lxd-noha':
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- 'os-odl_l2-nofeature-ha':
auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- 'os-onos-nofeature-ha':
name: 'joid-os-onos-nofeature-ha-juniper-pod1-brahmaputra-trigger'
triggers:
- timed: ''
+
+# os-nosdn-lxd-noha trigger - branch: master
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-virtual-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-orange-pod2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-juniper-pod1-master-trigger'
+ triggers:
+ - timed: ''
+
+# os-nosdn-lxd-noha trigger - branch: stable/brahmaputra
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-baremetal-brahmaputra-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-virtual-brahmaputra-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-orange-pod2-brahmaputra-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-juniper-pod1-brahmaputra-trigger'
+ triggers:
+ - timed: ''
+
+# os-nosdn-lxd-ha trigger - branch: master
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-virtual-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-orange-pod2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-juniper-pod1-master-trigger'
+ triggers:
+ - timed: ''
+
+# os-nosdn-lxd-ha trigger - branch: stable/brahmaputra
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-baremetal-brahmaputra-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-virtual-brahmaputra-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-orange-pod2-brahmaputra-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-juniper-pod1-brahmaputra-trigger'
+ triggers:
+ - timed: ''
##
echo "------ Deploy with juju ------"
-echo "Execute: ./deploy.sh -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO"
+echo "Execute: ./deploy.sh -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES"
-./deploy.sh -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO
+./deploy.sh -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES
exit_on_error $? "Main deploy FAILED"
##
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
parameters:
- project-parameter:
project: '{project}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- git-scm:
parameters:
- project-parameter:
project: '{project}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
- string:
name: GS_URL
default: '$GS_BASE{gs-pathname}'
description: "Reinstall MAAS and Bootstrap before deploy [true/false]"
- string:
name: UBUNTU_DISTRO
- default: 'trusty'
+ default: 'xenial'
      description: "Ubuntu distribution to use for OpenStack (trusty|xenial)"
- string:
name: CPU_ARCHITECTURE
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'opnfv-build-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'intel-build1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-build1
- default-slaves:
- - intel-build1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-build2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-build2
- default-slaves:
- - intel-build2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-build3-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-build3
- default-slaves:
- - intel-build3
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'zte-pod1-defaults'
parameters:
name: LAB_CONFIG_URL
default: ssh://git@git.enea.com/pharos/lab-config
description: 'Base URI to the configuration directory'
+- parameter:
+ name: 'opnfv-build-centos-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build-centos'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+- parameter:
+ name: 'opnfv-build-ubuntu-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build-ubuntu'
+ description: 'Slave label on Jenkins'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
- master:
branch: '{stream}'
gs-pathname: ''
+ disabled: false
- brahmaputra:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: true
- job-template:
name: 'ovsnfv-verify-{stream}'
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'intel-build2-defaults'
+ - 'opnfv-build-centos-defaults'
- string:
name: GS_URL
default: '$GS_BASE{gs-pathname}'
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'intel-build2-defaults'
+ - 'opnfv-build-centos-defaults'
- string:
name: GS_URL
default: '$GS_BASE{gs-pathname}'
- job-template:
name: 'ovsnfv-daily-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
- - 'intel-build2-defaults'
+ - 'opnfv-build-centos-defaults'
- string:
name: GS_URL
default: '$GS_BASE{gs-pathname}'
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
- job-template:
name: 'storperf-verify-{stream}'
- node: opnfv-build
+ node: opnfv-build-ubuntu
parameters:
- project-parameter:
- job-template:
name: 'storperf-merge-{stream}'
- # builder-merge job to run JJB update
- #
- # This job's purpose is to update all the JJB
-
- node: opnfv-build
+ node: opnfv-build-ubuntu
parameters:
- project-parameter:
# Required Variables:
# stream: branch with - in place of / (eg. stable)
# branch: branch (eg. stable)
- node: opnfv-build
+ node: opnfv-build-ubuntu
disabled: true
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
# Check if controller is alive (online='True')
controller_ip=$(sshpass -p r00tme ssh 2>/dev/null $ssh_options root@${installer_ip} \
- 'fuel node | grep controller | grep "True\| 1" | awk "{print \$10}" | tail -1') &> /dev/null
+ 'fuel node | grep controller | grep "True\| 1" | awk -F\| "{print \$5}" | tail -1') &> /dev/null
    if [ -z "$controller_ip" ]; then
error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
\r
var opnfv_dashboard_testcases = {\r
'VIM': {\r
- 'Tempest': ['Tempest duration',\r
+ 'vping_ssh': ['vPing duration'],\r
+ 'vping_userdata': ['vPing_userdata duration'],\r
+ 'tempest': ['Tempest duration',\r
'Tempest nb tests/nb failures'],\r
- 'vPing': ['vPing duration'],\r
- 'vPing_userdata': ['vPing_userdata duration'],\r
- 'Rally': ['rally duration']\r
+ 'rally_sanity': ['rally duration']\r
},\r
'Controller': {\r
- 'ODL': ['ODL nb tests/nb failures'],\r
- 'ONOS': ['ONOS FUNCvirNet duration ',\r
+ 'odl': ['ODL nb tests/nb failures'],\r
+ 'onos': ['ONOS FUNCvirNet duration ',\r
'ONOS FUNCvirNet nb tests/nb failures',\r
'ONOS FUNCvirNetL3 duration',\r
'ONOS FUNCvirNetL3 nb tests/nb failures']\r
},\r
'Features': {\r
- 'vIMS': ['vIMS nb tests passed/failed/skipped',\r
+ 'vims': ['vIMS nb tests passed/failed/skipped',\r
'vIMS orchestrator/VNF/test duration'],\r
'promise': ['Promise duration ',\r
'Promise nb tests/nb failures'],\r
# init just tempest to get the list of scenarios
# as all the scenarios run Tempest
-tempest = tc.TestCase("Tempest", "functest", -1)
+tempest = tc.TestCase("tempest_smoke_serial", "functest", -1)
# Retrieve the Functest configuration to detect which tests are relevant
# according to the installer, scenario
print "Generate Tempest automatic reporting"
for installer in installers:
# we consider the Tempest results of the last PERIOD days
- url = "http://testresults.opnfv.org/test/api/v1/results?case=Tempest"
+ url = "http://testresults.opnfv.org/test/api/v1/results?case=tempest_smoke_serial"
request = Request(url + '&period=' + str(PERIOD)
+ '&installer=' + installer + '&version=master')
step_order = ["initialisation", "orchestrator", "vIMS", "sig_test"]
for installer in installers:
- request = Request('http://testresults.opnfv.org/test/api/v1/results?case=vIMS&installer=' + installer)
+ request = Request('http://testresults.opnfv.org/test/api/v1/results?case=vims&installer=' + installer)
try:
response = urlopen(request)
print 'No kittez. Got an error code:', e
test_results = results['results']
- test_results.reverse()
scenario_results = {}
for r in test_results:
versions = ["master"]
PERIOD = 10
MAX_SCENARIO_CRITERIA = 18
+URL_BASE = 'http://testresults.opnfv.org/test/api/v1/results'
# urllib2.install_opener(opener)
# url = "http://127.0.0.1:8000/results?case=" + case + \
# "&period=30&installer=" + installer
- url = ("http://testresults.opnfv.org/test/api/v1/results?case=" + case +
+ url = (reportingConf.URL_BASE + "?case=" + case +
"&period=" + str(reportingConf.PERIOD) + "&installer=" + installer +
"&scenario=" + scenario + "&version=" + version)
request = Request(url)
case = case.getName()
print case
- url = ("http://testresults.opnfv.org/test/api/v1/results?case=" + case +
+ url = (reportingConf.URL_BASE + "?case=" + case +
"&period=" + str(reportingConf.PERIOD) + "&installer=" + installer +
"&version=" + version)
request = Request(url)
<div class="panel-heading">
<div class="progress-bar" role="progressbar" aria-valuenow="{{result.pr_step_ok}}" aria-valuemin="0" aria-valuemax="100" style="width: {{result.pr_step_ok}}%"></div>
<span class="panel-header-item">
- <h4><b>{{result.creation_date}}</b></h4>
+ <h4><b>{{result.start_date}}</b></h4>
</span>
<span class="badge panel-pod-name">{{result.pod_name}}</span>
</div>
<ul class="nav nav-justified">
<li class="active"><a href="index.html">Home</a></li>
<li><a href="index-vims-fuel.html">Fuel</a></li>
- <li><a href="index--vims-compass.html">Compass</a></li>
+ <li><a href="index-vims-compass.html">Compass</a></li>
<li><a href="index-vims-joid.html">JOID</a></li>
<li><a href="index-vims-apex.html">APEX</a></li>
</ul>
<div class="panel-heading">
<div class="progress-bar" role="progressbar" aria-valuenow="{{result.pr_step_ok}}" aria-valuemin="0" aria-valuemax="100" style="width: {{result.pr_step_ok}}%"></div>
<span class="panel-header-item">
- <h4><b>{{result.creation_date}}</b></h4>
+ <h4><b>{{result.start_date}}</b></h4>
</span>
<span class="badge panel-pod-name">{{result.pod_name}}</span>
</div>
# '<name in the config>':'<name in the DB>'
        # I know it is ugly...
test_match_matrix = {'healthcheck': 'healthcheck',
- 'vping_ssh': 'vPing',
- 'vping_userdata': 'vPing_userdata',
- 'odl': 'ODL',
- 'onos': 'ONOS',
- 'ovno': 'ovno',
- 'tempest_smoke_serial': 'Tempest',
+ 'vping_ssh': 'vping_ssh',
+ 'vping_userdata': 'vping_userdata',
+ 'odl': 'odl',
+ 'onos': 'onos',
+ 'ocl': 'ocl',
+ 'tempest_smoke_serial': 'tempest_smoke_serial',
'tempest_full_parallel': 'tempest_full_parallel',
- 'rally_sanity': 'Rally',
+ 'rally_sanity': 'rally_sanity',
'bgpvpn': 'bgpvpn',
'rally_full': 'rally_full',
- 'vims': 'vIMS',
+ 'vims': 'vims',
'doctor': 'doctor-notification',
'promise': 'promise'
}
return test_match_matrix[self.name]
except:
return "unknown"
+
+ def getTestDisplayName(self):
+        # Map the test case name to a human-readable display name
+ test_match_matrix = {'healthcheck': 'healthcheck',
+ 'vping_ssh': 'vPing (ssh)',
+ 'vping_userdata': 'vPing (userdata)',
+ 'odl': 'ODL',
+ 'onos': 'ONOS',
+ 'ocl': 'OCL',
+ 'tempest_smoke_serial': 'Tempest (smoke)',
+ 'tempest_full_parallel': 'Tempest (full)',
+ 'rally_sanity': 'Rally (smoke)',
+ 'bgpvpn': 'bgpvpn',
+ 'rally_full': 'Rally (full)',
+ 'vims': 'vIMS',
+ 'doctor': 'Doctor',
+ 'promise': 'Promise'
+ }
+ try:
+ return test_match_matrix[self.name]
+ except:
+ return "unknown"
code, message = error(data)
raise HTTPError(code, message)
- data.creation_date = datetime.now()
- _id = yield self._eval_db(self.table, 'insert', data.format())
+ if self.table != 'results':
+ data.creation_date = datetime.now()
+ _id = yield self._eval_db(self.table, 'insert', data.format(),
+ check_keys=False)
if 'name' in self.json_args:
resource = data.name
else:
edit_request.update(self._update_requests(data))
""" Updating the DB """
- yield self._eval_db(self.table, 'update', query, edit_request)
+ yield self._eval_db(self.table, 'update', query, edit_request,
+ check_keys=False)
edit_request['_id'] = str(data._id)
self.finish_request(edit_request)
query[key] = new
return equal, query
- def _eval_db(self, table, method, *args):
- return eval('self.db.%s.%s(*args)' % (table, method))
+ def _eval_db(self, table, method, *args, **kwargs):
+ return eval('self.db.%s.%s(*args, **kwargs)' % (table, method))
def _eval_db_find_one(self, query, table=None):
if table is None:
return_one = True
docs = [docs]
+ if check_keys:
+ for doc in docs:
+ self._check_keys(doc)
+
ids = []
for doc in docs:
if '_id' not in doc:
doc['_id'] = str(ObjectId())
- if not check_keys or not self._find_one(doc['_id']):
+ if not self._find_one(doc['_id']):
ids.append(doc['_id'])
self.contents.append(doc_or_docs)
def find(self, *args):
return MemCursor(self._find(*args))
- def _update(self, spec, document):
+ def _update(self, spec, document, check_keys=True):
updated = False
+
+ if check_keys:
+ self._check_keys(document)
+
for index in range(len(self.contents)):
content = self.contents[index]
if self._in(content, spec):
self.contents[index] = content
return updated
- def update(self, spec, document):
- return thread_execute(self._update, spec, document)
+ def update(self, spec, document, check_keys=True):
+ return thread_execute(self._update, spec, document, check_keys)
def _remove(self, spec_or_id=None):
if spec_or_id is None:
def clear(self):
self._remove()
+ def _check_keys(self, doc):
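+        # mirror MongoDB's restriction that field names must not contain '.' or start with '$'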
+ for key in doc.keys():
+ print('key', key, 'value', doc.get(key))
+ if '.' in key:
+ raise NameError('key {} must not contain .'.format(key))
+ if key.startswith('$'):
+ raise NameError('key {} must not start with $'.format(key))
+ if isinstance(doc.get(key), dict):
+ self._check_keys(doc.get(key))
+
+
pods = MemDb()
projects = MemDb()
testcases = MemDb()
user = yield self.db.pods.find_one({'_id': '1'})
self.assertEqual(user.get('name', None), 'new_test1')
+ def test_update_dot_error(self):
+ self._update_assert({'_id': '1', 'name': {'1. name': 'test1'}},
+ 'key 1. name must not contain .')
+
+ def test_update_dot_no_error(self):
+ self._update_assert({'_id': '1', 'name': {'1. name': 'test1'}},
+ None,
+ check_keys=False)
+
+ def test_update_dollar_error(self):
+ self._update_assert({'_id': '1', 'name': {'$name': 'test1'}},
+ 'key $name must not start with $')
+
+ def test_update_dollar_no_error(self):
+ self._update_assert({'_id': '1', 'name': {'$name': 'test1'}},
+ None,
+ check_keys=False)
+
@gen_test
def test_remove(self):
yield self.db.pods.remove({'_id': '1'})
user = yield self.db.pods.find_one({'_id': '1'})
self.assertIsNone(user)
- @gen_test
- def test_insert_check_keys(self):
- yield self.db.pods.insert({'_id': '1', 'name': 'test1'},
- check_keys=False)
- cursor = self.db.pods.find({'_id': '1'})
- names = []
- while (yield cursor.fetch_next):
- ob = cursor.next_object()
- names.append(ob.get('name'))
- self.assertItemsEqual(names, ['test1', 'test1'])
+ def test_insert_dot_error(self):
+ self._insert_assert({'_id': '1', '2. name': 'test1'},
+ 'key 2. name must not contain .')
+
+ def test_insert_dot_no_error(self):
+ self._insert_assert({'_id': '1', '2. name': 'test1'},
+ None,
+ check_keys=False)
+
+ def test_insert_dollar_error(self):
+ self._insert_assert({'_id': '1', '$name': 'test1'},
+ 'key $name must not start with $')
+
+ def test_insert_dollar_no_error(self):
+ self._insert_assert({'_id': '1', '$name': 'test1'},
+ None,
+ check_keys=False)
def _clear(self):
self.db.pods.clear()
+ def _update_assert(self, docs, error=None, **kwargs):
+ self._db_assert('update', error, {'_id': '1'}, docs, **kwargs)
+
+ def _insert_assert(self, docs, error=None, **kwargs):
+ self._db_assert('insert', error, docs, **kwargs)
+
+ @gen_test
+ def _db_assert(self, method, error, *args, **kwargs):
+ name_error = None
+ try:
+ yield self._eval_pods_db(method, *args, **kwargs)
+ except NameError as err:
+ name_error = err.args[0]
+ finally:
+ self.assertEqual(name_error, error)
+
+ def _eval_pods_db(self, method, *args, **kwargs):
+ return eval('self.db.pods.%s(*args, **kwargs)' % method)
+
+
if __name__ == '__main__':
unittest.main()
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
import unittest
+import copy
from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
HTTP_NOT_FOUND
self.assertEqual(code, HTTP_OK)
self.assert_href(body)
+    def test_key_with_dot(self):
+ req = copy.deepcopy(self.req_d)
+ req.details = {'1.name': 'dot_name'}
+ (code, body) = self.create(req)
+ self.assertEqual(code, HTTP_OK)
+ self.assert_href(body)
+
class TestResultGet(TestResultBase):
def test_getOne(self):
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
import unittest
+import copy
from test_base import TestBase
from opnfv_testapi.resources.testcase_models import TestcaseCreateRequest, \
self.assertEqual(_id, new_body._id)
self.assert_update_body(self.req_d, new_body, self.update_e)
+ def test_with_dollar(self):
+ self.create_d()
+ update = copy.deepcopy(self.update_d)
+ update.description = {'2. change': 'dollar change'}
+ code, body = self.update(update, self.req_d.name)
+ self.assertEqual(code, HTTP_OK)
+
class TestCaseDelete(TestCaseBase):
def test_notFound(self):
setuptools>=16.0
tornado>=3.1
epydoc>=0.3.1
+motor
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
+# 09/06/2016: change for migration after refactoring
+# 16/06/2016: Alignment of test name (JIRA: FUNCTEST-304)
##############################################################################
collections_old2New = {
- 'pod': 'pods',
- 'test_projects': 'projects',
- 'test_testcases': 'testcases',
- 'test_results': 'results'
+ # 'pod': 'pods',
+ # 'test_projects': 'projects',
+ # 'test_testcases': 'testcases',
+ # 'test_results': 'results'
}
fields_old2New = {
- 'test_results': [({}, {'creation_date': 'start_date'})]
+ # 'test_results': [({}, {'creation_date': 'start_date'})]
}
docs_old2New = {
- 'test_results': [
- ({'criteria': 'failed'}, {'criteria': 'FAILED'}),
- ({'criteria': 'passed'}, {'criteria': 'PASS'})
+ # 'test_results': [
+ # ({'criteria': 'failed'}, {'criteria': 'FAILED'}),
+ # ({'criteria': 'passed'}, {'criteria': 'PASS'})
+ # ]
+ 'testcases': [
+ ({'name': 'vPing'}, {'name': 'vping_ssh'}),
+ ({'name': 'Tempest'}, {'name': 'tempest_smoke_serial'}),
+ ({'name': 'Rally'}, {'name': 'rally_sanity'}),
+ ({'name': 'ODL'}, {'name': 'odl'}),
+ ({'name': 'vIMS'}, {'name': 'vims'}),
+ ({'name': 'ONOS'}, {'name': 'onos'}),
+ ({'name': 'vPing_userdata'}, {'name': 'vping_userdata'}),
+ ({'name': 'ovno'}, {'name': 'ocl'})
+ ],
+ 'results': [
+ ({'case_name': 'vPing'}, {'case_name': 'vping_ssh'}),
+ ({'case_name': 'Tempest'}, {'case_name': 'tempest_smoke_serial'}),
+ ({'case_name': 'Rally'}, {'case_name': 'rally_sanity'}),
+ ({'case_name': 'ODL'}, {'case_name': 'odl'}),
+ ({'case_name': 'vIMS'}, {'case_name': 'vims'}),
+ ({'case_name': 'ONOS'}, {'case_name': 'onos'}),
+ ({'case_name': 'vPing_userdata'}, {'case_name': 'vping_userdata'}),
+ ({'case_name': 'ovno'}, {'case_name': 'ocl'})
]
}
parser.add_argument('-d', '--db',
type=str,
required=False,
- default=None,
+ default='test_results_collection',
help='database name after the restore.')
logger.addHandler(file_handler)
-def _get_dicts_from_list(dict_list, keys):
+def _get_dicts_from_list(testcase, dict_list, keys):
dicts = []
for dictionary in dict_list:
# iterate over dictionaries in input list
+ if not isinstance(dictionary, dict):
+ logger.info("Skipping non-dict details testcase [{}]".format(testcase))
+ continue
if keys == set(dictionary.keys()):
# check the dictionary structure
dicts.append(dictionary)
return test_results
+def _convert_value(value):
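+    # empty 'H:M:S' components (e.g. '::30') would break int()/float() below, so treat them as zero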
+ return value if value != '' else 0
+
+
def _convert_duration(duration):
if (isinstance(duration, str) or isinstance(duration, unicode)) and ':' in duration:
hours, minutes, seconds = duration.split(":")
+ hours = _convert_value(hours)
+ minutes = _convert_value(minutes)
+ seconds = _convert_value(seconds)
int_duration = 3600 * int(hours) + 60 * int(minutes) + float(seconds)
else:
int_duration = duration
-> details.orchestrator.duration
"""
testcase_details = testcase['details']
- sig_test_results = _get_dicts_from_list(testcase_details['sig_test']['result'],
+ sig_test_results = _get_dicts_from_list(testcase, testcase_details['sig_test']['result'],
{'duration', 'result', 'name', 'error'})
if len(sig_test_results) < 1:
logger.info("No 'result' from 'sig_test' found in vIMS details, skipping")
testcase_details = testcase['details']
funcvirnet_details = testcase_details['FUNCvirNet']['status']
- funcvirnet_statuses = _get_dicts_from_list(funcvirnet_details, {'Case result', 'Case name:'})
+ funcvirnet_statuses = _get_dicts_from_list(testcase, funcvirnet_details, {'Case result', 'Case name:'})
funcvirnetl3_details = testcase_details['FUNCvirNetL3']['status']
- funcvirnetl3_statuses = _get_dicts_from_list(funcvirnetl3_details, {'Case result', 'Case name:'})
+ funcvirnetl3_statuses = _get_dicts_from_list(testcase, funcvirnetl3_details, {'Case result', 'Case name:'})
    if len(funcvirnet_statuses) < 1:
logger.info("No results found in 'FUNCvirNet' part of ONOS results")
-> details.tests
-> details.success_percentage
"""
- summaries = _get_dicts_from_list(testcase['details'], {'summary'})
+ summaries = _get_dicts_from_list(testcase, testcase['details'], {'summary'})
if len(summaries) != 1:
logger.info("Found zero or more than one 'summaries' in Rally details, skipping")
-> details.failures
-> details.success_percentage?
"""
- test_statuses = _get_dicts_from_list(testcase['details']['details'], {'test_status', 'test_doc', 'test_name'})
+ test_statuses = _get_dicts_from_list(testcase, testcase['details']['details'],
+ {'test_status', 'test_doc', 'test_name'})
if len(test_statuses) < 1:
logger.info("No 'test_status' found in ODL details, skipping")
return False
def publish_mongo_data(output_destination):
tmp_filename = 'mongo-{}.log'.format(uuid.uuid4())
try:
- subprocess.check_call(['mongoexport', '--db', 'test_results_collection', '-c', 'test_results', '--out',
+ subprocess.check_call(['mongoexport', '--db', 'test_results_collection', '-c', 'results', '--out',
tmp_filename])
with open(tmp_filename) as fobj:
for mongo_json_line in fobj:
test_result = json.loads(mongo_json_line)
if modify_mongo_entry(test_result):
- shared_utils.publish_json(test_result, output_destination, es_user, es_passwd)
+ shared_utils.publish_json(test_result, es_user, es_passwd, output_destination)
finally:
if os.path.exists(tmp_filename):
os.remove(tmp_filename)
def get_mongo_data(days):
past_time = datetime.datetime.today() - datetime.timedelta(days=days)
- mongo_json_lines = subprocess.check_output(['mongoexport', '--db', 'test_results_collection', '-c', 'test_results',
+ mongo_json_lines = subprocess.check_output(['mongoexport', '--db', 'test_results_collection', '-c', 'results',
'--query', '{{"creation_date":{{$gt:"{}"}}}}'
.format(past_time)]).splitlines()
help='the url of mongodb, defaults to http://localhost:8082')
args = parser.parse_args()
- base_elastic_url = urlparse.urljoin(args.elasticsearch_url, '/test_results/mongo2elastic')
+ base_elastic_url = urlparse.urljoin(args.elasticsearch_url, '/results/mongo2elastic')
output_destination = args.output_destination
days = args.merge_latest
es_user = args.elasticsearch_username