Merge "Add a put result method to modify trust_indicator"
author    Morgan Richomme <morgan.richomme@orange.com>
Wed, 6 Jul 2016 08:49:08 +0000 (08:49 +0000)
committer    Gerrit Code Review <gerrit@172.30.200.206>
Wed, 6 Jul 2016 08:49:08 +0000 (08:49 +0000)
23 files changed:
jjb/apex/apex-upload-artifact.sh
jjb/apex/apex.yml
jjb/armband/armband-ci-jobs.yml
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-project-jobs.yml
jjb/domino/domino.yml [new file with mode: 0644]
jjb/fuel/fuel-ci-jobs.yml
jjb/functest/functest-ci-jobs.yml
jjb/functest/functest-loop.sh [moved from jjb/functest/functest-daily.sh with 100% similarity]
jjb/functest/set-functest-env.sh
jjb/joid/joid-ci-jobs.yml
jjb/opnfv/slave-params.yml
jjb/yardstick/yardstick-ci-jobs.yml
utils/gpg_import_key.sh [new file with mode: 0644]
utils/push-test-logs.sh
utils/test/reporting/functest/reporting-status.py
utils/test/reporting/functest/reportingConf.py
utils/test/reporting/functest/reportingUtils.py
utils/test/result_collection_api/docker/Dockerfile [new file with mode: 0644]
utils/test/result_collection_api/docker/prepare-env.sh [new file with mode: 0755]
utils/test/result_collection_api/docker/start-server.sh [new file with mode: 0755]
utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py
utils/test/result_collection_api/opnfv_testapi/tests/unit/test_project.py

index 0598f56..ba69f3e 100755 (executable)
@@ -11,6 +11,32 @@ echo
 # source the opnfv.properties to get ARTIFACT_VERSION
 source $WORKSPACE/opnfv.properties
 
+# This is where we import the signing key
+source $WORKSPACE/releng/utils/gpg_import_key.sh
+
+signrpm () {
+for artifact in $RPM_LIST $SRPM_LIST; do
+  echo "Signing artifact: ${artifact}"
+  gpg2 -vvv --batch \
+    --default-key opnfv-helpdesk@rt.linuxfoundation.org  \
+    --passphrase besteffort \
+    --detach-sig $artifact
+    gsutil cp "$artifact".sig gs://$GS_URL/$(basename "$artifact".sig)
+    echo "Upload complete for ${artifact} signature"
+done
+}
+
+signiso () {
+time gpg2 -vvv --batch \
+  --default-key opnfv-helpdesk@rt.linuxfoundation.org  \
+  --passphrase notreallysecure \
+  --detach-sig $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso
+
+gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig 
+echo "ISO signature Upload Complete!"
+}
+
+uploadiso () {
 # upload artifact and additional files to google storage
 gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log
 echo "ISO Upload Complete!"
@@ -26,7 +52,10 @@ VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
 for pkg in common undercloud opendaylight-sfc onos; do
     SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
 done
+}
 
+uploadrpm () {
+# This is where we upload the RPMs
 for artifact in $RPM_LIST $SRPM_LIST; do
   echo "Uploading artifact: ${artifact}"
   gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log
@@ -34,6 +63,18 @@ for artifact in $RPM_LIST $SRPM_LIST; do
 done
 gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log
 gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log
+}
+
+if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
+  echo "Signing key available"
+  signiso
+  uploadiso
+  signrpm
+  uploadrpm
+else
+  uploadiso
+  uploadrpm
+fi
 
 echo
 echo "--------------------------------------------------------"
index 578024e..3ba8842 100644 (file)
             git-revision: false
             block: true
             same-node: true
-#        - trigger-builds:
-#          - project: 'functest-apex-{verify-slave}-suite-{stream1}'
-#            predefined-parameters: |
-#              DEPLOY_SCENARIO=os-nosdn-nofeature-ha
-#              FUNCTEST_SUITE_NAME=vping_userdata
-#            block: true
-#            same-node: true
+        - trigger-builds:
+          - project: 'functest-apex-{verify-slave}-suite-{stream1}'
+            predefined-parameters: |
+              DEPLOY_SCENARIO=os-nosdn-nofeature-ha
+              FUNCTEST_SUITE_NAME=healthcheck
+            block: true
+            same-node: true
         - trigger-builds:
           - project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream1}'
             predefined-parameters: |
             git-revision: false
             block: true
             same-node: true
+        - trigger-builds:
+          - project: 'functest-apex-{verify-slave}-suite-{stream1}'
+            predefined-parameters: |
+              DEPLOY_SCENARIO=os-odl_l2-nofeature-ha
+              FUNCTEST_SUITE_NAME=healthcheck
+            block: true
+            same-node: true
         - 'apex-workspace-cleanup'
 
 - job-template:
index 3d0db65..53c652e 100644 (file)
@@ -23,6 +23,8 @@
     pod:
         - arm-pod1:
             <<: *brahmaputra
+        - arm-pod2:
+            <<: *brahmaputra
 #--------------------------------
 #        master
 #--------------------------------
     name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-brahmaputra-trigger'
     triggers:
         - timed: '0 18 * * *'
+#---------------------------------------------------------------
+# Enea Armband POD 2 Triggers running against brahmaputra branch
+#---------------------------------------------------------------
+- trigger:
+    name: 'armband-os-odl_l2-nofeature-ha-arm-pod2-brahmaputra-trigger'
+    triggers:
+        - timed: ''
index 4bbea49..52d6785 100644 (file)
         - '{auto-trigger-name}'
 
     builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
         - trigger-builds:
             - project: 'compass-deploy-{pod}-daily-{stream}'
               current-parameters: true
 
 
     builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
         - shell:
             !include-raw-escape: ./compass-download-artifact.sh
         - shell:
         - choice:
             name: COMPASS_OPENSTACK_VERSION
             choices:
-                - 'liberty'
                 - 'mitaka'
+                - 'liberty'
 
 ########################
 # trigger macros
index a0438ee..da28687 100644 (file)
             - project: 'functest-{slave-label}-suite-{stream}'
               current-parameters: true
               predefined-parameters:
-                FUNCTEST_SUITE_NAME=vping_userdata
+                FUNCTEST_SUITE_NAME=healthcheck
               same-node: true
               block: true
               block-thresholds:
         - choice:
             name: COMPASS_OPENSTACK_VERSION
             choices:
-                - 'liberty'
                 - 'mitaka'
+                - 'liberty'
         - choice:
             name: COMPASS_OS_VERSION
             choices:
diff --git a/jjb/domino/domino.yml b/jjb/domino/domino.yml
new file mode 100644 (file)
index 0000000..29e171b
--- /dev/null
@@ -0,0 +1,55 @@
+- project:
+    name: domino
+
+    project: '{name}'
+
+    jobs:
+        - 'domino-verify-{stream}'
+
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+
+- job-template:
+    name: 'domino-verify-{stream}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**|.gitignore'
+
+    builders:
+        - shell: |
+            #!/bin/bash
+            ./tests/run.sh
index e78be54..de7ca6a 100644 (file)
             gs-pathname: '{gs-pathname}'
 
     builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
         - trigger-builds:
             - project: 'fuel-deploy-{pod}-daily-{stream}'
               current-parameters: false
             name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
 
     builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
         - shell:
             !include-raw-escape: ./fuel-download-artifact.sh
         - shell:
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-master-trigger'
     triggers:
-        - timed: '0 12 * * *'
+        - timed: '15 9 * * *'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-master-trigger'
     triggers:
index 32251b8..f9cf011 100644 (file)
 
     testsuite:
         - 'daily'
+        - 'weekly'
         - 'suite'
 
     jobs:
             branch: '{branch}'
 
     builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
         - 'functest-{testsuite}-builder'
 
 ########################
         - string:
             name: FUNCTEST_SUITE_NAME
             default: 'daily'
-            description: "Suite name to run"
+            description: "Daily suite name to run"
+- parameter:
+    name: functest-weekly-parameter
+    parameters:
+        - string:
+            name: FUNCTEST_SUITE_NAME
+            default: 'weekly'
+            description: "Weekly suite name to run"
 - parameter:
     name: functest-suite-parameter
     parameters:
             name: FUNCTEST_SUITE_NAME
             choices:
                 - 'healthcheck'
-                - 'tempest'
-                - 'rally'
+                - 'vping_userdata'
+                - 'vping_ssh'
+                - 'tempest_smoke_serial'
+                - 'rally_sanity'
                 - 'odl'
                 - 'onos'
-                - 'ovno'
                 - 'promise'
                 - 'doctor'
+                - 'bgpvpn'
+                - 'security_scan'
+                - 'tempest_full_parallel'
+                - 'rally_full'
                 - 'vims'
-                - 'vping_userdata'
-                - 'vping_ssh'
 - parameter:
     name: functest-parameter
     parameters:
         - 'functest-daily'
         - 'functest-store-results'
 
+- builder:
+    name: functest-weekly-builder
+    builders:
+        - 'functest-cleanup'
+        - 'set-functest-env'
+        - 'functest-weekly'
+        - 'functest-store-results'
+
 - builder:
     name: functest-suite-builder
     builders:
         - 'functest-suite'
 
 - builder:
-    name: functest-suite
+    name: functest-daily
     builders:
         - shell:
-            !include-raw: ./functest-suite.sh
+            !include-raw: ./functest-loop.sh
 
 - builder:
-    name: functest-daily
+    name: functest-weekly
     builders:
         - shell:
-            !include-raw: ./functest-daily.sh
+            !include-raw: ./functest-loop.sh
+
+- builder:
+    name: functest-suite
+    builders:
+        - shell:
+            !include-raw: ./functest-suite.sh
 
 - builder:
     name: set-functest-env
index 0b8747a..d2e232d 100755 (executable)
@@ -27,6 +27,7 @@ if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
     if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
         sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
     fi
+
 elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
     # If production lab then creds may be retrieved dynamically
     # creds are on the jumphost, always in the same folder
@@ -34,6 +35,12 @@ elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
     # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
     # replace the default one by the customized one provided by jenkins config
 fi
+
+# Set iptables rule to allow forwarding return traffic for container
+if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
+    sudo iptables -I FORWARD -j RETURN
+fi
+
 echo "Functest: Start Docker and prepare environment"
 envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
     -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
index 6927ad9..a1e5677 100644 (file)
             default: '{scenario}'
 
     builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
         - trigger-builds:
             - project: 'joid-deploy-{pod}-daily-{stream}'
               current-parameters: true
             branch: '{branch}'
 
     builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
         - 'builder-macro'
 ########################
 # builder macros
index da0808b..e5313c8 100644 (file)
             name: LAB_CONFIG_URL
             default: ssh://git@git.enea.com/pharos/lab-config
             description: 'Base URI to the configuration directory'
+
+- parameter:
+    name: 'arm-pod2-defaults'
+    parameters:
+        - node:
+            name: SLAVE_NAME
+            description: 'Slave name on Jenkins'
+            allowed-slaves:
+                - arm-pod2
+            default-slaves:
+                - arm-pod2
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+        - string:
+            name: DEFAULT_BRIDGE
+            default: 'admin_br0,public_br0'
+            description: 'The bridge to use for Fuel PXE booting. It can be a comma separated list of bridges, in which case the first one is the PXE boot bridge and the rest are additional interfaces that will be added to the VM. If left empty, most deploy scripts will default to pxebr.'
+        - string:
+            name: DEPLOY_TIMEOUT
+            default: '360'
+            description: 'Deployment timeout in minutes'
+        - string:
+            name: LAB_CONFIG_URL
+            default: ssh://git@git.enea.com/pharos/lab-config
+            description: 'Base URI to the configuration directory'
+
 - parameter:
     name: 'opnfv-build-centos-defaults'
     parameters:
index 4f98e2c..8b8ced1 100644 (file)
             branch: '{branch}'
 
     builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
         - 'yardstick-cleanup'
         #- 'yardstick-fetch-os-creds'
         - 'yardstick-{testsuite}'
diff --git a/utils/gpg_import_key.sh b/utils/gpg_import_key.sh
new file mode 100644 (file)
index 0000000..3afeda8
--- /dev/null
@@ -0,0 +1,42 @@
+#!/bin/bash -e
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 NEC and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+function isinstalled {
+  if rpm -q "$@" >/dev/null 2>&1; then
+    true
+  else
+    echo installing "$1"
+    sudo yum install "$1"
+    false
+  fi
+}
+
+if ! isinstalled gnupg2; then
+  echo "error with install"
+  exit 1
+fi
+
+if ! which gsutil; then
+  echo "error gsutil not installed"
+  exit 1
+fi
+
+if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
+  echo "Key Already available"
+else
+  if [ -z "$NODE_NAME" ]; then
+    echo "Cannot find node name"; exit 1
+  else
+    echo "Importing key for '$NODE_NAME'"
+    gsutil cp gs://opnfv-signing-keys/"$NODE_NAME"-subkey .
+    gpg2 --import "$NODE_NAME"-subkey
+    rm -f "$NODE_NAME"-subkey
+  fi
+fi
index 7456450..964b419 100644 (file)
@@ -17,13 +17,13 @@ res_build_date=${1:-$(date -u +"%Y-%m-%d_%H-%M-%S")}
 project=$PROJECT
 branch=${GIT_BRANCH##*/}
 testbed=$NODE_NAME
-dir_result="${HOME}/opnfv/$project/results"
+dir_result="${HOME}/opnfv/$project/results/${branch}"
 # src: https://wiki.opnfv.org/display/INF/Hardware+Infrastructure
 # + intel-pod3 (vsperf)
 node_list=(\
 'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod3' \
 'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \
-'ericsson-pod2' 'huawei-pod1')
+'ericsson-pod2' 'huawei-pod1' 'huawei-pod2' 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4')
 
 if [[ ! " ${node_list[@]} " =~ " ${testbed} " ]]; then
     echo "This is not a CI POD. Aborting pushing the logs to artifacts."
index 2ce5efb..adbee36 100644 (file)
@@ -8,6 +8,7 @@
 #
 import datetime
 import jinja2
+import logging
 import os
 import requests
 import sys
@@ -19,7 +20,21 @@ import reportingConf as conf
 import testCase as tc
 import scenarioResult as sr
 
-testCases4Validation = []
+# Logger
+logFormatter = logging.Formatter("%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s]  %(message)s")
+logger = logging.getLogger()
+
+fileHandler = logging.FileHandler("{0}/{1}".format('.', conf.LOG_FILE))
+fileHandler.setFormatter(logFormatter)
+logger.addHandler(fileHandler)
+
+consoleHandler = logging.StreamHandler()
+consoleHandler.setFormatter(logFormatter)
+logger.addHandler(consoleHandler)
+logger.setLevel(conf.LOG_LEVEL)
+
+# Initialization
+testValid = []
 otherTestCases = []
 
 # init just tempest to get the list of scenarios
@@ -28,16 +43,16 @@ tempest = tc.TestCase("tempest_smoke_serial", "functest", -1)
 
 # Retrieve the Functest configuration to detect which tests are relevant
 # according to the installer, scenario
-# cf = "https://git.opnfv.org/cgit/functest/plain/ci/config_functest.yaml"
-cf = "https://git.opnfv.org/cgit/functest/plain/ci/testcases.yaml"
+cf = conf.TEST_CONF
 response = requests.get(cf)
+
 functest_yaml_config = yaml.load(response.text)
 
-print "****************************************"
-print "*   Generating reporting.....          *"
-print ("*   Data retention = %s days           *" % conf.PERIOD)
-print "*                                      *"
-print "****************************************"
+logger.info("****************************************")
+logger.info("*   Generating reporting.....          *")
+logger.info("*   Data retention = %s days           *" % conf.PERIOD)
+logger.info("*                                      *")
+logger.info("****************************************")
 
 # Retrieve test cases of Tier 1 (smoke)
 config_tiers = functest_yaml_config.get("tiers")
@@ -50,19 +65,22 @@ config_tiers = functest_yaml_config.get("tiers")
 for tier in config_tiers:
     if tier['order'] > 0 and tier['order'] < 3:
         for case in tier['testcases']:
-            testCases4Validation.append(tc.TestCase(case['name'],
-                                                    "functest",
-                                                    case['dependencies']))
+            if case['name'] not in conf.blacklist:
+                testValid.append(tc.TestCase(case['name'],
+                                             "functest",
+                                             case['dependencies']))
     elif tier['order'] == 3:
         for case in tier['testcases']:
-            testCases4Validation.append(tc.TestCase(case['name'],
-                                                    case['name'],
-                                                    case['dependencies']))
+            if case['name'] not in conf.blacklist:
+                testValid.append(tc.TestCase(case['name'],
+                                             case['name'],
+                                             case['dependencies']))
     elif tier['order'] > 3:
         for case in tier['testcases']:
-            otherTestCases.append(tc.TestCase(case['name'],
-                                              "functest",
-                                              case['dependencies']))
+            if case['name'] not in conf.blacklist:
+                otherTestCases.append(tc.TestCase(case['name'],
+                                                  "functest",
+                                                  case['dependencies']))
 
 # For all the versions
 for version in conf.versions:
@@ -84,27 +102,27 @@ for version in conf.versions:
             # Check if test case is runnable / installer, scenario
             # for the test case used for Scenario validation
             try:
-                print ("---------------------------------")
-                print ("installer %s, version %s, scenario %s:" %
-                       (installer, version, s))
+                logger.info("---------------------------------")
+                logger.info("installer %s, version %s, scenario %s:" %
+                            (installer, version, s))
 
                 # 1) Manage the test cases for the scenario validation
                 # concretely Tiers 0-3
-                for test_case in testCases4Validation:
+                for test_case in testValid:
                     test_case.checkRunnable(installer, s,
                                             test_case.getConstraints())
-                    print ("testcase %s is %s" % (test_case.getName(),
-                                                  test_case.isRunnable))
+                    logger.debug("testcase %s is %s" % (test_case.getName(),
+                                                        test_case.isRunnable))
                     time.sleep(1)
                     if test_case.isRunnable:
                         dbName = test_case.getDbName()
                         name = test_case.getName()
                         project = test_case.getProject()
                         nb_test_runnable_for_this_scenario += 1
-                        print (" Searching results for case %s " %
-                               (dbName))
+                        logger.info(" Searching results for case %s " %
+                                    (dbName))
                         result = utils.getResult(dbName, installer, s, version)
-                        print " >>>> Test result=" + str(result)
+                        logger.info(" >>>> Test score = " + str(result))
                         test_case.setCriteria(result)
                         test_case.setIsRunnable(True)
                         testCases2BeDisplayed.append(tc.TestCase(name,
@@ -120,15 +138,15 @@ for version in conf.versions:
                 for test_case in otherTestCases:
                     test_case.checkRunnable(installer, s,
                                             test_case.getConstraints())
-                    print ("testcase %s is %s" % (test_case.getName(),
-                                                  test_case.isRunnable))
+                    logger.info("testcase %s is %s" %
+                                (test_case.getName(), test_case.isRunnable))
                     time.sleep(1)
                     if test_case.isRunnable:
                         dbName = test_case.getDbName()
                         name = test_case.getName()
                         project = test_case.getProject()
-                        print (" Searching results for case %s " %
-                               (dbName))
+                        logger.info(" Searching results for case %s " %
+                                    (dbName))
                         result = utils.getResult(dbName, installer, s, version)
                         test_case.setCriteria(result)
                         test_case.setIsRunnable(True)
@@ -141,9 +159,9 @@ for version in conf.versions:
 
                     items[s] = testCases2BeDisplayed
             except:
-                print ("Error: installer %s, version %s, scenario %s" %
-                       (installer, version, s))
-                print "No data available , error %s " % (sys.exc_info()[0])
+                logger.error("Error: installer %s, version %s, scenario %s" %
+                             (installer, version, s))
+                logger.error("No data available: %s " % (sys.exc_info()[0]))
 
             # **********************************************
             # Evaluate the results for scenario validation
@@ -158,11 +176,11 @@ for version in conf.versions:
             s_score = str(scenario_score) + "/" + str(scenario_criteria)
             s_status = "KO"
             if scenario_score < scenario_criteria:
-                print (">>>> scenario not OK, score = %s/%s" %
-                       (scenario_score, scenario_criteria))
+                logger.info(">>>> scenario not OK, score = %s/%s" %
+                            (scenario_score, scenario_criteria))
                 s_status = "KO"
             else:
-                print ">>>>> scenario OK, save the information"
+                logger.info(">>>>> scenario OK, save the information")
                 s_status = "OK"
                 path_validation_file = ("./release/" + version +
                                         "/validated_scenario_history.txt")
@@ -173,7 +191,7 @@ for version in conf.versions:
                     f.write(info)
 
             scenario_result_criteria[s] = sr.ScenarioResult(s_status, s_score)
-            print "--------------------------"
+            logger.info("--------------------------")
 
         templateLoader = jinja2.FileSystemLoader(os.path.dirname
                                                  (os.path.abspath
index 649246d..61410b4 100644 (file)
 #
 # ****************************************************
 installers = ["apex", "compass", "fuel", "joid"]
-# installers = ["compass"]
+# installers = ["apex"]
+# list of test cases declared in testcases.yaml that must not be
+# taken into account for the scoring
+blacklist = ["odl", "ovno", "security_scan"]
 # versions = ["brahmaputra", "master"]
 versions = ["master"]
 PERIOD = 10
 MAX_SCENARIO_CRITERIA = 18
+# get the last 5 test results to determine the success criteria
+NB_TESTS = 5
 URL_BASE = 'http://testresults.opnfv.org/test/api/v1/results'
+TEST_CONF = "https://git.opnfv.org/cgit/functest/plain/ci/testcases.yaml"
+LOG_LEVEL = "INFO"
+LOG_FILE = "reporting.log"
index 0db570f..2f06b84 100644 (file)
@@ -21,7 +21,8 @@ def getApiResults(case, installer, scenario, version):
     #       "&period=30&installer=" + installer
     url = (reportingConf.URL_BASE + "?case=" + case +
            "&period=" + str(reportingConf.PERIOD) + "&installer=" + installer +
-           "&scenario=" + scenario + "&version=" + version)
+           "&scenario=" + scenario + "&version=" + version +
+           "&last=" + str(reportingConf.NB_TESTS))
     request = Request(url)
 
     try:
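
For context (illustration only, not part of the change): with the constants added to reportingConf.py above, the query built here now asks the results API for only the most recent NB_TESTS records. A minimal sketch of the resulting URL, where the case, installer, scenario and version values are made-up placeholders:

    # Illustrative sketch only; mirrors how getApiResults() composes the query
    # from URL_BASE, PERIOD and the new NB_TESTS in reportingConf.py.
    URL_BASE = 'http://testresults.opnfv.org/test/api/v1/results'
    PERIOD = 10
    NB_TESTS = 5
    url = (URL_BASE + "?case=" + "tempest_smoke_serial" +       # placeholder case
           "&period=" + str(PERIOD) + "&installer=" + "fuel" +  # placeholder installer
           "&scenario=" + "os-odl_l2-nofeature-ha" +            # placeholder scenario
           "&version=" + "master" +
           "&last=" + str(NB_TESTS))
    print(url)
    # .../results?case=tempest_smoke_serial&period=10&installer=fuel&scenario=os-odl_l2-nofeature-ha&version=master&last=5
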
@@ -104,7 +105,7 @@ def getResult(testCase, installer, scenario, version):
         # print "nb of results:" + str(len(test_results))
 
         for r in test_results:
-            # print r["creation_date"]
+            # print r["start_date"]
             # print r["criteria"]
             scenario_results.append({r["start_date"]: r["criteria"]})
         # sort results
@@ -116,7 +117,7 @@ def getResult(testCase, installer, scenario, version):
         # 0: 0% success, not passing
         test_result_indicator = 0
         nbTestOk = getNbtestOk(scenario_results)
-        # print "Nb test OK:"+ str(nbTestOk)
+        # print "Nb test OK (last 10 days):"+ str(nbTestOk)
         # check that we have at least 4 runs
         if nbTestOk < 1:
             test_result_indicator = 0
@@ -126,7 +127,9 @@ def getResult(testCase, installer, scenario, version):
             # Test the last 4 run
             if (len(scenario_results) > 3):
                 last4runResults = scenario_results[-4:]
-                if getNbtestOk(last4runResults):
+                nbTestOkLast4 = getNbtestOk(last4runResults)
+                # print "Nb test OK (last 4 run):"+ str(nbTestOkLast4)
+                if nbTestOkLast4 > 3:
                     test_result_indicator = 3
                 else:
                     test_result_indicator = 2
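
A note on the behavioural change above (illustration only): the old check `if getNbtestOk(last4runResults):` awarded the top indicator as soon as at least one of the last four runs passed, while the new `nbTestOkLast4 > 3` requires all four to pass. A minimal sketch, assuming getNbtestOk() simply counts passing entries and using a placeholder "OK" criterion value:

    # Stand-in for getNbtestOk(), for illustration only: count passing runs.
    def nb_test_ok(results):
        return sum(1 for r in results if r == "OK")  # "OK" is a placeholder value

    last4 = ["OK", "NOK", "OK", "OK"]          # 3 of the last 4 runs passed

    old_top_score = bool(nb_test_ok(last4))    # True  -> indicator 3 with the old check
    new_top_score = nb_test_ok(last4) > 3      # False -> indicator 2 with the new check
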
diff --git a/utils/test/result_collection_api/docker/Dockerfile b/utils/test/result_collection_api/docker/Dockerfile
new file mode 100644 (file)
index 0000000..ffee4c2
--- /dev/null
@@ -0,0 +1,52 @@
+#######################################################
+#   Docker container for OPNFV-TESTAPI
+#######################################################
+# Purpose: run opnfv-testapi for gathering test results
+#
+# Maintained by SerenaFeng
+# Build:
+#    $ docker build -t opnfv/testapi:tag .
+#
+# Execution:
+#    $ docker run -dti -p 8000:8000 \
+#      -e "swagger_url=http://10.63.243.17:8000" \
+#      -e "mongodb_url=mongodb://10.63.243.17:27017/" \
+#      -e "api_port=8000" \
+#      opnfv/testapi:tag
+#
+# NOTE: providing swagger_url, api_port, mongodb_url is optional.
+#       If they are not provided, the defaults configured
+#       in config.ini are used.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+FROM ubuntu:14.04
+MAINTAINER SerenaFeng <feng.xiaowei@zte.com.cn>
+LABEL version="v1" description="OPNFV TestAPI Docker container"
+
+ENV HOME /home
+
+# Packaged dependencies
+RUN apt-get update && apt-get install -y \
+curl \
+git \
+gcc \
+wget \
+python-dev \
+python-pip \
+crudini \
+--no-install-recommends
+
+RUN pip install --upgrade pip
+
+RUN git config --global http.sslVerify false
+RUN git clone https://gerrit.opnfv.org/gerrit/releng /home/releng
+
+WORKDIR /home/releng/utils/test/result_collection_api/
+RUN pip install -r requirements.txt
+RUN python setup.py install
+CMD ["bash", "docker/start-server.sh"]
diff --git a/utils/test/result_collection_api/docker/prepare-env.sh b/utils/test/result_collection_api/docker/prepare-env.sh
new file mode 100755 (executable)
index 0000000..99433cc
--- /dev/null
@@ -0,0 +1,16 @@
+#!/bin/bash
+FILE=/etc/opnfv_testapi/config.ini
+
+
+if [ "$mongodb_url" != "" ]; then
+    sudo crudini --set --existing $FILE mongo url $mongodb_url
+fi
+
+if [ "$swagger_url" != "" ]; then
+    sudo crudini --set --existing $FILE swagger base_url $swagger_url
+fi
+
+if [ "$api_port" != "" ]; then
+    sudo crudini --set --existing $FILE api port $api_port
+fi
+
diff --git a/utils/test/result_collection_api/docker/start-server.sh b/utils/test/result_collection_api/docker/start-server.sh
new file mode 100755 (executable)
index 0000000..8bf6084
--- /dev/null
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+bash docker/prepare-env.sh
+opnfv-testapi
index 148a803..400b84a 100644 (file)
@@ -112,7 +112,7 @@ class ResultsCLHandler(GenericResultHandler):
             @type period: L{string}
             @in period: query
             @required period: False
-            @param last: last days
+            @param last: number of latest records to return
             @type last: L{string}
             @in last: query
             @required last: False
index d473060..327ddf7 100644 (file)
@@ -10,7 +10,7 @@ import unittest
 
 from test_base import TestBase
 from opnfv_testapi.resources.project_models import ProjectCreateRequest, \
-    Project, Projects
+    Project, Projects, ProjectUpdateRequest
 from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
     HTTP_FORBIDDEN, HTTP_NOT_FOUND
 
@@ -112,7 +112,7 @@ class TestProjectUpdate(TestProjectBase):
         code, body = self.get(self.req_d.name)
         _id = body._id
 
-        req = ProjectCreateRequest('newName', 'new description')
+        req = ProjectUpdateRequest('newName', 'new description')
         code, body = self.update(req, self.req_d.name)
         self.assertEqual(code, HTTP_OK)
         self.assertEqual(_id, body._id)