Merge "template-ize kibana visualization"
author: Morgan Richomme <morgan.richomme@orange.com>
Tue, 27 Sep 2016 07:02:50 +0000 (07:02 +0000)
committer: Gerrit Code Review <gerrit@172.30.200.206>
Tue, 27 Sep 2016 07:02:50 +0000 (07:02 +0000)
12 files changed:
jjb/doctor/doctor.yml
jjb/dovetail/dovetail-ci-jobs.yml [new file with mode: 0644]
jjb/dovetail/dovetail-cleanup.sh [new file with mode: 0755]
jjb/dovetail/dovetail-run.sh [new file with mode: 0755]
jjb/opnfv/opnfv-docs.yml
jjb/qtip/qtip-cleanup.sh
prototypes/bifrost/scripts/destroy-env.sh
utils/test/reporting/functest/reporting-status.py
utils/test/reporting/functest/reportingConf.py
utils/test/reporting/functest/reportingUtils.py
utils/test/reporting/functest/scenarioResult.py
utils/test/reporting/functest/template/index-status-tmpl.html

index 2010e12..f93ac9b 100644 (file)
               TESTCASE_OPTIONS=-e INSPECTOR_TYPE=congress -v $WORKSPACE:$HOME/opnfv/repos/doctor
             block: true
             same-node: true
+        - shell: |
+            logfile=$HOME/opnfv/functest/results/{stream}/doctor.log
+            echo
+            echo "[$logfile]"
+            echo
+            [ -e $logfile ] && cat $logfile
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
new file mode 100644 (file)
index 0000000..9d2f69d
--- /dev/null
@@ -0,0 +1,177 @@
+###################################
+# job configuration for dovetail
+###################################
+- project:
+    name: dovetail
+
+    project: '{name}'
+
+#---------------------------------------
+# BRANCH ANCHORS
+#---------------------------------------
+# 1)the stream/branch here represents the SUT(System Under Test) stream/branch
+# 2)docker-tag is the docker tag of dovetail(only master by now, then all latest used)
+#   the dovetail stream is one-to-one mapping with dovetail docker-tag
+#   the dovetail is not sync with A/B/C release
+#
+    master: &master
+        stream: master
+        branch: '{stream}'
+        gs-pathname: ''
+        docker-tag: 'latest'
+    colorado: &colorado
+        stream: colorado
+        branch: 'stable/{stream}'
+        gs-pathname: '{stream}'
+        docker-tag: 'latest'
+
+#-----------------------------------
+# POD, PLATFORM, AND BRANCH MAPPING
+#-----------------------------------
+#            CI PODs
+# This section should only contain the SUTs
+# that have been switched using labels for slaves
+#------------------------------------------------
+# the pods, SUTs listed here are just examples to
+# let the dovetail tool run, there can be more ways beside CI to
+# run the dovetail tool.
+# pods, SUTs will be added/adjusted when needed
+    pod:
+# fuel CI PODs
+        - baremetal:
+            slave-label: fuel-baremetal
+            SUT: fuel
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - virtual:
+            slave-label: fuel-virtual
+            SUT: fuel
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - baremetal:
+            slave-label: fuel-baremetal
+            SUT: fuel
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *colorado
+        - virtual:
+            slave-label: fuel-virtual
+            SUT: fuel
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *colorado
+#compass CI PODs
+        - baremetal:
+            slave-label: compass-baremetal
+            SUT: compass
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - virtual:
+            slave-label: compass-virtual
+            SUT: compass
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - baremetal:
+            slave-label: compass-baremetal
+            SUT: compass
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *colorado
+        - virtual:
+            slave-label: compass-virtual
+            SUT: compass
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *colorado
+#--------------------------------
+#        None-CI PODs
+#--------------------------------
+        - huawei-pod5:
+            slave-label: '{pod}'
+            SUT: compass
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+#--------------------------------
+    testsuite:
+        - 'basic'
+
+    jobs:
+        - 'dovetail-{SUT}-{pod}-{testsuite}-{stream}'
+
+################################
+# job templates
+################################
+- job-template:
+    name: 'dovetail-{SUT}-{pod}-{testsuite}-{stream}'
+
+    disabled: false
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-per-node: 1
+            option: 'project'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+        - timeout:
+            timeout: 180
+            abort: true
+
+    triggers:
+        - '{auto-trigger-name}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - '{SUT}-defaults'
+        - '{slave-label}-defaults'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-nosdn-nofeature-ha'
+        - string:
+            name: DOCKER_TAG
+            default: '{docker-tag}'
+            description: 'Tag to pull docker image'
+        - string:
+            name: CI_DEBUG
+            default: 'true'
+            description: "Show debug output information"
+
+    scm:
+        - git-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            branch: '{branch}'
+
+    builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
+        - 'dovetail-cleanup'
+        - 'dovetail-{testsuite}'
+
+    publishers:
+        - archive:
+            artifacts: 'results/**/*'
+            allow-empty: true
+            fingerprint: true
+
+########################
+# builder macros
+########################
+- builder:
+    name: dovetail-basic
+    builders:
+        - shell:
+            !include-raw: ./dovetail-run.sh
+
+- builder:
+    name: dovetail-fetch-os-creds
+    builders:
+        - shell:
+            !include-raw: ../../utils/fetch_os_creds.sh
+
+- builder:
+    name: dovetail-cleanup
+    builders:
+        - shell:
+            !include-raw: ./dovetail-cleanup.sh
diff --git a/jjb/dovetail/dovetail-cleanup.sh b/jjb/dovetail/dovetail-cleanup.sh
new file mode 100755 (executable)
index 0000000..297222b
--- /dev/null
@@ -0,0 +1,20 @@
+#!/bin/bash
+[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+
+echo "Cleaning up docker containers/images..."
+# Remove previous running containers if exist
+if [[ ! -z $(docker ps -a | grep opnfv/dovetail) ]]; then
+    echo "Removing existing opnfv/dovetail containers..."
+    docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >$redirect
+fi
+
+# Remove existing images if exist
+if [[ ! -z $(docker images | grep opnfv/dovetail) ]]; then
+    echo "Docker images to remove:"
+    docker images | head -1 && docker images | grep opnfv/dovetail
+    image_tags=($(docker images | grep opnfv/dovetail | awk '{print $2}'))
+    for tag in "${image_tags[@]}"; do
+        echo "Removing docker image opnfv/dovetail:$tag..."
+        docker rmi opnfv/dovetail:$tag >$redirect
+    done
+fi
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
new file mode 100755 (executable)
index 0000000..6453425
--- /dev/null
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+#the noun INSTALLER is used in community, here is just the example to run.
+#multi-platforms are supported.
+
+set -e
+[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+
+# labconfig is used only for joid
+labconfig=""
+sshkey=""
+if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
+    instack_mac=$(sudo virsh domiflist undercloud | grep default | \
+                  grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
+    INSTALLER_IP=$(/usr/sbin/arp -e | grep ${instack_mac} | awk {'print $1'})
+    sshkey="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
+    if [[ -n $(sudo iptables -L FORWARD |grep "REJECT"|grep "reject-with icmp-port-unreachable") ]]; then
+        #note: this happens only in opnfv-lf-pod1
+        sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
+        sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
+    fi
+elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
+    # If production lab then creds may be retrieved dynamically
+    # creds are on the jumphost, always in the same folder
+    labconfig="-v $LAB_CONFIG/admin-openrc:/home/opnfv/openrc"
+    # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
+    # replace the default one by the customized one provided by jenkins config
+fi
+
+# Set iptables rule to allow forwarding return traffic for container
+if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
+    sudo iptables -I FORWARD -j RETURN
+fi
+
+opts="--privileged=true --rm"
+envs="-e CI_DEBUG=${CI_DEBUG} \
+      -v /var/run/docker.sock:/var/run/docker.sock \
+      -v /home/opnfv/dovetail/results:/home/opnfv/dovetail/results"
+
+# Pull the image with correct tag
+echo "Dovetail: Pulling image opnfv/dovetail:${DOCKER_TAG}"
+docker pull opnfv/dovetail:$DOCKER_TAG >$redirect
+
+# Run docker
+echo "Dovetail: docker running..."
+sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/dovetail:${DOCKER_TAG} \
+"/home/opnfv/dovetail/scripts/run.py"
+
+echo "Dovetail: store results..."
+sudo cp -r /home/opnfv/dovetail/results ./
+
+echo "Dovetail: done!"
index 7436573..0ac8aa7 100644 (file)
     stream:
         - master:
             branch: '{stream}'
+            doc-version: ''
             gs-pathname: ''
             disabled: false
         - colorado:
             branch: 'stable/{stream}'
-            gs-pathname: '/{stream}'
+            doc-version: '2.0'
+            gs-pathname: '/{stream}/{doc-version}'
             disabled: false
 
 ########################
@@ -82,7 +84,7 @@
         - string:
             name: GS_URL
             default: '$GS_BASE{gs-pathname}'
-            description: "Directory where the build artifact will be located upon the completion     of the build."
+            description: "Directory where the build artifact will be located upon the completion of the build."
 
     scm:
         - gerrit-trigger-scm:
index b923aa2..95babb3 100644 (file)
@@ -6,15 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-echo "Cleaning up QTIP  docker containers/images..."
-
 # Remove previous running containers if exist
 if [[ ! -z $(docker ps -a | grep opnfv/qtip) ]]; then
     echo "Removing existing opnfv/qtip containers..."
-    running_containers=$(docker ps | grep opnfv/qtip | awk '{print $1}')
-    docker stop ${running_containers}
-    all_containers=$(docker ps -a | grep opnfv/qtip | awk '{print $1}')
-    docker rm ${all_containers}
+    # workaround: sometimes it throws an error when stopping qtip container.
+    # To make sure ci job unblocked, remove qtip container by force without stopping it.
+    docker rm -f $(docker ps -a | grep opnfv/qtip | awk '{print $1}')
 fi
 
 # Remove existing images if exist
@@ -27,4 +24,3 @@ if [[ ! -z $(docker images | grep opnfv/qtip) ]]; then
         docker rmi opnfv/qtip:$tag
     done
 fi
-
index 86d7bc4..72ade5b 100755 (executable)
@@ -37,8 +37,8 @@ rm -rf /var/log/libvirt/baremetal_logs/*.log
 CLEAN_DIB_IMAGES=${CLEAN_DIB_IMAGES:-false}
 
 if [ $CLEAN_DIB_IMAGES = "true" ]; then
-    rm -rf /httpboot/*
-    rm -rf /tftpboot/*
+    rm -rf /httpboot
+    rm -rf /tftpboot
 fi
 
 # remove VM disk images
index ef567f1..90699bd 100755 (executable)
@@ -24,6 +24,7 @@ logger = utils.getLogger("Status")
 # Initialization
 testValid = []
 otherTestCases = []
+reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
 
 # init just tempest to get the list of scenarios
 # as all the scenarios run Tempest
@@ -82,18 +83,26 @@ for version in conf.versions:
 
         # For all the scenarios get results
         for s, s_result in scenario_results.items():
+            logger.info("---------------------------------")
+            logger.info("installer %s, version %s, scenario %s:" %
+                        (installer, version, s))
+            logger.debug("Scenario results: %s" % s_result)
+
             # Green or Red light for a given scenario
             nb_test_runnable_for_this_scenario = 0
             scenario_score = 0
-
+            # url of the last jenkins log corresponding to a given
+            # scenario
+            s_url = ""
+            if len(s_result) > 0:
+                build_tag = s_result[len(s_result)-1]['build_tag']
+                logger.debug("Build tag: %s" % build_tag)
+                s_url = s_url = utils.getJenkinsUrl(build_tag)
+                logger.info("last jenkins url: %s" % s_url)
             testCases2BeDisplayed = []
             # Check if test case is runnable / installer, scenario
             # for the test case used for Scenario validation
             try:
-                logger.info("---------------------------------")
-                logger.info("installer %s, version %s, scenario %s:" %
-                            (installer, version, s))
-
                 # 1) Manage the test cases for the scenario validation
                 # concretely Tiers 0-3
                 for test_case in testValid:
@@ -185,7 +194,8 @@ for version in conf.versions:
             else:
                 logger.info(">>>>> scenario OK, save the information")
                 s_status = "OK"
-                path_validation_file = (conf.REPORTING_PATH + "/release/" + version +
+                path_validation_file = (conf.REPORTING_PATH +
+                                        "/release/" + version +
                                         "/validated_scenario_history.txt")
                 with open(path_validation_file, "a") as f:
                     time_format = "%Y-%m-%d %H:%M"
@@ -193,8 +203,20 @@ for version in conf.versions:
                             ";" + installer + ";" + s + "\n")
                     f.write(info)
 
-            scenario_result_criteria[s] = sr.ScenarioResult(s_status, s_score,
-                                                            s_score_percent)
+            # Save daily results in a file
+            path_validation_file = (conf.REPORTING_PATH +
+                                    "/release/" + version +
+                                    "/scenario_history.txt")
+            with open(path_validation_file, "a") as f:
+                info = (reportingDate + "," + s + "," + installer +
+                        "," + s_score + "," +
+                        str(round(s_score_percent)) + "\n")
+                f.write(info)
+
+            scenario_result_criteria[s] = sr.ScenarioResult(s_status,
+                                                            s_score,
+                                                            s_score_percent,
+                                                            s_url)
             logger.info("--------------------------")
 
         templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
@@ -209,7 +231,8 @@ for version in conf.versions:
                                      items=items,
                                      installer=installer,
                                      period=conf.PERIOD,
-                                     version=version)
+                                     version=version,
+                                     date=reportingDate)
 
         with open(conf.REPORTING_PATH + "/release/" + version +
                   "/index-status-" + installer + ".html", "wb") as fh:
index b0e4cf7..e1c4b61 100644 (file)
 installers = ["apex", "compass", "fuel", "joid"]
 # list of test cases declared in testcases.yaml but that must not be
 # taken into account for the scoring
-blacklist = ["ovno", "security_scan", 'odl-sfc']
+blacklist = ["ovno", "security_scan"]
 # versions = ["brahmaputra", "master"]
 versions = ["master", "colorado"]
-PERIOD = 50
+PERIOD = 10
 MAX_SCENARIO_CRITERIA = 50
 # get the last 5 test results to determinate the success criteria
 NB_TESTS = 5
index f026204..9ba02e8 100644 (file)
@@ -139,7 +139,7 @@ def getResult(testCase, installer, scenario, version):
         # print "Nb test OK (last 10 days):"+ str(nbTestOk)
         # check that we have at least 4 runs
         if len(scenario_results) < 1:
-            # No results available     
+            # No results available
             test_result_indicator = -1
         elif nbTestOk < 1:
             test_result_indicator = 0
@@ -158,3 +158,21 @@ def getResult(testCase, installer, scenario, version):
             else:
                 test_result_indicator = 2
     return test_result_indicator
+
+
+def getJenkinsUrl(build_tag):
+    # e.g. jenkins-functest-apex-apex-daily-colorado-daily-colorado-246
+    # id = 246
+    # note it is linked to jenkins format
+    # if this format changes...function to be adapted....
+    url_base = "https://build.opnfv.org/ci/view/functest/job/"
+    jenkins_url = ""
+    try:
+        build_id = [int(s) for s in build_tag.split("-") if s.isdigit()]
+        jenkins_path = filter(lambda c: not c.isdigit(), build_tag)
+        url_id = jenkins_path[8:-1] + "/" + str(build_id[0])
+        jenkins_url = url_base + url_id + "/console"
+    except:
+        print 'Impossible to get jenkins url:'
+
+    return jenkins_url
index c6c3373..5a54eed 100644 (file)
 
 class ScenarioResult(object):
 
-    def __init__(self, status, score=0, score_percent=0):
+    def __init__(self, status, score=0, score_percent=0, url_lastrun=''):
         self.status = status
         self.score = score
         self.score_percent = score_percent
+        self.url_lastrun = url_lastrun
 
     def getStatus(self):
         return self.status
@@ -22,4 +23,7 @@ class ScenarioResult(object):
         return self.score
 
     def getScorePercent(self):
-        return self.score_percent
\ No newline at end of file
+        return self.score_percent
+
+    def getUrlLastRun(self):
+        return self.url_lastrun
index 96240de..67c2349 100644 (file)
@@ -18,7 +18,7 @@
     <body>
     <div class="container">
       <div class="masthead">
-        <h3 class="text-muted">Functest status page ({{version}})</h3>
+        <h3 class="text-muted">Functest status page ({{version}}, {{date}})</h3>
         <nav>
           <ul class="nav nav-justified">
             <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
@@ -47,7 +47,7 @@
                     </tr>
                         {% for scenario,iteration in scenario_stats.iteritems() -%}
                             <tr class="tr-ok">
-                                <td>{{scenario}}</td>
+                                <td><a href={{scenario_results[scenario].getUrlLastRun()}}>{{scenario}}</a></td>
                                 <td>{%if scenario_results[scenario].getScorePercent() < 8.3 -%}
                                         <img src="../../img/gauge_0.png">
                                     {%elif scenario_results[scenario].getScorePercent() < 16.7 -%}