Merge "fuel, armband: Move mcp.rsa to /var/lib/opnfv"
author    Trevor Bramwell <tbramwell@linuxfoundation.org>
Fri, 8 Sep 2017 17:34:11 +0000 (17:34 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
Fri, 8 Sep 2017 17:34:11 +0000 (17:34 +0000)
13 files changed:
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/bottlenecks/bottlenecks-run-suite.sh
jjb/dovetail/dovetail-ci-jobs.yml
jjb/functest/functest-alpine.sh [changed mode: 0644->0755]
jjb/storperf/storperf-verify-jobs.yml
jjb/xci/xci-verify-jobs.yml
utils/fetch_os_creds.sh
utils/test/reporting/docker/reporting.sh
utils/test/reporting/reporting/bottlenecks/__init__.py [new file with mode: 0644]
utils/test/reporting/reporting/bottlenecks/reporting-status.py [new file with mode: 0644]
utils/test/reporting/reporting/bottlenecks/template/index-status-tmpl.html [new file with mode: 0644]
utils/test/reporting/reporting/reporting.yaml

index 9ccf8e3..5290fdc 100644 (file)
@@ -15,6 +15,7 @@
         - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
         - 'apex-upload-snapshot'
         - 'apex-create-snapshot'
+        - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
     # stream:    branch with - in place of / (eg. stable-arno)
     # branch:    branch (eg. stable/arno)
     stream:
                 abort-all-job: true
                 git-revision: false
 
+# Flex job
+- job-template:
+    name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
+
+    project-type: 'multijob'
+
+    disabled: false
+
+    node: 'flex-pod2'
+
+    scm:
+        - git-scm
+    triggers:
+        - 'apex-{stream}'
+    parameters:
+        - '{project}-defaults'
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - apex-parameter:
+            gs-pathname: '{gs-pathname}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-nosdn-nofeature-ha'
+            description: "Scenario to deploy with."
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+        - string:
+            name: SSH_KEY
+            default: /root/.ssh/id_rsa
+            description: 'SSH key to use for Apex'
+    properties:
+        - logrotate-default
+        - build-blocker:
+            use-build-blocker: true
+            block-level: 'NODE'
+            blocking-jobs:
+                - 'apex-verify.*'
+                - 'apex-runner.*'
+                - 'apex-.*-promote.*'
+                - 'apex-run.*'
+                - 'apex-.+-baremetal-.+'
+        - throttle:
+            max-per-node: 1
+            max-total: 10
+            option: 'project'
+    builders:
+        - description-setter:
+            description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
+        - multijob:
+            name: 'Baremetal Deploy'
+            condition: SUCCESSFUL
+            projects:
+                - name: 'apex-deploy-baremetal-{stream}'
+                  node-parameters: true
+                  current-parameters: true
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+                  git-revision: false
+        - multijob:
+            name: Yardstick
+            condition: ALWAYS
+            projects:
+                - name: 'yardstick-apex-baremetal-daily-{stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  abort-all-job: false
+                  git-revision: false
+
 ########################
 # parameter macros
 ########################
index c878fe6..ec74a74 100644 (file)
@@ -15,6 +15,7 @@
         - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
         - 'apex-upload-snapshot'
         - 'apex-create-snapshot'
+        - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
     # stream:    branch with - in place of / (eg. stable-arno)
     # branch:    branch (eg. stable/arno)
     stream:
                 abort-all-job: true
                 git-revision: false
 
+# Flex job
+- job-template:
+    name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
+
+    project-type: 'multijob'
+
+    disabled: false
+
+    node: 'flex-pod2'
+
+    scm:
+        - git-scm
+    triggers:
+        - 'apex-{stream}'
+    parameters:
+        - '{project}-defaults'
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - apex-parameter:
+            gs-pathname: '{gs-pathname}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-nosdn-nofeature-ha'
+            description: "Scenario to deploy with."
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+        - string:
+            name: SSH_KEY
+            default: /root/.ssh/id_rsa
+            description: 'SSH key to use for Apex'
+    properties:
+        - logrotate-default
+        - build-blocker:
+            use-build-blocker: true
+            block-level: 'NODE'
+            blocking-jobs:
+                - 'apex-verify.*'
+                - 'apex-runner.*'
+                - 'apex-.*-promote.*'
+                - 'apex-run.*'
+                - 'apex-.+-baremetal-.+'
+        - throttle:
+            max-per-node: 1
+            max-total: 10
+            option: 'project'
+    builders:
+        - description-setter:
+            description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
+        - multijob:
+            name: 'Baremetal Deploy'
+            condition: SUCCESSFUL
+            projects:
+                - name: 'apex-deploy-baremetal-{stream}'
+                  node-parameters: true
+                  current-parameters: true
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+                  git-revision: false
+        - multijob:
+            name: Yardstick
+            condition: ALWAYS
+            projects:
+                - name: 'yardstick-apex-baremetal-daily-{stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  abort-all-job: false
+                  git-revision: false
+
 ########################
 # parameter macros
 ########################
index a757043..6d4d2d8 100644 (file)
@@ -125,7 +125,7 @@ if [[ $SUITE_NAME == *posca* ]]; then
           -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
           -e BRANCH=${BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
           -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL} \
-          -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
+          -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e BUILD_TAG=${BUILD_TAG}"
     docker_volume="-v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp"
 
     cmd="docker run ${opts} ${envs} --name bottlenecks-load-master ${docker_volume} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
index c13c938..32cfcfa 100644 (file)
         - huawei-pod4:
             slave-label: huawei-pod4
             SUT: apex
-            auto-trigger-name: 'apex-huawei-pod4-danube-trigger'
+            auto-trigger-name: 'apex-huawei-pod4-{testsuite}-danube-trigger'
             <<: *danube
 #--------------------------------
     testsuite:
         - build-name:
             name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
         - timeout:
-            timeout: 240
+            timeout: 300
             abort: true
         - fix-workspace-permissions
 
 # trigger macros
 #--------------------------
 - trigger:
-    name: 'apex-huawei-pod4-danube-trigger'
+    name: 'apex-huawei-pod4-proposed_tests-danube-trigger'
     triggers:
-        - timed: '* 1 * * *'
+        - timed: '0 1 * * *'
+- trigger:
+    name: 'apex-huawei-pod4-compliance_set-danube-trigger'
+    triggers:
+        - timed: ''
old mode 100644 (file)
new mode 100755 (executable)
index cee3026..9be9fe5
@@ -68,7 +68,12 @@ volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol} ${cacert_file
 
 set +e
 
-tiers=(healthcheck smoke features vnf)
+if [[ ${FUNCTEST_SUITE_NAME} == 'healthcheck' ]]; then
+    tiers=(healthcheck)
+else
+    tiers=(healthcheck smoke features vnf)
+fi
+
 for tier in ${tiers[@]}; do
     FUNCTEST_IMAGE=opnfv/functest-${tier}
     echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
index 55c4e4c..f99ceea 100644 (file)
     wrappers:
         - ssh-agent-wrapper
         - build-timeout:
-            timeout: 30
+            timeout: 60
 
     parameters:
         - project-parameter:
index 61d2558..5fca9bd 100644 (file)
 - builder:
     name: 'xci-verify-healthcheck-macro'
     builders:
-        - shell: |
-            #!/bin/bash
-
-            echo "Hello World!"
+        - shell:
+            !include-raw: ../../utils/fetch_os_creds.sh
+        - shell:
+            !include-raw: ../functest/functest-alpine.sh
index 0e2a2b9..3bc66f9 100755 (executable)
@@ -95,6 +95,7 @@ fi
 ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
 
 # Start fetching the files
+info "Fetching rc file..."
 if [ "$installer_type" == "fuel" ]; then
     verify_connectivity $installer_ip
     if [ "${BRANCH}" == "master" ]; then
@@ -110,7 +111,7 @@ if [ "$installer_type" == "fuel" ]; then
             "sudo salt --out txt 'ctl*' pillar.get _param:openstack_control_address | awk '{print \$2; exit}'" | \
             sed 's/ //g') &> /dev/null
 
-        info "Fetching rc file from controller $controller_ip..."
+        info "... from controller $controller_ip..."
         ssh ${ssh_options} ubuntu@${controller_ip} "sudo cat /root/keystonercv3" > $dest_path
 
         if [[ $BUILD_TAG =~ "baremetal" ]]; then
@@ -134,7 +135,7 @@ if [ "$installer_type" == "fuel" ]; then
             error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
         fi
 
-        info "Fetching rc file from controller $controller_ip..."
+        info "... from controller $controller_ip..."
         sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \
             "scp ${ssh_options} ${controller_ip}:/root/openrc ." &> /dev/null
         sshpass -p r00tme scp 2>/dev/null ${ssh_options} root@${installer_ip}:~/openrc $dest_path &> /dev/null
@@ -159,7 +160,7 @@ elif [ "$installer_type" == "apex" ]; then
 
     # The credentials file is located in the Instack VM (192.0.2.1)
     # NOTE: This might change for bare metal deployments
-    info "Fetching rc file from Instack VM $installer_ip..."
+    info "... from Instack VM $installer_ip..."
     if [ -f /root/.ssh/id_rsa ]; then
         chmod 600 /root/.ssh/id_rsa
     fi
@@ -169,7 +170,7 @@ elif [ "$installer_type" == "compass" ]; then
     if [ "${BRANCH}" == "master" ]; then
         sudo docker cp compass-tasks:/opt/openrc $dest_path &> /dev/null
         sudo chown $(whoami):$(whoami) $dest_path
-        sudo docker cp compass-tasks:/opt/os_cacert $os_cacert &> /dev/null
+        sudo docker cp compass-tasks:/opt/os_cacert $os_cacert
     else
         verify_connectivity $installer_ip
         controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
@@ -181,7 +182,7 @@ elif [ "$installer_type" == "compass" ]; then
             error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
         fi
 
-        info "Fetching rc file from controller $controller_ip..."
+        info "... from controller $controller_ip..."
         sshpass -p root ssh 2>/dev/null $ssh_options root@${installer_ip} \
             "scp $ssh_options ${controller_ip}:/opt/admin-openrc.sh ." &> /dev/null
         sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null
@@ -205,7 +206,7 @@ elif [ "$installer_type" == "compass" ]; then
 elif [ "$installer_type" == "joid" ]; then
     # do nothing...for the moment
     # we can either do a scp from the jumphost or use the -v option to transmit the param to the docker file
-    echo "Do nothing, creds will be provided through volume option at docker creation for joid"
+    info "Do nothing, creds will be provided through volume option at docker creation for joid"
 
 elif [ "$installer_type" == "foreman" ]; then
     #ip_foreman="172.30.10.73"
@@ -242,6 +243,10 @@ elif [ "$installer_type" == "daisy" ]; then
 
     sshpass -p r00tme scp 2>/dev/null $ssh_options root@${installer_ip}:/etc/kolla/admin-openrc.sh $dest_path &> /dev/null
 
+elif [ "$installer_type" == "osa" ]; then
+    # Get RC file from control server
+    filename=$(ssh -o StrictHostKeyChecking=no root@${controller_ip} find /var/lib/lxc/controller00_nova_api_placement_container-* -name openrc)
+    scp root@${controller_ip}:${filename} ${dest_path}
 else
     error "Installer $installer is not supported by this script"
 fi
index d8db620..6cc7a7c 100755 (executable)
@@ -4,7 +4,7 @@ export PYTHONPATH="${PYTHONPATH}:./reporting"
 export CONFIG_REPORTING_YAML=./reporting/reporting.yaml
 
 declare -a versions=(danube master)
-declare -a projects=(functest storperf yardstick qtip vsperf)
+declare -a projects=(functest storperf yardstick qtip vsperf bottlenecks)
 
 project=$1
 reporting_type=$2
diff --git a/utils/test/reporting/reporting/bottlenecks/__init__.py b/utils/test/reporting/reporting/bottlenecks/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/utils/test/reporting/reporting/bottlenecks/reporting-status.py b/utils/test/reporting/reporting/bottlenecks/reporting-status.py
new file mode 100644 (file)
index 0000000..8966d06
--- /dev/null
@@ -0,0 +1,145 @@
+#!/usr/bin/python
+#
+# This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+import datetime
+import os
+
+import jinja2
+
+import reporting.utils.reporting_utils as rp_utils
+import reporting.utils.scenarioResult as sr
+
+INSTALLERS = rp_utils.get_config('general.installers')
+VERSIONS = rp_utils.get_config('general.versions')
+PERIOD = rp_utils.get_config('general.period')
+
+# Logger
+LOGGER = rp_utils.getLogger("Bottlenecks-Status")
+reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
+
+LOGGER.info("*******************************************")
+LOGGER.info("*   Generating reporting scenario status  *")
+LOGGER.info("*   Data retention = %s days              *", PERIOD)
+LOGGER.info("*                                         *")
+LOGGER.info("*******************************************")
+
+# retrieve the list of bottlenecks tests
+BOTTLENECKS_TESTS = rp_utils.get_config('bottlenecks.test_list')
+LOGGER.info("Bottlenecks tests: %s", BOTTLENECKS_TESTS)
+
+# For all the versions
+for version in VERSIONS:
+    # For all the installers
+    for installer in INSTALLERS:
+        # get scenarios results data
+        scenario_results = rp_utils.getScenarios("bottlenecks",
+                                                 "posca_factor_ping",
+                                                 installer,
+                                                 version)
+        LOGGER.info("scenario_results: %s", scenario_results)
+
+        scenario_stats = rp_utils.getScenarioStats(scenario_results)
+        LOGGER.info("scenario_stats: %s", scenario_stats)
+        items = {}
+        scenario_result_criteria = {}
+
+        # From each scenarios get results list
+        for s, s_result in scenario_results.items():
+            LOGGER.info("---------------------------------")
+            LOGGER.info("installer %s, version %s, scenario %s", installer,
+                        version, s)
+            ten_criteria = len(s_result)
+
+            ten_score = 0
+            for v in s_result:
+                if "PASS" in v['criteria']:
+                    ten_score += 1
+
+            LOGGER.info("ten_score: %s / %s", ten_score, ten_criteria)
+
+            four_score = 0
+            try:
+                LASTEST_TESTS = rp_utils.get_config(
+                    'general.nb_iteration_tests_success_criteria')
+                s_result.sort(key=lambda x: x['start_date'])
+                four_result = s_result[-LASTEST_TESTS:]
+                LOGGER.debug("four_result: {}".format(four_result))
+                LOGGER.debug("LASTEST_TESTS: {}".format(LASTEST_TESTS))
+                # logger.debug("four_result: {}".format(four_result))
+                four_criteria = len(four_result)
+                for v in four_result:
+                    if "PASS" in v['criteria']:
+                        four_score += 1
+                LOGGER.info("4 Score: %s / %s ", four_score,
+                            four_criteria)
+            except Exception:
+                LOGGER.error("Impossible to retrieve the four_score")
+
+            try:
+                s_status = (four_score * 100) / four_criteria
+            except Exception:
+                s_status = 0
+            LOGGER.info("Score percent = %s", str(s_status))
+            s_four_score = str(four_score) + '/' + str(four_criteria)
+            s_ten_score = str(ten_score) + '/' + str(ten_criteria)
+            s_score_percent = str(s_status)
+
+            LOGGER.debug(" s_status: %s", s_status)
+            if s_status == 100:
+                LOGGER.info(">>>>> scenario OK, save the information")
+            else:
+                LOGGER.info(">>>> scenario not OK, last 4 iterations = %s, \
+                             last 10 days = %s", s_four_score, s_ten_score)
+
+            s_url = ""
+            if len(s_result) > 0:
+                build_tag = s_result[len(s_result)-1]['build_tag']
+                LOGGER.debug("Build tag: %s", build_tag)
+                s_url = rp_utils.getJenkinsUrl(build_tag)
+                LOGGER.info("last jenkins url: %s", s_url)
+
+            # Save daily results in a file
+            path_validation_file = ("./display/" + version +
+                                    "/bottlenecks/scenario_history.txt")
+
+            if not os.path.exists(path_validation_file):
+                with open(path_validation_file, 'w') as f:
+                    info = 'date,scenario,installer,details,score\n'
+                    f.write(info)
+
+            with open(path_validation_file, "a") as f:
+                info = (reportingDate + "," + s + "," + installer +
+                        "," + s_ten_score + "," +
+                        str(s_score_percent) + "\n")
+                f.write(info)
+
+            scenario_result_criteria[s] = sr.ScenarioResult(s_status,
+                                                            s_four_score,
+                                                            s_ten_score,
+                                                            s_score_percent,
+                                                            s_url)
+
+            LOGGER.info("--------------------------")
+
+        templateLoader = jinja2.FileSystemLoader(".")
+        templateEnv = jinja2.Environment(loader=templateLoader,
+                                         autoescape=True)
+
+        TEMPLATE_FILE = ("./reporting/bottlenecks/template"
+                         "/index-status-tmpl.html")
+        template = templateEnv.get_template(TEMPLATE_FILE)
+
+        outputText = template.render(scenario_results=scenario_result_criteria,
+                                     installer=installer,
+                                     period=PERIOD,
+                                     version=version,
+                                     date=reportingDate)
+
+        with open("./display/" + version +
+                  "/bottlenecks/status-" + installer + ".html", "wb") as fh:
+            fh.write(outputText)
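
For reference, the per-scenario scoring above reduces to two PASS ratios and a percentage. A minimal sketch of that computation follows; the sample result list and the value 4 for general.nb_iteration_tests_success_criteria are illustrative assumptions, and floor division mirrors the script's Python 2 integer arithmetic.

# Sketch of the scoring in reporting-status.py; the sample results are made up.
results = [
    {'criteria': 'PASS', 'start_date': '2017-09-01'},
    {'criteria': 'FAIL', 'start_date': '2017-09-02'},
    {'criteria': 'PASS', 'start_date': '2017-09-03'},
    {'criteria': 'PASS', 'start_date': '2017-09-04'},
    {'criteria': 'PASS', 'start_date': '2017-09-05'},
]
LATEST_TESTS = 4  # assumed value of general.nb_iteration_tests_success_criteria

# "last 10 days" score: PASS ratio over every result kept in the period
ten_criteria = len(results)
ten_score = sum(1 for r in results if 'PASS' in r['criteria'])

# "last 4 iterations" score: PASS ratio over the most recent runs only
results.sort(key=lambda r: r['start_date'])
four_result = results[-LATEST_TESTS:]
four_criteria = len(four_result)
four_score = sum(1 for r in four_result if 'PASS' in r['criteria'])

# the scenario is flagged OK only when every recent iteration passed (100%)
s_status = (four_score * 100) // four_criteria
print("last 10 days: %d/%d, last %d runs: %d/%d, score: %d%%"
      % (ten_score, ten_criteria, LATEST_TESTS,
         four_score, four_criteria, s_status))
# -> last 10 days: 4/5, last 4 runs: 3/4, score: 75%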
diff --git a/utils/test/reporting/reporting/bottlenecks/template/index-status-tmpl.html b/utils/test/reporting/reporting/bottlenecks/template/index-status-tmpl.html
new file mode 100644 (file)
index 0000000..c4497ac
--- /dev/null
@@ -0,0 +1,114 @@
+ <html>
+  <head>
+    <meta charset="utf-8">
+    <!-- Bootstrap core CSS -->
+    <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet">
+    <link href="../../css/default.css" rel="stylesheet">
+    <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
+    <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
+    <script type="text/javascript" src="http://d3js.org/d3.v2.min.js"></script>
+    <script type="text/javascript" src="../../js/gauge.js"></script>
+    <script type="text/javascript" src="../../js/trend.js"></script>
+    <script>
+        function onDocumentReady() {
+            // Gauge management
+            {% for scenario in scenario_results.keys() -%}
+            var gaugeScenario{{loop.index}} = gauge('#gaugeScenario{{loop.index}}');
+            {%- endfor %}
+            // assign success rate to the gauge
+            function updateReadings() {
+                {% for scenario in scenario_results.keys() -%}
+                 gaugeScenario{{loop.index}}.update({{scenario_results[scenario].getScorePercent()}});
+                 {%- endfor %}
+            }
+            updateReadings();
+        }
+
+        // trend line management
+        d3.csv("./scenario_history.txt", function(data) {
+            // ***************************************
+            // Create the trend line
+            {% for scenario in scenario_results.keys() -%}
+            // for scenario {{scenario}}
+            // Filter results
+                var trend{{loop.index}} = data.filter(function(row) {
+                    return row["scenario"]=="{{scenario}}" && row["installer"]=="{{installer}}";
+                })
+            // Parse the date
+            trend{{loop.index}}.forEach(function(d) {
+                d.date = parseDate(d.date);
+                d.score = +d.score
+            });
+            // Draw the trend line
+            var mytrend = trend("#trend_svg{{loop.index}}",trend{{loop.index}})
+            // ****************************************
+            {%- endfor %}
+        });
+        if ( !window.isLoaded ) {
+            window.addEventListener("load", function() {
+            onDocumentReady();
+            }, false);
+        } else {
+            onDocumentReady();
+        }
+    </script>
+    <script type="text/javascript">
+    $(document).ready(function (){
+        $(".btn-more").click(function() {
+            $(this).hide();
+            $(this).parent().find(".panel-default").show();
+        });
+    })
+    </script>
+  </head>
+    <body>
+    <div class="container">
+      <div class="masthead">
+          <h3 class="text-muted">Bottlenecks status page ({{version}}, {{date}})</h3>
+        <nav>
+          <ul class="nav nav-justified">
+            <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
+            <li><a href="status-apex.html">Apex</a></li>
+            <li><a href="status-compass.html">Compass</a></li>
+            <li><a href="status-fuel.html">Fuel</a></li>
+            <li><a href="status-joid.html">Joid</a></li>
+          </ul>
+        </nav>
+      </div>
+<div class="row">
+    <div class="col-md-1"></div>
+    <div class="col-md-10">
+        <div class="page-header">
+            <h2>{{installer}}</h2>
+        </div>
+        <div><h1>Reported values represent the percentage of completed
+          CI tests (posca_factor_ping) during the reporting period, where results
+          were communicated to the Test Database.</h1></div>
+        <div class="scenario-overview">
+            <div class="panel-heading"><h4><b>List of last scenarios ({{version}}) run over the last {{period}} days </b></h4></div>
+                <table class="table">
+                    <tr>
+                        <th width="40%">Scenario</th>
+                        <th width="20%">Status</th>
+                        <th width="20%">Trend</th>
+                        <th width="10%">Last 4 Iterations</th>
+                        <th width="10%">Last 10 Days</th>
+                    </tr>
+                        {% for scenario,result in scenario_results.iteritems() -%}
+                            <tr class="tr-ok">
+                                <td><a href="{{scenario_results[scenario].getLastUrl()}}">{{scenario}}</a></td>
+                                <td><div id="gaugeScenario{{loop.index}}"></div></td>
+                                <td><div id="trend_svg{{loop.index}}"></div></td>
+                                <td>{{scenario_results[scenario].getFourDaysScore()}}</td>
+                                <td>{{scenario_results[scenario].getTenDaysScore()}}</td>
+                            </tr>
+                        {%- endfor %}
+                </table>
+        </div>
+
+
+    </div>
+    <div class="col-md-1"></div>
+</div>
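
The template expects every value in scenario_results to provide getScorePercent(), getLastUrl(), getFourDaysScore() and getTenDaysScore(). A rendering sketch using a stub in place of reporting.utils.scenarioResult.ScenarioResult; the stub values, the working directory and the Python 2 interpreter are assumptions (the last because the template calls iteritems()).

# Render index-status-tmpl.html with a stub result object (values are made up).
# Run from utils/test/reporting so the relative template path resolves.
import jinja2


class StubScenarioResult(object):
    """Exposes the accessors the template calls on each result object."""

    def getScorePercent(self):
        return 75

    def getLastUrl(self):
        return 'https://build.opnfv.org/ci/job/example/1/'

    def getFourDaysScore(self):
        return '3/4'

    def getTenDaysScore(self):
        return '8/10'


env = jinja2.Environment(loader=jinja2.FileSystemLoader('.'), autoescape=True)
template = env.get_template(
    './reporting/bottlenecks/template/index-status-tmpl.html')
html = template.render(
    scenario_results={'posca_factor_ping': StubScenarioResult()},
    installer='apex', period=10, version='master', date='2017-09-08 17:34')
print(html[:200])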
index 26feb31..9bb90b8 100644 (file)
@@ -64,6 +64,10 @@ qtip:
     log_level: ERROR
     period: 1
 
-bottleneck:
+bottlenecks:
+    test_list:
+        - posca_factor_ping
+        - posca_factor_system_bandwidth
+    log_level: ERROR
 
 vsperf:
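
For clarity, a dotted key such as 'bottlenecks.test_list' simply walks the nesting shown above. The helper below only illustrates that mapping and is not rp_utils.get_config's actual implementation; the config path is an assumption.

# Illustration of how a dotted key resolves against the YAML above;
# rp_utils.get_config may be implemented differently.
import yaml


def lookup(dotted_key, path='./reporting/reporting.yaml'):
    # hypothetical helper, shown only to make the key/structure mapping explicit
    with open(path) as f:
        node = yaml.safe_load(f)
    for part in dotted_key.split('.'):
        node = node[part]
    return node


print(lookup('bottlenecks.test_list'))
# expected: ['posca_factor_ping', 'posca_factor_system_bandwidth']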