Merge "updates to supporting Apex build rewrite"
authorTim Rozet <trozet@redhat.com>
Thu, 10 Mar 2016 13:12:38 +0000 (13:12 +0000)
committerGerrit Code Review <gerrit@172.30.200.206>
Thu, 10 Mar 2016 13:12:38 +0000 (13:12 +0000)
18 files changed:
docs/how-to-use-docs/documentation-example.rst
jjb/functest/functest-ci-jobs.yml
utils/jenkins-jnlp-connect.sh
utils/test/dashboard/css/bootstrap.min.css [moved from utils/test/result_collection_api/tools/dashboard/css/bootstrap.min.css with 100% similarity]
utils/test/dashboard/css/opnfv_dashboard_tests.css [moved from utils/test/result_collection_api/tools/dashboard/css/opnfv_dashboard_tests.css with 100% similarity]
utils/test/dashboard/js/bootstrap.min.js [moved from utils/test/result_collection_api/tools/dashboard/js/bootstrap.min.js with 100% similarity]
utils/test/dashboard/js/dygraph-combined.js [moved from utils/test/result_collection_api/tools/dashboard/js/dygraph-combined.js with 100% similarity]
utils/test/dashboard/js/jquery-2.2.0.min.js [moved from utils/test/result_collection_api/tools/dashboard/js/jquery-2.2.0.min.js with 100% similarity]
utils/test/dashboard/js/opnfv_dashboard_tests.js [moved from utils/test/result_collection_api/tools/dashboard/js/opnfv_dashboard_tests.js with 100% similarity]
utils/test/dashboard/js/opnfv_dashboard_tests_conf.js [moved from utils/test/result_collection_api/tools/dashboard/js/opnfv_dashboard_tests_conf.js with 100% similarity]
utils/test/dashboard/opnfv_dashboard_tests.html [moved from utils/test/result_collection_api/tools/dashboard/opnfv_dashboard_tests.html with 100% similarity]
utils/test/reporting/default.css [moved from utils/test/result_collection_api/tools/reporting/default.css with 100% similarity]
utils/test/reporting/index-tempest-tmpl.html [new file with mode: 0644]
utils/test/reporting/index-tmpl.html [moved from utils/test/result_collection_api/tools/reporting/index-tmpl.html with 100% similarity]
utils/test/reporting/reporting-tempest.py [new file with mode: 0644]
utils/test/reporting/reporting.py [moved from utils/test/result_collection_api/tools/reporting/reporting.py with 100% similarity]
utils/test/result_collection_api/samples/sample.json.postman_collection [moved from utils/test/result_collection_api/tools/samples/sample.json.postman_collection with 100% similarity]
utils/test/scripts/backup-db.sh [moved from utils/test/result_collection_api/tools/backup-db.sh with 100% similarity]

index c0ffc95..5e2a8fb 100644 (file)
@@ -82,10 +82,29 @@ Writing RST Markdown
 
 See http://sphinx-doc.org/rest.html .
 
-You can add dedicated contents by using 'only' directive with build type
-('html' and 'pdf') for OPNFV document
+**Hint:**
+Tables and their contents are not automatically adjusted, so you may need to
+fix your source text if a table is truncated in the PDF version. Alternatively,
+the 'longtable' option splits a table vertically (by rows) across multiple
+pages, which is useful when rendering tables that contain many rows.
+
+.. code-block:: bash
+
+    .. table::
+        :class: longtable
+
+        +------------------------+------------+----------+----------+
+        | Header row, column 1   | Header 2   | Header 3 | Header 4 |
+        +========================+============+==========+==========+
+        | body row 1, column 1   | column 2   | column 3 | column 4 |
+        +------------------------+------------+----------+----------+
+        | body row 2             | ...        | ...      |          |
+        +------------------------+------------+----------+----------+
 
-Example :
+**Hint:**
+You can add dedicated content by using the 'only' directive with build type
+('html' and 'pdf') for OPNFV documents. However, this is discouraged,
+since it may produce different views in the HTML and PDF versions.
 
 .. code-block:: bash
 
@@ -116,9 +135,19 @@ are not set in the conf.py .
 
 See http://sphinx-doc.org/config.html to learn sphinx configuration.
 
-Note: you can leave the file path for OPNFV logo image which will be prepared
+**Note:**
+You can leave the file path for OPNFV logo image which will be prepared
 before each document build.
 
+**Hint:**
+In PDF, figures are floated to obtain a better layout. If you want to avoid
+such automated repositioning, add this option to your conf.py after copying
+the default configuration into the document directory.
+
+.. code-block:: bash
+
+    latex_elements = {'figure_align': 'H'}
+
 Versioning
 ==========
 
index 9ec4268..90b5a67 100644 (file)
         - shell: |
             #!/bin/bash
             set +e
-            flag="-s"
-            if [ "${PUSH_RESULTS_TO_DB}" == "true" ]; then
-                flag=$flag" -r"
-            fi
+            flags="-s"
+            [[ "$PUSH_RESULTS_TO_DB" == "true" ]] && flags+=" -r"
             echo "Functest: run $FUNCTEST_SUITE_NAME"
-            cmd="${FUNCTEST_REPO_DIR}/docker/run_tests.sh --test $FUNCTEST_SUITE_NAME ${flag}"
+            cmd="${FUNCTEST_REPO_DIR}/docker/run_tests.sh --test $FUNCTEST_SUITE_NAME ${flags}"
             container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
             docker exec $container_id $cmd
 
         - shell: |
             #!/bin/bash
             set +e
-            flag="-s"
-            if [ "${PUSH_RESULTS_TO_DB}" == "true" ]; then
-                flag=$flag" -r"
-            fi
-            cmd="${FUNCTEST_REPO_DIR}/docker/run_tests.sh ${flag}"
+            flags="-s"
+            [[ "$PUSH_RESULTS_TO_DB" == "true" ]] && flags+=" -r"
+            cmd="${FUNCTEST_REPO_DIR}/docker/run_tests.sh ${flags}"
             container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
             docker exec $container_id $cmd
 
index 6fb6827..e0c6ff2 100755 (executable)
@@ -48,10 +48,14 @@ if [[ $(whoami) != "root" && $(whoami) != "$jenkinsuser"  ]]; then
   exit 1
 fi
 
-if [[ $distro == Debian || $distro == Ubuntu ]]; then
+if [ -d /etc/monit/conf.d ]; then
   monitconfdir="/etc/monit/conf.d/"
-elif [[ $distro == Fedora || $distro == CentOS || $distro == Redhat ]]; then
+elif [ -d /etc/monit.d ]; then
   monitconfdir="/etc/monit.d"
+else
+  echo "Could not determine the location of the monit configuration file."
+  echo "Make sure monit is installed."
+  exit 1
 fi
 
 #make pid dir
@@ -186,6 +190,5 @@ do
 done
 
 connectionstring="java -jar slave.jar -jnlpUrl https://build.opnfv.org/ci/computer/"$slave_name"/slave-agent.jnlp -secret "$slave_secret" -noCertificateCheck "
-distro="$(tr -s ' \011' '\012' < /etc/issue | head -n 1)"
 
 main "$@"
diff --git a/utils/test/reporting/index-tempest-tmpl.html b/utils/test/reporting/index-tempest-tmpl.html
new file mode 100644 (file)
index 0000000..24d87be
--- /dev/null
@@ -0,0 +1,90 @@
+ <html>
+  <head>
+    <meta charset="utf-8">
+    <!-- Bootstrap core CSS -->
+    <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet">
+    <link href="default.css" rel="stylesheet">
+    <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
+    <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
+    <script type="text/javascript">
+    $(document).ready(function (){
+        $(".btn-more").click(function() {
+            $(this).hide();
+            $(this).parent().find(".panel-default").show();
+        });
+    })
+    </script>
+  </head>
+    <body>
+    <div class="container">
+      <div class="masthead">
+        <h3 class="text-muted">Tempest status page</h3>
+        <nav>
+          <ul class="nav nav-justified">
+            <li class="active"><a href="#">Home</a></li>
+            <li><a href="index-tempest-apex.html">Apex</a></li>
+            <li><a href="index-tempest-compass.html">Compass</a></li>
+            <li><a href="index-tempest-fuel.html">Fuel</a></li>
+            <li><a href="index-tempest-joid.html">Joid</a></li>
+          </ul>
+        </nav>
+      </div>
+<div class="row">
+    <div class="col-md-1"></div>
+    <div class="col-md-10">
+        <div class="page-header">
+            <h2>{{installer}}</h2>
+        </div>
+        {% for scenario_name, results in scenario_results.iteritems() -%}
+        <div class="scenario-part">
+            <div class="page-header">
+                <h3><span class="glyphicon glyphicon-chevron-right"> <b>{{scenario_name}}</b></h3>
+            </div>
+            {% for result in results -%}
+                {% if loop.index > 2 -%}
+                    <div class="panel panel-default" hidden>
+                {%- else -%}
+                    <div class="panel panel-default">
+                {%- endif %}
+                        <div class="panel-heading">
+                            <div class="progress-bar" role="progressbar" aria-valuenow="{{result.pr_step_ok}}" aria-valuemin="0" aria-valuemax="100" style="width: {{result.pr_step_ok}}%"></div>
+                            <span class="panel-header-item">
+                                <h4><b>{{result.creation_date}}</b></h4>
+                            </span>
+                            <span class="badge panel-pod-name">{{result.pod_name}}</span>
+                        </div>
+                        <table class="table">
+                            <tr>
+                                <th width="20%">Item</th>
+                                <th width="10%">Result</th>
+                                <th width="10%">Status</th>
+                                <th width="60%">Errors</th>
+                            </tr>
+                            {% for item in items -%}
+                                {% if item in result.details.keys() -%}
+                                    {% if result.criteria[item] -%}
+                                        <tr class="tr-ok">
+                                            <td>{{item}}</td>
+                                            <td>{{result.details[item]}}</td>
+                                            <td><span class="glyphicon glyphicon-ok"></td>
+                                            <td>{{result.errors[item]}}</td>
+                                        </tr>
+                                    {%- else -%}
+                                        <tr class="tr-danger">
+                                            <td>{{item}}</td>
+                                            <td>{{result.details[item]}}</td>
+                                            <td><span class="glyphicon glyphicon-remove"></td>
+                                            <td>{{result.errors[item]}}</td>
+                                        </tr>
+                                    {%- endif %}
+                                {%- endif %}
+                            {%- endfor %}
+                        </table>
+                    </div>
+            {%- endfor %}
+            <button type="button" class="btn btn-more">More than two</button>
+        </div>
+        {%- endfor %}
+    </div>
+    <div class="col-md-1"></div>
+</div>
diff --git a/utils/test/reporting/reporting-tempest.py b/utils/test/reporting/reporting-tempest.py
new file mode 100644 (file)
index 0000000..944b428
--- /dev/null
@@ -0,0 +1,99 @@
+from urllib2 import Request, urlopen, URLError
+import json
+import jinja2
+import os
+
+installers = ["apex", "compass", "fuel", "joid"]
+items = ["tests", "Success rate", "duration"]
+
+for installer in installers:
+    # we consider the Tempest results of the last 7 days
+    url = "http://testresults.opnfv.org/testapi/results?case=Tempest"
+    request = Request(url + '&period=7&installer=' + installer)
+
+    try:
+        response = urlopen(request)
+        k = response.read()
+        results = json.loads(k)
+    except URLError, e:
+        print 'No kittez. Got an error code:', e
+
+    test_results = results['test_results']
+    test_results.reverse()
+
+    scenario_results = {}
+    criteria = {}
+    errors = {}
+
+    for r in test_results:
+        # Retrieve all the scenarios per installer
+        if not r['version'] in scenario_results.keys():
+            scenario_results[r['version']] = []
+        scenario_results[r['version']].append(r)
+
+    for s, s_result in scenario_results.items():
+        scenario_results[s] = s_result[0:5]
+        # For each scenario, we build a result object to deal with
+        # results, criteria and error handling
+        for result in scenario_results[s]:
+            result["creation_date"] = result["creation_date"].split(".")[0]
+
+            # retrieve results
+            # ****************
+            nb_tests_run = result['details']['tests']
+            if nb_tests_run != 0:
+                success_rate = 100*(int(result['details']['tests']) - int(result['details']['failures']))/int(result['details']['tests'])
+            else:
+                success_rate = 0
+
+            result['details']["tests"] = nb_tests_run
+            result['details']["Success rate"] = str(success_rate) + "%"
+
+            # Criteria management
+            # *******************
+            crit_tests = False
+            crit_rate = False
+            crit_time = False
+
+            # Expect that at least 200 tests are run
+            if nb_tests_run >= 200:
+                crit_tests = True
+
+            # Expect that at least 90% of success
+            if success_rate >= 90:
+                crit_rate = True
+
+            # Expect that the suite duration is inferior to 45m
+            if result['details']['duration'] < 2700:
+                crit_time = True
+
+            result['criteria'] = {'tests': crit_tests,
+                                  'Success rate': crit_rate,
+                                  'duration': crit_time}
+
+            # error management
+            # ****************
+
+            # TODO get information from artefact based on build tag
+            # to identify errors of the associated run
+            # build tag needed to wget errors on the artifacts
+            # the idea is to list the tests in errors and provide the link
+            # towards complete artifact
+            # another option will be to put the errors in the DB
+            # (in the detail section)...
+            result['errors'] = {'tests': "",
+                                'Success rate': "",
+                                'duration': ""}
+
+    templateLoader = jinja2.FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
+    templateEnv = jinja2.Environment(loader=templateLoader)
+
+    TEMPLATE_FILE = "index-tempest-tmpl.html"
+    template = templateEnv.get_template(TEMPLATE_FILE)
+
+    outputText = template.render(scenario_results=scenario_results,
+                                 items=items,
+                                 installer=installer)
+
+    with open("index-tempest-" + installer + ".html", "wb") as fh:
+        fh.write(outputText)