Merge "fuel: Enable 3rd party CI for fuel plugin onos"
author     Fatih Degirmenci <fatih.degirmenci@ericsson.com>  Wed, 5 Oct 2016 22:07:44 +0000
committer  Gerrit Code Review <gerrit@opnfv.org>             Wed, 5 Oct 2016 22:07:44 +0000
26 files changed:
INFO
jjb-sandbox/releng/releng-sandbox-jobs.yml [new file with mode: 0644]
jjb-sandbox/releng/verify-sandbox-jobs.sh [new file with mode: 0755]
jjb/armband/armband-ci-jobs.yml
jjb/dovetail/dovetail-run.sh
jjb/fuel/fuel-deploy.sh
jjb/infra/bifrost-verify-jobs.yml
jjb/infra/bifrost-verify.sh
jjb/opnfv/opnfv-docker.sh
jjb/opnfv/opnfv-utils.yml [new file with mode: 0644]
jjb/releng/releng-ci-jobs.yml
prototypes/bifrost/playbooks/roles/bifrost-prepare-for-test-dynamic/defaults/main.yml [deleted file]
prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml
prototypes/bifrost/scripts/test-bifrost-deployment.sh
prototypes/puppet-infracloud/hiera/common.yaml
prototypes/puppet-infracloud/manifests/site.pp
utils/jenkins-jnlp-connect.sh
utils/test/dashboard/dashboard/elastic2kibana/main.py
utils/test/dashboard/dashboard/elastic2kibana/templates/dashboard.json [new file with mode: 0644]
utils/test/dashboard/dashboard/elastic2kibana/templates/visualization.json [new file with mode: 0644]
utils/test/reporting/css/default.css [moved from utils/test/reporting/functest/default.css with 76% similarity]
utils/test/reporting/functest/reporting-status.py
utils/test/reporting/functest/reportingConf.py
utils/test/reporting/functest/template/index-status-tmpl.html
utils/test/reporting/js/gauge.js [new file with mode: 0644]
utils/test/reporting/js/trend.js [new file with mode: 0644]

diff --git a/INFO b/INFO
index b723fa8..466afb8 100644 (file)
--- a/INFO
+++ b/INFO
@@ -20,6 +20,7 @@ Jose Lausuch (Ericsson, jose.lausuch@ericsson.com)
 Ryota Mibu (NEC, r-mibu@cq.jp.nec.com)
 Mei Mei (Huawei, meimei@huawei.com)
 Trevor Bramwell (Linux Foundation, tbramwell@linuxfoundation.org)
+Serena Feng (ZTE, feng.xiaowei@zte.com.cn)
 
 Link to TSC approval of the project: http://ircbot.wl.linuxfoundation.org/meetings/opnfv-meeting/2015/opnfv-meeting.2015-07-14-14.00.html
 Link to TSC voting for removal of Victor Laza as committer: http://meetbot.opnfv.org/meetings/opnfv-meeting/2016/opnfv-meeting.2016-02-16-14.59.html
diff --git a/jjb-sandbox/releng/releng-sandbox-jobs.yml b/jjb-sandbox/releng/releng-sandbox-jobs.yml
new file mode 100644 (file)
index 0000000..ee35f42
--- /dev/null
@@ -0,0 +1,77 @@
+- project:
+    name: 'releng-sandbox-jobs'
+    jobs:
+        - 'releng-deploy-sandbox'
+        - 'releng-clear-jenkins-jobs'
+
+    project: 'releng'
+
+- job-template:
+    name: 'releng-deploy-sandbox'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: 'master'
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'redeploy'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: 'releng'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/master'
+                file-paths:
+                    - compare-type: ANT
+                      pattern: jjb-sandbox/**
+                    - compare-type: ANT
+                      pattern: utils/**
+
+    builders:
+        - shell:
+            !include-raw-escape: verify-sandbox-jobs.sh
+        - shell: |
+            #! /bin/bash
+            jenkins-jobs update -r jjb-sandbox
+
+    publishers:
+        - archive-artifacts:
+            artifacts: 'job_output/*'
+
+- job-template:
+    name: 'releng-clear-jenkins-jobs'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: 'master'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            choosing-strategy: 'default'
+
+    triggers:
+        - timed: '@weekly'
+
+    builders:
+        - shell: |
+            #! /bin/bash
+            jenkins-jobs delete -r -p jjb-sandbox -x jjb-sandbox/releng
diff --git a/jjb-sandbox/releng/verify-sandbox-jobs.sh b/jjb-sandbox/releng/verify-sandbox-jobs.sh
new file mode 100755 (executable)
index 0000000..8f67e74
--- /dev/null
@@ -0,0 +1,21 @@
+#! /bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+# Test for non-ASCII characters: they can pass local testing and end up breaking things in production.
+for x in $(find . -name *\.yml); do
+
+  if LC_ALL=C grep -q '[^[:print:][:space:]]' "$x"; then
+    echo "file "$x" contains non-ascii characters"
+    exit 1
+  fi
+
+done
+
+jenkins-jobs test -r jjb/releng-defaults.yaml:jjb/releng-macros.yaml:jjb/opnfv/installer-params.yml:jjb/opnfv/slave-params.yml:jjb-sandbox \
+    -o job_output
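
Note: the check above relies on the C locale so that any byte outside the ASCII
printable/whitespace range is flagged before "jenkins-jobs test" runs. A rough
Python equivalent of the grep expression, for illustration only (the sample
strings are made up, not part of the job):

    import string

    def has_non_ascii(text):
        # Analogue of: LC_ALL=C grep -q '[^[:print:][:space:]]'
        allowed = set(string.printable)   # ASCII printable characters + whitespace
        return any(ch not in allowed for ch in text)

    print(has_non_ascii("    - timed: '@midnight'"))           # False
    print(has_non_ascii(u"    - timed: '@midnight\u00a0'"))    # True (non-breaking space)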
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index f6b4865..55ab7fc 100644 (file)
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 1'
+        - timed: '0 0 * * 1,6'
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 3'
+        - timed: '0 0 * * 3,7'
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-colorado-trigger'
     triggers:
-        - timed: '0 4 * * *'
+        - timed: '0 8 * * 1,4,6'
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-colorado-trigger'
     triggers:
-        - timed: '0 8 * * *'
+        - timed: '0 16 * * 2,5'
 - trigger:
-    name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-colorado-trigger'
+    name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-colorado-trigger'
     triggers:
-        - timed: '0 12 * * *'
+        - timed: '0 8 * * 1,3,6'
 - trigger:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-colorado-trigger'
+    name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-colorado-trigger'
     triggers:
-        - timed: '0 16 * * *'
+        - timed: '0 16 * * 2,4,7'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-colorado-trigger'
     triggers:
-        - timed: '0 20 * * *'
+        - timed: '0 8 * * 3,5,7'
 #---------------------------------------------------------------
 # Enea Armband CI Virtual Triggers running against master branch
 #---------------------------------------------------------------
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
index 6453425..3f7a47b 100755 (executable)
@@ -48,5 +48,8 @@ sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/dovetail:${DOCKER_T
 
 echo "Dovetail: store results..."
 sudo cp -r /home/opnfv/dovetail/results ./
+# Make sure the result files copied above are owned by jenkins;
+# otherwise the next attempt to wipe the workspace will fail.
+sudo chown -R jenkins:jenkins ${WORKSPACE}/results
 
 echo "Dovetail: done!"
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index 2104d90..ef47ff0 100755 (executable)
@@ -104,58 +104,5 @@ if [[ $exit_code -ne 0 ]]; then
     exit $exit_code
 else
     echo "Deployment is successful!"
-fi
-
-# Quick and dirty fix for SFC scenatio - will be fixed properly post-release
-if [[ ! "$DEPLOY_SCENARIO" =~ "os-odl_l2-sfc" ]]; then
     exit 0
 fi
-
-echo
-echo "SFC Scenario is deployed"
-echo
-
-# The stuff below is here temporarily and will be fixed once the release is out
-# The stuff below is here temporarily and will be fixed once the release is out
-export FUEL_MASTER_IP=10.20.0.2
-export TACKER_SCRIPT_URL="https://git.opnfv.org/cgit/fuel/plain/prototypes/sfc_tacker/poc.tacker-up.sh?h=${GIT_BRANCH#*/}"
-export CONTROLLER_NODE_IP=$(sshpass -pr00tme /usr/bin/ssh -o UserKnownHostsFile=/dev/null \
-    -o StrictHostKeyChecking=no root@$FUEL_MASTER_IP 'fuel node list' | \
-    grep controller | head -1 | cut -d'|' -f5)
-
-# we can't do much if we do not have the controller IP
-if [[ ! "$CONTROLLER_NODE_IP" =~ "10.20.0" ]]; then
-    echo "Unable to retrieve controller IP"
-    exit 1
-fi
-
-echo
-echo "Copying and executing poc.tacker-up.sh script on controller node $CONTROLLER_NODE_IP"
-echo
-
-expect << END
-spawn /usr/bin/ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -l root $::env(FUEL_MASTER_IP)
-expect {
-  -re ".*sword.*" {
-    exp_send "r00tme\r"
-  }
-}
-expect "# "
-send "/usr/bin/ssh -l root $::env(CONTROLLER_NODE_IP)\r"
-expect "# "
-send "PS1=\"tacker_poc> \"\r"
-expect -re {tacker_poc> $}
-send "sudo apt-get install -y git\r"
-expect -re {tacker_poc> $}
-sleep 10
-send "/bin/mkdir -p /root/sfc-poc && cd /root/sfc-poc\r"
-expect -re {tacker_poc> $}
-send "git clone https://gerrit.opnfv.org/gerrit/fuel && cd fuel\r"
-expect -re {tacker_poc> $}
-send "/bin/bash /root/sfc-poc/fuel/prototypes/sfc_tacker/poc.tacker-up.sh\r"
-expect -re {tacker_poc> $}
-send "exit\r"
-expect "Connection to $::env(CONTROLLER_NODE_IP) closed. "
-send "exit\r"
-expect "Connection to $::env(FUEL_MASTER_IP) closed. "
-END
diff --git a/jjb/infra/bifrost-verify-jobs.yml b/jjb/infra/bifrost-verify-jobs.yml
index b117b32..17796a8 100644 (file)
                 file-paths:
                   - compare-type: ANT
                     pattern: 'prototypes/bifrost/**'
+                  - compare-type: ANT
+                    pattern: 'jjb/infra/**'
             readable-message: true
diff --git a/jjb/infra/bifrost-verify.sh b/jjb/infra/bifrost-verify.sh
index 759b50b..f7920a3 100755 (executable)
@@ -18,6 +18,7 @@ function fix_ownership() {
         echo "Not running as part of Jenkins. Handle the logs manually."
     else
         sudo chown -R jenkins:jenkins $WORKSPACE
+        sudo chown -R jenkins:jenkins ${HOME}/.cache
     fi
 }
 
@@ -30,6 +31,9 @@ fi
 # remove previously cloned repos
 sudo /bin/rm -rf /opt/bifrost /opt/puppet-infracloud /opt/stack /opt/releng
 
+# Fix up permissions
+fix_ownership
+
 # clone all the repos first and checkout the patch afterwards
 sudo git clone https://git.openstack.org/openstack/bifrost /opt/bifrost
 sudo git clone https://git.openstack.org/openstack-infra/puppet-infracloud /opt/puppet-infracloud
diff --git a/jjb/opnfv/opnfv-docker.sh b/jjb/opnfv/opnfv-docker.sh
index f56de7f..e637f7b 100644 (file)
@@ -67,22 +67,9 @@ fi
 
 
 # cd to directory where Dockerfile is located
-if [[ "$DOCKER_REPO_NAME" == "opnfv/bottlenecks" ]]; then
-    cd $WORKSPACE/ci/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/cperf" ]]; then
-    cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/dovetail" ]]; then
-    cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/functest" ]]; then
-    cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/qtip" ]]; then
-    cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/storperf" ]]; then
-    cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/yardstick" ]]; then
-    cd $WORKSPACE/tests/ci/docker/yardstick-ci
-else
-    echo "ERROR: DOCKER_REPO_NAME parameter not valid: $DOCKER_REPO_NAME"
+cd $WORKSPACE/docker
+if [ ! -f ./Dockerfile ]; then
+    echo "ERROR: Dockerfile not found."
     exit 1
 fi
 
diff --git a/jjb/opnfv/opnfv-utils.yml b/jjb/opnfv/opnfv-utils.yml
new file mode 100644 (file)
index 0000000..94a99d4
--- /dev/null
@@ -0,0 +1,40 @@
+- project:
+
+    name: opnfv-utils
+
+    jobs:
+        - 'prune-docker-images'
+########################
+# job templates
+########################
+- job-template:
+    name: 'prune-docker-images'
+
+    disabled: false
+
+    concurrent: true
+
+    parameters:
+        - node:
+            name: SLAVE_NAME
+            description: Slaves to prune docker images
+            default-slaves:
+                - arm-build1
+                - arm-build2
+                - ericsson-build4
+                - ericsson-build5
+                - lf-build2
+            allowed-multiselect: true
+            ignore-offline-nodes: true
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - shell: |
+            #!/bin/bash
+
+            (docker ps -q; docker ps -aq) | sort | uniq -u | xargs --no-run-if-empty docker rm
+            docker images -f dangling=true -q | xargs --no-run-if-empty docker rmi
+
+    triggers:
+        - timed: '@midnight'
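
Note: the pipeline "(docker ps -q; docker ps -aq) | sort | uniq -u" keeps the
container IDs that appear only once across the two listings, i.e. containers
that exist but are not running; those are what get passed to "docker rm". A
small set-based illustration in Python (container IDs are made up):

    running = {"1a2b3c", "4d5e6f"}                       # docker ps -q
    all_ids = {"1a2b3c", "4d5e6f", "7a8b9c", "0d1e2f"}   # docker ps -aq

    stopped = all_ids - running        # IDs listed only by "docker ps -aq"
    print(sorted(stopped))             # ['0d1e2f', '7a8b9c'] -> candidates for removal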
diff --git a/jjb/releng/releng-ci-jobs.yml b/jjb/releng/releng-ci-jobs.yml
index ac323a3..2d88449 100644 (file)
@@ -2,7 +2,6 @@
     name: builder-jobs
     jobs:
         - 'builder-verify-jjb'
-        - 'builder-sandbox'
         - 'builder-merge'
         - 'artifacts-api'
 
                 git pull
                 jenkins-jobs update -r --delete-old jjb/
 
-- job-template:
-    name: 'builder-sandbox'
-
-    # Upload all jjb jobs to sandbox instance, excluding jobs jjb
-    # builder jobs
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-        - gerrit-parameter:
-            branch: 'master'
-
-    scm:
-        - gerrit-trigger-scm:
-            credentials-id: '{ssh-credentials}'
-            refspec: ''
-            choosing-strategy: 'default'
-
-    triggers:
-        - gerrit:
-            trigger-on:
-                - change-merged-event
-                - comment-added-contains-event:
-                    comment-contains-value: 'remerge'
-            projects:
-              - project-compare-type: 'ANT'
-                project-pattern: 'releng'
-                branches:
-                    - branch-compare-type: 'ANT'
-                      branch-pattern: '**/sandbox'
-                file-paths:
-                    - compare-type: ANT
-                      pattern: jjb/**
-                    - compare-type: ANT
-                      pattern: utils/**
-
-    builders:
-        - shell:
-            !include-raw: verify-releng.sh
-        - shell: |
-                #!/bin/bash
-                source /opt/virtualenv/jenkins-job-builder/bin/activate
-                cd /opt/jenkins-ci/releng
-                git pull
-                cp /etc/jenkins_jobs/jenkins_jobs.ini jenkins_sandbox.ini
-                sed -i 's/url=.*/url=https:\/\/sandbox.opnfv.org\//g' jenkins_sandbox.ini
-                jenkins-jobs --conf jenkins_sandbox.ini update -r -x jjb/releng --delete-old jjb
-                rm -f jenkins_sandbox.ini
-
 - job-template:
     name: 'artifacts-api'
 
diff --git a/prototypes/bifrost/playbooks/roles/bifrost-prepare-for-test-dynamic/defaults/main.yml b/prototypes/bifrost/playbooks/roles/bifrost-prepare-for-test-dynamic/defaults/main.yml
deleted file mode 100644 (file)
index 69eb787..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
----
-node_ssh_pause: 10
-wait_timeout: 1900
-multinode_testing: false
diff --git a/prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml b/prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml
index b4dffdc..d650f10 100644 (file)
   roles:
     - role: ironic-enroll-dynamic
     - { role: ironic-inspect-node, when: inspect_nodes | default('false') | bool == true }
+- hosts: baremetal
+  name: "Create configuration drive files"
+  become: no
+  connection: local
+  roles:
+    - role: bifrost-configdrives-dynamic
 - hosts: baremetal
   vars:
     multinode_testing: "{{ inventory_dhcp | bool == true }}"
-  name: "Create configuration drive files and deploy machines."
+  name: "Deploy machines."
   become: no
   connection: local
+  serial: 1
   roles:
-    - role: bifrost-configdrives-dynamic
     - role: bifrost-deploy-nodes-dynamic
     - role: bifrost-prepare-for-test-dynamic
-      serial: 1
diff --git a/prototypes/bifrost/scripts/test-bifrost-deployment.sh b/prototypes/bifrost/scripts/test-bifrost-deployment.sh
index fb49afc..773697e 100755 (executable)
@@ -18,6 +18,7 @@ ENABLE_VENV="false"
 USE_DHCP="false"
 USE_VENV="false"
 BUILD_IMAGE=true
+PROVISION_WAIT_TIMEOUT=${PROVISION_WAIT_TIMEOUT:-2400}
 
 # Set defaults for ansible command-line options to drive the different
 # tests.
@@ -114,7 +115,8 @@ ${ANSIBLE} -vvvv \
     -e download_ipa=${DOWNLOAD_IPA} \
     -e create_ipa_image=${CREATE_IPA_IMAGE} \
     -e write_interfaces_file=${WRITE_INTERFACES_FILE} \
-    -e ipv4_gateway=192.168.122.1
+    -e ipv4_gateway=192.168.122.1 \
+    -e wait_timeout=${PROVISION_WAIT_TIMEOUT}
 EXITCODE=$?
 
 if [ $EXITCODE != 0 ]; then
diff --git a/prototypes/puppet-infracloud/hiera/common.yaml b/prototypes/puppet-infracloud/hiera/common.yaml
index 1fcde2f..6096b9c 100644 (file)
@@ -76,5 +76,95 @@ hosts:
   compute00.opnfvlocal:
     ip: 192.168.122.4
 
-# br-eth0 for debian, br_ens3 for RHEL
-bridge_name: br-eth0
+# settings for bifrost
+bridge_name: br_opnfv
+ironic_db_password: pass
+bifrost_mysql_password: pass
+bifrost_ssh_private_key: |
+  -----BEGIN RSA PRIVATE KEY-----
+  MIIEowIBAAKCAQEAvwr2LbfJQuKZDOQse+DQHX84c9LCHvQfy0pu15JkiLM5dUtx
+  hLr/5fxSzblubS4WkNZVsGTtUp51f8yoQyltqquGlVfUf0GO+PCLaRp0arhli0Rl
+  sAGatI12amnrVap82jINiKQRO+UnF97z2hiB35Zxko4jSaPOOiL48DEKowZHL2Ja
+  jjUt6dXcaNotXNaKZpcxz92gdZhFOPU8BrJ/mI9k9u6QI/4qLG/WzW4frHLigA1t
+  OrZ3Nnu3tloWNsS1lh71KRfEv46VD8tCAZfXqJtjdH4Z4AUO++CLF/K4zXhIoFqU
+  Wf8aS64YzoaAfnJ+jUwKs92dVjuFtbEk+t2YLQIDAQABAoIBAQCAr++YaD6oUV9r
+  caANaiiGVhY+3u9oTmXEWMVFbRVPh/riaglzsUuDLm7QqWIbJXqJ4fcitTmv95GK
+  nt+RLizzVEt5+gnoFs8qHU6rY+ibos6z+0TMRKhjiw8DK4oc0JT9nc3EB1CcmgW1
+  bLeyZ+PEKuEiKaDXkAHw43HwyfgyS3Lc90TSaLj3P7egsBuhx1Yy+wgyiPQ/bF0b
+  OBLHHK+nwYLGAq25n/+zA7XAndc2OQd4KzUJcvjyND+IMYnzEbeFH36UcFqbvgGu
+  nR55yIrCxsxcJhhT2slMNtg/xCmo3Jzz1kNBtwbNBik4/5Lkckny0xhQl+h7vz9U
+  +cKjwfK5AoGBAPSy/JHMeQ5/rzbA5LAZhVa/Yc4B5datkwLNg6mh4CzMabJs8AKd
+  de05XB/Nq6Hfp8Aa7zLt2GIb3iqF6w/y+j8YAXS2KQD8/HDs2/9Oxr512kfssk5D
+  dcpTqeIFetzM9pqnctVXBGlbz0QLeL+lT3kXY00+CBm6LjEv8dsPxZr3AoGBAMfd
+  nDnTjUVZ+sRpTBDM3MhKLMETxNWNDaozL+SgpYQwtKlSTfQVdFcM66a8qCFjQFsc
+  /6AjL0bjCA5u859IoQ4ValD0vgkyLHdEN0P1Grf3MK8kjOW1A1s1i2FY6U0z9AM2
+  zsUCA9bB5A9wwxwofoa8VkaDpVSMITbakVoNxJj7AoGAImcft2fmBTHScoJAJLoR
+  0xZpK8t8gug4aQZ34luN5v5+RcWnINb+g3GzEA2cec+2B/5BbwmdiH2eiJ/3YnCo
+  2kIHwl7x+N+Ypk/GxmhO7Owo2j/e+b3mS6HjmpFmqrBuY2PzcyceyalMxKZQPbGC
+  MOYm4e88uFFCuUuiV0gqYhUCgYBmSFhCE6yxeCnoSEbgNicq7SLYMIjEDOqYVpfE
+  9h2ed9qM6IzyQ+SFBBy4+MVGSOfPeRis2DTCnz8pO8i7lEyvy2/cPFPgmue8pZFu
+  2smwqfUlPJxKlgdArzdEO18x3kubNXo9whk614EiEcAX8fVGeK3iak665Pe+fb5z
+  Cqa47wKBgDp3/dgtMneoePKNefy4a9vp5y4XKviC6GOrr0xpEM2ptZ+I7mUJcACN
+  KbaW0dPgtS1cApelmF73IAJRYbKMW7lQzql61IoGw4pGTIMPKerqRs/hTWYPZiSG
+  QHWf3iTV5uQr6cSRoUgkAUHVw2KTGad41RAhDp352iakZuNNBFga
+  -----END RSA PRIVATE KEY-----
+bifrost_ssh_public_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC/CvYtt8lC4pkM5Cx74NAdfzhz0sIe9B/LSm7XkmSIszl1S3GEuv/l/FLNuW5tLhaQ1lWwZO1SnnV/zKhDKW2qq4aVV9R/QY748ItpGnRquGWLRGWwAZq0jXZqaetVqnzaMg2IpBE75ScX3vPaGIHflnGSjiNJo846IvjwMQqjBkcvYlqONS3p1dxo2i1c1opmlzHP3aB1mEU49TwGsn+Yj2T27pAj/iosb9bNbh+scuKADW06tnc2e7e2WhY2xLWWHvUpF8S/jpUPy0IBl9eom2N0fhngBQ774IsX8rjNeEigWpRZ/xpLrhjOhoB+cn6NTAqz3Z1WO4W1sST63Zgt yolanda@trasto
+infracloud_vlan: 415
+infracloud_gateway_ip: 172.30.13.1
+default_network_interface: eno3
+dhcp_static_mask: 255.255.255.128
+dhcp_pool_start: 10.20.0.130
+dhcp_pool_end: 10.20.0.254
+network_interface: eno1
+ipv4_nameserver: 8.8.8.8
+ipv4_subnet_mask: 255.255.255.0
+ipv4_gateway: 172.30.13.1
+ironic_inventory:
+  controller00.opnfvlocal:
+    driver: agent_ipmitool
+    driver_info:
+      power:
+        ipmi_address: 172.30.8.90
+        ipmi_username: admin
+    provisioning_ipv4_address: 10.20.0.130
+    ipv4_address: 172.30.13.90
+    ansible_ssh_host: 172.30.13.90
+    ipv4_gateway: 172.30.13.1
+    ipv4_interface_mac: 00:1e:67:f9:9b:35
+    name: controller00.opnfvlocal
+    nics:
+    - mac: a4:bf:01:01:a9:fc
+    - mac: 00:1e:67:f6:9b:35
+    properties:
+      cpu_arch: x86_64
+      cpus: '44'
+      disk_size: '1800'
+      ram: '65536'
+    uuid: 00a22849-2442-e511-906e-0012795d96dd
+  compute00.opnfvlocal:
+    driver: agent_ipmitool
+    driver_info:
+      power:
+        ipmi_address: 172.30.8.91
+        ipmi_username: admin
+    provisioning_ipv4_address: 10.20.0.131
+    ipv4_address: 172.30.13.91
+    ansible_ssh_host: 172.30.13.91
+    ipv4_gateway: 172.30.13.1
+    ipv4_subnet_mask: 255.255.255.0
+    name: compute00.opnfvlocal
+    nics:
+    - mac: a4:bf:01:01:a9:d4
+    - mac: 00:1e:67:f6:9b:37
+    properties:
+      cpu_arch: x86_64
+      cpus: '44'
+      disk_size: '1800'
+      ram: '65536'
+    uuid: 0051e926-f242-e511-906e-0012795d96dd
+ipmi_passwords: {'172.30.8.90': 'octopus', '172.30.8.91': 'octopus'}
+neutron_subnet_cidr: '192.168.122.0/24'
+neutron_subnet_gateway: '192.168.122.1'
+neutron_subnet_allocation_pools:
+  - 'start=192.168.122.50,end=192.168.122.254'
+virt_type: 'qemu'
diff --git a/prototypes/puppet-infracloud/manifests/site.pp b/prototypes/puppet-infracloud/manifests/site.pp
index 31c4576..f09bfe2 100644 (file)
@@ -34,11 +34,9 @@ node 'controller00.opnfvlocal' {
     ssl_cert_file_contents           => hiera('ssl_cert_file_contents'),
     br_name                          => hiera('bridge_name'),
     controller_public_address        => $::fqdn,
-    neutron_subnet_cidr              => '192.168.122.0/24',
-    neutron_subnet_gateway           => '192.168.122.1',
-    neutron_subnet_allocation_pools  => [
-                                          'start=192.168.122.50,end=192.168.122.254',
-                                        ],
+    neutron_subnet_cidr              => hiera('neutron_subnet_cidr'),
+    neutron_subnet_gateway           => hiera('neutron_subnet_gateway'),
+    neutron_subnet_allocation_pools  => hiera('neutron_subnet_allocation_pools'),
     opnfv_password                   => hiera('opnfv_password'),
   }
 }
@@ -61,7 +59,7 @@ node 'compute00.opnfvlocal' {
     ssl_key_file_contents            => hiera('ssl_key_file_contents'),
     br_name                          => hiera('bridge_name'),
     controller_public_address        => 'controller00.opnfvlocal',
-    virt_type                        => 'qemu',
+    virt_type                        => hiera('virt_type'),
   }
 }
 
@@ -73,7 +71,7 @@ node 'jumphost.opnfvlocal' {
   }
 }
 
-node 'baremetal.opnfvlocal' {
+node 'baremetal.opnfvlocal', 'lfpod5-jumpserver' {
   class { '::opnfv::server':
     iptables_public_udp_ports => [67, 69],
     sysadmins                 => hiera('sysadmins', []),
@@ -91,10 +89,12 @@ node 'baremetal.opnfvlocal' {
     vlan                      => hiera('infracloud_vlan'),
     gateway_ip                => hiera('infracloud_gateway_ip'),
     default_network_interface => hiera('default_network_interface'),
+    dhcp_static_mask          => hiera('dhcp_static_mask'),
     dhcp_pool_start           => hiera('dhcp_pool_start'),
     dhcp_pool_end             => hiera('dhcp_pool_end'),
     network_interface         => hiera('network_interface'),
     ipv4_nameserver           => hiera('ipv4_nameserver'),
     ipv4_subnet_mask          => hiera('ipv4_subnet_mask'),
+    bridge_name               => hiera('bridge_name'),
   }
 }
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index 4b710ca..9ef4298 100755 (executable)
@@ -151,6 +151,7 @@ fi
 usage() {
     cat << EOF
 
+** This file must be copied to the Jenkins home directory to work **
 jenkins-jnlp-connect.sh configures monit to keep slave connection up
 Checks for new versions of slave.jar
 run as root to create pid directory and create monit config.
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/main.py b/utils/test/dashboard/dashboard/elastic2kibana/main.py
index 95f758e..ae5cbe8 100644 (file)
@@ -27,10 +27,22 @@ es_creds = CONF.elastic_creds
 
 _installers = {'fuel', 'apex', 'compass', 'joid'}
 
+env = Environment(loader=PackageLoader('elastic2kibana', 'templates'))
+env.filters['jsonify'] = json.dumps
 
-class KibanaDashboard(dict):
+
+def dumps(obj, items):
+    for key in items:
+        obj[key] = json.dumps(obj[key])
+
+
+def dumps_2depth(obj, key1, key2):
+    obj[key1][key2] = json.dumps(obj[key1][key2])
+
+
+class Dashboard(dict):
     def __init__(self, project_name, case_name, family, installer, pod, scenarios, visualization):
-        super(KibanaDashboard, self).__init__()
+        super(Dashboard, self).__init__()
         self.project_name = project_name
         self.case_name = case_name
         self.family = family
@@ -46,14 +58,14 @@ class KibanaDashboard(dict):
 
     def _create_visualizations(self):
         for scenario in self.scenarios:
-            self._kibana_visualizations.append(KibanaVisualization(self.project_name,
-                                                                   self.case_name,
-                                                                   self.installer,
-                                                                   self.pod,
-                                                                   scenario,
-                                                                   self.visualization))
+            self._kibana_visualizations.append(Visualization(self.project_name,
+                                                             self.case_name,
+                                                             self.installer,
+                                                             self.pod,
+                                                             scenario,
+                                                             self.visualization))
 
-        self._visualization_title = self._kibana_visualizations[0].vis_title
+        self._visualization_title = self._kibana_visualizations[0].vis_state_title
 
     def _publish_visualizations(self):
         for visualization in self._kibana_visualizations:
@@ -62,113 +74,39 @@ class KibanaDashboard(dict):
             # logger.error("_publish_visualization: %s" % visualization)
             elastic_access.publish_docs(url, es_creds, visualization)
 
-    def _construct_panels(self):
-        size_x = 6
-        size_y = 3
-        max_columns = 7
-        column = 1
-        row = 1
-        panel_index = 1
-        panels_json = []
-        for visualization in self._kibana_visualizations:
-            panels_json.append({
-                "id": visualization.id,
-                "type": 'visualization',
-                "panelIndex": panel_index,
-                "size_x": size_x,
-                "size_y": size_y,
-                "col": column,
-                "row": row
-            })
-            panel_index += 1
-            column += size_x
-            if column > max_columns:
-                column = 1
-                row += size_y
-        return json.dumps(panels_json, separators=(',', ':'))
-
     def _create(self):
-        self['title'] = '{} {} {} {} {}'.format(self.project_name,
-                                                self.case_name,
-                                                self.installer,
-                                                self._visualization_title,
-                                                self.pod)
-        self.id = self['title'].replace(' ', '-').replace('/', '-')
-
-        self['hits'] = 0
-        self['description'] = "Kibana dashboard for project_name '{}', case_name '{}', installer '{}', data '{}' and" \
-                              " pod '{}'".format(self.project_name,
-                                                 self.case_name,
-                                                 self.installer,
-                                                 self._visualization_title,
-                                                 self.pod)
-        self['panelsJSON'] = self._construct_panels()
-        self['optionsJSON'] = json.dumps({
-            "darkTheme": False
-        },
-            separators=(',', ':'))
-        self['uiStateJSON'] = "{}"
-        self['scenario'] = 1
-        self['timeRestore'] = False
-        self['kibanaSavedObjectMeta'] = {
-            'searchSourceJSON': json.dumps({
-                "filter": [
-                    {
-                        "query": {
-                            "query_string": {
-                                "query": "*",
-                                "analyze_wildcard": True
-                            }
-                        }
-                    }
-                ]
+        db = {
+            "query": {
+                "project_name": self.project_name,
+                "case_name": self.case_name,
+                "installer": self.installer,
+                "metric": self._visualization_title,
+                "pod": self.pod
             },
-                separators=(',', ':'))
+            "test_family": self.family,
+            "ids": [visualization.id for visualization in self._kibana_visualizations]
         }
+        template = env.get_template('dashboard.json')
+        self.dashboard = json.loads(template.render(db=db))
+        dumps(self.dashboard, ['description', 'uiStateJSON', 'panelsJSON','optionsJSON'])
+        dumps_2depth(self.dashboard, 'kibanaSavedObjectMeta', 'searchSourceJSON')
+        self.id = self.dashboard['title'].replace(' ', '-').replace('/', '-')
 
-        label = self.case_name
-        if 'label' in self.visualization:
-            label += " %s" % self.visualization.get('label')
-        label += " %s" % self.visualization.get('name')
-        self['metadata'] = {
-            "label": label,
-            "test_family": self.family
-        }
 
     def _publish(self):
         url = urlparse.urljoin(base_elastic_url, '/.kibana/dashboard/{}'.format(self.id))
         logger.debug("publishing dashboard '{}'".format(url))
-        elastic_access.publish_docs(url, es_creds, self)
+        #logger.error("dashboard: %s" % json.dumps(self.dashboard))
+        elastic_access.publish_docs(url, es_creds, self.dashboard)
 
     def publish(self):
         self._publish_visualizations()
         self._publish()
 
 
-class KibanaSearchSourceJSON(dict):
-    """
-    "filter": [
-                    {"match": {"installer": {"query": installer, "type": "phrase"}}},
-                    {"match": {"project_name": {"query": project_name, "type": "phrase"}}},
-                    {"match": {"case_name": {"query": case_name, "type": "phrase"}}}
-                ]
-    """
-
-    def __init__(self, project_name, case_name, installer, pod, scenario):
-        super(KibanaSearchSourceJSON, self).__init__()
-        self["filter"] = [
-            {"match": {"project_name": {"query": project_name, "type": "phrase"}}},
-            {"match": {"case_name": {"query": case_name, "type": "phrase"}}},
-            {"match": {"installer": {"query": installer, "type": "phrase"}}},
-            {"match": {"scenario": {"query": scenario, "type": "phrase"}}}
-        ]
-        if pod != 'all':
-            self["filter"].append({"match": {"pod_name": {"query": pod, "type": "phrase"}}})
-
-
-class VisualizationBuilder(object):
+class VisStateBuilder(object):
     def __init__(self, visualization):
-        super(VisualizationBuilder, self).__init__()
+        super(VisStateBuilder, self).__init__()
         self.visualization = visualization
 
     def build(self):
@@ -184,14 +122,12 @@ class VisualizationBuilder(object):
             })
             index += 1
 
-        env = Environment(loader=PackageLoader('elastic2kibana', 'templates'))
-        env.filters['jsonify'] = json.dumps
         template = env.get_template('{}.json'.format(name))
         vis = template.render(aggs=aggs)
         return json.loads(vis)
 
 
-class KibanaVisualization(dict):
+class Visualization(object):
     def __init__(self, project_name, case_name, installer, pod, scenario, visualization):
         """
         We need two things
@@ -207,32 +143,28 @@ class KibanaVisualization(dict):
 
         :return:
         """
-        super(KibanaVisualization, self).__init__()
-        vis = VisualizationBuilder(visualization).build()
-        self.vis_title = vis['title']
-        self['title'] = '{} {} {} {} {} {}'.format(project_name,
-                                                   case_name,
-                                                   self.vis_title,
-                                                   installer,
-                                                   pod,
-                                                   scenario)
-        self.id = self['title'].replace(' ', '-').replace('/', '-')
-        self['visState'] = json.dumps(vis, separators=(',', ':'))
-        self['uiStateJSON'] = "{}"
-        self['description'] = "Kibana visualization for project_name '{}', case_name '{}', metric '{}', installer '{}'," \
-                              " pod '{}' and scenario '{}'".format(project_name,
-                                                                   case_name,
-                                                                   self.vis_title,
-                                                                   installer,
-                                                                   pod,
-                                                                   scenario)
-        self['scenario'] = 1
-        self['kibanaSavedObjectMeta'] = {"searchSourceJSON": json.dumps(KibanaSearchSourceJSON(project_name,
-                                                                                               case_name,
-                                                                                               installer,
-                                                                                               pod,
-                                                                                               scenario),
-                                                                        separators=(',', ':'))}
+        super(Visualization, self).__init__()
+        visState = VisStateBuilder(visualization).build()
+        self.vis_state_title = visState['title']
+
+        vis = {
+            "visState": json.dumps(visState),
+            "filters": {
+                "project_name": project_name,
+                "case_name": case_name,
+                "installer": installer,
+                "metric": self.vis_state_title,
+                "pod_name": pod,
+                "scenario": scenario
+            }
+        }
+
+        template = env.get_template('visualization.json')
+
+        self.visualization = json.loads(template.render(vis=vis))
+        dumps(self.visualization, ['visState', 'description', 'uiStateJSON'])
+        dumps_2depth(self.visualization, 'kibanaSavedObjectMeta', 'searchSourceJSON')
+        self.id = self.visualization['title'].replace(' ', '-').replace('/', '-')
 
 
 def _get_pods_and_scenarios(project_name, case_name, installer):
@@ -252,7 +184,8 @@ def _get_pods_and_scenarios(project_name, case_name, installer):
     })
 
     elastic_data = elastic_access.get_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
-                                                   es_creds, query_json)
+                                           es_creds,
+                                           query_json)
 
     pods_and_scenarios = {}
 
@@ -290,20 +223,20 @@ def construct_dashboards():
                 pods_and_scenarios = _get_pods_and_scenarios(project, case_name, installer)
                 for visualization in visualizations:
                     for pod, scenarios in pods_and_scenarios.iteritems():
-                        kibana_dashboards.append(KibanaDashboard(project,
-                                                                 case_name,
-                                                                 family,
-                                                                 installer,
-                                                                 pod,
-                                                                 scenarios,
-                                                                 visualization))
+                        kibana_dashboards.append(Dashboard(project,
+                                                           case_name,
+                                                           family,
+                                                           installer,
+                                                           pod,
+                                                           scenarios,
+                                                           visualization))
     return kibana_dashboards
 
 
 def generate_js_inputs(js_file_path, kibana_url, dashboards):
     js_dict = {}
     for dashboard in dashboards:
-        dashboard_meta = dashboard['metadata']
+        dashboard_meta = dashboard.dashboard['metadata']
         test_family = dashboard_meta['test_family']
         test_label = dashboard_meta['label']
 
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/templates/dashboard.json b/utils/test/dashboard/dashboard/elastic2kibana/templates/dashboard.json
new file mode 100644 (file)
index 0000000..cc80995
--- /dev/null
@@ -0,0 +1,61 @@
+{% set db = db|default({}) -%}
+
+
+{% macro calc_col(index) -%}
+  {% if index is divisibleby 2 %}
+    7
+  {% else %}
+    1
+  {% endif %}
+{%- endmacro %}
+
+{% macro calc_row(index) -%}
+{% set num = (index - 1)//2 %}
+  {{1 + num * 3}}
+{%- endmacro %}
+
+{
+  "description": "Kibana dashboard for {{db.query}}",
+  "hits": 0,
+  "kibanaSavedObjectMeta": {
+    "searchSourceJSON": {
+      "filter": [
+        {
+          "query": {
+            "query_string": {
+              "analyze_wildcard": true,
+              "query": "*"
+            }
+          }
+        }
+      ]
+    }
+  },
+  "metadata": {
+    "label": "{{db.query.case_name}} {{db.query.metric}}",
+    "test_family": "{{db.test_family}}"
+  },
+  "optionsJSON": {
+    "darkTheme": false
+  },
+  "panelsJSON": [
+    {% for id in db.ids %}
+    {
+      "col": {{calc_col(loop.index)}},
+      "id": "{{id}}",
+      "panelIndex": {{loop.index}},
+      "row": {{calc_row(loop.index)}},
+      "size_x": 6,
+      "size_y": 3,
+      "type": "visualization"
+    }
+    {% if not loop.last %}
+    ,
+    {% endif %}
+    {% endfor %}
+  ],
+  "scenario": 1,
+  "timeRestore": false,
+  "title": "{{db.query.project_name}} {{db.query.case_name}} {{db.query.installer}} {{db.query.metric}} {{db.query.pod}}",
+  "uiStateJSON": {}
+}
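
Note: the calc_col/calc_row macros above lay the visualization panels out on a
two-column grid, each panel 6 units wide and 3 tall. The same arithmetic in
Python, for illustration only:

    def calc_col(index):
        # Even 1-based panel indexes go to grid column 7, odd ones to column 1.
        return 7 if index % 2 == 0 else 1

    def calc_row(index):
        # Two panels per row; each row of panels is 3 grid units tall.
        return 1 + ((index - 1) // 2) * 3

    for i in range(1, 7):
        print(i, (calc_col(i), calc_row(i)))
    # 1 (1, 1)   2 (7, 1)   3 (1, 4)   4 (7, 4)   5 (1, 7)   6 (7, 7)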
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/templates/visualization.json b/utils/test/dashboard/dashboard/elastic2kibana/templates/visualization.json
new file mode 100644 (file)
index 0000000..d51d417
--- /dev/null
@@ -0,0 +1,32 @@
+{% set vis = vis|default({}) -%}
+
+
+{
+  "description": "Kibana visualization for {{ vis.filters }}",
+  "kibanaSavedObjectMeta": {
+    "searchSourceJSON": {
+      "filter": [
+        {% for key, value in vis.filters.iteritems() if key != "metric" %}
+        {% if not (key == "pod_name" and value == "all") %}
+        {
+          "match": {
+            "{{ key }}": {
+              "query": "{{ value }}",
+              "type": "phrase"
+            }
+          }
+        }
+        {% if not loop.last %}
+        ,
+        {% endif %}
+        {% endif %}
+        {% endfor %}
+      ]
+    }
+  },
+  "scenario": 1,
+  "title": "{{vis.filters.project_name}} {{vis.filters.case_name}} {{vis.filters.installer}} {{vis.filters.metric}} {{vis.filters.pod_name}} {{vis.filters.scenario}}",
+  "uiStateJSON": {},
+  "visState": {{ vis.visState }}
+}
+
diff --git a/utils/test/reporting/functest/default.css b/utils/test/reporting/css/default.css
similarity index 76%
rename from utils/test/reporting/functest/default.css
rename to utils/test/reporting/css/default.css
index 897c3b1..a9fa69d 100644 (file)
@@ -75,3 +75,36 @@ h2 {
     font-weight: bold;
     color:rgb(128, 128, 128)
 }
+
+#power-gauge g.arc {
+       fill: steelblue;
+}
+
+#power-gauge g.pointer {
+       fill: #e85116;
+       stroke: #b64011;
+}
+
+#power-gauge g.label text {
+       text-anchor: middle;
+       font-size: 14px;
+       font-weight: bold;
+       fill: #666;
+}
+
+#power-gauge path {
+
+}
+
+.axis path,
+.axis line {
+  fill: none;
+  stroke: #000;
+  shape-rendering: crispEdges;
+}
+
+.dot {
+  fill: steelblue;
+  stroke: steelblue;
+  stroke-width: 1.5px;
+}
diff --git a/utils/test/reporting/functest/reporting-status.py b/utils/test/reporting/functest/reporting-status.py
index 90699bd..9df6996 100755 (executable)
@@ -184,8 +184,13 @@ for version in conf.versions:
                 scenario_criteria = conf.MAX_SCENARIO_CRITERIA
 
             s_score = str(scenario_score) + "/" + str(scenario_criteria)
-            s_score_percent = float(
+            s_score_percent = 0.0
+            try:
+                s_score_percent = float(
                 scenario_score) / float(scenario_criteria) * 100
+            except:
+                logger.error("cannot calculate the score percent")
+
             s_status = "KO"
             if scenario_score < scenario_criteria:
                 logger.info(">>>> scenario not OK, score = %s/%s" %
diff --git a/utils/test/reporting/functest/reportingConf.py b/utils/test/reporting/functest/reportingConf.py
index e1c4b61..1c9a2ac 100644 (file)
@@ -13,7 +13,6 @@ installers = ["apex", "compass", "fuel", "joid"]
 # list of test cases declared in testcases.yaml but that must not be
 # taken into account for the scoring
 blacklist = ["ovno", "security_scan"]
-# versions = ["brahmaputra", "master"]
 versions = ["master", "colorado"]
 PERIOD = 10
 MAX_SCENARIO_CRITERIA = 50
diff --git a/utils/test/reporting/functest/template/index-status-tmpl.html b/utils/test/reporting/functest/template/index-status-tmpl.html
index 67c2349..2beb912 100644 (file)
@@ -3,17 +3,65 @@
     <meta charset="utf-8">
     <!-- Bootstrap core CSS -->
     <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet">
-    <link href="default.css" rel="stylesheet">
+    <link href="../../../css/default.css" rel="stylesheet">
     <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
     <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
-    <script type="text/javascript">
-    $(document).ready(function (){
-        $(".btn-more").click(function() {
-            $(this).hide();
-            $(this).parent().find(".panel-default").show();
+    <script type="text/javascript" src="http://d3js.org/d3.v2.min.js"></script>
+    <script type="text/javascript" src="../../../js/gauge.js"></script>
+    <script type="text/javascript" src="../../../js/trend.js"></script>
+    <script>
+    function onDocumentReady() {
+       // Gauge management
+        {% for scenario in scenario_stats.iteritems() -%}
+           var gaugeScenario{{loop.index}} = gauge('#gaugeScenario{{loop.index}}');
+        {%- endfor %}
+       
+       // assign success rate to the gauge
+       function updateReadings() {
+           {% for scenario,iteration in scenario_stats.iteritems() -%}
+               gaugeScenario{{loop.index}}.update({{scenario_results[scenario].getScorePercent()}});
+            {%- endfor %}
+       }
+       updateReadings();                                                                               
+        }
+        
+        // trend line management
+        d3.csv("./scenario_history.txt", function(data) {
+       // ***************************************
+       // Create the trend line
+      {% for scenario,iteration in scenario_stats.iteritems() -%}
+       // for scenario {{scenario}} 
+       // Filter results
+        var trend{{loop.index}} = data.filter(function(row) { 
+            return row["scenario"]=="{{scenario}}" && row["installer"]=="{{installer}}";
+       })
+       // Parse the date 
+        trend{{loop.index}}.forEach(function(d) {
+           d.date = parseDate(d.date);
+           d.score = +d.score
         });
-    })
-    </script>
+        // Draw the trend line
+        var mytrend = trend("#trend_svg{{loop.index}}",trend{{loop.index}})
+        // ****************************************
+        {%- endfor %}
+    });            
+    if ( !window.isLoaded ) {
+        window.addEventListener("load", function() {
+                       onDocumentReady();
+        }, false);
+    } else {
+       onDocumentReady();
+    }
+</script>
+<script type="text/javascript">
+$(document).ready(function (){
+    $(".btn-more").click(function() {
+       $(this).hide();
+       $(this).parent().find(".panel-default").show();
+    });
+})
+</script>
+    
   </head>
     <body>
     <div class="container">
             <div class="panel-heading"><h4><b>List of last scenarios ({{version}}) run over the last {{period}} days </b></h4></div>
                 <table class="table">
                     <tr>
-                        <th width="60%">Scenario</th>
+                        <th width="40%">Scenario</th>
                         <th width="20%">Status</th>
+                        <th width="20%">Trend</th>
                         <th width="10%">Score</th>
                         <th width="10%">Iteration</th>
                     </tr>
                         {% for scenario,iteration in scenario_stats.iteritems() -%}
                             <tr class="tr-ok">
                                 <td><a href={{scenario_results[scenario].getUrlLastRun()}}>{{scenario}}</a></td>
-                                <td>{%if scenario_results[scenario].getScorePercent() < 8.3 -%}
-                                        <img src="../../img/gauge_0.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 16.7 -%}
-                                        <img src="../../img/gauge_8.3.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 25 -%}
-                                        <img src="../../img/gauge_16.7.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 33.3 -%}
-                                        <img src="../../img/gauge_25.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 41.7 -%}
-                                        <img src="../../img/gauge_33.3.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 50 -%}
-                                        <img src="../../img/gauge_41.7.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 58.3 -%}
-                                        <img src="../../img/gauge_50.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 66.7 -%}
-                                        <img src="../../img/gauge_58.3.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 75 -%}
-                                        <img src="../../img/gauge_66.7.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 83.3 -%}
-                                        <img src="../../img/gauge_75.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 91.7 -%}
-                                        <img src="../../img/gauge_83.3.png">
-                                    {%elif scenario_results[scenario].getScorePercent() < 100 -%}
-                                        <img src="../../img/gauge_91.7.png">
-                                    {%- else -%}
-                                        <img src="../../img/gauge_100.png">
-                                {%- endif %}</td>
+                                <td><div id="gaugeScenario{{loop.index}}"></div></td>
+                                <td><div id="trend_svg{{loop.index}}"></div></td>
                                 <td>{{scenario_results[scenario].getScore()}}</td>
                                 <td>{{iteration}}</td>
                             </tr>
diff --git a/utils/test/reporting/js/gauge.js b/utils/test/reporting/js/gauge.js
new file mode 100644 (file)
index 0000000..4cad16c
--- /dev/null
@@ -0,0 +1,165 @@
+// ******************************************
+// Gauge for reporting
+// Each scenario has a score
+// We use a gauge to indicate the trust level
+// ******************************************
+var gauge = function(container) {
+  var that = {};
+  var config = {
+    size                                               : 150,
+    clipWidth                                  : 250,
+    clipHeight                                 : 100,
+    ringInset                                  : 20,
+    ringWidth                                  : 40,
+
+    pointerWidth                               : 7,
+    pointerTailLength                  : 5,
+    pointerHeadLengthPercent   : 0.8,
+
+    minValue                                   : 0,
+    maxValue                                   : 100,
+
+    minAngle                                   : -90,
+    maxAngle                                   : 90,
+
+    transitionMs                               : 4000,
+
+    majorTicks                                 : 7,
+    labelFormat                                        : d3.format(',g'),
+    labelInset                                 : 10,
+
+    arcColorFn                                 : d3.interpolateHsl(d3.rgb('#ff0000'), d3.rgb('#00ff00'))
+  };
+
+
+var range = undefined;
+var r = undefined;
+var pointerHeadLength = undefined;
+var value = 0;
+
+var svg = undefined;
+var arc = undefined;
+var scale = undefined;
+var ticks = undefined;
+var tickData = undefined;
+var pointer = undefined;
+
+var donut = d3.layout.pie();
+
+function deg2rad(deg) {
+  return deg * Math.PI / 180;
+}
+
+function newAngle(d) {
+  var ratio = scale(d);
+  var newAngle = config.minAngle + (ratio * range);
+  return newAngle;
+}
+
+function configure() {
+  range = config.maxAngle - config.minAngle;
+  r = config.size / 2;
+  pointerHeadLength = Math.round(r * config.pointerHeadLengthPercent);
+
+  // a linear scale that maps domain values to a percent from 0..1
+  scale = d3.scale.linear()
+    .range([0,1])
+    .domain([config.minValue, config.maxValue]);
+
+  ticks = scale.ticks(config.majorTicks);
+  tickData = d3.range(config.majorTicks).map(function() {return 1/config.majorTicks;});
+
+  arc = d3.svg.arc()
+    .innerRadius(r - config.ringWidth - config.ringInset)
+    .outerRadius(r - config.ringInset)
+    .startAngle(function(d, i) {
+      var ratio = d * i;
+      return deg2rad(config.minAngle + (ratio * range));
+    })
+    .endAngle(function(d, i) {
+      var ratio = d * (i+1);
+      return deg2rad(config.minAngle + (ratio * range));
+    });
+}
+that.configure = configure;
+
+function centerTranslation() {
+  return 'translate('+r +','+ r +')';
+}
+
+function isRendered() {
+  return (svg !== undefined);
+}
+that.isRendered = isRendered;
+
+function render(newValue) {
+  svg = d3.select(container)
+    .append('svg:svg')
+      .attr('class', 'gauge')
+      .attr('width', config.clipWidth)
+      .attr('height', config.clipHeight);
+
+  var centerTx = centerTranslation();
+
+  var arcs = svg.append('g')
+      .attr('class', 'arc')
+      .attr('transform', centerTx);
+
+  arcs.selectAll('path')
+      .data(tickData)
+    .enter().append('path')
+      .attr('fill', function(d, i) {
+        return config.arcColorFn(d * i);
+      })
+      .attr('d', arc);
+
+  var lg = svg.append('g')
+      .attr('class', 'label')
+      .attr('transform', centerTx);
+  lg.selectAll('text')
+      .data(ticks)
+    .enter().append('text')
+      .attr('transform', function(d) {
+        var ratio = scale(d);
+        var newAngle = config.minAngle + (ratio * range);
+        return 'rotate(' +newAngle +') translate(0,' +(config.labelInset - r) +')';
+      })
+      .text(config.labelFormat);
+
+  var lineData = [ [config.pointerWidth / 2, 0],
+          [0, -pointerHeadLength],
+          [-(config.pointerWidth / 2), 0],
+          [0, config.pointerTailLength],
+          [config.pointerWidth / 2, 0] ];
+  var pointerLine = d3.svg.line().interpolate('monotone');
+  var pg = svg.append('g').data([lineData])
+      .attr('class', 'pointer')
+      .attr('transform', centerTx);
+
+  pointer = pg.append('path')
+    .attr('d', pointerLine/*function(d) { return pointerLine(d) +'Z';}*/ )
+    .attr('transform', 'rotate(' +config.minAngle +')');
+
+  update(newValue === undefined ? 0 : newValue);
+}
+that.render = render;
+
+function update(newValue, newConfiguration) {
+  if ( newConfiguration  !== undefined) {
+    configure(newConfiguration);
+  }
+  var ratio = scale(newValue);
+  var newAngle = config.minAngle + (ratio * range);
+  pointer.transition()
+    .duration(config.transitionMs)
+    .ease('elastic')
+    .attr('transform', 'rotate(' +newAngle +')');
+}
+that.update = update;
+
+configure();
+
+render();
+
+return that;
+};
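
Note: with the defaults above, the gauge maps a score in [0, 100] linearly onto
a needle angle in [-90°, +90°]. A short Python sketch of that mapping (the values
mirror the config block; this is not part of the library):

    MIN_VALUE, MAX_VALUE = 0, 100     # score percent, as passed by updateReadings()
    MIN_ANGLE, MAX_ANGLE = -90, 90    # needle sweep in degrees

    def needle_angle(value):
        ratio = (value - MIN_VALUE) / float(MAX_VALUE - MIN_VALUE)
        return MIN_ANGLE + ratio * (MAX_ANGLE - MIN_ANGLE)

    for score in (0, 25, 50, 75, 100):
        print(score, needle_angle(score))    # -90.0, -45.0, 0.0, 45.0, 90.0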
diff --git a/utils/test/reporting/js/trend.js b/utils/test/reporting/js/trend.js
new file mode 100644 (file)
index 0000000..f242133
--- /dev/null
@@ -0,0 +1,75 @@
+// ******************************************
+// Trend line for reporting
+// based on scenario_history.txt
+// where data looks like
+// date,scenario,installer,detail,score
+// 2016-09-22 13:12,os-nosdn-fdio-noha,apex,4/12,33.0
+// 2016-09-22 13:13,os-odl_l2-fdio-noha,apex,12/15,80.0
+// 2016-09-22 13:13,os-odl_l2-sfc-noha,apex,18/24,75.0
+// .....
+// ******************************************
+// Set the dimensions of the canvas / graph
+var trend_margin = {top: 20, right: 30, bottom: 50, left: 40},
+  trend_width = 300 - trend_margin.left - trend_margin.right,
+  trend_height = 130 - trend_margin.top - trend_margin.bottom;
+
+// Parse the date / time
+var parseDate = d3.time.format("%Y-%m-%d %H:%M").parse;
+
+// Set the ranges
+var trend_x = d3.time.scale().range([0, trend_width]);
+var trend_y = d3.scale.linear().range([trend_height, 0]);
+
+// Define the axes
+var trend_xAxis = d3.svg.axis().scale(trend_x)
+  .orient("bottom").ticks(2).tickFormat(d3.time.format("%m-%d"));
+
+var trend_yAxis = d3.svg.axis().scale(trend_y)
+  .orient("left").ticks(2);
+
+// Define the line
+var valueline = d3.svg.line()
+  .x(function(d) { return trend_x(d.date); })
+  .y(function(d) { return trend_y(d.score); });
+
+var trend = function(container, trend_data) {
+
+    var trend_svg = d3.select(container)
+    .append("svg")
+      .attr("width", trend_width + trend_margin.left + trend_margin.right)
+      .attr("height", trend_height + trend_margin.top + trend_margin.bottom)
+    .append("g")
+            .attr("transform",
+              "translate(" + trend_margin.left + "," + trend_margin.top + ")");
+
+    // Scale the range of the data
+    trend_x.domain(d3.extent(trend_data, function(d) { return d.date; }));
+    trend_y.domain([0, d3.max(trend_data, function(d) { return d.score; })]);
+
+    // Add the X Axis
+    trend_svg.append("g")
+        .attr("class", "x axis")
+        .attr("transform", "translate(0," + trend_height + ")")
+        .call(trend_xAxis);
+
+    // Add the Y Axis
+    trend_svg.append("g")
+        .attr("class", "y axis")
+        .call(trend_yAxis);
+
+    // Add the valueline path.
+    trend_svg.append("path")
+        .attr("class", "line")
+        .attr("d", valueline(trend_data))
+    .attr("stroke", "steelblue")
+    .attr("fill", "none");
+    trend_svg.selectAll(".dot")
+      .data(trend_data)
+      .enter().append("circle")
+      .attr("r", 2.5)
+        .attr("cx", function(d) { return trend_x(d.date); })
+        .attr("cy", function(d) { return trend_y(d.score); });   
+
+     return trend;
+}
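
Note: trend.js filters scenario_history.txt rows by scenario and installer and
parses the date with "%Y-%m-%d %H:%M" before plotting the score. A small Python
sketch of the equivalent parsing and filtering, using sample rows in the format
shown in the header comment (illustrative only):

    import csv
    from datetime import datetime

    SAMPLE = [
        "date,scenario,installer,detail,score",
        "2016-09-22 13:12,os-nosdn-fdio-noha,apex,4/12,33.0",
        "2016-09-22 13:13,os-odl_l2-fdio-noha,apex,12/15,80.0",
    ]

    def trend_points(lines, scenario, installer):
        points = []
        for row in csv.DictReader(lines):
            if row["scenario"] == scenario and row["installer"] == installer:
                points.append((datetime.strptime(row["date"], "%Y-%m-%d %H:%M"),
                               float(row["score"])))
        return points

    print(trend_points(SAMPLE, "os-nosdn-fdio-noha", "apex"))
    # [(datetime.datetime(2016, 9, 22, 13, 12), 33.0)]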