Merge "fix files with MIT or BSD licenses in testapi"
author: Morgan Richomme <morgan.richomme@orange.com>
Mon, 17 Oct 2016 17:00:23 +0000 (17:00 +0000)
committer: Gerrit Code Review <gerrit@opnfv.org>
Mon, 17 Oct 2016 17:00:23 +0000 (17:00 +0000)
32 files changed:
jjb-sandbox/releng/releng-sandbox-jobs.yml
jjb-sandbox/releng/verify-sandbox-jobs.sh
jjb/armband/armband-ci-jobs.yml
jjb/armband/armband-deploy.sh
jjb/armband/build.sh
jjb/compass4nfv/compass-project-jobs.yml
jjb/daisy4nfv/daisy4nfv-build.sh
jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
jjb/dovetail/dovetail-ci-jobs.yml
jjb/dovetail/dovetail-project-jobs.yml
jjb/dovetail/dovetail-run.sh
jjb/fuel/fuel-build.sh
jjb/fuel/fuel-deploy.sh
jjb/infra/bifrost-verify-jobs.yml
jjb/opnfv/opnfv-utils.yml
jjb/opnfv/slave-params.yml
prototypes/puppet-infracloud/hiera/common_baremetal.yaml
prototypes/puppet-infracloud/manifests/site.pp
prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp
utils/installer-adapter/ApexAdapter.py [new file with mode: 0644]
utils/installer-adapter/CompassAdapter.py [new file with mode: 0644]
utils/installer-adapter/FuelAdapter.py [new file with mode: 0644]
utils/installer-adapter/InstallerHandler.py [new file with mode: 0644]
utils/installer-adapter/JoidAdapter.py [new file with mode: 0644]
utils/installer-adapter/RelengLogger.py [new file with mode: 0644]
utils/installer-adapter/SSHUtils.py [new file with mode: 0644]
utils/installer-adapter/__init__.py [new file with mode: 0644]
utils/installer-adapter/example.py [new file with mode: 0644]
utils/test/reporting/yardstick/reporting-status.py
utils/test/reporting/yardstick/reportingUtils.py
utils/test/reporting/yardstick/scenarioResult.py
utils/test/reporting/yardstick/template/index-status-tmpl.html

index ee35f42..aa10a43 100644 (file)
@@ -2,12 +2,13 @@
     name: 'releng-sandbox-jobs'
     jobs:
         - 'releng-deploy-sandbox'
-        - 'releng-clear-jenkins-jobs'
 
     project: 'releng'
+    node: 'releng-sandbox'
 
 - job-template:
     name: 'releng-deploy-sandbox'
+    node: '{node}'
 
     parameters:
         - project-parameter:
                 - draft-published-event
                 - comment-added-contains-event:
                     comment-contains-value: 'redeploy'
+            custom-url: '$BUILD_URL deploying to $JENKINS_URL'
+            silent-start: true
+            skip-vote:
+                successful: true
+                failed: true
+                unstable: true
+                notbuilt: true
             projects:
               - project-compare-type: 'ANT'
                 project-pattern: 'releng'
                 file-paths:
                     - compare-type: ANT
                       pattern: jjb-sandbox/**
-                    - compare-type: ANT
-                      pattern: utils/**
+
+    wrappers: ''
 
     builders:
         - shell:
             !include-raw-escape: verify-sandbox-jobs.sh
         - shell: |
-            #! /bin/bash
-            jenkins-jobs update -jjb-sandbox
+            #!/bin/bash
+            jenkins-jobs update --delete-old -r jjb/releng-defaults.yaml:jjb/releng-macros.yaml:jjb/opnfv/installer-params.yml:jjb/opnfv/slave-params.yml:jjb-sandbox
 
     publishers:
         - archive-artifacts:
             artifacts: 'job_output/*'
-
-- job-template:
-    name: 'releng-clear-jenkins-jobs'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-        - gerrit-parameter:
-            branch: 'master'
-
-    scm:
-        - gerrit-trigger-scm:
-            credentials-id: '{ssh-credentials}'
-            refspec: ''
-            choosing-strategy: 'default'
-
-    triggers:
-        - timed: '@weekly'
-
-    builders:
-        - shell: |
-            #! /bin/bash
-            jenkins-jobs delete -r -p jjb-sandbox -x jjb-sandbox/releng
index 8f67e74..5990161 100755 (executable)
@@ -1,4 +1,4 @@
-#! /bin/bash
+#!/bin/bash
 # SPDX-license-identifier: Apache-2.0
 ##############################################################################
 # Copyright (c) 2016 Linux Foundation and others.
index 0246818..d4fa5da 100644 (file)
             auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
         - 'os-odl_l2-bgpvpn-ha':
             auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
+        - 'os-odl_l2-sfc-ha':
+            auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
 
         # NOHA scenarios
         - 'os-odl_l2-nofeature-noha':
             auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
+        - 'os-odl_l2-sfc-noha':
+            auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
 
     jobs:
         - '{installer}-{scenario}-{pod}-daily-{stream}'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 1,6'
+        - timed: '0 0 * * 1'
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 3,7'
+        - timed: '0 0 * * 3'
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
     triggers:
     name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-master-trigger'
     triggers:
         - timed: '0 0 * * 5'
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-master-trigger'
+    triggers:
+        - timed: '0 0 * * 6'
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-master-trigger'
+    triggers:
+        - timed: '0 0 * * 7'
+
 #----------------------------------------------------------------------
 # Enea Armband CI Baremetal Triggers running against colorado branch
 #----------------------------------------------------------------------
     name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-colorado-trigger'
     triggers:
         - timed: '0 16 * * 3,5'
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-colorado-trigger'
+    triggers:
+        - timed: ''
 #---------------------------------------------------------------
 # Enea Armband CI Virtual Triggers running against master branch
 #---------------------------------------------------------------
     name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-master-trigger'
+    triggers:
+        - timed: ''
 #--------------------------------------------------------------------
 # Enea Armband CI Virtual Triggers running against colorado branch
 #--------------------------------------------------------------------
     name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-colorado-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-colorado-trigger'
+    triggers:
+        - timed: ''
 #----------------------------------------------------------
 # Enea Armband POD 2 Triggers running against master branch
 #----------------------------------------------------------
     name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-master-trigger'
+    triggers:
+        - timed: ''
 #---------------------------------------------------------------
 # Enea Armband POD 2 Triggers running against colorado branch
 #---------------------------------------------------------------
     name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-colorado-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-colorado-trigger'
+    triggers:
+        - timed: ''
index 4041a6b..c8e58af 100755 (executable)
@@ -12,6 +12,8 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
+export TERM="vt220"
+
 # source the file so we get OPNFV vars
 source latest.properties
 
@@ -47,7 +49,7 @@ mkdir -p $TMPDIR
 
 cd $WORKSPACE
 if [[ $LAB_CONFIG_URL =~ ^(git|ssh):// ]]; then
-    echo "cloning $LAB_CONFIG_URL"
+    echo "Cloning securedlab repo ${GIT_BRANCH##origin/}"
     git clone --quiet --branch ${GIT_BRANCH##origin/} $LAB_CONFIG_URL lab-config
     LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
 
index 300306f..a058ca1 100755 (executable)
@@ -12,6 +12,8 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
+export TERM="vt220"
+
 echo "Host info: $(hostname) $(hostname -I)"
 
 cd $WORKSPACE
index bede7de..4d799af 100644 (file)
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
 
+    distro:
+        - 'trusty':
+            disabled: false
+            os-version: 'trusty'
+            openstack-os-version: ''
+        - 'centos7':
+            disabled: false
+            os-version: 'centos7'
+            openstack-os-version: ''
 
     jobs:
-        - 'compass-verify-{stream}'
+        - 'compass-verify-{distro}-{stream}'
         - 'compass-build-iso-{stream}'
         - 'compass-build-ppa-{stream}'
-        - 'compass-verify-deploy-{stream}'
+        - 'compass-verify-deploy-{distro}-{stream}'
 
 
 ########################
 # job templates
 ########################
 - job-template:
-    name: 'compass-verify-{stream}'
+    name: 'compass-verify-{distro}-{stream}'
 
     disabled: false
 
 
     builders:
         - trigger-builds:
-            - project: 'compass-verify-deploy-{stream}'
+            - project: 'compass-verify-deploy-{distro}-{stream}'
               current-parameters: true
+              predefined-parameters: |
+                COMPASS_OS_VERSION={os-version}
+                COMPASS_OS_VERSION_OPTION={openstack-os-version}
               same-node: true
               block: true
         - trigger-builds:
                 unstable-threshold: 'FAILURE'
 
 - job-template:
-    name: 'compass-verify-deploy-{stream}'
+    name: 'compass-verify-deploy-{distro}-{stream}'
 
     concurrent: true
 
index 9eae848..ec11db5 100755 (executable)
@@ -4,3 +4,11 @@ echo "--------------------------------------------------------"
 echo "This is diasy4nfv build job!"
 echo "--------------------------------------------------------"
 
+# build output directory
+OUTPUT_DIR=$WORKSPACE/build_output
+mkdir -p $OUTPUT_DIR
+
+# start the build
+cd $WORKSPACE
+./ci/build.sh $OUTPUT_DIR
+
index 6444cf8..e81e300 100644 (file)
@@ -19,7 +19,7 @@
         - 'basic':
             slave-label: 'opnfv-build'
         - 'build':
-            slave-label: 'opnfv-build-ubuntu'
+            slave-label: 'opnfv-build-centos'
         - 'deploy-virtual':
             slave-label: 'opnfv-build'
         - 'smoke-test':
index 1dd1795..2921200 100644 (file)
             SUT: compass
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *colorado
+#apex CI PODs
+        - apex-verify-master:
+            slave-label: '{pod}'
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - apex-daily-master:
+            slave-label: '{pod}'
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - apex-verify-colorado:
+            slave-label: '{pod}'
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *colorado
+        - apex-daily-colorado:
+            slave-label: '{pod}'
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *colorado
 #--------------------------------
 #        None-CI PODs
 #--------------------------------
index bf05522..41fd8cd 100644 (file)
@@ -57,9 +57,7 @@
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
     builders:
-         - shell: |
-             echo "dovetail: verify job"
-         #unittest will be added future
+        - dovetail-unit-tests
 
 - job-template:
     name: 'dovetail-merge-{stream}'
                       branch-pattern: '**/{branch}'
 
     builders:
-         - shell: |
-             echo "dovetail: merge"
-         #unittest will be added future
+        - dovetail-unit-tests
+
+################################
+#builders for dovetail project
+###############################
+- builder:
+    name: dovetail-unit-tests
+    builders:
+        - shell: |
+            #!/bin/bash
+            set -o errexit
+            set -o pipefail
+
+            echo "Running unit tests..."
+            cd $WORKSPACE
+            virtualenv $WORKSPACE/dovetail_venv
+            source $WORKSPACE/dovetail_venv/bin/activate
+
+            #packages installation
+            easy_install -U setuptools
+            easy_install -U pip
+            pip install -r unittests/requirements.txt
+            pip install -e .
+
+            #unit tests
+            /bin/bash $WORKSPACE/unittests/unittest.sh
+
+            deactivate
index 3f7a47b..098b7db 100755 (executable)
@@ -34,6 +34,10 @@ fi
 
 opts="--privileged=true --rm"
 envs="-e CI_DEBUG=${CI_DEBUG} \
+      -e INSTALLER_TYPE=${INSTALLER_TYPE} \
+      -e INSTALLER_IP=${INSTALLER_IP} \
+      -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
+      -e DEPLOY_TYPE=${DEPLOY_TYPE} \
       -v /var/run/docker.sock:/var/run/docker.sock \
       -v /home/opnfv/dovetail/results:/home/opnfv/dovetail/results"
 
@@ -44,7 +48,7 @@ docker pull opnfv/dovetail:$DOCKER_TAG >$redirect
 # Run docker
 echo "Dovetail: docker running..."
 sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/dovetail:${DOCKER_TAG} \
-"/home/opnfv/dovetail/scripts/run.py"
+"/home/opnfv/dovetail/dovetail/run.py"
 
 echo "Dovetail: store results..."
 sudo cp -r /home/opnfv/dovetail/results ./
index 7e36a0c..c66dc3d 100755 (executable)
@@ -11,6 +11,8 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
+export TERM="vt220"
+
 cd $WORKSPACE
 
 # remove the expired items from cache
index ef47ff0..48b1dac 100755 (executable)
@@ -10,6 +10,8 @@
 set -o nounset
 set -o pipefail
 
+export TERM="vt220"
+
 # source the file so we get OPNFV vars
 source latest.properties
 
index 17796a8..a2a57d4 100644 (file)
     triggers:
         - gerrit:
             server-name: 'review.openstack.org'
-            silent-start: true
-            skip-vote:
-                successful: true
-                failed: true
-                unstable: true
-                notbuilt: true
             escape-quotes: true
             trigger-on:
                 - patchset-created-event:
index 94a99d4..717bb3c 100644 (file)
@@ -19,7 +19,6 @@
             name: SLAVE_NAME
             description: Slaves to prune docker images
             default-slaves:
-                - arm-build1
                 - arm-build2
                 - ericsson-build4
                 - ericsson-build5
index 4ffaff4..7eca41a 100644 (file)
             description: 'Git URL to use on this Jenkins Slave'
         - string:
             name: LAB_CONFIG_URL
-            default: ssh://git@git.enea.com/pharos/lab-config
+            default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
             description: 'Base URI to the configuration directory'
 - parameter:
     name: 'joid-baremetal-defaults'
             description: 'Git URL to use on this Jenkins Slave'
         - string:
             name: LAB_CONFIG_URL
-            default: ssh://git@git.enea.com/pharos/lab-config
+            default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
             description: 'Base URI to the configuration directory'
 - parameter:
     name: 'joid-virtual-defaults'
             description: 'Git URL to use on this Jenkins Slave'
         - string:
             name: LAB_CONFIG_URL
-            default: ssh://git@git.enea.com/pharos/lab-config
+            default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
             description: 'Base URI to the configuration directory'
 - parameter:
     name: 'intel-virtual6-defaults'
index 5ea0083..9825ed3 100644 (file)
@@ -115,7 +115,7 @@ default_network_interface: eno3
 dhcp_static_mask: 255.255.255.128
 dhcp_pool_start: 10.20.0.130
 dhcp_pool_end: 10.20.0.254
-network_interface: eno1
+network_interface: eth1
 ipv4_nameserver: 8.8.8.8
 ipv4_subnet_mask: 255.255.255.0
 ipv4_gateway: 172.30.13.1
@@ -131,6 +131,7 @@ ironic_inventory:
     ansible_ssh_host: 172.30.13.90
     ipv4_gateway: 172.30.13.1
     ipv4_interface_mac: 00:1e:67:f9:9b:35
+    ipv4_subnet_mask: 255.255.255.192
     name: controller00.opnfvlocal
     nics:
     - mac: a4:bf:01:01:a9:fc
@@ -151,6 +152,7 @@ ironic_inventory:
     ipv4_address: 172.30.13.91
     ansible_ssh_host: 172.30.13.91
     ipv4_gateway: 172.30.13.1
+    ipv4_interface_mac: 00:1e:67:f6:9b:37
     ipv4_subnet_mask: 255.255.255.0
     name: compute00.opnfvlocal
     nics:
@@ -168,3 +170,4 @@ neutron_subnet_gateway: '172.30.13.1'
 neutron_subnet_allocation_pools:
   - 'start=172.30.13.100,end=172.30.13.254'
 virt_type: 'kvm'
+dib_dev_user_password: devuser
index f09bfe2..8cbfef8 100644 (file)
@@ -96,5 +96,6 @@ node 'baremetal.opnfvlocal', 'lfpod5-jumpserver' {
     ipv4_nameserver           => hiera('ipv4_nameserver'),
     ipv4_subnet_mask          => hiera('ipv4_subnet_mask'),
     bridge_name               => hiera('bridge_name'),
+    dib_dev_user_password     => hiera('dib_dev_user_password'),
   }
 }
index c4bff09..a1e7d5d 100644 (file)
@@ -224,6 +224,20 @@ class opnfv::server (
     }
   }
 
-  # add hosts entries
+  # ensure that we have passwordless sudo and
+  # do not require a tty
+  file_line { 'sudo_rule_no_pw':
+    path => '/etc/sudoers',
+    line => '%wheel     ALL=(ALL)       NOPASSWD: ALL',
+  }
+  file_line { 'sudo_rule_notty':
+    path   => '/etc/sudoers',
+    line   => 'Defaults    requiretty',
+    match  => '.*requiretty.*',
+    match_for_absence => true,
+    ensure => absent,
+    multiple => true,
+  }
+
   create_resources('host', hiera_hash('hosts'))
 }
diff --git a/utils/installer-adapter/ApexAdapter.py b/utils/installer-adapter/ApexAdapter.py
new file mode 100644 (file)
index 0000000..bf451f3
--- /dev/null
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from SSHUtils import SSH_Connection
+
+
+class ApexAdapter:
+
+    def __init__(self, installer_ip):
+        self.installer_ip = installer_ip
+
+    def get_deployment_info(self):
+        pass
+
+    def get_nodes(self):
+        pass
+
+    def get_controller_ips(self):
+        pass
+
+    def get_compute_ips(self):
+        pass
+
+    def get_file_from_installer(self, origin, target, options=None):
+        pass
+
+    def get_file_from_controller(self, origin, target, ip=None, options=None):
+        pass
\ No newline at end of file
diff --git a/utils/installer-adapter/CompassAdapter.py b/utils/installer-adapter/CompassAdapter.py
new file mode 100644 (file)
index 0000000..b40a8d7
--- /dev/null
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from SSHUtils import SSH_Connection
+
+
+class CompassAdapter:
+
+    def __init__(self, installer_ip):
+        self.installer_ip = installer_ip
+
+    def get_deployment_info(self):
+        pass
+
+    def get_nodes(self):
+        pass
+
+    def get_controller_ips(self):
+        pass
+
+    def get_compute_ips(self):
+        pass
+
+    def get_file_from_installer(self, origin, target, options=None):
+        pass
+
+    def get_file_from_controller(self, origin, target, ip=None, options=None):
+        pass
\ No newline at end of file
diff --git a/utils/installer-adapter/FuelAdapter.py b/utils/installer-adapter/FuelAdapter.py
new file mode 100644 (file)
index 0000000..15f0e92
--- /dev/null
@@ -0,0 +1,219 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from SSHUtils import SSH_Connection
+import RelengLogger as rl
+
+
+class FuelAdapter:
+
+    def __init__(self, installer_ip, user="root", password="r00tme"):
+        self.installer_ip = installer_ip
+        self.user = user
+        self.password = password
+        self.connection = SSH_Connection(
+            installer_ip, self.user, self.password, use_system_keys=False)
+        self.logger = rl.Logger("Handler").getLogger()
+
+    def runcmd_fuel_nodes(self):
+        output, error = self.connection.run_remote_cmd('fuel nodes')
+        if len(error) > 0:
+            self.logger.error("error %s" % error)
+            return error
+        return output
+
+    def runcmd_fuel_env(self):
+        output, error = self.connection.run_remote_cmd('fuel env')
+        if len(error) > 0:
+            self.logger.error("error %s" % error)
+            return error
+        return output
+
+    def get_clusters(self):
+        environments = []
+        output = self.runcmd_fuel_env()
+        lines = output.rsplit('\n')
+        if len(lines) < 2:
+            self.logger.info("No environments found in the deployment.")
+            return None
+        else:
+            fields = lines[0].rsplit(' | ')
+
+            index_id = -1
+            index_status = -1
+            index_name = -1
+            index_release_id = -1
+
+            for i in range(0, len(fields) - 1):
+                if "id" in fields[i]:
+                    index_id = i
+                elif "status" in fields[i]:
+                    index_status = i
+                elif "name" in fields[i]:
+                    index_name = i
+                elif "release_id" in fields[i]:
+                    index_release_id = i
+
+            # order env info
+            for i in range(2, len(lines) - 1):
+                fields = lines[i].rsplit(' | ')
+                dict = {"id": fields[index_id].strip(),
+                        "status": fields[index_status].strip(),
+                        "name": fields[index_name].strip(),
+                        "release_id": fields[index_release_id].strip()}
+                environments.append(dict)
+
+        return environments
+
+    def get_nodes(self, options=None):
+        nodes = []
+        output = self.runcmd_fuel_nodes()
+        lines = output.rsplit('\n')
+        if len(lines) < 2:
+            self.logger.info("No nodes found in the deployment.")
+            return None
+        else:
+            # get fields indexes
+            fields = lines[0].rsplit(' | ')
+
+            index_id = -1
+            index_status = -1
+            index_name = -1
+            index_cluster = -1
+            index_ip = -1
+            index_mac = -1
+            index_roles = -1
+            index_online = -1
+
+            for i in range(0, len(fields) - 1):
+                if "id" in fields[i]:
+                    index_id = i
+                elif "status" in fields[i]:
+                    index_status = i
+                elif "name" in fields[i]:
+                    index_name = i
+                elif "cluster" in fields[i]:
+                    index_cluster = i
+                elif "ip" in fields[i]:
+                    index_ip = i
+                elif "mac" in fields[i]:
+                    index_mac = i
+                elif "roles " in fields[i]:
+                    index_roles = i
+                elif "online" in fields[i]:
+                    index_online = i
+
+            # order nodes info
+            for i in range(2, len(lines) - 1):
+                fields = lines[i].rsplit(' | ')
+                dict = {"id": fields[index_id].strip(),
+                        "status": fields[index_status].strip(),
+                        "name": fields[index_name].strip(),
+                        "cluster": fields[index_cluster].strip(),
+                        "ip": fields[index_ip].strip(),
+                        "mac": fields[index_mac].strip(),
+                        "roles": fields[index_roles].strip(),
+                        "online": fields[index_online].strip()}
+                if options and options['cluster']:
+                    if fields[index_cluster].strip() == options['cluster']:
+                        nodes.append(dict)
+                else:
+                    nodes.append(dict)
+
+        return nodes
+
+    def get_controller_ips(self, options):
+        nodes = self.get_nodes(options=options)
+        controllers = []
+        for node in nodes:
+            if "controller" in node["roles"]:
+                controllers.append(node['ip'])
+        return controllers
+
+    def get_compute_ips(self, options=None):
+        nodes = self.get_nodes(options=options)
+        computes = []
+        for node in nodes:
+            if "compute" in node["roles"]:
+                computes.append(node['ip'])
+        return computes
+
+    def get_deployment_info(self):
+        str = "Deployment details:\n"
+        str += "\tInstaller:  Fuel\n"
+        str += "\tScenario:   Unknown\n"
+        sdn = "None"
+        clusters = self.get_clusters()
+        str += "\tN.Clusters: %s\n" % len(clusters)
+        for cluster in clusters:
+            cluster_dic = {'cluster': cluster['id']}
+            str += "\tCluster info:\n"
+            str += "\t   ID:          %s\n" % cluster['id']
+            str += "\t   NAME:        %s\n" % cluster['name']
+            str += "\t   STATUS:      %s\n" % cluster['status']
+            nodes = self.get_nodes(options=cluster_dic)
+            num_nodes = len(nodes)
+            for node in nodes:
+                if "opendaylight" in node['roles']:
+                    sdn = "OpenDaylight"
+                elif "onos" in node['roles']:
+                    sdn = "ONOS"
+            num_controllers = len(
+                self.get_controller_ips(options=cluster_dic))
+            num_computes = len(self.get_compute_ips(options=cluster_dic))
+            ha = False
+            if num_controllers > 1:
+                ha = True
+
+            str += "\t   HA:          %s\n" % ha
+            str += "\t   NUM.NODES:   %s\n" % num_nodes
+            str += "\t   CONTROLLERS: %s\n" % num_controllers
+            str += "\t   COMPUTES:    %s\n" % num_computes
+            str += "\t   SDN CONTR.:  %s\n\n" % sdn
+        str += self.runcmd_fuel_nodes()
+        return str
+
+    def get_file_from_installer(self, remote_path, local_path, options=None):
+        self.logger.debug("Fetching %s from %s" %
+                          (remote_path, self.installer_ip))
+        if self.connection.scp_get(local_path, remote_path) != 0:
+            self.logger.error("SCP failed to retrieve the file.")
+            return 1
+        self.logger.info("%s successfully copied from Fuel to %s" %
+                         (remote_path, local_path))
+
+    def get_file_from_controller(self,
+                                 remote_path,
+                                 local_path,
+                                 ip=None,
+                                 options=None):
+        if ip is None:
+            controllers = self.get_controller_ips(options=options)
+            if len(controllers) == 0:
+                self.logger.info("No controllers found in the deployment.")
+                return 1
+            else:
+                target_ip = controllers[0]
+        else:
+            target_ip = ip
+
+        fuel_dir = '/root/scp/'
+        cmd = 'mkdir -p %s;rsync -Rav %s:%s %s' % (
+            fuel_dir, target_ip, remote_path, fuel_dir)
+        self.logger.info("Copying %s from %s to Fuel..." %
+                         (remote_path, target_ip))
+        output, error = self.connection.run_remote_cmd(cmd)
+        self.logger.debug("Copying files from Fuel to %s..." % local_path)
+        self.get_file_from_installer(
+            fuel_dir + remote_path, local_path, options)
+        cmd = 'rm -r %s' % fuel_dir
+        output, error = self.connection.run_remote_cmd(cmd)
+        self.logger.info("%s successfully copied from %s to %s" %
+                         (remote_path, target_ip, local_path))
diff --git a/utils/installer-adapter/InstallerHandler.py b/utils/installer-adapter/InstallerHandler.py
new file mode 100644 (file)
index 0000000..b81b806
--- /dev/null
@@ -0,0 +1,78 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from FuelAdapter import FuelAdapter
+from ApexAdapter import ApexAdapter
+from CompassAdapter import CompassAdapter
+from JoidAdapter import JoidAdapter
+
+
+INSTALLERS = ["fuel", "apex", "compass", "joid"]
+
+
+class InstallerHandler:
+
+    def __init__(self,
+                 installer,
+                 installer_ip,
+                 installer_user,
+                 installer_pwd=None):
+        self.installer = installer.lower()
+        self.installer_ip = installer_ip
+        self.installer_user = installer_user
+        self.installer_pwd = installer_pwd
+
+        if self.installer == INSTALLERS[0]:
+            self.InstallerAdapter = FuelAdapter(self.installer_ip,
+                                                self.installer_user,
+                                                self.installer_pwd)
+        elif self.installer == INSTALLERS[1]:
+            self.InstallerAdapter = ApexAdapter(self.installer_ip)
+        elif self.installer == INSTALLERS[2]:
+            self.InstallerAdapter = CompassAdapter(self.installer_ip)
+        elif self.installer == INSTALLERS[3]:
+            self.InstallerAdapter = JoidAdapter(self.installer_ip)
+        else:
+            print("Installer %s is not valid. "
+                  "Please use one of the following: %s"
+                  % (self.installer, INSTALLERS))
+            exit(1)
+
+    def get_deployment_info(self):
+        return self.InstallerAdapter.get_deployment_info()
+
+    def get_nodes(self, options=None):
+        return self.InstallerAdapter.get_nodes(options=options)
+
+    def get_controller_ips(self, options=None):
+        return self.InstallerAdapter.get_controller_ips(options=options)
+
+    def get_compute_ips(self, options=None):
+        return self.InstallerAdapter.get_compute_ips(options=options)
+
+    def get_file_from_installer(self,
+                                remote_path,
+                                local_path,
+                                options=None):
+        return self.InstallerAdapter.get_file_from_installer(remote_path,
+                                                             local_path,
+                                                             options=options)
+
+    def get_file_from_controller(self,
+                                 remote_path,
+                                 local_path,
+                                 ip=None,
+                                 options=None):
+        return self.InstallerAdapter.get_file_from_controller(remote_path,
+                                                              local_path,
+                                                              ip=ip,
+                                                              options=options)
+
+    def get_all(self):
+        pass
diff --git a/utils/installer-adapter/JoidAdapter.py b/utils/installer-adapter/JoidAdapter.py
new file mode 100644 (file)
index 0000000..e78ca0f
--- /dev/null
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from SSHUtils import SSH_Connection
+
+
+class JoidAdapter:
+
+    def __init__(self, installer_ip):
+        self.installer_ip = installer_ip
+
+    def get_deployment_info(self):
+        pass
+
+    def get_nodes(self):
+        pass
+
+    def get_controller_ips(self):
+        pass
+
+    def get_compute_ips(self):
+        pass
+
+    def get_file_from_installer(self, origin, target, options=None):
+        pass
+
+    def get_file_from_controller(self, origin, target, ip=None, options=None):
+        pass
\ No newline at end of file
diff --git a/utils/installer-adapter/RelengLogger.py b/utils/installer-adapter/RelengLogger.py
new file mode 100644 (file)
index 0000000..b38e780
--- /dev/null
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+#
+# jose.lausuch@ericsson.com
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Logging levels:
+#  Level     Numeric value
+#  CRITICAL  50
+#  ERROR     40
+#  WARNING   30
+#  INFO      20
+#  DEBUG     10
+#  NOTSET    0
+#
+# Usage:
+#  import RelengLogger as rl
+#  logger = rl.Logger("script_name").getLogger()
+#  logger.info("message to be shown with - INFO - ")
+#  logger.debug("message to be shown with - DEBUG -")
+
+import logging
+import os
+
+
+class Logger:
+
+    def __init__(self, logger_name, level="INFO"):
+
+        self.logger = logging.getLogger(logger_name)
+        self.logger.propagate = 0
+        self.logger.setLevel(logging.DEBUG)
+
+        ch = logging.StreamHandler()
+        formatter = logging.Formatter('%(asctime)s - %(name)s - '
+                                      '%(levelname)s - %(message)s')
+        ch.setFormatter(formatter)
+        if level.lower() == "debug":
+            ch.setLevel(logging.DEBUG)
+        else:
+            ch.setLevel(logging.INFO)
+        self.logger.addHandler(ch)
+
+        hdlr = logging.FileHandler('/tmp/releng.log')
+        hdlr.setFormatter(formatter)
+        hdlr.setLevel(logging.DEBUG)
+        self.logger.addHandler(hdlr)
+
+    def getLogger(self):
+        return self.logger
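
A minimal usage sketch for the Logger wrapper added above (illustration only, not part of this change; the script name and messages are placeholders). With level="debug" the console handler also emits debug records, while the file handler always writes DEBUG and above to /tmp/releng.log:

    import RelengLogger as rl

    # console handler at DEBUG because of level="debug"; file handler is always DEBUG
    logger = rl.Logger("my_script", level="debug").getLogger()
    logger.debug("shown on the console and written to /tmp/releng.log")
    logger.info("always shown")
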
diff --git a/utils/installer-adapter/SSHUtils.py b/utils/installer-adapter/SSHUtils.py
new file mode 100644 (file)
index 0000000..9c92a3b
--- /dev/null
@@ -0,0 +1,130 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+import paramiko
+from scp import SCPClient
+import time
+import RelengLogger as rl
+
+
+class SSH_Connection:
+
+    def __init__(self,
+                 host,
+                 user,
+                 password,
+                 use_system_keys=True,
+                 private_key=None,
+                 use_proxy=False,
+                 proxy_host=None,
+                 proxy_user=None,
+                 proxy_password=None,
+                 timeout=10):
+        self.host = host
+        self.user = user
+        self.password = password
+        self.use_system_keys = use_system_keys
+        self.private_key = private_key
+        self.use_proxy = use_proxy
+        self.proxy_host = proxy_host
+        self.proxy_user = proxy_user
+        self.proxy_password = proxy_password
+        self.timeout = timeout
+        paramiko.util.log_to_file("paramiko.log")
+        self.logger = rl.Logger("SSHUtils").getLogger()
+
+    def connect(self):
+        client = paramiko.SSHClient()
+        if self.use_system_keys:
+            client.load_system_host_keys()
+        elif self.private_key:
+            client.load_host_keys(self.private_key)
+        else:
+            client.load_host_keys('/dev/null')
+
+        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+
+        t = self.timeout
+        proxy = None
+        if self.use_proxy:
+            proxy_command = ('ssh -o UserKnownHostsFile=/dev/null '
+                             '-o StrictHostKeyChecking=no %s@%s -W %s:%s'
+                             % (self.proxy_user, self.proxy_host,
+                                self.host, 22))
+            proxy = paramiko.ProxyCommand(proxy_command)
+            self.logger.debug("Proxy command: %s" % proxy_command)
+        while t > 0:
+            try:
+                self.logger.debug(
+                    "Trying to establish ssh connection to %s..." % self.host)
+                client.connect(self.host,
+                               username=self.user,
+                               password=self.password,
+                               look_for_keys=True,
+                               sock=proxy,
+                               pkey=self.private_key,
+                               timeout=self.timeout)
+                self.logger.debug("Successfully connected to %s!" % self.host)
+                return client
+            except:
+                time.sleep(1)
+                t -= 1
+
+        if t == 0:
+            return None
+
+    def scp_put(self, local_path, remote_path):
+        client = self.connect()
+        if client:
+            scp = SCPClient(client.get_transport())
+            try:
+                scp.put(local_path, remote_path)
+                client.close()
+                return 0
+            except Exception, e:
+                self.logger.error(e)
+                client.close()
+                return 1
+        else:
+            self.logger.error("Cannot establish ssh connection.")
+
+    def scp_get(self, local_path, remote_path):
+        client = self.connect()
+        if client:
+            scp = SCPClient(client.get_transport())
+            try:
+                scp.get(remote_path, local_path)
+                client.close()
+                return 0
+            except Exception, e:
+                self.logger.error(e)
+                client.close()
+                return 1
+        else:
+            self.logger.error("Cannot establish ssh connection.")
+            return 1
+
+    def run_remote_cmd(self, command):
+        client = self.connect()
+        if client:
+            try:
+                stdin, stdout, stderr = client.exec_command(command)
+                out = ''
+                for line in stdout.readlines():
+                    out += line
+                err = stderr.readlines()
+                client.close()
+                return out, err
+            except:
+                client.close()
+                return 1
+        else:
+            self.logger.error("Cannot establish ssh connection.")
+            return 1
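
A minimal usage sketch for the SSH_Connection helper added above (illustration only, not part of this change; the host address and credentials mirror the placeholders used in example.py). run_remote_cmd() returns an (output, error) pair on success, and scp_get() takes the local path first:

    from SSHUtils import SSH_Connection

    # hypothetical Fuel master address and default credentials
    conn = SSH_Connection('10.20.0.2', 'root', 'r00tme', use_system_keys=False)
    out, err = conn.run_remote_cmd('fuel nodes')
    print(out)
    # copy a remote file locally: scp_get(local_path, remote_path)
    conn.scp_get('./astute.yaml', '/etc/fuel/astute.yaml')
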
diff --git a/utils/installer-adapter/__init__.py b/utils/installer-adapter/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/utils/installer-adapter/example.py b/utils/installer-adapter/example.py
new file mode 100644 (file)
index 0000000..804d79c
--- /dev/null
@@ -0,0 +1,22 @@
+# This is an example of usage of this Tool
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+
+from InstallerHandler import InstallerHandler
+
+fuel_handler = InstallerHandler(installer='fuel',
+                                installer_ip='10.20.0.2',
+                                installer_user='root',
+                                installer_pwd='r00tme')
+print("Nodes in cluster 1:\n%s\n" %
+      fuel_handler.get_nodes(options={'cluster': '1'}))
+print("Nodes in cluster 2:\n%s\n" %
+      fuel_handler.get_nodes(options={'cluster': '2'}))
+print("Nodes:\n%s\n" % fuel_handler.get_nodes())
+print("Controller nodes:\n%s\n" % fuel_handler.get_controller_ips())
+print("Compute nodes:\n%s\n" % fuel_handler.get_compute_ips())
+print("\n%s\n" % fuel_handler.get_deployment_info())
+fuel_handler.get_file_from_installer('/root/deploy/dea.yaml', './dea.yaml')
+fuel_handler.get_file_from_controller(
+    '/etc/neutron/neutron.conf', './neutron.conf')
+fuel_handler.get_file_from_controller(
+    '/root/openrc', './openrc')
index 60f1523..49809e9 100644 (file)
@@ -8,10 +8,7 @@
 #
 import datetime
 import jinja2
-import requests
-import sys
-import time
-import yaml
+import os
 
 import reportingUtils as utils
 import reportingConf as conf
@@ -20,6 +17,7 @@ from scenarios import config as cf
 
 # Logger
 logger = utils.getLogger("Yardstick-Status")
+reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
 
 logger.info("*******************************************")
 logger.info("*   Generating reporting scenario status  *")
@@ -35,21 +33,23 @@ for version in conf.versions:
         # get scenarios results data
         scenario_results = utils.getScenarioStatus(installer, version)
         if 'colorado' == version:
-            stable_result = utils.getScenarioStatus(installer, 'stable/colorado')
-            for k,v in stable_result.items():
-                if not scenario_results.has_key(k):
+            stable_result = utils.getScenarioStatus(installer,
+                                                    'stable/colorado')
+            for k, v in stable_result.items():
+                if k not in scenario_results.keys():
                     scenario_results[k] = []
                 scenario_results[k] += stable_result[k]
         scenario_result_criteria = {}
 
         for s in scenario_results.keys():
-            if cf.has_key(installer) and cf[installer].has_key(s):
+            if installer in cf.keys() and s in cf[installer].keys():
                 scenario_results.pop(s)
 
         # From each scenarios get results list
         for s, s_result in scenario_results.items():
             logger.info("---------------------------------")
-            logger.info("installer %s, version %s, scenario %s:" % (installer, version, s))
+            logger.info("installer %s, version %s, scenario %s:" % (installer,
+                                                                    version, s))
 
             ten_criteria = len(s_result)
             ten_score = 0
@@ -62,15 +62,38 @@ for version in conf.versions:
             for v in four_result:
                 four_score += v
 
-            s_status = str(utils.get_status(four_result, s_result))
+            s_status = str(utils.get_percent(four_result, s_result))
             s_four_score = str(four_score) + '/' + str(four_criteria)
             s_ten_score = str(ten_score) + '/' + str(ten_criteria)
-            scenario_result_criteria[s] = sr.ScenarioResult(s_status, s_four_score, s_ten_score)
+            s_score_percent = utils.get_percent(four_result, s_result)
 
             if '100' == s_status:
                 logger.info(">>>>> scenario OK, save the information")
             else:
-                logger.info(">>>> scenario not OK, last 4 iterations = %s, last 10 days = %s" % (s_four_score, s_ten_score))
+                logger.info(">>>> scenario not OK, last 4 iterations = %s, \
+                            last 10 days = %s" % (s_four_score, s_ten_score))
+
+            # Save daily results in a file
+            path_validation_file = (conf.REPORTING_PATH +
+                                    "/release/" + version +
+                                    "/scenario_history.txt")
+
+            if not os.path.exists(path_validation_file):
+                with open(path_validation_file, 'w') as f:
+                    info = 'date,scenario,installer,details,score\n'
+                    f.write(info)
+
+            with open(path_validation_file, "a") as f:
+                info = (reportingDate + "," + s + "," + installer +
+                        "," + s_ten_score + "," +
+                        str(s_score_percent) + "\n")
+                f.write(info)
+
+            scenario_result_criteria[s] = sr.ScenarioResult(s_status,
+                                                            s_four_score,
+                                                            s_ten_score,
+                                                            s_score_percent)
+
             logger.info("--------------------------")
 
         templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
@@ -82,7 +105,8 @@ for version in conf.versions:
         outputText = template.render(scenario_results=scenario_result_criteria,
                                      installer=installer,
                                      period=conf.PERIOD,
-                                     version=version)
+                                     version=version,
+                                     date=reportingDate)
 
         with open(conf.REPORTING_PATH + "/release/" + version +
                   "/index-status-" + installer + ".html", "wb") as fh:
index 71eb919..ec9ed76 100644 (file)
@@ -32,7 +32,7 @@ def getLogger(module):
 def getScenarioStatus(installer, version):
     url = (conf.URL_BASE + "?case=" + "scenario_status" +
            "&installer=" + installer +
-           "&version=" + version +"&period=" + str(conf.PERIOD))
+           "&version=" + version + "&period=" + str(conf.PERIOD))
     request = Request(url)
 
     try:
@@ -53,7 +53,7 @@ def getScenarioStatus(installer, version):
                     scenario_results[r['scenario']] = []
                 scenario_results[r['scenario']].append(r)
 
-        for k,v in scenario_results.items():
+        for k, v in scenario_results.items():
             # scenario_results[k] = v[:conf.LASTEST_TESTS]
             s_list = []
             for element in v:
@@ -66,20 +66,25 @@ def getScenarioStatus(installer, version):
     # return scenario_results
     return result_dict
 
+
 def subfind(given_list, pattern_list):
+
     for i in range(len(given_list)):
-        if given_list[i] == pattern_list[0] and given_list[i:i + conf.LASTEST_TESTS] == pattern_list:
+        if given_list[i] == pattern_list[0] and \
+                given_list[i:i + conf.LASTEST_TESTS] == pattern_list:
             return True
     return False
 
-def get_percent(status):
-    
+
+def _get_percent(status):
+
     if status * 100 % 6:
         return round(float(status) * 100 / 6, 1)
     else:
         return status * 100 / 6
 
-def get_status(four_list, ten_list):
+
+def get_percent(four_list, ten_list):
     four_score = 0
     ten_score = 0
 
@@ -97,13 +102,13 @@ def get_status(four_list, ten_list):
     else:
         status = four_score + 1
 
-    return get_percent(status)
+    return _get_percent(status)
 
 
 def _test():
     status = getScenarioStatus("compass", "master")
     print "status:++++++++++++++++++++++++"
-    print json.dumps(status,indent=4)
+    print json.dumps(status, indent=4)
 
 
 if __name__ == '__main__':    # pragma: no cover
index 61ffb2c..1f7eb2b 100644 (file)
@@ -9,10 +9,12 @@
 
 
 class ScenarioResult(object):
-    def __init__(self, status, four_days_score='', ten_days_score=''):
+    def __init__(self, status, four_days_score='', ten_days_score='',
+                 score_percent=0.0):
         self.status = status
         self.four_days_score = four_days_score
         self.ten_days_score = ten_days_score
+        self.score_percent = score_percent
 
     def getStatus(self):
         return self.status
@@ -22,3 +24,6 @@ class ScenarioResult(object):
 
     def getFourDaysScore(self):
         return self.four_days_score
+
+    def getScorePercent(self):
+        return self.score_percent
index 602ce8a..5a4dc34 100644 (file)
@@ -3,9 +3,56 @@
     <meta charset="utf-8">
     <!-- Bootstrap core CSS -->
     <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet">
-    <link href="default.css" rel="stylesheet">
+    <link href="../../../css/default.css" rel="stylesheet">
     <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
     <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
+    <script type="text/javascript" src="http://d3js.org/d3.v2.min.js"></script>
+    <script type="text/javascript" src="../../../js/gauge.js"></script>
+    <script type="text/javascript" src="../../../js/trend.js"></script>
+    <script>
+        function onDocumentReady() {
+            // Gauge management
+            {% for scenario in scenario_results.keys() -%}
+            var gaugeScenario{{loop.index}} = gauge('#gaugeScenario{{loop.index}}');
+            {%- endfor %}
+            // assign success rate to the gauge
+            function updateReadings() {
+                {% for scenario in scenario_results.keys() -%}
+                 gaugeScenario{{loop.index}}.update({{scenario_results[scenario].getScorePercent()}});
+                 {%- endfor %}
+            }
+            updateReadings();
+        }
+
+        // trend line management
+        //d3.csv("./scenario_history.txt", function(data) {
+        d3.csv("./scenario_history.txt", function(data) {
+            // ***************************************
+            // Create the trend line
+            {% for scenario in scenario_results.keys() -%}
+            // for scenario {{scenario}}
+            // Filter results
+                var trend{{loop.index}} = data.filter(function(row) {
+                    return row["scenario"]=="{{scenario}}" && row["installer"]=="{{installer}}";
+                })
+            // Parse the date
+            trend{{loop.index}}.forEach(function(d) {
+                d.date = parseDate(d.date);
+                d.score = +d.score
+            });
+            // Draw the trend line
+            var mytrend = trend("#trend_svg{{loop.index}}",trend{{loop.index}})
+            // ****************************************
+            {%- endfor %}
+        });
+        if ( !window.isLoaded ) {
+            window.addEventListener("load", function() {
+            onDocumentReady();
+            }, false);
+        } else {
+            onDocumentReady();
+        }
+    </script>
     <script type="text/javascript">
     $(document).ready(function (){
         $(".btn-more").click(function() {
     <body>
     <div class="container">
       <div class="masthead">
-        <h3 class="text-muted">Yardstick status page ({{version}})</h3>
+          <h3 class="text-muted">Yardstick status page ({{version}}, {{date}})</h3>
         <nav>
           <ul class="nav nav-justified">
-            <li class="active"><a href="index.html">Home</a></li>
+            <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
             <li><a href="index-status-apex.html">Apex</a></li>
             <li><a href="index-status-compass.html">Compass</a></li>
             <li><a href="index-status-fuel.html">Fuel</a></li>
                     <tr>
                         <th width="40%">Scenario</th>
                         <th width="20%">Status</th>
-                        <th width="20%">Last 4 Iterations</th>
-                        <th width="20%">Last 10 Days</th>
+                        <th width="20%">Trend</th>
+                        <th width="10%">Last 4 Iterations</th>
+                        <th width="10%">Last 10 Days</th>
                     </tr>
                         {% for scenario,result in scenario_results.iteritems() -%}
                             <tr class="tr-ok">
                                 <td>{{scenario}}</td>
-                                <td>
-                                    <img src="../../img/gauge_{{ scenario_results[scenario].getStatus() }}.png">
-                                </td>
+                                <td><div id="gaugeScenario{{loop.index}}"></div></td>
+                                <td><div id="trend_svg{{loop.index}}"></div></td>
                                 <td>{{scenario_results[scenario].getFourDaysScore()}}</td>
                                 <td>{{scenario_results[scenario].getTenDaysScore()}}</td>
                             </tr>