Merge "Add verify jobs for Armband"
authorAric Gardner <agardner@linuxfoundation.org>
Mon, 31 Oct 2016 14:32:40 +0000 (14:32 +0000)
committerGerrit Code Review <gerrit@opnfv.org>
Mon, 31 Oct 2016 14:32:40 +0000 (14:32 +0000)
148 files changed:
jjb-sandbox/releng/releng-sandbox-jobs.yml
jjb-sandbox/releng/verify-sandbox-jobs.sh
jjb/apex/apex-deploy.sh
jjb/apex/apex-upload-artifact.sh
jjb/armband/armband-ci-jobs.yml
jjb/armband/armband-deploy.sh
jjb/armband/build.sh
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-project-jobs.yml
jjb/daisy4nfv/daisy4nfv-build.sh
jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
jjb/dovetail/dovetail-ci-jobs.yml
jjb/dovetail/dovetail-project-jobs.yml
jjb/dovetail/dovetail-run.sh
jjb/fuel/fuel-build.sh
jjb/fuel/fuel-deploy.sh
jjb/functest/functest-ci-jobs.yml
jjb/functest/functest-project-jobs.yml
jjb/infra/bifrost-verify-jobs.yml
jjb/opnfv/opnfv-docs.yml
jjb/opnfv/opnfv-utils.yml
jjb/opnfv/slave-params.yml
jjb/qtip/qtip-ci-jobs.yml
jjb/releng-macros.yaml
jjb/vswitchperf/vswitchperf.yml
jjb/yardstick/yardstick-ci-jobs.yml
prototypes/bifrost/scripts/destroy-env.sh
prototypes/puppet-infracloud/deploy_on_baremetal.md [new file with mode: 0644]
prototypes/puppet-infracloud/hiera/common_baremetal.yaml
prototypes/puppet-infracloud/manifests/site.pp
prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp
utils/installer-adapter/ApexAdapter.py [new file with mode: 0644]
utils/installer-adapter/CompassAdapter.py [new file with mode: 0644]
utils/installer-adapter/FuelAdapter.py [new file with mode: 0644]
utils/installer-adapter/InstallerHandler.py [new file with mode: 0644]
utils/installer-adapter/JoidAdapter.py [new file with mode: 0644]
utils/installer-adapter/RelengLogger.py [new file with mode: 0644]
utils/installer-adapter/SSHUtils.py [new file with mode: 0644]
utils/installer-adapter/__init__.py [moved from utils/test/result_collection_api/update/templates/__init__.py with 100% similarity]
utils/installer-adapter/example.py [new file with mode: 0644]
utils/test/dashboard/README.rst [new file with mode: 0644]
utils/test/dashboard/dashboard/conf/config.py
utils/test/dashboard/dashboard/conf/testcases.py
utils/test/dashboard/dashboard/elastic2kibana/dashboard_assembler.py
utils/test/dashboard/dashboard/elastic2kibana/main.py
utils/test/dashboard/dashboard/elastic2kibana/utility.py
utils/test/dashboard/dashboard/elastic2kibana/visualization_assembler.py
utils/test/dashboard/dashboard/elastic2kibana_main.py [deleted file]
utils/test/dashboard/dashboard/functest/format.py [moved from utils/test/dashboard/dashboard/mongo2elastic/format.py with 94% similarity]
utils/test/dashboard/dashboard/mongo2elastic/main.py
utils/test/dashboard/dashboard/mongo2elastic_main.py [deleted file]
utils/test/dashboard/dashboard/qtip/format.py [new file with mode: 0644]
utils/test/dashboard/etc/config.ini
utils/test/dashboard/install.sh [new file with mode: 0755]
utils/test/dashboard/setup.cfg [new file with mode: 0644]
utils/test/dashboard/setup.py [new file with mode: 0644]
utils/test/reporting/functest/reporting-status.py
utils/test/reporting/functest/reportingUtils.py
utils/test/reporting/yardstick/reporting-status.py
utils/test/reporting/yardstick/reportingUtils.py
utils/test/reporting/yardstick/scenarioResult.py
utils/test/reporting/yardstick/template/index-status-tmpl.html
utils/test/testapi/3rd_party/static/.gitignore [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/.gitignore with 100% similarity]
utils/test/testapi/3rd_party/static/css/highlight.default.css [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/css/highlight.default.css with 100% similarity]
utils/test/testapi/3rd_party/static/css/hightlight.default.css [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/css/hightlight.default.css with 100% similarity]
utils/test/testapi/3rd_party/static/css/screen.css [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/css/screen.css with 100% similarity]
utils/test/testapi/3rd_party/static/endpoint.html [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/endpoint.html with 100% similarity]
utils/test/testapi/3rd_party/static/images/explorer_icons.png [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/images/explorer_icons.png with 100% similarity]
utils/test/testapi/3rd_party/static/images/logo_small.png [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/images/logo_small.png with 100% similarity]
utils/test/testapi/3rd_party/static/images/pet_store_api.png [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/images/pet_store_api.png with 100% similarity]
utils/test/testapi/3rd_party/static/images/throbber.gif [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/images/throbber.gif with 100% similarity]
utils/test/testapi/3rd_party/static/images/wordnik_api.png [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/images/wordnik_api.png with 100% similarity]
utils/test/testapi/3rd_party/static/index.html [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/index.html with 100% similarity]
utils/test/testapi/3rd_party/static/lib/backbone-min.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/backbone-min.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/handlebars-1.0.0.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/handlebars-1.0.0.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/highlight.7.3.pack.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/highlight.7.3.pack.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/jquery-1.8.0.min.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/jquery-1.8.0.min.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/jquery.ba-bbq.min.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/jquery.ba-bbq.min.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/jquery.slideto.min.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/jquery.slideto.min.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/jquery.wiggle.min.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/jquery.wiggle.min.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/shred.bundle.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/shred.bundle.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/shred/content.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/shred/content.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/swagger-oauth.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/swagger-oauth.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/swagger.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/swagger.js with 100% similarity]
utils/test/testapi/3rd_party/static/lib/underscore-min.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/lib/underscore-min.js with 100% similarity]
utils/test/testapi/3rd_party/static/o2c.html [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/o2c.html with 100% similarity]
utils/test/testapi/3rd_party/static/swagger-ui.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/swagger-ui.js with 100% similarity]
utils/test/testapi/3rd_party/static/swagger-ui.min.js [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/static/swagger-ui.min.js with 100% similarity]
utils/test/testapi/README.rst [moved from utils/test/result_collection_api/README.rst with 97% similarity]
utils/test/testapi/docker/Dockerfile [moved from utils/test/result_collection_api/docker/Dockerfile with 96% similarity]
utils/test/testapi/docker/prepare-env.sh [moved from utils/test/result_collection_api/docker/prepare-env.sh with 100% similarity]
utils/test/testapi/docker/start-server.sh [moved from utils/test/result_collection_api/docker/start-server.sh with 100% similarity]
utils/test/testapi/etc/config.ini [moved from utils/test/result_collection_api/etc/config.ini with 100% similarity]
utils/test/testapi/install.sh [new file with mode: 0755]
utils/test/testapi/opnfv_testapi/__init__.py [moved from utils/test/result_collection_api/opnfv_testapi/__init__.py with 100% similarity]
utils/test/testapi/opnfv_testapi/cmd/__init__.py [moved from utils/test/result_collection_api/opnfv_testapi/cmd/__init__.py with 100% similarity]
utils/test/testapi/opnfv_testapi/cmd/server.py [moved from utils/test/result_collection_api/opnfv_testapi/cmd/server.py with 100% similarity]
utils/test/testapi/opnfv_testapi/common/__init__.py [moved from utils/test/result_collection_api/opnfv_testapi/common/__init__.py with 100% similarity]
utils/test/testapi/opnfv_testapi/common/config.py [moved from utils/test/result_collection_api/opnfv_testapi/common/config.py with 100% similarity]
utils/test/testapi/opnfv_testapi/common/constants.py [moved from utils/test/result_collection_api/opnfv_testapi/common/constants.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/__init__.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/__init__.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/handlers.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/handlers.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/models.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/models.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/pod_handlers.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/pod_handlers.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/pod_models.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/pod_models.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/project_handlers.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/project_handlers.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/project_models.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/project_models.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/result_handlers.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/result_models.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/result_models.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/testcase_handlers.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/testcase_handlers.py with 100% similarity]
utils/test/testapi/opnfv_testapi/resources/testcase_models.py [moved from utils/test/result_collection_api/opnfv_testapi/resources/testcase_models.py with 100% similarity]
utils/test/testapi/opnfv_testapi/router/__init__.py [moved from utils/test/result_collection_api/opnfv_testapi/router/__init__.py with 100% similarity]
utils/test/testapi/opnfv_testapi/router/url_mappings.py [moved from utils/test/result_collection_api/opnfv_testapi/router/url_mappings.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/__init__.py [moved from utils/test/result_collection_api/opnfv_testapi/tests/__init__.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/__init__.py [moved from utils/test/result_collection_api/opnfv_testapi/tests/unit/__init__.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py [moved from utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/test_base.py [moved from utils/test/result_collection_api/opnfv_testapi/tests/unit/test_base.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/test_fake_pymongo.py [moved from utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/test_pod.py [moved from utils/test/result_collection_api/opnfv_testapi/tests/unit/test_pod.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/test_project.py [moved from utils/test/result_collection_api/opnfv_testapi/tests/unit/test_project.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/test_result.py [moved from utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/test_testcase.py [moved from utils/test/result_collection_api/opnfv_testapi/tests/unit/test_testcase.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tests/unit/test_version.py [moved from utils/test/result_collection_api/opnfv_testapi/tests/unit/test_version.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tornado_swagger/README.md [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/README.md with 100% similarity]
utils/test/testapi/opnfv_testapi/tornado_swagger/__init__.py [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/__init__.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tornado_swagger/handlers.py [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/handlers.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tornado_swagger/settings.py [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/settings.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tornado_swagger/swagger.py [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/swagger.py with 100% similarity]
utils/test/testapi/opnfv_testapi/tornado_swagger/views.py [moved from utils/test/result_collection_api/opnfv_testapi/tornado_swagger/views.py with 100% similarity]
utils/test/testapi/requirements.txt [moved from utils/test/result_collection_api/requirements.txt with 100% similarity]
utils/test/testapi/run_test.sh [moved from utils/test/result_collection_api/run_test.sh with 100% similarity]
utils/test/testapi/setup.cfg [moved from utils/test/result_collection_api/setup.cfg with 100% similarity]
utils/test/testapi/setup.py [moved from utils/test/result_collection_api/setup.py with 100% similarity]
utils/test/testapi/test-requirements.txt [moved from utils/test/result_collection_api/test-requirements.txt with 100% similarity]
utils/test/testapi/update/README.md [moved from utils/test/result_collection_api/update/README.md with 100% similarity]
utils/test/testapi/update/__init__.py [moved from utils/test/result_collection_api/update/__init__.py with 100% similarity]
utils/test/testapi/update/playbook-update.sh [moved from utils/test/result_collection_api/update/playbook-update.sh with 100% similarity]
utils/test/testapi/update/templates/__init__.py [new file with mode: 0644]
utils/test/testapi/update/templates/backup_mongodb.py [moved from utils/test/result_collection_api/update/templates/backup_mongodb.py with 100% similarity]
utils/test/testapi/update/templates/changes_in_mongodb.py [moved from utils/test/result_collection_api/update/templates/changes_in_mongodb.py with 100% similarity]
utils/test/testapi/update/templates/restore_mongodb.py [moved from utils/test/result_collection_api/update/templates/restore_mongodb.py with 100% similarity]
utils/test/testapi/update/templates/rm_images.sh [moved from utils/test/result_collection_api/update/templates/rm_images.sh with 100% similarity]
utils/test/testapi/update/templates/rm_olds.sh [moved from utils/test/result_collection_api/update/templates/rm_olds.sh with 100% similarity]
utils/test/testapi/update/templates/update_mongodb.py [moved from utils/test/result_collection_api/update/templates/update_mongodb.py with 100% similarity]
utils/test/testapi/update/templates/utils.py [moved from utils/test/result_collection_api/update/templates/utils.py with 100% similarity]
utils/test/testapi/update/test.yml [moved from utils/test/result_collection_api/update/test.yml with 100% similarity]
utils/test/testapi/update/update.yml [moved from utils/test/result_collection_api/update/update.yml with 100% similarity]
utils/test/testapi/update/update_api.py [moved from utils/test/result_collection_api/update/update_api.py with 100% similarity]

index ee35f42..aa10a43 100644 (file)
@@ -2,12 +2,13 @@
     name: 'releng-sandbox-jobs'
     jobs:
         - 'releng-deploy-sandbox'
-        - 'releng-clear-jenkins-jobs'
 
     project: 'releng'
+    node: 'releng-sandbox'
 
 - job-template:
     name: 'releng-deploy-sandbox'
+    node: '{node}'
 
     parameters:
         - project-parameter:
                 - draft-published-event
                 - comment-added-contains-event:
                     comment-contains-value: 'redeploy'
+            custom-url: '$BUILD_URL deploying to $JENKINS_URL'
+            silent-start: true
+            skip-vote:
+                successful: true
+                failed: true
+                unstable: true
+                notbuilt: true
             projects:
               - project-compare-type: 'ANT'
                 project-pattern: 'releng'
                 file-paths:
                     - compare-type: ANT
                       pattern: jjb-sandbox/**
-                    - compare-type: ANT
-                      pattern: utils/**
+
+    wrappers: ''
 
     builders:
         - shell:
             !include-raw-escape: verify-sandbox-jobs.sh
         - shell: |
-            #! /bin/bash
-            jenkins-jobs update -jjb-sandbox
+            #!/bin/bash
+            jenkins-jobs update --delete-old -r jjb/releng-defaults.yaml:jjb/releng-macros.yaml:jjb/opnfv/installer-params.yml:jjb/opnfv/slave-params.yml:jjb-sandbox
 
     publishers:
         - archive-artifacts:
             artifacts: 'job_output/*'
-
-- job-template:
-    name: 'releng-clear-jenkins-jobs'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-        - gerrit-parameter:
-            branch: 'master'
-
-    scm:
-        - gerrit-trigger-scm:
-            credentials-id: '{ssh-credentials}'
-            refspec: ''
-            choosing-strategy: 'default'
-
-    triggers:
-        - timed: '@weekly'
-
-    builders:
-        - shell: |
-            #! /bin/bash
-            jenkins-jobs delete -r -p jjb-sandbox -x jjb-sandbox/releng
index 8f67e74..5990161 100755 (executable)
@@ -1,4 +1,4 @@
-#! /bin/bash
+#!/bin/bash
 # SPDX-license-identifier: Apache-2.0
 ##############################################################################
 # Copyright (c) 2016 Linux Foundation and others.
index 72fa6f6..e21387a 100755 (executable)
@@ -3,7 +3,7 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
-APEX_PKGS="common undercloud opendaylight-sfc onos"
+APEX_PKGS="common undercloud onos"
 IPV6_FLAG=False
 
 # log info to console
index 0dd112b..f54e4c5 100755 (executable)
@@ -49,13 +49,13 @@ echo "ISO Upload Complete!"
 RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
 RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
 VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
-for pkg in common undercloud opendaylight-sfc onos; do
+for pkg in common undercloud onos; do
     RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
 done
 SRPM_INSTALL_PATH=$BUILD_DIRECTORY
 SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
 VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
-for pkg in common undercloud opendaylight-sfc onos; do
+for pkg in common undercloud onos; do
     SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
 done
 }
index 0246818..2122959 100644 (file)
             slave-label: arm-pod2
             installer: fuel
             <<: *colorado
+        - arm-pod3:
+            slave-label: arm-pod3
+            installer: fuel
+            <<: *colorado
 #--------------------------------
 #        master
 #--------------------------------
             slave-label: arm-pod2
             installer: fuel
             <<: *master
+        - arm-pod3:
+            slave-label: arm-pod3
+            installer: fuel
+            <<: *master
 #--------------------------------
 #       scenarios
 #--------------------------------
             auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
         - 'os-odl_l2-bgpvpn-ha':
             auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
+        - 'os-odl_l2-sfc-ha':
+            auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
 
         # NOHA scenarios
         - 'os-odl_l2-nofeature-noha':
             auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
+        - 'os-odl_l2-sfc-noha':
+            auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
 
     jobs:
         - '{installer}-{scenario}-{pod}-daily-{stream}'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 1,6'
+        - timed: '0 0 * * 1'
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
     triggers:
-        - timed: '0 0 * * 3,7'
+        - timed: '0 0 * * 3'
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
     triggers:
     name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-master-trigger'
     triggers:
         - timed: '0 0 * * 5'
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-master-trigger'
+    triggers:
+        - timed: '0 0 * * 6'
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-master-trigger'
+    triggers:
+        - timed: '0 0 * * 7'
+
 #----------------------------------------------------------------------
 # Enea Armband CI Baremetal Triggers running against colorado branch
 #----------------------------------------------------------------------
     name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-colorado-trigger'
     triggers:
         - timed: '0 16 * * 3,5'
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-colorado-trigger'
+    triggers:
+        - timed: ''
 #---------------------------------------------------------------
 # Enea Armband CI Virtual Triggers running against master branch
 #---------------------------------------------------------------
     name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-master-trigger'
+    triggers:
+        - timed: ''
 #--------------------------------------------------------------------
 # Enea Armband CI Virtual Triggers running against colorado branch
 #--------------------------------------------------------------------
     name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-colorado-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-colorado-trigger'
+    triggers:
+        - timed: ''
 #----------------------------------------------------------
 # Enea Armband POD 2 Triggers running against master branch
 #----------------------------------------------------------
     name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-master-trigger'
+    triggers:
+        - timed: ''
 #---------------------------------------------------------------
 # Enea Armband POD 2 Triggers running against colorado branch
 #---------------------------------------------------------------
     name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-colorado-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-colorado-trigger'
+    triggers:
+        - timed: ''
+#----------------------------------------------------------
+# Enea Armband POD 3 Triggers running against master branch
+#----------------------------------------------------------
+- trigger:
+    name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-master-trigger'
+    triggers:
+        - timed: ''
+#---------------------------------------------------------------
+# Enea Armband POD 3 Triggers running against colorado branch
+#---------------------------------------------------------------
+- trigger:
+    name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-colorado-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-colorado-trigger'
+    triggers:
+        - timed: ''
index 4041a6b..c8e58af 100755 (executable)
@@ -12,6 +12,8 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
+export TERM="vt220"
+
 # source the file so we get OPNFV vars
 source latest.properties
 
@@ -47,7 +49,7 @@ mkdir -p $TMPDIR
 
 cd $WORKSPACE
 if [[ $LAB_CONFIG_URL =~ ^(git|ssh):// ]]; then
-    echo "cloning $LAB_CONFIG_URL"
+    echo "Cloning securedlab repo ${GIT_BRANCH##origin/}"
     git clone --quiet --branch ${GIT_BRANCH##origin/} $LAB_CONFIG_URL lab-config
     LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
 
index 300306f..a058ca1 100755 (executable)
@@ -12,6 +12,8 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
+export TERM="vt220"
+
 echo "Host info: $(hostname) $(hostname -I)"
 
 cd $WORKSPACE
index da882cd..eb91131 100644 (file)
             blocking-jobs:
                 - 'compass-os-.*?-{pod}-daily-.*?'
                 - 'compass-os-.*?-baremetal-daily-.*?'
-                - 'compass-verify-[^-]*'
+                - 'compass-verify-[^-]*-[^-]*'
             block-level: 'NODE'
 
     wrappers:
index bede7de..5ce9064 100644 (file)
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
 
+    distro:
+        - 'trusty':
+            disabled: false
+            os-version: 'trusty'
+            openstack-os-version: ''
+        - 'centos7':
+            disabled: false
+            os-version: 'centos7'
+            openstack-os-version: ''
 
     jobs:
-        - 'compass-verify-{stream}'
+        - 'compass-verify-{distro}-{stream}'
         - 'compass-build-iso-{stream}'
         - 'compass-build-ppa-{stream}'
-        - 'compass-verify-deploy-{stream}'
+        - 'compass-verify-deploy-{distro}-{stream}'
 
 
 ########################
 # job templates
 ########################
 - job-template:
-    name: 'compass-verify-{stream}'
+    name: 'compass-verify-{distro}-{stream}'
 
     disabled: false
 
@@ -47,7 +56,7 @@
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
-                - 'compass-verify-[^-]*'
+                - 'compass-verify-[^-]*-[^-]*'
                 - 'compass-os-.*?-virtual-daily-.*?'
             block-level: 'NODE'
 
 
     builders:
         - trigger-builds:
-            - project: 'compass-verify-deploy-{stream}'
+            - project: 'compass-verify-deploy-{distro}-{stream}'
               current-parameters: true
+              predefined-parameters: |
+                COMPASS_OS_VERSION={os-version}
+                COMPASS_OS_VERSION_OPTION={openstack-os-version}
               same-node: true
               block: true
         - trigger-builds:
                 unstable-threshold: 'FAILURE'
 
 - job-template:
-    name: 'compass-verify-deploy-{stream}'
+    name: 'compass-verify-deploy-{distro}-{stream}'
 
     concurrent: true
 
index 9eae848..ec11db5 100755 (executable)
@@ -4,3 +4,11 @@ echo "--------------------------------------------------------"
 echo "This is diasy4nfv build job!"
 echo "--------------------------------------------------------"
 
+# build output directory
+OUTPUT_DIR=$WORKSPACE/build_output
+mkdir -p $OUTPUT_DIR
+
+# start the build
+cd $WORKSPACE
+./ci/build.sh $OUTPUT_DIR
+
index 6444cf8..d2adafd 100644 (file)
@@ -1,9 +1,7 @@
 - project:
     name: 'daisy4nfv-verify-jobs'
 
-    project: 'daisy4nfv'
-
-    installer: 'daisy4nfv'
+    project: 'daisy'
 #####################################
 # branch definitions
 #####################################
@@ -19,7 +17,7 @@
         - 'basic':
             slave-label: 'opnfv-build'
         - 'build':
-            slave-label: 'opnfv-build-ubuntu'
+            slave-label: 'opnfv-build-centos'
         - 'deploy-virtual':
             slave-label: 'opnfv-build'
         - 'smoke-test':
             projects:
                 - name: 'daisy4nfv-verify-basic-{stream}'
                   current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                   node-parameters: false
                   kill-phase-on: FAILURE
                   abort-all-job: true
             projects:
                 - name: 'daisy4nfv-verify-build-{stream}'
                   current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                   node-parameters: false
                   kill-phase-on: FAILURE
                   abort-all-job: true
             projects:
                 - name: 'daisy4nfv-verify-deploy-virtual-{stream}'
                   current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                   node-parameters: false
                   kill-phase-on: FAILURE
                   abort-all-job: true
             projects:
                 - name: 'daisy4nfv-verify-smoke-test-{stream}'
                   current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
                   node-parameters: false
                   kill-phase-on: FAILURE
                   abort-all-job: true
 # builder macros
 #####################################
 - builder:
-    name: 'daisy4nfv-verify-basic-macro'
+    name: 'daisy-verify-basic-macro'
     builders:
         - shell:
             !include-raw: ./daisy4nfv-basic.sh
 
 - builder:
-    name: 'daisy4nfv-verify-build-macro'
+    name: 'daisy-verify-build-macro'
     builders:
         - shell:
             !include-raw: ./daisy4nfv-build.sh
 
 - builder:
-    name: 'daisy4nfv-verify-deploy-virtual-macro'
+    name: 'daisy-verify-deploy-virtual-macro'
     builders:
         - shell:
             !include-raw: ./daisy4nfv-virtual-deploy.sh
 
 - builder:
-    name: 'daisy4nfv-verify-smoke-test-macro'
+    name: 'daisy-verify-smoke-test-macro'
     builders:
         - shell: |
             #!/bin/bash
index 1dd1795..2921200 100644 (file)
             SUT: compass
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *colorado
+#apex CI PODs
+        - apex-verify-master:
+            slave-label: '{pod}'
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - apex-daily-master:
+            slave-label: '{pod}'
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - apex-verify-colorado:
+            slave-label: '{pod}'
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *colorado
+        - apex-daily-colorado:
+            slave-label: '{pod}'
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *colorado
 #--------------------------------
 #        None-CI PODs
 #--------------------------------
index bf05522..41fd8cd 100644 (file)
@@ -57,9 +57,7 @@
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
     builders:
-         - shell: |
-             echo "dovetail: verify job"
-         #unittest will be added future
+        - dovetail-unit-tests
 
 - job-template:
     name: 'dovetail-merge-{stream}'
                       branch-pattern: '**/{branch}'
 
     builders:
-         - shell: |
-             echo "dovetail: merge"
-         #unittest will be added future
+        - dovetail-unit-tests
+
+################################
+#builders for dovetail project
+###############################
+- builder:
+    name: dovetail-unit-tests
+    builders:
+        - shell: |
+            #!/bin/bash
+            set -o errexit
+            set -o pipefail
+
+            echo "Running unit tests..."
+            cd $WORKSPACE
+            virtualenv $WORKSPACE/dovetail_venv
+            source $WORKSPACE/dovetail_venv/bin/activate
+
+            #packages installation
+            easy_install -U setuptools
+            easy_install -U pip
+            pip install -r unittests/requirements.txt
+            pip install -e .
+
+            #unit tests
+            /bin/bash $WORKSPACE/unittests/unittest.sh
+
+            deactivate
index 3f7a47b..098b7db 100755 (executable)
@@ -34,6 +34,10 @@ fi
 
 opts="--privileged=true --rm"
 envs="-e CI_DEBUG=${CI_DEBUG} \
+      -e INSTALLER_TYPE=${INSTALLER_TYPE} \
+      -e INSTALLER_IP=${INSTALLER_IP} \
+      -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
+      -e DEPLOY_TYPE=${DEPLOY_TYPE} \
       -v /var/run/docker.sock:/var/run/docker.sock \
       -v /home/opnfv/dovetail/results:/home/opnfv/dovetail/results"
 
@@ -44,7 +48,7 @@ docker pull opnfv/dovetail:$DOCKER_TAG >$redirect
 # Run docker
 echo "Dovetail: docker running..."
 sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/dovetail:${DOCKER_TAG} \
-"/home/opnfv/dovetail/scripts/run.py"
+"/home/opnfv/dovetail/dovetail/run.py"
 
 echo "Dovetail: store results..."
 sudo cp -r /home/opnfv/dovetail/results ./
index 7e36a0c..c66dc3d 100755 (executable)
@@ -11,6 +11,8 @@ set -o errexit
 set -o nounset
 set -o pipefail
 
+export TERM="vt220"
+
 cd $WORKSPACE
 
 # remove the expired items from cache
index ef47ff0..48b1dac 100755 (executable)
@@ -10,6 +10,8 @@
 set -o nounset
 set -o pipefail
 
+export TERM="vt220"
+
 # source the file so we get OPNFV vars
 source latest.properties
 
index 3487793..afeb1f9 100644 (file)
             slave-label: '{pod}'
             installer: fuel
             <<: *master
+        - arm-pod3:
+            slave-label: '{pod}'
+            installer: fuel
+            <<: *master
         - zte-pod1:
             slave-label: '{pod}'
             installer: fuel
             slave-label: '{pod}'
             installer: fuel
             <<: *colorado
+        - arm-pod3:
+            slave-label: '{pod}'
+            installer: fuel
+            <<: *colorado
 # PODs for verify jobs triggered by each patch upload
         - ool-virtual1:
             slave-label: '{pod}'
index a984545..373ed89 100644 (file)
                   - compare-type: ANT
                     pattern: 'docs/**|.gitignore'
 
+    builders:
+        - functest-unit-tests-and-docs-build
+
+    publishers:
+        - junit:
+            results: nosetests.xml
+        - cobertura:
+            report-file: "coverage.xml"
+            only-stable: "true"
+            health-auto-update: "true"
+            stability-auto-update: "true"
+            zoom-coverage-chart: "true"
+            targets:
+                - files:
+                    healthy: 10
+                    unhealthy: 20
+                    failing: 30
+                - method:
+                    healthy: 50
+                    unhealthy: 40
+                    failing: 30
+
+################################
+# job builders
+################################
+
+- builder:
+    name: functest-unit-tests-and-docs-build
     builders:
         - shell: |
-            echo "Nothing to verify!"
+            $WORKSPACE/run_unit_tests.sh
index 17796a8..d15bf74 100644 (file)
     triggers:
         - gerrit:
             server-name: 'review.openstack.org'
-            silent-start: true
-            skip-vote:
-                successful: true
-                failed: true
-                unstable: true
-                notbuilt: true
             escape-quotes: true
             trigger-on:
                 - patchset-created-event:
                     exclude-no-code-change: 'false'
                 - comment-added-contains-event:
                     comment-contains-value: 'recheck'
+            custom-url: '* $JOB_NAME $BUILD_URL'
+            silent-start: true
             projects:
               - project-compare-type: 'PLAIN'
                 project-pattern: 'openstack/bifrost'
index 0ac8aa7..307c1db 100644 (file)
             name: GS_URL
             default: '$GS_BASE{gs-pathname}'
             description: "Directory where the build artifact will be located upon the completion of the build."
+        - string:
+            name: GERRIT_REFSPEC
+            default: 'refs/heads/{branch}'
+            description: "JJB configured GERRIT_REFSPEC parameter"
 
     scm:
         - gerrit-trigger-scm:
                 - change-merged-event
                 - comment-added-contains-event:
                     comment-contains-value: 'remerge'
+                - comment-added-contains-event:
+                    comment-contains-value: 'rebuild docs'
             projects:
                 - project-compare-type: 'ANT'
                   project-pattern: '*'
index 94a99d4..717bb3c 100644 (file)
@@ -19,7 +19,6 @@
             name: SLAVE_NAME
             description: Slaves to prune docker images
             default-slaves:
-                - arm-build1
                 - arm-build2
                 - ericsson-build4
                 - ericsson-build5
index 4ffaff4..b46960f 100644 (file)
             description: 'Git URL to use on this Jenkins Slave'
         - string:
             name: LAB_CONFIG_URL
-            default: ssh://git@git.enea.com/pharos/lab-config
+            default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
             description: 'Base URI to the configuration directory'
 - parameter:
     name: 'joid-baremetal-defaults'
             description: 'Git URL to use on this Jenkins Slave'
         - string:
             name: LAB_CONFIG_URL
-            default: ssh://git@git.enea.com/pharos/lab-config
+            default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
             description: 'Base URI to the configuration directory'
 - parameter:
     name: 'joid-virtual-defaults'
             description: 'Git URL to use on this Jenkins Slave'
         - string:
             name: LAB_CONFIG_URL
-            default: ssh://git@git.enea.com/pharos/lab-config
+            default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
+            description: 'Base URI to the configuration directory'
+- parameter:
+    name: 'arm-pod3-defaults'
+    parameters:
+        - node:
+            name: SLAVE_NAME
+            description: 'Slave name on Jenkins'
+            allowed-slaves:
+                - arm-pod3
+            default-slaves:
+                - arm-pod3
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
+        - string:
+            name: LAB_CONFIG_URL
+            default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
             description: 'Base URI to the configuration directory'
 - parameter:
     name: 'intel-virtual6-defaults'
index d0d6b47..cca8cee 100644 (file)
             installer: fuel
             auto-trigger-name: 'qtip-daily-zte-pod2-trigger'
             <<: *master
+        - zte-pod3:
+            installer: fuel
+            auto-trigger-name: 'qtip-daily-zte-pod3-trigger'
+            <<: *master
 
 #--------------------------------
     jobs:
 - trigger:
     name: 'qtip-daily-zte-pod2-trigger'
     triggers:
-        - timed: '0 5 * * *'
+        - timed: '0 7 * * *'
 
+- trigger:
+    name: 'qtip-daily-zte-pod3-trigger'
+    triggers:
+        - timed: '0 1 * * *'
index d2dc1d1..2ebd775 100644 (file)
 
             mkdir -p upload
             mv docs_output "$local_path"
-            gsutil -m cp -r "$local_path" "gs://$GS_URL"
+            gsutil -m cp -r "$local_path" "gs://$gs_path"
 
             gsutil -m setmeta \
                 -h "Content-Type:text/html" \
index 363423d..3f7f6bf 100644 (file)
             branch: '{stream}'
             gs-pathname: ''
             disabled: false
+            slave-label: 'opnfv-build-ubuntu'
         - colorado:
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
             disabled: false
+            slave-label: 'intel-pod3'
 
 - job-template:
 
@@ -72,7 +74,7 @@
             project: '{project}'
         - gerrit-parameter:
             branch: '{branch}'
-        - 'opnfv-build-ubuntu-defaults'
+        - '{slave-label}-defaults'
 
     scm:
         - gerrit-trigger-scm:
             make
             # run basic sanity test
             make sanity
+            cd ../ci
+            ./build-vsperf.sh verify
 
 - job-template:
     name: 'vswitchperf-merge-{stream}'
             project: '{project}'
         - gerrit-parameter:
             branch: '{branch}'
-        - 'opnfv-build-ubuntu-defaults'
+        - '{slave-label}-defaults'
 
     scm:
         - gerrit-trigger-scm:
             cd src
             make clobber
             make
+            cd ../ci
+            ./build-vsperf.sh merge
index 962ea47..9d80e42 100644 (file)
             installer: fuel
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *colorado
+        - arm-pod3:
+            slave-label: '{pod}'
+            installer: fuel
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - arm-pod3:
+            slave-label: '{pod}'
+            installer: fuel
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *colorado
         - orange-pod2:
             slave-label: '{pod}'
             installer: joid
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
-            default: ''
+            default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
     name: 'yardstick-params-armband-baremetal'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
-            default: ''
+            default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
     name: 'yardstick-params-joid-baremetal'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
-            default: ''
+            default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
     name: 'yardstick-params-intel-pod8'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
-            default: ''
+            default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
     name: 'yardstick-params-lf-pod1'
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 
+- parameter:
+    name: 'yardstick-params-arm-pod3'
+    parameters:
+        - string:
+            name: YARDSTICK_DB_BACKEND
+            default: '-i 104.197.68.199:8086'
+            description: 'Arguments to use in order to choose the backend DB'
+
 - parameter:
     name: 'yardstick-params-virtual'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
-            default: ''
+            default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 
 - parameter:
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
-            default: ''
+            default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 
 - parameter:
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
-            default: ''
+            default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 
 - parameter:
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
-            default: ''
+            default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 
 #######################
index 6746457..cdc55df 100755 (executable)
@@ -46,7 +46,7 @@ fi
 rm -rf /var/lib/libvirt/images/*.qcow2
 
 echo "restarting services"
-service dnsmasq restart
+service dnsmasq restart || true
 service libvirtd restart
 service ironic-api restart
 service ironic-conductor start
diff --git a/prototypes/puppet-infracloud/deploy_on_baremetal.md b/prototypes/puppet-infracloud/deploy_on_baremetal.md
new file mode 100644 (file)
index 0000000..334dff4
--- /dev/null
@@ -0,0 +1,57 @@
+How to deploy Infra Cloud on baremetal
+==================================
+
+Install bifrost controller
+--------------------------
+The first step in deploying Infra Cloud is to install the bifrost controller. It can be virtualized; it does not need to run on baremetal.
+To achieve that, first create a virtual machine with libvirt, with the proper network setup. This VM needs to share one physical interface (the PXE boot one) with the controller and compute node servers.
+Please follow the documentation at [https://git.openstack.org/cgit/openstack/bifrost/tree/tools/virsh_dev_env/README.md](https://git.openstack.org/cgit/openstack/bifrost/tree/tools/virsh_dev_env/README.md) to get sample templates and instructions for creating the bifrost VM.
+
+Once the **baremetal** VM is up, log in via ssh and start installing bifrost there. To proceed, follow these steps:
+
+ 1. Change to the root user and install git
+ 2. Clone the releng project (cd /opt, git clone https://gerrit.opnfv.org/gerrit/releng)
+ 3. cd /opt/releng/prototypes/puppet-infracloud
+ 4. Copy the hiera data to the right folder (cp hiera/common_baremetal.yaml /var/lib/hiera/common.yaml)
+ 5. Ensure the hostname is properly set (hostnamectl set-hostname baremetal.opnfvlocal, then verify with hostname -f)
+ 6. Install puppet and the required modules (./install_puppet.sh, then ./install_modules.sh)
+ 7. Apply puppet to install bifrost (puppet apply manifests/site.pp --modulepath=/etc/puppet/modules:/opt/releng/prototypes/puppet-infracloud/modules)
+
+With these steps you will have a bifrost controller up and running.
+
+Deploy baremetal servers
+--------------------------
+Once the bifrost controller is ready, use it to deploy the baremetal servers.
+On the same bifrost VM, follow these steps:
+
+ 1. Source the bifrost env vars: source /opt/stack/bifrost/env-vars
+ 2. Export the baremetal servers inventory: export BIFROST_INVENTORY_SOURCE=/opt/stack/baremetal.json
+ 3. Enroll the servers: ansible-playbook -vvv -i inventory/bifrost_inventory.py enroll-dynamic.yaml -e @/etc/bifrost/bifrost_global_vars
+ 4. Deploy the servers: ansible-playbook -vvv -i inventory/bifrost_inventory.py deploy-dynamic.yaml -e @/etc/bifrost/bifrost_global_vars
+ 5. Wait until they reach the **active** state; check with: ironic node-list
+
+If a server needs to be redeployed, you can reset it and redeploy it with:
+
+ 1. ironic node-set-provision-state <name_of_server> deleted
+ 2. Wait, checking with ironic node-list, until the server is in the **available** state
+ 3. Redeploy: ansible-playbook -vvv -i inventory/bifrost_inventory.py deploy-dynamic.yaml -e @/etc/bifrost/bifrost_global_vars
+
+Deploy InfraCloud on the servers
+--------------------------------
+Once all the servers are in the **active** state, they can be accessed via ssh and the InfraCloud manifests can be applied to them, to deploy a controller and a compute node.
+On each of them, follow these steps:
+
+ 1. ssh from the bifrost controller to their external IPs: ssh root@172.30.13.90
+ 2. cd /opt and clone the releng project (git clone https://gerrit.opnfv.org/gerrit/releng)
+ 3. Copy the hiera data to the right folder (cp hiera/common_baremetal.yaml /var/lib/hiera/common.yaml)
+ 4. Install the modules: ./install_modules.sh
+ 5. Apply puppet: puppet apply manifests/site.pp --modulepath=/etc/puppet/modules:/opt/releng/prototypes/puppet-infracloud/modules
+
+Once this has been done on the controller and the compute node, you will have a working cloud. To start using it, follow these steps:
+
+ 1. Ensure that controller00.opnfvlocal resolves properly to the external IP (this is already done on the bifrost controller)
+ 2. Copy releng/prototypes/puppet-infracloud/creds/clouds.yaml to $HOME/.config/openstack/clouds.yaml
+ 3. Install python-openstackclient
+ 4. Specify the cloud you want to use: export OS_CLOUD=opnfvlocal
+ 5. Now you can start operating your cloud with the openstack client: openstack flavor list
+
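
The redeploy procedure above (reset a node, wait for it to reach the **available** state, then rerun the deploy playbook) lends itself to scripting. Below is a minimal illustrative sketch in Python that simply wraps the exact CLI commands from the document via subprocess; the node name is a placeholder, and the bifrost environment (source /opt/stack/bifrost/env-vars, export BIFROST_INVENTORY_SOURCE=...) is assumed to be set up as described above.

    import subprocess
    import time

    NODE = "controller00.opnfvlocal"  # placeholder; use a name from 'ironic node-list'
    DEPLOY = ["ansible-playbook", "-vvv",
              "-i", "inventory/bifrost_inventory.py",
              "deploy-dynamic.yaml",
              "-e", "@/etc/bifrost/bifrost_global_vars"]

    # 1. Reset the server (same command as step 1 above)
    subprocess.check_call(["ironic", "node-set-provision-state", NODE, "deleted"])

    # 2. Poll 'ironic node-list' until the node reports 'available'
    while True:
        listing = subprocess.check_output(["ironic", "node-list"]).decode()
        row = next((line for line in listing.splitlines() if NODE in line), "")
        if "available" in row:
            break
        time.sleep(30)

    # 3. Redeploy the server (same command as step 3 above)
    subprocess.check_call(DEPLOY)
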
index 5ea0083..9825ed3 100644 (file)
@@ -115,7 +115,7 @@ default_network_interface: eno3
 dhcp_static_mask: 255.255.255.128
 dhcp_pool_start: 10.20.0.130
 dhcp_pool_end: 10.20.0.254
-network_interface: eno1
+network_interface: eth1
 ipv4_nameserver: 8.8.8.8
 ipv4_subnet_mask: 255.255.255.0
 ipv4_gateway: 172.30.13.1
@@ -131,6 +131,7 @@ ironic_inventory:
     ansible_ssh_host: 172.30.13.90
     ipv4_gateway: 172.30.13.1
     ipv4_interface_mac: 00:1e:67:f9:9b:35
+    ipv4_subnet_mask: 255.255.255.192
     name: controller00.opnfvlocal
     nics:
     - mac: a4:bf:01:01:a9:fc
@@ -151,6 +152,7 @@ ironic_inventory:
     ipv4_address: 172.30.13.91
     ansible_ssh_host: 172.30.13.91
     ipv4_gateway: 172.30.13.1
+    ipv4_interface_mac: 00:1e:67:f6:9b:37
     ipv4_subnet_mask: 255.255.255.0
     name: compute00.opnfvlocal
     nics:
@@ -168,3 +170,4 @@ neutron_subnet_gateway: '172.30.13.1'
 neutron_subnet_allocation_pools:
   - 'start=172.30.13.100,end=172.30.13.254'
 virt_type: 'kvm'
+dib_dev_user_password: devuser
index f09bfe2..8cbfef8 100644 (file)
@@ -96,5 +96,6 @@ node 'baremetal.opnfvlocal', 'lfpod5-jumpserver' {
     ipv4_nameserver           => hiera('ipv4_nameserver'),
     ipv4_subnet_mask          => hiera('ipv4_subnet_mask'),
     bridge_name               => hiera('bridge_name'),
+    dib_dev_user_password     => hiera('dib_dev_user_password'),
   }
 }
index c4bff09..6b608a7 100644 (file)
@@ -224,6 +224,28 @@ class opnfv::server (
     }
   }
 
-  # add hosts entries
+  # ensure that we have non-pass sudo, and
+  # not require tty
+  file_line { 'sudo_rule_no_pw':
+    path => '/etc/sudoers',
+    line => '%wheel     ALL=(ALL)       NOPASSWD: ALL',
+  }
+  file_line { 'sudo_rule_notty':
+    path   => '/etc/sudoers',
+    line   => 'Defaults    requiretty',
+    match  => '.*requiretty.*',
+    match_for_absence => true,
+    ensure => absent,
+    multiple => true,
+  }
+
+  # disable selinux in case of RHEL
+  if ($::osfamily == 'RedHat') {
+    class { 'selinux':
+      mode => 'disabled',
+    }
+  }
+
+  # update hosts
   create_resources('host', hiera_hash('hosts'))
 }
diff --git a/utils/installer-adapter/ApexAdapter.py b/utils/installer-adapter/ApexAdapter.py
new file mode 100644 (file)
index 0000000..17a27b1
--- /dev/null
@@ -0,0 +1,32 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+class ApexAdapter:
+
+    def __init__(self, installer_ip):
+        self.installer_ip = installer_ip
+
+    def get_deployment_info(self):
+        pass
+
+    def get_nodes(self):
+        pass
+
+    def get_controller_ips(self):
+        pass
+
+    def get_compute_ips(self):
+        pass
+
+    def get_file_from_installer(self, origin, target, options=None):
+        pass
+
+    def get_file_from_controller(self, origin, target, ip=None, options=None):
+        pass
diff --git a/utils/installer-adapter/CompassAdapter.py b/utils/installer-adapter/CompassAdapter.py
new file mode 100644 (file)
index 0000000..47cbc64
--- /dev/null
@@ -0,0 +1,32 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+class CompassAdapter:
+
+    def __init__(self, installer_ip):
+        self.installer_ip = installer_ip
+
+    def get_deployment_info(self):
+        pass
+
+    def get_nodes(self):
+        pass
+
+    def get_controller_ips(self):
+        pass
+
+    def get_compute_ips(self):
+        pass
+
+    def get_file_from_installer(self, origin, target, options=None):
+        pass
+
+    def get_file_from_controller(self, origin, target, ip=None, options=None):
+        pass
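
All the new installer adapters expose the same method surface (get_deployment_info, get_nodes, get_controller_ips, get_compute_ips, plus the file retrieval helpers), so callers can treat them interchangeably. A minimal usage sketch against the FuelAdapter shown below, assuming it is run from the releng repository root; the installer IP is a placeholder, and the real entry points for this change are InstallerHandler.py and example.py, which are not shown in this excerpt.

    import sys
    sys.path.append('utils/installer-adapter')  # assumption: cwd is the releng repo root

    from FuelAdapter import FuelAdapter

    # 10.20.0.2 is a placeholder installer IP, not part of this change
    fuel = FuelAdapter('10.20.0.2', user='root', password='r00tme')

    print(fuel.runcmd_fuel_nodes())        # raw 'fuel nodes' output, run over ssh
    for env in fuel.get_clusters() or []:  # parsed 'fuel env' rows
        print(env['id'], env['name'], env['status'])
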
diff --git a/utils/installer-adapter/FuelAdapter.py b/utils/installer-adapter/FuelAdapter.py
new file mode 100644 (file)
index 0000000..672fd51
--- /dev/null
@@ -0,0 +1,236 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+#         George Paraskevopoulos (geopar@intracom-telecom.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import SSHUtils as ssh_utils
+import RelengLogger as rl
+
+
+class FuelAdapter:
+
+    def __init__(self, installer_ip, user="root", password="r00tme"):
+        self.installer_ip = installer_ip
+        self.installer_user = user
+        self.installer_password = password
+        self.installer_connection = ssh_utils.get_ssh_client(
+            installer_ip,
+            self.installer_user,
+            password=self.installer_password)
+        self.logger = rl.Logger("Handler").getLogger()
+
+    def runcmd_fuel_installer(self, cmd):
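+        # Run a command on the Fuel master over the existing SSH
+        # connection; return stdout as a string, or the stderr lines
+        # when the command produced errors.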
+        _, stdout, stderr = self.installer_connection.exec_command(cmd)
+        error = stderr.readlines()
+        if len(error) > 0:
+            self.logger.error("error %s" % ''.join(error))
+            return error
+        output = ''.join(stdout.readlines())
+        return output
+
+    def runcmd_fuel_nodes(self):
+        return self.runcmd_fuel_installer('fuel nodes')
+
+    def runcmd_fuel_env(self):
+        return self.runcmd_fuel_installer('fuel env')
+
+    def get_clusters(self):
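+        # Parse the tabular output of 'fuel env' into a list of dicts
+        # keyed by id, status, name and release_id.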
+        environments = []
+        output = self.runcmd_fuel_env()
+        lines = output.rsplit('\n')
+        if len(lines) < 2:
+            self.logger.info("No environments found in the deployment.")
+            return None
+        else:
+            fields = lines[0].rsplit(' | ')
+
+            index_id = -1
+            index_status = -1
+            index_name = -1
+            index_release_id = -1
+
+            for i in range(0, len(fields) - 1):
+                if "id" in fields[i]:
+                    index_id = i
+                elif "status" in fields[i]:
+                    index_status = i
+                elif "name" in fields[i]:
+                    index_name = i
+                elif "release_id" in fields[i]:
+                    index_release_id = i
+
+            # order env info
+            for i in range(2, len(lines) - 1):
+                fields = lines[i].rsplit(' | ')
+                dict = {"id": fields[index_id].strip(),
+                        "status": fields[index_status].strip(),
+                        "name": fields[index_name].strip(),
+                        "release_id": fields[index_release_id].strip()}
+                environments.append(dict)
+
+        return environments
+
+    def get_nodes(self, options=None):
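+        # Parse 'fuel nodes' output into a list of dicts; when
+        # options={'cluster': <id>} is given, only the nodes of that
+        # cluster are returned.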
+        nodes = []
+        output = self.runcmd_fuel_nodes()
+        lines = output.rsplit('\n')
+        if len(lines) < 2:
+            self.logger.info("No nodes found in the deployment.")
+            return None
+        else:
+            # get fields indexes
+            fields = lines[0].rsplit(' | ')
+
+            index_id = -1
+            index_status = -1
+            index_name = -1
+            index_cluster = -1
+            index_ip = -1
+            index_mac = -1
+            index_roles = -1
+            index_online = -1
+
+            for i in range(0, len(fields) - 1):
+                if "id" in fields[i]:
+                    index_id = i
+                elif "status" in fields[i]:
+                    index_status = i
+                elif "name" in fields[i]:
+                    index_name = i
+                elif "cluster" in fields[i]:
+                    index_cluster = i
+                elif "ip" in fields[i]:
+                    index_ip = i
+                elif "mac" in fields[i]:
+                    index_mac = i
+                elif "roles " in fields[i]:
+                    index_roles = i
+                elif "online" in fields[i]:
+                    index_online = i
+
+            # collect node info row by row
+            for i in range(2, len(lines) - 1):
+                fields = lines[i].rsplit(' | ')
+                node = {"id": fields[index_id].strip(),
+                        "status": fields[index_status].strip(),
+                        "name": fields[index_name].strip(),
+                        "cluster": fields[index_cluster].strip(),
+                        "ip": fields[index_ip].strip(),
+                        "mac": fields[index_mac].strip(),
+                        "roles": fields[index_roles].strip(),
+                        "online": fields[index_online].strip()}
+                if options and options.get('cluster'):
+                    if fields[index_cluster].strip() == options['cluster']:
+                        nodes.append(node)
+                else:
+                    nodes.append(node)
+
+        return nodes
+
+    def get_controller_ips(self, options):
+        nodes = self.get_nodes(options=options)
+        controllers = []
+        for node in nodes:
+            if "controller" in node["roles"]:
+                controllers.append(node['ip'])
+        return controllers
+
+    def get_compute_ips(self, options=None):
+        nodes = self.get_nodes(options=options)
+        computes = []
+        for node in nodes:
+            if "compute" in node["roles"]:
+                computes.append(node['ip'])
+        return computes
+
+    def get_deployment_info(self):
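+        # Assemble a human-readable summary: clusters, HA flag, node
+        # counts, detected SDN controller and the raw 'fuel nodes' output.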
+        str = "Deployment details:\n"
+        str += "\tInstaller:  Fuel\n"
+        str += "\tScenario:   Unknown\n"
+        sdn = "None"
+        clusters = self.get_clusters()
+        str += "\tN.Clusters: %s\n" % len(clusters)
+        for cluster in clusters:
+            cluster_dic = {'cluster': cluster['id']}
+            str += "\tCluster info:\n"
+            str += "\t   ID:          %s\n" % cluster['id']
+            str += "\t   NAME:        %s\n" % cluster['name']
+            str += "\t   STATUS:      %s\n" % cluster['status']
+            nodes = self.get_nodes(options=cluster_dic)
+            num_nodes = len(nodes)
+            for node in nodes:
+                if "opendaylight" in node['roles']:
+                    sdn = "OpenDaylight"
+                elif "onos" in node['roles']:
+                    sdn = "ONOS"
+            num_controllers = len(
+                self.get_controller_ips(options=cluster_dic))
+            num_computes = len(self.get_compute_ips(options=cluster_dic))
+            ha = False
+            if num_controllers > 1:
+                ha = True
+
+            str += "\t   HA:          %s\n" % ha
+            str += "\t   NUM.NODES:   %s\n" % num_nodes
+            str += "\t   CONTROLLERS: %s\n" % num_controllers
+            str += "\t   COMPUTES:    %s\n" % num_computes
+            str += "\t   SDN CONTR.:  %s\n\n" % sdn
+        str += self.runcmd_fuel_nodes()
+        return str
+
+    def get_file_from_installer(self, remote_path, local_path, options=None):
+        self.logger.debug("Fetching %s from %s" %
+                          (remote_path, self.installer_ip))
+        get_file_result = ssh_utils.get_file(self.installer_connection,
+                                             remote_path,
+                                             local_path)
+        if get_file_result is None:
+            self.logger.error("SFTP failed to retrieve the file.")
+            return 1
+        self.logger.info("%s successfully copied from Fuel to %s" %
+                         (remote_path, local_path))
+
+    def get_file_from_controller(self,
+                                 remote_path,
+                                 local_path,
+                                 ip=None,
+                                 user='root',
+                                 options=None):
+        if ip is None:
+            controllers = self.get_controller_ips(options=options)
+            if len(controllers) == 0:
+                self.logger.info("No controllers found in the deployment.")
+                return 1
+            else:
+                target_ip = controllers[0]
+        else:
+            target_ip = ip
+
+        installer_jumphost = {
+            'ip': self.installer_ip,
+            'username': self.installer_user,
+            'password': self.installer_password
+        }
+        controller_conn = ssh_utils.get_ssh_client(
+            target_ip,
+            user,
+            jumphost=installer_jumphost)
+
+        self.logger.debug("Fetching %s from %s" %
+                          (remote_path, target_ip))
+
+        get_file_result = ssh_utils.get_file(controller_conn,
+                                             remote_path,
+                                             local_path)
+        if get_file_result is None:
+            self.logger.error("SFTP failed to retrieve the file.")
+            return 1
+        self.logger.info("%s successfully copied from %s to %s" %
+                         (remote_path, target_ip, local_path))
diff --git a/utils/installer-adapter/InstallerHandler.py b/utils/installer-adapter/InstallerHandler.py
new file mode 100644 (file)
index 0000000..b81b806
--- /dev/null
@@ -0,0 +1,78 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from FuelAdapter import FuelAdapter
+from ApexAdapter import ApexAdapter
+from CompassAdapter import CompassAdapter
+from JoidAdapter import JoidAdapter
+
+
+INSTALLERS = ["fuel", "apex", "compass", "joid"]
+
+
+class InstallerHandler:
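+    """Instantiate the adapter matching the given installer name and
+    delegate every call to it (see example.py for usage)."""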
+
+    def __init__(self,
+                 installer,
+                 installer_ip,
+                 installer_user,
+                 installer_pwd=None):
+        self.installer = installer.lower()
+        self.installer_ip = installer_ip
+        self.installer_user = installer_user
+        self.installer_pwd = installer_pwd
+
+        if self.installer == INSTALLERS[0]:
+            self.InstallerAdapter = FuelAdapter(self.installer_ip,
+                                                self.installer_user,
+                                                self.installer_pwd)
+        elif self.installer == INSTALLERS[1]:
+            self.InstallerAdapter = ApexAdapter(self.installer_ip)
+        elif self.installer == INSTALLERS[2]:
+            self.InstallerAdapter = CompassAdapter(self.installer_ip)
+        elif self.installer == INSTALLERS[3]:
+            self.InstallerAdapter = JoidAdapter(self.installer_ip)
+        else:
+            print("Installer %s is  not valid. "
+                  "Please use one of the followings: %s"
+                  % (self.installer, INSTALLERS))
+            exit(1)
+
+    def get_deployment_info(self):
+        return self.InstallerAdapter.get_deployment_info()
+
+    def get_nodes(self, options=None):
+        return self.InstallerAdapter.get_nodes(options=options)
+
+    def get_controller_ips(self, options=None):
+        return self.InstallerAdapter.get_controller_ips(options=options)
+
+    def get_compute_ips(self, options=None):
+        return self.InstallerAdapter.get_compute_ips(options=options)
+
+    def get_file_from_installer(self,
+                                remote_path,
+                                local_path,
+                                options=None):
+        return self.InstallerAdapter.get_file_from_installer(remote_path,
+                                                             local_path,
+                                                             options=options)
+
+    def get_file_from_controller(self,
+                                 remote_path,
+                                 local_path,
+                                 ip=None,
+                                 options=None):
+        return self.InstallerAdapter.get_file_from_controller(remote_path,
+                                                              local_path,
+                                                              ip=ip,
+                                                              options=options)
+
+    def get_all(self):
+        pass
diff --git a/utils/installer-adapter/JoidAdapter.py b/utils/installer-adapter/JoidAdapter.py
new file mode 100644 (file)
index 0000000..be8c2eb
--- /dev/null
@@ -0,0 +1,32 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+class JoidAdapter:
+
+    def __init__(self, installer_ip):
+        self.installer_ip = installer_ip
+
+    def get_deployment_info(self):
+        pass
+
+    def get_nodes(self):
+        pass
+
+    def get_controller_ips(self):
+        pass
+
+    def get_compute_ips(self):
+        pass
+
+    def get_file_from_installer(self, origin, target, options=None):
+        pass
+
+    def get_file_from_controller(self, origin, target, ip=None, options=None):
+        pass
diff --git a/utils/installer-adapter/RelengLogger.py b/utils/installer-adapter/RelengLogger.py
new file mode 100644 (file)
index 0000000..6fa4ef2
--- /dev/null
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+#
+# jose.lausuch@ericsson.com
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Logging levels:
+#  Level     Numeric value
+#  CRITICAL  50
+#  ERROR     40
+#  WARNING   30
+#  INFO      20
+#  DEBUG     10
+#  NOTSET    0
+#
+# Usage:
+#  import RelengLogger as rl
+#  logger = rl.Logger("script_name").getLogger()
+#  logger.info("message to be shown with - INFO - ")
+#  logger.debug("message to be shown with - DEBUG -")
+
+import logging
+
+
+class Logger:
+
+    def __init__(self, logger_name, level="INFO"):
+
+        self.logger = logging.getLogger(logger_name)
+        self.logger.propagate = 0
+        self.logger.setLevel(logging.DEBUG)
+
+        ch = logging.StreamHandler()
+        formatter = logging.Formatter('%(asctime)s - %(name)s - '
+                                      '%(levelname)s - %(message)s')
+        ch.setFormatter(formatter)
+        if level.lower() == "debug":
+            ch.setLevel(logging.DEBUG)
+        else:
+            ch.setLevel(logging.INFO)
+        self.logger.addHandler(ch)
+
+        hdlr = logging.FileHandler('/tmp/releng.log')
+        hdlr.setFormatter(formatter)
+        hdlr.setLevel(logging.DEBUG)
+        self.logger.addHandler(hdlr)
+
+    def getLogger(self):
+        return self.logger
diff --git a/utils/installer-adapter/SSHUtils.py b/utils/installer-adapter/SSHUtils.py
new file mode 100644 (file)
index 0000000..c938886
--- /dev/null
@@ -0,0 +1,120 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+# Authors: George Paraskevopoulos (geopar@intracom-telecom.com)
+#          Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+import os
+
+import paramiko
+
+import RelengLogger as rl
+
+logger = rl.Logger('SSHUtils').getLogger()
+
+
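+# Example usage (illustrative values, mirroring example.py):
+#   client = get_ssh_client('10.20.0.2', 'root', password='r00tme')
+#   get_file(client, '/root/deploy/dea.yaml', './dea.yaml')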
+def get_ssh_client(hostname, username, password=None, jumphost=None):
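+    # Connect directly, or through a jump host when 'jumphost'
+    # (a dict with 'ip', 'username' and 'password' keys) is given.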
+    client = None
+    try:
+        if jumphost is None:
+            client = paramiko.SSHClient()
+        else:
+            client = JumpHostHopClient()
+            client.configure_jump_host(jumphost['ip'],
+                                       jumphost['username'],
+                                       jumphost['password'])
+
+        if client is None:
+            raise Exception('Could not connect to client')
+
+        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        client.connect(hostname,
+                       username=username,
+                       password=password)
+        return client
+    except Exception, e:
+        logger.error(e)
+        return None
+
+
+def get_file(ssh_conn, src, dest):
+    try:
+        sftp = ssh_conn.open_sftp()
+        sftp.get(src, dest)
+        return True
+    except Exception, e:
+        logger.error("Error [get_file(ssh_conn, '%s', '%s']: %s" %
+                     (src, dest, e))
+        return None
+
+
+def put_file(ssh_conn, src, dest):
+    try:
+        sftp = ssh_conn.open_sftp()
+        sftp.put(src, dest)
+        return True
+    except Exception, e:
+        logger.error("Error [put_file(ssh_conn, '%s', '%s']: %s" %
+                     (src, dest, e))
+        return None
+
+
+class JumpHostHopClient(paramiko.SSHClient):
+    '''
+    Connect to a remote server using a jumphost hop
+    '''
+    def __init__(self, *args, **kwargs):
+        self.logger = rl.Logger("JumpHostHopClient").getLogger()
+        self.jumphost_ssh = None
+        self.jumphost_transport = None
+        self.jumphost_channel = None
+        self.jumphost_ip = None
+        self.jumphost_ssh_key = None
+        self.local_ssh_key = os.path.join(os.getcwd(), 'id_rsa')
+        super(JumpHostHopClient, self).__init__(*args, **kwargs)
+
+    def configure_jump_host(self, jh_ip, jh_user, jh_pass,
+                            jh_ssh_key='/root/.ssh/id_rsa'):
+        self.jumphost_ip = jh_ip
+        self.jumphost_ssh_key = jh_ssh_key
+        self.jumphost_ssh = paramiko.SSHClient()
+        self.jumphost_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        self.jumphost_ssh.connect(jh_ip,
+                                  username=jh_user,
+                                  password=jh_pass)
+        self.jumphost_transport = self.jumphost_ssh.get_transport()
+
+    def connect(self, hostname, port=22, username='root', password=None,
+                pkey=None, key_filename=None, timeout=None, allow_agent=True,
+                look_for_keys=True, compress=False, sock=None, gss_auth=False,
+                gss_kex=False, gss_deleg_creds=True, gss_host=None,
+                banner_timeout=None):
+        try:
+            if self.jumphost_ssh is None:
+                raise Exception('You must configure the jump '
+                                'host before calling connect')
+
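+            # Fetch the jump host's private key locally so it can be
+            # used to authenticate against the target node.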
+            get_file_res = get_file(self.jumphost_ssh,
+                                    self.jumphost_ssh_key,
+                                    self.local_ssh_key)
+            if get_file_res is None:
+                raise Exception('Couldn\'t fetch SSH key from jump host')
+            jumphost_key = (paramiko.RSAKey
+                            .from_private_key_file(self.local_ssh_key))
+
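+            # Tunnel from the jump host to the target with a
+            # direct-tcpip channel and hand it to paramiko as the socket.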
+            self.jumphost_channel = self.jumphost_transport.open_channel(
+                "direct-tcpip",
+                (hostname, 22),
+                (self.jumphost_ip, 22))
+
+            self.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+            super(JumpHostHopClient, self).connect(hostname,
+                                                   username=username,
+                                                   pkey=jumphost_key,
+                                                   sock=self.jumphost_channel)
+            os.remove(self.local_ssh_key)
+        except Exception, e:
+            self.logger.error(e)
diff --git a/utils/installer-adapter/example.py b/utils/installer-adapter/example.py
new file mode 100644 (file)
index 0000000..804d79c
--- /dev/null
@@ -0,0 +1,22 @@
+# Example usage of this tool
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+
+from InstallerHandler import InstallerHandler
+
+fuel_handler = InstallerHandler(installer='fuel',
+                                installer_ip='10.20.0.2',
+                                installer_user='root',
+                                installer_pwd='r00tme')
+print("Nodes in cluster 1:\n%s\n" %
+      fuel_handler.get_nodes(options={'cluster': '1'}))
+print("Nodes in cluster 2:\n%s\n" %
+      fuel_handler.get_nodes(options={'cluster': '2'}))
+print("Nodes:\n%s\n" % fuel_handler.get_nodes())
+print("Controller nodes:\n%s\n" % fuel_handler.get_controller_ips())
+print("Compute nodes:\n%s\n" % fuel_handler.get_compute_ips())
+print("\n%s\n" % fuel_handler.get_deployment_info())
+fuel_handler.get_file_from_installer('/root/deploy/dea.yaml', './dea.yaml')
+fuel_handler.get_file_from_controller(
+    '/etc/neutron/neutron.conf', './neutron.conf')
+fuel_handler.get_file_from_controller(
+    '/root/openrc', './openrc')
diff --git a/utils/test/dashboard/README.rst b/utils/test/dashboard/README.rst
new file mode 100644 (file)
index 0000000..e69de29
index 48fed88..6114e90 100644 (file)
@@ -1,5 +1,6 @@
 #! /usr/bin/env python
 
+import urlparse
 from ConfigParser import SafeConfigParser, NoOptionError
 
 
@@ -22,11 +23,12 @@ class APIConfig:
     """
 
     def __init__(self):
-        self._default_config_location = "../etc/config.ini"
+        self._default_config_location = "/etc/dashboard/config.ini"
         self.es_url = 'http://localhost:9200'
         self.es_creds = None
         self.kibana_url = None
         self.js_path = None
+        self.index_url = None
 
     def _get_str_parameter(self, section, param):
         try:
@@ -67,6 +69,8 @@ class APIConfig:
         obj.es_creds = obj._get_str_parameter("elastic", "creds")
         obj.kibana_url = obj._get_str_parameter("kibana", "url")
         obj.js_path = obj._get_str_parameter("kibana", "js_path")
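+        # Build the full index URL once so consumers no longer need to
+        # join es_url and index themselves.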
+        index = obj._get_str_parameter("elastic", "index")
+        obj.index_url = urlparse.urljoin(obj.es_url, index)
 
         return obj
 
@@ -74,7 +78,9 @@ class APIConfig:
         return "elastic_url = %s \n" \
                "elastic_creds = %s \n" \
                "kibana_url = %s \n" \
+               "index_url = %s \n" \
                "js_path = %s \n" % (self.es_url,
                                     self.es_creds,
                                     self.kibana_url,
+                                    self.index_url,
                                     self.js_path)
index e120987..ff801b4 100644 (file)
@@ -1,7 +1,7 @@
 import yaml
 
 
-with open('./functest/testcases.yaml') as f:
+with open('/etc/dashboard/testcases.yaml') as f:
     testcases_yaml = yaml.safe_load(f)
 f.close()
 
index da7ccfc..651168b 100644 (file)
@@ -1,7 +1,7 @@
 import json
 
 import utility
-from common import elastic_access
+from dashboard.common import elastic_access
 
 
 class DashboardAssembler(object):
index 9ee8942..112d222 100644 (file)
@@ -1,13 +1,12 @@
 #! /usr/bin/env python
 import json
-import urlparse
 
 import argparse
 
-from common import elastic_access
-from common import logger_utils
-from conf import config
-from conf import testcases
+from dashboard.common import elastic_access
+from dashboard.common import logger_utils
+from dashboard.conf import config
+from dashboard.conf import testcases
 from dashboard_assembler import DashboardAssembler
 from visualization_assembler import VisualizationAssembler
 
@@ -131,10 +130,9 @@ class KibanaConstructor(object):
             }
         })
 
-        elastic_data = elastic_access.get_docs(
-            urlparse.urljoin(CONF.es_url, '/test_results/mongo2elastic'),
-            CONF.es_creds,
-            query)
+        elastic_data = elastic_access.get_docs(CONF.index_url,
+                                               CONF.es_creds,
+                                               query)
 
         pods_and_scenarios = {}
 
index dccd28a..55578bd 100644 (file)
@@ -2,7 +2,7 @@ import json
 
 from jinja2 import Environment, PackageLoader
 
-env = Environment(loader=PackageLoader('elastic2kibana', 'templates'))
+env = Environment(loader=PackageLoader('dashboard', 'elastic2kibana/templates'))
 env.filters['jsonify'] = json.dumps
 
 
index 1cb0ba8..d7e6e54 100644 (file)
@@ -1,7 +1,7 @@
 import json
 
 import utility
-from common import elastic_access
+from dashboard.common import elastic_access
 
 
 class VisStateBuilder(object):
diff --git a/utils/test/dashboard/dashboard/elastic2kibana_main.py b/utils/test/dashboard/dashboard/elastic2kibana_main.py
deleted file mode 100644 (file)
index 3ec27cb..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-from elastic2kibana.main import main
-
-if __name__ == '__main__':
-    main()
@@ -184,21 +184,3 @@ def format_vims(testcase):
         }
     }
     return True
-
-
-def format_qpi(testcase):
-    """
-    Look for these and leave any of those:
-        details.index
-
-    If none are present, then return False
-    """
-    details = testcase['details']
-    if 'index' not in details:
-        return False
-
-    for key, value in details.items():
-        if key != 'index':
-            del details[key]
-
-    return True
index 303d82c..688f55f 100644 (file)
@@ -5,15 +5,15 @@ import json
 import os
 import subprocess
 import traceback
-import urlparse
 import uuid
 
 import argparse
 
-from common import logger_utils, elastic_access
-from conf import testcases
-from conf.config import APIConfig
-from mongo2elastic import format
+from dashboard.common import elastic_access
+from dashboard.common import logger_utils
+from dashboard.conf import testcases
+from dashboard.conf.config import APIConfig
+from dashboard.mongo2elastic import format
 
 logger = logger_utils.DashboardLogger('mongo2elastic').get
 
@@ -187,7 +187,7 @@ class DocumentsPublisher(object):
             self._remove()
             exit(-1)
 
-    def get_existed_docs(self):
+    def get_exists(self):
         if self.days == 0:
             body = '''{{
                         "query": {{
@@ -241,10 +241,6 @@ class DocumentsPublisher(object):
 
 
 def main():
-    base_elastic_url = urlparse.urljoin(CONF.es_url, '/test_results/mongo2elastic')
-    days = args.latest_days
-    es_creds = CONF.es_creds
-
     for project, case_dicts in testcases.testcases_yaml.items():
         for case_dict in case_dicts:
             case = case_dict.get('name')
@@ -252,6 +248,6 @@ def main():
             DocumentsPublisher(project,
                                case,
                                fmt,
-                               days,
-                               base_elastic_url,
-                               es_creds).export().get_existed_docs().publish()
+                               args.latest_days,
+                               CONF.index_url,
+                               CONF.es_creds).export().get_exists().publish()
diff --git a/utils/test/dashboard/dashboard/mongo2elastic_main.py b/utils/test/dashboard/dashboard/mongo2elastic_main.py
deleted file mode 100644 (file)
index 141d8f3..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-from mongo2elastic.main import main
-
-if __name__ == '__main__':
-    main()
diff --git a/utils/test/dashboard/dashboard/qtip/format.py b/utils/test/dashboard/dashboard/qtip/format.py
new file mode 100644 (file)
index 0000000..b78fa5b
--- /dev/null
@@ -0,0 +1,19 @@
+#! /usr/bin/env python
+
+
+def format_qpi(testcase):
+    """
+    Strip every key from testcase['details'] except 'index'.
+
+    Return False if 'index' is not present.
+    """
+    details = testcase['details']
+    if 'index' not in details:
+        return False
+
+    for key, value in details.items():
+        if key != 'index':
+            del details[key]
+
+    return True
index d932798..77adc16 100644 (file)
@@ -2,6 +2,7 @@
 # the CONF object in config.ini must be updated
 [elastic]
 url = http://localhost:9200
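+# index path joined onto 'url' by conf/config.py (urlparse.urljoin)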
+index = testapi/results
 creds =
 
 [kibana]
diff --git a/utils/test/dashboard/install.sh b/utils/test/dashboard/install.sh
new file mode 100755 (executable)
index 0000000..9fd60d9
--- /dev/null
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+usage="
+Script to install dashboard automatically.
+This script should be run under root.
+
+usage:
+    bash $(basename "$0") [-h|--help] [-t <test_name>]
+
+where:
+    -h|--help         show this help text
+    -p|--project      project dashboard
+      <project_name>"
+
+# Parse parameters
+while [[ $# -gt 0 ]]; do
+    key="$1"
+    case $key in
+        -h|--help)
+            echo "$usage"
+            exit 0
+        ;;
+        -p|--project)
+            PROJECT="$2"
+            shift
+        ;;
+        *)
+            echo "unknown option $1 $2"
+            exit 1
+        ;;
+    esac
+    shift # past argument or value
+done
+
+if [[ $(whoami) != "root" ]]; then
+    echo "Error: This script must be run as root!"
+    exit 1
+fi
+
+if [ -z ${PROJECT+x} ]; then
+    echo "project must be specified"
+    exit 1
+fi
+
+if [ "$PROJECT" != "functest" ] && [ "$PROJECT" != "qtip" ]; then
+    echo "unsupported project $PROJECT"
+    exit 1
+fi
+
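+# Stage the project-specific formatter and testcase list, then install
+# the dashboard package (pbr-based setup.py).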
+cp -f dashboard/$PROJECT/format.py dashboard/mongo2elastic
+cp -f dashboard/$PROJECT/testcases.yaml etc/
+python setup.py install
diff --git a/utils/test/dashboard/setup.cfg b/utils/test/dashboard/setup.cfg
new file mode 100644 (file)
index 0000000..dd01358
--- /dev/null
@@ -0,0 +1,43 @@
+[metadata]
+name = dashboard
+summary = Test Result Collector
+description-file =
+    README.rst
+author = SerenaFeng
+author-email = feng.xiaowei@zte.com.cn
+#home-page = http://www.opnfv.org/
+classifier =
+    Environment :: opnfv
+    Intended Audience :: Information Technology
+    Intended Audience :: System Administrators
+    License :: OSI Approved :: Apache Software License
+    Operating System :: POSIX :: Linux
+    Programming Language :: Python
+    Programming Language :: Python :: 2
+    Programming Language :: Python :: 2.7
+
+[global]
+setup-hooks =
+    pbr.hooks.setup_hook
+
+[files]
+packages =
+    dashboard
+package_data =
+    dashboard =
+        elastic2kibana/templates/*.*
+data_files =
+    /etc/dashboard =
+        etc/config.ini
+        etc/testcases.yaml
+
+[entry_points]
+console_scripts =
+    dashboard_mongo2elastic = dashboard.mongo2elastic.main:main
+    dashboard_elastic2kibana = dashboard.elastic2kibana.main:main
+
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/utils/test/dashboard/setup.py b/utils/test/dashboard/setup.py
new file mode 100644 (file)
index 0000000..59637a5
--- /dev/null
@@ -0,0 +1,8 @@
+import setuptools
+
+__author__ = 'serena'
+
+
+setuptools.setup(
+    setup_requires=['pbr>=1.8'],
+    pbr=True)
index b527b78..653448e 100755 (executable)
@@ -82,6 +82,10 @@ for version in conf.versions:
         items = {}
         scenario_result_criteria = {}
 
+        scenario_file_name = (conf.REPORTING_PATH +
+                              "/functest/release/" + version +
+                              "/scenario_history.txt")
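+        # Consolidated history file for this version; per-installer
+        # CSV subsets are derived from it further below.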
+
         # For all the scenarios get results
         for s, s_result in scenario_results.items():
             logger.info("---------------------------------")
@@ -185,11 +189,8 @@ for version in conf.versions:
                 scenario_criteria = conf.MAX_SCENARIO_CRITERIA
 
             s_score = str(scenario_score) + "/" + str(scenario_criteria)
-            s_score_percent = 0.0
-            try:
-                s_score_percent = float(scenario_score) / float(scenario_criteria) * 100
-            except:
-                logger.error("cannot calculate the score percent")
+            s_score_percent = utils.getScenarioPercent(scenario_score,
+                                                       scenario_criteria)
 
             s_status = "KO"
             if scenario_score < scenario_criteria:
@@ -209,10 +210,7 @@ for version in conf.versions:
                     f.write(info)
 
             # Save daily results in a file
-            path_validation_file = (conf.REPORTING_PATH +
-                                    "/functest/release/" + version +
-                                    "/scenario_history.txt")
-            with open(path_validation_file, "a") as f:
+            with open(scenario_file_name, "a") as f:
                 info = (reportingDate + "," + s + "," + installer +
                         "," + s_score + "," +
                         str(round(s_score_percent)) + "\n")
@@ -239,34 +237,39 @@ for version in conf.versions:
                                      version=version,
                                      date=reportingDate)
 
-    with open(conf.REPORTING_PATH + "/functest/release/" + version +
-              "/index-status-" + installer + ".html", "wb") as fh:
-        fh.write(outputText)
+        # csv
+        # generate sub files based on scenario_history.txt
+        scenario_installer_file_name = (conf.REPORTING_PATH +
+                                        "/functest/release/" + version +
+                                        "/scenario_history_" + installer +
+                                        ".txt")
+        scenario_installer_file = open(scenario_installer_file_name, "a")
+        logger.info("Generate CSV...")
+        with open(scenario_file_name, "r") as f:
+            for line in f:
+                if installer in line:
+                    logger.debug("Add new line... %s" % line)
+                    scenario_installer_file.write(line)
+        scenario_installer_file.close()
+
+        with open(conf.REPORTING_PATH + "/functest/release/" + version +
+                  "/index-status-" + installer + ".html", "wb") as fh:
+            fh.write(outputText)
+        logger.info("CSV generated...")
 
-    # Generate outputs for export
-    # pdf
-    try:
-        pdf_path = ("http://testresults.opnfv.org/reporting/" +
-                    "functest/release/" + version +
-                    "/index-status-" + installer + ".html")
-        pdf_doc_name = (conf.REPORTING_PATH +
-                        "/functest/release/" + version +
-                        "/status-" + installer + ".pdf")
-        pdfkit.from_url(pdf_path, pdf_doc_name)
-    except IOError:
-        logger.info("pdf generated anyway...")
-    except:
-        logger.error("impossible to generate PDF")
-    # csv
-    # generate sub files based on scenario_history.txt
-    scenario_installer_file_name = (conf.REPORTING_PATH +
-                                    "/functest/release/" + version +
-                                    "/scenario_history_" +
-                                    installer + ".txt")
-    scenario_installer_file = open(scenario_installer_file_name, "w")
-    with open(path_validation_file, "r") as f:
-        for line in f:
-            if installer in line:
-                scenario_installer_file.write(line)
-    scenario_installer_file.close
+        # Generate outputs for export
+        # pdf
+        logger.info("Generate PDF...")
+        try:
+            pdf_path = ("http://testresults.opnfv.org/reporting/" +
+                        "functest/release/" + version +
+                        "/index-status-" + installer + ".html")
+            pdf_doc_name = (conf.REPORTING_PATH +
+                            "/functest/release/" + version +
+                            "/status-" + installer + ".pdf")
+            pdfkit.from_url(pdf_path, pdf_doc_name)
+            logger.info("PDF generated...")
+        except IOError:
+            logger.info("pdf generated anyway...")
+        except:
+            logger.error("impossible to generate PDF")
index 9ba02e8..74d6f19 100644 (file)
@@ -176,3 +176,11 @@ def getJenkinsUrl(build_tag):
         print 'Impossible to get jenkins url:'
 
     return jenkins_url
+
+
+def getScenarioPercent(scenario_score, scenario_criteria):
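+    # Return scenario_score / scenario_criteria as a percentage,
+    # falling back to 0.0 when the division is not possible.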
+    score = 0.0
+    try:
+        score = float(scenario_score) / float(scenario_criteria) * 100
+    except:
+        print 'Impossible to calculate the percentage score'
+    return score
index 60f1523..49809e9 100644 (file)
@@ -8,10 +8,7 @@
 #
 import datetime
 import jinja2
-import requests
-import sys
-import time
-import yaml
+import os
 
 import reportingUtils as utils
 import reportingConf as conf
@@ -20,6 +17,7 @@ from scenarios import config as cf
 
 # Logger
 logger = utils.getLogger("Yardstick-Status")
+reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
 
 logger.info("*******************************************")
 logger.info("*   Generating reporting scenario status  *")
@@ -35,21 +33,23 @@ for version in conf.versions:
         # get scenarios results data
         scenario_results = utils.getScenarioStatus(installer, version)
         if 'colorado' == version:
-            stable_result = utils.getScenarioStatus(installer, 'stable/colorado')
-            for k,v in stable_result.items():
-                if not scenario_results.has_key(k):
+            stable_result = utils.getScenarioStatus(installer,
+                                                    'stable/colorado')
+            for k, v in stable_result.items():
+                if k not in scenario_results.keys():
                     scenario_results[k] = []
                 scenario_results[k] += stable_result[k]
         scenario_result_criteria = {}
 
         for s in scenario_results.keys():
-            if cf.has_key(installer) and cf[installer].has_key(s):
+            if installer in cf.keys() and s in cf[installer].keys():
                 scenario_results.pop(s)
 
         # From each scenarios get results list
         for s, s_result in scenario_results.items():
             logger.info("---------------------------------")
-            logger.info("installer %s, version %s, scenario %s:" % (installer, version, s))
+            logger.info("installer %s, version %s, scenario %s:" % (installer,
+                                                                    version, s))
 
             ten_criteria = len(s_result)
             ten_score = 0
@@ -62,15 +62,38 @@ for version in conf.versions:
             for v in four_result:
                 four_score += v
 
-            s_status = str(utils.get_status(four_result, s_result))
+            s_status = str(utils.get_percent(four_result, s_result))
             s_four_score = str(four_score) + '/' + str(four_criteria)
             s_ten_score = str(ten_score) + '/' + str(ten_criteria)
-            scenario_result_criteria[s] = sr.ScenarioResult(s_status, s_four_score, s_ten_score)
+            s_score_percent = utils.get_percent(four_result, s_result)
 
             if '100' == s_status:
                 logger.info(">>>>> scenario OK, save the information")
             else:
-                logger.info(">>>> scenario not OK, last 4 iterations = %s, last 10 days = %s" % (s_four_score, s_ten_score))
+                logger.info(">>>> scenario not OK, last 4 iterations = %s, \
+                            last 10 days = %s" % (s_four_score, s_ten_score))
+
+            # Save daily results in a file
+            path_validation_file = (conf.REPORTING_PATH +
+                                    "/release/" + version +
+                                    "/scenario_history.txt")
+
+            if not os.path.exists(path_validation_file):
+                with open(path_validation_file, 'w') as f:
+                    info = 'date,scenario,installer,details,score\n'
+                    f.write(info)
+
+            with open(path_validation_file, "a") as f:
+                info = (reportingDate + "," + s + "," + installer +
+                        "," + s_ten_score + "," +
+                        str(s_score_percent) + "\n")
+                f.write(info)
+
+            scenario_result_criteria[s] = sr.ScenarioResult(s_status,
+                                                            s_four_score,
+                                                            s_ten_score,
+                                                            s_score_percent)
+
             logger.info("--------------------------")
 
         templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
@@ -82,7 +105,8 @@ for version in conf.versions:
         outputText = template.render(scenario_results=scenario_result_criteria,
                                      installer=installer,
                                      period=conf.PERIOD,
-                                     version=version)
+                                     version=version,
+                                     date=reportingDate)
 
         with open(conf.REPORTING_PATH + "/release/" + version +
                   "/index-status-" + installer + ".html", "wb") as fh:
index 71eb919..ec9ed76 100644 (file)
@@ -32,7 +32,7 @@ def getLogger(module):
 def getScenarioStatus(installer, version):
     url = (conf.URL_BASE + "?case=" + "scenario_status" +
            "&installer=" + installer +
-           "&version=" + version +"&period=" + str(conf.PERIOD))
+           "&version=" + version + "&period=" + str(conf.PERIOD))
     request = Request(url)
 
     try:
@@ -53,7 +53,7 @@ def getScenarioStatus(installer, version):
                     scenario_results[r['scenario']] = []
                 scenario_results[r['scenario']].append(r)
 
-        for k,v in scenario_results.items():
+        for k, v in scenario_results.items():
             # scenario_results[k] = v[:conf.LASTEST_TESTS]
             s_list = []
             for element in v:
@@ -66,20 +66,25 @@ def getScenarioStatus(installer, version):
     # return scenario_results
     return result_dict
 
+
 def subfind(given_list, pattern_list):
+
     for i in range(len(given_list)):
-        if given_list[i] == pattern_list[0] and given_list[i:i + conf.LASTEST_TESTS] == pattern_list:
+        if given_list[i] == pattern_list[0] and \
+                given_list[i:i + conf.LASTEST_TESTS] == pattern_list:
             return True
     return False
 
-def get_percent(status):
-    
+
+def _get_percent(status):
+
     if status * 100 % 6:
         return round(float(status) * 100 / 6, 1)
     else:
         return status * 100 / 6
 
-def get_status(four_list, ten_list):
+
+def get_percent(four_list, ten_list):
     four_score = 0
     ten_score = 0
 
@@ -97,13 +102,13 @@ def get_status(four_list, ten_list):
     else:
         status = four_score + 1
 
-    return get_percent(status)
+    return _get_percent(status)
 
 
 def _test():
     status = getScenarioStatus("compass", "master")
     print "status:++++++++++++++++++++++++"
-    print json.dumps(status,indent=4)
+    print json.dumps(status, indent=4)
 
 
 if __name__ == '__main__':    # pragma: no cover
index 61ffb2c..1f7eb2b 100644 (file)
@@ -9,10 +9,12 @@
 
 
 class ScenarioResult(object):
-    def __init__(self, status, four_days_score='', ten_days_score=''):
+    def __init__(self, status, four_days_score='', ten_days_score='',
+                 score_percent=0.0):
         self.status = status
         self.four_days_score = four_days_score
         self.ten_days_score = ten_days_score
+        self.score_percent = score_percent
 
     def getStatus(self):
         return self.status
@@ -22,3 +24,6 @@ class ScenarioResult(object):
 
     def getFourDaysScore(self):
         return self.four_days_score
+
+    def getScorePercent(self):
+        return self.score_percent
index 602ce8a..5a4dc34 100644 (file)
@@ -3,9 +3,56 @@
     <meta charset="utf-8">
     <!-- Bootstrap core CSS -->
     <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet">
-    <link href="default.css" rel="stylesheet">
+    <link href="../../../css/default.css" rel="stylesheet">
     <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
     <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
+    <script type="text/javascript" src="http://d3js.org/d3.v2.min.js"></script>
+    <script type="text/javascript" src="../../../js/gauge.js"></script>
+    <script type="text/javascript" src="../../../js/trend.js"></script>
+    <script>
+        function onDocumentReady() {
+            // Gauge management
+            {% for scenario in scenario_results.keys() -%}
+            var gaugeScenario{{loop.index}} = gauge('#gaugeScenario{{loop.index}}');
+            {%- endfor %}
+            // assign success rate to the gauge
+            function updateReadings() {
+                {% for scenario in scenario_results.keys() -%}
+                 gaugeScenario{{loop.index}}.update({{scenario_results[scenario].getScorePercent()}});
+                 {%- endfor %}
+            }
+            updateReadings();
+        }
+
+        // trend line management
+        d3.csv("./scenario_history.txt", function(data) {
+            // ***************************************
+            // Create the trend line
+            {% for scenario in scenario_results.keys() -%}
+            // for scenario {{scenario}}
+            // Filter results
+                var trend{{loop.index}} = data.filter(function(row) {
+                    return row["scenario"]=="{{scenario}}" && row["installer"]=="{{installer}}";
+                })
+            // Parse the date
+            trend{{loop.index}}.forEach(function(d) {
+                d.date = parseDate(d.date);
+                d.score = +d.score
+            });
+            // Draw the trend line
+            var mytrend = trend("#trend_svg{{loop.index}}",trend{{loop.index}})
+            // ****************************************
+            {%- endfor %}
+        });
+        if ( !window.isLoaded ) {
+            window.addEventListener("load", function() {
+            onDocumentReady();
+            }, false);
+        } else {
+            onDocumentReady();
+        }
+    </script>
     <script type="text/javascript">
     $(document).ready(function (){
         $(".btn-more").click(function() {
     <body>
     <div class="container">
       <div class="masthead">
-        <h3 class="text-muted">Yardstick status page ({{version}})</h3>
+          <h3 class="text-muted">Yardstick status page ({{version}}, {{date}})</h3>
         <nav>
           <ul class="nav nav-justified">
-            <li class="active"><a href="index.html">Home</a></li>
+            <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
             <li><a href="index-status-apex.html">Apex</a></li>
             <li><a href="index-status-compass.html">Compass</a></li>
             <li><a href="index-status-fuel.html">Fuel</a></li>
                     <tr>
                         <th width="40%">Scenario</th>
                         <th width="20%">Status</th>
-                        <th width="20%">Last 4 Iterations</th>
-                        <th width="20%">Last 10 Days</th>
+                        <th width="20%">Trend</th>
+                        <th width="10%">Last 4 Iterations</th>
+                        <th width="10%">Last 10 Days</th>
                     </tr>
                         {% for scenario,result in scenario_results.iteritems() -%}
                             <tr class="tr-ok">
                                 <td>{{scenario}}</td>
-                                <td>
-                                    <img src="../../img/gauge_{{ scenario_results[scenario].getStatus() }}.png">
-                                </td>
+                                <td><div id="gaugeScenario{{loop.index}}"></div></td>
+                                <td><div id="trend_svg{{loop.index}}"></div></td>
                                 <td>{{scenario_results[scenario].getFourDaysScore()}}</td>
                                 <td>{{scenario_results[scenario].getTenDaysScore()}}</td>
                             </tr>
similarity index 97%
rename from utils/test/result_collection_api/README.rst
rename to utils/test/testapi/README.rst
index c0075bc..44ab2a4 100644 (file)
@@ -25,7 +25,7 @@ How to install
 
 From within your environment, just run:
 
-    python setup.py install
+    ./install.sh
 
 How to run
 ^^^^^^^^^^
similarity index 96%
rename from utils/test/result_collection_api/docker/Dockerfile
rename to utils/test/testapi/docker/Dockerfile
index ffee4c2..b0272e6 100644 (file)
@@ -46,7 +46,7 @@ RUN pip install --upgrade pip
 RUN git config --global http.sslVerify false
 RUN git clone https://gerrit.opnfv.org/gerrit/releng /home/releng
 
-WORKDIR /home/releng/utils/test/result_collection_api/
+WORKDIR /home/releng/utils/test/testapi/
 RUN pip install -r requirements.txt
 RUN python setup.py install
 CMD ["bash", "docker/start-server.sh"]
diff --git a/utils/test/testapi/install.sh b/utils/test/testapi/install.sh
new file mode 100755 (executable)
index 0000000..43229ea
--- /dev/null
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+usage="
+Script to install opnfv_tesgtapi automatically.
+This script should be run under root.
+
+usage:
+    bash $(basename "$0") [-h|--help] [-t <test_name>]
+
+where:
+    -h|--help         show this help text"
+
+if [[ $(whoami) != "root" ]]; then
+    echo "Error: This script must be run as root!"
+    exit 1
+fi
+
+cp -fr 3rd_party/static opnfv_testapi/tornado_swagger
+python setup.py install
+rm -fr opnfv_testapi/tornado_swagger/static
diff --git a/utils/test/testapi/update/templates/__init__.py b/utils/test/testapi/update/templates/__init__.py
new file mode 100644 (file)
index 0000000..e69de29