Merge "Push Functest API doc to artifact"
authorJose Lausuch <jose.lausuch@ericsson.com>
Fri, 7 Jul 2017 08:44:15 +0000 (08:44 +0000)
committerGerrit Code Review <gerrit@opnfv.org>
Fri, 7 Jul 2017 08:44:15 +0000 (08:44 +0000)
67 files changed:
jjb/barometer/barometer-build.sh [new file with mode: 0644]
jjb/barometer/barometer-upload-artifact.sh [new file with mode: 0644]
jjb/barometer/barometer.yml
jjb/ci_gate_security/anteater-security-audit-weekly.sh
jjb/ci_gate_security/anteater-security-audit.sh
jjb/ci_gate_security/opnfv-ci-gate-security.yml
jjb/compass4nfv/compass-ci-jobs.yml
jjb/daisy4nfv/daisy-daily-jobs.yml
jjb/daisy4nfv/daisy-project-jobs.yml
jjb/doctor/doctor.yml
jjb/dovetail/dovetail-ci-jobs.yml
jjb/dovetail/dovetail-cleanup.sh
jjb/dovetail/dovetail-run.sh
jjb/functest/functest-daily-jobs.yml
jjb/functest/functest-loop.sh
jjb/functest/functest-project-jobs.yml
jjb/functest/functest-suite.sh
jjb/functest/set-functest-env.sh
jjb/global/releng-macros.yml
jjb/global/slave-params.yml
jjb/netready/netready.yml
jjb/releng/automate.yml [moved from jjb/releng/testapi-automate.yml with 77% similarity]
jjb/releng/docker-deploy.sh [moved from jjb/releng/testapi-docker-deploy.sh with 63% similarity]
jjb/releng/docker-update.sh [new file with mode: 0644]
jjb/releng/opnfv-docker.sh
jjb/releng/opnfv-docker.yml
jjb/releng/testapi-docker-update.sh [deleted file]
jjb/storperf/storperf.yml
jjb/xci/bifrost-periodic-jobs.yml
jjb/xci/osa-periodic-jobs.yml
jjb/yardstick/yardstick-daily-jobs.yml
jjb/yardstick/yardstick-daily.sh
prototypes/xci/README.rst
prototypes/xci/config/pinned-versions
prototypes/xci/file/ansible-role-requirements.yml
prototypes/xci/scripts/update-osa-version-files.sh [new file with mode: 0755]
prototypes/xci/xci-deploy.sh
utils/fetch_os_creds.sh
utils/jenkins-jnlp-connect.sh
utils/push-test-logs.sh
utils/test/reporting/docker/reporting.sh
utils/test/reporting/docker/requirements.pip
utils/test/reporting/docker/supervisor.conf
utils/test/reporting/functest/reporting-status.py
utils/test/reporting/pages/angular.sh
utils/test/reporting/pages/app/index.html
utils/test/reporting/pages/app/scripts/app.config.js [deleted file]
utils/test/reporting/pages/app/scripts/controllers/table.controller.js
utils/test/reporting/pages/app/scripts/controllers/testvisual.controller.js
utils/test/reporting/pages/app/scripts/factory/table.factory.js
utils/test/reporting/pages/app/views/commons/testCaseVisual.html
utils/test/reporting/pages/config.sh [new file with mode: 0755]
utils/test/reporting/run_test.sh [new file with mode: 0755]
utils/test/reporting/run_unit_tests.sh [deleted file]
utils/test/reporting/utils/reporting_utils.py
utils/test/testapi/3rd_party/static/testapi-ui/app.js
utils/test/testapi/3rd_party/static/testapi-ui/components/results/resultsController.js
utils/test/testapi/deployment/deploy.py
utils/test/testapi/deployment/docker-compose.yml.template
utils/test/testapi/docker/Dockerfile
utils/test/testapi/docker/prepare-env.sh
utils/test/testapi/etc/config.ini
utils/test/testapi/htmlize/htmlize.py
utils/test/testapi/opnfv_testapi/common/config.py
utils/test/testapi/opnfv_testapi/resources/handlers.py
utils/test/testapi/opnfv_testapi/resources/result_handlers.py
utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py

diff --git a/jjb/barometer/barometer-build.sh b/jjb/barometer/barometer-build.sh
new file mode 100644 (file)
index 0000000..e40841b
--- /dev/null
@@ -0,0 +1,21 @@
+set -x
+
+OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+OPNFV_ARTIFACT_URL="$GS_URL/$OPNFV_ARTIFACT_VERSION/"
+
+# log info to console
+echo "Starting the build of Barometer RPMs"
+echo "------------------------------------"
+echo
+
+cd ci
+./install_dependencies.sh
+./build_rpm.sh
+cd $WORKSPACE
+
+# save information regarding artifact into file
+(
+    echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
+    echo "OPNFV_ARTIFACT_URL=$OPNFV_ARTIFACT_URL"
+) > $WORKSPACE/opnfv.properties
+
diff --git a/jjb/barometer/barometer-upload-artifact.sh b/jjb/barometer/barometer-upload-artifact.sh
new file mode 100644 (file)
index 0000000..817cc57
--- /dev/null
@@ -0,0 +1,46 @@
+#!/bin/bash
+set -o nounset
+set -o pipefail
+
+RPM_WORKDIR=$WORKSPACE/rpmbuild
+RPM_DIR=$RPM_WORKDIR/RPMS/x86_64/
+cd $WORKSPACE/
+
+# source the opnfv.properties to get ARTIFACT_VERSION
+source $WORKSPACE/opnfv.properties
+
+# upload property files
+gsutil cp $WORKSPACE/opnfv.properties gs://$OPNFV_ARTIFACT_URL/opnfv.properties > gsutil.properties.log 2>&1
+gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
+
+echo "Uploading the barometer RPMs to artifacts.opnfv.org"
+echo "---------------------------------------------------"
+echo
+
+gsutil -m cp -r $RPM_DIR/* gs://$OPNFV_ARTIFACT_URL > $WORKSPACE/gsutil.log 2>&1
+
+# Check if the RPMs were pushed
+gsutil ls gs://$OPNFV_ARTIFACT_URL > /dev/null 2>&1
+if [[ $? -ne 0 ]]; then
+  echo "Problem while uploading barometer RPMs to gs://$OPNFV_ARTIFACT_URL!"
+  echo "Check log $WORKSPACE/gsutil.log on the appropriate build server"
+  exit 1
+fi
+
+gsutil -m setmeta \
+    -h "Cache-Control:private, max-age=0, no-transform" \
+    gs://$OPNFV_ARTIFACT_URL/*.rpm > /dev/null 2>&1
+
+gsutil -m setmeta \
+    -h "Content-Type:text/html" \
+    -h "Cache-Control:private, max-age=0, no-transform" \
+    gs://$GS_URL/latest.properties \
+    gs://$OPNFV_ARTIFACT_URL/opnfv.properties > /dev/null 2>&1
+
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
+echo "Artifact is available at $OPNFV_ARTIFACT_URL"
+
+#cleanup the RPM repo from the build machine.
+rm -rf $RPM_WORKDIR
index 68b8a04..2d3e972 100644 (file)
         - shell: |
             pwd
             cd src
-            ./install_build_deps.sh
             make clobber
             make
 
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
+        - barometer-project-parameter:
+            gs-pathname: '{gs-pathname}'
         - 'opnfv-build-centos-defaults'
 
     scm:
          - timed: '@midnight'
 
     builders:
-        - shell: |
-            pwd
-            cd ci
-            ./install_dependencies.sh
-            ./build_rpm.sh
+        - shell:
+            !include-raw-escape: ./barometer-build.sh
+        - shell:
+            !include-raw-escape: ./barometer-upload-artifact.sh
+
+########################
+# parameter macros
+########################
+- parameter:
+    name: barometer-project-parameter
+    parameters:
+        - string:
+            name: GS_URL
+            default: '$GS_BASE{gs-pathname}'
+            description: "URL to Google Storage."
index 99f2820..436a173 100644 (file)
@@ -7,7 +7,7 @@ echo "Pulling releng-anteater docker image"
 echo "--------------------------------------------------------"
 docker pull opnfv/releng-anteater
 echo "--------------------------------------------------------"
-cmd="docker run --user nobody -id $vols opnfv/releng-anteater /bin/bash"
+cmd="docker run -id $vols opnfv/releng-anteater /bin/bash"
 echo "Running docker command $cmd"
 container_id=$($cmd)
 echo "Container ID is $container_id"
index d5c0e40..9bd3cc3 100644 (file)
@@ -15,18 +15,14 @@ echo "--------------------------------------------------------"
 docker pull opnfv/releng-anteater
 echo "--------------------------------------------------------"
 
-cmd="sudo docker run --privileged=true -id $envs $vols opnfv/releng-anteater /bin/bash"
-echo "Running docker command $cmd"
-container_id=$($cmd)
-echo "Container ID is $container_id"
-cmd="anteater --project $PROJECT --patchset /home/opnfv/anteater/$PROJECT/patchset"
-echo "Executing command inside container"
+cmd="docker run -i $envs $vols --rm opnfv/releng-anteater \
+/home/opnfv/venv/bin/anteater --project $PROJECT --patchset /home/opnfv/anteater/$PROJECT/patchset"
+echo "Running docker container"
 echo "$cmd"
-echo "--------------------------------------------------------"
-docker exec $container_id $cmd > $WORKSPACE/securityaudit.log 2>&1
+$cmd > $WORKSPACE/securityaudit.log 2>&1
 exit_code=$?
 echo "--------------------------------------------------------"
-echo "Stopping docker container with ID $container_id"
-docker stop $container_id
+echo "Docker container exited with code: $exit_code"
+echo "--------------------------------------------------------"
 cat securityaudit.log
 exit 0
index 2cbb5cd..e2f6ceb 100644 (file)
@@ -36,7 +36,7 @@
             branch: '{branch}'
 
     triggers:
-        - timed: '0 H/6 * * *'
+        - timed: '@weekly'
 
     builders:
         - anteater-security-audit-weekly
@@ -77,7 +77,7 @@
                     comment-contains-value: 'reverify'
             projects:
               - project-compare-type: 'REG_EXP'
-                project-pattern: 'sandbox|releng'
+                project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|cooper|functest|octopus|pharos|releng|sandbox'
                 branches:
                   - branch-compare-type: 'ANT'
                     branch-pattern: '**/{branch}'
             !include-raw:
                 - ./anteater-clone-all-repos.sh
                 - ./anteater-security-audit-weekly.sh
+
index f4f49b6..09ef441 100644 (file)
                 unstable-threshold: 'FAILURE'
         # dovetail only master by now, not sync with A/B/C branches
         # here the stream means the SUT stream, dovetail stream is defined in its own job
-        # only run on os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha scenario
+        # only run on os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha scenario
+        # run against SUT master branch, dovetail docker image with latest tag
+        # run against SUT danube branch, dovetail docker image with latest tag(odd days)and cvp.X.X.X tag(even days)
         - conditional-step:
-            condition-kind: regex-match
-            regex: os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha
-            label: '{scenario}'
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: danube
+                  label: '{stream}'
+                - condition-kind: regex-match
+                  regex: os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: day-of-week
+                  day-selector: select-days
+                  days:
+                      MON: true
+                      WED: true
+                      FRI: true
+                      SUN: true
+                  use-build-time: true
             steps:
                 - trigger-builds:
-                    - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
+                    - project: 'dovetail-compass-{pod}-proposed_tests-master'
                       current-parameters: false
                       predefined-parameters:
                         DEPLOY_SCENARIO={scenario}
                         build-step-failure-threshold: 'never'
                         failure-threshold: 'never'
                         unstable-threshold: 'FAILURE'
+        - conditional-step:
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: danube
+                  label: '{stream}'
+                - condition-kind: regex-match
+                  regex: os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: day-of-week
+                  day-selector: select-days
+                  days:
+                      TUES: true
+                      THURS: true
+                      SAT: true
+                  use-build-time: true
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-compass-{pod}-proposed_tests-danube'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
+        - conditional-step:
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: regex-match
+                  regex: master
+                  label: '{stream}'
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-compass-{pod}-proposed_tests-master'
 
 - job-template:
     name: 'compass-deploy-{pod}-daily-{stream}'
 - trigger:
     name: 'compass-os-onos-nofeature-ha-baremetal-centos-master-trigger'
     triggers:
-        - timed: '0 7 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-ocl-nofeature-ha-baremetal-centos-master-trigger'
     triggers:
-        - timed: '0 11 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-onos-sfc-ha-baremetal-centos-master-trigger'
     triggers:
-        - timed: '0 3 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-baremetal-centos-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-nosdn-openo-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 3 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l2-nofeature-ha-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-onos-nofeature-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 14 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-ocl-nofeature-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 10 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-onos-sfc-ha-baremetal-master-trigger'
     triggers:
-        - timed: '0 6 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-baremetal-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-ocl-nofeature-ha-baremetal-danube-trigger'
     triggers:
-        - timed: '0 5 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-onos-sfc-ha-baremetal-danube-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 5 * * *'
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-baremetal-danube-trigger'
     triggers:
 - trigger:
     name: 'compass-os-nosdn-openo-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 22 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l2-nofeature-ha-virtual-master-trigger'
     triggers:
 - trigger:
     name: 'compass-os-onos-nofeature-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 18 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-ocl-nofeature-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 16 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-onos-sfc-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 15 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-odl_l2-moon-ha-virtual-master-trigger'
     triggers:
-        - timed: '0 14 * * *'
+        - timed: ''
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-virtual-master-trigger'
     triggers:
index aac76ba..592e54d 100644 (file)
         # NOHA scenarios
         - 'os-nosdn-nofeature-noha':
             auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
+        # ODL_L3 scenarios
+        - 'os-odl_l3-nofeature-noha':
+            auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
+        # ODL_L2 scenarios
+        - 'os-odl_l2-nofeature-noha':
+            auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
 
     jobs:
         - '{project}-{scenario}-{pod}-daily-{stream}'
             installer: '{installer}'
         - string:
             name: DEPLOY_SCENARIO
-            default: 'os-nosdn-nofeature-ha'
+            default: 'os-nosdn-nofeature-noha'
         - 'daisy-project-parameter':
             gs-pathname: '{gs-pathname}'
         - string:
 # NOHA Scenarios
 - trigger:
     name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
+    triggers:
+        - timed: 'H 12 * * *'
+# ODL_L3 Scenarios
+- trigger:
+    name: 'daisy-os-odl_l3-nofeature-noha-baremetal-daily-master-trigger'
+    triggers:
+        - timed: 'H 16 * * *'
+# ODL_L2 Scenarios
+- trigger:
+    name: 'daisy-os-odl_l2-nofeature-noha-baremetal-daily-master-trigger'
     triggers:
         - timed: ''
 #-----------------------------------------------
 - trigger:
     name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: 'H 8,22 * * *'
+        - timed: 'H 12 * * *'
+# ODL_L3 Scenarios
+- trigger:
+    name: 'daisy-os-odl_l3-nofeature-noha-virtual-daily-master-trigger'
+    triggers:
+        - timed: 'H 16 * * *'
+# ODL_L3 Scenarios
+- trigger:
+    name: 'daisy-os-odl_l2-nofeature-noha-virtual-daily-master-trigger'
+    triggers:
+        - timed: ''
 
index 0a9d43d..57e44e3 100644 (file)
             branch: '{stream}'
             gs-pathname: ''
             disabled: false
-        - danube:
-            branch: 'stable/{stream}'
-            gs-pathname: '/{stream}'
-            disabled: false
 
     phase:
         - 'build':
@@ -64,7 +60,7 @@
         - git-scm
 
     triggers:
-        - timed: '0 H/8 * * *'
+        - timed: '0 8 * * *'
 
     parameters:
         - project-parameter:
             description: 'Git URL to use on this Jenkins Slave'
         - string:
             name: DEPLOY_SCENARIO
-            default: 'os-nosdn-nofeature-ha'
+            default: 'os-nosdn-nofeature-noha'
         - '{installer}-project-parameter':
             gs-pathname: '{gs-pathname}'
 
index eb230b5..c5454c7 100644 (file)
 
     installer:
         - apex:
-            slave-label: 'ool-virtual1'
-            pod: 'ool-virtual1'
+            slave-label: 'doctor-apex-verify'
         - fuel:
-            slave-label: 'ool-virtual2'
-            pod: 'ool-virtual2'
+            slave-label: 'doctor-fuel-verify'
         #- joid:
         #    slave-label: 'ool-virtual3'
         #    pod: 'ool-virtual3'
         - verify:
             profiler: 'none'
             auto-trigger-name: 'doctor-verify'
+            is-python: false
         - profiling:
             profiler: 'poc'
             auto-trigger-name: 'experimental'
+            is-python: false
+        - python-verify:
+            profiler: 'none'
+            auto-trigger-name: 'doctor-verify'
+            is-python: true
 
     pod:
         - arm-pod2:
             default: 'doctor-notification'
         - string:
             name: TESTCASE_OPTIONS
-            default: '-e INSPECTOR_TYPE={inspector} -e PROFILER_TYPE={profiler} -v $WORKSPACE:/home/opnfv/repos/doctor'
+            default: '-e INSPECTOR_TYPE={inspector} -e PROFILER_TYPE={profiler} -e PYTHON_ENABLE={is-python} -v $WORKSPACE:/home/opnfv/repos/doctor'
             description: 'Addtional parameters specific to test case(s)'
         # functest-parameter
         - string:
index 682948d..43978f6 100644 (file)
@@ -25,7 +25,7 @@
         branch: 'stable/{stream}'
         dovetail-branch: master
         gs-pathname: '/{stream}'
-        docker-tag: 'latest'
+        docker-tag: 'cvp.0.2.0'
 
 #-----------------------------------
 # POD, PLATFORM, AND BRANCH MAPPING
             name: DOVETAIL_REPO_DIR
             default: "/home/opnfv/dovetail"
             description: "Directory where the dovetail repository is cloned"
+        - string:
+            name: SUT_BRANCH
+            default: '{branch}'
+            description: "SUT branch"
 
     scm:
         - git-scm
index 0ee789a..3ae0cbc 100755 (executable)
@@ -1,4 +1,11 @@
 #!/bin/bash
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
 
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
index dce7e58..85bc54d 100755 (executable)
@@ -1,4 +1,11 @@
 #!/bin/bash
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
 
 #the noun INSTALLER is used in community, here is just the example to run.
 #multi-platforms are supported.
@@ -7,14 +14,12 @@ set -e
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
 DOVETAIL_HOME=${WORKSPACE}/cvp
-if [ -d ${DOVETAIL_HOME} ]; then
-    sudo rm -rf ${DOVETAIL_HOME}/*
-else
-    sudo mkdir -p ${DOVETAIL_HOME}
-fi
+[ -d ${DOVETAIL_HOME} ] && sudo rm -rf ${DOVETAIL_HOME}
+
+mkdir -p ${DOVETAIL_HOME}
 
 DOVETAIL_CONFIG=${DOVETAIL_HOME}/pre_config
-sudo mkdir -p ${DOVETAIL_CONFIG}
+mkdir -p ${DOVETAIL_CONFIG}
 
 sshkey=""
 # The path of openrc.sh is defined in fetch_os_creds.sh
@@ -47,7 +52,12 @@ releng_repo=${WORKSPACE}/releng
 git clone https://gerrit.opnfv.org/gerrit/releng ${releng_repo} >/dev/null
 
 if [[ ${INSTALLER_TYPE} != 'joid' ]]; then
-    sudo /bin/bash ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} >${redirect}
+    echo "SUT branch is $SUT_BRANCH"
+    echo "dovetail branch is $BRANCH"
+    BRANCH_BACKUP=$BRANCH
+    export BRANCH=$SUT_BRANCH
+    ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} >${redirect}
+    export BRANCH=$BRANCH_BACKUP
 fi
 
 if [[ -f $OPENRC ]]; then
@@ -102,7 +112,8 @@ if [ "$INSTALLER_TYPE" == "fuel" ]; then
 fi
 
 # sdnvpn test case needs to download this image first before running
-sudo wget -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_CONFIG}
+echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
+wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_CONFIG}
 
 opts="--privileged=true -id"
 
index 8de092d..fdef6f4 100644 (file)
             installer: fuel
             <<: *danube
 # PODs for verify jobs triggered by each patch upload
-        - ool-virtual1:
-            slave-label: '{pod}'
-            installer: apex
-            <<: *master
+#        - ool-virtual1:
+#            slave-label: '{pod}'
+#            installer: apex
+#            <<: *master
 #--------------------------------
 
     testsuite:
index 6768906..00a5f13 100755 (executable)
@@ -2,7 +2,11 @@
 set +e
 
 [[ "$PUSH_RESULTS_TO_DB" == "true" ]] && flags+="-r"
-cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t all ${flags}"
+if [ "$BRANCH" == 'master' ]; then
+    cmd="run_tests -t all ${flags}"
+else
+    cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t all ${flags}"
+fi
 
 container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
 docker exec $container_id $cmd
index d9e94f0..353423d 100644 (file)
@@ -19,7 +19,7 @@
         - danube:
             branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
-            disabled: false
+            disabled: true
 
 - job-template:
     name: 'functest-verify-{stream}'
index 5d1ed28..9b7f135 100755 (executable)
@@ -10,7 +10,11 @@ global_ret_val=0
 
 tests=($(echo $FUNCTEST_SUITE_NAME | tr "," "\n"))
 for test in ${tests[@]}; do
-    cmd="python /home/opnfv/repos/functest/functest/ci/run_tests.py -t $test"
+    if [ "$BRANCH" == 'master' ]; then
+        cmd="run_tests -t $test"
+    else
+        cmd="python /home/opnfv/repos/functest/functest/ci/run_tests.py -t $test"
+    fi
     docker exec $container_id $cmd
     let global_ret_val+=$?
 done
index 558e248..5f936f5 100755 (executable)
@@ -90,7 +90,12 @@ if [ $(docker ps | grep "${FUNCTEST_IMAGE}:${DOCKER_TAG}" | wc -l) == 0 ]; then
     exit 1
 fi
 
-cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/prepare_env.py start"
+if [ "$BRANCH" == 'master' ]; then
+    cmd="prepare_env start"
+else
+    cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/prepare_env.py start"
+fi
+
 
 echo "Executing command inside the docker: ${cmd}"
 docker exec ${container_id} ${cmd}
index 5341db4..e4dfa8d 100644 (file)
             find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \
                 sed -e "s|^$local_path|    http://$gs_path|" >> gerrit_comment.txt
 
+# To take advantage of this macro, have your build write
+# out the file 'gerrit_comment.txt' with information to post
+# back to gerrit and include this macro in the list of builders.
 - builder:
-    name: report-docs-build-result-to-gerrit
+    name: report-build-result-to-gerrit
     builders:
         - shell: |
             #!/bin/bash
     builders:
         - build-html-and-pdf-docs-output
         - upload-under-review-docs-to-opnfv-artifacts
-        - report-docs-build-result-to-gerrit
+        - report-build-result-to-gerrit
 
 - builder:
     name: upload-merged-docs
     builders:
         - build-html-and-pdf-docs-output
         - upload-generated-docs-to-opnfv-artifacts
-        - report-docs-build-result-to-gerrit
+        - report-build-result-to-gerrit
         - remove-old-docs-from-opnfv-artifacts
 
 - builder:
index 5744222..50859c4 100644 (file)
             default: 'daisy-baremetal'
         - string:
             name: INSTALLER_IP
-            default: '10.20.11.2'
+            default: '10.20.7.3'
             description: 'IP of the installer'
         - string:
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
 - parameter:
-    name: 'ool-defaults'
+    name: 'doctor-defaults'
     parameters:
         - node:
             name: SLAVE_NAME
             description: 'Slave name on Jenkins'
             allowed-slaves:
-                - ool-virtual1
-                - ool-virtual2
-                - ool-virtual3
+                - '{default-slave}'
             default-slaves:
                 - '{default-slave}'
         - string:
             default: /root/.ssh/id_rsa
             description: 'SSH key to be used'
 - parameter:
-    name: 'ool-virtual1-defaults'
+    name: 'doctor-apex-verify-defaults'
     parameters:
-        - 'ool-defaults':
-            default-slave: 'ool-virtual1'
+        - 'doctor-defaults':
+            default-slave: 'doctor-apex-verify'
 - parameter:
-    name: 'ool-virtual2-defaults'
+    name: 'doctor-fuel-verify-defaults'
     parameters:
-        - 'ool-defaults':
-            default-slave: 'ool-virtual2'
+        - 'doctor-defaults':
+            default-slave: 'doctor-fuel-verify'
 - parameter:
-    name: 'ool-virtual3-defaults'
+    name: 'doctor-joid-verify-defaults'
     parameters:
-        - 'ool-defaults':
-            default-slave: 'ool-virtual3'
+        - 'doctor-defaults':
+            default-slave: 'doctor-joid-verify'
 - parameter:
     name: 'multisite-virtual-defaults'
     parameters:
index 9a4d885..2702c45 100644 (file)
@@ -58,7 +58,7 @@
 - job-template:
     name: 'netready-build-gluon-packages-daily-{stream}'
 
-    disabled: false
+    disabled: true
 
     concurrent: true
 
similarity index 77%
rename from jjb/releng/testapi-automate.yml
rename to jjb/releng/automate.yml
index dd76538..73bef3e 100644 (file)
@@ -1,20 +1,22 @@
 - project:
-    name: testapi-automate
+    name: utils-automate
     stream:
         - master:
             branch: '{stream}'
-            gs-pathname: ''
+
+    module:
+        - 'testapi'
+        - 'reporting'
 
     phase:
-        - 'docker-update'
         - 'docker-deploy':
             slave-label: 'testresults'
         - 'generate-doc'
 
     jobs:
-        - 'testapi-automate-{stream}'
-        - 'testapi-automate-{phase}-{stream}'
-        - 'testapi-verify-{stream}'
+        - '{module}-automate-{stream}'
+        - '{module}-automate-{phase}-{stream}'
+        - '{module}-verify-{stream}'
 
     project: 'releng'
 
@@ -44,7 +46,7 @@
         - mongodb-backup
 
 - job-template:
-    name: 'testapi-verify-{stream}'
+    name: '{module}-verify-{stream}'
 
     parameters:
         - project-parameter:
                     branch-pattern: '**/{branch}'
                 file-paths:
                   - compare-type: 'ANT'
-                    pattern: 'utils/test/testapi/**'
+                    pattern: 'utils/test/{module}/**'
 
     builders:
-        - run-unit-tests
+        - shell: |
+            cd ./utils/test/{module}/
+            bash run_test.sh
+            cp *.xml $WORKSPACE
 
     publishers:
         - junit:
                     failing: 30
 
 - job-template:
-    name: 'testapi-automate-{stream}'
+    name: '{module}-automate-{stream}'
 
     project-type: multijob
 
             branch: '{branch}'
         - string:
             name: DOCKER_TAG
-            default: "latest"
-            description: "Tag name for testapi docker image"
+            default: 'latest'
+            description: 'Tag name for {module} docker image'
+        - string:
+            name: MODULE_NAME
+            default: '{module}'
+            description: "Name of the module"
         - 'opnfv-build-defaults'
 
     scm:
                     branch-pattern: '**/{branch}'
                 file-paths:
                   - compare-type: 'ANT'
-                    pattern: 'utils/test/testapi/**'
+                    pattern: 'utils/test/{module}/**'
 
     builders:
         - description-setter:
             description: "Built on $NODE_NAME"
-        - multijob:
-            name: docker-update
-            condition: SUCCESSFUL
-            projects:
-                - name: 'testapi-automate-docker-update-{stream}'
-                  current-parameters: true
-                  kill-phase-on: FAILURE
-                  abort-all-job: true
+        - docker-update
         - multijob:
             name: docker-deploy
             condition: SUCCESSFUL
             projects:
-                - name: 'testapi-automate-docker-deploy-{stream}'
+                - name: '{module}-automate-docker-deploy-{stream}'
                   current-parameters: false
                   predefined-parameters: |
                     GIT_BASE=$GIT_BASE
             name: generate-doc
             condition: SUCCESSFUL
             projects:
-                - name: 'testapi-automate-generate-doc-{stream}'
+                - name: '{module}-automate-generate-doc-{stream}'
                   current-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: true
         - 'email-publisher'
 
 - job-template:
-    name: 'testapi-automate-{phase}-{stream}'
+    name: '{module}-automate-{phase}-{stream}'
 
     properties:
         - throttle:
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
-        - string:
-            name: DOCKER_TAG
-            default: "latest"
-            description: "Tag name for testapi docker image"
 
     wrappers:
         - ssh-agent-wrapper
     builders:
         - description-setter:
             description: "Built on $NODE_NAME"
-        - 'testapi-automate-{phase}-macro'
+        - '{module}-automate-{phase}-macro'
 
 ################################
 # job builders
 ################################
 - builder:
-    name: mongodb-backup
-    builders:
-        - shell: |
-            bash ./jjb/releng/testapi-backup-mongodb.sh
-
-- builder:
-    name: 'run-unit-tests'
+    name: 'docker-update'
     builders:
-        - shell: |
-            bash ./utils/test/testapi/run_test.sh
-
-- builder:
-    name: 'testapi-automate-docker-update-macro'
-    builders:
-        - shell: |
-            bash ./jjb/releng/testapi-docker-update.sh
+        - shell:
+            !include-raw: ./docker-update.sh
 
 - builder:
     name: 'testapi-automate-generate-doc-macro'
         - shell: |
             bash ./utils/test/testapi/htmlize/push-doc-artifact.sh
 
+- builder:
+    name: 'reporting-automate-generate-doc-macro'
+    builders:
+        - shell: echo "To Be Done"
+
 - builder:
     name: 'testapi-automate-docker-deploy-macro'
     builders:
         - shell: |
-            bash ./jjb/releng/testapi-docker-deploy.sh
+            bash ./jjb/releng/docker-deploy.sh 'sudo docker run -dti -p 8082:8000 -e mongodb_url=mongodb://172.17.0.1:27017 -e base_url=http://testresults.opnfv.org/test opnfv/testapi' "http://testresults.opnfv.org/test/swagger/APIs"
+- builder:
+    name: 'reporting-automate-docker-deploy-macro'
+    builders:
+        - shell: |
+            bash ./jjb/releng/docker-deploy.sh 'sudo docker run -itd -p 8084:8000 -e SERVER_URL=http://testresults.opnfv.org/reporting2:8084 opnfv/reporting' "http://testresults.opnfv.org/reporting2/reporting/index.html"
+
+- builder:
+    name: mongodb-backup
+    builders:
+        - shell: |
+            bash ./jjb/releng/testapi-backup-mongodb.sh
 
 ################################
 # job publishers
     name: 'email-publisher'
     publishers:
         - email:
-            recipients: rohitsakala@gmail.com feng.xiaowei@zte.com.cn
+            recipients: rohitsakala@gmail.com feng.xiaowei@zte.com.cn morgan.richomme@orange.com
             notify-every-unstable-build: false
             send-to-individuals: true
similarity index 63%
rename from jjb/releng/testapi-docker-deploy.sh
rename to jjb/releng/docker-deploy.sh
index b4e60b0..b3b930f 100644 (file)
@@ -1,10 +1,30 @@
 #!/bin/bash
+#  Licensed to the Apache Software Foundation (ASF) under one   *
+#  or more contributor license agreements.  See the NOTICE file *
+#  distributed with this work for additional information        *
+#  regarding copyright ownership.  The ASF licenses this file   *
+#  to you under the Apache License, Version 2.0 (the            *
+#  "License"); you may not use this file except in compliance   *
+#  with the License.  You may obtain a copy of the License at   *
+#                                                               *
+#    http://www.apache.org/licenses/LICENSE-2.0                 *
+#                                                               *
+#  Unless required by applicable law or agreed to in writing,   *
+#  software distributed under the License is distributed on an  *
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
+#  KIND, either express or implied.  See the License for the    *
+#  specific language governing permissions and limitations      *
+#  under the License.                                           *
+
+# Assigning Variables
+command=$1
+url=$2
 
 function check() {
 
     # Verify hosted
     sleep 5
-    cmd=`curl -s --head  --request GET http://testresults.opnfv.org/test/swagger/spec | grep '200 OK' > /dev/null`
+    cmd=`curl -s --head  --request GET ${url} | grep '200 OK' > /dev/null`
     rc=$?
     echo $rc
 
@@ -63,7 +83,7 @@ else
 fi
 
 echo "Running a container with the new image"
-sudo docker run -dti -p "8082:8000" -e "mongodb_url=mongodb://172.17.0.1:27017" -e "swagger_url=http://testresults.opnfv.org/test" opnfv/testapi:latest
+$command:latest
 
 if check; then
     echo "TestResults Hosted."
@@ -71,7 +91,7 @@ else
     echo "TestResults Hosting Failed"
     if [[ $(sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}') ]]; then
         echo "Running old Image"
-        sudo docker run -dti -p "8082:8000" -e "mongodb_url=mongodb://172.17.0.1:27017" -e "swagger_url=http://testresults.opnfv.org/test" opnfv/testapi:old
+        $command:old
         exit 1
     fi
 fi
diff --git a/jjb/releng/docker-update.sh b/jjb/releng/docker-update.sh
new file mode 100644 (file)
index 0000000..559ac83
--- /dev/null
@@ -0,0 +1,34 @@
+#!/bin/bash
+#  Licensed to the Apache Software Foundation (ASF) under one   *
+#  or more contributor license agreements.  See the NOTICE file *
+#  distributed with this work for additional information        *
+#  regarding copyright ownership.  The ASF licenses this file   *
+#  to you under the Apache License, Version 2.0 (the            *
+#  "License"); you may not use this file except in compliance   *
+#  with the License.  You may obtain a copy of the License at   *
+#                                                               *
+#    http://www.apache.org/licenses/LICENSE-2.0                 *
+#                                                               *
+#  Unless required by applicable law or agreed to in writing,   *
+#  software distributed under the License is distributed on an  *
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
+#  KIND, either express or implied.  See the License for the    *
+#  specific language governing permissions and limitations      *
+#  under the License.                                           *
+
+set -o errexit
+set -o nounset
+
+cd $WORKSPACE/utils/test/$MODULE_NAME/docker/
+
+# Remove previous containers
+docker ps -a | grep "opnfv/$MODULE_NAME" | awk '{ print $1 }' | xargs -r docker rm -f
+
+# Remove previous images
+docker images | grep "opnfv/$MODULE_NAME" | awk '{ print $3 }' | xargs -r docker rmi -f
+
+# Start build
+docker build --no-cache -t opnfv/$MODULE_NAME:$DOCKER_TAG .
+
+# Push Image
+docker push opnfv/$MODULE_NAME:$DOCKER_TAG
index 2aa52ad..ebd0c9f 100644 (file)
@@ -73,6 +73,8 @@ fi
 # Get tag version
 echo "Current branch: $BRANCH"
 
+BUILD_BRANCH=$BRANCH
+
 if [[ "$BRANCH" == "master" ]]; then
     DOCKER_TAG="latest"
 elif [[ -n "${RELEASE_VERSION-}" ]]; then
@@ -82,19 +84,17 @@ else
     DOCKER_TAG="stable"
 fi
 
+if [[ -n "${COMMIT_ID-}" && -n "${RELEASE_VERSION-}" ]]; then
+    DOCKER_TAG=$RELEASE_VERSION
+    BUILD_BRANCH=$COMMIT_ID
+fi
+
 # Start the build
 echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_TAG"
 echo "--------------------------------------------------------"
 echo
-if [[ $DOCKER_REPO_NAME == *"dovetail"* ]]; then
-    if [[ -n "${RELEASE_VERSION-}" ]]; then
-        DOCKER_TAG=${RELEASE_VERSION}
-    fi
-    cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG -f $DOCKERFILE ."
-else
-    cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BRANCH
-        -f $DOCKERFILE ."
-fi
+cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BUILD_BRANCH
+    -f $DOCKERFILE ."
 
 echo ${cmd}
 ${cmd}
index 5fe0eb9..095ba41 100644 (file)
             name: DOCKER_REPO_NAME
             default: "opnfv/{project}"
             description: "Dockerhub repo to be pushed to."
+        - string:
+            name: COMMIT_ID
+            default: ""
+            description: "commit id to make a snapshot docker image"
         - string:
             name: RELEASE_VERSION
             default: ""
diff --git a/jjb/releng/testapi-docker-update.sh b/jjb/releng/testapi-docker-update.sh
deleted file mode 100644 (file)
index 84f5c32..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-
-set -o errexit
-set -o nounset
-
-cd $WORKSPACE/utils/test/testapi/docker/
-
-# Remove previous containers
-docker ps -a | grep "opnfv/testapi" | awk '{ print $1 }' | xargs -r docker rm -f
-
-# Remove previous images
-docker images | grep "opnfv/testapi" | awk '{ print $3 }' | xargs -r docker rmi -f
-
-# Start build
-docker build --no-cache -t opnfv/testapi:$DOCKER_TAG .
-
-# Push Image
-docker push opnfv/testapi:$DOCKER_TAG
index be53b27..13186a1 100644 (file)
         - git-scm
 
     triggers:
-        - timed: '0 18 * * *'
+        - timed: '0 22 * * *'
 
     builders:
         - shell: |
index 3e9ff67..9773cfd 100644 (file)
     # trigger is disabled until we know which jobs we will have
     # and adjust stuff accordingly
     triggers:
-        - timed: '#@midnight'
+        - timed: ''  # '@midnight'
 
     builders:
         - description-setter:
index 56a4b18..722b077 100644 (file)
     # trigger is disabled until we know which jobs we will have
     # and adjust stuff accordingly
     triggers:
-        - timed: '#@midnight'
+        - timed: ''  # '@midnight'
 
     builders:
         - description-setter:
index 5ff36f8..ff1d47e 100644 (file)
 
     publishers:
         - email:
-            recipients: jean.gaoliang@huawei.com limingjiang@huawei.com
+            recipients: jean.gaoliang@huawei.com limingjiang@huawei.com ross.b.brattain@intel.com
 
 ########################
 # builder macros
index 973f83a..1c2abad 100755 (executable)
@@ -31,7 +31,8 @@ fi
 opts="--privileged=true --rm"
 envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
     -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NETWORK=${EXTERNAL_NETWORK} \
-    -e YARDSTICK_BRANCH=${BRANCH} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
+    -e YARDSTICK_BRANCH=${BRANCH} -e BRANCH=${BRANCH} \
+    -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
 
 # Pull the image with correct tag
 echo "Yardstick: Pulling image opnfv/yardstick:${DOCKER_TAG}"
index b65abde..ecb8e19 100644 (file)
@@ -196,8 +196,10 @@ on each run.
 To enable it, you need to export the different DEV_PATH vars:
 
 - export OPNFV_RELENG_DEV_PATH=/opt/releng/
-- export OPENSTACK_BIFROST_DEV_PATH=/opt/bifrost
-- export OPENSTACK_OSA_DEV_PATH=/opt/openstack-ansible
+- export OPENSTACK_BIFROST_DEV_PATH=/opt/bifrost/
+- export OPENSTACK_OSA_DEV_PATH=/opt/openstack-ansible/
+
+Please note the trailing slashes.
 
 This will cause the deployment to pick the development copies stored at the
 specified directories, and use them instead of cloning those on every run.
index c426936..5817860 100755 (executable)
@@ -21,7 +21,7 @@
 #-------------------------------------------------------------------------------
 # use releng from master until the development work with the sandbox is complete
 export OPNFV_RELENG_VERSION="master"
-# HEAD of "master" as of 04.04.2017
+# HEAD of bifrost "master" as of 29.06.2017
 export OPENSTACK_BIFROST_VERSION=${OPENSTACK_BIFROST_VERSION:-"7c9bb5e07c6bc3b42c9a9e8457e5eef511075b38"}
-# HEAD of "master" as of 04.04.2017
-export OPENSTACK_OSA_VERSION=${OPENSTACK_OSA_VERSION:-"d9e1330c7ff9d72a604b6b4f3af765f66a01b30e"}
+# HEAD of osa "master" as of 29.06.2017
+export OPENSTACK_OSA_VERSION=${OPENSTACK_OSA_VERSION:-"0648818c64239b534d00db381c4609f28e40bda9"}
index 5a96e2a..57e0bb8 100644 (file)
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-# these versions are extracted based on the osa commit d9e1330c7ff9d72a604b6b4f3af765f66a01b30e on 04.04.2017
-# https://review.openstack.org/gitweb?p=openstack/openstack-ansible.git;a=commit;h=d9e1330c7ff9d72a604b6b4f3af765f66a01b30e
+# these versions are extracted based on the osa commit 0648818c64239b534d00db381c4609f28e40bda9 on 2017-06-27T22:02:17+00:00
+# https://review.openstack.org/gitweb?p=openstack/openstack-ansible.git;a=commit;h=0648818c64239b534d00db381c4609f28e40bda9
 - name: ansible-hardening
   scm: git
   src: https://git.openstack.org/openstack/ansible-hardening
-  version: 051fe3195f59d1ee8db06fca5d2cce7a25e58861
+  version: f422da8599c6d8f64ebfefbf0a0aa711ea1f9569
 - name: apt_package_pinning
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-apt_package_pinning
-  version: 364fc9fcd8ff652546c13d9c20ac808bc0e35f66
+  version: 4afe664efb5a2385a1d7071f68bc9001f16c0f41
 - name: pip_install
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-pip_install
-  version: 793ae4d01397bd91ebe18e9670e8e27d1ae91960
+  version: 348995b85f91f796b28656459474fb3935be737c
 - name: galera_client
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-galera_client
-  version: c093c13e01826da545bf9a0259e0be441bc1b5e1
+  version: 2055ebf1582a15c2b2a73985485be15884c9b2d3
 - name: galera_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-galera_server
-  version: fd0a6b104a32badbe7e7594e2c829261a53bfb11
+  version: 78a1259a10a5be95ab7d6ba3e8f2961805ae3a5b
 - name: ceph_client
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-ceph_client
-  version: 9149bfa8e3c4284b656834ba7765ea3aa48bec2e
+  version: aa3b0d959464f9362aaf29d6cf6225e1d4e302be
 - name: haproxy_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-haproxy_server
-  version: 32415ab81c61083ac5a83b65274703e4a5470e5e
+  version: f8bc5c6129c0d50ac3355c82560fbf22ee32479b
 - name: keepalived
   scm: git
   src: https://github.com/evrardjp/ansible-keepalived
-  version: 4f7c8eb16e3cbd8c8748f126c1eea73db5c8efe9
+  version: 3.0.1
 - name: lxc_container_create
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-lxc_container_create
-  version: 097da38126d90cfca36cdc3955aaf658a00db599
+  version: 1eab03452885f0a34cb78f54e3bcf5824abc012e
 - name: lxc_hosts
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-lxc_hosts
-  version: 2931d0c87a1c592ad7f1f2f83cdcf468e8dea932
+  version: f0b8782c03dde4dd65e70d8b03afc26a30c74f37
 - name: memcached_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-memcached_server
-  version: 58e17aa13ebe7b0aa5da7c00afc75d6716d2720d
-- name: openstack-ansible-security
-  scm: git
-  src: https://git.openstack.org/openstack/openstack-ansible-security
-  version: 9d745ec4fe8ac3e6d6cbb2412abe5196a9d2dad7
+  version: 6924e7d44d6e36cbe45507e43ef82af6ac0ae125
 - name: openstack_hosts
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-openstack_hosts
-  version: 2076dfddf418b1bdd64d3782346823902aa996bc
+  version: d68b1dd8fc5ef18c78172d4e9fa3ca01d7473dcf
 - name: os_keystone
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_keystone
-  version: cee7a02143a1826479e6444c6fb5f1c2b6074ab7
+  version: 0cafcc150da10a01ee0b4543167fdc88b9b91a85
 - name: openstack_openrc
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-openstack_openrc
-  version: fb98ad8d7bfe7fba0c964cb061313f1b8767c4b0
+  version: 18b7f31a19c4c9bc95abc07a83c9ba866eff538d
 - name: os_aodh
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_aodh
-  version: 9dcacb8fd6feef02e485f99c83535707ae67876b
+  version: 5bebd5a18aa7469803f26fb41df62495730afde3
 - name: os_barbican
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_barbican
-  version: bb3f39cb2f3c31c6980aa65c8953ff6293b992c0
+  version: fc95936f9375c3e9eab708b356e760e3eeb785d7
 - name: os_ceilometer
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_ceilometer
-  version: 178ad8245fa019f0610c628c58c377997b011e8a
+  version: daf94c5d1a009abb111b5ff7dea8b4f50473b227
 - name: os_cinder
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_cinder
-  version: 1321fd39d8f55d1dc3baf91b4194469b349d7dc4
+  version: d0c46f29d7bb02139a14ad46869ce411e80874d9
 - name: os_glance
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_glance
-  version: f39ef212bfa2edff8334bfb632cc463001c77c11
+  version: a1e3588769e6d17b074398f0ef2675f34438b73b
 - name: os_gnocchi
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_gnocchi
-  version: 318bd76e5e72402e8ff5b372b469c27a9395341b
+  version: f79b0f6e1db40b59390b7e40a90792e72afe55e6
 - name: os_heat
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_heat
-  version: 07d59ddb757b2d2557fba52ac537803e646e65b4
+  version: 7a5b703b35f36a5a63ce9934ef585c8967e9de5a
 - name: os_horizon
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_horizon
-  version: 69ef49c4f7a42f082f4bcff824d13f57145e2b83
+  version: cb4a27da79ad67b2826f637927514e0829c23c0f
 - name: os_ironic
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_ironic
-  version: 57e8a0eaaa2159f33e64a1b037180383196919d1
+  version: e5c24e40b0d08d8bc7b4641679a8731c2b2aca29
 - name: os_magnum
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_magnum
-  version: 8329c257dff25686827bd1cc904506d76ad1d12f
+  version: 3eeb33db25db48f04e496a3ee47323fffe2af864
 - name: os_trove
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_trove
-  version: b948402c76d6188caa7be376098354cdb850d638
+  version: 0cf74c1a917b07e557411ca1c1376491c97aa0a9
 - name: os_neutron
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_neutron
-  version: 2a92a4e1857e7457683aefd87ee5a4e751fc701a
+  version: 280788b20099532c13042966defcbcbf5d5dd994
 - name: os_nova
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_nova
-  version: 511963b7921ec7c2db24e8ee1d71a940b0aafae4
+  version: 031b386bdd29f895203a3d053c1dabba66cfeeb0
 - name: os_rally
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_rally
-  version: 96153c5b3285d11d00611a03135c9d8f267e0f52
+  version: 9125458265088eb8622f28df57f640509546a6d4
 - name: os_sahara
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_sahara
-  version: 012d3f3530f878e5143d58380f94d1f514baad04
+  version: 433d624b0ddb0d2778f014a175064572e15ea462
 - name: os_swift
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_swift
-  version: d62d6a23ac0b01d0320dbcb6c710dfd5f3cecfdf
+  version: 3b91c62e1de6e0d852476e3b74e39b7a55d77ec9
 - name: os_tempest
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_tempest
-  version: 9d2bfb09d1ebbc9102329b0d42de33aa321e57b1
+  version: 692209da1fdab6014e13e65be27ffb9b8c8578bb
 - name: plugins
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-plugins
-  version: 3d2e23bb7e1d6775789d7f65ce8a878a7ee1d3c7
+  version: 8685a0ba38b7f534dd4db971da6d54b495c79169
 - name: rabbitmq_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-rabbitmq_server
-  version: 9b0ce64fe235705e237bc4b476ecc0ad602d67a8
+  version: 50bffbf8f114c8100ec5e86ebac9baba5c4f233d
 - name: repo_build
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-repo_build
-  version: fe3ae20f74a912925d5c78040984957a6d55f9de
+  version: 9ce713e9762650e1041ba7d9ad3c207a0c65d0c4
 - name: repo_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-repo_server
-  version: 7ea0820e0941282cd5c5cc263e939ffbee54ba52
+  version: 275124b643d6e6a9c92d65be7a7f309fe6f0c6dc
 - name: rsyslog_client
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-rsyslog_client
-  version: 19615e47137eee46ee92c0308532fe1d2212333c
+  version: da0090d48b166e0ffe83c35483572e358a29d523
 - name: rsyslog_server
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-rsyslog_server
-  version: efd7b21798da49802012e390a0ddf7cc38636eeb
+  version: 0f4b5ac0e7a170bd9811875965b781d447a5517a
 - name: sshd
   scm: git
   src: https://github.com/willshersystems/ansible-sshd
-  version: 426e11c4dffeca09fcc4d16103a91e5e65180040
+  version: 0.5.1
 - name: bird
   scm: git
   src: https://github.com/logan2211/ansible-bird
-  version: 2c4d29560d3617abddf0e63e0c95536364dedd92
+  version: '1.2'
 - name: etcd
   scm: git
   src: https://github.com/logan2211/ansible-etcd
-  version: ef63b0c5fd352b61084fd5aca286ee7f3fea932b
+  version: '1.2'
 - name: unbound
   scm: git
   src: https://github.com/logan2211/ansible-unbound
-  version: 5329d03eb9c15373d648a801563087c576bbfcde
+  version: '1.4'
 - name: resolvconf
   scm: git
   src: https://github.com/logan2211/ansible-resolvconf
-  version: 3b2b7cf2e900b194829565b351bf32bb63954548
+  version: '1.2'
 - name: os_designate
   scm: git
   src: https://git.openstack.org/openstack/openstack-ansible-os_designate
-  version: b7098a6bdea73c869f45a86e0cc78d21b032161e
+  version: cc9760d0a08083c1168999422ccefa0d56ead093
 - name: ceph.ceph-common
   scm: git
   src: https://github.com/ceph/ansible-ceph-common
-  version: ef149767fa9565ec887f0bdb007ff752bd61e5d5
+  version: v2.2.9
 - name: ceph.ceph-docker-common
   scm: git
   src: https://github.com/ceph/ansible-ceph-docker-common
 - name: ceph-mon
   scm: git
   src: https://github.com/ceph/ansible-ceph-mon
-  version: c5be4d6056dfe6a482ca3fcc483a6050cc8929a1
+  version: v2.2.9
 - name: ceph-osd
   scm: git
   src: https://github.com/ceph/ansible-ceph-osd
-  version: 7bc5a61ceb96e487b7a9fe9643f6dafa6492f2b5
+  version: v2.2.9
+- name: os_octavia
+  scm: git
+  src: https://git.openstack.org/openstack/openstack-ansible-os_octavia
+  version: 48ff9a634a3ea34c6811ebc10057708dc23ed76e
+- name: os_molteniron
+  scm: git
+  src: https://git.openstack.org/openstack/openstack-ansible-os_molteniron
+  version: 0de6fe5251b54881ab3eb8bf0a8d694dd4362430
diff --git a/prototypes/xci/scripts/update-osa-version-files.sh b/prototypes/xci/scripts/update-osa-version-files.sh
new file mode 100755 (executable)
index 0000000..d822d25
--- /dev/null
@@ -0,0 +1,91 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2017 SUSE LINUX GmbH and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# This script is used to pin the SHAs for the various roles in the
+# ansible-role-requirements file. It will also update the SHAs for
+# OSA and bifrost.
+
+set -e
+
+# NOTE(hwoarang) This could break if files are re-arranged in the future
+releng_xci_base="$(dirname $(readlink -f $0))/.."
+
+usage() {
+    echo """
+    ${0} <openstack-ansible commit SHA> [<bifrost commit SHA>]
+    """
+    exit 0
+}
+
+cleanup() {
+    [[ -d $tempdir ]] && rm -rf $tempdir
+}
+
+printme() {
+    echo "===> $1"
+}
+
+# Only need a single argument
+[[ $# -lt 1 || $# -gt 2 ]] && echo "Invalid number of arguments!" && usage
+
+tempdir="$(mktemp -d)"
+
+trap cleanup EXIT
+
+pushd $tempdir &> /dev/null
+
+printme "Downloading the sources-branch-updater-lib.sh library"
+
+printme "Cloning the openstack-ansible repository"
+(
+    git clone -q git://git.openstack.org/openstack/openstack-ansible && cd openstack-ansible && git checkout -q $1
+)
+
+popd &> /dev/null
+
+pushd $tempdir/openstack-ansible &> /dev/null
+source scripts/sources-branch-updater-lib.sh
+printme "Synchronize roles and packages"
+update_ansible_role_requirements "master" "true" "true"
+
+# Construct the ansible-role-requirements-file
+echo """---
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2017 Ericsson AB and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+# these versions are extracted based on the osa commit ${1} on $(git --no-pager log -1 --format=%cI $1)
+# https://review.openstack.org/gitweb?p=openstack/openstack-ansible.git;a=commit;h=$1""" > $releng_xci_base/file/ansible-role-requirements.yml
+cat $tempdir/openstack-ansible/ansible-role-requirements.yml >> $releng_xci_base/file/ansible-role-requirements.yml
+
+# Update the pinned OSA version
+sed -i -e "/^export OPENSTACK_OSA_VERSION/s@:-\"[a-z0-9]*@:-\"${1}@" \
+    -e "s/\(^# HEAD of osa.*of \).*/\1$(date +%d\.%m\.%Y)/" $releng_xci_base/config/pinned-versions
+
+# Update the pinned bifrost version
+[[ -n ${2:-} ]] && \
+    sed -i -e "/^export OPENSTACK_BIFROST_VERSION/s@:-\"[a-z0-9]*@:-\"${2}@" \
+    -e "s/\(^# HEAD of bifrost.*of \).*/\1$(date +%d\.%m\.%Y)/" $releng_xci_base/config/pinned-versions
+
+popd &> /dev/null
+
+printme ""
+printme "======================= Report ============================"
+printme ""
+printme "The $releng_xci_base/file/ansible-role-requirements.yml and"
+printme "$releng_xci_base/config/pinned-versions files have been"
+printme "updated. Please make sure you test the end result before"
+printme "committing it!"
+printme ""
+printme "==========================================================="
index 3a65983..a72c927 100755 (executable)
@@ -37,6 +37,15 @@ source "$XCI_PATH/config/${XCI_FLAVOR}-vars"
 # source xci configuration
 source $XCI_PATH/config/env-vars
 
+#-------------------------------------------------------------------------------
+# Sanitize local development environment variables
+#-------------------------------------------------------------------------------
+user_local_dev_vars=(OPNFV_RELENG_DEV_PATH OPNFV_OSA_DEV_PATH OPNFV_BIFROST_DEV_PATH)
+for local_user_var in ${user_local_dev_vars[@]}; do
+    [[ -n ${!local_user_var} ]] && export $local_user_var=${!local_user_var%/}/
+done
+unset user_local_dev_vars local_user_var
+
 #-------------------------------------------------------------------------------
 # Log info to console
 #-------------------------------------------------------------------------------
index 993c0b9..b514a91 100755 (executable)
@@ -153,36 +153,39 @@ elif [ "$installer_type" == "apex" ]; then
     sudo scp $ssh_options root@$installer_ip:/home/stack/overcloudrc.v3 $dest_path
 
 elif [ "$installer_type" == "compass" ]; then
-    verify_connectivity $installer_ip
-    controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        'mysql -ucompass -pcompass -Dcompass -e"select *  from cluster;"' \
-        | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"127.0.0.1\"/) {print $(i+2);break;}}'  \
-        | grep -oP "\d+.\d+.\d+.\d+")
-
-    if [ -z $controller_ip ]; then
-        error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
-    fi
-
-    info "Fetching rc file from controller $controller_ip..."
-    sshpass -p root ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        "scp $ssh_options ${controller_ip}:/opt/admin-openrc.sh ." &> /dev/null
-    sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null
+    if [ "${BRANCH}" == "master" ]; then
+        sudo docker cp compass-tasks:/opt/openrc $dest_path &> /dev/null
+    else
+        verify_connectivity $installer_ip
+        controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
+            'mysql -ucompass -pcompass -Dcompass -e"select *  from cluster;"' \
+            | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"127.0.0.1\"/) {print $(i+2);break;}}'  \
+            | grep -oP "\d+.\d+.\d+.\d+")
 
-    info "This file contains the mgmt keystone API, we need the public one for our rc file"
+        if [ -z $controller_ip ]; then
+            error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
+        fi
 
-    if grep "OS_AUTH_URL.*v2" $dest_path > /dev/null 2>&1 ; then
-        public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
-            "ssh ${controller_ip} 'source /opt/admin-openrc.sh; openstack endpoint show identity '" \
-            | grep publicurl | awk '{print $4}')
-    else
-        public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
-            "ssh ${controller_ip} 'source /opt/admin-openrc.sh; \
-                 openstack endpoint list --interface public --service identity '" \
-            | grep identity | awk '{print $14}')
+        info "Fetching rc file from controller $controller_ip..."
+        sshpass -p root ssh 2>/dev/null $ssh_options root@${installer_ip} \
+            "scp $ssh_options ${controller_ip}:/opt/admin-openrc.sh ." &> /dev/null
+        sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null
+
+        info "This file contains the mgmt keystone API, we need the public one for our rc file"
+
+        if grep "OS_AUTH_URL.*v2" $dest_path > /dev/null 2>&1 ; then
+            public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
+                "ssh ${controller_ip} 'source /opt/admin-openrc.sh; openstack endpoint show identity '" \
+                | grep publicurl | awk '{print $4}')
+        else
+            public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
+                "ssh ${controller_ip} 'source /opt/admin-openrc.sh; \
+                     openstack endpoint list --interface public --service identity '" \
+                | grep identity | awk '{print $14}')
+        fi
+        info "public_ip: $public_ip"
+        swap_to_public $public_ip
     fi
-    info "public_ip: $public_ip"
-    swap_to_public $public_ip
-
 
 elif [ "$installer_type" == "joid" ]; then
     # do nothing...for the moment
index 8fce2e0..c46ca89 100755 (executable)
@@ -61,8 +61,8 @@ main () {
     #make pid dir
     pidfile="/var/run/$jenkinsuser/jenkins_jnlp_pid"
     if ! [ -d /var/run/$jenkinsuser/ ]; then
-        mkdir /var/run/$jenkinsuser/
-        chown $jenkinsuser:$jenkinsuser /var/run/$jenkinsuser/
+        sudo mkdir /var/run/$jenkinsuser/
+        sudo chown $jenkinsuser:$jenkinsuser /var/run/$jenkinsuser/
     fi
 
     if [[ $skip_monit != true ]]; then
index f0c488a..5021b78 100644 (file)
@@ -30,7 +30,8 @@ node_list=(\
 'arm-pod1' 'arm-pod3' \
 'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' 'huawei-pod5' \
 'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \
-'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4')
+'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4'\
+'zte-virtual1')
 
 
 if [[ ! " ${node_list[@]} " =~ " ${testbed} " ]]; then
index 49f4517..7fe97a8 100755 (executable)
@@ -98,3 +98,5 @@ echo "daemon off;" >> /etc/nginx/nginx.conf
 cp /home/opnfv/utils/test/reporting/docker/supervisor.conf /etc/supervisor/conf.d/
 
 ln -s /usr/bin/nodejs /usr/bin/node
+
+cd pages && /bin/bash angular.sh
index 6de856e..aeee3ba 100644 (file)
@@ -12,3 +12,4 @@ PyYAML==3.11
 simplejson==3.8.1
 jinja2==2.8
 tornado==4.4.2
+requests==2.12.5
index 5e315ba..b323dd0 100644 (file)
@@ -1,22 +1,19 @@
 [supervisord]
 nodaemon = true
 
-[program:reporting_tornado]
+[program:tornado]
 user = root
 directory = /home/opnfv/utils/test/reporting/api/api
 command = python server.py --port=800%(process_num)d
 process_name=%(program_name)s%(process_num)d
 numprocs=4
 numprocs_start=1
-autorestart = true
 
-[program:reporting_nginx]
+[program:nginx]
 user = root
 command = service nginx restart
-autorestart = true
 
-[program:reporting_angular]
+[program:configuration]
 user = root
 directory = /home/opnfv/utils/test/reporting/pages
-command = bash angular.sh
-autorestart = true
+command = bash config.sh
index e700e04..77ab784 100755 (executable)
@@ -107,7 +107,6 @@ for version in versions:
         scenario_results = rp_utils.getScenarios(healthcheck,
                                                  installer,
                                                  version)
-
         # get nb of supported architecture (x86, aarch64)
         architectures = rp_utils.getArchitectures(scenario_results)
         logger.info("Supported architectures: {}".format(architectures))
index 080f27b..0e00ea6 100755 (executable)
@@ -1,8 +1,3 @@
-: ${SERVER_URL:='http://testresults.opnfv.org/reporting/api'}
-
-echo "var BASE_URL = 'http://${SERVER_URL}/landing-page'" >> app/scripts/app.config.js
-echo "var PROJECT_URL = 'http://${SERVER_URL}'" >> app/scripts/app.config.js
-
 apt-get install -y nodejs
 apt-get install -y npm
 npm install
index f4eb65a..843a623 100644 (file)
     <script src="scripts/controllers/auth.controller.js"></script>
     <script src="scripts/controllers/admin.controller.js"></script>
     <script src="scripts/controllers/main.controller.js"></script>
-    <script src="scripts/app.config.js"></script>
     <script src="scripts/controllers/testvisual.controller.js"></script>
 
     <!-- endbuild -->
 </body>
 
-</html>
\ No newline at end of file
+</html>
diff --git a/utils/test/reporting/pages/app/scripts/app.config.js b/utils/test/reporting/pages/app/scripts/app.config.js
deleted file mode 100644 (file)
index e69de29..0000000
index 0f3a17a..44d9441 100644 (file)
  * Controller of the opnfvdashBoardAngularApp
  */
 angular.module('opnfvApp')
-    .controller('TableController', ['$scope', '$state', '$stateParams', '$http', 'TableFactory', function($scope, $state, $stateParams, $http, TableFactory) {
-
-        $scope.filterlist = [];
-        $scope.selection = [];
-        $scope.statusList = [];
-        $scope.projectList = [];
-        $scope.installerList = [];
-        $scope.versionlist = [];
-        $scope.loopci = [];
-        $scope.time = [];
-        $scope.tableDataAll = {};
-        $scope.tableInfoAll = {};
-        $scope.scenario = {};
-
-        $scope.VersionConfig = {
-            create: true,
-            valueField: 'title',
-            labelField: 'title',
-            delimiter: '|',
-            maxItems: 1,
-            placeholder: 'Version',
-            onChange: function(value) {
-                checkElementArrayValue($scope.selection, $scope.VersionOption);
-                $scope.selection.push(value);
-                // console.log($scope.selection);
-                getScenarioData();
+    .controller('TableController', ['$scope', '$state', '$stateParams', '$http', 'TableFactory', '$timeout',
+        function($scope, $state, $stateParams, $http, TableFactory, $timeout) {
+
+            $scope.filterlist = [];
+            $scope.selection = [];
+            $scope.statusList = [];
+            $scope.projectList = [];
+            $scope.installerList = [];
+            $scope.versionlist = [];
+            $scope.loopci = [];
+            $scope.time = [];
+            $scope.tableDataAll = {};
+            $scope.tableInfoAll = {};
+            $scope.scenario = {};
+            // $scope.selectProjects = [];
+
+
+            $scope.VersionConfig = {
+                create: true,
+                valueField: 'title',
+                labelField: 'title',
+                delimiter: '|',
+                maxItems: 1,
+                placeholder: 'Version',
+                onChange: function(value) {
+                    checkElementArrayValue($scope.selection, $scope.VersionOption);
+                    $scope.selection.push(value);
+                    // console.log($scope.selection);
+                    getScenarioData();
 
+                }
             }
-        }
 
-        $scope.LoopConfig = {
-            create: true,
-            valueField: 'title',
-            labelField: 'title',
-            delimiter: '|',
-            maxItems: 1,
-            placeholder: 'Loop',
-            onChange: function(value) {
-                checkElementArrayValue($scope.selection, $scope.LoopOption);
-                $scope.selection.push(value);
-                // console.log($scope.selection);
-                getScenarioData();
+            $scope.LoopConfig = {
+                create: true,
+                valueField: 'title',
+                labelField: 'title',
+                delimiter: '|',
+                maxItems: 1,
+                placeholder: 'Loop',
+                onChange: function(value) {
+                    checkElementArrayValue($scope.selection, $scope.LoopOption);
+                    $scope.selection.push(value);
+                    // console.log($scope.selection);
+                    getScenarioData();
 
+                }
             }
-        }
 
-        $scope.TimeConfig = {
-            create: true,
-            valueField: 'title',
-            labelField: 'title',
-            delimiter: '|',
-            maxItems: 1,
-            placeholder: 'Time',
-            onChange: function(value) {
-                checkElementArrayValue($scope.selection, $scope.TimeOption);
-                $scope.selection.push(value);
-                // console.log($scope.selection)
-                getScenarioData();
+            $scope.TimeConfig = {
+                create: true,
+                valueField: 'title',
+                labelField: 'title',
+                delimiter: '|',
+                maxItems: 1,
+                placeholder: 'Time',
+                onChange: function(value) {
+                    checkElementArrayValue($scope.selection, $scope.TimeOption);
+                    $scope.selection.push(value);
+                    // console.log($scope.selection)
+                    getScenarioData();
 
 
+                }
             }
-        }
 
 
-        init();
+            init();
 
-        function init() {
-            $scope.toggleSelection = toggleSelection;
-            getScenarioData();
-            // radioSetting();
-            getFilters();
-        }
+            function init() {
+                $scope.toggleSelection = toggleSelection;
+                getScenarioData();
+                getFilters();
+            }
 
-        function getFilters() {
-            TableFactory.getFilter().get({
+            function getFilters() {
+                TableFactory.getFilter().get({
+
+                }).$promise.then(function(response) {
+                    if (response != null) {
+                        $scope.statusList = response.filters.status;
+                        $scope.projectList = response.filters.projects;
+                        $scope.installerList = response.filters.installers;
+                        $scope.versionlist = response.filters.version;
+                        $scope.loopci = response.filters.loops;
+                        $scope.time = response.filters.time;
+
+                        $scope.statusListString = $scope.statusList.toString();
+                        $scope.projectListString = $scope.projectList.toString();
+                        $scope.installerListString = $scope.installerList.toString();
+                        $scope.VersionSelected = $scope.versionlist[1];
+                        $scope.LoopCiSelected = $scope.loopci[0];
+                        $scope.TimeSelected = $scope.time[0];
+                        radioSetting($scope.versionlist, $scope.loopci, $scope.time);
+
+                    } else {
+                        alert("网络错误");
+                    }
+                })
+            }
 
+            function getScenarioData() {
 
-            }).$promise.then(function(response) {
-                if (response != null) {
-                    $scope.statusList = response.filters.status;
-                    $scope.projectList = response.filters.projects;
-                    $scope.installerList = response.filters.installers;
-                    $scope.versionlist = response.filters.version;
-                    $scope.loopci = response.filters.loops;
-                    $scope.time = response.filters.time;
+                // var utl = BASE_URL + '/scenarios';
+                var data = {
+                    'status': ['success', 'danger', 'warning'],
+                    'projects': ['functest', 'yardstick'],
+                    'installers': ['apex', 'compass', 'fuel', 'joid'],
+                    'version': $scope.VersionSelected,
+                    'loops': $scope.LoopCiSelected,
+                    'time': $scope.TimeSelected
+                };
 
-                    $scope.statusListString = $scope.statusList.toString();
-                    $scope.projectListString = $scope.projectList.toString();
-                    $scope.installerListString = $scope.installerList.toString();
-                    $scope.VersionSelected = $scope.versionlist[1];
-                    $scope.LoopCiSelected = $scope.loopci[0];
-                    $scope.TimeSelected = $scope.time[0];
-                    radioSetting($scope.versionlist, $scope.loopci, $scope.time);
+                TableFactory.getScenario(data).then(function(response) {
+                    if (response.status == 200) {
+                        $scope.scenario = response.data;
 
-                } else {
-                    alert("网络错误");
-                }
-            })
-        }
+                        reSettingcolspan();
+                    }
+
+                }, function(error) {
+
+                })
 
-        function getScenarioData() {
-
-            var utl = BASE_URL + '/scenarios';
-            var data = {
-                'status': ['success', 'danger', 'warning'],
-                'projects': ['functest', 'yardstick'],
-                'installers': ['apex', 'compass', 'fuel', 'joid'],
-                'version': $scope.VersionSelected,
-                'loops': $scope.LoopCiSelected,
-                'time': $scope.TimeSelected
-            };
-            var config = {
-                headers: {
-                    'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8;'
-                }
             }
-            $http.post(utl, data, config).then(function(response) {
-                if (response.status == 200) {
-                    $scope.scenario = response.data;
+
+            function reSettingcolspan() {
+                if ($scope.selectProjects == undefined || $scope.selectProjects == null) {
                     constructJson();
-                }
-            })
-        }
+                    $scope.colspan = $scope.tableDataAll.colspan;
 
-        //construct json 
-        function constructJson() {
+                } else {
+                    constructJson();
+                    $scope.colspan = $scope.tempColspan;
+                }
+                // console.log("test")
+            }
 
-            var colspan;
-            var InstallerData;
-            var projectsInfo;
-            $scope.tableDataAll["scenario"] = [];
+            //construct json 
+            function constructJson(selectProject) {
 
+                var colspan;
+                var InstallerData;
+                var projectsInfo;
+                $scope.tableDataAll["scenario"] = [];
 
-            for (var item in $scope.scenario.scenarios) {
 
-                var headData = Object.keys($scope.scenario.scenarios[item].installers).sort();
-                var scenarioStatus = $scope.scenario.scenarios[item].status;
-                var scenarioStatusDisplay;
-                if (scenarioStatus == "success") {
-                    scenarioStatusDisplay = "navy";
-                } else if (scenarioStatus == "danger") {
-                    scenarioStatusDisplay = "danger";
-                } else if (scenarioStatus == "warning") {
-                    scenarioStatusDisplay = "warning";
-                }
+                for (var item in $scope.scenario.scenarios) {
 
-                InstallerData = headData;
-                var projectData = [];
-                var datadisplay = [];
-                var projects = [];
+                    var headData = Object.keys($scope.scenario.scenarios[item].installers).sort();
+                    var scenarioStatus = $scope.scenario.scenarios[item].status;
+                    var scenarioStatusDisplay;
+                    if (scenarioStatus == "success") {
+                        scenarioStatusDisplay = "navy";
+                    } else if (scenarioStatus == "danger") {
+                        scenarioStatusDisplay = "danger";
+                    } else if (scenarioStatus == "warning") {
+                        scenarioStatusDisplay = "warning";
+                    }
 
-                for (var j = 0; j < headData.length; j++) {
+                    InstallerData = headData;
+                    var projectData = [];
+                    var datadisplay = [];
+                    var projects = [];
 
-                    projectData.push($scope.scenario.scenarios[item].installers[headData[j]]);
-                }
-                for (var j = 0; j < projectData.length; j++) {
-
-                    for (var k = 0; k < projectData[j].length; k++) {
-                        projects.push(projectData[j][k].project);
-                        var temArray = [];
-                        if (projectData[j][k].score == null) {
-                            temArray.push("null");
-                            temArray.push(projectData[j][k].project);
-                            temArray.push(headData[j]);
-                        } else {
-                            temArray.push(projectData[j][k].score);
-                            temArray.push(projectData[j][k].project);
-                            temArray.push(headData[j]);
-                        }
+                    for (var j = 0; j < headData.length; j++) {
 
+                        projectData.push($scope.scenario.scenarios[item].installers[headData[j]]);
+                    }
+                    for (var j = 0; j < projectData.length; j++) {
+
+                        for (var k = 0; k < projectData[j].length; k++) {
+                            projects.push(projectData[j][k].project);
+                            var temArray = [];
+                            if (projectData[j][k].score == null) {
+                                temArray.push("null");
+                                temArray.push(projectData[j][k].project);
+                                temArray.push(headData[j]);
+                            } else {
+                                temArray.push(projectData[j][k].score);
+                                temArray.push(projectData[j][k].project);
+                                temArray.push(headData[j]);
+                            }
+
+
+                            if (projectData[j][k].status == "platinium") {
+                                temArray.push("primary");
+                                temArray.push("P");
+                            } else if (projectData[j][k].status == "gold") {
+                                temArray.push("danger");
+                                temArray.push("G");
+                            } else if (projectData[j][k].status == "silver") {
+                                temArray.push("warning");
+                                temArray.push("S");
+                            } else if (projectData[j][k].status == null) {
+                                temArray.push("null");
+                            }
+
+                            datadisplay.push(temArray);
 
-                        if (projectData[j][k].status == "platinium") {
-                            temArray.push("primary");
-                            temArray.push("P");
-                        } else if (projectData[j][k].status == "gold") {
-                            temArray.push("danger");
-                            temArray.push("G");
-                        } else if (projectData[j][k].status == "silver") {
-                            temArray.push("warning");
-                            temArray.push("S");
-                        } else if (projectData[j][k].status == null) {
-                            temArray.push("null");
                         }
 
-                        datadisplay.push(temArray);
-
                     }
 
-                }
+                    colspan = projects.length / headData.length;
 
-                colspan = projects.length / headData.length;
-
-                var tabledata = {
-                    scenarioName: item,
-                    Installer: InstallerData,
-                    projectData: projectData,
-                    projects: projects,
-                    datadisplay: datadisplay,
-                    colspan: colspan,
-                    status: scenarioStatus,
-                    statusDisplay: scenarioStatusDisplay
-                };
+                    var tabledata = {
+                        scenarioName: item,
+                        Installer: InstallerData,
+                        projectData: projectData,
+                        projects: projects,
+                        datadisplay: datadisplay,
+                        colspan: colspan,
+                        status: scenarioStatus,
+                        statusDisplay: scenarioStatusDisplay
+                    };
 
-                JSON.stringify(tabledata);
-                $scope.tableDataAll.scenario.push(tabledata);
+                    JSON.stringify(tabledata);
+                    $scope.tableDataAll.scenario.push(tabledata);
 
-                // console.log(tabledata);
 
-            }
+                    // console.log(tabledata);
+
+                }
 
 
-            projectsInfo = $scope.tableDataAll.scenario[0].projects;
+                projectsInfo = $scope.tableDataAll.scenario[0].projects;
 
-            var tempHeadData = [];
+                var tempHeadData = [];
 
-            for (var i = 0; i < InstallerData.length; i++) {
-                for (var j = 0; j < colspan; j++) {
-                    tempHeadData.push(InstallerData[i]);
+                for (var i = 0; i < InstallerData.length; i++) {
+                    for (var j = 0; j < colspan; j++) {
+                        tempHeadData.push(InstallerData[i]);
+                    }
                 }
-            }
 
-            //console.log(tempHeadData);
+                //console.log(tempHeadData);
 
-            var projectsInfoAll = [];
+                var projectsInfoAll = [];
 
-            for (var i = 0; i < projectsInfo.length; i++) {
-                var tempA = [];
-                tempA.push(projectsInfo[i]);
-                tempA.push(tempHeadData[i]);
-                projectsInfoAll.push(tempA);
+                for (var i = 0; i < projectsInfo.length; i++) {
+                    var tempA = [];
+                    tempA.push(projectsInfo[i]);
+                    tempA.push(tempHeadData[i]);
+                    projectsInfoAll.push(tempA);
 
-            }
-            //console.log(projectsInfoAll);
+                }
+                //console.log(projectsInfoAll);
 
-            $scope.tableDataAll["colspan"] = colspan;
-            $scope.tableDataAll["Installer"] = InstallerData;
-            $scope.tableDataAll["Projects"] = projectsInfoAll;
+                $scope.tableDataAll["colspan"] = colspan;
+                $scope.tableDataAll["Installer"] = InstallerData;
+                $scope.tableDataAll["Projects"] = projectsInfoAll;
 
-            // console.log($scope.tableDataAll);
-            $scope.colspan = $scope.tableDataAll.colspan;
+                // console.log($scope.tableDataAll);
+                $scope.colspan = $scope.tableDataAll.colspan;
+                console.log($scope.tableDataAll);
 
-        }
+            }
 
-        //get json element size
-        function getSize(jsondata) {
-            var size = 0;
-            for (var item in jsondata) {
-                size++;
+            //get json element size
+            function getSize(jsondata) {
+                var size = 0;
+                for (var item in jsondata) {
+                    size++;
+                }
+                return size;
             }
-            return size;
-        }
 
-        $scope.colspan = $scope.tableDataAll.colspan;
-        // console.log($scope.colspan);
 
+            // console.log($scope.colspan);
 
-        //find all same element index 
-        function getSameElementIndex(array, element) {
-            var indices = [];
-            var idx = array.indexOf(element);
-            while (idx != -1) {
-                indices.push(idx);
-                idx = array.indexOf(element, idx + 1);
-            }
-            //return indices;
-            var result = { element: element, index: indices };
-            JSON.stringify(result);
-            return result;
-        }
 
-        //delete element in array
-        function deletElement(array, index) {
-            array.splice(index, 1);
+            //find all same element index 
+            function getSameElementIndex(array, element) {
+                var indices = [];
+                var idx = array.indexOf(element);
+                while (idx != -1) {
+                    indices.push(idx);
+                    idx = array.indexOf(element, idx + 1);
+                }
+                //return indices;
+                var result = { element: element, index: indices };
+                JSON.stringify(result);
+                return result;
+            }
 
-        }
+            //delete element in array
+            function deletElement(array, index) {
+                array.splice(index, 1);
 
-        function radioSetting(array1, array2, array3) {
-            var tempVersion = [];
-            var tempLoop = [];
-            var tempTime = [];
-            for (var i = 0; i < array1.length; i++) {
-                var temp = {
-                    title: array1[i]
-                };
-                tempVersion.push(temp);
-            }
-            for (var i = 0; i < array2.length; i++) {
-                var temp = {
-                    title: array2[i]
-                };
-                tempLoop.push(temp);
             }
-            for (var i = 0; i < array3.length; i++) {
-                var temp = {
-                    title: array3[i]
-                };
-                tempTime.push(temp);
+
+            function radioSetting(array1, array2, array3) {
+                var tempVersion = [];
+                var tempLoop = [];
+                var tempTime = [];
+                for (var i = 0; i < array1.length; i++) {
+                    var temp = {
+                        title: array1[i]
+                    };
+                    tempVersion.push(temp);
+                }
+                for (var i = 0; i < array2.length; i++) {
+                    var temp = {
+                        title: array2[i]
+                    };
+                    tempLoop.push(temp);
+                }
+                for (var i = 0; i < array3.length; i++) {
+                    var temp = {
+                        title: array3[i]
+                    };
+                    tempTime.push(temp);
+                }
+                $scope.VersionOption = tempVersion;
+                $scope.LoopOption = tempLoop;
+                $scope.TimeOption = tempTime;
             }
-            $scope.VersionOption = tempVersion;
-            $scope.LoopOption = tempLoop;
-            $scope.TimeOption = tempTime;
-        }
 
-        //remove element in the array
-        function removeArrayValue(arr, value) {
-            for (var i = 0; i < arr.length; i++) {
-                if (arr[i] == value) {
-                    arr.splice(i, 1);
-                    break;
+            //remove element in the array
+            function removeArrayValue(arr, value) {
+                for (var i = 0; i < arr.length; i++) {
+                    if (arr[i] == value) {
+                        arr.splice(i, 1);
+                        break;
+                    }
                 }
             }
-        }
 
-        //check if exist element
-        function checkElementArrayValue(arrayA, arrayB) {
-            for (var i = 0; i < arrayB.length; i++) {
-                if (arrayA.indexOf(arrayB[i].title) > -1) {
-                    removeArrayValue(arrayA, arrayB[i].title);
+            //check if exist element
+            function checkElementArrayValue(arrayA, arrayB) {
+                for (var i = 0; i < arrayB.length; i++) {
+                    if (arrayA.indexOf(arrayB[i].title) > -1) {
+                        removeArrayValue(arrayA, arrayB[i].title);
+                    }
                 }
             }
-        }
 
-        function toggleSelection(status) {
-            var idx = $scope.selection.indexOf(status);
+            function toggleSelection(status) {
+                var idx = $scope.selection.indexOf(status);
+
+                if (idx > -1) {
+                    $scope.selection.splice(idx, 1);
+                    filterData($scope.selection)
+                } else {
+                    $scope.selection.push(status);
+                    filterData($scope.selection)
+                }
+                // console.log($scope.selection);
 
-            if (idx > -1) {
-                $scope.selection.splice(idx, 1);
-                filterData($scope.selection)
-            } else {
-                $scope.selection.push(status);
-                filterData($scope.selection)
             }
-            // console.log($scope.selection);
 
-        }
+            //filter function
+            function filterData(selection) {
 
-        //filter function
-        function filterData(selection) {
+                $scope.selectInstallers = [];
+                $scope.selectProjects = [];
+                $scope.selectStatus = [];
+                for (var i = 0; i < selection.length; i++) {
+                    if ($scope.statusListString.indexOf(selection[i]) > -1) {
+                        $scope.selectStatus.push(selection[i]);
+                    }
+                    if ($scope.projectListString.indexOf(selection[i]) > -1) {
+                        $scope.selectProjects.push(selection[i]);
+                    }
+                    if ($scope.installerListString.indexOf(selection[i]) > -1) {
+                        $scope.selectInstallers.push(selection[i]);
+                    }
+                }
+
+
+                // $scope.colspan = $scope.selectProjects.length;
+                //when some selection is empty, we set it full
+                if ($scope.selectInstallers.length == 0) {
+                    $scope.selectInstallers = $scope.installerList;
 
-            $scope.selectInstallers = [];
-            $scope.selectProjects = [];
-            $scope.selectStatus = [];
-            for (var i = 0; i < selection.length; i++) {
-                if ($scope.statusListString.indexOf(selection[i]) > -1) {
-                    $scope.selectStatus.push(selection[i]);
                 }
-                if ($scope.projectListString.indexOf(selection[i]) > -1) {
-                    $scope.selectProjects.push(selection[i]);
+                if ($scope.selectProjects.length == 0) {
+                    $scope.selectProjects = $scope.projectList;
+                    $scope.colspan = $scope.tableDataAll.colspan;
+                } else {
+                    $scope.colspan = $scope.selectProjects.length;
+                    $scope.tempColspan = $scope.colspan;
                 }
-                if ($scope.installerListString.indexOf(selection[i]) > -1) {
-                    $scope.selectInstallers.push(selection[i]);
+                if ($scope.selectStatus.length == 0) {
+                    $scope.selectStatus = $scope.statusList
                 }
-            }
 
-            $scope.colspan = $scope.selectProjects.length;
-            //when some selection is empty, we set it full
-            if ($scope.selectInstallers.length == 0) {
-                $scope.selectInstallers = $scope.installerList;
+                // console.log($scope.selectStatus);
+                // console.log($scope.selectProjects);
 
             }
-            if ($scope.selectProjects.length == 0) {
-                $scope.selectProjects = $scope.projectList;
-                $scope.colspan = $scope.tableDataAll.colspan;
-            }
-            if ($scope.selectStatus.length == 0) {
-                $scope.selectStatus = $scope.statusList
-            }
 
-            // console.log($scope.selectStatus);
-            // console.log($scope.selectProjects);
 
         }
-
-
-    }]);
\ No newline at end of file
+    ]);
\ No newline at end of file
index def8e72..894e10f 100644 (file)
@@ -16,7 +16,7 @@ angular.module('opnfvApp')
             $scope.vsperf = "542,185,640,414";
             $scope.stor = "658,187,750,410";
             $scope.qtip = "769,190,852,416";
-            $scope.bootleneck = "870,192,983,419";
+            $scope.bottlenecks = "870,192,983,419";
             $scope.noPopArea1 = "26,8,1190,180";
             $scope.noPopArea2 = "1018,193,1190,590";
             $scope.noPopArea3 = "37,455,1003,584";
@@ -41,25 +41,18 @@ angular.module('opnfvApp')
                 $scope.tableData = null;
                 $scope.modalName = name;
 
-                var url = PROJECT_URL + '/projects/' + name + '/cases';
-
-                var config = {
-                    headers: {
-                        'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8;'
-                    }
-                }
-                $http.get(url, config).then(function(response) {
+                TableFactory.getProjectTestCases(name).then(function(response) {
                     if (response.status == 200) {
                         $scope.tableData = response.data;
 
                         $scope.tableData = constructObjectArray($scope.tableData);
                         console.log($scope.tableData);
                         $loading.finish('Key');
-
-
-
                     }
+                }, function(error) {
+
                 })
+
             }
 
             //construct key value for tableData
index 2a8cbd0..f0af34f 100644 (file)
@@ -4,11 +4,24 @@
  * get data factory
  */
 angular.module('opnfvApp')
-    .factory('TableFactory', function($resource, $rootScope) {
+    .factory('TableFactory', function($resource, $rootScope, $http) {
+
+        var BASE_URL = 'http://testresults.opnfv.org/reporting2';
+        $.ajax({
+          url: 'config.json',
+          async: false,
+          dataType: 'json',
+          success: function (response) {
+              BASE_URL = response.url;
+          },
+          error: function (response){
+              alert('fail to get api url, using default: http://testresults.opnfv.org/reporting2')
+          }
+        });
 
         return {
             getFilter: function() {
-                return $resource(BASE_URL + '/filters', {}, {
+                return $resource(BASE_URL + '/landing-page/filters', {}, {
                     'get': {
                         method: 'GET',
 
@@ -16,33 +29,42 @@ angular.module('opnfvApp')
                 });
             },
             getScenario: function() {
-                return $resource(BASE_URL + '/scenarios', {}, {
-                    'post': {
-                        method: 'POST',
+
+                var config = {
+                    headers: {
+                        'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8;'
                     }
-                })
+                }
+
+                return $http.post(BASE_URL + '/landing-page/scenarios', {}, config);
             },
+
+
             getProjectUrl: function() {
-                return $resource(PROJECT_URL + '/projects-page/projects', {}, {
+                return $resource(BASE_URL + '/projects-page/projects', {}, {
                     'get': {
                         method: 'GET'
                     }
                 })
             },
-            getProjectTestCases: function() {
-                return $resource(PROJECT_URL + '/projects/:project/cases', { project: '@project' }, {
-                    'get': {
-                        method: 'GET'
+            getProjectTestCases: function(name) {
+                var config = {
+                    headers: {
+                        'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8;'
                     }
-                })
+                };
+                return $http.get(BASE_URL + '/projects/' + name + '/cases', {}, config)
+
+
             },
             getProjectTestCaseDetail: function() {
-                return $resource(PROJECT_URL + '/projects/:project/cases/:testcase', { project: '@project', testcase: '@testcase' }, {
+                return $resource(BASE_URL + '/projects/:project/cases/:testcase', { project: '@project', testcase: '@testcase' }, {
                     'get': {
 
                         method: 'GET'
                     }
                 })
             }
+
         };
-    });
\ No newline at end of file
+    });
index 74eb56e..4de4e18 100644 (file)
@@ -20,7 +20,7 @@
                  <area shape="rect" coords={{vsperf}} alt="test" href="{{vsperfurl}}" onmouseover="pop(event)" ng-mouseover="myTrigger('vsperf')" />
                   <area shape="rect" coords={{stor}} alt="test" href="{{storperfurl}}" onmouseover="pop(event)" ng-mouseover="myTrigger('storperf')"/>
                    <area shape="rect" coords={{qtip}} alt="test" href="{{qtipurl}}"  onmouseover="pop(event)" ng-mouseover="myTrigger('qtip')" />
-                    <area shape="rect" coords={{bootleneck}} alt="test"  href="{{bottlenecksurl}}" onmouseover="pop(event)" ng-mouseover="myTrigger('bootlenecks')" />
+                    <area shape="rect" coords={{bottlenecks}} alt="test"  href="{{bottlenecksurl}}" onmouseover="pop(event)" ng-mouseover="myTrigger('bottlenecks')" />
                      <area shape="rect" coords={{noPopArea1}} alt="test" onmouseover="pophide(event)"  />
                       <area shape="rect" coords={{noPopArea2}} alt="test"  onmouseover="pophide(event)"  />
                        <area shape="rect" coords={{noPopArea3}} alt="test"  onmouseover="pophide(event)"  />
             $('#popup').hide();
             return true;
         }
-    </script>
\ No newline at end of file
+    </script>
diff --git a/utils/test/reporting/pages/config.sh b/utils/test/reporting/pages/config.sh
new file mode 100755 (executable)
index 0000000..f9bb89a
--- /dev/null
@@ -0,0 +1,3 @@
+: ${SERVER_URL:='testresults.opnfv.org/reporting2'}
+
+echo "{\"url\": \"http://${SERVER_URL}\"}" > dist/config.json
diff --git a/utils/test/reporting/run_test.sh b/utils/test/reporting/run_test.sh
new file mode 100755 (executable)
index 0000000..8c674ce
--- /dev/null
@@ -0,0 +1,44 @@
+#!/bin/bash
+set -o errexit
+set -o pipefail
+
+
+# Get script directory
+SCRIPTDIR=`dirname $0`
+
+# Creating virtual environment
+if [ ! -z $VIRTUAL_ENV ]; then
+    venv=$VIRTUAL_ENV
+else
+    venv=$SCRIPTDIR/.venv
+    virtualenv $venv
+fi
+
+source $venv/bin/activate
+
+export CONFIG_REPORTING_YAML=$SCRIPTDIR/reporting.yaml
+
+# ***************
+# Run unit tests
+# ***************
+echo "Running unit tests..."
+
+# install python packages
+easy_install -U setuptools
+easy_install -U pip
+pip install -r $SCRIPTDIR/docker/requirements.pip
+pip install -e $SCRIPTDIR
+
+python $SCRIPTDIR/setup.py develop
+
+# unit tests
+# TODO: remove cover-erase
+# To be deleted when all functest packages are listed
+nosetests --with-xunit \
+         --cover-package=$SCRIPTDIR/utils \
+         --with-coverage \
+         --cover-xml \
+         $SCRIPTDIR/tests/unit
+rc=$?
+
+deactivate
diff --git a/utils/test/reporting/run_unit_tests.sh b/utils/test/reporting/run_unit_tests.sh
deleted file mode 100755 (executable)
index 6b0e3b2..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o pipefail
-
-# ******************************
-# prepare the env for the tests
-# ******************************
-# Either Workspace is set (CI)
-if [ -z $WORKSPACE ]
-then
-    WORKSPACE="."
-fi
-
-export CONFIG_REPORTING_YAML=./reporting.yaml
-
-# ***************
-# Run unit tests
-# ***************
-echo "Running unit tests..."
-
-# start vitual env
-virtualenv $WORKSPACE/reporting_venv
-source $WORKSPACE/reporting_venv/bin/activate
-
-# install python packages
-easy_install -U setuptools
-easy_install -U pip
-pip install -r $WORKSPACE/docker/requirements.pip
-pip install -e $WORKSPACE
-
-python $WORKSPACE/setup.py develop
-
-# unit tests
-# TODO: remove cover-erase
-# To be deleted when all functest packages will be listed
-nosetests --with-xunit \
-         --cover-package=utils \
-         --with-coverage \
-         --cover-xml \
-         tests/unit
-rc=$?
-
-deactivate
index 599a938..0a178ba 100644 (file)
@@ -117,19 +117,29 @@ def getScenarios(case, installer, version):
     url = ("http://" + url_base + "?case=" + case +
            "&period=" + str(period) + "&installer=" + installer +
            "&version=" + version)
-    request = Request(url)
 
     try:
+        request = Request(url)
         response = urlopen(request)
         k = response.read()
         results = json.loads(k)
         test_results = results['results']
-    except URLError as e:
-        print('Got an error code:', e)
+
+        page = results['pagination']['total_pages']
+        if page > 1:
+            test_results = []
+            for i in range(1, page + 1):
+                url_page = url + "&page=" + str(i)
+                request = Request(url_page)
+                response = urlopen(request)
+                k = response.read()
+                results = json.loads(k)
+                test_results += results['results']
+    except URLError as err:
+        print('Got an error code:', err)
 
     if test_results is not None:
         test_results.reverse()
-
         scenario_results = {}
 
         for r in test_results:
@@ -157,7 +167,6 @@ def getScenarioStats(scenario_results):
     return scenario_stats
 
 
-# TODO convergence with above function getScenarios
 def getScenarioStatus(installer, version):
     period = get_config('general.period')
     url_base = get_config('testapi.url')
@@ -213,8 +222,8 @@ def getQtipResults(version, installer):
         k = response.read()
         response.close()
         results = json.loads(k)['results']
-    except URLError as e:
-        print('Got an error code:', e)
+    except URLError as err:
+        print('Got an error code:', err)
 
     result_dict = {}
     if results:
@@ -427,9 +436,9 @@ def export_csv(scenario_file_name, installer, version):
                                     "/functest/scenario_history_" +
                                     installer + ".csv")
     scenario_installer_file = open(scenario_installer_file_name, "a")
-    with open(scenario_file_name, "r") as f:
+    with open(scenario_file_name, "r") as scenario_file:
         scenario_installer_file.write("date,scenario,installer,detail,score\n")
-        for line in f:
+        for line in scenario_file:
             if installer in line:
                 scenario_installer_file.write(line)
         scenario_installer_file.close
index 4a2f23a..8c701c3 100644 (file)
         $stateProvider.
             state('home', {
                 url: '/',
-                templateUrl: '/testapi-ui/components/home/home.html'
+                templateUrl: 'testapi-ui/components/home/home.html'
             }).
             state('about', {
                 url: '/about',
-                templateUrl: '/testapi-ui/components/about/about.html'
+                templateUrl: 'testapi-ui/components/about/about.html'
             }).
             state('guidelines', {
                 url: '/guidelines',
-                templateUrl: '/testapi-ui/components/guidelines/guidelines.html',
+                templateUrl: 'testapi-ui/components/guidelines/guidelines.html',
                 controller: 'GuidelinesController as ctrl'
             }).
             state('communityResults', {
                 url: '/community_results',
-                templateUrl: '/testapi-ui/components/results/results.html',
+                templateUrl: 'testapi-ui/components/results/results.html',
                 controller: 'ResultsController as ctrl'
             }).
             state('userResults', {
-                url: '/user_results',
+                url: 'user_results',
                 templateUrl: '/testapi-ui/components/results/results.html',
                 controller: 'ResultsController as ctrl'
             }).
             state('resultsDetail', {
                 url: '/results/:testID',
-                templateUrl: '/testapi-ui/components/results-report' +
+                templateUrl: 'testapi-ui/components/results-report' +
                              '/resultsReport.html',
                 controller: 'ResultsReportController as ctrl'
             }).
             }).
             state('authFailure', {
                 url: '/auth_failure',
-                templateUrl: '/testapi-ui/components/home/home.html',
+                templateUrl: 'testapi-ui/components/home/home.html',
                 controller: 'AuthFailureController as ctrl'
             }).
             state('logout', {
                 url: '/logout',
-                templateUrl: '/testapi-ui/components/logout/logout.html',
+                templateUrl: 'testapi-ui/components/logout/logout.html',
                 controller: 'LogoutController as ctrl'
             }).
             state('userVendors', {
index 93a549a..9e3540d 100644 (file)
             ctrl.resultsRequest =
                 $http.get(content_url).success(function (data) {
                     ctrl.data = data;
-                    ctrl.totalItems = 20 // ctrl.data.pagination.total_pages * ctrl.itemsPerPage;
-                    ctrl.currentPage = 1 // ctrl.data.pagination.current_page;
+                    ctrl.totalItems = ctrl.data.pagination.total_pages * ctrl.itemsPerPage;
+                    ctrl.currentPage = ctrl.data.pagination.current_page;
                 }).error(function (error) {
                     ctrl.data = null;
                     ctrl.totalItems = 0;
index 748bd34..6433fa6 100644 (file)
@@ -8,10 +8,10 @@ docker_compose_yml = './docker-compose.yml'
 docker_compose_template = './docker-compose.yml.template'
 
 
-def render_docker_compose(port, swagger_url):
+def render_docker_compose(port, base_url):
     vars = {
         "expose_port": port,
-        "swagger_url": swagger_url,
+        "base_url": base_url,
     }
     template = env.get_template(docker_compose_template)
     yml = template.render(vars=vars)
@@ -22,7 +22,7 @@ def render_docker_compose(port, swagger_url):
 
 
 def main(args):
-    render_docker_compose(args.expose_port, args.swagger_url)
+    render_docker_compose(args.expose_port, args.base_url)
     os.system('docker-compose -f {} up -d'.format(docker_compose_yml))
 
 
@@ -33,8 +33,8 @@ if __name__ == '__main__':
                         required=False,
                         default=8000,
                         help='testapi exposed port')
-    parser.add_argument('-su', '--swagger-url',
+    parser.add_argument('-l', '--base-url',
                         type=str,
                         required=True,
-                        help='testapi exposed swagger-url')
+                        help='testapi exposed base-url')
     main(parser.parse_args())
index 5b131f7..cd68404 100644 (file)
@@ -8,7 +8,7 @@ services:
     container_name: opnfv-testapi
     environment:
       - mongodb_url=mongodb://mongo:27017/
-      - swagger_url={{ vars.swagger_url }}
+      - base_url={{ vars.base_url }}
     ports:
       - "{{ vars.expose_port }}:8000"
     links:
index e031e19..5311f35 100644 (file)
@@ -9,7 +9,7 @@
 #
 # Execution:
 #    $ docker run -dti -p 8001:8000 \
-#      -e "swagger_url=http://10.63.243.17:8001" \
+#      -e "base_url=http://10.63.243.17:8001" \
 #      -e "mongodb_url=mongodb://10.63.243.17:27017/" \
 #      opnfv/testapi:tag
 #
index 9f07efb..4f1be7d 100755 (executable)
@@ -6,6 +6,10 @@ if [ "$mongodb_url" != "" ]; then
     sudo crudini --set --existing $FILE mongo url $mongodb_url
 fi
 
-if [ "$swagger_url" != "" ]; then
-    sudo crudini --set --existing $FILE swagger base_url $swagger_url
+if [ "$base_url" != "" ]; then
+    sudo crudini --set --existing $FILE api url $base_url/api/v1
+    sudo crudini --set --existing $FILE swagger base_url $base_url
+    sudo crudini --set --existing $FILE ui url $base_url
+    sudo echo "{\"testapiApiUrl\": \"$base_url/api/v1\"}" > \
+        /usr/local/lib/python2.7/dist-packages/opnfv_testapi/static/testapi-ui/config.json
 fi
index 692e488..9ae2520 100644 (file)
@@ -10,6 +10,10 @@ dbname = test_results_collection
 # Listening port
 url = http://localhost:8000/api/v1
 port = 8000
+
+# Number of results for one page (integer value)
+#results_per_page = 20
+
 # With debug_on set to true, error traces will be shown in HTTP responses
 debug = True
 authenticate = False
@@ -41,7 +45,7 @@ openid_ns = http://specs.openid.net/auth/2.0
 # Return endpoint in Refstack's API. Value indicating the endpoint
 # where the user should be returned to after signing in. Openstack Id
 # Idp only supports HTTPS address types. (string value)
-openid_return_to = /api/v1/auth/signin_return
+openid_return_to = v1/auth/signin_return
 
 # Claimed identifier. This value must be set to
 # "http://specs.openid.net/auth/2.0/identifier_select". or to user
index b8c4fb4..4576d9b 100644 (file)
@@ -40,13 +40,13 @@ if __name__ == '__main__':
                         type=str,
                         required=False,
                         default=('http://testresults.opnfv.org'
-                                 '/test/swagger/spec.json'),
+                                 '/test/swagger/resources.json'),
                         help='Resource Listing Spec File')
     parser.add_argument('-au', '--api-declaration-url',
                         type=str,
                         required=False,
                         default=('http://testresults.opnfv.org'
-                                 '/test/swagger/spec'),
+                                 '/test/swagger/APIs'),
                         help='API Declaration Spec File')
     parser.add_argument('-o', '--output-directory',
                         required=True,
index 46765ff..f73c0ab 100644 (file)
@@ -17,6 +17,7 @@ class Config(object):
     def __init__(self):
         self.file = self.CONFIG if self.CONFIG else self._default_config()
         self._parse()
+        self._parse_per_page()
         self.static_path = os.path.join(
             os.path.dirname(os.path.normpath(__file__)),
             os.pardir,
@@ -37,6 +38,10 @@ class Config(object):
         [setattr(self, '{}_{}'.format(section, k), self._parse_value(v))
          for k, v in config.items(section)]
 
+    def _parse_per_page(self):
+        if not hasattr(self, 'api_results_per_page'):
+            self.api_results_per_page = 20
+
     @staticmethod
     def _parse_value(value):
         try:
index 2fc31ca..c7fed8f 100644 (file)
@@ -101,22 +101,71 @@ class GenericApiHandler(web.RequestHandler):
     @web.asynchronous
     @gen.coroutine
     def _list(self, query=None, res_op=None, *args, **kwargs):
+        sort = kwargs.get('sort')
+        page = kwargs.get('page', 0)
+        last = kwargs.get('last', 0)
+        per_page = kwargs.get('per_page', 0)
         if query is None:
             query = {}
-        data = []
         cursor = self._eval_db(self.table, 'find', query)
-        if 'sort' in kwargs:
-            cursor = cursor.sort(kwargs.get('sort'))
-        if 'last' in kwargs:
-            cursor = cursor.limit(kwargs.get('last'))
+        records_count = yield cursor.count()
+        total_pages = self._calc_total_pages(records_count,
+                                             last,
+                                             page,
+                                             per_page)
+        pipelines = self._set_pipelines(query, sort, last, page, per_page)
+        cursor = self._eval_db(self.table,
+                               'aggregate',
+                               pipelines,
+                               allowDiskUse=True)
+        data = list()
         while (yield cursor.fetch_next):
             data.append(self.format_data(cursor.next_object()))
         if res_op is None:
             res = {self.table: data}
         else:
             res = res_op(data, *args)
+        if total_pages > 0:
+            res.update({
+                'pagination': {
+                    'current_page': kwargs.get('page'),
+                    'total_pages': total_pages
+                }
+            })
         self.finish_request(res)
 
+    @staticmethod
+    def _calc_total_pages(records_count, last, page, per_page):
+        records_nr = records_count
+        if (records_count > last) and (last > 0):
+            records_nr = last
+
+        total_pages = 0
+        if page > 0:
+            total_pages, remainder = divmod(records_nr, per_page)
+            if remainder > 0:
+                total_pages += 1
+        if page > total_pages:
+            raises.BadRequest(
+                'Request page > total_pages [{}]'.format(total_pages))
+        return total_pages
+
+    @staticmethod
+    def _set_pipelines(query, sort, last, page, per_page):
+        pipelines = list()
+        if query:
+            pipelines.append({'$match': query})
+        if sort:
+            pipelines.append({'$sort': sort})
+
+        if page > 0:
+            pipelines.append({'$skip': (page - 1) * per_page})
+            pipelines.append({'$limit': per_page})
+        elif last > 0:
+            pipelines.append({'$limit': last})
+
+        return pipelines
+
     @web.asynchronous
     @gen.coroutine
     @check.not_exist
index 824a89e..1773216 100644 (file)
@@ -11,12 +11,15 @@ from datetime import timedelta
 
 from bson import objectid
 
+from opnfv_testapi.common import config
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
 from opnfv_testapi.resources import handlers
 from opnfv_testapi.resources import result_models
 from opnfv_testapi.tornado_swagger import swagger
 
+CONF = config.Config()
+
 
 class GenericResultHandler(handlers.GenericApiHandler):
     def __init__(self, application, request, **kwargs):
@@ -135,22 +138,28 @@ class ResultsCLHandler(GenericResultHandler):
             @type last: L{string}
             @in last: query
             @required last: False
+            @param page: which page to list
+            @type page: L{int}
+            @in page: query
+            @required page: False
             @param trust_indicator: must be float
             @type trust_indicator: L{float}
             @in trust_indicator: query
             @required trust_indicator: False
         """
+        limitations = {'sort': {'start_date': -1}}
         last = self.get_query_argument('last', 0)
         if last is not None:
             last = self.get_int('last', last)
+            limitations.update({'last': last})
 
-        page = self.get_query_argument('page', 0)
-        if page:
-            last = 20
+        page = self.get_query_argument('page', None)
+        if page is not None:
+            page = self.get_int('page', page)
+            limitations.update({'page': page,
+                                'per_page': CONF.api_results_per_page})
 
-        self._list(query=self.set_query(),
-                   sort=[('start_date', -1)],
-                   last=last)
+        self._list(query=self.set_query(), **limitations)
 
     @swagger.operation(nickname="createTestResult")
     def post(self):
index ef74a08..adaf6f7 100644 (file)
@@ -20,38 +20,52 @@ def thread_execute(method, *args, **kwargs):
 class MemCursor(object):
     def __init__(self, collection):
         self.collection = collection
-        self.count = len(self.collection)
+        self.length = len(self.collection)
         self.sorted = []
 
     def _is_next_exist(self):
-        return self.count != 0
+        return self.length != 0
 
     @property
     def fetch_next(self):
         return thread_execute(self._is_next_exist)
 
     def next_object(self):
-        self.count -= 1
+        self.length -= 1
         return self.collection.pop()
 
     def sort(self, key_or_list):
-        key = key_or_list[0][0]
-        if key_or_list[0][1] == -1:
-            reverse = True
-        else:
-            reverse = False
+        for k, v in key_or_list.iteritems():
+            if v == -1:
+                reverse = True
+            else:
+                reverse = False
 
-        if key_or_list is not None:
             self.collection = sorted(self.collection,
-                                     key=itemgetter(key), reverse=reverse)
+                                     key=itemgetter(k), reverse=reverse)
         return self
 
     def limit(self, limit):
         if limit != 0 and limit < len(self.collection):
-            self.collection = self.collection[0:limit]
-            self.count = limit
+            self.collection = self.collection[0: limit]
+            self.length = limit
+        return self
+
+    def skip(self, skip):
+        if skip < self.length and (skip > 0):
+            self.collection = self.collection[self.length - skip: -1]
+            self.length -= skip
+        elif skip >= self.length:
+            self.collection = []
+            self.length = 0
         return self
 
+    def _count(self):
+        return self.length
+
+    def count(self):
+        return thread_execute(self._count)
+
 
 class MemDb(object):
 
@@ -187,6 +201,27 @@ class MemDb(object):
     def find(self, *args):
         return MemCursor(self._find(*args))
 
+    def _aggregate(self, *args, **kwargs):
+        res = self.contents
+        print args
+        for arg in args[0]:
+            for k, v in arg.iteritems():
+                if k == '$match':
+                    res = self._find(v)
+        cursor = MemCursor(res)
+        for arg in args[0]:
+            for k, v in arg.iteritems():
+                if k == '$sort':
+                    cursor = cursor.sort(v)
+                elif k == '$skip':
+                    cursor = cursor.skip(v)
+                elif k == '$limit':
+                    cursor = cursor.limit(v)
+        return cursor
+
+    def aggregate(self, *args, **kwargs):
+        return self._aggregate(*args, **kwargs)
+
     def _update(self, spec, document, check_keys=True):
         updated = False