Merge "Comment Pending JJB Changes on Patchset"
author    Trevor Bramwell <tbramwell@linuxfoundation.org>
          Fri, 21 Jul 2017 18:03:55 +0000 (18:03 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
          Fri, 21 Jul 2017 18:03:55 +0000 (18:03 +0000)
54 files changed:
jjb/apex/apex.yml
jjb/apex/apex.yml.j2
jjb/apex/scenarios.yaml.hidden
jjb/ci_gate_security/anteater-security-audit-weekly.sh
jjb/compass4nfv/compass-ci-jobs.yml
jjb/doctor/doctor.yml
jjb/dovetail/dovetail-ci-jobs.yml
jjb/fuel/fuel-daily-jobs.yml
jjb/functest/functest-alpine.sh [new file with mode: 0644]
jjb/functest/functest-daily-jobs.yml
jjb/functest/set-functest-env.sh
jjb/global/slave-params.yml
jjb/qtip/qtip-verify-jobs.yml
jjb/releng/automate.yml
jjb/releng/docker-deploy.sh
jjb/yardstick/yardstick-daily-jobs.yml
prototypes/bifrost/playbooks/opnfv-virtual.yaml
utils/fetch_os_creds.sh
utils/test/testapi/.gitignore [new file with mode: 0644]
utils/test/testapi/3rd_party/static/testapi-ui/app.js
utils/test/testapi/3rd_party/static/testapi-ui/components/results/results.html
utils/test/testapi/3rd_party/static/testapi-ui/components/results/resultsController.js
utils/test/testapi/3rd_party/static/testapi-ui/shared/header/header.html
utils/test/testapi/etc/config.ini
utils/test/testapi/htmlize/htmlize.py
utils/test/testapi/opnfv_testapi/cmd/server.py
utils/test/testapi/opnfv_testapi/common/check.py
utils/test/testapi/opnfv_testapi/common/config.py
utils/test/testapi/opnfv_testapi/common/message.py
utils/test/testapi/opnfv_testapi/db/__init__.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/db/api.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/resources/handlers.py
utils/test/testapi/opnfv_testapi/resources/result_handlers.py
utils/test/testapi/opnfv_testapi/resources/result_models.py
utils/test/testapi/opnfv_testapi/router/url_mappings.py
utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py
utils/test/testapi/opnfv_testapi/tests/unit/conftest.py [new file with mode: 0644]
utils/test/testapi/opnfv_testapi/tests/unit/executor.py
utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_pod.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_project.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py
utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py
utils/test/testapi/opnfv_testapi/ui/auth/constants.py
utils/test/testapi/opnfv_testapi/ui/auth/sign.py
utils/test/testapi/opnfv_testapi/ui/auth/user.py
utils/test/testapi/opnfv_testapi/ui/root.py
utils/test/testapi/requirements.txt
utils/test/testapi/setup.py
utils/test/testapi/test-requirements.txt
utils/test/testapi/tox.ini
utils/test/testapi/update/templates/backup_mongodb.py
utils/test/testapi/update/templates/update_mongodb.py

diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index a395cf2..0123d75 100644 (file)
         - 'apex-csit-promote-daily-{stream}'
         - 'apex-fdio-promote-daily-{stream}'
         - 'apex-verify-iso-{stream}'
-        - 'apex-run-deploy-test-baremetal-{stream}'
+        - 'apex-{scenario}-baremetal-{scenario_stream}'
+        - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
         - 'apex-upload-snapshot'
         - 'apex-create-snapshot'
     # stream:    branch with - in place of / (eg. stable-arno)
     # branch:    branch (eg. stable/arno)
     stream:
-        - master:
+        - master: &master
             branch: 'master'
             gs-pathname: ''
             build-slave: 'apex-build-master'
@@ -27,8 +28,9 @@
             baremetal-slave: 'apex-baremetal-master'
             verify-scenario: 'os-odl-nofeature-ha'
             concurrent-builds: 3
+            scenario_stream: 'master'
 
-        - danube:
+        - danube: &danube
             branch: 'stable/danube'
             gs-pathname: '/danube'
             build-slave: 'apex-build-danube'
             baremetal-slave: 'apex-baremetal-danube'
             verify-scenario: 'os-odl_l3-nofeature-ha'
             concurrent-builds: 1
-            disabled: false
+            scenario_stream: 'danube'
+            disabled: true
+
+    scenario:
+        - 'os-nosdn-nofeature-noha':
+              <<: *danube
+        - 'os-nosdn-nofeature-ha':
+              <<: *danube
+        - 'os-nosdn-nofeature-ha-ipv6':
+              <<: *danube
+        - 'os-nosdn-ovs-noha':
+              <<: *danube
+        - 'os-nosdn-ovs-ha':
+              <<: *danube
+        - 'os-nosdn-fdio-noha':
+              <<: *danube
+        - 'os-nosdn-fdio-ha':
+              <<: *danube
+        - 'os-nosdn-kvm-ha':
+              <<: *danube
+        - 'os-nosdn-kvm-noha':
+              <<: *danube
+        - 'os-odl_l2-fdio-noha':
+              <<: *danube
+        - 'os-odl_l2-fdio-ha':
+              <<: *danube
+        - 'os-odl_netvirt-fdio-noha':
+              <<: *danube
+        - 'os-odl_l2-sfc-noha':
+              <<: *danube
+        - 'os-odl_l3-nofeature-noha':
+              <<: *danube
+        - 'os-odl_l3-nofeature-ha':
+              <<: *danube
+        - 'os-odl_l3-ovs-noha':
+              <<: *danube
+        - 'os-odl_l3-ovs-ha':
+              <<: *danube
+        - 'os-odl-bgpvpn-ha':
+              <<: *danube
+        - 'os-odl-gluon-noha':
+              <<: *danube
+        - 'os-odl_l3-fdio-noha':
+              <<: *danube
+        - 'os-odl_l3-fdio-ha':
+              <<: *danube
+        - 'os-odl_l3-fdio_dvr-noha':
+              <<: *danube
+        - 'os-odl_l3-fdio_dvr-ha':
+              <<: *danube
+        - 'os-odl_l3-csit-noha':
+              <<: *danube
+        - 'os-onos-nofeature-ha':
+              <<: *danube
+        - 'os-ovn-nofeature-noha':
+              <<: *danube
+        - 'os-nosdn-nofeature-noha':
+              <<: *master
+        - 'os-nosdn-nofeature-ha':
+              <<: *master
+        - 'os-odl-nofeature-ha':
+              <<: *master
+        - 'os-odl-nofeature-noha':
+              <<: *master
+        - 'os-odl-bgpvpn-ha':
+              <<: *master
 
     platform:
          - 'baremetal'
 - job-template:
     name: 'apex-deploy-{platform}-{stream}'
 
-    # Job template for virtual deployment
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
     node: 'apex-{platform}-{stream}'
 
     concurrent: true
             fail: true
 
     parameters:
+        - '{project}-{platform}-{stream}-defaults'
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
                 - 'apex-deploy.*'
                 - 'functest.*'
                 - 'yardstick.*'
+                - 'dovetail.*'
         - throttle:
             max-per-node: 1
             max-total: 10
 
 # Baremetal Deploy and Test
 - job-template:
-    name: 'apex-run-deploy-test-baremetal-{stream}'
+    name: 'apex-{scenario}-baremetal-{scenario_stream}'
 
-    # Job template for daily build
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
     project-type: 'multijob'
 
     disabled: false
 
     parameters:
         - '{project}-defaults'
-        - '{project}-baremetal-{stream}-defaults'
+        - '{project}-baremetal-{scenario_stream}-defaults'
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
             gs-pathname: '{gs-pathname}'
         - string:
             name: DEPLOY_SCENARIO
-            default: '{verify-scenario}'
+            default: '{scenario}'
             description: "Scenario to deploy with."
     properties:
         - logrotate-default
                 - 'apex-runner.*'
                 - 'apex-.*-promote.*'
                 - 'apex-run.*'
+                - 'apex-.+-baremetal-.+'
+        - throttle:
+            max-per-node: 1
+            max-total: 10
+            option: 'project'
     builders:
         - description-setter:
             description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
             name: 'Baremetal Deploy'
             condition: ALWAYS
             projects:
-                - name: 'apex-deploy-baremetal-{stream}'
+                - name: 'apex-deploy-baremetal-{scenario_stream}'
                   node-parameters: true
                   current-parameters: true
                   predefined-parameters: |
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: false
+        - multijob:
+            name: 'OPNFV Test Suite'
+            condition: SUCCESSFUL
+            projects:
+                - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+
+# Baremetal test job
+- job-template:
+    name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+
+    project-type: 'multijob'
+
+    disabled: false
+
+    parameters:
+        - '{project}-defaults'
+        - '{project}-baremetal-{scenario_stream}-defaults'
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - apex-parameter:
+            gs-pathname: '{gs-pathname}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: '{scenario}'
+            description: "Scenario to deploy with."
+    properties:
+        - logrotate-default
+        - build-blocker:
+            use-build-blocker: true
+            block-level: 'NODE'
+            blocking-jobs:
+                - 'apex-verify.*'
+                - 'apex-runner.*'
+                - 'apex-.*-promote.*'
+                - 'apex-run.*'
+                - 'apex-testsuite-.+-baremetal-.+'
+        - throttle:
+            max-per-node: 1
+            max-total: 10
+            option: 'project'
+    builders:
+        - description-setter:
+            description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
         - multijob:
             name: Functest
             condition: ALWAYS
             projects:
-                - name: 'functest-apex-baremetal-daily-{stream}'
+                - name: 'functest-apex-baremetal-daily-{scenario_stream}'
                   node-parameters: true
                   current-parameters: false
                   predefined-parameters:
             name: Yardstick
             condition: ALWAYS
             projects:
-                - name: 'yardstick-apex-baremetal-daily-{stream}'
+                - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
                   node-parameters: true
                   current-parameters: false
                   predefined-parameters:
                   kill-phase-on: NEVER
                   abort-all-job: false
                   git-revision: false
+        - multijob:
+            name: Dovetail
+            condition: ALWAYS
+            projects:
+                - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  enable-condition: "DEPLOY_SCENARIO =~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
+                  abort-all-job: false
+                  git-revision: false
+        - conditional-step:
+            condition-kind: current-status
+            condition-worst: SUCCESS
+            condition-best: SUCCESS
+            on-evaluation-failure: mark-unstable
+            steps:
+                - shell: 'echo "Tests Passed"'
 
 
 # danube Daily
             condition: SUCCESSFUL
             projects:
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-nofeature-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-nofeature-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-nofeature-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-nofeature-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-nofeature-ha-ipv6
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-ovs-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-ovs-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-ovs-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-ovs-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-fdio-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-fdio-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-fdio-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-fdio-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-kvm-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-kvm-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-nosdn-kvm-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-kvm-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l2-fdio-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l2-fdio-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l2-fdio-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l2-fdio-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_netvirt-fdio-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l2-sfc-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l2-sfc-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-nofeature-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-nofeature-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-ovs-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-ovs-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-ovs-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-ovs-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl-bgpvpn-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl-bgpvpn-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl-gluon-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl-gluon-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-fdio-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-fdio-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-fdio-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-fdio-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-fdio_dvr-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-fdio_dvr-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-fdio_dvr-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-fdio_dvr-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-odl_l3-csit-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl_l3-csit-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-onos-nofeature-ha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-onos-nofeature-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-danube'
+                - name: 'apex-os-ovn-nofeature-noha-baremetal-danube'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-ovn-nofeature-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
             condition: SUCCESSFUL
             projects:
 
-                - name: 'apex-run-deploy-test-baremetal-master'
+                - name: 'apex-os-nosdn-nofeature-noha-baremetal-master'
+                  node-parameters: false
+                  current-parameters: false
+                  predefined-parameters: |
+                    OPNFV_CLEAN=yes
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+                - name: 'apex-os-nosdn-nofeature-ha-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-nofeature-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-master'
+                - name: 'apex-os-odl-nofeature-ha-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-nosdn-nofeature-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-master'
+                - name: 'apex-os-odl-nofeature-noha-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl-nofeature-ha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
                   git-revision: false
 
-                - name: 'apex-run-deploy-test-baremetal-master'
+                - name: 'apex-os-odl-bgpvpn-ha-baremetal-master'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO=os-odl-nofeature-noha
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
 - trigger:
     name: 'apex-master'
     triggers:
-        - timed: '0 3 1 1 7'
+        - timed: '0 12 * * *'
 - trigger:
     name: 'apex-danube'
     triggers:
-        - timed: '0 12 * * *'
+        - timed: '0 3 1 1 7'
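
The scenario matrix above is built from plain YAML anchors and merge keys: each stream entry is tagged with an anchor (&master, &danube), and every scenario entry folds that stream's variables in through the merge key (<<: *danube). A minimal sketch of the expansion, with the variable set trimmed for illustration:

    stream:
        - master: &master
            branch: 'master'
            scenario_stream: 'master'

    scenario:
        - 'os-odl-nofeature-ha':
              <<: *master
        # after merge-key expansion the entry above is equivalent to:
        # - 'os-odl-nofeature-ha':
        #       branch: 'master'
        #       scenario_stream: 'master'

JJB then instantiates 'apex-{scenario}-baremetal-{scenario_stream}' once per scenario entry, with branch, gs-pathname and the slave labels supplied by the merged stream variables.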
diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2
index 752cf28..7466a83 100644 (file)
         - 'apex-csit-promote-daily-{stream}'
         - 'apex-fdio-promote-daily-{stream}'
         - 'apex-verify-iso-{stream}'
-        - 'apex-run-deploy-test-baremetal-{stream}'
+        - 'apex-{scenario}-baremetal-{scenario_stream}'
+        - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
         - 'apex-upload-snapshot'
         - 'apex-create-snapshot'
     # stream:    branch with - in place of / (eg. stable-arno)
     # branch:    branch (eg. stable/arno)
     stream:
-        - master:
+        - master: &master
             branch: 'master'
             gs-pathname: ''
             build-slave: 'apex-build-master'
@@ -27,8 +28,9 @@
             baremetal-slave: 'apex-baremetal-master'
             verify-scenario: 'os-odl-nofeature-ha'
             concurrent-builds: 3
+            scenario_stream: 'master'
 
-        - danube:
+        - danube: &danube
             branch: 'stable/danube'
             gs-pathname: '/danube'
             build-slave: 'apex-build-danube'
             baremetal-slave: 'apex-baremetal-danube'
             verify-scenario: 'os-odl_l3-nofeature-ha'
             concurrent-builds: 1
-            disabled: false
+            scenario_stream: 'danube'
+            disabled: true
+
+    scenario:
+        {%- for stream in scenarios %}
+        {%- for scenario in scenarios[stream] %}
+        - '{{scenario}}':
+              <<: *{{stream}}
+        {%- endfor %}
+        {%- endfor %}
 
     platform:
          - 'baremetal'
 - job-template:
     name: 'apex-deploy-{platform}-{stream}'
 
-    # Job template for virtual deployment
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
     node: 'apex-{platform}-{stream}'
 
     concurrent: true
             fail: true
 
     parameters:
+        - '{project}-{platform}-{stream}-defaults'
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
                 - 'apex-deploy.*'
                 - 'functest.*'
                 - 'yardstick.*'
+                - 'dovetail.*'
         - throttle:
             max-per-node: 1
             max-total: 10
 
 # Baremetal Deploy and Test
 - job-template:
-    name: 'apex-run-deploy-test-baremetal-{stream}'
+    name: 'apex-{scenario}-baremetal-{scenario_stream}'
 
-    # Job template for daily build
-    #
-    # Required Variables:
-    #     stream:    branch with - in place of / (eg. stable)
-    #     branch:    branch (eg. stable)
     project-type: 'multijob'
 
     disabled: false
 
     parameters:
         - '{project}-defaults'
-        - '{project}-baremetal-{stream}-defaults'
+        - '{project}-baremetal-{scenario_stream}-defaults'
         - project-parameter:
             project: '{project}'
             branch: '{branch}'
             gs-pathname: '{gs-pathname}'
         - string:
             name: DEPLOY_SCENARIO
-            default: '{verify-scenario}'
+            default: '{scenario}'
             description: "Scenario to deploy with."
     properties:
         - logrotate-default
                 - 'apex-runner.*'
                 - 'apex-.*-promote.*'
                 - 'apex-run.*'
+                - 'apex-.+-baremetal-.+'
+        - throttle:
+            max-per-node: 1
+            max-total: 10
+            option: 'project'
     builders:
         - description-setter:
             description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
             name: 'Baremetal Deploy'
             condition: ALWAYS
             projects:
-                - name: 'apex-deploy-baremetal-{stream}'
+                - name: 'apex-deploy-baremetal-{scenario_stream}'
                   node-parameters: true
                   current-parameters: true
                   predefined-parameters: |
                   kill-phase-on: FAILURE
                   abort-all-job: true
                   git-revision: false
+        - multijob:
+            name: 'OPNFV Test Suite'
+            condition: SUCCESSFUL
+            projects:
+                - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  abort-all-job: true
+                  git-revision: false
+
+
+# Baremetal test job
+- job-template:
+    name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+
+    project-type: 'multijob'
+
+    disabled: false
+
+    parameters:
+        - '{project}-defaults'
+        - '{project}-baremetal-{scenario_stream}-defaults'
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - apex-parameter:
+            gs-pathname: '{gs-pathname}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: '{scenario}'
+            description: "Scenario to deploy with."
+    properties:
+        - logrotate-default
+        - build-blocker:
+            use-build-blocker: true
+            block-level: 'NODE'
+            blocking-jobs:
+                - 'apex-verify.*'
+                - 'apex-runner.*'
+                - 'apex-.*-promote.*'
+                - 'apex-run.*'
+                - 'apex-testsuite-.+-baremetal-.+'
+        - throttle:
+            max-per-node: 1
+            max-total: 10
+            option: 'project'
+    builders:
+        - description-setter:
+            description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
         - multijob:
             name: Functest
             condition: ALWAYS
             projects:
-                - name: 'functest-apex-baremetal-daily-{stream}'
+                - name: 'functest-apex-baremetal-daily-{scenario_stream}'
                   node-parameters: true
                   current-parameters: false
                   predefined-parameters:
             name: Yardstick
             condition: ALWAYS
             projects:
-                - name: 'yardstick-apex-baremetal-daily-{stream}'
+                - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
+                  node-parameters: true
+                  current-parameters: false
+                  predefined-parameters:
+                    DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+                  kill-phase-on: NEVER
+                  abort-all-job: false
+                  git-revision: false
+        - multijob:
+            name: Dovetail
+            condition: ALWAYS
+            projects:
+                - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
                   node-parameters: true
                   current-parameters: false
                   predefined-parameters:
                     DEPLOY_SCENARIO=$DEPLOY_SCENARIO
                   kill-phase-on: NEVER
+                  enable-condition: "DEPLOY_SCENARIO =~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
                   abort-all-job: false
                   git-revision: false
+        - conditional-step:
+            condition-kind: current-status
+            condition-worst: SUCCESS
+            condition-best: SUCCESS
+            on-evaluation-failure: mark-unstable
+            steps:
+                - shell: 'echo "Tests Passed"'
 
 {% for stream in scenarios %}
 # {{ stream }} Daily
             condition: SUCCESSFUL
             projects:
 {% for scenario in scenarios[stream] %}
-                - name: 'apex-run-deploy-test-baremetal-{{ stream }}'
+                - name: 'apex-{{ scenario }}-baremetal-{{ stream }}'
                   node-parameters: false
                   current-parameters: false
                   predefined-parameters: |
-                    DEPLOY_SCENARIO={{scenario}}
                     OPNFV_CLEAN=yes
                   kill-phase-on: NEVER
                   abort-all-job: true
 - trigger:
     name: 'apex-master'
     triggers:
-        - timed: '0 3 1 1 7'
+        - timed: '0 12 * * *'
 - trigger:
     name: 'apex-danube'
     triggers:
-        - timed: '0 12 * * *'
+        - timed: '0 3 1 1 7'
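
The template above derives the same scenario axis from data instead of the hand-maintained list in apex.yml; the scenarios.yaml.hidden file diffed next holds the per-stream scenario names, presumably fed to the template as the scenarios variable by the job generator. As a sketch, input of the form

    master:
      - 'os-nosdn-nofeature-ha'
      - 'os-odl-bgpvpn-ha'

renders through the two nested loops to

    scenario:
        - 'os-nosdn-nofeature-ha':
              <<: *master
        - 'os-odl-bgpvpn-ha':
              <<: *master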
 
diff --git a/jjb/apex/scenarios.yaml.hidden b/jjb/apex/scenarios.yaml.hidden
index dc9107a..748cd21 100644 (file)
@@ -3,6 +3,7 @@ master:
   - 'os-nosdn-nofeature-ha'
   - 'os-odl-nofeature-ha'
   - 'os-odl-nofeature-noha'
+  - 'os-odl-bgpvpn-ha'
 danube:
   - 'os-nosdn-nofeature-noha'
   - 'os-nosdn-nofeature-ha'
diff --git a/jjb/ci_gate_security/anteater-security-audit-weekly.sh b/jjb/ci_gate_security/anteater-security-audit-weekly.sh
index 436a173..1190963 100644 (file)
@@ -15,7 +15,7 @@ source $WORKSPACE/opnfv-projects.sh
 for project in "${PROJECT_LIST[@]}"
 
 do
-  cmd="anteater --project testproj --path /home/opnfv/anteater/allrepos/$project"
+  cmd="/home/opnfv/venv/bin/anteater --project testproj --path /home/opnfv/anteater/allrepos/$project"
   echo "Executing command inside container"
   echo "$cmd"
   echo "--------------------------------------------------------"
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 3335391..467e168 100644 (file)
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-baremetal-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 14 * * *'
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-baremetal-danube-trigger'
 - trigger:
     name: 'compass-os-nosdn-kvm-ha-virtual-master-trigger'
     triggers:
-        - timed: ''
+        - timed: '0 23 * * *'
 
 - trigger:
     name: 'compass-os-nosdn-nofeature-ha-virtual-danube-trigger'
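
The compass trigger changes here, like the apex trigger swap earlier, use Jenkins cron syntax: five fields for minute, hour, day-of-month, month and day-of-week, with an empty string meaning the trigger never fires. An annotated sketch of the two styles in this change:

    - trigger:
        name: 'compass-os-nosdn-kvm-ha-baremetal-master-trigger'
        triggers:
            # minute hour day-of-month month day-of-week
            - timed: '0 14 * * *'    # every day at 14:00

    - trigger:
        name: 'apex-danube'
        triggers:
            # day-of-month and day-of-week are OR'd when both are restricted,
            # so this fires only at 03:00 on Jan 1 and on January Sundays;
            # in effect it parks the trigger without disabling the job
            - timed: '0 3 1 1 7'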
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index c5454c7..5bb8f74 100644 (file)
             #       so this symbolic link should not be in 'tests/'. Otherwise,
             #       we'll have the same log twice in jenkins console log.
             ln -sfn $HOME/opnfv/functest/results/{stream} functest_results
+            # NOTE: Fetch the functest script into $WORKSPACE. set-functest-env.sh
+            #       needs this script to download the VM images used by test
+            #       cases from the E release cycle onward.
+            mkdir -p functest/ci
+            wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O functest/ci/download_images.sh
         - 'functest-suite-builder'
         - shell: |
             functest_log="$HOME/opnfv/functest/results/{stream}/{project}.log"
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index 43978f6..bcda2b7 100644 (file)
@@ -25,7 +25,7 @@
         branch: 'stable/{stream}'
         dovetail-branch: master
         gs-pathname: '/{stream}'
-        docker-tag: 'cvp.0.2.0'
+        docker-tag: 'cvp.0.3.0'
 
 #-----------------------------------
 # POD, PLATFORM, AND BRANCH MAPPING
 # that have not been switched using labels for slaves
 #--------------------------------
 #apex PODs
-        - lf-pod1:
-            slave-label: '{pod}'
+        - virtual:
+            slave-label: apex-virtual-master
             SUT: apex
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *master
-        - lf-pod1:
-            slave-label: '{pod}'
+        - baremetal:
+            slave-label: apex-baremetal-master
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - virtual:
+            slave-label: apex-virtual-danube
+            SUT: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *danube
+        - baremetal:
+            slave-label: apex-baremetal-danube
             SUT: apex
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
             <<: *danube
 #--------------------------------
     testsuite:
-        - 'debug'
         - 'compliance_set'
         - 'proposed_tests'
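
The apex pod entries above complete the move from the hard-coded lf-pod1 slave to label-based scheduling, matching the other installers: each pod axis entry names a slave-label, and the job template resolves the node through the corresponding '-defaults' parameter macro in jjb/global/slave-params.yml, which pins SLAVE_LABEL. A sketch of the pairing; the macro body is assumed to follow the shape of the ericsson-virtual-pod1bl01 macro added later in this change:

    # pod axis entry, as in the diff above
    - baremetal:
        slave-label: apex-baremetal-master
        SUT: apex
        auto-trigger-name: 'daily-trigger-disabled'
        <<: *master

    # assumed matching macro in jjb/global/slave-params.yml
    - parameter:
        name: 'apex-baremetal-master-defaults'
        parameters:
            - label:
                name: SLAVE_LABEL
                default: 'apex-baremetal-master'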
 
diff --git a/jjb/fuel/fuel-daily-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml
index 6867708..7a57cb5 100644 (file)
                         build-step-failure-threshold: 'never'
                         failure-threshold: 'never'
                         unstable-threshold: 'FAILURE'
+        # ZTE pod1 weekly (Saturday), os-odl_l2-nofeature-ha, run against master and danube
+        - conditional-step:
+            condition-kind: and
+            condition-operands:
+                - condition-kind: regex-match
+                  regex: os-odl_l2-nofeature-ha
+                  label: '{scenario}'
+                - condition-kind: regex-match
+                  regex: zte-pod1
+                  label: '{pod}'
+                - condition-kind: day-of-week
+                  day-selector: select-days
+                  days:
+                      SAT: true
+                  use-build-time: true
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-fuel-zte-pod1-proposed_tests-{stream}'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
 
     publishers:
         - email:
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '' # '35 15 * * *'
+        - timed: '35 15 * * *'
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '' # '5 18 * * *'
+        - timed: '5 18 * * *'
 - trigger:
     name: 'fuel-os-onos-sfc-noha-virtual-daily-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
     triggers:
-        - timed: '' # '5 9 * * *'
+        - timed: '5 9 * * *'
 - trigger:
     name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
     triggers:
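
The new conditional-step in this file gates the weekly dovetail run on three AND'ed operands: a scenario regex, a pod regex, and a day-of-week check evaluated at build time. The same composition works for any operand mix; a sketch that would fire only for HA scenarios on Sundays (regex and day chosen for illustration):

    - conditional-step:
        condition-kind: and
        condition-operands:
            - condition-kind: regex-match
              regex: .*-ha
              label: '{scenario}'
            - condition-kind: day-of-week
              day-selector: select-days
              days:
                  SUN: true
              use-build-time: true
        steps:
            - shell: 'echo "weekly HA run"'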
diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh
new file mode 100644 (file)
index 0000000..512a01e
--- /dev/null
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+set -e
+set +u
+set +o pipefail
+
+[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+FUNCTEST_DIR=/home/opnfv/functest
+
+# Prepare OpenStack credentials volume
+if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
+    rc_file=$LAB_CONFIG/admin-openrc
+elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then
+    cacert_file_vol="-v ${HOME}/os_cacert:${FUNCTEST_DIR}/conf/os_cacert"
+    echo "export OS_CACERT=${FUNCTEST_DIR}/conf/os_cacert" >> ${HOME}/opnfv-openrc.sh
+    rc_file=${HOME}/opnfv-openrc.sh
+else
+    rc_file=${HOME}/opnfv-openrc.sh
+fi
+rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/openstack.creds"
+
+
+# Set iptables rule to allow forwarding return traffic for container
+if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
+    sudo iptables -I FORWARD -j RETURN
+fi
+
+DEPLOY_TYPE=baremetal
+[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
+HOST_ARCH=$(uname -m)
+
+echo "Functest: Start Docker and prepare environment"
+
+echo "Functest: Download images that will be used by test cases"
+images_dir="${HOME}/opnfv/functest/images"
+download_script=${WORKSPACE}/functest/ci/download_images.sh
+if [[ ! -f ${download_script} ]]; then
+    # to support Danube as well
+    wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O ${download_script} 2> ${redirect}
+fi
+chmod +x ${download_script}
+${download_script} ${images_dir} ${DEPLOY_SCENARIO} ${HOST_ARCH} 2> ${redirect}
+
+images_vol="-v ${images_dir}:${FUNCTEST_DIR}/images"
+
+dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
+mkdir -p ${dir_result}
+sudo rm -rf ${dir_result}/*
+results_vol="-v ${dir_result}:${FUNCTEST_DIR}/results"
+custom_params=
+test -f ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG} && custom_params=$(cat ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG})
+
+envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
+    -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
+    -e BUILD_TAG=${BUILD_TAG} -e DEPLOY_TYPE=${DEPLOY_TYPE}"
+
+if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-ha'* ]]; then
+    ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+    openo_msb_port=${openo_msb_port:-80}
+    openo_msb_endpoint="$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${INSTALLER_IP} \
+    'mysql -ucompass -pcompass -Dcompass -e "select package_config from cluster;" \
+    | sed s/,/\\n/g | grep openo_ip | cut -d \" -f 4'):$openo_msb_port"
+
+    envs=${envs}" -e OPENO_MSB_ENDPOINT=${openo_msb_endpoint}"
+fi
+
+volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol}"
+
+
+tiers=(healthcheck smoke)
+for tier in ${tiers[@]}; do
+    FUNCTEST_IMAGE=opnfv/functest-${tier}
+    echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
+    docker pull ${FUNCTEST_IMAGE}>/dev/null
+    cmd="docker run ${envs} ${volumes} ${FUNCTEST_IMAGE}"
+    echo "Running Functest tier '${tier}'. CMD: ${cmd}"
+    ${cmd}
+done
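
Each pass of the tier loop above runs one purpose-built Alpine image (opnfv/functest-healthcheck, then opnfv/functest-smoke) with the shared envs and volumes strings; note that sshkey_vol is referenced in volumes but never set in this script, so it expands to nothing unless an earlier build step exports it. A rough sketch of what the smoke iteration executes on an apex baremetal slave, shown as a JJB shell snippet with illustrative values:

    - shell: |
        docker run -e INSTALLER_TYPE=apex -e INSTALLER_IP=192.0.2.1 \
            -e NODE_NAME=lf-pod1 -e DEPLOY_SCENARIO=os-nosdn-nofeature-ha \
            -e BUILD_TAG=jenkins-apex-daily-master-42 -e DEPLOY_TYPE=baremetal \
            -v ${HOME}/opnfv/functest/images:/home/opnfv/functest/images \
            -v ${HOME}/opnfv/functest/results/master:/home/opnfv/functest/results \
            -v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds \
            opnfv/functest-smoke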
diff --git a/jjb/functest/functest-daily-jobs.yml b/jjb/functest/functest-daily-jobs.yml
index fdef6f4..cc9bac0 100644 (file)
 #            <<: *master
 #--------------------------------
 
+    alpine-pod:
+        - ericsson-virtual-pod1bl01:
+            slave-label: '{alpine-pod}'
+            installer: fuel
+            <<: *master
+
     testsuite:
         - 'suite':
             job-timeout: 60
 
     jobs:
         - 'functest-{installer}-{pod}-{testsuite}-{stream}'
+        - 'functest-alpine-{installer}-{alpine-pod}-{testsuite}-{stream}'
 
 ################################
 # job template
             description: "Built on $NODE_NAME"
         - 'functest-{testsuite}-builder'
 
+- job-template:
+    name: 'functest-alpine-{installer}-{alpine-pod}-{testsuite}-{stream}'
+
+    concurrent: true
+
+    properties:
+        - logrotate-default
+        - throttle:
+            enabled: true
+            max-per-node: 1
+            option: 'project'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER Suite: $FUNCTEST_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+        - timeout:
+            timeout: '{job-timeout}'
+            abort: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - '{installer}-defaults'
+        - '{slave-label}-defaults'
+        - 'functest-{testsuite}-parameter'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: 'os-nosdn-nofeature-noha'
+        - functest-parameter:
+            gs-pathname: '{gs-pathname}'
+
+    scm:
+        - git-scm
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - 'functest-alpine-daily-builder'
+
 ########################
 # parameter macros
 ########################
         - 'functest-store-results'
         - 'functest-exit'
 
+- builder:
+    name: functest-alpine-daily-builder
+    builders:
+        - shell:
+            !include-raw:
+                - ./functest-env-presetup.sh
+                - ../../utils/fetch_os_creds.sh
+                - ./functest-alpine.sh
+
 - builder:
     name: functest-daily
     builders:
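
The functest-alpine-daily-builder macro above leans on JJB's !include-raw tag: the listed files are inlined verbatim and concatenated into a single shell build step, so functest-env-presetup.sh, fetch_os_creds.sh and functest-alpine.sh run in one shell and can share exported variables. A minimal sketch of the mechanism, with hypothetical file names:

    - builder:
        name: example-concatenated-builder
        builders:
            - shell:
                !include-raw:
                    - ./fetch-prereqs.sh    # inlined first ...
                    - ./run-tests.sh        # ... then appended, forming one script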
diff --git a/jjb/functest/set-functest-env.sh b/jjb/functest/set-functest-env.sh
index 5f936f5..f6071e3 100755 (executable)
@@ -9,6 +9,10 @@ set +o pipefail
 # Prepare OpenStack credentials volume
 if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
     rc_file_vol="-v $LAB_CONFIG/admin-openrc:/home/opnfv/functest/conf/openstack.creds"
+elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then
+    cacert_file_vol="-v ${HOME}/os_cacert:/home/opnfv/functest/conf/os_cacert"
+    echo "export OS_CACERT=/home/opnfv/functest/conf/os_cacert" >> ${HOME}/opnfv-openrc.sh
+    rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds"
 else
     rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds"
 fi
@@ -21,6 +25,7 @@ fi
 
 DEPLOY_TYPE=baremetal
 [[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
+HOST_ARCH=$(uname -m)
 
 echo "Functest: Start Docker and prepare environment"
 
@@ -28,7 +33,7 @@ if [ "$BRANCH" != 'stable/danube' ]; then
   echo "Functest: Download images that will be used by test cases"
   images_dir="${HOME}/opnfv/functest/images"
   chmod +x ${WORKSPACE}/functest/ci/download_images.sh
-  ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} 2> ${redirect}
+  ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} ${DEPLOY_SCENARIO} ${HOST_ARCH} 2> ${redirect}
   images_vol="-v ${images_dir}:/home/opnfv/functest/images"
 fi
 
@@ -54,12 +59,11 @@ if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-h
 fi
 
 if [ "$BRANCH" != 'stable/danube' ]; then
-  volumes="${images_vol} ${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
+  volumes="${images_vol} ${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol} ${cacert_file_vol}"
 else
   volumes="${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
 fi
 
-HOST_ARCH=$(uname -m)
 FUNCTEST_IMAGE="opnfv/functest"
 if [ "$HOST_ARCH" = "aarch64" ]; then
     FUNCTEST_IMAGE="${FUNCTEST_IMAGE}_${HOST_ARCH}"
diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml
index 50859c4..3694c0b 100644 (file)
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+    name: 'ericsson-virtual-pod1bl01-defaults'
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'ericsson-virtual-pod1bl01'
+        - string:
+            name: GIT_BASE
+            default: https://gerrit.opnfv.org/gerrit/$PROJECT
+            description: 'Git URL to use on this Jenkins Slave'
 - parameter:
     name: 'odl-netvirt-virtual-defaults'
     parameters:
diff --git a/jjb/qtip/qtip-verify-jobs.yml b/jjb/qtip/qtip-verify-jobs.yml
index dd444c7..57d24b4 100644 (file)
@@ -7,6 +7,8 @@
     project: qtip
     jobs:
         - 'qtip-verify-{stream}'
+        - 'qtip-review-notebook-{stream}'
+        - 'qtip-merge-{stream}'
     stream:
         - master:
             branch: '{stream}'
     publishers:
         - publish-coverage
 
+# upload juypter notebook to artifacts for review
+- job-template:
+    name: 'qtip-review-notebook-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - 'opnfv-build-ubuntu-defaults'
+
+    scm:
+        - git-scm-gerrit
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                disable-strict-forbidden-file-verification: 'true'
+                file-paths:
+                  - compare-type: ANT
+                    pattern: 'examples/**'
+    builders:
+        - upload-under-review-notebooks-to-opnfv-artifacts
+        - report-build-result-to-gerrit
+
+- job-template:
+    name: 'qtip-merge-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    parameters:
+        - project-parameter:
+            project: $GERRIT_PROJECT
+            branch: '{branch}'
+        - string:
+            name: GS_URL
+            default: '$GS_BASE{gs-pathname}'
+            description: "Directory where the build artifact will be located upon the completion of the build."
+        - string:
+            name: GERRIT_REFSPEC
+            default: 'refs/heads/{branch}'
+            description: "JJB configured GERRIT_REFSPEC parameter"
+
+    scm:
+        - git-scm
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - change-merged-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'remerge'
+            projects:
+                - project-compare-type: 'ANT'
+                  project-pattern: '*'
+                  branches:
+                      - branch-compare-type: 'ANT'
+                        branch-pattern: '**/{branch}'
+                  file-paths:
+                      - compare-type: ANT
+                        pattern: examples/**
+
+    builders:
+        - remove-old-docs-from-opnfv-artifacts
+
 ################################
 ## job builders
 #################################
             set -o xtrace
 
             tox
+
+# modified from upload-under-review-docs-to-opnfv-artifacts in global/releng-macros.yml
+- builder:
+    name: upload-under-review-notebooks-to-opnfv-artifacts
+    builders:
+        - shell: |
+            #!/bin/bash
+            set -o errexit
+            set -o pipefail
+            set -o xtrace
+            export PATH=$PATH:/usr/local/bin/
+
+            [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
+            [[ -d examples ]] || exit 0
+
+            echo
+            echo "###########################"
+            echo "UPLOADING DOCS UNDER REVIEW"
+            echo "###########################"
+            echo
+
+            gs_base="artifacts.opnfv.org/$PROJECT/review"
+            gs_path="$gs_base/$GERRIT_CHANGE_NUMBER"
+            local_path="upload/$GERRIT_CHANGE_NUMBER"
+
+            mkdir -p upload
+            cp -r examples "$local_path"
+            gsutil -m cp -r "$local_path" "gs://$gs_base/"
+
+            echo "Document link(s):" >> gerrit_comment.txt
+            find "$local_path" | grep -e 'ipynb$' | \
+                sed -e "s|^$local_path|    https://nbviewer.jupyter.org/urls/$gs_path|" >> gerrit_comment.txt
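
The upload builder above stages examples/ under a per-change directory, pushes it with gsutil, and rewrites each .ipynb path into an nbviewer link for the Gerrit review comment. A hypothetical walk-through for change 12345 in the qtip project containing examples/demo.ipynb:

    - shell: |
        # values below are illustrative only
        local_path="upload/12345"                          # local copy of examples/
        gs_path="artifacts.opnfv.org/qtip/review/12345"    # gsutil destination
        # the find | sed pipeline then turns upload/12345/demo.ipynb into:
        #     https://nbviewer.jupyter.org/urls/artifacts.opnfv.org/qtip/review/12345/demo.ipynb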
diff --git a/jjb/releng/automate.yml b/jjb/releng/automate.yml
index 17b6fa7..4f6044b 100644 (file)
     builders:
         - shell: |
             cd ./utils/test/{module}/
-            bash run_test.sh
-            cp *.xml $WORKSPACE
+            tox
+            if [ -e *.xml ];then
+                cp *.xml $WORKSPACE
+            fi
 
     publishers:
-        - junit:
-            results: nosetests.xml
-        - cobertura:
-            report-file: "coverage.xml"
-            only-stable: "true"
-            health-auto-update: "false"
-            stability-auto-update: "false"
-            zoom-coverage-chart: "true"
-            targets:
-                - files:
-                    healthy: 10
-                    unhealthy: 20
-                    failing: 30
-                - method:
-                    healthy: 50
-                    unhealthy: 40
-                    failing: 30
+        - publish-coverage
 
 - job-template:
     name: '{module}-automate-{stream}'
     name: 'testapi-automate-docker-deploy-macro'
     builders:
         - shell: |
-            bash ./jjb/releng/docker-deploy.sh 'sudo docker run -dti -p 8082:8000 -e mongodb_url=mongodb://172.17.0.1:27017 -e base_url=http://testresults.opnfv.org/test opnfv/testapi' "http://testresults.opnfv.org/test/swagger/APIs"
+            bash ./jjb/releng/docker-deploy.sh "sudo docker run -dti -p 8082:8000
+            -e mongodb_url=mongodb://172.17.0.1:27017
+            -e base_url=http://testresults.opnfv.org/test opnfv/testapi" \
+            "http://testresults.opnfv.org/test/swagger/APIs" "testapi"
+
 - builder:
     name: 'reporting-automate-docker-deploy-macro'
     builders:
         - shell: |
-            bash ./jjb/releng/docker-deploy.sh 'sudo docker run -itd -p 8084:8000 opnfv/reporting' "http://testresults.opnfv.org/reporting2/reporting/index.html"
+            bash ./jjb/releng/docker-deploy.sh "sudo docker run -itd -p 8084:8000 opnfv/reporting" \
+            "http://testresults.opnfv.org/reporting2/reporting/index.html" "reporting"
 
 - builder:
     name: mongodb-backup
diff --git a/jjb/releng/docker-deploy.sh b/jjb/releng/docker-deploy.sh
index b3b930f..2a3e078 100644 (file)
@@ -19,6 +19,7 @@
 # Assigning Variables
 command=$1
 url=$2
+module=$3
 
 function check() {
 
@@ -38,24 +39,26 @@ function check() {
 }
 
 echo "Getting contianer Id of the currently running one"
-contId=$(sudo docker ps | grep "opnfv/testapi:latest" | awk '{print $1}')
+contId=$(sudo docker ps | grep "opnfv/${module}:latest" | awk '{print $1}')
+
+echo $contId
 
 echo "Pulling the latest image"
-sudo docker pull opnfv/testapi:latest
+sudo docker pull opnfv/${module}:latest
 
-echo "Deleting old containers of opnfv/testapi:old"
-sudo docker ps -a | grep "opnfv/testapi" | grep "old" | awk '{print $1}' | xargs -r sudo docker rm -f
+echo "Deleting old containers of opnfv/${module}:old"
+sudo docker ps -a | grep "opnfv/${module}" | grep "old" | awk '{print $1}' | xargs -r sudo docker rm -f
 
-echo "Deleting old images of opnfv/testapi:latest"
-sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}' | xargs -r sudo docker rmi -f
+echo "Deleting old images of opnfv/${module}:latest"
+sudo docker images | grep "opnfv/${module}" | grep "old" | awk '{print $3}' | xargs -r sudo docker rmi -f
 
 
 if [[ -z "$contId" ]]
 then
-    echo "No running testapi container"
+    echo "No running ${module} container"
 
-    echo "Removing stopped testapi containers in the previous iterations"
-    sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+    echo "Removing stopped ${module} containers in the previous iterations"
+    sudo docker ps -f status=exited | grep "opnfv_${module}" | awk '{print $1}' | xargs -r sudo docker rm -f
 else
     echo $contId
 
@@ -70,13 +73,13 @@ else
     fi
 
     echo "Changing current image tag to old"
-    sudo docker tag "$currImgId" opnfv/testapi:old
+    sudo docker tag "$currImgId" opnfv/${module}:old
 
-    echo "Removing stopped testapi containers in the previous iteration"
-    sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+    echo "Removing stopped ${module} containers in the previous iteration"
+    sudo docker ps -f status=exited | grep "opnfv_${module}" | awk '{print $1}' | xargs -r sudo docker rm -f
 
-    echo "Renaming the running container name to opnfv_testapi as to identify it."
-    sudo docker rename $contId opnfv_testapi
+    echo "Renaming the running container name to opnfv_${module} as to identify it."
+    sudo docker rename $contId opnfv_${module}
 
     echo "Stop the currently running container"
     sudo docker stop $contId
@@ -86,10 +89,10 @@ echo "Running a container with the new image"
 $command:latest
 
 if check; then
-    echo "TestResults Hosted."
+    echo "TestResults Module Hosted."
 else
-    echo "TestResults Hosting Failed"
-    if [[ $(sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}') ]]; then
+    echo "TestResults Module Failed"
+    if [[ $(sudo docker images | grep "opnfv/${module}" | grep "old" | awk '{print $3}') ]]; then
         echo "Running old Image"
         $command:old
         exit 1
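
With the testapi hard-coding removed, the script runs the same upgrade
loop for any module. The flow, condensed into an annotated outline
(sketch, not part of the patch):

    # 1. docker pull opnfv/<module>:latest
    # 2. tag the image behind the running container as opnfv/<module>:old
    # 3. clean up leftover :old containers/images, stop the running one
    # 4. start the new container:            $command:latest
    # 5. if the URL health check fails, roll back with $command:old
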
index 41840d5..4dcface 100644 (file)
 # that have been switched using labels for slaves
 #--------------------------------
     pod:
+# apex CI PODs
+        - virtual:
+            slave-label: apex-virtual-master
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - baremetal:
+            slave-label: apex-baremetal-master
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *master
+        - virtual:
+            slave-label: apex-virtual-danube
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *danube
+        - baremetal:
+            slave-label: apex-baremetal-danube
+            installer: apex
+            auto-trigger-name: 'daily-trigger-disabled'
+            <<: *danube
 # fuel CI PODs
         - baremetal:
             slave-label: fuel-baremetal
             installer: joid
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
-
 # compass CI PODs
         - baremetal:
             slave-label: compass-baremetal
             auto-trigger-name: 'daily-trigger-disabled'
             <<: *danube
 #--------------------------------
-#    Installers not using labels
-#            CI PODs
-# This section should only contain the installers
-# that have not been switched using labels for slaves
-#--------------------------------
-        - lf-pod1:
-            slave-label: '{pod}'
-            installer: apex
-            auto-trigger-name: 'daily-trigger-disabled'
-            <<: *master
-        - lf-pod1:
-            slave-label: '{pod}'
-            installer: apex
-            auto-trigger-name: 'daily-trigger-disabled'
-            <<: *danube
-#--------------------------------
 #        None-CI PODs
 #--------------------------------
         - orange-pod1:
 # parameter macros
 ########################
 - parameter:
-    name: 'yardstick-params-fuel-baremetal'
+    name: 'yardstick-params-apex-virtual-master'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-fuel-virtual'
+    name: 'yardstick-params-apex-baremetal-master'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-armband-baremetal'
+    name: 'yardstick-params-apex-virtual-danube'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-armband-virtual'
+    name: 'yardstick-params-apex-baremetal-danube'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-arm-virtual1'
+    name: 'yardstick-params-fuel-baremetal'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-joid-baremetal'
+    name: 'yardstick-params-fuel-virtual'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-joid-virtual'
+    name: 'yardstick-params-armband-baremetal'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-intel-pod8'
+    name: 'yardstick-params-armband-virtual'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
 - parameter:
-    name: 'yardstick-params-lf-pod1'
+    name: 'yardstick-params-arm-virtual1'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
-
 - parameter:
-    name: 'yardstick-params-lf-pod2'
+    name: 'yardstick-params-joid-baremetal'
+    parameters:
+        - string:
+            name: YARDSTICK_DB_BACKEND
+            default: '-i 104.197.68.199:8086'
+            description: 'Arguments to use in order to choose the backend DB'
+- parameter:
+    name: 'yardstick-params-joid-virtual'
+    parameters:
+        - string:
+            name: YARDSTICK_DB_BACKEND
+            default: '-i 104.197.68.199:8086'
+            description: 'Arguments to use in order to choose the backend DB'
+- parameter:
+    name: 'yardstick-params-intel-pod8'
     parameters:
         - string:
             name: YARDSTICK_DB_BACKEND
             default: '-i 104.197.68.199:8086'
             description: 'Arguments to use in order to choose the backend DB'
-
 - parameter:
     name: 'yardstick-params-compass-baremetal'
     parameters:
index 94de628..3cba98b 100644 (file)
@@ -48,6 +48,7 @@
       dib_os_element: "{{ ipa_dib_os_element|default('debian') }}"
       dib_os_release: "jessie"
       dib_elements: "ironic-agent {{ ipa_extra_dib_elements | default('') }}"
+      dib_notmpfs: true
       when: create_ipa_image | bool == true
     - role: bifrost-create-dib-image
       dib_imagetype: "qcow2"
@@ -57,6 +58,7 @@
       extra_dib_elements: "{{ lookup('env', 'EXTRA_DIB_ELEMENTS') | default('') }}"
       dib_elements: "vm enable-serial-console simple-init devuser growroot {{ extra_dib_elements }}"
       dib_packages: "{{ lookup('env', 'DIB_OS_PACKAGES') }}"
+      dib_notmpfs: true
       when: create_image_via_dib | bool == true and transform_boot_image | bool == false
     - role: bifrost-keystone-client-config
       user: "{{ ansible_env.SUDO_USER }}"
index 285f838..8374edb 100755 (executable)
@@ -12,7 +12,7 @@ set -o nounset
 set -o pipefail
 
 usage() {
-    echo "usage: $0 [-v] -d <destination> -i <installer_type> -a <installer_ip> [-s <ssh_key>]" >&2
+    echo "usage: $0 [-v] -d <destination> -i <installer_type> -a <installer_ip> [-o <os_cacert>] [-s <ssh_key>]" >&2
     echo "[-v] Virtualized deployment" >&2
     echo "[-s <ssh_key>] Path to ssh key. For MCP deployments only" >&2
 }
@@ -54,12 +54,13 @@ swap_to_public() {
 : ${DEPLOY_TYPE:=''}
 
 #Get options
-while getopts ":d:i:a:h:s:v" optchar; do
+while getopts ":d:i:a:h:s:o:v" optchar; do
     case "${optchar}" in
         d) dest_path=${OPTARG} ;;
         i) installer_type=${OPTARG} ;;
         a) installer_ip=${OPTARG} ;;
         s) ssh_key=${OPTARG} ;;
+        o) os_cacert=${OPTARG} ;;
         v) DEPLOY_TYPE="virt" ;;
         *) echo "Non-option argument: '-${OPTARG}'" >&2
            usage
@@ -70,6 +71,7 @@ done
 
 # set vars from env if not provided by user as options
 dest_path=${dest_path:-$HOME/opnfv-openrc.sh}
+os_cacert=${os_cacert:-$HOME/os_cacert}
 installer_type=${installer_type:-$INSTALLER_TYPE}
 installer_ip=${installer_ip:-$INSTALLER_IP}
 if [ "${installer_type}" == "fuel" ] && [ "${BRANCH}" == "master" ]; then
@@ -156,6 +158,7 @@ elif [ "$installer_type" == "compass" ]; then
     if [ "${BRANCH}" == "master" ]; then
         sudo docker cp compass-tasks:/opt/openrc $dest_path &> /dev/null
         sudo chown $(whoami):$(whoami) $dest_path
+        sudo docker cp compass-tasks:/opt/os_cacert $os_cacert &> /dev/null
     else
         verify_connectivity $installer_ip
         controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
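
The new -o flag gives os_cacert the same option-then-default fallback
the other arguments use. Roughly, in Python terms (variable names mirror
the shell; sketch only):

    import argparse
    import os

    parser = argparse.ArgumentParser()
    parser.add_argument('-o', dest='os_cacert', default=None)
    args, _ = parser.parse_known_args()
    # equivalent of: os_cacert=${os_cacert:-$HOME/os_cacert}
    os_cacert = args.os_cacert or os.path.join(os.environ['HOME'],
                                               'os_cacert')
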
diff --git a/utils/test/testapi/.gitignore b/utils/test/testapi/.gitignore
new file mode 100644 (file)
index 0000000..c7b63b5
--- /dev/null
@@ -0,0 +1,4 @@
+AUTHORS
+ChangeLog
+setup.cfg-e
+
index 8c701c3..bb31ab0 100644 (file)
@@ -54,8 +54,8 @@
                 controller: 'ResultsController as ctrl'
             }).
             state('userResults', {
-                url: 'user_results',
-                templateUrl: '/testapi-ui/components/results/results.html',
+                url: '/user_results',
+                templateUrl: 'testapi-ui/components/results/results.html',
                 controller: 'ResultsController as ctrl'
             }).
             state('resultsDetail', {
@@ -66,7 +66,7 @@
             }).
             state('profile', {
                 url: '/profile',
-                templateUrl: '/testapi-ui/components/profile/profile.html',
+                templateUrl: 'testapi-ui/components/profile/profile.html',
                 controller: 'ProfileController as ctrl'
             }).
             state('authFailure', {
index 3056e1d..2ae5339 100644 (file)
@@ -1,6 +1,23 @@
 <h3>{{ctrl.pageHeader}}</h3>
 <p>{{ctrl.pageParagraph}}</p>
-
+<form class="form-inline" ng-show="ctrl.isUserResults">
+<h4>Upload Results</h4>
+<div class="form-group col-m-3">
+     <input class="form-contrl btn btn-default" type = "file" file-model = "resultFile"/>
+</div>
+<div class="checkbox col-m-1">
+  <label>
+      <input type="checkbox" ng-model="ctrl.isPublic">public
+  </label>
+</div>
+<div class="form-group col-m-3">
+     <button class="btn btn-primary" ng-click = "ctrl.uploadFile()">upload result</button>
+</div>
+<div>
+<label>{{ctrl.uploadState}}</label>
+</div>
+</form>
+<div class="row" style="margin-bottom:24px;"></div>
 <div class="result-filters">
     <h4>Filters</h4>
     <div class="row">
@@ -41,7 +58,6 @@
 
 <div cg-busy="{promise:ctrl.authRequest,message:'Loading'}"></div>
 <div cg-busy="{promise:ctrl.resultsRequest,message:'Loading'}"></div>
-
 <div ng-show="ctrl.data" class="results-table">
     <table ng-data="ctrl.data.result" ng-show="ctrl.data" class="table table-striped table-hover">
         <thead>
index 9e3540d..cc6cc0b 100644 (file)
         .module('testapiApp')
         .controller('ResultsController', ResultsController);
 
+    angular
+        .module('testapiApp')
+        .directive('fileModel', ['$parse', function ($parse) {
+            return {
+               restrict: 'A',
+               link: function(scope, element, attrs) {
+                  var model = $parse(attrs.fileModel);
+                  var modelSetter = model.assign;
+
+                  element.bind('change', function(){
+                     scope.$apply(function(){
+                        modelSetter(scope, element[0].files[0]);
+                     });
+                  });
+               }
+            };
+         }]);
+
     ResultsController.$inject = [
         '$scope', '$http', '$filter', '$state', 'testapiApiUrl','raiseAlert'
     ];
@@ -32,6 +50,7 @@
         raiseAlert) {
         var ctrl = this;
 
+        ctrl.uploadFile=uploadFile;
         ctrl.update = update;
         ctrl.open = open;
         ctrl.clearFilters = clearFilters;
@@ -76,6 +95,8 @@
         ctrl.format = 'yyyy-MM-dd';
 
         /** Check to see if this page should display user-specific results. */
+        // need auth to browse
         ctrl.isUserResults = $state.current.name === 'userResults';
 
         // Should only be on user-results-page if authenticated.
             'The most recently uploaded community test results are listed ' +
             'here.';
 
+        ctrl.uploadState = '';
+
+        ctrl.isPublic = false;
+
         if (ctrl.isUserResults) {
             ctrl.authRequest = $scope.auth.doSignCheck()
                 .then(ctrl.update);
-            ctrl.getUserProducts();
+            // ctrl.getUserProducts();
         } else {
             ctrl.update();
         }
 
+
+        function uploadFileToUrl(file, uploadUrl){
+           var fd = new FormData();
+           fd.append('file', file);
+           fd.append('public', ctrl.isPublic);
+
+           $http.post(uploadUrl, fd, {
+              transformRequest: angular.identity,
+              headers: {'Content-Type': undefined}
+           })
+
+           .success(function(data){
+              var id = data.href.substr(data.href.lastIndexOf('/')+1);
+              ctrl.uploadState = "Upload succeed. Result id is " + id;
+              ctrl.update();
+           })
+
+           .error(function(data, status){
+              ctrl.uploadState = "Upload failed. Error code is " + status;
+           });
+        }
+
+        function uploadFile(){
+           var file = $scope.resultFile;
+           console.log('file is ');
+           console.dir(file);
+
+           var uploadUrl = testapiApiUrl + "/results/upload";
+           uploadFileToUrl(file, uploadUrl);
+        };
+
         /**
          * This will contact the TestAPI API to get a listing of test run
          * results.
index f2c49e8..85c33b6 100644 (file)
@@ -33,6 +33,7 @@ TestAPI
           </ul>
           <ul class="nav navbar-nav navbar-right">
             <li ng-class="{ active: header.isActive('/user_results')}" ng-if="auth.isAuthenticated"><a ui-sref="userResults">My Results</a></li>
+            <!--
             <li ng-if="auth.isAuthenticated" ng-class="{ active: header.isCatalogActive('user')}" class="dropdown" uib-dropdown>
                 <a role="button" class="dropdown-toggle" uib-dropdown-toggle>
                     My Catalog <strong class="caret"></strong>
@@ -42,6 +43,7 @@ TestAPI
                     <li><a ui-sref="userProducts">My Products</a></li>
                 </ul>
             </li>
+            -->
             <li ng-class="{ active: header.isActive('/profile')}" ng-if="auth.isAuthenticated"><a ui-sref="profile">Profile</a></li>
             <li ng-if="auth.isAuthenticated"><a href="" ng-click="auth.doSignOut()">Sign Out</a></li>
             <li ng-if="!auth.isAuthenticated"><a href="" ng-click="auth.doSignIn()">Sign In / Sign Up</a></li>
index 9ae2520..435188d 100644 (file)
@@ -12,7 +12,7 @@ url = http://localhost:8000/api/v1
 port = 8000
 
 # Number of results for one page (integer value)
-#results_per_page = 20
+results_per_page = 20
 
 # With debug_on set to true, error traces will be shown in HTTP responses
 debug = True
index 4576d9b..da6a6cf 100644 (file)
@@ -33,6 +33,7 @@ def main(args):
     else:
         exit(1)
 
+
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(description='Create \
                                       Swagger Spec documentation')
index 545d5e3..a5ac5eb 100644 (file)
@@ -29,40 +29,18 @@ TODOs :
 
 """
 
-import argparse
-import sys
-
-import motor
 import tornado.ioloop
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 from opnfv_testapi.router import url_mappings
 from opnfv_testapi.tornado_swagger import swagger
 
-CONF = None
-
-
-def parse_config(argv=[]):
-    global CONF
-    parser = argparse.ArgumentParser()
-    parser.add_argument("-c", "--config-file", dest='config_file',
-                        help="Config file location")
-    args = parser.parse_args(argv)
-    if args.config_file:
-        config.Config.CONFIG = args.config_file
-    CONF = config.Config()
-
-
-def get_db():
-    return motor.MotorClient(CONF.mongo_url)[CONF.mongo_dbname]
-
 
 def make_app():
     swagger.docs(base_url=CONF.swagger_base_url,
                  static_path=CONF.static_path)
     return swagger.Application(
         url_mappings.mappings,
-        db=get_db(),
         debug=CONF.api_debug,
         auth=CONF.api_authenticate,
         cookie_secret='opnfv-testapi',
@@ -70,7 +48,6 @@ def make_app():
 
 
 def main():
-    parse_config(sys.argv[1:])
     application = make_app()
     application.listen(CONF.api_port)
     tornado.ioloop.IOLoop.current().start()
index 67e8fbd..24ba876 100644 (file)
@@ -13,6 +13,7 @@ from tornado import web
 
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
 
 
 def authenticate(method):
@@ -26,7 +27,7 @@ def authenticate(method):
             except KeyError:
                 raises.Unauthorized(message.unauthorized())
             query = {'access_token': token}
-            check = yield self._eval_db_find_one(query, 'tokens')
+            check = yield dbapi.db_find_one('tokens', query)
             if not check:
                 raises.Forbidden(message.invalid_token())
         ret = yield gen.coroutine(method)(self, *args, **kwargs)
@@ -38,7 +39,7 @@ def not_exist(xstep):
     @functools.wraps(xstep)
     def wrap(self, *args, **kwargs):
         query = kwargs.get('query')
-        data = yield self._eval_db_find_one(query)
+        data = yield dbapi.db_find_one(self.table, query)
         if not data:
             raises.NotFound(message.not_found(self.table, query))
         ret = yield gen.coroutine(xstep)(self, data, *args, **kwargs)
@@ -78,7 +79,7 @@ def carriers_exist(xstep):
         carriers = kwargs.pop('carriers', {})
         if carriers:
             for table, query in carriers:
-                exist = yield self._eval_db_find_one(query(), table)
+                exist = yield dbapi.db_find_one(table, query())
                 if not exist:
                     raises.Forbidden(message.not_found(table, query()))
         ret = yield gen.coroutine(xstep)(self, *args, **kwargs)
@@ -91,7 +92,7 @@ def new_not_exists(xstep):
     def wrap(self, *args, **kwargs):
         query = kwargs.get('query')
         if query:
-            to_data = yield self._eval_db_find_one(query())
+            to_data = yield dbapi.db_find_one(self.table, query())
             if to_data:
                 raises.Forbidden(message.exist(self.table, query()))
         ret = yield gen.coroutine(xstep)(self, *args, **kwargs)
@@ -105,7 +106,7 @@ def updated_one_not_exist(xstep):
         db_keys = kwargs.pop('db_keys', [])
         query = self._update_query(db_keys, data)
         if query:
-            to_data = yield self._eval_db_find_one(query)
+            to_data = yield dbapi.db_find_one(self.table, query)
             if to_data:
                 raises.Forbidden(message.exist(self.table, query))
         ret = yield gen.coroutine(xstep)(self, data, *args, **kwargs)
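
With the handler-level _eval_db_find_one gone, every decorator in
check.py reaches Mongo through the shared dbapi module, so the checks
compose on any coroutine handler method without extra wiring. A usage
sketch (FooHandler is hypothetical; the stacking matches how
handlers.py applies these decorators):

    from opnfv_testapi.common import check
    from opnfv_testapi.resources import handlers

    class FooHandler(handlers.GenericApiHandler):
        @check.authenticate      # valid token required when auth is on
        @check.not_exist         # 404 unless the queried document exists
        def _get_one(self, data, query=None):
            self.finish_request(self.format_data(data))
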
index f73c0ab..4cd53c6 100644 (file)
@@ -8,14 +8,16 @@
 # feng.xiaowei@zte.com.cn remove prepare_put_request            5-30-2016
 ##############################################################################
 import ConfigParser
+import argparse
 import os
+import sys
 
 
 class Config(object):
-    CONFIG = None
 
     def __init__(self):
-        self.file = self.CONFIG if self.CONFIG else self._default_config()
+        self.config_file = None
+        self._set_config_file()
         self._parse()
         self._parse_per_page()
         self.static_path = os.path.join(
@@ -24,11 +26,11 @@ class Config(object):
             'static')
 
     def _parse(self):
-        if not os.path.exists(self.file):
-            raise Exception("%s not found" % self.file)
+        if not os.path.exists(self.config_file):
+            raise Exception("%s not found" % self.config_file)
 
         config = ConfigParser.RawConfigParser()
-        config.read(self.file)
+        config.read(self.config_file)
         self._parse_section(config)
 
     def _parse_section(self, config):
@@ -53,8 +55,24 @@ class Config(object):
                 value = False
         return value
 
-    @staticmethod
-    def _default_config():
+    def _set_config_file(self):
+        if not self._set_sys_config_file():
+            self._set_default_config_file()
+
+    def _set_sys_config_file(self):
+        parser = argparse.ArgumentParser()
+        parser.add_argument("-c", "--config-file", dest='config_file',
+                            help="Config file location", metavar="FILE")
+        args, _ = parser.parse_known_args(sys.argv)
+        try:
+            self.config_file = args.config_file
+        finally:
+            return self.config_file is not None
+
+    def _set_default_config_file(self):
         is_venv = os.getenv('VIRTUAL_ENV')
-        return os.path.join('/' if not is_venv else is_venv,
-                            'etc/opnfv_testapi/config.ini')
+        self.config_file = os.path.join('/' if not is_venv else is_venv,
+                                        'etc/opnfv_testapi/config.ini')
+
+
+CONF = Config()
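
The module now builds a single CONF instance at import time, and
_set_sys_config_file() uses parse_known_args() so --config-file can sit
alongside unrelated command-line flags without argparse erroring out.
Intended usage (sketch):

    # anywhere in the tree, instead of instantiating Config() per module:
    from opnfv_testapi.common.config import CONF

    print(CONF.mongo_url)    # parsed once, shared by every importer
    print(CONF.api_port)
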
index 98536ff..951cbaf 100644 (file)
@@ -10,6 +10,10 @@ not_found_base = 'Could Not Found'
 exist_base = 'Already Exists'
 
 
+def key_error(key):
+    return "KeyError: '{}'".format(key)
+
+
 def no_body():
     return 'No Body'
 
diff --git a/utils/test/testapi/opnfv_testapi/db/__init__.py b/utils/test/testapi/opnfv_testapi/db/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/utils/test/testapi/opnfv_testapi/db/api.py b/utils/test/testapi/opnfv_testapi/db/api.py
new file mode 100644 (file)
index 0000000..c057480
--- /dev/null
@@ -0,0 +1,38 @@
+import motor
+
+from opnfv_testapi.common.config import CONF
+
+DB = motor.MotorClient(CONF.mongo_url)[CONF.mongo_dbname]
+
+
+def db_update(collection, query, update_req):
+    return _eval_db(collection, 'update', query, update_req, check_keys=False)
+
+
+def db_delete(collection, query):
+    return _eval_db(collection, 'remove', query)
+
+
+def db_aggregate(collection, pipelines):
+    return _eval_db(collection, 'aggregate', pipelines, allowDiskUse=True)
+
+
+def db_list(collection, query):
+    return _eval_db(collection, 'find', query)
+
+
+def db_save(collection, data):
+    return _eval_db(collection, 'insert', data, check_keys=False)
+
+
+def db_find_one(collection, query):
+    return _eval_db(collection, 'find_one', query)
+
+
+def _eval_db(collection, method, *args, **kwargs):
+    exec_collection = DB.__getattr__(collection)
+    return exec_collection.__getattribute__(method)(*args, **kwargs)
+
+
+def _eval_db_find_one(query, table=None):
+    return _eval_db(table, 'find_one', query)
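
These helpers wrap one shared Motor client, so handlers and decorators
no longer depend on the application's db setting. A hedged usage sketch
inside a tornado coroutine (collection and field names are the ones used
elsewhere in this patch):

    from tornado import gen

    from opnfv_testapi.db import api as dbapi

    @gen.coroutine
    def token_is_valid(token):
        record = yield dbapi.db_find_one('tokens', {'access_token': token})
        raise gen.Return(record is not None)
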
index c7fed8f..8a3a2db 100644 (file)
@@ -20,8 +20,8 @@
 # feng.xiaowei@zte.com.cn remove DashboardHandler            5-30-2016
 ##############################################################################
 
-from datetime import datetime
 import json
+from datetime import datetime
 
 from tornado import gen
 from tornado import web
@@ -29,6 +29,7 @@ from tornado import web
 from opnfv_testapi.common import check
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
 from opnfv_testapi.resources import models
 from opnfv_testapi.tornado_swagger import swagger
 
@@ -38,7 +39,6 @@ DEFAULT_REPRESENTATION = "application/json"
 class GenericApiHandler(web.RequestHandler):
     def __init__(self, application, request, **kwargs):
         super(GenericApiHandler, self).__init__(application, request, **kwargs)
-        self.db = self.settings["db"]
         self.json_args = None
         self.table = None
         self.table_cls = None
@@ -90,8 +90,7 @@ class GenericApiHandler(web.RequestHandler):
 
         if self.table != 'results':
             data.creation_date = datetime.now()
-        _id = yield self._eval_db(self.table, 'insert', data.format(),
-                                  check_keys=False)
+        _id = yield dbapi.db_save(self.table, data.format())
         if 'name' in self.json_args:
             resource = data.name
         else:
@@ -107,17 +106,17 @@ class GenericApiHandler(web.RequestHandler):
         per_page = kwargs.get('per_page', 0)
         if query is None:
             query = {}
-        cursor = self._eval_db(self.table, 'find', query)
-        records_count = yield cursor.count()
-        total_pages = self._calc_total_pages(records_count,
-                                             last,
-                                             page,
-                                             per_page)
+
+        total_pages = 0
+        if page > 0:
+            cursor = dbapi.db_list(self.table, query)
+            records_count = yield cursor.count()
+            total_pages = self._calc_total_pages(records_count,
+                                                 last,
+                                                 page,
+                                                 per_page)
         pipelines = self._set_pipelines(query, sort, last, page, per_page)
-        cursor = self._eval_db(self.table,
-                               'aggregate',
-                               pipelines,
-                               allowDiskUse=True)
+        cursor = dbapi.db_aggregate(self.table, pipelines)
         data = list()
         while (yield cursor.fetch_next):
             data.append(self.format_data(cursor.next_object()))
@@ -125,7 +124,7 @@ class GenericApiHandler(web.RequestHandler):
             res = {self.table: data}
         else:
             res = res_op(data, *args)
-        if total_pages > 0:
+        if page > 0:
             res.update({
                 'pagination': {
                     'current_page': kwargs.get('page'),
@@ -140,12 +139,10 @@ class GenericApiHandler(web.RequestHandler):
         if (records_count > last) and (last > 0):
             records_nr = last
 
-        total_pages = 0
-        if page > 0:
-            total_pages, remainder = divmod(records_nr, per_page)
-            if remainder > 0:
-                total_pages += 1
-        if page > total_pages:
+        total_pages, remainder = divmod(records_nr, per_page)
+        if remainder > 0:
+            total_pages += 1
+        if page > 1 and page > total_pages:
             raises.BadRequest(
                 'Request page > total_pages [{}]'.format(total_pages))
         return total_pages
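
_calc_total_pages() is now only reached when page > 0, so it can
ceil-divide unconditionally; the page > 1 guard keeps a bare first-page
request from failing on an empty collection. The arithmetic in isolation
(sketch):

    def total_pages(records_nr, last, per_page):
        if 0 < last < records_nr:
            records_nr = last        # honour the "last N" cap
        pages, remainder = divmod(records_nr, per_page)
        return pages + 1 if remainder else pages

    # total_pages(45, 0, 20) == 3; total_pages(45, 10, 20) == 1
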
@@ -175,7 +172,7 @@ class GenericApiHandler(web.RequestHandler):
     @check.authenticate
     @check.not_exist
     def _delete(self, data, query=None):
-        yield self._eval_db(self.table, 'remove', query)
+        yield dbapi.db_delete(self.table, query)
         self.finish_request()
 
     @check.authenticate
@@ -185,8 +182,7 @@ class GenericApiHandler(web.RequestHandler):
     def _update(self, data, query=None, **kwargs):
         data = self.table_cls.from_dict(data)
         update_req = self._update_requests(data)
-        yield self._eval_db(self.table, 'update', query, update_req,
-                            check_keys=False)
+        yield dbapi.db_update(self.table, query, update_req)
         update_req['_id'] = str(data._id)
         self.finish_request(update_req)
 
@@ -229,23 +225,6 @@ class GenericApiHandler(web.RequestHandler):
             query[key] = new
         return query if not equal else dict()
 
-    def _eval_db(self, table, method, *args, **kwargs):
-        exec_collection = self.db.__getattr__(table)
-        return exec_collection.__getattribute__(method)(*args, **kwargs)
-
-    def _eval_db_find_one(self, query, table=None):
-        if table is None:
-            table = self.table
-        return self._eval_db(table, 'find_one', query)
-
-    def db_save(self, collection, data):
-        self._eval_db(collection, 'insert', data, check_keys=False)
-
-    def db_find_one(self, query, collection=None):
-        if not collection:
-            collection = self.table
-        return self._eval_db(collection, 'find_one', query)
-
 
 class VersionHandler(GenericApiHandler):
     @swagger.operation(nickname='listAllVersions')
index 1773216..2bf1792 100644 (file)
@@ -6,19 +6,20 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+import logging
 from datetime import datetime
 from datetime import timedelta
+import json
 
 from bson import objectid
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 from opnfv_testapi.common import message
 from opnfv_testapi.common import raises
 from opnfv_testapi.resources import handlers
 from opnfv_testapi.resources import result_models
 from opnfv_testapi.tornado_swagger import swagger
-
-CONF = config.Config()
+from opnfv_testapi.ui.auth import constants as auth_const
 
 
 class GenericResultHandler(handlers.GenericApiHandler):
@@ -40,6 +41,7 @@ class GenericResultHandler(handlers.GenericApiHandler):
         query = dict()
         date_range = dict()
 
+        query['public'] = {'$not': {'$eq': 'false'}}
         for k in self.request.query_arguments.keys():
             v = self.get_query_argument(k)
             if k == 'project' or k == 'pod' or k == 'case':
@@ -56,10 +58,24 @@ class GenericResultHandler(handlers.GenericApiHandler):
                 date_range.update({'$gte': str(v)})
             elif k == 'to':
                 date_range.update({'$lt': str(v)})
-            elif k != 'last' and k != 'page':
+            elif k == 'signed':
+                openid = self.get_secure_cookie(auth_const.OPENID)
+                role = self.get_secure_cookie(auth_const.ROLE)
+                logging.info('role:%s', role)
+                if role:
+                    del query['public']
+                    if role != "reviewer":
+                        query['user'] = openid
+            elif k not in ['last', 'page', 'descend']:
                 query[k] = v
             if date_range:
                 query['start_date'] = date_range
+
+            # if $lt is not provided,
+            # empty/None/null/'' start_date will also be returned
+            if 'start_date' in query and '$lt' not in query['start_date']:
+                query['start_date'].update({'$lt': str(datetime.now())})
+
         return query
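
The visibility rules distilled: anonymous queries hide results whose
public field is the string 'false', a signed-in reviewer sees every
record, and any other signed-in user sees only their own. As a runnable
sketch of the resulting Mongo filter (the function name is illustrative):

    def visibility_filter(role=None, openid=None):
        # anonymous default: exclude results marked public='false'
        query = {'public': {'$not': {'$eq': 'false'}}}
        if role:                        # signed request with a role cookie
            del query['public']
            if role != 'reviewer':
                query['user'] = openid  # plain users: own results only
        return query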
 
 
@@ -84,9 +100,10 @@ class ResultsCLHandler(GenericResultHandler):
                  - criteria : the global criteria status passed or failed
                  - trust_indicator : evaluate the stability of the test case
                    to avoid running systematically long and stable test case
+                 - signed : get the signed-in user's results
 
                 GET /results/project=functest&case=vPing&version=Arno-R1 \
-                &pod=pod_name&period=15
+                &pod=pod_name&period=15&signed
             @return 200: all test results consist with query,
                          empty list if no result is found
             @rtype: L{TestResults}
@@ -146,18 +163,31 @@ class ResultsCLHandler(GenericResultHandler):
             @type trust_indicator: L{float}
             @in trust_indicator: query
             @required trust_indicator: False
+            @param signed: user results or all results
+            @type signed: L{string}
+            @in signed: query
+            @required signed: False
+            @param descend: true for newest first; false for oldest first
+            @type descend: L{string}
+            @in descend: query
+            @required descend: False
         """
-        limitations = {'sort': {'start_date': -1}}
-        last = self.get_query_argument('last', 0)
-        if last is not None:
-            last = self.get_int('last', last)
-            limitations.update({'last': last})
-
-        page = self.get_query_argument('page', None)
-        if page is not None:
-            page = self.get_int('page', page)
-            limitations.update({'page': page,
-                                'per_page': CONF.api_results_per_page})
+        def descend_limit():
+            descend = self.get_query_argument('descend', 'true')
+            return -1 if descend.lower() == 'true' else 1
+
+        def last_limit():
+            return self.get_int('last', self.get_query_argument('last', 0))
+
+        def page_limit():
+            return self.get_int('page', self.get_query_argument('page', 0))
+
+        limitations = {
+            'sort': {'_id': descend_limit()},
+            'last': last_limit(),
+            'page': page_limit(),
+            'per_page': CONF.api_results_per_page
+        }
 
         self._list(query=self.set_query(), **limitations)
 
@@ -173,6 +203,9 @@ class ResultsCLHandler(GenericResultHandler):
             @raise 404: pod/project/testcase not exist
             @raise 400: body/pod_name/project_name/case_name not provided
         """
+        self._post()
+
+    def _post(self):
         def pod_query():
             return {'name': self.json_args.get('pod_name')}
 
@@ -187,9 +220,39 @@ class ResultsCLHandler(GenericResultHandler):
         carriers = [('pods', pod_query),
                     ('projects', project_query),
                     ('testcases', testcase_query)]
+
         self._create(miss_fields=miss_fields, carriers=carriers)
 
 
+class ResultsUploadHandler(ResultsCLHandler):
+    @swagger.operation(nickname="uploadTestResult")
+    def post(self):
+        """
+            @description: upload and create a test result
+            @param body: result to be created
+            @type body: L{ResultCreateRequest}
+            @in body: body
+            @rtype: L{CreateResponse}
+            @return 200: result is created.
+            @raise 404: pod/project/testcase not exist
+            @raise 400: body/pod_name/project_name/case_name not provided
+        """
+        logging.info('file upload')
+        fileinfo = self.request.files['file'][0]
+        is_public = self.get_body_argument('public')
+        logging.warning('public:%s', is_public)
+        logging.info('results file: %s', fileinfo['filename'])
+        logging.info('results body: %s', fileinfo['body'])
+        self.json_args = json.loads(fileinfo['body']).copy()
+        self.json_args['public'] = is_public
+
+        openid = self.get_secure_cookie(auth_const.OPENID)
+        if openid:
+            self.json_args['user'] = openid
+
+        super(ResultsUploadHandler, self)._post()
+
+
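
The upload endpoint reuses the ordinary creation path: it reads the
multipart file, treats its body as the JSON a plain POST would carry,
stamps public/user onto it, and delegates to _post(). A hypothetical
client call (the requests library is an assumption, not part of the
patch):

    import json

    import requests

    files = {'file': ('results.json', json.dumps({
        'pod_name': 'lf-pod1', 'project_name': 'functest',
        'case_name': 'vPing'}))}
    resp = requests.post('http://localhost:8000/api/v1/results/upload',
                         files=files, data={'public': 'true'})
    print(resp.json()['href'])    # .../results/<new result id>
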
 class ResultsGURHandler(GenericResultHandler):
     @swagger.operation(nickname='getTestResultById')
     def get(self, result_id):
index 62a6dac..890bf82 100644 (file)
@@ -54,6 +54,8 @@ class ResultCreateRequest(models.ModelBase):
                  build_tag=None,
                  scenario=None,
                  criteria=None,
+                 user=None,
+                 public="true",
                  trust_indicator=None):
         self.pod_name = pod_name
         self.project_name = project_name
@@ -66,6 +68,8 @@ class ResultCreateRequest(models.ModelBase):
         self.build_tag = build_tag
         self.scenario = scenario
         self.criteria = criteria
+        self.user = user
+        self.public = public
         self.trust_indicator = trust_indicator if trust_indicator else TI(0)
 
 
@@ -89,7 +93,7 @@ class TestResult(models.ModelBase):
                  pod_name=None, installer=None, version=None,
                  start_date=None, stop_date=None, details=None,
                  build_tag=None, scenario=None, criteria=None,
-                 trust_indicator=None):
+                 user=None, public="true", trust_indicator=None):
         self._id = _id
         self.case_name = case_name
         self.project_name = project_name
@@ -102,6 +106,8 @@ class TestResult(models.ModelBase):
         self.build_tag = build_tag
         self.scenario = scenario
         self.criteria = criteria
+        self.user = user
+        self.public = public
         self.trust_indicator = trust_indicator
 
     @staticmethod
index a2312de..562fa5e 100644 (file)
@@ -8,7 +8,7 @@
 ##############################################################################
 import tornado.web
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 from opnfv_testapi.resources import handlers
 from opnfv_testapi.resources import pod_handlers
 from opnfv_testapi.resources import project_handlers
@@ -48,6 +48,7 @@ mappings = [
     # Push results with mandatory request payload parameters
     # (project, case, and pod)
     (r"/api/v1/results", result_handlers.ResultsCLHandler),
+    (r'/api/v1/results/upload', result_handlers.ResultsUploadHandler),
     (r"/api/v1/results/([^/]+)", result_handlers.ResultsGURHandler),
 
     # scenarios
@@ -57,11 +58,12 @@ mappings = [
     # static path
     (r'/(.*\.(css|png|gif|js|html|json|map|woff2|woff|ttf))',
      tornado.web.StaticFileHandler,
-     {'path': config.Config().static_path}),
+     {'path': CONF.static_path}),
 
     (r'/', root.RootHandler),
     (r'/api/v1/auth/signin', sign.SigninHandler),
     (r'/api/v1/auth/signin_return', sign.SigninReturnHandler),
     (r'/api/v1/auth/signout', sign.SignoutHandler),
     (r'/api/v1/profile', user.ProfileHandler),
+
 ]
index 446b944..cc8743c 100644 (file)
@@ -1,16 +1,15 @@
-import os
+import argparse
 
-from opnfv_testapi.common import config
 
-
-def test_config_success():
-    config_file = os.path.join(os.path.dirname(__file__),
-                               '../../../../etc/config.ini')
-    config.Config.CONFIG = config_file
-    conf = config.Config()
-    assert conf.mongo_url == 'mongodb://127.0.0.1:27017/'
-    assert conf.mongo_dbname == 'test_results_collection'
-    assert conf.api_port == 8000
-    assert conf.api_debug is True
-    assert conf.api_authenticate is False
-    assert conf.swagger_base_url == 'http://localhost:8000'
+def test_config_normal(mocker, config_normal):
+    mocker.patch(
+        'argparse.ArgumentParser.parse_known_args',
+        return_value=(argparse.Namespace(config_file=config_normal), None))
+    from opnfv_testapi.common import config
+    CONF = config.Config()
+    assert CONF.mongo_url == 'mongodb://127.0.0.1:27017/'
+    assert CONF.mongo_dbname == 'test_results_collection'
+    assert CONF.api_port == 8000
+    assert CONF.api_debug is True
+    assert CONF.api_authenticate is False
+    assert CONF.swagger_base_url == 'http://localhost:8000'
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py b/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py
new file mode 100644 (file)
index 0000000..feff1da
--- /dev/null
@@ -0,0 +1,8 @@
+from os import path
+
+import pytest
+
+
+@pytest.fixture
+def config_normal():
+    return path.join(path.dirname(__file__), 'common/normal.ini')
index b30c325..b8f696c 100644 (file)
@@ -10,6 +10,20 @@ import functools
 import httplib
 
 
+def upload(excepted_status, excepted_response):
+    def _upload(create_request):
+        @functools.wraps(create_request)
+        def wrap(self):
+            request = create_request(self)
+            status, body = self.upload(request)
+            if excepted_status == httplib.OK:
+                getattr(self, excepted_response)(body)
+            else:
+                self.assertIn(excepted_response, body)
+        return wrap
+    return _upload
+
+
 def create(excepted_status, excepted_response):
     def _create(create_request):
         @functools.wraps(create_request)
index adaf6f7..0ca83df 100644 (file)
@@ -6,9 +6,10 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from operator import itemgetter
+
 from bson.objectid import ObjectId
 from concurrent.futures import ThreadPoolExecutor
-from operator import itemgetter
 
 
 def thread_execute(method, *args, **kwargs):
@@ -119,10 +120,14 @@ class MemDb(object):
 
     @staticmethod
     def _compare_date(spec, value):
+        gte = True
+        lt = False
         for k, v in spec.iteritems():
-            if k == '$gte' and value >= v:
-                return True
-        return False
+            if k == '$gte' and value < v:
+                gte = False
+            elif k == '$lt' and value < v:
+                lt = True
+        return gte and lt
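
The fake driver's date matcher now models a half-open interval: a record
matches only if it clears the '$gte' lower bound and falls below a '$lt'
upper bound that must be present, which lines up with set_query() always
injecting an '$lt' of now(). Sketch:

    spec = {'$gte': '2016-05-01', '$lt': '2016-06-01'}
    # '2016-05-23 ...' matches; '2016-06-02 ...' fails the '$lt' bound;
    # a spec with no '$lt' at all matches nothing, by design.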
 
     def _in(self, content, *args):
         if self.name == 'scenarios':
@@ -185,9 +190,8 @@ class MemDb(object):
                 elif k == 'trust_indicator.current':
                     if content.get('trust_indicator').get('current') != v:
                         return False
-                elif content.get(k, None) != v:
+                elif not isinstance(v, dict) and content.get(k, None) != v:
                     return False
-
         return True
 
     def _find(self, *args):
index 6e4d454..dcec4e9 100644 (file)
@@ -12,13 +12,9 @@ from os import path
 import mock
 from tornado import testing
 
-from opnfv_testapi.common import config
 from opnfv_testapi.resources import models
 from opnfv_testapi.tests.unit import fake_pymongo
 
-config.Config.CONFIG = path.join(path.dirname(__file__),
-                                 '../../../../etc/config.ini')
-
 
 class TestBase(testing.AsyncHTTPTestCase):
     headers = {'Content-Type': 'application/json; charset=UTF-8'}
@@ -37,20 +33,21 @@ class TestBase(testing.AsyncHTTPTestCase):
 
     def tearDown(self):
         self.db_patcher.stop()
+        self.config_patcher.stop()
 
     def _patch_server(self):
-        from opnfv_testapi.cmd import server
-        server.parse_config([
-            '--config-file',
-            path.join(path.dirname(__file__), path.pardir, 'common/normal.ini')
-        ])
-        self.db_patcher = mock.patch('opnfv_testapi.cmd.server.get_db',
-                                     self._fake_pymongo)
+        import argparse
+        config = path.join(path.dirname(__file__), '../common/normal.ini')
+        self.config_patcher = mock.patch(
+            'argparse.ArgumentParser.parse_known_args',
+            return_value=(argparse.Namespace(config_file=config), None))
+        self.db_patcher = mock.patch('opnfv_testapi.db.api.DB',
+                                     fake_pymongo)
+        self.config_patcher.start()
         self.db_patcher.start()
 
-    @staticmethod
-    def _fake_pymongo():
-        return fake_pymongo
+    def set_config_file(self):
+        self.config_file = 'normal.ini'
 
     def get_app(self):
         from opnfv_testapi.cmd import server
index 8e0ae40..cb4f1d9 100644 (file)
@@ -85,5 +85,6 @@ class TestPodGet(TestPodBase):
             else:
                 self.assert_get_body(pod, self.req_e)
 
+
 if __name__ == '__main__':
     unittest.main()
index 5a2ce75..0622ba8 100644 (file)
@@ -132,5 +132,6 @@ class TestProjectDelete(TestProjectBase):
         code, body = self.get(self.req_d.name)
         self.assertEqual(code, httplib.NOT_FOUND)
 
+
 if __name__ == '__main__':
     unittest.main()
index c8463cb..1e83ed3 100644 (file)
@@ -10,6 +10,7 @@ import copy
 import httplib
 import unittest
 from datetime import datetime, timedelta
+import json
 
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import pod_models
@@ -60,9 +61,9 @@ class TestResultBase(base.TestBase):
         self.scenario = 'odl-l2'
         self.criteria = 'passed'
         self.trust_indicator = result_models.TI(0.7)
-        self.start_date = "2016-05-23 07:16:09.477097"
-        self.stop_date = "2016-05-23 07:16:19.477097"
-        self.update_date = "2016-05-24 07:16:19.477097"
+        self.start_date = str(datetime.now())
+        self.stop_date = str(datetime.now() + timedelta(minutes=1))
+        self.update_date = str(datetime.now() + timedelta(days=1))
         self.update_step = -0.05
         super(TestResultBase, self).setUp()
         self.details = Details(timestart='0', duration='9s', status='OK')
@@ -131,6 +132,22 @@ class TestResultBase(base.TestBase):
         _, res = self.create_d()
         return res.href.split('/')[-1]
 
+    def upload(self, req):
+        if req and not isinstance(req, str) and hasattr(req, 'format'):
+            req = req.format()
+        res = self.fetch(self.basePath + '/upload',
+                         method='POST',
+                         body=json.dumps(req),
+                         headers=self.headers)
+
+        return self._get_return(res, self.create_res)
+
+
+class TestResultUpload(TestResultBase):
+    @executor.upload(httplib.BAD_REQUEST, message.key_error('file'))
+    def test_filenotfind(self):
+        return None
+
 
 class TestResultCreate(TestResultBase):
     @executor.create(httplib.BAD_REQUEST, message.no_body())
@@ -208,9 +225,9 @@ class TestResultCreate(TestResultBase):
 class TestResultGet(TestResultBase):
     def setUp(self):
         super(TestResultGet, self).setUp()
+        self.req_10d_before = self._create_changed_date(days=-10)
         self.req_d_id = self._create_d()
         self.req_10d_later = self._create_changed_date(days=10)
-        self.req_10d_before = self._create_changed_date(days=-10)
 
     @executor.get(httplib.OK, 'assert_res')
     def test_getOne(self):
@@ -256,9 +273,9 @@ class TestResultGet(TestResultBase):
     def test_queryPeriodNotInt(self):
         return self._set_query('period=a')
 
-    @executor.query(httplib.OK, '_query_last_one', 1)
+    @executor.query(httplib.OK, '_query_period_one', 1)
     def test_queryPeriodSuccess(self):
-        return self._set_query('period=1')
+        return self._set_query('period=5')
 
     @executor.query(httplib.BAD_REQUEST, message.must_int('last'))
     def test_queryLastNotInt(self):
@@ -268,7 +285,17 @@ class TestResultGet(TestResultBase):
     def test_queryLast(self):
         return self._set_query('last=1')
 
-    @executor.query(httplib.OK, '_query_last_one', 1)
+    @executor.query(httplib.OK, '_query_success', 4)
+    def test_queryPublic(self):
+        self._create_public_data()
+        return self._set_query('')
+
+    @executor.query(httplib.OK, '_query_success', 1)
+    def test_queryPrivate(self):
+        self._create_private_data()
+        return self._set_query('public=false')
+
+    @executor.query(httplib.OK, '_query_period_one', 1)
     def test_combination(self):
         return self._set_query('pod',
                                'project',
@@ -279,7 +306,7 @@ class TestResultGet(TestResultBase):
                                'scenario',
                                'trust_indicator',
                                'criteria',
-                               'period=1')
+                               'period=5')
 
     @executor.query(httplib.OK, '_query_success', 0)
     def test_notFound(self):
@@ -294,6 +321,14 @@ class TestResultGet(TestResultBase):
                                'criteria',
                                'period=1')
 
+    @executor.query(httplib.OK, '_query_success', 1)
+    def test_filterErrorStartdate(self):
+        self._create_error_start_date(None)
+        self._create_error_start_date('None')
+        self._create_error_start_date('null')
+        self._create_error_start_date('')
+        return self._set_query('period=5')
+
     def _query_success(self, body, number):
         self.assertEqual(number, len(body.results))
 
@@ -301,6 +336,16 @@ class TestResultGet(TestResultBase):
         self.assertEqual(number, len(body.results))
         self.assert_res(body.results[0], self.req_10d_later)
 
+    def _query_period_one(self, body, number):
+        self.assertEqual(number, len(body.results))
+        self.assert_res(body.results[0], self.req_d)
+
+    def _create_error_start_date(self, start_date):
+        req = copy.deepcopy(self.req_d)
+        req.start_date = start_date
+        self.create(req)
+        return req
+
     def _create_changed_date(self, **kwargs):
         req = copy.deepcopy(self.req_d)
         req.start_date = datetime.now() + timedelta(**kwargs)
@@ -309,16 +354,29 @@ class TestResultGet(TestResultBase):
         self.create(req)
         return req
 
+    def _create_public_data(self, **kwargs):
+        req = copy.deepcopy(self.req_d)
+        req.public = 'true'
+        self.create(req)
+        return req
+
+    def _create_private_data(self, **kwargs):
+        req = copy.deepcopy(self.req_d)
+        req.public = 'false'
+        self.create(req)
+        return req
+
     def _set_query(self, *args):
         def get_value(arg):
             return self.__getattribute__(arg) \
                 if arg != 'trust_indicator' else self.trust_indicator.current
         uri = ''
         for arg in args:
-            if '=' in arg:
-                uri += arg + '&'
-            else:
-                uri += '{}={}&'.format(arg, get_value(arg))
+            if arg:
+                if '=' in arg:
+                    uri += arg + '&'
+                else:
+                    uri += '{}={}&'.format(arg, get_value(arg))
         return uri[0: -1]
 
 
index c9d4b72..940e256 100644 (file)
@@ -10,7 +10,6 @@ from tornado import web
 
 from opnfv_testapi.common import message
 from opnfv_testapi.resources import project_models
-from opnfv_testapi.router import url_mappings
 from opnfv_testapi.tests.unit import executor
 from opnfv_testapi.tests.unit import fake_pymongo
 from opnfv_testapi.tests.unit.resources import test_base as base
@@ -18,6 +17,7 @@ from opnfv_testapi.tests.unit.resources import test_base as base
 
 class TestToken(base.TestBase):
     def get_app(self):
+        from opnfv_testapi.router import url_mappings
         return web.Application(
             url_mappings.mappings,
             db=fake_pymongo,
@@ -109,5 +109,6 @@ class TestTokenUpdateProject(TestToken):
     def _update_success(self, request, body):
         self.assertIn(request.name, body)
 
+
 if __name__ == '__main__':
     unittest.main()
index 43f69d7..44ccb46 100644 (file)
@@ -1,4 +1,6 @@
 OPENID = 'openid'
+ROLE = 'role'
+DEFAULT_ROLE = 'user'
 
 # OpenID parameters
 OPENID_MODE = 'openid.mode'
index 6a9d94e..4623952 100644 (file)
@@ -1,11 +1,12 @@
 from six.moves.urllib import parse
+from tornado import gen
+from tornado import web
 
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
+from opnfv_testapi.db import api as dbapi
 from opnfv_testapi.ui.auth import base
 from opnfv_testapi.ui.auth import constants as const
 
-CONF = config.Config()
-
 
 class SigninHandler(base.BaseHandler):
     def get(self):
@@ -31,20 +32,30 @@ class SigninHandler(base.BaseHandler):
 
 
 class SigninReturnHandler(base.BaseHandler):
+    @web.asynchronous
+    @gen.coroutine
     def get(self):
         if self.get_query_argument(const.OPENID_MODE) == 'cancel':
             self._auth_failure('Authentication canceled.')
 
         openid = self.get_query_argument(const.OPENID_CLAIMED_ID)
-        user_info = {
+        role = const.DEFAULT_ROLE
+        new_user_info = {
             'openid': openid,
             'email': self.get_query_argument(const.OPENID_NS_SREG_EMAIL),
-            'fullname': self.get_query_argument(const.OPENID_NS_SREG_FULLNAME)
+            'fullname': self.get_query_argument(const.OPENID_NS_SREG_FULLNAME),
+            const.ROLE: role
         }
+        user = yield dbapi.db_find_one(self.table, {'openid': openid})
+        if not user:
+            dbapi.db_save(self.table, new_user_info)
+        else:
+            role = user.get(const.ROLE)
 
-        self.db_save(self.table, user_info)
-        if not self.get_secure_cookie('openid'):
-            self.set_secure_cookie('openid', openid)
+        self.clear_cookie(const.OPENID)
+        self.clear_cookie(const.ROLE)
+        self.set_secure_cookie(const.OPENID, openid)
+        self.set_secure_cookie(const.ROLE, role)
         self.redirect(url=CONF.ui_url)
 
     def _auth_failure(self, message):
@@ -57,9 +68,8 @@ class SigninReturnHandler(base.BaseHandler):
 class SignoutHandler(base.BaseHandler):
     def get(self):
         """Handle signout request."""
-        openid = self.get_secure_cookie(const.OPENID)
-        if openid:
-            self.clear_cookie(const.OPENID)
+        self.clear_cookie(const.OPENID)
+        self.clear_cookie(const.ROLE)
         params = {'openid_logout': CONF.osid_openid_logout_endpoint}
         url = parse.urljoin(CONF.ui_url,
                             '/#/logout?' + parse.urlencode(params))
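
Sign-in now persists a role per openid and mirrors it into a secure
cookie, which is what the signed queries in result_handlers read back.
A condensed coroutine sketch of the lookup (resolve_role is a
hypothetical helper; the collection is the handler's user table):

    from tornado import gen

    from opnfv_testapi.db import api as dbapi
    from opnfv_testapi.ui.auth import constants as const

    @gen.coroutine
    def resolve_role(self, openid, new_user_info):
        # first login: persist the user with the default role;
        # later logins: reuse whatever role was granted since.
        user = yield dbapi.db_find_one(self.table, {'openid': openid})
        if user:
            raise gen.Return(user.get(const.ROLE))
        dbapi.db_save(self.table, new_user_info)
        raise gen.Return(const.DEFAULT_ROLE)
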
index 140bca5..955cdee 100644 (file)
@@ -2,6 +2,7 @@ from tornado import gen
 from tornado import web
 
 from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
 from opnfv_testapi.ui.auth import base
 
 
@@ -12,12 +13,12 @@ class ProfileHandler(base.BaseHandler):
         openid = self.get_secure_cookie('openid')
         if openid:
             try:
-                user = yield self.db_find_one({'openid': openid})
+                user = yield dbapi.db_find_one(self.table, {'openid': openid})
                 self.finish_request({
                     "openid": user.get('openid'),
                     "email": user.get('email'),
                     "fullname": user.get('fullname'),
-                    "is_admin": False
+                    "role": user.get('role', 'user')
                 })
             except Exception:
                 pass
index bba7a86..5b2c922 100644 (file)
@@ -1,10 +1,10 @@
 from opnfv_testapi.resources.handlers import GenericApiHandler
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
 
 
 class RootHandler(GenericApiHandler):
     def get_template_path(self):
-        return config.Config().static_path
+        return CONF.static_path
 
     def get(self):
         self.render('testapi-ui/index.html')
index 955ffc8..4b6f75c 100644 (file)
@@ -2,9 +2,9 @@
 # of appearance. Changing the order has an impact on the overall integration
 # process, which may cause wedges in the gate later.
 
-pbr>=1.6
-setuptools>=16.0
-tornado>=3.1,<=4.3
+pbr>=2.0.0,!=2.1.0  # Apache-2.0
+setuptools>=16.0,!=24.0.0,!=34.0.0,!=34.0.1,!=34.0.2,!=34.0.3,!=34.1.0,!=34.1.1,!=34.2.0,!=34.3.0,!=34.3.1,!=34.3.2  # PSF/ZPL
+tornado>=3.1,<=4.3  # Apache-2.0
 epydoc>=0.3.1
-six>=1.9.0
-motor
+six>=1.9.0  # MIT
+motor  # Apache-2.0
index 15dda96..f689cb3 100644 (file)
@@ -3,7 +3,11 @@ import setuptools
 
 __author__ = 'serena'
 
+try:
+    import multiprocessing  # noqa
+except ImportError:
+    pass
 
 setuptools.setup(
-    setup_requires=['pbr>=1.8'],
+    setup_requires=['pbr==2.0.0'],
     pbr=True)
index 645687b..233f465 100644 (file)
@@ -2,7 +2,9 @@
 # of appearance. Changing the order has an impact on the overall integration
 # process, which may cause wedges in the gate later.
 
-mock
-pytest
-coverage
-nose>=1.3.1
+coverage>=4.0,!=4.4  # Apache-2.0
+mock>=2.0  # BSD
+nose  # LGPL
+pytest  # MIT
+pytest-cov  # MIT
+pytest-mock  # MIT
index 81c9dfa..d300f1a 100644 (file)
@@ -4,7 +4,7 @@
 # and then run "tox" from this directory.
 
 [tox]
-envlist = py27,pep8
+envlist = pep8,py27
 skipsdist = True
 sitepackages = True
 
@@ -16,9 +16,11 @@ deps =
   -rtest-requirements.txt
 commands=
   py.test \
-    --basetemp={envtmpdir} \
-    --cov \
-    {posargs}
+  --basetemp={envtmpdir} \
+  --cov \
+  --cov-report term-missing \
+  --cov-report xml \
+  {posargs}
 setenv=
   HOME = {envtmpdir}
   PYTHONPATH = {toxinidir}
index 7e0dd55..9c24377 100644 (file)
@@ -40,5 +40,6 @@ def backup(args):
     cmd = ['mongodump', '-o', '%s' % out]
     execute(cmd, args)
 
+
 if __name__ == '__main__':
     main(backup, parser)
index ba4334a..f759592 100644 (file)
@@ -85,5 +85,6 @@ def update(args):
     rename_fields(fields_old2New)
     rename_collections(collections_old2New)
 
+
 if __name__ == '__main__':
     main(update, parser)