Merge "Add huawei-pod2 and huawei virtual pod as CI POD"
author    Morgan Richomme <morgan.richomme@orange.com>
          Fri, 1 Jul 2016 07:14:41 +0000 (07:14 +0000)
committer Gerrit Code Review <gerrit@172.30.200.206>
          Fri, 1 Jul 2016 07:14:41 +0000 (07:14 +0000)
29 files changed:
docs/jenkins-job-builder/opnfv-jjb-usage.rst
jjb/apex/apex.yml
jjb/armband/armband-ci-jobs.yml
jjb/armband/armband-project-jobs.yml
jjb/bottlenecks/bottlenecks-project-jobs.yml
jjb/compass4nfv/compass-ci-jobs.yml
jjb/compass4nfv/compass-project-jobs.yml
jjb/cperf/cperf-ci-jobs.yml
jjb/fastpathmetrics/fastpathmetrics.yml
jjb/fuel/fuel-ci-jobs.yml
jjb/fuel/fuel-project-jobs.yml
jjb/functest/functest-ci-jobs.yml
jjb/functest/set-functest-env.sh
jjb/joid/joid-ci-jobs.yml
jjb/joid/joid-deploy.sh
jjb/vswitchperf/vswitchperf.yml
jjb/yardstick/yardstick-ci-jobs.yml
utils/fetch_os_creds.sh
utils/test/reporting/functest/reporting-vims.py
utils/test/result_collection_api/opnfv_testapi/resources/models.py
utils/test/result_collection_api/opnfv_testapi/resources/pod_handlers.py
utils/test/result_collection_api/opnfv_testapi/resources/project_handlers.py
utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py
utils/test/result_collection_api/opnfv_testapi/resources/result_models.py
utils/test/result_collection_api/opnfv_testapi/resources/testcase_handlers.py
utils/test/scripts/create_kibana_dashboards.py
utils/test/scripts/kibana_cleanup.py
utils/test/scripts/mongo_to_elasticsearch.py
utils/test/scripts/shared_utils.py

diff --git a/docs/jenkins-job-builder/opnfv-jjb-usage.rst b/docs/jenkins-job-builder/opnfv-jjb-usage.rst
index 6e2b6e2..73b31b2 100644
@@ -53,11 +53,13 @@ reviewed and submitted.
 * morgan.richomme@orange.com
 * vlaza@cloudbasesolutions.com
 * matthew.lijun@huawei.com
-* pbandzi@cisco.com
+* meimei@huawei.com
 * jose.lausuch@ericsson.com
 * koffirodrigue@gmail.com
 * r-mibu@cq.jp.nec.com
 
+Or add the group releng-contributors
+
 Or just email a request for submission to opnfv-helpdesk@rt.linuxfoundation.org
 
 The Current merge and verify jobs for jenkins job builder can be found
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index 6c14a81..578024e 100644
         - throttle:
             max-per-node: 1
             max-total: 10
+            option: 'project'
 
     builders:
         - 'apex-unit-test'
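
Note: the option: 'project' key (added to every throttle block in this change) selects the Throttle Concurrent Builds mode in which the max-per-node/max-total limits apply to the job itself rather than to a shared throttle category; Jenkins Job Builder's throttle property takes either 'project' or 'category' here, and setting it explicitly avoids relying on a default.
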
         - throttle:
             max-per-node: 1
             max-total: 10
+            option: 'project'
 
     builders:
         - 'apex-build'
         - throttle:
             max-per-node: 1
             max-total: 10
+            option: 'project'
 
     builders:
         - 'apex-deploy'
                 build-step-failure-threshold: 'never'
                 failure-threshold: 'never'
                 unstable-threshold: 'FAILURE'
+        - trigger-builds:
+          - project: 'apex-deploy-baremetal-os-onos-nofeature-ha-{stream1}'
+            predefined-parameters: |
+              BUILD_DIRECTORY=apex-build-{stream1}/build
+              OPNFV_CLEAN=yes
+            git-revision: true
+            same-node: true
+            block: true
+        - trigger-builds:
+          - project: 'functest-apex-{daily-slave}-daily-{stream1}'
+            predefined-parameters:
+              DEPLOY_SCENARIO=os-onos-nofeature-ha
+            block: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+        - trigger-builds:
+          - project: 'yardstick-apex-{slave}-daily-{stream1}'
+            predefined-parameters:
+              DEPLOY_SCENARIO=os-onos-nofeature-ha
+            block: true
+            same-node: true
+            block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
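
Note: the three new trigger-builds blocks wire the os-onos-nofeature-ha scenario into the apex daily chain: the deploy runs blocking on the same node, and for the functest/yardstick follow-ups the failure-threshold of 'never' keeps a test failure from failing the parent daily job outright, while unstable-threshold: 'FAILURE' downgrades it to an unstable result instead.
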
 
 - job-template:
     name: 'apex-gs-clean-{stream}'
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 7d39241..3d0db65 100644
@@ -58,6 +58,7 @@
             enabled: true
             max-total: 1
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
             enabled: true
             max-total: 1
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
diff --git a/jjb/armband/armband-project-jobs.yml b/jjb/armband/armband-project-jobs.yml
index aa089e0..732a9ea 100644
@@ -71,6 +71,7 @@
             enabled: true
             max-total: 1
             max-per-node: 1
+            option: 'project'
 
     parameters:
         - project-parameter:
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yml b/jjb/bottlenecks/bottlenecks-project-jobs.yml
index a28e2a4..ea000d8 100644
             enabled: true
             max-total: 1
             max-per-node: 1
+            option: 'project'
 
     parameters:
         - project-parameter:
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 2ef25a7..4bbea49 100644
@@ -77,6 +77,7 @@
         - throttle:
             enabled: true
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
         - throttle:
             enabled: true
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml
index cb5b2e5..a0438ee 100644
@@ -43,6 +43,7 @@
         - throttle:
             enabled: true
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
         - throttle:
             enabled: true
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
             enabled: true
             max-total: 1
             max-per-node: 1
+            option: 'project'
 
     parameters:
         - project-parameter:
             enabled: true
             max-total: 1
             max-per-node: 1
+            option: 'project'
 
     parameters:
         - project-parameter:
diff --git a/jjb/cperf/cperf-ci-jobs.yml b/jjb/cperf/cperf-ci-jobs.yml
index f6a8aca..d6c8601 100644
@@ -45,6 +45,7 @@
         - throttle:
             enabled: true
             max-per-node: 1
+            option: 'project'
 
     wrappers:
         - build-name:
diff --git a/jjb/fastpathmetrics/fastpathmetrics.yml b/jjb/fastpathmetrics/fastpathmetrics.yml
index db06afc..ad1b601 100644
@@ -83,6 +83,7 @@
             enabled: true
             max-total: 3
             max-per-node: 2
+            option: 'project'
 
     parameters:
         - project-parameter:
             enabled: true
             max-total: 3
             max-per-node: 2
+            option: 'project'
 
     parameters:
         - project-parameter:
diff --git a/jjb/fuel/fuel-ci-jobs.yml b/jjb/fuel/fuel-ci-jobs.yml
index b1d084e..e78be54 100644
@@ -56,6 +56,8 @@
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         - 'os-onos-sfc-ha':
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+        - 'os-onos-nofeature-ha':
+            auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         - 'os-odl_l2-sfc-ha':
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         - 'os-odl_l2-bgpvpn-ha':
@@ -84,6 +86,7 @@
             enabled: true
             max-total: 4
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
             enabled: true
             max-total: 4
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
 - trigger:
     name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 19 * * *'
+        - timed: '0 20 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
 - trigger:
     name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 5 * * *'
+        - timed: '0 4 * * *'
 - trigger:
     name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 10 * * *'
+        - timed: '0 8 * * *'
+- trigger:
+    name: 'fuel-os-onos-nofeature-ha-baremetal-daily-master-trigger'
+    triggers:
+        - timed: '0 12 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger'
     triggers:
-        - timed: '0 15 * * *'
+        - timed: '0 16 * * *'
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger'
     triggers:
     name: 'fuel-os-onos-sfc-ha-virtual-daily-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-onos-nofeature-ha-virtual-daily-master-trigger'
+    triggers:
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-master-trigger'
     triggers:
     name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-master-trigger'
     triggers:
         - timed: ''
+- trigger:
+    name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-master-trigger'
+    triggers:
+        - timed: ''
 - trigger:
     name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-master-trigger'
     triggers:
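
Note: the baremetal daily triggers are plain cron entries on one shared POD, so the existing slots are re-spaced (19:00 to 20:00, 05:00 to 04:00, 10:00 to 08:00, 15:00 to 16:00) to clear room for the new os-onos-nofeature-ha run at 12:00. The virtual and zte-pod1 variants are declared with timed: '' because the job template expands a trigger reference for every scenario/pod combination, so even disabled combinations need an empty trigger definition.
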
diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml
index 4e59c01..67343fb 100644
@@ -38,6 +38,7 @@
             enabled: true
             max-total: 1
             max-per-node: 1
+            option: 'project'
 
     parameters:
         - project-parameter:
             enabled: true
             max-total: 2
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
         - throttle:
             enabled: true
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index 8bbed4f..32251b8 100644
         - throttle:
             enabled: true
             max-per-node: 1
+            option: 'project'
 
     wrappers:
         - build-name:
         - choice:
             name: FUNCTEST_SUITE_NAME
             choices:
+                - 'healthcheck'
                 - 'tempest'
                 - 'rally'
                 - 'odl'
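
Note: Jenkins treats the first entry of a choice parameter as its default, so placing 'healthcheck' at the top makes it the default FUNCTEST_SUITE_NAME for manually triggered suite runs.
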
diff --git a/jjb/functest/set-functest-env.sh b/jjb/functest/set-functest-env.sh
index aae6f76..0b8747a 100755
@@ -5,6 +5,7 @@ set -e
 # labconfig is used only for joid
 labconfig=""
 if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
+    ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
     if sudo virsh list | grep instack; then
         instack_mac=$(sudo virsh domiflist instack | grep default | \
                       grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
@@ -17,8 +18,8 @@ if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
     fi
     INSTALLER_IP=$(/usr/sbin/arp -e | grep ${instack_mac} | awk {'print $1'})
     sshkey="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
-    sudo scp root@${INSTALLER_IP}:/home/stack/stackrc .
-    stackrc="-v ./stackrc:/home/opnfv/functest/conf/stackrc"
+    sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
+    stackrc="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
 
     if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
         sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
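
Note: the apex undercloud (instack) VM gets a fresh host key on each deployment, so without UserKnownHostsFile=/dev/null and StrictHostKeyChecking=no the scp of stackrc would stop on a host-key prompt or a known_hosts mismatch in a non-interactive job. Copying to ${HOME}/stackrc instead of ./stackrc also gives the subsequent docker -v bind mount the absolute path it requires.
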
diff --git a/jjb/joid/joid-ci-jobs.yml b/jjb/joid/joid-ci-jobs.yml
index b13b51f..6927ad9 100644
             auto-trigger-name: 'daily-trigger-disabled'
         - 'os-nosdn-nofeature-ha':
             auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+        - 'os-nosdn-lxd-ha':
+            auto-trigger-name: 'daily-trigger-disabled'
+        - 'os-nosdn-lxd-noha':
+            auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
         - 'os-odl_l2-nofeature-ha':
             auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
         - 'os-onos-nofeature-ha':
@@ -87,6 +91,7 @@
             enabled: true
             max-total: 4
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
               same-node: true
               block: true
         - trigger-builds:
-            - project: 'functest-joid-{pod}-daily-{stream}'
+            - project: 'yardstick-joid-{pod}-daily-{stream}'
               current-parameters: false
               predefined-parameters:
                 DEPLOY_SCENARIO={scenario}
-              same-node: true
               block: true
+              same-node: true
               block-thresholds:
                 build-step-failure-threshold: 'never'
                 failure-threshold: 'never'
                 unstable-threshold: 'FAILURE'
         - trigger-builds:
-            - project: 'yardstick-joid-{pod}-daily-{stream}'
+            - project: 'functest-joid-{pod}-daily-{stream}'
               current-parameters: false
               predefined-parameters:
                 DEPLOY_SCENARIO={scenario}
             enabled: true
             max-total: 4
             max-per-node: 1
+            option: 'project'
         - build-blocker:
             use-build-blocker: true
             blocking-jobs:
     name: 'joid-os-onos-nofeature-ha-juniper-pod1-brahmaputra-trigger'
     triggers:
         - timed: ''
+
+# os-nosdn-lxd-noha trigger - branch: master
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-virtual-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-orange-pod2-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-juniper-pod1-master-trigger'
+    triggers:
+        - timed: ''
+
+# os-nosdn-lxd-noha trigger - branch: stable/brahmaputra
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-baremetal-brahmaputra-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-virtual-brahmaputra-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-orange-pod2-brahmaputra-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-noha-juniper-pod1-brahmaputra-trigger'
+    triggers:
+        - timed: ''
+
+# os-nosdn-lxd-ha trigger - branch: master
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-virtual-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-orange-pod2-master-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-juniper-pod1-master-trigger'
+    triggers:
+        - timed: ''
+
+# os-nosdn-lxd-ha trigger - branch: stable/brahmaputra
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-baremetal-brahmaputra-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-virtual-brahmaputra-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-orange-pod2-brahmaputra-trigger'
+    triggers:
+        - timed: ''
+- trigger:
+    name: 'joid-os-nosdn-lxd-ha-juniper-pod1-brahmaputra-trigger'
+    triggers:
+        - timed: ''
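
Note: the new lxd scenarios ship with a full matrix of timed: '' placeholder triggers because the job template references 'joid-{scenario}-{pod}-{stream}-trigger' for every scenario/pod/branch combination; Jenkins Job Builder cannot expand a job that points at an undefined trigger macro, so disabled combinations still need an empty definition.
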
diff --git a/jjb/joid/joid-deploy.sh b/jjb/joid/joid-deploy.sh
index bd25973..6d9e8ae 100644
@@ -140,9 +140,9 @@ sed -i -r -- "s/^(\s+osd-reformat: )'no'/\1'$CEPH_REFORMAT'/" $SRCBUNDLE
 ##
 
 echo "------ Deploy with juju ------"
-echo "Execute: ./deploy.sh -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO"
+echo "Execute: ./deploy.sh -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES"
 
-./deploy.sh -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO
+./deploy.sh -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES
 exit_on_error $? "Main deploy FAILED"
 
 ##
@@ -154,7 +154,7 @@ echo "------ Create OpenRC file [$JOID_ADMIN_OPENRC] ------"
 # get Keystone ip
 case "$HA_MODE" in
     "ha")
-        KEYSTONE=$(cat bundles.yaml |shyaml get-value openstack-phase2.services.keystone.options.vip)
+        KEYSTONE=$(cat bundles.yaml |shyaml get-value openstack-phase1.services.keystone.options.vip)
         ;;
     *)
         KEYSTONE=$(juju status keystone |grep public-address|sed -- 's/.*\: //')
diff --git a/jjb/vswitchperf/vswitchperf.yml b/jjb/vswitchperf/vswitchperf.yml
index 0cf7f97..bf2fe8f 100644
@@ -59,6 +59,7 @@
             enabled: true
             max-total: 3
             max-per-node: 2
+            option: 'project'
 
     parameters:
         - project-parameter:
             enabled: true
             max-total: 3
             max-per-node: 2
+            option: 'project'
 
     parameters:
         - project-parameter:
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml
index 48c335a..4f98e2c 100644
         - throttle:
             enabled: true
             max-per-node: 1
+            option: 'project'
 
     wrappers:
         - build-name:
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index 8f5aa08..c240976 100755
@@ -82,7 +82,8 @@ if [ "$installer_type" == "fuel" ]; then
 
     # Check if controller is alive (online='True')
     controller_ip=$(sshpass -p r00tme ssh 2>/dev/null $ssh_options root@${installer_ip} \
-        'fuel node | grep controller | grep "True\|  1" | awk "{print \$10}" | tail -1') &> /dev/null
+        'fuel node | grep controller | grep "True\|  1" | awk -F\| "{print \$5}" | tail -1' | \
+        sed 's/ //g') &> /dev/null
 
     if [ -z $controller_ip ]; then
         error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
diff --git a/utils/test/reporting/functest/reporting-vims.py b/utils/test/reporting/functest/reporting-vims.py
index 922c6f3..4033687 100644
@@ -34,12 +34,13 @@ for installer in installers:
         print 'No kittez. Got an error code:', e
 
     test_results = results['results']
+    test_results.reverse()
 
     scenario_results = {}
     for r in test_results:
-        if not r['version'] in scenario_results.keys():
-            scenario_results[r['version']] = []
-        scenario_results[r['version']].append(r)
+        if not r['scenario'] in scenario_results.keys():
+            scenario_results[r['scenario']] = []
+        scenario_results[r['scenario']].append(r)
 
     for s, s_result in scenario_results.items():
         scenario_results[s] = s_result[0:5]
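
Note: results are now bucketed by deploy scenario rather than by version, since one version string (a branch such as master) lumps together runs of several scenarios. A minimal sketch of the grouping idiom with made-up records ('criteria' is a placeholder field; setdefault is an equivalent, slightly tighter form of the same pattern):

    results = [{'scenario': 'os-odl_l2-nofeature-ha', 'criteria': 'SUCCESS'},
               {'scenario': 'os-nosdn-nofeature-ha', 'criteria': 'FAILED'},
               {'scenario': 'os-odl_l2-nofeature-ha', 'criteria': 'SUCCESS'}]

    scenario_results = {}
    for r in results:
        # one bucket per scenario, keeping the (reversed) result order
        scenario_results.setdefault(r['scenario'], []).append(r)

    # keep only the first five entries per scenario, as the report does
    for s, s_result in scenario_results.items():
        scenario_results[s] = s_result[0:5]
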
diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/models.py b/utils/test/result_collection_api/opnfv_testapi/resources/models.py
index 290a7f3..e79308b 100644
@@ -16,6 +16,7 @@
 from opnfv_testapi.tornado_swagger import swagger\r
 \r
 \r
+@swagger.model()\r
 class CreateResponse(object):\r
     def __init__(self, href=''):\r
         self.href = href\r
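
Note: the @swagger.model() decorator registers CreateResponse with the tornado_swagger generator, so the @rtype: L{CreateResponse} annotations that the handler docstrings below switch to resolve to a known model in the generated API documentation.
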
diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/pod_handlers.py b/utils/test/result_collection_api/opnfv_testapi/resources/pod_handlers.py
index df4112f..8f44439 100644
@@ -36,7 +36,7 @@ class PodCLHandler(GenericPodHandler):
             @param body: pod to be created
             @type body: L{PodCreateRequest}
             @in body: body
-            @rtype: L{Pod}
+            @rtype: L{CreateResponse}
             @return 200: pod is created.
             @raise 403: pod already exists
             @raise 400: body or name not provided
diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/project_handlers.py b/utils/test/result_collection_api/opnfv_testapi/resources/project_handlers.py
index 171ab76..1e9a972 100644
@@ -38,7 +38,7 @@ class ProjectCLHandler(GenericProjectHandler):
             @param body: project to be created
             @type body: L{ProjectCreateRequest}
             @in body: body
-            @rtype: L{Project}
+            @rtype: L{CreateResponse}
             @return 200: project is created.
             @raise 403: project already exists
             @raise 400:  body or name not provided
diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py b/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py
index fe2d71e..56bed6c 100644
@@ -134,7 +134,7 @@ class ResultsCLHandler(GenericResultHandler):
             @param body: result to be created
             @type body: L{ResultCreateRequest}
             @in body: body
-            @rtype: L{TestResult}
+            @rtype: L{CreateResponse}
             @return 200: result is created.
             @raise 404: pod/project/testcase not exist
             @raise 400: body/pod_name/project_name/case_name not provided
diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/result_models.py b/utils/test/result_collection_api/opnfv_testapi/resources/result_models.py
index fb6a809..fdd8059 100644
@@ -90,7 +90,6 @@ class TestResult(object):
         t.case_name = a_dict.get('case_name')
         t.pod_name = a_dict.get('pod_name')
         t.project_name = a_dict.get('project_name')
-        t.description = a_dict.get('description')
         t.start_date = str(a_dict.get('start_date'))
         t.stop_date = str(a_dict.get('stop_date'))
         t.details = a_dict.get('details')
@@ -119,7 +118,6 @@ class TestResult(object):
             "case_name": self.case_name,
             "project_name": self.project_name,
             "pod_name": self.pod_name,
-            "description": self.description,
             "start_date": str(self.start_date),
             "stop_date": str(self.stop_date),
             "version": self.version,
@@ -137,7 +135,6 @@ class TestResult(object):
             "case_name": self.case_name,
             "project_name": self.project_name,
             "pod_name": self.pod_name,
-            "description": self.description,
             "start_date": str(self.start_date),
             "stop_date": str(self.stop_date),
             "version": self.version,
diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/testcase_handlers.py b/utils/test/result_collection_api/opnfv_testapi/resources/testcase_handlers.py
index b4f9db9..253aa66 100644
@@ -41,7 +41,7 @@ class TestcaseCLHandler(GenericTestcaseHandler):
             @param body: testcase to be created
             @type body: L{TestcaseCreateRequest}
             @in body: body
-            @rtype: L{Testcase}
+            @rtype: L{CreateResponse}
             @return 200: testcase is created in this project.
             @raise 403: project not exist
                         or testcase already exists in this project
diff --git a/utils/test/scripts/create_kibana_dashboards.py b/utils/test/scripts/create_kibana_dashboards.py
index 252ce21..73f4ed9 100644
@@ -7,7 +7,7 @@ import urlparse
 
 logger = logging.getLogger('create_kibana_dashboards')
 logger.setLevel(logging.DEBUG)
-file_handler = logging.FileHandler('/var/log/{}.log'.format(__name__))
+file_handler = logging.FileHandler('/var/log/{}.log'.format('create_kibana_dashboards'))
 file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
 logger.addHandler(file_handler)
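
Note: with the path built from __name__, each of these utilities logged to /var/log/__main__.log when run as a script (the entry module's __name__ is '__main__'), so all three tools wrote interleaved output to one file. Hard-coding the script name, here and in kibana_cleanup.py and mongo_to_elasticsearch.py below, gives each its own log.
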
 
@@ -15,7 +15,7 @@ _installers = {'fuel', 'apex', 'compass', 'joid'}
 
 # see class VisualizationState for details on format
 _testcases = [
-    ('functest', 'Tempest',
+    ('functest', 'tempest_smoke_serial',
      [
          {
              "metrics": [
@@ -28,7 +28,7 @@ _testcases = [
              ],
              "type": "line",
              "metadata": {
-                 "label": "Tempest duration",
+                 "label": "tempest_smoke_serial duration",
                  "test_family": "VIM"
              }
          },
@@ -50,7 +50,7 @@ _testcases = [
              ],
              "type": "histogram",
              "metadata": {
-                 "label": "Tempest nr of tests/failures",
+                 "label": "tempest_smoke_serial nr of tests/failures",
                  "test_family": "VIM"
              }
          },
@@ -66,14 +66,14 @@ _testcases = [
              ],
              "type": "line",
              "metadata": {
-                 "label": "Tempest success percentage",
+                 "label": "tempest_smoke_serial success percentage",
                  "test_family": "VIM"
              }
          }
      ]
      ),
 
-    ('functest', 'Rally',
+    ('functest', 'rally_sanity',
      [
          {
              "metrics": [
@@ -86,7 +86,7 @@ _testcases = [
              ],
              "type": "line",
              "metadata": {
-                 "label": "Rally duration",
+                 "label": "rally_sanity duration",
                  "test_family": "VIM"
              }
          },
@@ -102,7 +102,7 @@ _testcases = [
              ],
              "type": "histogram",
              "metadata": {
-                 "label": "Rally nr of tests",
+                 "label": "rally_sanity nr of tests",
                  "test_family": "VIM"
              }
          },
@@ -118,14 +118,14 @@ _testcases = [
              ],
              "type": "line",
              "metadata": {
-                 "label": "Rally success percentage",
+                 "label": "rally_sanity success percentage",
                  "test_family": "VIM"
              }
          }
      ]
      ),
 
-    ('functest', 'vPing',
+    ('functest', 'vping_ssh',
      [
          {
              "metrics": [
@@ -145,7 +145,7 @@ _testcases = [
      ]
      ),
 
-    ('functest', 'vPing_userdata',
+    ('functest', 'vping_userdata',
      [
          {
              "metrics": [
@@ -165,7 +165,7 @@ _testcases = [
      ]
      ),
 
-    ('functest', 'ODL',
+    ('functest', 'odl',
      [
          {
              "metrics": [
@@ -207,7 +207,7 @@ _testcases = [
      ]
      ),
 
-    ('functest', 'ONOS',
+    ('functest', 'onos',
      [
          {
              "metrics": [
@@ -287,7 +287,7 @@ _testcases = [
      ]
      ),
 
-    ('functest', 'vIMS',
+    ('functest', 'vims',
      [
          {
              "metrics": [
@@ -418,13 +418,13 @@ _testcases = [
 
 
 class KibanaDashboard(dict):
-    def __init__(self, project_name, case_name, installer, pod, versions, visualization_detail):
+    def __init__(self, project_name, case_name, installer, pod, scenarios, visualization_detail):
         super(KibanaDashboard, self).__init__()
         self.project_name = project_name
         self.case_name = case_name
         self.installer = installer
         self.pod = pod
-        self.versions = versions
+        self.scenarios = scenarios
         self.visualization_detail = visualization_detail
         self._visualization_title = None
         self._kibana_visualizations = []
@@ -433,12 +433,12 @@ class KibanaDashboard(dict):
         self._create()
 
     def _create_visualizations(self):
-        for version in self.versions:
+        for scenario in self.scenarios:
             self._kibana_visualizations.append(KibanaVisualization(self.project_name,
                                                                    self.case_name,
                                                                    self.installer,
                                                                    self.pod,
-                                                                   version,
+                                                                   scenario,
                                                                    self.visualization_detail))
 
         self._visualization_title = self._kibana_visualizations[0].vis_state_title
@@ -447,7 +447,7 @@ class KibanaDashboard(dict):
         for visualization in self._kibana_visualizations:
             url = urlparse.urljoin(base_elastic_url, '/.kibana/visualization/{}'.format(visualization.id))
             logger.debug("publishing visualization '{}'".format(url))
-            shared_utils.publish_json(visualization, es_user, es_passwd, url)
+            shared_utils.publish_json(visualization, es_creds, url)
 
     def _construct_panels(self):
         size_x = 6
@@ -495,7 +495,7 @@ class KibanaDashboard(dict):
         },
             separators=(',', ':'))
         self['uiStateJSON'] = "{}"
-        self['version'] = 1
+        self['scenario'] = 1
         self['timeRestore'] = False
         self['kibanaSavedObjectMeta'] = {
             'searchSourceJSON': json.dumps({
@@ -517,7 +517,7 @@ class KibanaDashboard(dict):
     def _publish(self):
         url = urlparse.urljoin(base_elastic_url, '/.kibana/dashboard/{}'.format(self.id))
         logger.debug("publishing dashboard '{}'".format(url))
-        shared_utils.publish_json(self, es_user, es_passwd, url)
+        shared_utils.publish_json(self, es_creds, url)
 
     def publish(self):
         self._publish_visualizations()
@@ -533,13 +533,13 @@ class KibanaSearchSourceJSON(dict):
                 ]
     """
 
-    def __init__(self, project_name, case_name, installer, pod, version):
+    def __init__(self, project_name, case_name, installer, pod, scenario):
         super(KibanaSearchSourceJSON, self).__init__()
         self["filter"] = [
             {"match": {"project_name": {"query": project_name, "type": "phrase"}}},
             {"match": {"case_name": {"query": case_name, "type": "phrase"}}},
             {"match": {"installer": {"query": installer, "type": "phrase"}}},
-            {"match": {"version": {"query": version, "type": "phrase"}}}
+            {"match": {"scenario": {"query": scenario, "type": "phrase"}}}
         ]
         if pod != 'all':
             self["filter"].append({"match": {"pod_name": {"query": pod, "type": "phrase"}}})
@@ -564,14 +564,14 @@ class VisualizationState(dict):
                     {
                         "type": type,           # default date_histogram
                         "params": {
-                            "field": field      # default creation_date
+                            "field": field      # default start_date
                     },
                     {segment2}
                 ],
             "type": type,                       # default area
             "mode": mode,                       # default grouped for type 'histogram', stacked for other types
             "metadata": {
-                    "label": "Tempest duration",# mandatory, no default
+                    "label": "tempest_smoke_serial duration",# mandatory, no default
                     "test_family": "VIM"        # mandatory, no default
                 }
             }
@@ -634,7 +634,7 @@ class VisualizationState(dict):
                     "type": 'date_histogram' if 'type' not in segment else segment['type'],
                     "schema": "metric",
                     "params": {
-                        "field": "creation_date" if ('params' not in segment or 'field' not in segment['params'])
+                        "field": "start_date" if ('params' not in segment or 'field' not in segment['params'])
                         else segment['params']['field'],
                         "interval": "auto",
                         "customInterval": "2h",
@@ -649,7 +649,7 @@ class VisualizationState(dict):
                 "type": 'date_histogram',
                 "schema": "segment",
                 "params": {
-                    "field": "creation_date",
+                    "field": "start_date",
                     "interval": "auto",
                     "customInterval": "2h",
                     "min_doc_count": 1,
@@ -663,7 +663,7 @@ class VisualizationState(dict):
 
 
 class KibanaVisualization(dict):
-    def __init__(self, project_name, case_name, installer, pod, version, detail):
+    def __init__(self, project_name, case_name, installer, pod, scenario, detail):
         """
         We need two things
         1. filter created from
@@ -671,7 +671,7 @@ class KibanaVisualization(dict):
             case_name
             installer
             pod
-            version
+            scenario
         2. visualization state
             field for y axis (metric) with type (avg, sum, etc.)
             field for x axis (segment) with type (date_histogram)
@@ -686,27 +686,27 @@ class KibanaVisualization(dict):
                                                    self.vis_state_title,
                                                    installer,
                                                    pod,
-                                                   version)
+                                                   scenario)
         self.id = self['title'].replace(' ', '-').replace('/', '-')
         self['visState'] = json.dumps(vis_state, separators=(',', ':'))
         self['uiStateJSON'] = "{}"
         self['description'] = "Kibana visualization for project_name '{}', case_name '{}', data '{}', installer '{}'," \
-                              " pod '{}' and version '{}'".format(project_name,
+                              " pod '{}' and scenario '{}'".format(project_name,
                                                                   case_name,
                                                                   self.vis_state_title,
                                                                   installer,
                                                                   pod,
-                                                                  version)
-        self['version'] = 1
+                                                                  scenario)
+        self['scenario'] = 1
         self['kibanaSavedObjectMeta'] = {"searchSourceJSON": json.dumps(KibanaSearchSourceJSON(project_name,
                                                                                                case_name,
                                                                                                installer,
                                                                                                pod,
-                                                                                               version),
+                                                                                               scenario),
                                                                         separators=(',', ':'))}
 
 
-def _get_pods_and_versions(project_name, case_name, installer):
+def _get_pods_and_scenarios(project_name, case_name, installer):
     query_json = json.JSONEncoder().encode({
         "query": {
             "bool": {
@@ -723,30 +723,30 @@ def _get_pods_and_versions(project_name, case_name, installer):
     })
 
     elastic_data = shared_utils.get_elastic_data(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
-                                                 es_user, es_passwd, query_json)
+                                                 es_creds, query_json)
 
-    pods_and_versions = {}
+    pods_and_scenarios = {}
 
     for data in elastic_data:
         pod = data['pod_name']
-        if pod in pods_and_versions:
-            pods_and_versions[pod].add(data['version'])
+        if pod in pods_and_scenarios:
+            pods_and_scenarios[pod].add(data['scenario'])
         else:
-            pods_and_versions[pod] = {data['version']}
+            pods_and_scenarios[pod] = {data['scenario']}
 
-        if 'all' in pods_and_versions:
-            pods_and_versions['all'].add(data['version'])
+        if 'all' in pods_and_scenarios:
+            pods_and_scenarios['all'].add(data['scenario'])
         else:
-            pods_and_versions['all'] = {data['version']}
+            pods_and_scenarios['all'] = {data['scenario']}
 
-    return pods_and_versions
+    return pods_and_scenarios
 
 
 def construct_dashboards():
     """
     iterate over testcase and installer
     1. get available pods for each testcase/installer pair
-    2. get available version for each testcase/installer/pod tuple
+    2. get available scenario for each testcase/installer/pod tuple
     3. construct KibanaInput and append
 
     :return: list of KibanaDashboards
@@ -754,10 +754,10 @@ def construct_dashboards():
     kibana_dashboards = []
     for project_name, case_name, visualization_details in _testcases:
         for installer in _installers:
-            pods_and_versions = _get_pods_and_versions(project_name, case_name, installer)
+            pods_and_scenarios = _get_pods_and_scenarios(project_name, case_name, installer)
             for visualization_detail in visualization_details:
-                for pod, versions in pods_and_versions.iteritems():
-                    kibana_dashboards.append(KibanaDashboard(project_name, case_name, installer, pod, versions,
+                for pod, scenarios in pods_and_scenarios.iteritems():
+                    kibana_dashboards.append(KibanaDashboard(project_name, case_name, installer, pod, scenarios,
                                                              visualization_detail))
     return kibana_dashboards
 
@@ -794,26 +794,25 @@ if __name__ == '__main__':
     parser = argparse.ArgumentParser(description='Create Kibana dashboards from data in elasticsearch')
     parser.add_argument('-e', '--elasticsearch-url', default='http://localhost:9200',
                         help='the url of elasticsearch, defaults to http://localhost:9200')
+
     parser.add_argument('-js', '--generate_js_inputs', action='store_true',
                         help='Use this argument to generate javascript inputs for kibana landing page')
+
     parser.add_argument('--js_path', default='/usr/share/nginx/html/kibana_dashboards/conf.js',
                         help='Path of javascript file with inputs for kibana landing page')
+
     parser.add_argument('-k', '--kibana_url', default='https://testresults.opnfv.org/kibana/app/kibana',
                         help='The url of kibana for javascript inputs')
 
-    parser.add_argument('-u', '--elasticsearch-username',
-                        help='the username for elasticsearch')
-
-    parser.add_argument('-p', '--elasticsearch-password',
-                        help='the password for elasticsearch')
+    parser.add_argument('-u', '--elasticsearch-username', default=None,
+                        help='The username with password for elasticsearch in format username:password')
 
     args = parser.parse_args()
     base_elastic_url = args.elasticsearch_url
     generate_inputs = args.generate_js_inputs
     input_file_path = args.js_path
     kibana_url = args.kibana_url
-    es_user = args.elasticsearch_username
-    es_passwd = args.elasticsearch_password
+    es_creds = args.elasticsearch_username
 
     dashboards = construct_dashboards()
 
@@ -822,3 +821,4 @@ if __name__ == '__main__':
 
     if generate_inputs:
         generate_js_inputs(input_file_path, kibana_url, dashboards)
+
diff --git a/utils/test/scripts/kibana_cleanup.py b/utils/test/scripts/kibana_cleanup.py
index 2cd52af..e8d452a 100644
@@ -7,16 +7,16 @@ import urlparse
 
 logger = logging.getLogger('clear_kibana')
 logger.setLevel(logging.DEBUG)
-file_handler = logging.FileHandler('/var/log/{}.log'.format(__name__))
+file_handler = logging.FileHandler('/var/log/{}.log'.format('clear_kibana'))
 file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
 logger.addHandler(file_handler)
 
 
-def delete_all(url, es_user, es_passwd):
-    ids = shared_utils.get_elastic_data(url, es_user, es_passwd, body=None, field='_id')
+def delete_all(url, es_creds):
+    ids = shared_utils.get_elastic_data(url, es_creds, body=None, field='_id')
     for id in ids:
         del_url = '/'.join([url, id])
-        shared_utils.delete_request(del_url, es_user, es_passwd)
+        shared_utils.delete_request(del_url, es_creds)
 
 
 if __name__ == '__main__':
@@ -24,20 +24,17 @@ if __name__ == '__main__':
     parser.add_argument('-e', '--elasticsearch-url', default='http://localhost:9200',
                         help='the url of elasticsearch, defaults to http://localhost:9200')
 
-    parser.add_argument('-u', '--elasticsearch-username',
-                        help='the username for elasticsearch')
-
-    parser.add_argument('-p', '--elasticsearch-password',
-                        help='the password for elasticsearch')
+    parser.add_argument('-u', '--elasticsearch-username', default=None,
+                        help='The username with password for elasticsearch in format username:password')
 
     args = parser.parse_args()
     base_elastic_url = args.elasticsearch_url
-    es_user = args.elasticsearch_username
-    es_passwd = args.elasticsearch_password
+    es_creds = args.elasticsearch_username
 
     urls = (urlparse.urljoin(base_elastic_url, '/.kibana/visualization'),
             urlparse.urljoin(base_elastic_url, '/.kibana/dashboard'),
             urlparse.urljoin(base_elastic_url, '/.kibana/search'))
 
     for url in urls:
-        delete_all(url, es_user, es_passwd)
+        delete_all(url, es_creds)
+
diff --git a/utils/test/scripts/mongo_to_elasticsearch.py b/utils/test/scripts/mongo_to_elasticsearch.py
index 8c36d30..2ffbc17 100644
@@ -11,7 +11,7 @@ import datetime
 
 logger = logging.getLogger('mongo_to_elasticsearch')
 logger.setLevel(logging.DEBUG)
-file_handler = logging.FileHandler('/var/log/{}.log'.format(__name__))
+file_handler = logging.FileHandler('/var/log/{}.log'.format('mongo_to_elasticsearch'))
 file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
 logger.addHandler(file_handler)
 
@@ -21,7 +21,7 @@ def _get_dicts_from_list(testcase, dict_list, keys):
     for dictionary in dict_list:
         # iterate over dictionaries in input list
         if not isinstance(dictionary, dict):
-            logger.info("Skipping non-dict details testcase [{}]".format(testcase))
+            logger.info("Skipping non-dict details testcase '{}'".format(testcase))
             continue
         if keys == set(dictionary.keys()):
             # check the dictionary structure
@@ -143,6 +143,9 @@ def modify_functest_onos(testcase):
     """
     testcase_details = testcase['details']
 
+    if 'FUNCvirNet' not in testcase_details:
+        return modify_default_entry(testcase)
+
     funcvirnet_details = testcase_details['FUNCvirNet']['status']
     funcvirnet_statuses = _get_dicts_from_list(testcase, funcvirnet_details, {'Case result', 'Case name:'})
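
Note: older onos entries carry no 'FUNCvirNet' block in their details, so the new guard routes them through modify_default_entry instead of failing on the missing key.
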
 
@@ -238,6 +241,7 @@ def modify_functest_odl(testcase):
             'failures': failed_tests,
             'success_percentage': 100 * passed_tests / float(all_tests)
         }
+        logger.debug("Modified odl testcase: '{}'".format(json.dumps(testcase, indent=2)))
         return True
 
 
@@ -296,7 +300,8 @@ def verify_mongo_entry(testcase):
                         'case_name',
                         'project_name',
                         'details']
-    mandatory_fields_to_modify = {'creation_date': _fix_date}
+    mandatory_fields_to_modify = {'start_date': _fix_date}
+    fields_to_swap_or_add = {'scenario': 'version'}
     if '_id' in testcase:
         mongo_id = testcase['_id']
     else:
@@ -320,6 +325,15 @@ def verify_mongo_entry(testcase):
             else:
                 testcase[key] = mandatory_fields_to_modify[key](value)
                 del mandatory_fields_to_modify[key]
+        elif key in fields_to_swap_or_add:
+            if value is None:
+                swapped_key = fields_to_swap_or_add[key]
+                swapped_value = testcase[swapped_key]
+                logger.info("Swapping field '{}' with value None for '{}' with value '{}'.".format(key, swapped_key, swapped_value))
+                testcase[key] = swapped_value
+                del fields_to_swap_or_add[key]
+            else:
+                del fields_to_swap_or_add[key]
         elif key in optional_fields:
             if value is None:
                 # empty optional field, remove
@@ -334,7 +348,16 @@ def verify_mongo_entry(testcase):
         logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing"
                     " mandatory field(s) '{}'".format(mongo_id, mandatory_fields))
         return False
+    elif len(mandatory_fields_to_modify) > 0:
+        # some mandatory fields are missing
+        logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing"
+                    " mandatory field(s) '{}'".format(mongo_id, mandatory_fields_to_modify.keys()))
+        return False
     else:
+        if len(fields_to_swap_or_add) > 0:
+            for key, swap_key in fields_to_swap_or_add.iteritems():
+                testcase[key] = testcase[swap_key]
+
         return True
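
Note: the swap-or-add handling covers legacy mongo records that predate the scenario field and carried the scenario string in 'version'. A minimal sketch of the intent, with a made-up record:

    # legacy record: 'scenario' is absent/None, the value lives in 'version'
    testcase = {'version': 'os-onos-nofeature-ha', 'scenario': None}

    if testcase.get('scenario') is None:
        # backfill from the old field so elasticsearch queries on
        # 'scenario' also match pre-migration results
        testcase['scenario'] = testcase['version']
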
 
 
@@ -346,16 +369,17 @@ def modify_mongo_entry(testcase):
     if verify_mongo_entry(testcase):
         project = testcase['project_name']
         case_name = testcase['case_name']
+        logger.info("Processing mongo test case '{}'".format(case_name))
         if project == 'functest':
-            if case_name == 'Rally':
+            if case_name == 'rally_sanity':
                 return modify_functest_rally(testcase)
-            elif case_name == 'ODL':
+            elif case_name.lower() == 'odl':
                 return modify_functest_odl(testcase)
-            elif case_name == 'ONOS':
+            elif case_name.lower() == 'onos':
                 return modify_functest_onos(testcase)
-            elif case_name == 'vIMS':
+            elif case_name.lower() == 'vims':
                 return modify_functest_vims(testcase)
-            elif case_name == 'Tempest':
+            elif case_name == 'tempest_smoke_serial':
                 return modify_functest_tempest(testcase)
         return modify_default_entry(testcase)
     else:
@@ -371,7 +395,12 @@ def publish_mongo_data(output_destination):
             for mongo_json_line in fobj:
                 test_result = json.loads(mongo_json_line)
                 if modify_mongo_entry(test_result):
-                    shared_utils.publish_json(test_result, es_user, es_passwd, output_destination)
+                    status, data = shared_utils.publish_json(test_result, es_creds, output_destination)
+                    if status > 300:
+                        project = test_result['project_name']
+                        case_name = test_result['case_name']
+                        logger.info('project {} case {} publish failed, due to [{}]'
+                                    .format(project, case_name, json.loads(data)['error']['reason']))
     finally:
         if os.path.exists(tmp_filename):
             os.remove(tmp_filename)
@@ -380,7 +409,7 @@ def publish_mongo_data(output_destination):
 def get_mongo_data(days):
     past_time = datetime.datetime.today() - datetime.timedelta(days=days)
     mongo_json_lines = subprocess.check_output(['mongoexport', '--db', 'test_results_collection', '-c', 'results',
-                                                '--query', '{{"creation_date":{{$gt:"{}"}}}}'
+                                                '--query', '{{"start_date":{{$gt:"{}"}}}}'
                                                .format(past_time)]).splitlines()
 
     mongo_data = []
@@ -392,7 +421,7 @@ def get_mongo_data(days):
     return mongo_data
 
 
-def publish_difference(mongo_data, elastic_data, output_destination, es_user, es_passwd):
+def publish_difference(mongo_data, elastic_data, output_destination, es_creds):
     for elastic_entry in elastic_data:
         if elastic_entry in mongo_data:
             mongo_data.remove(elastic_entry)
@@ -400,7 +429,7 @@ def publish_difference(mongo_data, elastic_data, output_destination, es_user, es_passwd):
     logger.info('number of parsed test results: {}'.format(len(mongo_data)))
 
     for parsed_test_result in mongo_data:
-        shared_utils.publish_json(parsed_test_result, es_user, es_passwd, output_destination)
+        shared_utils.publish_json(parsed_test_result, es_creds, output_destination)
 
 
 if __name__ == '__main__':
@@ -418,42 +447,35 @@ if __name__ == '__main__':
     parser.add_argument('-e', '--elasticsearch-url', default='http://localhost:9200',
                         help='the url of elasticsearch, defaults to http://localhost:9200')
 
-    parser.add_argument('-u', '--elasticsearch-username',
-                        help='the username for elasticsearch')
-
-    parser.add_argument('-p', '--elasticsearch-password',
-                        help='the password for elasticsearch')
-
-    parser.add_argument('-m', '--mongodb-url', default='http://localhost:8082',
-                        help='the url of mongodb, defaults to http://localhost:8082')
+    parser.add_argument('-u', '--elasticsearch-username', default=None,
+                        help='The username with password for elasticsearch in format username:password')
 
     args = parser.parse_args()
-    base_elastic_url = urlparse.urljoin(args.elasticsearch_url, '/results/mongo2elastic')
+    base_elastic_url = urlparse.urljoin(args.elasticsearch_url, '/test_results/mongo2elastic')
     output_destination = args.output_destination
     days = args.merge_latest
-    es_user = args.elasticsearch_username
-    es_passwd = args.elasticsearch_password
+    es_creds = args.elasticsearch_username
 
     if output_destination == 'elasticsearch':
         output_destination = base_elastic_url
 
     # parsed_test_results will be printed/sent to elasticsearch
     if days == 0:
-        # TODO get everything from mongo
         publish_mongo_data(output_destination)
     elif days > 0:
         body = '''{{
     "query" : {{
         "range" : {{
-            "creation_date" : {{
+            "start_date" : {{
                 "gte" : "now-{}d"
             }}
         }}
     }}
 }}'''.format(days)
-        elastic_data = shared_utils.get_elastic_data(base_elastic_url, es_user, es_passwd, body)
+        elastic_data = shared_utils.get_elastic_data(base_elastic_url, es_creds, body)
         logger.info('number of hits in elasticsearch for now-{}d: {}'.format(days, len(elastic_data)))
         mongo_data = get_mongo_data(days)
-        publish_difference(mongo_data, elastic_data, output_destination, es_user, es_passwd)
+        publish_difference(mongo_data, elastic_data, output_destination, es_creds)
     else:
         raise Exception('Update must be non-negative')
+
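
Note: the publisher now targets /test_results/mongo2elastic, the same index path that _get_pods_and_scenarios in create_kibana_dashboards.py queries, so the dashboards read exactly what the migration writes.
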
diff --git a/utils/test/scripts/shared_utils.py b/utils/test/scripts/shared_utils.py
index 899f844..8bbbdbe 100644
@@ -3,27 +3,29 @@ import json
 http = urllib3.PoolManager()
 
 
-def delete_request(url, username, password, body=None):
-    headers = urllib3.util.make_headers(basic_auth=':'.join([username, password]))
+def delete_request(url, creds, body=None):
+    headers = urllib3.make_headers(basic_auth=creds)
     http.request('DELETE', url, headers=headers, body=body)
 
 
-def publish_json(json_ojb, username, password, output_destination):
+def publish_json(json_ojb, creds, output_destination):
     json_dump = json.dumps(json_ojb)
     if output_destination == 'stdout':
         print json_dump
+        return 200, None
     else:
-        headers = urllib3.util.make_headers(basic_auth=':'.join([username, password]))
-        http.request('POST', output_destination, headers=headers, body=json_dump)
+        headers = urllib3.make_headers(basic_auth=creds)
+        result = http.request('POST', output_destination, headers=headers, body=json_dump)
+        return result.status, result.data
 
 
 def _get_nr_of_hits(elastic_json):
     return elastic_json['hits']['total']
 
 
-def get_elastic_data(elastic_url, username, password, body, field='_source'):
+def get_elastic_data(elastic_url, creds, body, field='_source'):
     # 1. get the number of results
-    headers = urllib3.util.make_headers(basic_auth=':'.join([username, password]))
+    headers = urllib3.make_headers(basic_auth=creds)
     elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size=0', headers=headers, body=body).data)
     nr_of_hits = _get_nr_of_hits(elastic_json)
 
@@ -34,3 +36,4 @@ def get_elastic_data(elastic_url, username, password, body, field='_source'):
     for hit in elastic_json['hits']['hits']:
         elastic_data.append(hit[field])
     return elastic_data
+
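
Note: collapsing the separate username/password arguments into a single 'username:password' string matches what urllib3's make_headers(basic_auth=...) accepts directly, and it degrades cleanly when no credentials are configured. A minimal sketch ('admin:secret' is a placeholder):

    import urllib3

    creds = 'admin:secret'                       # one "username:password" string
    print urllib3.make_headers(basic_auth=creds)
    # {'authorization': 'Basic YWRtaW46c2VjcmV0'}

    # with creds=None every header option defaults off, so the request
    # is simply sent without an Authorization header
    print urllib3.make_headers(basic_auth=None)  # {}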