Merge "bifrost: Refactor bifrost jobs"
author Fatih Degirmenci <fatih.degirmenci@ericsson.com>
Tue, 27 Sep 2016 20:22:58 +0000 (20:22 +0000)
committer Gerrit Code Review <gerrit@172.30.200.206>
Tue, 27 Sep 2016 20:22:59 +0000 (20:22 +0000)
13 files changed:
jjb/daisy4nfv/daisy4nfv-basic.sh [new file with mode: 0755]
jjb/daisy4nfv/daisy4nfv-build.sh [new file with mode: 0755]
jjb/daisy4nfv/daisy4nfv-smoke-test.sh [new file with mode: 0755]
jjb/daisy4nfv/daisy4nfv-verify-jobs.yml [new file with mode: 0644]
jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh [new file with mode: 0755]
jjb/fuel/fuel-daily-jobs.yml [moved from jjb/fuel/fuel-ci-jobs.yml with 100% similarity]
jjb/joid/joid-daily-jobs.yml [moved from jjb/joid/joid-ci-jobs.yml with 100% similarity]
jjb/multisite/multisite.yml
utils/push-test-logs.sh
utils/test/dashboard/dashboard/common/elastic_access.py
utils/test/dashboard/dashboard/elastic2kibana/main.py
utils/test/dashboard/dashboard/mongo2elastic/main.py
utils/test/dashboard/kibana_cleanup.py

diff --git a/jjb/daisy4nfv/daisy4nfv-basic.sh b/jjb/daisy4nfv/daisy4nfv-basic.sh
new file mode 100755 (executable)
index 0000000..87f5482
--- /dev/null
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is diasy4nfv basic job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-build.sh b/jjb/daisy4nfv/daisy4nfv-build.sh
new file mode 100755 (executable)
index 0000000..9eae848
--- /dev/null
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is diasy4nfv build job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-smoke-test.sh b/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
new file mode 100755 (executable)
index 0000000..bd6eb7e
--- /dev/null
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is diasy4nfv smoke test job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
new file mode 100644 (file)
index 0000000..6444cf8
--- /dev/null
@@ -0,0 +1,228 @@
+- project:
+    name: 'daisy4nfv-verify-jobs'
+
+    project: 'daisy4nfv'
+
+    installer: 'daisy4nfv'
+#####################################
+# branch definitions
+#####################################
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+#####################################
+# patch verification phases
+#####################################
+    phase:
+        - 'basic':
+            slave-label: 'opnfv-build'
+        - 'build':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'deploy-virtual':
+            slave-label: 'opnfv-build'
+        - 'smoke-test':
+            slave-label: 'opnfv-build'
+#####################################
+# jobs
+#####################################
+    jobs:
+        - 'daisy4nfv-verify-{stream}'
+        - 'daisy4nfv-verify-{phase}-{stream}'
+#####################################
+# job templates
+#####################################
+- job-template:
+    name: 'daisy4nfv-verify-{stream}'
+
+    project-type: multijob
+
+    disabled: false
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            option: 'project'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**|.gitignore'
+            readable-message: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - 'opnfv-build-defaults'
+        - 'daisy4nfv-verify-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - multijob:
+            name: basic
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-basic-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: build
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-build-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: deploy-virtual
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-deploy-virtual-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: smoke-test
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-smoke-test-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+
+- job-template:
+    name: 'daisy4nfv-verify-{phase}-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 6
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'daisy4nfv-verify-deploy-.*'
+                - 'daisy4nfv-verify-test-.*'
+            block-level: 'NODE'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - '{slave-label}-defaults'
+        - 'daisy4nfv-verify-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - '{project}-verify-{phase}-macro'
+#####################################
+# builder macros
+#####################################
+- builder:
+    name: 'daisy4nfv-verify-basic-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-basic.sh
+
+- builder:
+    name: 'daisy4nfv-verify-build-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-build.sh
+
+- builder:
+    name: 'daisy4nfv-verify-deploy-virtual-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-virtual-deploy.sh
+
+- builder:
+    name: 'daisy4nfv-verify-smoke-test-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Not activated!"
+#####################################
+# parameter macros
+#####################################
+- parameter:
+    name: 'daisy4nfv-verify-defaults'
+    parameters:
+        - string:
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
+        - string:
+            name: CACHE_DIRECTORY
+            default: $HOME/opnfv/cache/$INSTALLER_TYPE
+            description: "Directory where the cache to be used during the build is located."
+        - string:
+            name: GS_URL
+            default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+            description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh b/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
new file mode 100755 (executable)
index 0000000..8936be6
--- /dev/null
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is diasy4nfv virtual deploy job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/multisite/multisite.yml b/jjb/multisite/multisite.yml
index 21b9730..24c03fd 100644 (file)
               current-parameters: true
               same-node: true
               block: true
-        - trigger-builds:
-            - project: 'functest-fuel-virtual-suite-{stream}'
-              current-parameters: true
-              predefined-parameters:
-                FUNCTEST_SUITE_NAME=healthcheck
-              same-node: true
-              block: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
         - trigger-builds:
             - project: 'functest-fuel-virtual-suite-{stream}'
               current-parameters: true
diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh
index f24d884..87cee78 100644 (file)
@@ -23,7 +23,8 @@ dir_result="${HOME}/opnfv/$project/results/${branch}"
 node_list=(\
 'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod3' \
 'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \
-'ericsson-pod2' \
+'ericsson-pod2' 'ericsson-pod3' 'ericsson-pod4' \
+'ericsson-virtual2' 'ericsson-virtual3' 'ericsson-virtual4' 'ericsson-virtual5' \
 'arm-pod1' 'arm-pod3' \
 'huawei-pod1' 'huawei-pod2' 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4')
 
diff --git a/utils/test/dashboard/dashboard/common/elastic_access.py b/utils/test/dashboard/dashboard/common/elastic_access.py
index e90a17f..b454e9a 100644 (file)
@@ -5,41 +5,49 @@ import urllib3
 http = urllib3.PoolManager()
 
 
-def delete_request(url, creds, body=None):
+def _request(method, url, creds=None, body=None):
     headers = urllib3.make_headers(basic_auth=creds)
-    http.request('DELETE', url, headers=headers, body=body)
+    return http.request(method, url, headers=headers, body=body)
 
 
-def publish_json(json_ojb, creds, to):
-    json_dump = json.dumps(json_ojb)
+def _post(url, creds=None, body=None):
+    return _request('POST', url, creds=creds, body=body)
+
+
+def _get(url, creds=None, body=None):
+    return json.loads(_request('GET', url, creds=creds, body=body).data)
+
+
+def delete_docs(url, creds=None, body=None):
+    return _request('DELETE', url, creds=creds, body=body)
+
+
+def publish_docs(docs, creds, to):
+    json_docs = json.dumps(docs)
     if to == 'stdout':
-        print json_dump
+        print json_docs
         return 200, None
     else:
-        headers = urllib3.make_headers(basic_auth=creds)
-        result = http.request('POST', to, headers=headers, body=json_dump)
+        result = _post(to, creds=creds, body=json_docs)
         return result.status, result.data
 
 
-def _get_nr_of_hits(elastic_json):
-    return elastic_json['hits']['total']
+def _get_docs_nr(url, creds=None, body=None):
+    res_data = _get('{}/_search?size=0'.format(url), creds=creds, body=body)
+    print type(res_data), res_data
+    return res_data['hits']['total']
 
 
-def get_elastic_docs(elastic_url, creds, body=None, field = '_source'):
-
-    # 1. get the number of results
-    headers = urllib3.make_headers(basic_auth=creds)
-    elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size=0', headers=headers, body=body).data)
-    print elastic_json
-    nr_of_hits = _get_nr_of_hits(elastic_json)
+def get_docs(url, creds=None, body=None, field='_source'):
 
-    # 2. get all results
-    elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size={}'.format(nr_of_hits), headers=headers, body=body).data)
+    docs_nr = _get_docs_nr(url, creds=creds, body=body)
+    res_data = _get('{}/_search?size={}'.format(url, docs_nr),
+                    creds=creds, body=body)
 
-    elastic_docs = []
-    for hit in elastic_json['hits']['hits']:
-        elastic_docs.append(hit[field])
-    return elastic_docs
+    docs = []
+    for hit in res_data['hits']['hits']:
+        docs.append(hit[field])
+    return docs
 
 
 def get_elastic_docs_by_days(elastic_url, creds, days):
@@ -61,4 +69,4 @@ def get_elastic_docs_by_days(elastic_url, creds, days):
         }}'''.format(days)
     else:
         raise Exception('Update days must be non-negative')
-    return get_elastic_docs(elastic_url, creds, body)
+    return get_docs(elastic_url, creds, body)
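
After this refactor elastic_access exposes three public helpers (get_docs,
publish_docs and delete_docs) that all funnel through the private _request
wrapper. A minimal usage sketch, with a hypothetical Elasticsearch URL and
basic-auth pair:

    from common import elastic_access

    url = 'http://localhost:9200/test_results/mongo2elastic'  # hypothetical host
    creds = 'user:password'  # may also be None for an unsecured cluster

    docs = elastic_access.get_docs(url, creds)                # full documents
    ids = elastic_access.get_docs(url, creds, field='_id')    # ids only
    status, data = elastic_access.publish_docs(docs, creds, 'stdout')

Passing 'stdout' as the destination makes publish_docs print the serialized
documents and return (200, None), which is handy for a dry run.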
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/main.py b/utils/test/dashboard/dashboard/elastic2kibana/main.py
index 37ce03e..38a49b6 100644 (file)
@@ -5,7 +5,8 @@ import urlparse
 import argparse
 from jinja2 import PackageLoader, Environment
 
-from common import logger_utils, elastic_access
+from common import elastic_access
+from common import logger_utils
 from conf import testcases
 from conf.config import APIConfig
 
@@ -59,7 +60,7 @@ class KibanaDashboard(dict):
             url = urlparse.urljoin(base_elastic_url, '/.kibana/visualization/{}'.format(visualization.id))
             logger.debug("publishing visualization '{}'".format(url))
             # logger.error("_publish_visualization: %s" % visualization)
-            elastic_access.publish_json(visualization, es_creds, url)
+            elastic_access.publish_docs(visualization, es_creds, url)
 
     def _construct_panels(self):
         size_x = 6
@@ -137,7 +138,7 @@ class KibanaDashboard(dict):
     def _publish(self):
         url = urlparse.urljoin(base_elastic_url, '/.kibana/dashboard/{}'.format(self.id))
         logger.debug("publishing dashboard '{}'".format(url))
-        elastic_access.publish_json(self, es_creds, url)
+        elastic_access.publish_docs(self, es_creds, url)
 
     def publish(self):
         self._publish_visualizations()
@@ -251,7 +252,7 @@ def _get_pods_and_scenarios(project_name, case_name, installer):
         }
     })
 
-    elastic_data = elastic_access.get_elastic_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
+    elastic_data = elastic_access.get_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
                                                    es_creds, query_json)
 
     pods_and_scenarios = {}
diff --git a/utils/test/dashboard/dashboard/mongo2elastic/main.py b/utils/test/dashboard/dashboard/mongo2elastic/main.py
index 25b5320..82b01e4 100644 (file)
@@ -64,7 +64,7 @@ class DocumentPublisher:
             self._publish()
 
     def _publish(self):
-        status, data = elastic_access.publish_json(self.doc, self.creds, self.to)
+        status, data = elastic_access.publish_docs(self.doc, self.creds, self.to)
         if status > 300:
             logger.error('Publish record[{}] failed, due to [{}]'
                          .format(self.doc, json.loads(data)['error']['reason']))
diff --git a/utils/test/dashboard/kibana_cleanup.py b/utils/test/dashboard/kibana_cleanup.py
index 9ce4994..ee01900 100644 (file)
@@ -14,10 +14,10 @@ logger.addHandler(file_handler)
 
 
 def delete_all(url, es_creds):
-    ids = elastic_access.get_elastic_docs(url, es_creds, body=None, field='_id')
+    ids = elastic_access.get_docs(url, es_creds, body=None, field='_id')
     for id in ids:
         del_url = '/'.join([url, id])
-        elastic_access.delete_request(del_url, es_creds)
+        elastic_access.delete_docs(del_url, es_creds)
 
 
 if __name__ == '__main__':
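
delete_all now pairs the renamed helpers: it collects every document id via
get_docs(url, creds, field='_id') and issues one DELETE per id through
delete_docs. A direct call, with a hypothetical URL and no credentials for an
unsecured local cluster, would look like:

    # remove every visualization document from a local Elasticsearch instance
    delete_all('http://localhost:9200/.kibana/visualization', None)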