Merge "Add a trend line to Functest reporting And use d3js lib rather than static...
authorMorgan Richomme <morgan.richomme@orange.com>
Thu, 29 Sep 2016 15:23:01 +0000 (15:23 +0000)
committerGerrit Code Review <gerrit@172.30.200.206>
Thu, 29 Sep 2016 15:23:01 +0000 (15:23 +0000)
34 files changed:
jjb/daisy4nfv/daisy4nfv-basic.sh [new file with mode: 0755]
jjb/daisy4nfv/daisy4nfv-build.sh [new file with mode: 0755]
jjb/daisy4nfv/daisy4nfv-smoke-test.sh [new file with mode: 0755]
jjb/daisy4nfv/daisy4nfv-verify-jobs.yml [new file with mode: 0644]
jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh [new file with mode: 0755]
jjb/doctor/doctor.yml
jjb/dovetail/dovetail-ci-jobs.yml
jjb/fastpathmetrics/fastpathmetrics.yml
jjb/fuel/fuel-daily-jobs.yml [moved from jjb/fuel/fuel-ci-jobs.yml with 100% similarity]
jjb/fuel/fuel-plugin-verify-jobs.yml [new file with mode: 0644]
jjb/infra/bifrost-verify-jobs.yml [new file with mode: 0644]
jjb/infra/bifrost-verify.sh [moved from jjb/infra/openstack-bifrost-verify.sh with 56% similarity]
jjb/infra/infra-daily-jobs.yml [deleted file]
jjb/infra/infra-deploy.sh [deleted file]
jjb/infra/infra-provision.sh [deleted file]
jjb/infra/infra-smoketest.sh [deleted file]
jjb/infra/openstack-bifrost-verify-jobs.yml [deleted file]
jjb/joid/joid-daily-jobs.yml [moved from jjb/joid/joid-ci-jobs.yml with 100% similarity]
jjb/multisite/multisite.yml
jjb/opnfv/opnfv-docker.sh
jjb/opnfv/opnfv-docs.yml
jjb/opnfv/slave-params.yml
jjb/qtip/qtip-cleanup.sh
prototypes/bifrost/scripts/destroy-env.sh
utils/push-test-logs.sh
utils/test/dashboard/dashboard/common/elastic_access.py
utils/test/dashboard/dashboard/conf/config.py
utils/test/dashboard/dashboard/elastic2kibana/main.py
utils/test/dashboard/dashboard/elastic2kibana/templates/duration.json [new file with mode: 0644]
utils/test/dashboard/dashboard/elastic2kibana/templates/success_percentage.json [new file with mode: 0644]
utils/test/dashboard/dashboard/elastic2kibana/templates/tests_failures.json [new file with mode: 0644]
utils/test/dashboard/dashboard/mongo2elastic/main.py
utils/test/dashboard/etc/config.ini
utils/test/dashboard/kibana_cleanup.py

diff --git a/jjb/daisy4nfv/daisy4nfv-basic.sh b/jjb/daisy4nfv/daisy4nfv-basic.sh
new file mode 100755 (executable)
index 0000000..87f5482
--- /dev/null
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is daisy4nfv basic job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-build.sh b/jjb/daisy4nfv/daisy4nfv-build.sh
new file mode 100755 (executable)
index 0000000..9eae848
--- /dev/null
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is daisy4nfv build job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-smoke-test.sh b/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
new file mode 100755 (executable)
index 0000000..bd6eb7e
--- /dev/null
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is daisy4nfv smoke test job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
new file mode 100644 (file)
index 0000000..6444cf8
--- /dev/null
@@ -0,0 +1,228 @@
+- project:
+    name: 'daisy4nfv-verify-jobs'
+
+    project: 'daisy4nfv'
+
+    installer: 'daisy4nfv'
+#####################################
+# branch definitions
+#####################################
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+#####################################
+# patch verification phases
+#####################################
+    phase:
+        - 'basic':
+            slave-label: 'opnfv-build'
+        - 'build':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'deploy-virtual':
+            slave-label: 'opnfv-build'
+        - 'smoke-test':
+            slave-label: 'opnfv-build'
+#####################################
+# jobs
+#####################################
+    jobs:
+        - 'daisy4nfv-verify-{stream}'
+        - 'daisy4nfv-verify-{phase}-{stream}'
+#####################################
+# job templates
+#####################################
+- job-template:
+    name: 'daisy4nfv-verify-{stream}'
+
+    project-type: multijob
+
+    disabled: false
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            option: 'project'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: '{project}'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'docs/**|.gitignore'
+            readable-message: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - 'opnfv-build-defaults'
+        - 'daisy4nfv-verify-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - multijob:
+            name: basic
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-basic-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: build
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-build-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: deploy-virtual
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-deploy-virtual-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: smoke-test
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-smoke-test-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+
+- job-template:
+    name: 'daisy4nfv-verify-{phase}-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 6
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'daisy4nfv-verify-deploy-.*'
+                - 'daisy4nfv-verify-test-.*'
+            block-level: 'NODE'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - '{slave-label}-defaults'
+        - 'daisy4nfv-verify-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - '{project}-verify-{phase}-macro'
+#####################################
+# builder macros
+#####################################
+- builder:
+    name: 'daisy4nfv-verify-basic-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-basic.sh
+
+- builder:
+    name: 'daisy4nfv-verify-build-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-build.sh
+
+- builder:
+    name: 'daisy4nfv-verify-deploy-virtual-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-virtual-deploy.sh
+
+- builder:
+    name: 'daisy4nfv-verify-smoke-test-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Not activated!"
+#####################################
+# parameter macros
+#####################################
+- parameter:
+    name: 'daisy4nfv-verify-defaults'
+    parameters:
+        - string:
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
+        - string:
+            name: CACHE_DIRECTORY
+            default: $HOME/opnfv/cache/$INSTALLER_TYPE
+            description: "Directory where the cache to be used during the build is located."
+        - string:
+            name: GS_URL
+            default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+            description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh b/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
new file mode 100755 (executable)
index 0000000..8936be6
--- /dev/null
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is daisy4nfv virtual deploy job!"
+echo "--------------------------------------------------------"
+
index f93ac9b..4958ca2 100644 (file)
               TESTCASE_OPTIONS=-e INSPECTOR_TYPE=congress -v $WORKSPACE:$HOME/opnfv/repos/doctor
             block: true
             same-node: true
+
+    publishers:
+        - postbuildscript:
+            builders:
+                - functest-copy-suite-log:
+                    suite: '{project}'
+        - archive:
+            artifacts: '{project}.log'
+
+- builder:
+    name: functest-copy-suite-log
+    builders:
         - shell: |
-            logfile=$HOME/opnfv/functest/results/{stream}/doctor.log
-            echo
-            echo "[$logfile]"
-            echo
-            [ -e $logfile ] && cat $logfile
+            cp $HOME/opnfv/functest/results/${{GIT_BRANCH##*/}}/{suite}.log $WORKSPACE/
index 9d2f69d..1dd1795 100644 (file)
     master: &master
         stream: master
         branch: '{stream}'
+        dovetail-branch: '{stream}'
         gs-pathname: ''
         docker-tag: 'latest'
     colorado: &colorado
         stream: colorado
         branch: 'stable/{stream}'
-        gs-pathname: '{stream}'
+        dovetail-branch: master
+        gs-pathname: '/{stream}'
         docker-tag: 'latest'
 
 #-----------------------------------
         - string:
             name: DOCKER_TAG
             default: '{docker-tag}'
-            description: 'Tag to pull docker image'
+            description: 'Tag to pull dovetail docker image'
         - string:
             name: CI_DEBUG
             default: 'true'
         - git-scm:
             credentials-id: '{ssh-credentials}'
             refspec: ''
-            branch: '{branch}'
+            branch: '{dovetail-branch}'
 
     builders:
         - description-setter:
index 504e07f..40df385 100644 (file)
@@ -18,7 +18,7 @@
             gs-pathname: ''
             disabled: false
         - colorado:
-            branch: '{stream}'
+            branch: 'stable/{stream}'
             gs-pathname: '/{stream}'
             disabled: false
 
diff --git a/jjb/fuel/fuel-plugin-verify-jobs.yml b/jjb/fuel/fuel-plugin-verify-jobs.yml
new file mode 100644 (file)
index 0000000..affc705
--- /dev/null
@@ -0,0 +1,226 @@
+- project:
+    name: 'fuel-plugin-verify-jobs'
+
+    project: 'fuel-plugin'
+
+    installer: 'fuel'
+#####################################
+# branch definitions
+#####################################
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+#####################################
+# patch verification phases
+#####################################
+    phase:
+        - 'build':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'test':
+            slave-label: 'opnfv-build-ubuntu'
+#####################################
+# jobs
+#####################################
+    jobs:
+        - 'fuel-verify-plugin-{stream}'
+        - 'fuel-verify-plugin-{phase}-{stream}'
+#####################################
+# job templates
+#####################################
+- job-template:
+    name: 'fuel-verify-plugin-{stream}'
+
+    project-type: multijob
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            option: 'project'
+
+    # the url to plugin repo can essentially become a variable if
+    # the plugin name is injected to env by gerrit plugin
+    scm:
+        - git:
+            url: 'https://git.openstack.org/openstack/fuel-plugin-bgpvpn'
+            refspec: '$GERRIT_REFSPEC'
+            branches:
+                - 'origin/$GERRIT_BRANCH'
+            skip-tag: true
+            choosing-strategy: 'gerrit'
+            timeout: 10
+            wipe-workspace: true
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    triggers:
+        - gerrit:
+            server-name: 'review.openstack.org'
+            silent-start: false
+            skip-vote:
+                successful: true
+                failed: true
+                unstable: true
+                notbuilt: true
+            escape-quotes: true
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'PLAIN'
+                project-pattern: 'openstack/fuel-plugin-bgpvpn'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'README.md|.gitignore|.gitreview'
+            readable-message: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - 'opnfv-build-defaults'
+        - 'fuel-verify-plugin-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - multijob:
+            name: build
+            condition: SUCCESSFUL
+            projects:
+                - name: 'fuel-verify-plugin-build-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: test
+            condition: SUCCESSFUL
+            projects:
+                - name: 'fuel-verify-plugin-test-{stream}'
+                  current-parameters: false
+                  predefined-parameters: |
+                    GERRIT_BRANCH=$GERRIT_BRANCH
+                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+                    GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+
+- job-template:
+    name: 'fuel-verify-plugin-{phase}-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 6
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'fuel-verify-plugin-test-.*'
+            block-level: 'NODE'
+
+    # the url to plugin repo can essentially become a variable if
+    # the plugin name is injected to env by gerrit plugin
+    scm:
+        - git:
+            url: 'https://git.openstack.org/openstack/fuel-plugin-bgpvpn'
+            refspec: '$GERRIT_REFSPEC'
+            branches:
+                - 'origin/$GERRIT_BRANCH'
+            skip-tag: true
+            choosing-strategy: 'gerrit'
+            timeout: 10
+            wipe-workspace: true
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - '{slave-label}-defaults'
+        - '{installer}-defaults'
+        - 'fuel-verify-plugin-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - 'fuel-verify-plugin-{phase}-macro'
+#####################################
+# builder macros
+#####################################
+- builder:
+    name: 'fuel-verify-plugin-build-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Not activated!"
+
+- builder:
+    name: 'fuel-verify-plugin-test-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Not activated!"
+#####################################
+# parameter macros
+#####################################
+- parameter:
+    name: 'fuel-verify-plugin-defaults'
+    parameters:
+        - string:
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
+        - string:
+            name: CACHE_DIRECTORY
+            default: $HOME/opnfv/cache/$INSTALLER_TYPE
+            description: "Directory where the cache to be used during the build is located."
+        - string:
+            name: GS_URL
+            default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+            description: "URL to Google Storage."
diff --git a/jjb/infra/bifrost-verify-jobs.yml b/jjb/infra/bifrost-verify-jobs.yml
new file mode 100644 (file)
index 0000000..b117b32
--- /dev/null
@@ -0,0 +1,176 @@
+- project:
+    name: 'openstack-bifrost-verify'
+#--------------------------------
+# branches
+#--------------------------------
+    stream:
+        - master:
+            branch: '{stream}'
+#--------------------------------
+# projects
+#--------------------------------
+    project:
+        - 'openstack':
+            project-repo: 'https://git.openstack.org/openstack/bifrost'
+            clone-location: '/opt/bifrost'
+        - 'opnfv':
+            project-repo: 'https://gerrit.opnfv.org/gerrit/releng'
+            clone-location: '/opt/releng'
+#--------------------------------
+# distros
+#--------------------------------
+    distro:
+        - 'trusty':
+            disabled: false
+            dib-os-release: 'trusty'
+            dib-os-element: 'ubuntu-minimal'
+            dib-os-packages: 'openssh-server,vlan,vim,less,bridge-utils,language-pack-en,iputils-ping,rsyslog,curl'
+        - 'centos7':
+            disabled: false
+            dib-os-release: '7'
+            dib-os-element: 'centos-minimal'
+            dib-os-packages: 'openssh-server,vim,less,bridge-utils,iputils,rsyslog,curl'
+        - 'suse':
+            disabled: true
+            dib-os-release: 'suse'
+            dib-os-element: 'suse'
+            dib-os-packages: ''
+#--------------------------------
+# type
+#--------------------------------
+    type:
+        - virtual
+#--------------------------------
+# jobs
+#--------------------------------
+    jobs:
+        - '{project}-bifrost-verify-{distro}-{type}-{stream}'
+#--------------------------------
+# job templates
+#--------------------------------
+- job-template:
+    name: '{project}-bifrost-verify-{distro}-{type}-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: false
+
+    properties:
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - '.*-bifrost-verify.*-{type}'
+            block-level: 'NODE'
+
+    parameters:
+        - string:
+            name: PROJECT
+            default: '{project}'
+        - string:
+            name: PROJECT_REPO
+            default: '{project-repo}'
+        - string:
+            name: CLONE_LOCATION
+            default: '{clone-location}'
+        - string:
+            name: DISTRO
+            default: '{distro}'
+        - string:
+            name: DIB_OS_RELEASE
+            default: '{dib-os-release}'
+        - string:
+            name: DIB_OS_ELEMENT
+            default: '{dib-os-element}'
+        - string:
+            name: DIB_OS_PACKAGES
+            default: '{dib-os-packages}'
+        - string:
+            name: CLEAN_DIB_IMAGES
+            default: 'true'
+        - label:
+            name: SLAVE_LABEL
+            default: 'infra-{type}-{distro}'
+
+    scm:
+        - git:
+            url: '$PROJECT_REPO'
+            refspec: '$GERRIT_REFSPEC'
+            branches:
+                - 'origin/$GERRIT_BRANCH'
+            skip-tag: true
+            choosing-strategy: 'gerrit'
+            timeout: 10
+            wipe-workspace: true
+
+    triggers:
+        - '{project}-gerrit-trigger':
+            branch: '{branch}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - shell:
+            !include-raw-escape: ./bifrost-verify.sh
+
+    publishers:
+        - email:
+            recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
+#--------------------------------
+# trigger macros
+#--------------------------------
+- trigger:
+    name: 'openstack-gerrit-trigger'
+    triggers:
+        - gerrit:
+            server-name: 'review.openstack.org'
+            silent-start: true
+            skip-vote:
+                successful: true
+                failed: true
+                unstable: true
+                notbuilt: true
+            escape-quotes: true
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+            projects:
+              - project-compare-type: 'PLAIN'
+                project-pattern: 'openstack/bifrost'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                forbidden-file-paths:
+                  - compare-type: ANT
+                    pattern: 'doc/**'
+                  - compare-type: ANT
+                    pattern: 'releasenotes/**'
+            readable-message: true
+- trigger:
+    name: 'opnfv-gerrit-trigger'
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'ANT'
+                project-pattern: 'releng'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                file-paths:
+                  - compare-type: ANT
+                    pattern: 'prototypes/bifrost/**'
+            readable-message: true
similarity index 56%
rename from jjb/infra/openstack-bifrost-verify.sh
rename to jjb/infra/bifrost-verify.sh
index c17cb88..759b50b 100755 (executable)
@@ -17,43 +17,38 @@ function fix_ownership() {
     if [ -z "${JOB_URL+x}" ]; then
         echo "Not running as part of Jenkins. Handle the logs manually."
     else
-        chown -R jenkins:jenkins $WORKSPACE
+        sudo chown -R jenkins:jenkins $WORKSPACE
     fi
 }
 
 # check distro to see if we support it
-# we will have centos and suse supported in future
-case "$DISTRO" in
-    trusty)
-        #start the test
-        echo "Starting provisioning of 3 VMs"
-        ;;
-    *)
-        echo "Distro $DISTRO is not supported!"
-        exit 1
-esac
+if [[ ! "$DISTRO" =~ (trusty|centos7|suse) ]]; then
+    echo "Distro $DISTRO is not supported!"
+    exit 1
+fi
 
 # remove previously cloned repos
-/bin/rm -rf /opt/bifrost /opt/puppet-infracloud /opt/stack /opt/releng
+sudo /bin/rm -rf /opt/bifrost /opt/puppet-infracloud /opt/stack /opt/releng
 
-# clone upstream bifrost repo and checkout the patch to verify
-git clone https://git.openstack.org/openstack/bifrost /opt/bifrost
-cd /opt/bifrost
-git fetch https://git.openstack.org/openstack/bifrost $GERRIT_REFSPEC && git checkout FETCH_HEAD
+# clone all the repos first and checkout the patch afterwards
+sudo git clone https://git.openstack.org/openstack/bifrost /opt/bifrost
+sudo git clone https://git.openstack.org/openstack-infra/puppet-infracloud /opt/puppet-infracloud
+sudo git clone https://gerrit.opnfv.org/gerrit/releng /opt/releng
 
-# clone puppet-infracloud
-git clone https://git.openstack.org/openstack-infra/puppet-infracloud /opt/puppet-infracloud
+# checkout the patch
+cd $CLONE_LOCATION
+sudo git fetch $PROJECT_REPO $GERRIT_REFSPEC && sudo git checkout FETCH_HEAD
 
 # combine opnfv and upstream scripts/playbooks
-cp -R $WORKSPACE/prototypes/bifrost/* /opt/bifrost/
+sudo /bin/cp -rf /opt/releng/prototypes/bifrost/* /opt/bifrost/
 
 # cleanup remnants of previous deployment
 cd /opt/bifrost
-./scripts/destroy-env.sh
+sudo -E ./scripts/destroy-env.sh
 
 # provision 3 VMs; jumphost, controller, and compute
 cd /opt/bifrost
-./scripts/test-bifrost-deployment.sh
+sudo -E ./scripts/test-bifrost-deployment.sh
 
 # list the provisioned VMs
 cd /opt/bifrost
diff --git a/jjb/infra/infra-daily-jobs.yml b/jjb/infra/infra-daily-jobs.yml
deleted file mode 100644 (file)
index d779d56..0000000
+++ /dev/null
@@ -1,166 +0,0 @@
-- project:
-    name: 'infra-daily-jobs'
-
-    project: 'releng'
-
-    installer: 'infra'
-#--------------------------------
-# BRANCH ANCHORS
-#--------------------------------
-    master: &master
-        stream: master
-        branch: '{stream}'
-        gs-pathname: ''
-#--------------------------------
-#        CI Slaves
-#--------------------------------
-    pod:
-        - virtual:
-            slave-label: infra-virtual-trusty
-            <<: *master
-#--------------------------------
-#       phases
-#--------------------------------
-    phase:
-        - 'provision'
-        - 'deploy'
-        - 'smoketest'
-#--------------------------------
-#       scenarios
-#--------------------------------
-    scenario:
-        - 'os-nosdn-nofeature-noha'
-#--------------------------------
-#       jobs
-#--------------------------------
-    jobs:
-        - 'infra-{scenario}-{pod}-daily-{stream}'
-        - 'infra-{phase}-{pod}-daily-{stream}'
-########################
-# job templates
-########################
-- job-template:
-    name: 'infra-{scenario}-{pod}-daily-{stream}'
-
-    concurrent: false
-
-    properties:
-        - build-blocker:
-            use-build-blocker: true
-            blocking-jobs:
-                - 'infra-os-.*?-{pod}-daily-{stream}'
-            block-level: 'NODE'
-
-    wrappers:
-        - build-name:
-            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-    triggers:
-        - timed: 'H */3 * * *'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-        - '{installer}-defaults'
-        - '{slave-label}-defaults'
-        - string:
-            name: DEPLOY_SCENARIO
-            default: '{scenario}'
-
-    builders:
-        - description-setter:
-            description: "Built on $NODE_NAME"
-        - trigger-builds:
-            - project: 'infra-provision-{pod}-daily-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO={scenario}
-              same-node: true
-              block: true
-        - trigger-builds:
-            - project: 'infra-deploy-{pod}-daily-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO={scenario}
-              same-node: true
-              block: true
-        - trigger-builds:
-            - project: 'infra-smoketest-{pod}-daily-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO={scenario}
-              block: true
-              same-node: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
-
-    publishers:
-        - email:
-            recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
-
-- job-template:
-    name: 'infra-{phase}-{pod}-daily-{stream}'
-
-    concurrent: false
-
-    properties:
-        - build-blocker:
-            use-build-blocker: true
-            blocking-jobs:
-                - 'infra-provision-{pod}-daily-{stream}'
-                - 'infra-deploy-{pod}-daily-{stream}'
-                - 'infra-smoketest-{pod}-daily-{stream}'
-            block-level: 'NODE'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-        - '{installer}-defaults'
-        - '{slave-label}-defaults'
-        - string:
-            name: DEPLOY_SCENARIO
-            default: 'os-nosdn-nofeature-noha'
-        - string:
-            name: CLEAN_DIB_IMAGES
-            default: 'false'
-
-    scm:
-        - git-scm:
-            credentials-id: '{ssh-credentials}'
-            refspec: ''
-            branch: '{branch}'
-
-    wrappers:
-        - build-name:
-            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-    builders:
-        - description-setter:
-            description: "Built on $NODE_NAME"
-        - 'infra-{phase}-daily-builder'
-#####################################
-# builder macros
-#####################################
-- builder:
-    name: 'infra-provision-daily-builder'
-    builders:
-        - shell: |
-            #!/bin/bash
-
-            sudo $WORKSPACE/jjb/infra/infra-provision.sh
-- builder:
-    name: 'infra-deploy-daily-builder'
-    builders:
-        - shell: |
-            #!/bin/bash
-
-            sudo $WORKSPACE/jjb/infra/infra-deploy.sh
-- builder:
-    name: 'infra-smoketest-daily-builder'
-    builders:
-        - shell: |
-            #!/bin/bash
-
-            sudo $WORKSPACE/jjb/infra/infra-smoketest.sh
diff --git a/jjb/infra/infra-deploy.sh b/jjb/infra/infra-deploy.sh
deleted file mode 100755 (executable)
index 35ef9a1..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-echo "Not activated!"
diff --git a/jjb/infra/infra-provision.sh b/jjb/infra/infra-provision.sh
deleted file mode 100755 (executable)
index 45ed3b9..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-set -xe
-
-if [[ $(whoami) != "root" ]]; then
-    echo "Error: This script must be run as root!"
-    exit 1
-fi
-
-# remove previously cloned repos
-/bin/rm -rf /opt/bifrost /opt/puppet-infracloud /opt/releng
-
-# clone upstream repos
-git clone https://git.openstack.org/openstack/bifrost /opt/bifrost
-git clone https://git.openstack.org/openstack-infra/puppet-infracloud /opt/puppet-infracloud
-
-# clone opnfv releng repo
-git clone https://gerrit.opnfv.org/gerrit/releng /opt/releng
-
-# combine opnfv and upstream scripts/playbooks
-cp -R /opt/releng/prototypes/bifrost/* /opt/bifrost/
-
-# cleanup remnants of previous deployment
-cd /opt/bifrost
-./scripts/destroy-env.sh
-
-# provision 3 VMs; jumphost, controller, and compute
-cd /opt/bifrost
-./scripts/test-bifrost-deployment.sh
-
-# list the provisioned VMs
-cd /opt/bifrost
-source env-vars
-ironic node-list
-virsh list
diff --git a/jjb/infra/infra-smoketest.sh b/jjb/infra/infra-smoketest.sh
deleted file mode 100755 (executable)
index 35ef9a1..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-echo "Not activated!"
diff --git a/jjb/infra/openstack-bifrost-verify-jobs.yml b/jjb/infra/openstack-bifrost-verify-jobs.yml
deleted file mode 100644 (file)
index 8afe47c..0000000
+++ /dev/null
@@ -1,111 +0,0 @@
-- project:
-    name: 'openstack-bifrost-verify'
-
-    project: 'releng'
-#--------------------------------
-# branches
-#--------------------------------
-    stream:
-        - master:
-            branch: '{stream}'
-#--------------------------------
-# distros
-# jobs for centos7 and suse can be enabled once the support is there
-#--------------------------------
-    distro:
-        - 'trusty':
-            slave-label: infra-virtual-trusty
-            disabled: false
-        - 'centos7':
-            slave-label: infra-virtual-trusty
-            disabled: true
-        - 'suse':
-            slave-label: infra-virtual-trusty
-            disabled: true
-#--------------------------------
-# jobs
-#--------------------------------
-    jobs:
-        - 'openstack-bifrost-verify-{distro}-{stream}'
-#--------------------------------
-# job templates
-#--------------------------------
-- job-template:
-    name: 'openstack-bifrost-verify-{distro}-{stream}'
-
-    concurrent: false
-
-    disabled: '{obj:disabled}'
-
-    properties:
-        - build-blocker:
-            use-build-blocker: true
-            blocking-jobs:
-                - 'infra-os-.*?-daily-.*'
-            block-level: 'NODE'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-        - string:
-            name: DISTRO
-            default: '{distro}'
-        - string:
-            name: CLEAN_DIB_IMAGES
-            default: 'true'
-        - '{slave-label}-defaults'
-
-    scm:
-        - git-scm:
-            credentials-id: '{ssh-credentials}'
-            refspec: ''
-            branch: '{branch}'
-
-    triggers:
-        - gerrit:
-            server-name: 'review.openstack.org'
-            silent-start: true
-            skip-vote:
-                successful: true
-                failed: true
-                unstable: true
-                notbuilt: true
-            escape-quotes: true
-            trigger-on:
-                - patchset-created-event:
-                    exclude-drafts: 'false'
-                    exclude-trivial-rebase: 'false'
-                    exclude-no-code-change: 'false'
-                - comment-added-contains-event:
-                    comment-contains-value: 'recheck'
-            projects:
-              - project-compare-type: 'PLAIN'
-                project-pattern: 'openstack/bifrost'
-                branches:
-                  - branch-compare-type: 'ANT'
-                    branch-pattern: '**/master'
-                forbidden-file-paths:
-                  - compare-type: ANT
-                    pattern: 'doc/**'
-                  - compare-type: ANT
-                    pattern: 'releasenotes/**'
-            readable-message: true
-
-    builders:
-        - description-setter:
-            description: "Built on $NODE_NAME"
-        - 'openstack-bifrost-verify-builder'
-
-    publishers:
-        - email:
-            recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
-#####################################
-# builder macros
-#####################################
-- builder:
-    name: 'openstack-bifrost-verify-builder'
-    builders:
-        - shell: |
-            #!/bin/bash
-
-            sudo -E $WORKSPACE/jjb/infra/openstack-bifrost-verify.sh
index 21b9730..24c03fd 100644 (file)
               current-parameters: true
               same-node: true
               block: true
-        - trigger-builds:
-            - project: 'functest-fuel-virtual-suite-{stream}'
-              current-parameters: true
-              predefined-parameters:
-                FUNCTEST_SUITE_NAME=healthcheck
-              same-node: true
-              block: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
         - trigger-builds:
             - project: 'functest-fuel-virtual-suite-{stream}'
               current-parameters: true
index c5edf7c..f56de7f 100644 (file)
@@ -59,7 +59,7 @@ if [[ "$UPDATE_LATEST_STABLE" == "true" ]]; then
         echo "ERROR: The image $DOCKER_REPO_NAME with tag $STABLE_TAG does not exist."
         exit 1
     fi
-    docker tag -f $DOCKER_REPO_NAME:$STABLE_TAG $DOCKER_REPO_NAME:latest_stable
+    docker tag $DOCKER_REPO_NAME:$STABLE_TAG $DOCKER_REPO_NAME:latest_stable
     echo "Pushing $DOCKER_REPO_NAME:latest_stable ..."
     docker push $DOCKER_REPO_NAME:latest_stable
     exit 0
@@ -119,7 +119,7 @@ else
 fi
 
 echo "Creating tag '$DOCKER_TAG'..."
-docker tag -f $DOCKER_REPO_NAME:$DOCKER_BRANCH_TAG $DOCKER_REPO_NAME:$DOCKER_TAG
+docker tag $DOCKER_REPO_NAME:$DOCKER_BRANCH_TAG $DOCKER_REPO_NAME:$DOCKER_TAG
 
 # list the images
 echo "Available images are:"
index 7436573..0ac8aa7 100644 (file)
     stream:
         - master:
             branch: '{stream}'
+            doc-version: ''
             gs-pathname: ''
             disabled: false
         - colorado:
             branch: 'stable/{stream}'
-            gs-pathname: '/{stream}'
+            doc-version: '2.0'
+            gs-pathname: '/{stream}/{doc-version}'
             disabled: false
 
 ########################
@@ -82,7 +84,7 @@
         - string:
             name: GS_URL
             default: '$GS_BASE{gs-pathname}'
-            description: "Directory where the build artifact will be located upon the completion     of the build."
+            description: "Directory where the build artifact will be located upon the completion of the build."
 
     scm:
         - gerrit-trigger-scm:
index ff24e9e..6cbaba4 100644 (file)
             name: GIT_BASE
             default: https://gerrit.opnfv.org/gerrit/$PROJECT
             description: 'Git URL to use on this Jenkins Slave'
-- parameter:
-    name: 'infra-virtual-trusty-defaults'
-    parameters:
-        - label:
-            name: SLAVE_LABEL
-            default: 'infra-virtual-trusty'
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: 'Git URL to use on this Jenkins Slave'
 #####################################################
 # Parameters for build slaves
 #####################################################
index b923aa2..95babb3 100644 (file)
@@ -6,15 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-echo "Cleaning up QTIP  docker containers/images..."
-
 # Remove previous running containers if exist
 if [[ ! -z $(docker ps -a | grep opnfv/qtip) ]]; then
     echo "Removing existing opnfv/qtip containers..."
-    running_containers=$(docker ps | grep opnfv/qtip | awk '{print $1}')
-    docker stop ${running_containers}
-    all_containers=$(docker ps -a | grep opnfv/qtip | awk '{print $1}')
-    docker rm ${all_containers}
+    # workaround: sometimes it throws an error when stopping qtip container.
+    # To make sure ci job unblocked, remove qtip container by force without stopping it.
+    docker rm -f $(docker ps -a | grep opnfv/qtip | awk '{print $1}')
 fi
 
 # Remove existing images if exist
@@ -27,4 +24,3 @@ if [[ ! -z $(docker images | grep opnfv/qtip) ]]; then
         docker rmi opnfv/qtip:$tag
     done
 fi
-
index 72ade5b..6746457 100755 (executable)
@@ -37,8 +37,9 @@ rm -rf /var/log/libvirt/baremetal_logs/*.log
 CLEAN_DIB_IMAGES=${CLEAN_DIB_IMAGES:-false}
 
 if [ $CLEAN_DIB_IMAGES = "true" ]; then
-    rm -rf /httpboot
-    rm -rf /tftpboot
+    rm -rf /httpboot /tftpboot
+    mkdir /httpboot /tftpboot
+    chmod -R 755 /httpboot /tftpboot
 fi
 
 # remove VM disk images
index f24d884..87cee78 100644 (file)
@@ -23,7 +23,8 @@ dir_result="${HOME}/opnfv/$project/results/${branch}"
 node_list=(\
 'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod3' \
 'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \
-'ericsson-pod2' \
+'ericsson-pod2' 'ericsson-pod3' 'ericsson-pod4' \
+'ericsson-virtual2' 'ericsson-virtual3' 'ericsson-virtual4' 'ericsson-virtual5' \
 'arm-pod1' 'arm-pod3' \
 'huawei-pod1' 'huawei-pod2' 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4')
 
index e90a17f..8c6494d 100644 (file)
@@ -5,60 +5,41 @@ import urllib3
 http = urllib3.PoolManager()
 
 
-def delete_request(url, creds, body=None):
+def _request(method, url, creds=None, body=None):
     headers = urllib3.make_headers(basic_auth=creds)
-    http.request('DELETE', url, headers=headers, body=body)
+    return http.request(method, url, headers=headers, body=body)
 
 
-def publish_json(json_ojb, creds, to):
-    json_dump = json.dumps(json_ojb)
-    if to == 'stdout':
-        print json_dump
-        return 200, None
-    else:
-        headers = urllib3.make_headers(basic_auth=creds)
-        result = http.request('POST', to, headers=headers, body=json_dump)
-        return result.status, result.data
+def _post(url, creds=None, body=None):
+    return _request('POST', url, creds=creds, body=body)
 
 
-def _get_nr_of_hits(elastic_json):
-    return elastic_json['hits']['total']
+def _get(url, creds=None, body=None):
+    return json.loads(_request('GET', url, creds=creds, body=body).data)
 
 
-def get_elastic_docs(elastic_url, creds, body=None, field = '_source'):
+def delete_docs(url, creds=None, body=None):
+    return _request('DELETE', url, creds=creds, body=body)
 
-    # 1. get the number of results
-    headers = urllib3.make_headers(basic_auth=creds)
-    elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size=0', headers=headers, body=body).data)
-    print elastic_json
-    nr_of_hits = _get_nr_of_hits(elastic_json)
-
-    # 2. get all results
-    elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size={}'.format(nr_of_hits), headers=headers, body=body).data)
-
-    elastic_docs = []
-    for hit in elastic_json['hits']['hits']:
-        elastic_docs.append(hit[field])
-    return elastic_docs
-
-
-def get_elastic_docs_by_days(elastic_url, creds, days):
-    if days == 0:
-        body = '''{
-            "query": {
-                "match_all": {}
-            }
-        }'''
-    elif days > 0:
-        body = '''{{
-            "query" : {{
-                "range" : {{
-                    "start_date" : {{
-                        "gte" : "now-{}d"
-                    }}
-                }}
-            }}
-        }}'''.format(days)
-    else:
-        raise Exception('Update days must be non-negative')
-    return get_elastic_docs(elastic_url, creds, body)
+
+def publish_docs(url, creds=None, body=None):
+    result = _post(url, creds=creds, body=(json.dumps(body)))
+    return result.status, result.data
+
+
+def _get_docs_nr(url, creds=None, body=None):
+    res_data = _get('{}/_search?size=0'.format(url), creds=creds, body=body)
+    print type(res_data), res_data
+    return res_data['hits']['total']
+
+
+def get_docs(url, creds=None, body=None, field='_source'):
+
+    docs_nr = _get_docs_nr(url, creds=creds, body=body)
+    res_data = _get('{}/_search?size={}'.format(url, docs_nr),
+                    creds=creds, body=body)
+
+    docs = []
+    for hit in res_data['hits']['hits']:
+        docs.append(hit[field])
+    return docs
index 2e0f1ca..b868999 100644 (file)
@@ -25,7 +25,6 @@ class APIConfig:
         self._default_config_location = "../etc/config.ini"
         self.elastic_url = 'http://localhost:9200'
         self.elastic_creds = None
-        self.destination = 'elasticsearch'
         self.kibana_url = None
         self.is_js = True
         self.js_path = None
@@ -67,7 +66,6 @@ class APIConfig:
         # Linking attributes to keys from file with their sections
         obj.elastic_url = obj._get_str_parameter("elastic", "url")
         obj.elastic_creds = obj._get_str_parameter("elastic", "creds")
-        obj.destination = obj._get_str_parameter("output", "destination")
         obj.kibana_url = obj._get_str_parameter("kibana", "url")
         obj.is_js = obj._get_bool_parameter("kibana", "js")
         obj.js_path = obj._get_str_parameter("kibana", "js_path")
@@ -77,12 +75,10 @@ class APIConfig:
     def __str__(self):
         return "elastic_url = %s \n" \
                "elastic_creds = %s \n" \
-               "destination = %s \n" \
                "kibana_url = %s \n" \
                "is_js = %s \n" \
                "js_path = %s \n" % (self.elastic_url,
-                                        self.elastic_creds,
-                                        self.destination,
-                                        self.kibana_url,
-                                        self.is_js,
-                                        self.js_path)
+                                    self.elastic_creds,
+                                    self.kibana_url,
+                                    self.is_js,
+                                    self.js_path)
index c1cbc30..95f758e 100644 (file)
@@ -3,8 +3,10 @@ import json
 import urlparse
 
 import argparse
+from jinja2 import PackageLoader, Environment
 
-from common import logger_utils, elastic_access
+from common import elastic_access
+from common import logger_utils
 from conf import testcases
 from conf.config import APIConfig
 
@@ -51,13 +53,14 @@ class KibanaDashboard(dict):
                                                                    scenario,
                                                                    self.visualization))
 
-        self._visualization_title = self._kibana_visualizations[0].vis_state_title
+        self._visualization_title = self._kibana_visualizations[0].vis_title
 
     def _publish_visualizations(self):
         for visualization in self._kibana_visualizations:
             url = urlparse.urljoin(base_elastic_url, '/.kibana/visualization/{}'.format(visualization.id))
             logger.debug("publishing visualization '{}'".format(url))
-            elastic_access.publish_json(visualization, es_creds, url)
+            # logger.error("_publish_visualization: %s" % visualization)
+            elastic_access.publish_docs(url, es_creds, visualization)
 
     def _construct_panels(self):
         size_x = 6
@@ -135,7 +138,7 @@ class KibanaDashboard(dict):
     def _publish(self):
         url = urlparse.urljoin(base_elastic_url, '/.kibana/dashboard/{}'.format(self.id))
         logger.debug("publishing dashboard '{}'".format(url))
-        elastic_access.publish_json(self, es_creds, url)
+        elastic_access.publish_docs(url, es_creds, self)
 
     def publish(self):
         self._publish_visualizations()
@@ -163,67 +166,29 @@ class KibanaSearchSourceJSON(dict):
             self["filter"].append({"match": {"pod_name": {"query": pod, "type": "phrase"}}})
 
 
-class VisualizationState(dict):
+class VisualizationBuilder(object):
     def __init__(self, visualization):
-        super(VisualizationState, self).__init__()
-        name = visualization.get('name')
-        fields = visualization.get('fields')
-
-        if name == 'tests_failures':
-            mode = 'grouped'
-            metric_type = 'sum'
-            self['type'] = 'histogram'
-        else:
-            # duration or success_percentage
-            mode = 'stacked'
-            metric_type = 'avg'
-            self['type'] = 'line'
-
-        self['params'] = {
-            "shareYAxis": True,
-            "addTooltip": True,
-            "addLegend": True,
-            "smoothLines": False,
-            "scale": "linear",
-            "interpolate": "linear",
-            "mode": mode,
-            "times": [],
-            "addTimeMarker": False,
-            "defaultYExtents": False,
-            "setYExtents": False,
-            "yAxis": {}
-        }
+        super(VisualizationBuilder, self).__init__()
+        self.visualization = visualization
 
-        self['aggs'] = []
+    def build(self):
+        name = self.visualization.get('name')
+        fields = self.visualization.get('fields')
 
-        i = 1
+        aggs = []
+        index = 1
         for field in fields:
-            self['aggs'].append({
-                "id": str(i),
-                "type": metric_type,
-                "schema": "metric",
-                "params": {
-                    "field": field.get('field')
-                }
-            })
-            i += 1
-
-        self['aggs'].append({
-                "id": str(i),
-                "type": 'date_histogram',
-                "schema": "segment",
-                "params": {
-                    "field": "start_date",
-                    "interval": "auto",
-                    "customInterval": "2h",
-                    "min_doc_count": 1,
-                    "extended_bounds": {}
-                }
+            aggs.append({
+                "id": index,
+                "field": field.get("field")
             })
+            index += 1
 
-        self['listeners'] = {}
-        self['title'] = ' '.join(['{} {}'.format(x['type'], x['params']['field']) for x in self['aggs']
-                                  if x['schema'] == 'metric'])
+        env = Environment(loader=PackageLoader('elastic2kibana', 'templates'))
+        env.filters['jsonify'] = json.dumps
+        template = env.get_template('{}.json'.format(name))
+        vis = template.render(aggs=aggs)
+        return json.loads(vis)
 
 
 class KibanaVisualization(dict):
@@ -243,24 +208,24 @@ class KibanaVisualization(dict):
         :return:
         """
         super(KibanaVisualization, self).__init__()
-        vis_state = VisualizationState(visualization)
-        self.vis_state_title = vis_state['title']
+        vis = VisualizationBuilder(visualization).build()
+        self.vis_title = vis['title']
         self['title'] = '{} {} {} {} {} {}'.format(project_name,
                                                    case_name,
-                                                   self.vis_state_title,
+                                                   self.vis_title,
                                                    installer,
                                                    pod,
                                                    scenario)
         self.id = self['title'].replace(' ', '-').replace('/', '-')
-        self['visState'] = json.dumps(vis_state, separators=(',', ':'))
+        self['visState'] = json.dumps(vis, separators=(',', ':'))
         self['uiStateJSON'] = "{}"
-        self['description'] = "Kibana visualization for project_name '{}', case_name '{}', data '{}', installer '{}'," \
+        self['description'] = "Kibana visualization for project_name '{}', case_name '{}', metric '{}', installer '{}'," \
                               " pod '{}' and scenario '{}'".format(project_name,
-                                                                  case_name,
-                                                                  self.vis_state_title,
-                                                                  installer,
-                                                                  pod,
-                                                                  scenario)
+                                                                   case_name,
+                                                                   self.vis_title,
+                                                                   installer,
+                                                                   pod,
+                                                                   scenario)
         self['scenario'] = 1
         self['kibanaSavedObjectMeta'] = {"searchSourceJSON": json.dumps(KibanaSearchSourceJSON(project_name,
                                                                                                case_name,
@@ -286,7 +251,7 @@ def _get_pods_and_scenarios(project_name, case_name, installer):
         }
     })
 
-    elastic_data = elastic_access.get_elastic_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
+    elastic_data = elastic_access.get_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
                                                    es_creds, query_json)
 
     pods_and_scenarios = {}
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/templates/duration.json b/utils/test/dashboard/dashboard/elastic2kibana/templates/duration.json
new file mode 100644 (file)
index 0000000..f50a668
--- /dev/null
@@ -0,0 +1,45 @@
+{% set aggs = aggs|default([]) -%}
+
+{
+  "title": "duration",
+  "type": "line",
+  "listeners": {},
+  "params": {
+    "addLegend": true,
+    "shareYAxis": true,
+    "addTooltip": true,
+    "smoothLines": false,
+    "scale": "linear",
+    "interpolate": "linear",
+    "times": [],
+    "addTimeMarker": false,
+    "defaultYExtents": false,
+    "setYExtents": false,
+    "yAxis": {},
+    "mode": "stacked"
+  },
+  "aggs": [
+    {% for agg in aggs %}
+    {
+      "id": {{agg.id }},
+      "type": "avg",
+      "schema": "metric",
+      "params": {
+        "field": "{{agg.field}}"
+      }
+    },
+    {% endfor %}
+    {
+      "id": {{ aggs|length + 1 }},
+      "type": "date_histogram",
+      "schema": "segment",
+      "params": {
+        "field": "start_date",
+        "interval": "auto",
+        "customInterval": "2h",
+        "min_doc_count": 1,
+        "extended_bounds": {}
+      }
+    }
+  ]
+}
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/templates/success_percentage.json b/utils/test/dashboard/dashboard/elastic2kibana/templates/success_percentage.json
new file mode 100644 (file)
index 0000000..9930708
--- /dev/null
@@ -0,0 +1,45 @@
+{% set aggs = aggs|default([]) -%}
+
+{
+  "title": "success_percentage",
+  "type": "line",
+  "listeners": {},
+  "params": {
+    "addLegend": true,
+    "shareYAxis": true,
+    "addTooltip": true,
+    "smoothLines": false,
+    "scale": "linear",
+    "interpolate": "linear",
+    "times": [],
+    "addTimeMarker": false,
+    "defaultYExtents": false,
+    "setYExtents": false,
+    "yAxis": {},
+    "mode": "stacked"
+  },
+  "aggs": [
+    {% for agg in aggs %}
+    {
+      "id": {{agg.id }},
+      "type": "avg",
+      "schema": "metric",
+      "params": {
+        "field": "{{agg.field}}"
+      }
+    },
+    {% endfor %}
+    {
+      "id": {{ aggs|length + 1 }},
+      "type": "date_histogram",
+      "schema": "segment",
+      "params": {
+        "field": "start_date",
+        "interval": "auto",
+        "customInterval": "2h",
+        "min_doc_count": 1,
+        "extended_bounds": {}
+      }
+    }
+  ]
+}
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/templates/tests_failures.json b/utils/test/dashboard/dashboard/elastic2kibana/templates/tests_failures.json
new file mode 100644 (file)
index 0000000..01f9ba8
--- /dev/null
@@ -0,0 +1,45 @@
+{% set aggs = aggs|default([]) -%}
+
+{
+  "title": "tests_failures",
+  "type": "histogram",
+  "listeners": {},
+  "params": {
+    "addLegend": true,
+    "shareYAxis": true,
+    "addTooltip": true,
+    "smoothLines": false,
+    "scale": "linear",
+    "interpolate": "linear",
+    "times": [],
+    "addTimeMarker": false,
+    "defaultYExtents": false,
+    "setYExtents": false,
+    "yAxis": {},
+    "mode": "grouped"
+  },
+  "aggs": [
+    {% for agg in aggs %}
+    {
+      "id": {{agg.id }},
+      "type": "sum",
+      "schema": "metric",
+      "params": {
+        "field": "{{agg.field}}"
+      }
+    },
+    {% endfor %}
+    {
+      "id": {{ aggs|length + 1 }},
+      "type": "date_histogram",
+      "schema": "segment",
+      "params": {
+        "field": "start_date",
+        "interval": "auto",
+        "customInterval": "2h",
+        "min_doc_count": 1,
+        "extended_bounds": {}
+      }
+    }
+  ]
+}
index 25b5320..76efb14 100644 (file)
@@ -38,12 +38,12 @@ tmp_docs_file = './mongo-{}.json'.format(uuid.uuid4())
 
 class DocumentPublisher:
 
-    def __init__(self, doc, fmt, exist_docs, creds, to):
+    def __init__(self, doc, fmt, exist_docs, creds, elastic_url):
         self.doc = doc
         self.fmt = fmt
         self.creds = creds
         self.exist_docs = exist_docs
-        self.to = to
+        self.elastic_url = elastic_url
         self.is_formatted = True
 
     def format(self):
@@ -64,7 +64,7 @@ class DocumentPublisher:
             self._publish()
 
     def _publish(self):
-        status, data = elastic_access.publish_json(self.doc, self.creds, self.to)
+        status, data = elastic_access.publish_docs(self.elastic_url, self.creds, self.doc)
         if status > 300:
             logger.error('Publish record[{}] failed, due to [{}]'
                          .format(self.doc, json.loads(data)['error']['reason']))
@@ -163,14 +163,13 @@ class DocumentPublisher:
 
 class DocumentsPublisher:
 
-    def __init__(self, project, case, fmt, days, elastic_url, creds, to):
+    def __init__(self, project, case, fmt, days, elastic_url, creds):
         self.project = project
         self.case = case
         self.fmt = fmt
         self.days = days
         self.elastic_url = elastic_url
         self.creds = creds
-        self.to = to
         self.existed_docs = []
 
     def export(self):
@@ -200,7 +199,36 @@ class DocumentsPublisher:
             exit(-1)
 
     def get_existed_docs(self):
-        self.existed_docs = elastic_access.get_elastic_docs_by_days(self.elastic_url, self.creds, self.days)
+        if self.days == 0:
+            body = '''{{
+                        "query": {{
+                            "bool": {{
+                                "must": [
+                                    {{ "match": {{ "project_name": "{}" }} }},
+                                    {{ "match": {{ "case_name": "{}" }} }}
+                                ]
+                            }}
+                        }}
+                    }}'''.format(self.project, self.case)
+        elif self.days > 0:
+            body = '''{{
+                       "query": {{
+                           "bool": {{
+                               "must": [
+                                   {{ "match": {{ "project_name": "{}" }} }},
+                                   {{ "match": {{ "case_name": "{}" }} }}
+                               ],
+                               "filter": {{
+                                   "range": {{
+                                       "start_date": {{ "gte": "now-{}d" }}
+                                   }}
+                               }}
+                           }}
+                       }}
+                   }}'''.format(self.project, self.case, self.days)
+        else:
+            raise Exception('Update days must be non-negative')
+        self.existed_docs = elastic_access.get_docs(self.elastic_url, self.creds, body)
         return self
 
     def publish(self):
@@ -211,7 +239,7 @@ class DocumentsPublisher:
                                       self.fmt,
                                       self.existed_docs,
                                       self.creds,
-                                      self.to).format().publish()
+                                      self.elastic_url).format().publish()
         finally:
             fdocs.close()
             self._remove()
@@ -223,13 +251,9 @@ class DocumentsPublisher:
 
 def main():
     base_elastic_url = urlparse.urljoin(CONF.elastic_url, '/test_results/mongo2elastic')
-    to = CONF.destination
     days = args.latest_days
     es_creds = CONF.elastic_creds
 
-    if to == 'elasticsearch':
-        to = base_elastic_url
-
     for project, case_dicts in testcases.testcases_yaml.items():
         for case_dict in case_dicts:
             case = case_dict.get('name')
@@ -239,5 +263,4 @@ def main():
                                fmt,
                                days,
                                base_elastic_url,
-                               es_creds,
-                               to).export().get_existed_docs().publish()
+                               es_creds).export().get_existed_docs().publish()
index b94ac7b..1e67bd8 100644 (file)
@@ -4,10 +4,6 @@
 url = http://localhost:9200
 creds =
 
-[output]
-# elasticsearch or console
-destination = elasticsearch
-
 [kibana]
 url = http://10.63.243.17:5601/app/kibana
 js = true
index 9ce4994..ee01900 100644 (file)
@@ -14,10 +14,10 @@ logger.addHandler(file_handler)
 
 
 def delete_all(url, es_creds):
-    ids = elastic_access.get_elastic_docs(url, es_creds, body=None, field='_id')
+    ids = elastic_access.get_docs(url, es_creds, body=None, field='_id')
     for id in ids:
         del_url = '/'.join([url, id])
-        elastic_access.delete_request(del_url, es_creds)
+        elastic_access.delete_docs(del_url, es_creds)
 
 
 if __name__ == '__main__':