Merge "Add profiler parameter to doctor verify job"
author	Ryota Mibu <r-mibu@cq.jp.nec.com>
Wed, 4 Jan 2017 04:22:22 +0000 (04:22 +0000)
committer	Gerrit Code Review <gerrit@opnfv.org>
Wed, 4 Jan 2017 04:22:22 +0000 (04:22 +0000)
17 files changed:
jjb/dovetail/dovetail-artifacts-upload.sh
jjb/dovetail/dovetail-cleanup.sh
jjb/fuel/fuel-verify-jobs.yml
jjb/functest/functest-cleanup.sh
jjb/global/releng-macros.yml
jjb/kvmfornfv/kvmfornfv.yml
jjb/releng/opnfv-lint.yml
jjb/vswitchperf/vswitchperf.yml
jjb/yardstick/yardstick-ci-jobs.yml
jjb/yardstick/yardstick-daily.sh
prototypes/puppet-infracloud/manifests/site.pp
utils/fetch_os_creds.sh
utils/push-test-logs.sh
utils/test/testapi/install.sh
utils/test/testapi/opnfv_testapi/resources/models.py
utils/test/testapi/opnfv_testapi/resources/result_models.py
utils/test/testapi/opnfv_testapi/tests/unit/test_result.py

jjb/dovetail/dovetail-artifacts-upload.sh
index 94e4129..ce02dd5 100755 (executable)
@@ -17,6 +17,17 @@ cd ${CACHE_DIR}
 sudo docker pull ${DOCKER_REPO_NAME}:${DOCKER_TAG}
 sudo docker save -o ${STORE_FILE_NAME} ${DOCKER_REPO_NAME}:${DOCKER_TAG}
 
+OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+GS_UPLOAD_LOCATION="${STORE_URL}/${OPNFV_ARTIFACT_VERSION}"
+(
+    echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
+    echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
+    echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
+    echo "OPNFV_ARTIFACT_URL=$GS_UPLOAD_LOCATION"
+    echo "OPNFV_BUILD_URL=$BUILD_URL"
+) > $WORKSPACE/opnfv.properties
+source $WORKSPACE/opnfv.properties
+
 importkey () {
 # clone releng repository
 echo "Cloning releng repository..."
@@ -45,7 +56,18 @@ echo
 
 cd $WORKSPACE
 # upload artifact and additional files to google storage
-gsutil cp ${CACHE_DIR}/${STORE_FILE_NAME} ${STORE_URL}/${STORE_FILE_NAME}
+gsutil cp ${CACHE_DIR}/${STORE_FILE_NAME} \
+    ${STORE_URL}/${STORE_FILE_NAME} > gsutil.dockerfile.log 2>&1
+gsutil cp $WORKSPACE/opnfv.properties \
+    ${STORE_URL}/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
+gsutil cp $WORKSPACE/opnfv.properties \
+    ${STORE_URL}/latest.properties > gsutil.latest.log 2>&1
+
+gsutil -m setmeta \
+    -h "Content-Type:text/html" \
+    -h "Cache-Control:private, max-age=0, no-transform" \
+    ${STORE_URL}/latest.properties \
+    ${STORE_URL}/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
 
 gsutil -m setmeta \
     -h "Cache-Control:private, max-age=0, no-transform" \
jjb/dovetail/dovetail-cleanup.sh
index f215278..22b2ba2 100755 (executable)
@@ -2,14 +2,35 @@
 
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
-echo "Cleaning up docker containers/images..."
-# Remove previous running containers if exist
+# Clean up dependent projects' dangling docker images (no containers, image tag <none>)
+clean_images=(opnfv/functest opnfv/yardstick)
+for clean_image in "${clean_images[@]}"; do
+    echo "Removing image $image_id, which has no containers and image tag is None"
+    dangling_images=($(docker images -f "dangling=true" | grep ${clean_image} | awk '{print $3}'))
+    if [[ -n ${dangling_images} ]]; then
+        for image_id in "${dangling_images[@]}"; do
+            docker rmi $image_id >${redirect}
+        done
+    fi
+done
+
+echo "Remove containers with image dovetail:<None>..."
+dangling_images=($(docker images -f "dangling=true" | grep opnfv/dovetail | awk '{print $3}'))
+if [[ -n ${dangling_images} ]]; then
+    for image_id in "${dangling_images[@]}"; do
+        echo "Removing image $image_id with tag None and its related containers"
+        docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+        docker rmi $image_id >${redirect}
+    done
+fi
+
+echo "Cleaning up dovetail docker containers/images..."
 if [[ ! -z $(docker ps -a | grep opnfv/dovetail) ]]; then
     echo "Removing existing opnfv/dovetail containers..."
     docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >${redirect}
 fi
 
-# Remove existing images if exist
+echo "Remove dovetail existing images if exist..."
 if [[ ! -z $(docker images | grep opnfv/dovetail) ]]; then
     echo "Docker images to remove:"
     docker images | head -1 && docker images | grep opnfv/dovetail >${redirect}
jjb/fuel/fuel-verify-jobs.yml
index fe89853..4a76d9d 100644 (file)
 #####################################
     phase:
         - 'basic':
-            slave-label: 'opnfv-build'
+            slave-label: 'opnfv-build-ubuntu'
         - 'build':
             slave-label: 'opnfv-build-ubuntu'
         - 'deploy-virtual':
-            slave-label: 'opnfv-build'
+            slave-label: 'opnfv-build-ubuntu'
         - 'smoke-test':
-            slave-label: 'opnfv-build'
+            slave-label: 'opnfv-build-ubuntu'
 #####################################
 # jobs
 #####################################
@@ -97,7 +97,7 @@
             project: '{project}'
         - gerrit-parameter:
             branch: '{branch}'
-        - 'opnfv-build-defaults'
+        - 'opnfv-build-ubuntu-defaults'
         - 'fuel-verify-defaults':
             gs-pathname: '{gs-pathname}'
 
jjb/functest/functest-cleanup.sh
index f8140e0..b03d477 100755 (executable)
@@ -5,7 +5,7 @@
 echo "Cleaning up docker containers/images..."
 FUNCTEST_IMAGE=opnfv/functest
 # Remove containers along with image opnfv/functest:<none>
-dangling_images=($(docker images -f "dangling=true" | grep $FUNCTEST_IMAGE | awk '{print $1}'))
+dangling_images=($(docker images -f "dangling=true" | grep $FUNCTEST_IMAGE | awk '{print $3}'))
 if [[ -n ${dangling_images} ]]; then
     echo "  Removing $FUNCTEST_IMAGE:<none> images and their containers..."
     for image_id in "${dangling_images[@]}"; do
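The one-character fix above works because `docker images` prints REPOSITORY, TAG and IMAGE ID as its first three columns, so the image ID is field $3, not $1. A behaviour-equivalent sketch (shown only for illustration, not what the job uses; it lists every dangling image rather than grepping for $FUNCTEST_IMAGE, and assumes a docker CLI with --format support):

    # Ask docker for dangling image IDs directly instead of parsing columns with awk.
    dangling_images=($(docker images -f "dangling=true" --format '{{.ID}}'))
    for image_id in "${dangling_images[@]}"; do
        docker rmi "$image_id" || true   # ignore images that are already gone
    done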
jjb/global/releng-macros.yml
index cd92480..7647a35 100644 (file)
     name: check-bash-syntax
     builders:
         - shell: "find . -name '*.sh' | xargs bash -n"
+
+- builder:
+    name: lint-yaml-code
+    builders:
+        - shell: |
+            #!/bin/bash
+            set -o errexit
+            set -o pipefail
+            set -o xtrace
+            export PATH=$PATH:/usr/local/bin/
+
+            # generate and upload lint log
+            echo "Running yaml code on $PROJECT ..."
+
+            # Ensure we start with a clean environment
+            rm -f yaml-violation.log lint.log
+
+            # Get number of yaml violations. If none, this will be an
+            # empty string: ""
+            find . \
+                -path './releng_yamllint' -prune -o \
+                -path './.tox' -prune -o \
+                -type f -name "*.yml" -print \
+                -o -name "*.yaml" -print | \
+                xargs yamllint > yaml-violation.log || true
+
+            if [ -s "yaml-violation.log" ]; then
+              SHOWN=$(grep -cv "^$" yaml-violation.log)
+              echo -e "First $SHOWN shown\n---" > lint.log
+              cat yaml-violation.log >> lint.log
+              sed -r -i '4,$s/^/ /g' lint.log
+            fi
+
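The lint-yaml-code builder above can be dry-run locally before pushing a change; a rough sketch (assumes yamllint is installable with pip and the repository is the current directory; the prune paths used by the builder are omitted for brevity):

    # Install yamllint for the current user and run the same file selection as the builder.
    pip install --user yamllint
    find . -type f \( -name '*.yml' -o -name '*.yaml' \) -print | \
        xargs yamllint > yaml-violation.log || true
    # Count of reported violations, mirroring the SHOWN value computed by the builder.
    grep -cv "^$" yaml-violation.log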
jjb/kvmfornfv/kvmfornfv.yml
index 33c73f9..8ab821c 100644 (file)
                   node-parameters: false
                   kill-phase-on: FAILURE
                   abort-all-job: true
-        - multijob:
-            name: test
-            condition: SUCCESSFUL
-            projects:
-                - name: 'kvmfornfv-verify-test-{stream}'
-                  current-parameters: false
-                  predefined-parameters: |
-                    GERRIT_BRANCH=$GERRIT_BRANCH
-                    GERRIT_REFSPEC=$GERRIT_REFSPEC
-                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-                  node-parameters: false
-                  kill-phase-on: FAILURE
-                  abort-all-job: true
+#        - multijob:
+#            name: test
+#            condition: SUCCESSFUL
+#            projects:
+#                - name: 'kvmfornfv-verify-test-{stream}'
+#                  current-parameters: false
+#                  predefined-parameters: |
+#                    GERRIT_BRANCH=$GERRIT_BRANCH
+#                    GERRIT_REFSPEC=$GERRIT_REFSPEC
+#                    GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+#                  node-parameters: false
+#                  kill-phase-on: FAILURE
+#                  abort-all-job: true
 - job-template:
     name: 'kvmfornfv-verify-{phase}-{stream}'
 
     scm:
         - git-scm
 
-    triggers:
-        - timed: '@midnight'
+#    triggers:
+#        - timed: '@midnight'
 
     builders:
         - description-setter:
jjb/releng/opnfv-lint.yml
index 7115cce..a96fce6 100644 (file)
@@ -9,6 +9,7 @@
 
     jobs:
         - 'opnfv-lint-verify-{stream}'
+        - 'opnfv-yamllint-verify-{stream}'
 
     stream:
         - master:
     builders:
         - lint-python-code
         - report-lint-result-to-gerrit
+
+- job-template:
+    name: 'opnfv-yamllint-verify-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    parameters:
+        - project-parameter:
+            project: $GERRIT_PROJECT
+        - gerrit-parameter:
+            branch: '{branch}'
+        - node:
+            name: SLAVE_NAME
+            description: Slaves to execute yamllint
+            default-slaves:
+                - lf-build1
+            allowed-multiselect: true
+            ignore-offline-nodes: true
+
+    scm:
+        - git-scm-gerrit
+
+    triggers:
+        - gerrit:
+            server-name: 'gerrit.opnfv.org'
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+              - project-compare-type: 'REG_EXP'
+                project-pattern: 'compass4nfv'
+                branches:
+                  - branch-compare-type: 'ANT'
+                    branch-pattern: '**/{branch}'
+                file-paths:
+                  - compare-type: ANT
+                    pattern: '**/*.yml'
+                  - compare-type: ANT
+                    pattern: '**/*.yaml'
+
+    builders:
+        - lint-yaml-code
+        - report-lint-result-to-gerrit
jjb/vswitchperf/vswitchperf.yml
index 0d2c67b..fe8ea53 100644 (file)
@@ -46,7 +46,7 @@
             pwd
             cd src
             make clobber
-            make
+            make MORE_MAKE_FLAGS="-j 10"
             # run basic sanity test
             make sanity
             cd ../ci
             pwd
             cd src
             make clobber
-            make
+            make MORE_MAKE_FLAGS="-j 5"
             # run basic sanity test
             make sanity
             cd ../ci
             pwd
             cd src
             make clobber
-            make
+            make MORE_MAKE_FLAGS="-j 5"
             cd ../ci
             ./build-vsperf.sh merge
jjb/yardstick/yardstick-ci-jobs.yml
index 33f557a..1ad5417 100644 (file)
         - 'yardstick-cleanup'
         #- 'yardstick-fetch-os-creds'
         - 'yardstick-{testsuite}'
+        - 'yardstick-store-results'
 
     publishers:
         - email:
         - shell:
             !include-raw: ../../utils/fetch_os_creds.sh
 
+- builder:
+    name: yardstick-store-results
+    builders:
+        - shell:
+            !include-raw: ../../utils/push-test-logs.sh
+
 - builder:
     name: yardstick-cleanup
     builders:
jjb/yardstick/yardstick-daily.sh
index b370541..da9042b 100755 (executable)
@@ -37,8 +37,15 @@ envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
 echo "Yardstick: Pulling image opnfv/yardstick:${DOCKER_TAG}"
 docker pull opnfv/yardstick:$DOCKER_TAG >$redirect
 
+# map log directory
+branch=${GIT_BRANCH##*/}
+dir_result="${HOME}/opnfv/yardstick/results/${branch}"
+mkdir -p ${dir_result}
+sudo rm -rf ${dir_result}/*
+map_log_dir="-v ${dir_result}:/tmp/yardstick"
+
 # Run docker
-cmd="sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
+cmd="sudo docker run ${opts} ${envs} ${labconfig} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
     exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
 echo "Yardstick: Running docker cmd: ${cmd}"
 ${cmd}
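For context, the bind mount added above makes anything the Yardstick container writes to /tmp/yardstick appear under ${HOME}/opnfv/yardstick/results/<branch> on the host, where the new yardstick-store-results builder (push-test-logs.sh) can pick it up. A stripped-down sketch of the same pattern (hypothetical branch and image tag, not the job's exact values):

    # Host directory that will receive files written to /tmp/yardstick inside the container.
    dir_result="${HOME}/opnfv/yardstick/results/master"
    mkdir -p "${dir_result}"
    sudo docker run -v "${dir_result}:/tmp/yardstick" opnfv/yardstick:latest \
        bash -c 'echo demo > /tmp/yardstick/results.txt'
    ls "${dir_result}"   # results.txt now exists on the host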
prototypes/puppet-infracloud/manifests/site.pp
index 8cbfef8..3483b06 100644 (file)
@@ -10,13 +10,13 @@ node 'controller00.opnfvlocal' {
   $group = 'infracloud'
   include ::sudoers
 
-  class { 'opnfv::server':
+  class { '::opnfv::server':
     iptables_public_tcp_ports => [80,5000,5671,8774,9292,9696,35357], # logs,keystone,rabbit,nova,glance,neutron,keystone
     sysadmins                 => hiera('sysadmins', []),
     enable_unbound            => false,
     purge_apt_sources         => false,
   }
-  class { 'opnfv::controller':
+  class { '::opnfv::controller':
     keystone_rabbit_password         => hiera('keystone_rabbit_password'),
     neutron_rabbit_password          => hiera('neutron_rabbit_password'),
     nova_rabbit_password             => hiera('nova_rabbit_password'),
@@ -38,6 +38,7 @@ node 'controller00.opnfvlocal' {
     neutron_subnet_gateway           => hiera('neutron_subnet_gateway'),
     neutron_subnet_allocation_pools  => hiera('neutron_subnet_allocation_pools'),
     opnfv_password                   => hiera('opnfv_password'),
+    require                          => Class['::opnfv::server'],
   }
 }
 
@@ -45,13 +46,13 @@ node 'compute00.opnfvlocal' {
   $group = 'infracloud'
   include ::sudoers
 
-  class { 'opnfv::server':
+  class { '::opnfv::server':
     sysadmins                 => hiera('sysadmins', []),
     enable_unbound            => false,
     purge_apt_sources         => false,
   }
 
-  class { 'opnfv::compute':
+  class { '::opnfv::compute':
     nova_rabbit_password             => hiera('nova_rabbit_password'),
     neutron_rabbit_password          => hiera('neutron_rabbit_password'),
     neutron_admin_password           => hiera('neutron_admin_password'),
@@ -60,11 +61,12 @@ node 'compute00.opnfvlocal' {
     br_name                          => hiera('bridge_name'),
     controller_public_address        => 'controller00.opnfvlocal',
     virt_type                        => hiera('virt_type'),
+    require                          => Class['::opnfv::server'],
   }
 }
 
 node 'jumphost.opnfvlocal' {
-  class { 'opnfv::server':
+  class { '::opnfv::server':
     sysadmins                 => hiera('sysadmins', []),
     enable_unbound            => false,
     purge_apt_sources         => false,
@@ -97,5 +99,6 @@ node 'baremetal.opnfvlocal', 'lfpod5-jumpserver' {
     ipv4_subnet_mask          => hiera('ipv4_subnet_mask'),
     bridge_name               => hiera('bridge_name'),
     dib_dev_user_password     => hiera('dib_dev_user_password'),
+    require                   => Class['::opnfv::server'],
   }
 }
utils/fetch_os_creds.sh
index 47fbc91..ecc571d 100755 (executable)
@@ -144,9 +144,17 @@ elif [ "$installer_type" == "compass" ]; then
     sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null
 
     info "This file contains the mgmt keystone API, we need the public one for our rc file"
-    public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
-        "ssh ${controller_ip} 'source /opt/admin-openrc.sh; openstack endpoint show identity '" \
-        | grep publicurl | awk '{print $4}')
+    grep "OS_AUTH_URL.*v2" $dest_path > /dev/null 2>&1
+    if [ $? -eq 0 ]; then
+        public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
+            "ssh ${controller_ip} 'source /opt/admin-openrc.sh; openstack endpoint show identity '" \
+            | grep publicurl | awk '{print $4}')
+    else
+        public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
+            "ssh ${controller_ip} 'source /opt/admin-openrc.sh; \
+                 openstack endpoint list --interface public --service identity '" \
+            | grep identity | awk '{print $14}')
+    fi
     info "public_ip: $public_ip"
 
 
utils/push-test-logs.sh
index 586afce..913748f 100644 (file)
@@ -54,7 +54,7 @@ if [ -d "$dir_result" ]; then
         else
             gsutil ls gs://artifacts.opnfv.org/"$project"/ &>/dev/null
             if [ $? != 0 ]; then
-                echo "Not possible to push results to artifact: gsutil not installed.";
+                echo "Not possible to push results to artifact: some error happened when using gsutil";
             else
                 echo "Uploading logs to artifact $project_artifact"
                 gsutil -m cp -r "$dir_result"/* gs://artifacts.opnfv.org/"$project_artifact"/ >/dev/null 2>&1
utils/test/testapi/install.sh
index 43229ea..c55691a 100755 (executable)
@@ -10,9 +10,17 @@ usage:
 where:
     -h|--help         show this help text"
 
-if [[ $(whoami) != "root" ]]; then
-    echo "Error: This script must be run as root!"
-    exit 1
+# Ref :-  https://openstack.nimeyo.com/87286/openstack-packaging-all-definition-data-files-config-setup
+
+if [ -z "$VIRTUAL_ENV" ];
+then
+    if [[ $(whoami) != "root" ]];
+    then
+        echo "Error: This script must be run as root!"
+        exit 1
+    fi
+else
+    sed -i -e 's#/etc/opnfv_testapi =#etc/opnfv_testapi =#g' setup.cfg
 fi
 
 cp -fr 3rd_party/static opnfv_testapi/tornado_swagger
utils/test/testapi/opnfv_testapi/resources/models.py
index c85c1d5..f518c97 100644 (file)
@@ -24,7 +24,6 @@ class ModelBase(object):
     def _format(self, excludes):\r
         new_obj = copy.deepcopy(self)\r
         dicts = new_obj.__dict__\r
-        print self, self.__class__\r
         for k in dicts.keys():\r
             if k in excludes:\r
                 del dicts[k]\r
@@ -32,9 +31,9 @@ class ModelBase(object):
                 if hasattr(dicts[k], 'format'):\r
                     dicts[k] = dicts[k].format()\r
                 elif isinstance(dicts[k], list):\r
-                    hs = []\r
-                    for h in dicts[k]:\r
-                        hs.append(h.format())\r
+                    hs = [h.format()\r
+                          if hasattr(h, 'format') else str(h)\r
+                          for h in dicts[k]]\r
                     dicts[k] = hs\r
                 elif not isinstance(dicts[k], (str, int, float, dict)):\r
                     dicts[k] = str(dicts[k])\r
@@ -71,6 +70,7 @@ class ModelBase(object):
         return t\r
 \r
 \r
+@swagger.model()\r
 class CreateResponse(ModelBase):\r
     def __init__(self, href=''):\r
         self.href = href\r
utils/test/testapi/opnfv_testapi/resources/result_models.py
index 3011902..50445fc 100644 (file)
@@ -112,7 +112,7 @@ class TestResult(models.ModelBase):
 @swagger.model()
 class TestResults(models.ModelBase):
     """
-        @property rgit esults:
+        @property results:
         @ptype results: C{list} of L{TestResult}
     """
     def __init__(self):
utils/test/testapi/opnfv_testapi/tests/unit/test_result.py
index 8479b35..10575a9 100644 (file)
@@ -25,12 +25,14 @@ class Details(object):
         self.timestart = timestart
         self.duration = duration
         self.status = status
+        self.items = [{'item1': 1}, {'item2': 2}]
 
     def format(self):
         return {
             "timestart": self.timestart,
             "duration": self.duration,
-            "status": self.status
+            "status": self.status,
+            "items": self.items
         }
 
     @staticmethod
@@ -43,6 +45,7 @@ class Details(object):
         t.timestart = a_dict.get('timestart')
         t.duration = a_dict.get('duration')
         t.status = a_dict.get('status')
+        t.items = a_dict.get('items')
         return t
 
 
@@ -104,6 +107,7 @@ class TestResultBase(TestBase):
         self.assertEqual(details_res.duration, details_req.duration)
         self.assertEqual(details_res.timestart, details_req.timestart)
         self.assertEqual(details_res.status, details_req.status)
+        self.assertEqual(details_res.items, details_req.items)
         self.assertEqual(result.build_tag, req.build_tag)
         self.assertEqual(result.scenario, req.scenario)
         self.assertEqual(result.criteria, req.criteria)