Merge "Yamllint on Pharos, Octopus, and SecuredLab Repos"
author    Trevor Bramwell <tbramwell@linuxfoundation.org>
          Mon, 22 May 2017 16:25:24 +0000 (16:25 +0000)
committer Gerrit Code Review <gerrit@opnfv.org>
          Mon, 22 May 2017 16:25:24 +0000 (16:25 +0000)
jjb/apex/apex-iso-verify.sh
jjb/dovetail/dovetail-artifacts-upload.sh
jjb/dovetail/dovetail-artifacts-upload.yml
jjb/dovetail/dovetail-cleanup.sh
jjb/fuel/fuel-daily-jobs.yml
jjb/functest/functest-project-jobs.yml

index cdeac04..5378a90 100755 (executable)
@@ -29,76 +29,37 @@ fi
 # Make sure a pre-existing iso-verify isn't there
 rm_apex_iso_verify
 
+# make sure there is no pre-existing console log file for the VM
+sudo rm -f /var/log/libvirt/qemu/apex-iso-verify-console.log
+
 # run an install from the iso
 # This streams a serial console to tcp port 3737 on localhost
 sudo virt-install -n apex-iso-verify -r 4096 --vcpus 4 --os-variant=rhel7 \
- --accelerate -v --noautoconsole --nographics \
+ --accelerate -v --noautoconsole \
  --disk path=/var/lib/libvirt/images/apex-iso-verify.qcow2,size=30,format=qcow2 \
  -l $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso \
  --extra-args 'console=ttyS0 console=ttyS0,115200n8 serial inst.ks=file:/iso-verify.ks inst.stage2=hd:LABEL=OPNFV\x20CentOS\x207\x20x86_64:/' \
  --initrd-inject $BUILD_DIRECTORY/../ci/iso-verify.ks \
- --serial tcp,host=:3737,protocol=raw
-
-# Attach to tcpport 3737 and echo the output to stdout
-# watch for a 5 min time out, a power off message or a tcp disconnect
-python << EOP
-#!/usr/bin/env python
-
-import sys
-import socket
-from time import sleep
-from time import time
-
-
-TCP_IP = '127.0.0.1'
-TCP_PORT = 3737
-BUFFER_SIZE = 1024
-
-try:
-    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    s.connect((TCP_IP, TCP_PORT))
-except Exception, e:
-    print "Failed to connect to the iso-verofy vm's serial console"
-    print "this probably means that the VM failed to start"
-    raise e
-
-activity = time()
-data = s.recv(BUFFER_SIZE)
-last_data = data
-while time() - activity < 300:
-    try:
-        if data != last_data:
-            activity = time()
-        last_data = data
-        data = s.recv(BUFFER_SIZE)
-        sys.stdout.write(data)
-        if 'Powering off' in data:
-            break
-        sleep(.5)
-    except socket.error, e:
-        # for now assuming that the connection was closed
-        # which is good, means the vm finished installing
-        # printing the error output just in case we need to debug
-        print "VM console connection lost: %s" % msg
-        break
-s.close()
-
-if time() - activity > 300:
-    print "failing due to console inactivity"
-    exit(1)
-else:
-    print "Success!"
-EOP
-
-# save the python return code for after cleanup
-python_rc=$?
+ --serial file,path=/var/log/libvirt/qemu/apex-iso-verify-console.log
+
+echo "Waiting for install to finish..."
+sleep 10
+end_time=$(($SECONDS+1500))
+while ! [[ `sudo tail -n1 /var/log/libvirt/qemu/apex-iso-verify-console.log` =~ 'Power down' ]]; do
+  if [ $SECONDS -gt $end_time ] || ! sudo virsh list --all | grep apex-iso-verify | grep running > /dev/null; then
+    sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
+    sudo virsh list --all
+    echo "Error: Failed to find power down message after install"
+    exit 1
+  fi
+  sleep 10
+done
+
+sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
 
 # clean up
 rm_apex_iso_verify
 
-# Exit with the RC of the Python job
-exit $python_rc
-
 echo
 echo "--------------------------------------------------------"
 echo "Done!"
index b23deca..f1a9e72 100755 (executable)
@@ -52,7 +52,7 @@ echo "signature Upload Complete!"
 
 upload () {
 # log info to console
-echo "Uploading to artifact. This could take some time..."
+echo "Uploading ${STORE_FILE_NAME} to artifact. This could take some time..."
 echo
 
 cd $WORKSPACE
index 3d9af5e..0c8efbe 100644 (file)
@@ -19,6 +19,8 @@
         - 'dovetail'
         - 'functest'
         - 'yardstick'
+        - 'testapi'
+        - 'mongo'
 
 #############################################
 # job template
@@ -55,7 +57,8 @@
 
     builders:
         - 'dovetail-builder-artifacts-upload'
-        - 'dovetail-workspace-cleanup'
+        - 'dovetail-upload-artifacts-cache-cleanup'
+        - 'dovetail-images-cleanup'
 
 ####################
 # parameter macros
@@ -94,7 +97,7 @@
             !include-raw: ./dovetail-artifacts-upload.sh
 
 - builder:
-    name: dovetail-workspace-cleanup
+    name: dovetail-upload-artifacts-cache-cleanup
     builders:
         - shell: |
             #!/bin/bash
 
             /bin/rm -rf $CACHE_DIR
 
-            # Remove previous running containers if exist
-            if [[ -n "$(docker ps -a | grep $DOCKER_REPO_NAME)" ]]; then
-                echo "Removing existing $DOCKER_REPO_NAME containers..."
-                docker ps -a | grep $DOCKER_REPO_NAME | awk '{print $1}' | xargs docker rm -f
-                t=60
-                # Wait max 60 sec for containers to be removed
-                while [[ $t -gt 0 ]] && [[ -n "$(docker ps| grep $DOCKER_REPO_NAME)" ]]; do
-                    sleep 1
-                    let t=t-1
-                done
-            fi
-
-            # Remove existing images if exist
-            if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
-                echo "Docker images to remove:"
-                docker images | head -1 && docker images | grep $DOCKER_REPO_NAME
-                image_tags=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $2}'))
-                for tag in "${image_tags[@]}"; do
-                    if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $tag)" ]]; then
-                        echo "Removing docker image $DOCKER_REPO_NAME:$tag..."
-                        docker rmi -f $DOCKER_REPO_NAME:$tag
-                    fi
-                done
-            fi
+- builder:
+    name: dovetail-images-cleanup
+    builders:
+        - shell:
+            !include-raw: ./dovetail-cleanup.sh
index 22b2ba2..0ee789a 100755 (executable)
@@ -2,8 +2,8 @@
 
 [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
 
-#clean up dependent project docker images, which has no containers and image tag None
-clean_images=(opnfv/functest opnfv/yardstick)
+# clean up dependent project docker images which have no containers and whose image tag is None
+clean_images=(opnfv/functest opnfv/yardstick opnfv/testapi mongo)
 for clean_image in "${clean_images[@]}"; do
     echo "Removing image $image_id, which has no containers and image tag is None"
     dangling_images=($(docker images -f "dangling=true" | grep ${clean_image} | awk '{print $3}'))
@@ -14,7 +14,7 @@ for clean_image in "${clean_images[@]}"; do
     fi
 done
 
-echo "Remove containers with image dovetail:<None>..."
+echo "Remove containers with image opnfv/dovetail:<None>..."
 dangling_images=($(docker images -f "dangling=true" | grep opnfv/dovetail | awk '{print $3}'))
 if [[ -n ${dangling_images} ]]; then
     for image_id in "${dangling_images[@]}"; do
@@ -24,7 +24,7 @@ if [[ -n ${dangling_images} ]]; then
     done
 fi
 
-echo "Cleaning up dovetail docker containers/images..."
+echo "Cleaning up dovetail docker containers..."
 if [[ ! -z $(docker ps -a | grep opnfv/dovetail) ]]; then
     echo "Removing existing opnfv/dovetail containers..."
     docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >${redirect}
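
The rewritten cleanup above removes dangling images for each dependent project and then force-removes any leftover opnfv/dovetail containers. A hedged sketch of that dangling-image pass, factored into a function for illustration (the function name and the || true guard are assumptions, not part of dovetail-cleanup.sh):

    #!/bin/bash
    # Remove all dangling (tag <none>) images whose repository matches $1.
    remove_dangling_images() {
        local repo=$1
        local ids=($(docker images -f "dangling=true" | grep "$repo" | awk '{print $3}'))
        for image_id in "${ids[@]}"; do
            echo "Removing dangling image $image_id of $repo"
            docker rmi -f "$image_id" || true
        done
    }

    # Hypothetical usage over the same image list as the script:
    # for repo in opnfv/functest opnfv/yardstick opnfv/testapi mongo; do
    #     remove_dangling_images "$repo"
    # done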
index 2fa8687..5432c94 100644 (file)
@@ -73,8 +73,8 @@
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         - 'os-odl_l2-sfc-ha':
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-        - 'os-odl_l2-bgpvpn-ha':
-        #    auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+        - 'os-odl_l2-bgpvpn-ha':
+            auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         - 'os-nosdn-kvm-ha':
             auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
         - 'os-nosdn-ovs-ha':
     jobs:
         - 'fuel-{scenario}-{pod}-daily-{stream}'
         - 'fuel-deploy-{pod}-daily-{stream}'
-        - 'fuel-os-odl_l2-bgpvpn-ha-{pod}-daily-{stream}'
 
 ########################
 # job templates
                 build-step-failure-threshold: 'never'
                 failure-threshold: 'never'
                 unstable-threshold: 'FAILURE'
-
-    publishers:
-        - email:
-            recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
-
-- job-template:
-    name: 'fuel-os-odl_l2-bgpvpn-ha-{pod}-daily-{stream}'
-
-    disabled: '{obj:disabled}'
-
-    concurrent: false
-
-    properties:
-        - logrotate-default
-        - throttle:
-            enabled: true
-            max-total: 4
-            max-per-node: 1
-            option: 'project'
-        - build-blocker:
-            use-build-blocker: true
-            blocking-jobs:
-                - 'fuel-os-.*?-{pod}-daily-.*'
-                - 'fuel-os-.*?-{pod}-weekly-.*'
-            block-level: 'NODE'
-
-    wrappers:
-        - build-name:
-            name: '$BUILD_NUMBER - Scenario: os-odl_l2-bgpvpn-ha'
-
-    triggers:
-        - 'fuel-os-odl_l2-bgpvpn-ha-{pod}-daily-{stream}-trigger'
-
-    parameters:
-        - project-parameter:
-            project: '{project}'
-            branch: '{branch}'
-        - '{installer}-defaults'
-        - '{slave-label}-defaults':
-            installer: '{installer}'
-        - string:
-            name: DEPLOY_SCENARIO
-            default: "os-odl_l2-bgpvpn-ha"
-        - fuel-ci-parameter:
-            gs-pathname: '{gs-pathname}'
-
-    builders:
-        - description-setter:
-            description: "Built on $NODE_NAME"
-        - trigger-builds:
-            - project: 'fuel-deploy-{pod}-daily-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
-              same-node: true
-              block: true
-        - trigger-builds:
-            - project: 'functest-fuel-{pod}-daily-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
-              same-node: true
-              block: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
-        - trigger-builds:
-            - project: 'yardstick-fuel-{pod}-daily-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
-              block: true
-              same-node: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
-        # 1.dovetail only master by now, not sync with A/B/C branches
+        # 1.dovetail only has a master branch, based on the D release
         # 2.here the stream means the SUT stream, dovetail stream is defined in its own job
-        # 3.only debug testsuite here(includes basic testcase,
-        #   i.e. refstack ipv6 vpn test cases from functest, HA test case
-        #   from yardstick)
+        # 3.only the debug testsuite here (refstack, ha, ipv6, bgpvpn)
         # 4.not used for release criteria or compliance,
-        #   only to debug the dovetail tool bugs with fuel bgpvpn scenario
-        - trigger-builds:
-            - project: 'dovetail-fuel-{pod}-proposed_tests-{stream}'
-              current-parameters: false
-              predefined-parameters:
-                DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
-              block: true
-              same-node: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
+        #   only to debug the dovetail tool bugs with bgpvpn
+        # 5.only run against the os-odl_l2-bgpvpn-ha scenario (a regex is used here, so it can be extended to more scenarios in the future)
+        - conditional-step:
+            condition-kind: regex-match
+            regex: os-odl_l2-bgpvpn-ha
+            label: '{scenario}'
+            steps:
+                - trigger-builds:
+                    - project: 'dovetail-fuel-{pod}-proposed_tests-{stream}'
+                      current-parameters: false
+                      predefined-parameters:
+                        DEPLOY_SCENARIO={scenario}
+                      block: true
+                      same-node: true
+                      block-thresholds:
+                        build-step-failure-threshold: 'never'
+                        failure-threshold: 'never'
+                        unstable-threshold: 'FAILURE'
 
     publishers:
         - email:
-            recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com matthew.lijun@huawei.com
-
+            recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
 
 - job-template:
     name: 'fuel-deploy-{pod}-daily-{stream}'
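
The conditional-step introduced in the fuel-daily-jobs.yml hunk above gates the dovetail trigger on a regex match against the job's scenario, so only os-odl_l2-bgpvpn-ha fires it today while leaving room to widen the regex later. Roughly, in shell terms for illustration (the DEPLOY_SCENARIO variable and the echo stand-in for the downstream trigger are assumptions, not part of the JJB macro):

    #!/bin/bash
    # Hypothetical shell equivalent of the regex-match conditional-step.
    if [[ "$DEPLOY_SCENARIO" =~ os-odl_l2-bgpvpn-ha ]]; then
        # Stand-in for triggering dovetail-fuel-{pod}-proposed_tests-{stream}
        echo "Scenario matched, triggering the dovetail proposed_tests job"
    fi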
index 14ad73a..7036f20 100644 (file)
@@ -88,4 +88,4 @@
     name: functest-unit-tests-and-docs-build
     builders:
         - shell: |
-            $WORKSPACE/run_unit_tests.sh
+            cd $WORKSPACE && tox