Merge "Prohibit the importation of a list of libraries"
authorRoss Brattain <ross.b.brattain@intel.com>
Fri, 2 Mar 2018 19:29:11 +0000 (19:29 +0000)
committerGerrit Code Review <gerrit@opnfv.org>
Fri, 2 Mar 2018 19:29:11 +0000 (19:29 +0000)
261 files changed:
INFO
INFO.yaml
ansible/clone_repos.yml
ansible/prepare_env.yml
ansible/roles/convert_openrc/tasks/main.yml
ansible/roles/create_dockerfile/templates/centos/Dockerfile
ansible/roles/create_dockerfile/templates/ubuntu/Dockerfile
ansible/roles/create_storperf_admin_rc/tasks/main.yml
ansible/roles/create_storperf_admin_rc/templates/storperf_admin-rc.j2
ansible/roles/infra_destroy_previous_configuration/tasks/delete_vm.yml [new file with mode: 0644]
ansible/roles/infra_destroy_previous_configuration/tasks/main.yml
ansible/yardstick_config.yml
api/resources/v1/env.py
dashboard/Prox_BM_L2FWD-4Port_MultiSize-1518452496550.json [new file with mode: 0644]
docker/Dockerfile
docker/Dockerfile.aarch64.patch
docs/testing/developer/devguide/devguide_nsb_prox.rst [new file with mode: 0755]
docs/testing/developer/devguide/images/PROX_BNG_QOS.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Baremetal_config.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Gen_2port_cfg.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Gen_GUI.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Handle_2port_cfg.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Hardware_Arch.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Openstack_stack_list.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Openstack_stack_show_a.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Openstack_stack_show_b.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_SUT_GUI.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Software_Arch.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Test_BM_Script.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Test_HEAT_Script.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Traffic_profile.png [new file with mode: 0644]
docs/testing/developer/devguide/images/PROX_Yardstick_config.png [new file with mode: 0644]
docs/testing/user/userguide/04-installation.rst
docs/testing/user/userguide/05-yardstick_plugin.rst
docs/testing/user/userguide/08-api.rst
docs/testing/user/userguide/opnfv_yardstick_tc056.rst
docs/testing/user/userguide/opnfv_yardstick_tc057.rst
docs/testing/user/userguide/opnfv_yardstick_tc058.rst
gui/app/scripts/controllers/main.js
nsb_setup.sh
requirements.txt
samples/storage_bottlenecks.yaml [new file with mode: 0644]
samples/vnf_samples/nsut/prox/configs/gen_l2fwd-2.cfg
samples/vnf_samples/nsut/prox/configs/gen_l2fwd-4.cfg
samples/vnf_samples/nsut/prox/tc_prox_baremetal_lw_aftr-4.yaml
samples/vnf_samples/nsut/vfw/tc_heat_rfc2544_ipv4_1rule_1flow_64B_trex.yaml
samples/vnf_samples/nsut/vfw/tc_heat_rfc2544_ipv4_1rule_1flow_64B_trex_scale-up.yaml [new file with mode: 0644]
samples/vnf_samples/nsut/vfw/vfw-tg-topology-scale-up.yaml [new file with mode: 0644]
samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_2_lb_1_sw.conf [new file with mode: 0644]
samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_4_lb_1_sw.conf [new file with mode: 0644]
samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_6_lb_1_sw.conf [new file with mode: 0644]
samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_8_lb_1_sw.conf [new file with mode: 0644]
samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_8_ports_10_lb_1_sw.conf [new file with mode: 0644]
samples/vnf_samples/traffic_profiles/ipv4_throughput-scale-up.yaml [new file with mode: 0644]
samples/vnf_samples/traffic_profiles/prox_binsearch.yaml
test-requirements.txt
tests/ci/prepare_env.sh
tests/ci/prepare_storperf_admin-rc.sh
tests/opnfv/test_suites/opnfv_os-odl-bgpvpn-noha_daily.yaml [new file with mode: 0644]
tests/unit/__init__.py
tests/unit/network_services/collector/test_publisher.py
tests/unit/network_services/collector/test_subscriber.py
tests/unit/network_services/helpers/test_cpu.py
tests/unit/network_services/helpers/test_dpdkbindnic_helper.py
tests/unit/network_services/helpers/test_samplevnf_helper.py
tests/unit/network_services/libs/ixia_libs/test_IxNet.py
tests/unit/network_services/nfvi/test_collectd.py
tests/unit/network_services/nfvi/test_resource.py
tests/unit/network_services/traffic_profile/test_base.py
tests/unit/network_services/traffic_profile/test_fixed.py
tests/unit/network_services/traffic_profile/test_http.py
tests/unit/network_services/traffic_profile/test_ixia_rfc2544.py
tests/unit/network_services/traffic_profile/test_prox_binsearch.py
tests/unit/network_services/traffic_profile/test_rfc2544.py
tests/unit/network_services/traffic_profile/test_trex_traffic_profile.py [moved from tests/unit/network_services/traffic_profile/test_traffic_profile.py with 81% similarity]
tests/unit/network_services/vnf_generic/test_vnfdgen.py
tests/unit/network_services/vnf_generic/vnf/test_acl_vnf.py
tests/unit/network_services/vnf_generic/vnf/test_base.py
tests/unit/network_services/vnf_generic/vnf/test_cgnapt_vnf.py
tests/unit/network_services/vnf_generic/vnf/test_prox_helpers.py
tests/unit/network_services/vnf_generic/vnf/test_prox_vnf.py
tests/unit/network_services/vnf_generic/vnf/test_router_vnf.py
tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py
tests/unit/network_services/vnf_generic/vnf/test_tg_ixload.py
tests/unit/network_services/vnf_generic/vnf/test_tg_ping.py
tests/unit/network_services/vnf_generic/vnf/test_tg_prox.py
tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py
tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_trex.py
tests/unit/network_services/vnf_generic/vnf/test_tg_trex.py
tests/unit/network_services/vnf_generic/vnf/test_udp_replay.py
tests/unit/network_services/vnf_generic/vnf/test_vfw_vnf.py
tests/unit/network_services/vnf_generic/vnf/test_vpe_vnf.py
tox.ini
yardstick/benchmark/contexts/base.py
yardstick/benchmark/contexts/dummy.py
yardstick/benchmark/contexts/heat.py
yardstick/benchmark/contexts/kubernetes.py
yardstick/benchmark/contexts/node.py
yardstick/benchmark/contexts/standalone/ovs_dpdk.py
yardstick/benchmark/contexts/standalone/sriov.py
yardstick/benchmark/core/__init__.py
yardstick/benchmark/core/task.py
yardstick/benchmark/runners/base.py
yardstick/benchmark/scenarios/lib/delete_network.py
yardstick/benchmark/scenarios/networking/sfc_openstack.py
yardstick/benchmark/scenarios/networking/vnf_generic.py
yardstick/cmd/commands/task.py
yardstick/common/ansible_common.py
yardstick/common/constants.py
yardstick/common/exceptions.py
yardstick/common/openstack_utils.py
yardstick/common/packages.py [new file with mode: 0644]
yardstick/common/privsep.py [new file with mode: 0644]
yardstick/common/utils.py
yardstick/dispatcher/influxdb.py
yardstick/error.py [new file with mode: 0644]
yardstick/network_services/constants.py [new file with mode: 0644]
yardstick/network_services/helpers/dpdkbindnic_helper.py
yardstick/network_services/traffic_profile/__init__.py
yardstick/network_services/traffic_profile/base.py
yardstick/network_services/traffic_profile/ixia_rfc2544.py
yardstick/network_services/traffic_profile/prox_binsearch.py
yardstick/network_services/traffic_profile/rfc2544.py
yardstick/network_services/traffic_profile/trex_traffic_profile.py [moved from yardstick/network_services/traffic_profile/traffic_profile.py with 85% similarity]
yardstick/network_services/utils.py
yardstick/network_services/vnf_generic/vnf/acl_vnf.py
yardstick/network_services/vnf_generic/vnf/prox_helpers.py
yardstick/network_services/vnf_generic/vnf/prox_vnf.py
yardstick/network_services/vnf_generic/vnf/sample_vnf.py
yardstick/network_services/vnf_generic/vnf/tg_ixload.py
yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py
yardstick/network_services/vnf_generic/vnf/vfw_vnf.py
yardstick/network_services/vnf_generic/vnf/vnf_ssh_helper.py [new file with mode: 0644]
yardstick/orchestrator/heat.py
yardstick/ssh.py
yardstick/tests/__init__.py
yardstick/tests/fixture.py [new file with mode: 0644]
yardstick/tests/functional/base.py [new file with mode: 0644]
yardstick/tests/functional/benchmark/__init__.py [new file with mode: 0644]
yardstick/tests/functional/benchmark/scenarios/__init__.py [new file with mode: 0644]
yardstick/tests/functional/benchmark/scenarios/networking/__init__.py [new file with mode: 0644]
yardstick/tests/functional/benchmark/scenarios/networking/test_vnf_generic.py [new file with mode: 0644]
yardstick/tests/functional/common/fake_directory_package/README.md [new file with mode: 0644]
yardstick/tests/functional/common/fake_directory_package/setup.py [new file with mode: 0644]
yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/__init__.py [new file with mode: 0644]
yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/__init__.py [new file with mode: 0644]
yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/__init__.py [new file with mode: 0644]
yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/__init__.py [new file with mode: 0644]
yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/dummy2.py [new file with mode: 0644]
yardstick/tests/functional/common/fake_pip_package/yardstick_new_plugin-1.0.0.tar.gz [new file with mode: 0644]
yardstick/tests/functional/common/test_packages.py [new file with mode: 0644]
yardstick/tests/unit/__init__.py
yardstick/tests/unit/apiserver/resources/test_env_action.py
yardstick/tests/unit/apiserver/utils/test_influx.py
yardstick/tests/unit/base.py [new file with mode: 0644]
yardstick/tests/unit/benchmark/contexts/standalone/test_ovs_dpdk.py
yardstick/tests/unit/benchmark/contexts/standalone/test_sriov.py
yardstick/tests/unit/benchmark/contexts/test_base.py [new file with mode: 0644]
yardstick/tests/unit/benchmark/contexts/test_dummy.py
yardstick/tests/unit/benchmark/contexts/test_heat.py
yardstick/tests/unit/benchmark/contexts/test_kubernetes.py
yardstick/tests/unit/benchmark/contexts/test_model.py
yardstick/tests/unit/benchmark/contexts/test_node.py
yardstick/tests/unit/benchmark/core/test_plugin.py
yardstick/tests/unit/benchmark/core/test_report.py
yardstick/tests/unit/benchmark/core/test_task.py
yardstick/tests/unit/benchmark/core/test_testcase.py
yardstick/tests/unit/benchmark/runner/test_base.py
yardstick/tests/unit/benchmark/runner/test_search.py
yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_general.py
yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_process.py
yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py
yardstick/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py
yardstick/tests/unit/benchmark/scenarios/availability/test_director.py
yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_general.py
yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
yardstick/tests/unit/benchmark/scenarios/availability/test_operation_general.py
yardstick/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py
yardstick/tests/unit/benchmark/scenarios/availability/test_util.py
yardstick/tests/unit/benchmark/scenarios/compute/test_cachestat.py
yardstick/tests/unit/benchmark/scenarios/compute/test_computecapacity.py
yardstick/tests/unit/benchmark/scenarios/compute/test_cpuload.py
yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py
yardstick/tests/unit/benchmark/scenarios/compute/test_memload.py
yardstick/tests/unit/benchmark/scenarios/compute/test_plugintest.py
yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py
yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py
yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu.py
yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu_for_vm.py
yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py
yardstick/tests/unit/benchmark/scenarios/dummy/test_dummy.py
yardstick/tests/unit/benchmark/scenarios/lib/test_add_memory_load.py
yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py
yardstick/tests/unit/benchmark/scenarios/lib/test_check_connectivity.py
yardstick/tests/unit/benchmark/scenarios/lib/test_check_numa_info.py
yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py
yardstick/tests/unit/benchmark/scenarios/lib/test_create_flavor.py
yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py
yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py
yardstick/tests/unit/benchmark/scenarios/lib/test_create_network.py
yardstick/tests/unit/benchmark/scenarios/lib/test_create_port.py
yardstick/tests/unit/benchmark/scenarios/lib/test_create_router.py
yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py
yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py
yardstick/tests/unit/benchmark/scenarios/lib/test_create_subnet.py
yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_flavor.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_floating_ip.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_port.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_gateway.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_interface.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py
yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py
yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py
yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py
yardstick/tests/unit/benchmark/scenarios/lib/test_get_migrate_target_host.py
yardstick/tests/unit/benchmark/scenarios/lib/test_get_numa_info.py
yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py
yardstick/tests/unit/benchmark/scenarios/lib/test_get_server_ip.py
yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py
yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py
yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
yardstick/tests/unit/benchmark/scenarios/networking/test_netutilization.py
yardstick/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py
yardstick/tests/unit/benchmark/scenarios/networking/test_nstat.py
yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py
yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py
yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py
yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py
yardstick/tests/unit/benchmark/scenarios/networking/test_sfc.py
yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py
yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py
yardstick/tests/unit/benchmark/scenarios/parser/test_parser.py
yardstick/tests/unit/benchmark/scenarios/storage/test_bonnie.py
yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py
yardstick/tests/unit/benchmark/scenarios/storage/test_storagecapacity.py
yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py
yardstick/tests/unit/benchmark/scenarios/test_base.py
yardstick/tests/unit/common/test_ansible_common.py
yardstick/tests/unit/common/test_httpClient.py
yardstick/tests/unit/common/test_openstack_utils.py
yardstick/tests/unit/common/test_packages.py [new file with mode: 0644]
yardstick/tests/unit/common/test_template_format.py
yardstick/tests/unit/common/test_utils.py
yardstick/tests/unit/common/test_yaml_loader.py
yardstick/tests/unit/dispatcher/test_influxdb.py
yardstick/tests/unit/orchestrator/test_heat.py
yardstick/tests/unit/orchestrator/test_kubernetes.py
yardstick/tests/unit/test_cmd/commands/test_env.py
yardstick/tests/unit/test_cmd/commands/test_testcase.py
yardstick/tests/unit/test_cmd/test_NSBperf.py
yardstick/tests/unit/test_ssh.py

diff --git a/INFO b/INFO
index 35b2828..1a49af2 100644 (file)
--- a/INFO
+++ b/INFO
@@ -22,6 +22,7 @@ ross.b.brattain@intel.com
 chenjiankun1@huawei.com
 rodolfo.alonso.hernandez@intel.com
 emma.l.foley@intel.com
+abhijit.sinha@intel.com
 
 Link to TSC approval: http://meetbot.opnfv.org/meetings/
 Link to approval of additional submitters:
index f84f695..730cd4a 100644 (file)
--- a/INFO.yaml
+++ b/INFO.yaml
@@ -74,6 +74,10 @@ committers:
       email: '14_ykl@tongji.edu.cn'
       company: 'tongji.edu.cn'
       id: 'tjuyinkanglin'
+    - name: 'Abhijit Sinha'
+      email: 'abhijit.sinha@intel.com'
+      company: 'intel.com'
+      id: 'abhijitsinha'
 tsc:
     # yamllint disable rule:line-length
    approval: 'http://meetbot.opnfv.org/meetings/'
index 6f69b6b..f35be00 100644 (file)
     YARDSTICK_REPO: "{{ lookup('env', 'YARDSTICK_REPO')|default('https://gerrit.opnfv.org/gerrit/yardstick', true) }}"
     YARDSTICK_REPO_DIR: "{{ lookup('env', 'YARDSTICK_REPO_DIR')|default('/home/opnfv/repos/yardstick', true) }}"
     YARDSTICK_BRANCH: "{{ lookup('env', 'YARDSTICK_BRANCH')|default('master', true) }}"
-    RELENG_REPO: "{{ lookup('env', 'RELENG_REPO')|default('https://gerrit.opnfv.org/gerrit/releng', true) }}"
-    RELENG_REPO_DIR: "{{ lookup('env', 'RELENG_REPO_DIR')|default('/home/opnfv/repos/releng', true) }}"
-    RELENG_BRANCH: "{{ lookup('env', 'RELENG_BRANCH')|default('master', true) }}"
-
 
   tasks:
-    - name: Updating releng -> "{{ RELENG_BRANCH }}"
-      git:
-        repo: "{{ RELENG_REPO }}"
-        dest: "{{ RELENG_REPO_DIR }}"
-        version: "{{ RELENG_BRANCH }}"
-        accept_hostkey: yes
-        recursive: no
-        force: yes
-
     - name: Updating yardstick -> "{{ YARDSTICK_BRANCH }}"
       git:
         repo: "{{ YARDSTICK_REPO }}"
index a1299c3..3215213 100644 (file)
     - fail: msg="{{ INSTALLER_TYPE }} not in {{ INSTALLERS }}"
       when: not openrc_present and (INSTALLER_TYPE not in INSTALLERS)
 
-    - name: fetch OS credentials
-      command: "{{ RELENG_REPO_DIR }}/utils/fetch_os_creds.sh {{ '-v' if DEPLOY_TYPE == 'virt' else '' }} -d {{ OPENRC }} -i {{ INSTALLER_TYPE }} -a {{ INSTALLER_IP }}"
-      when: not openrc_present
-
 
   roles:
     - role: convert_openrc
index 1606b0b..be621f0 100644 (file)
       auth_url: "{{ openrc.OS_AUTH_URL }}"
       password: "{{ openrc.OS_PASSWORD }}"
       username: "{{ openrc.OS_USERNAME }}"
-      project_name: "{{ openrc.OS_PROJECT_NAME }}"
-#      tenant_name: "{{ openrc.OS_TENANT_NAME }}"
+      project_name: "{{ openrc.OS_PROJECT_NAME|default(openrc.OS_TENANT_NAME) }}"
       project_domain_name: "{{ openrc.OS_PROJECT_DOMAIN_NAME }}"
-#      user_domain_name: "{{ openrc.OS_USER_DOMAIN_NAME }}"
-      # BUGS: We need to specify identity_api_version == 3, but we can't do it here
-      # because it is not the write place
-      # we need to set it via OS_IDENTITY_API_VERSION or clouds.yaml
-#      identity_api_version: "{{ openrc.OS_IDENTITY_API_VERSION }}"
 
 - debug: var=os_auth
 
@@ -44,7 +38,7 @@
     clouds:
         demo:
             # must specify API version here
-            identity_api_version: "{{ openrc.OS_IDENTITY_API_VERSION }}"
+            identity_api_version: "{{ openrc.OS_IDENTITY_API_VERSION|default(3) }}"
             auth: "{{ os_auth }}"
 
 - template:
index ca104c8..96d8a00 100644 (file)
@@ -18,7 +18,6 @@ ENV REPOS_DIR /home/opnfv/repos
 
 # Yardstick repo
 ENV YARDSTICK_REPO_DIR ${REPOS_DIR}/yardstick
-ENV RELENG_REPO_DIR ${REPOS_DIR}/releng
 
 RUN yum -y install\
     deltarpm \
@@ -50,7 +49,6 @@ RUN yum -y install\
 RUN mkdir -p ${REPOS_DIR} && \
     git config --global http.sslVerify false && \
     git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/yardstick ${YARDSTICK_REPO_DIR}  && \
-    git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO_DIR}
 
 # install yardstick + dependencies
 # explicitly pin pip version to avoid future issues like the ill-fated pip 8.0.0 release
index 7fbc4f0..6eba3a8 100644 (file)
@@ -18,7 +18,6 @@ ENV REPOS_DIR /home/opnfv/repos
 
 # Yardstick repo
 ENV YARDSTICK_REPO_DIR ${REPOS_DIR}/yardstick
-ENV RELENG_REPO_DIR ${REPOS_DIR}/releng
 RUN sed -i -e 's/^deb /deb [arch=amd64] /g;s/^deb-src /# deb-src /g' /etc/apt/sources.list && \
     echo "\n\
 deb [arch=arm64] http://ports.ubuntu.com/ubuntu-ports/ trusty main universe multiverse restricted \n\
@@ -63,7 +62,6 @@ RUN apt-get update && apt-get install -y \
 RUN mkdir -p ${REPOS_DIR} && \
     git config --global http.sslVerify false && \
     git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/yardstick ${YARDSTICK_REPO_DIR}  && \
-    git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO_DIR}
 
 # install yardstick + dependencies
 # explicitly pin pip version to avoid future issues like the ill-fated pip 8.0.0 release
index bd1418e..f63d1d8 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 ---
-- name: Fetch TENANT_ID
-  os_project_facts:
-    name: admin
-  environment: "{{ openrc }}"
-
-- name: Fetch TENANT_ID
-  set_fact:
-    os_tenant_id: "{{ openstack_projects[0].id }}"
-
 - name: Create storperf_admin-rc
   template:
     src: storperf_admin-rc.j2
index 410ab24..888e871 100644 (file)
@@ -1,7 +1,5 @@
 OS_AUTH_URL="{{ openrc.OS_AUTH_URL }}"
 OS_USERNAME="{{ openrc.OS_USERNAME|default('admin') }}"
 OS_PASSWORD="{{ openrc.OS_PASSWORD|default('console') }}"
-OS_TENANT_NAME="{{ openrc.OS_TENANT_NAME|default('admin') }}"
 OS_VOLUME_API_VERSION="{{ openrc.OS_VOLUME_API_VERSION|default('2') }}"
-OS_PROJECT_NAME="{{ openrc.OS_PROJECT_NAME|default(openrc.OS_TENANT_NAME) }}"
-OS_TENANT_ID="{{ os_tenant_id }}"
+OS_PROJECT_NAME="{{ openrc.OS_PROJECT_NAME|default(openrc.OS_TENANT_NAME)|default('admin') }}"
diff --git a/ansible/roles/infra_destroy_previous_configuration/tasks/delete_vm.yml b/ansible/roles/infra_destroy_previous_configuration/tasks/delete_vm.yml
new file mode 100644 (file)
index 0000000..5e43ee8
--- /dev/null
@@ -0,0 +1,29 @@
+# Copyright (c) 2017-2018 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+---
+# Ignore errors as VM can be destroyed without being undefined.
+- name: Destroy old VMs
+  virt:
+    command: destroy
+    name: "{{ node_item.hostname }}"
+  when: node_item.hostname in virt_vms.list_vms
+  ignore_errors: yes
+
+# Ignore errors as VM can be running while undefined
+- name: Undefine old VMs
+  virt:
+    command: undefine
+    name: "{{ node_item.hostname }}"
+  when: node_item.hostname in virt_vms.list_vms
+  ignore_errors: yes
index 5595cd5..e6c2c02 100644 (file)
   register: virt_vms
 
 - name: Destroy old VMs
-  virt:
-    command: destroy
-    name: "{{ item.hostname }}"
-  when: item.hostname in virt_vms.list_vms
-  with_items: "{{ infra_deploy_vars.nodes }}"
-
-- name: Undefine old VMs
-  virt:
-    command: undefine
-    name: "{{ item.hostname }}"
-  when: item.hostname in virt_vms.list_vms
+  include_tasks: delete_vm.yml
+  extra_vars: "{{ virt_vms }}"
+  loop_control:
+    loop_var: node_item
   with_items: "{{ infra_deploy_vars.nodes }}"
 
 - name: Delete old networks
index 79ec195..32b8539 100644 (file)
@@ -21,7 +21,6 @@ OPENRC: "{{ opnfv_root }}/openrc"
 INSTALLERS: [apex, compass, fuel, joid]
 INSTALLER_TYPE: "{{ lookup('env', 'INSTALLER_TYPE') }}"
 YARDSTICK_REPO_DIR: "{{ lookup('env', 'YARDSTICK_REPO_DIR')|default('/home/opnfv/repos/yardstick', true) }}"
-RELENG_REPO_DIR: "{{ lookup('env', 'RELENG_REPO_DIR')|default('/home/opnfv/repos/releng', true) }}"
 storperf_rc: "{{ opnfv_root }}/storperf_admin-rc"
 
 DISPATCHER_TYPES:
index 7c831fd..75c981a 100644 (file)
@@ -22,6 +22,8 @@ import collections
 from six.moves import configparser
 from oslo_serialization import jsonutils
 from docker import Client
+from docker.errors import APIError
+from requests.exceptions import HTTPError
 
 from api.database.v1.handlers import AsyncTaskHandler
 from api.utils import influx
@@ -44,7 +46,7 @@ class V1Env(ApiResource):
     def post(self):
         return self._dispatch_post()
 
-    def create_grafana(self, args):
+    def create_grafana(self, *args):
         task_id = str(uuid.uuid4())
 
         thread = threading.Thread(target=self._create_grafana, args=(task_id,))
@@ -82,7 +84,7 @@ class V1Env(ApiResource):
 
             self._update_task_status(task_id)
             LOG.info('Finished')
-        except Exception as e:
+        except (APIError, HTTPError) as e:
             self._update_task_error(task_id, str(e))
             LOG.exception('Create grafana failed')
 
@@ -117,7 +119,7 @@ class V1Env(ApiResource):
             "isDefault": True,
         }
         try:
-            HttpClient().post(url, data, timeout=10)
+            HttpClient().post(url, data, timeout=60)
         except Exception:
             LOG.exception('Create datasources failed')
             raise
@@ -145,7 +147,7 @@ class V1Env(ApiResource):
         return any(t in a['RepoTags'][0]
                    for a in client.images() if a['RepoTags'])
 
-    def create_influxdb(self, args):
+    def create_influxdb(self, *args):
         task_id = str(uuid.uuid4())
 
         thread = threading.Thread(target=self._create_influxdb, args=(task_id,))
@@ -185,7 +187,7 @@ class V1Env(ApiResource):
             self._update_task_status(task_id)
 
             LOG.info('Finished')
-        except Exception as e:
+        except APIError as e:
             self._update_task_error(task_id, str(e))
             LOG.exception('Creating influxdb failed')
 
@@ -217,7 +219,7 @@ class V1Env(ApiResource):
                                consts.INFLUXDB_DB_NAME)
             client.create_database(consts.INFLUXDB_DB_NAME)
             LOG.info('Success to config influxDB')
-        except Exception:
+        except HTTPError:
             LOG.exception('Config influxdb failed')
 
     def _change_output_to_influxdb(self, ip):
@@ -236,7 +238,7 @@ class V1Env(ApiResource):
         with open(consts.CONF_FILE, 'w') as f:
             parser.write(f)
 
-    def prepare_env(self, args):
+    def prepare_env(self, *args):
         task_id = str(uuid.uuid4())
 
         thread = threading.Thread(target=self._prepare_env_daemon,
@@ -287,7 +289,7 @@ class V1Env(ApiResource):
 
             self._update_task_status(task_id)
             LOG.info('Finished')
-        except Exception as e:
+        except (subprocess.CalledProcessError, OSError) as e:
             self._update_task_error(task_id, str(e))
             LOG.exception('Prepare env failed')
 
@@ -373,7 +375,7 @@ class V1Env(ApiResource):
         LOG.info('Source openrc: Sourcing')
         try:
             self._source_file(consts.OPENRC)
-        except Exception as e:
+        except subprocess.CalledProcessError as e:
             LOG.exception('Failed to source openrc')
             return result_handler(consts.API_ERROR, str(e))
         LOG.info('Source openrc: Done')
diff --git a/dashboard/Prox_BM_L2FWD-4Port_MultiSize-1518452496550.json b/dashboard/Prox_BM_L2FWD-4Port_MultiSize-1518452496550.json
new file mode 100644 (file)
index 0000000..3c78ab1
--- /dev/null
@@ -0,0 +1,5817 @@
+{
+  "__inputs": [
+    {
+      "name": "DS_YARDSTICK",
+      "label": "yardstick",
+      "description": "",
+      "type": "datasource",
+      "pluginId": "influxdb",
+      "pluginName": "InfluxDB"
+    }
+  ],
+  "__requires": [
+    {
+      "type": "grafana",
+      "id": "grafana",
+      "name": "Grafana",
+      "version": "4.4.3"
+    },
+    {
+      "type": "panel",
+      "id": "graph",
+      "name": "Graph",
+      "version": ""
+    },
+    {
+      "type": "datasource",
+      "id": "influxdb",
+      "name": "InfluxDB",
+      "version": "1.0.0"
+    },
+    {
+      "type": "panel",
+      "id": "singlestat",
+      "name": "Singlestat",
+      "version": ""
+    },
+    {
+      "type": "panel",
+      "id": "text",
+      "name": "Text",
+      "version": ""
+    }
+  ],
+  "annotations": {
+    "list": []
+  },
+  "editable": true,
+  "gnetId": null,
+  "graphTooltip": 0,
+  "hideControls": false,
+  "id": null,
+  "links": [],
+  "refresh": false,
+  "rows": [
+    {
+      "collapse": false,
+      "height": "100px",
+      "panels": [
+        {
+          "content": "<h5 style=\"font-family:Verdana\"> <a style=\"color:#31A7D3\"><a style=\"font: 32px '#31A7D3'\"><center>OPNFV_Yardstick_NSB_PROX_BM_L2FWD_4Port_Test</center> </a></h5>\n<center>\n<p>The application does Port forwarding without touching packets. It will take packets in from one port and forward them unmodified to another port </p>\n<p>The KPI is the number of packets per second for a specified packet size with an accepted minimal packet loss </p>\n</center>",
+          "editable": true,
+          "error": false,
+          "id": 3,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "title": "",
+          "type": "text"
+        },
+        {
+          "content": "<h5 style=\"font-family:Verdana\"> <a style=\"color:#31A7D3\"><a style=\"font: 22px '#31A7D3'\"><center>Throughput</center> </a></h5>\n",
+          "editable": true,
+          "error": false,
+          "height": "40",
+          "id": 7,
+          "links": [],
+          "minSpan": 12,
+          "mode": "html",
+          "span": 12,
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": "300px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {},
+          "id": 6,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "sortDesc": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 12,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "Cumulative Packets Sents",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.xe0.out_packets\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE $timeFilter GROUP BY time($interval) fill(null)",
+              "rawQuery": false,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "vnf__0.packets_fwd"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(216, 200, 27, 0.27)",
+              "op": "gt",
+              "value": 2
+            },
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(234, 112, 112, 0.22)",
+              "op": "gt",
+              "value": 2
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Cumulative Load Sent by Generator",
+          "tooltip": {
+            "msResolution": true,
+            "shared": true,
+            "sort": 0,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "Packets Per Second",
+              "logBase": 1,
+              "max": null,
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {},
+          "id": 9,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "TG xe-0 in packets",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.xe0.in_packets"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "TG xe-1 in packets",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.xe1.in_packets"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "TG xe-2 in packets",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "C",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.xe2.in_packets"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "TG xe-3 in packets",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "D",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.xe3.in_packets"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(216, 200, 27, 0.27)",
+              "op": "gt",
+              "value": 2
+            },
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(234, 112, 112, 0.22)",
+              "op": "gt",
+              "value": 2
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Load Received by Generator",
+          "tooltip": {
+            "msResolution": true,
+            "shared": true,
+            "sort": 0,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "Packets Per Second",
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {},
+          "id": 43,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "sortDesc": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "TG xe-0 Out packets",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.xe0.out_packets"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "TG xe-1 Out packets",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.xe1.out_packets"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "TG xe-2 Out packets",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "C",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.xe2.out_packets"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "TG xe-3 Out packets",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "D",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.xe3.out_packets"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(216, 200, 27, 0.27)",
+              "op": "gt",
+              "value": 2
+            },
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(234, 112, 112, 0.22)",
+              "op": "gt",
+              "value": 2
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Load Sent by Generator",
+          "tooltip": {
+            "msResolution": true,
+            "shared": true,
+            "sort": 0,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "Packets Per Second",
+              "logBase": 1,
+              "max": null,
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "New row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": "300px",
+      "panels": [
+        {
+          "content": "<h5 style=\"font-family:Verdana\"> <a style=\"color:#31A7D3\"><a style=\"font: 22px '#31A7D3'\"><center>Prox L2Fwd Traffic Gen stats</center> </a></h5>\n",
+          "editable": true,
+          "error": false,
+          "height": "40",
+          "id": 8,
+          "links": [],
+          "minSpan": 12,
+          "mode": "html",
+          "span": 12,
+          "title": "",
+          "type": "text"
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {},
+          "height": "300",
+          "id": 4,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "rightSide": false,
+            "show": true,
+            "sortDesc": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 1,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "SUT Packets Received",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "previous"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "C",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "vnf__0.curr_packets_in"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(216, 200, 27, 0.27)",
+              "op": "gt",
+              "value": 2
+            },
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(234, 112, 112, 0.22)",
+              "op": "gt",
+              "value": 2
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "SUT Stats - Load Received By SUT",
+          "tooltip": {
+            "msResolution": true,
+            "shared": true,
+            "sort": 0,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "Packets per Second",
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {},
+          "height": "300",
+          "id": 39,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 1,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "SUT Packets Sent",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "previous"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "vnf__0.curr_packets_in"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(216, 200, 27, 0.27)",
+              "op": "gt",
+              "value": 2
+            },
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(234, 112, 112, 0.22)",
+              "op": "gt",
+              "value": 2
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "SUT Stats - Load Forwarded By SUT",
+          "tooltip": {
+            "msResolution": true,
+            "shared": true,
+            "sort": 0,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "Packets per Second",
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "New row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {},
+          "id": 2,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "Load Requested by Generator",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.TxThroughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "Rx Throughput",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "hide": true,
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.RxThroughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(216, 200, 27, 0.27)",
+              "op": "gt",
+              "value": 2
+            },
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(234, 112, 112, 0.22)",
+              "op": "gt",
+              "value": 2
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Theoretical Throughput",
+          "tooltip": {
+            "msResolution": true,
+            "shared": true,
+            "sort": 0,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "Packets Per Second",
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {},
+          "id": 5,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "Packet Size",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.PktSize"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(216, 200, 27, 0.27)",
+              "op": "gt",
+              "value": 2
+            },
+            {
+              "colorMode": "custom",
+              "fill": true,
+              "fillColor": "rgba(234, 112, 112, 0.22)",
+              "op": "gt",
+              "value": 2
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Packet size",
+          "tooltip": {
+            "msResolution": true,
+            "shared": true,
+            "sort": 0,
+            "value_type": "cumulative"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "bytes",
+              "label": "Packet Size",
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "New row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "fill": 1,
+          "id": 10,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "sortDesc": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "SUCCESS Tx Total",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Success_tx_total"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "SUCCESS Rx Total",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Success_rx_total"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "SUCCESS ALLOWABLE LOST PACKETS",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "C",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Success_can_be_lost"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "SUCCESS CRITERIA: TX Total = Rx Total + Tolerated Loss",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "Packets Per Second",
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "35",
+          "id": 12,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 3,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.duration"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": "",
+          "title": "Test Interval",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30",
+          "id": 11,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 3,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "alias": "Test Duration",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.test_duration"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": "",
+          "title": "Test Duration",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30",
+          "id": 13,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 3,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "alias": "Test Precision",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.test_precision"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": "",
+          "title": "Test Precision",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30",
+          "id": 14,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 3,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "alias": "Tolerated Loss",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.tolerated_loss"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": "",
+          "title": "Tolerated Loss",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "New row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": "30",
+      "panels": [
+        {
+          "content": "<center>Packet size</center>",
+          "height": "30px",
+          "id": 15,
+          "links": [],
+          "mode": "html",
+          "span": 4,
+          "title": "",
+          "type": "text"
+        },
+        {
+          "content": "<center>Theoretical Max Throughput (Million Packets Per Second)</center>",
+          "height": "30px",
+          "id": 16,
+          "links": [],
+          "mode": "html",
+          "span": 4,
+          "title": "",
+          "type": "text"
+        },
+        {
+          "content": "<center>Max Actual Throughput (Million Packets Per Second)</center>",
+          "height": "30px",
+          "id": 17,
+          "links": [],
+          "mode": "html",
+          "span": 4,
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": "30px",
+      "panels": [
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 0,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 18,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "alias": "Theoretical Max Throughput (Mpps)",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 64 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_pktSize"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "64"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 19,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "alias": "Max Throughput (Mpps)",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 64 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_theor_max_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "64"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 20,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 64 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_Actual_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "64"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h3"
+    },
+    {
+      "collapse": false,
+      "height": "30",
+      "panels": [
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": null,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 21,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 128 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_pktSize"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "128"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 22,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 128 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_theor_max_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "128"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 23,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 128 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_Actual_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "128"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": "30px",
+      "panels": [
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 0,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "id": 24,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 256 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_pktSize"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSiuze",
+                  "operator": "=",
+                  "value": "256"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "id": 25,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 256 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_theor_max_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "256"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "id": 26,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 256 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_Actual_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "256"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h3"
+    },
+    {
+      "collapse": false,
+      "height": -82,
+      "panels": [
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 27,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 512 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_pktSize"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "512"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 28,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 512 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_theor_max_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "512"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 29,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 512 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_Actual_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "512"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h3"
+    },
+    {
+      "collapse": false,
+      "height": "30px",
+      "panels": [
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "id": 30,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1024 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_pktSize"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "1024"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "id": 31,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1024 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_theor_max_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "1024"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "id": 32,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1024 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_Actual_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "1024"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h3"
+    },
+    {
+      "collapse": false,
+      "height": "30px",
+      "panels": [
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 33,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1280 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_pktSize"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "1280"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 34,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1280 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_theor_max_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "1280"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "height": "30px",
+          "id": 35,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1280 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.Result_Actual_throughput"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": [
+                {
+                  "key": "tg__0.Result_pktSize",
+                  "operator": "=",
+                  "value": "1280"
+                }
+              ]
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": "30",
+      "panels": [
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "id": 44,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1518 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "value"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "id": 45,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1518 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "value"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        },
+        {
+          "cacheTimeout": null,
+          "colorBackground": false,
+          "colorValue": false,
+          "colors": [
+            "rgba(245, 54, 54, 0.9)",
+            "rgba(237, 129, 40, 0.89)",
+            "rgba(50, 172, 45, 0.97)"
+          ],
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 4,
+          "format": "none",
+          "gauge": {
+            "maxValue": 100,
+            "minValue": 0,
+            "show": false,
+            "thresholdLabels": false,
+            "thresholdMarkers": true
+          },
+          "id": 46,
+          "interval": null,
+          "links": [],
+          "mappingType": 1,
+          "mappingTypes": [
+            {
+              "name": "value to text",
+              "value": 1
+            },
+            {
+              "name": "range to text",
+              "value": 2
+            }
+          ],
+          "maxDataPoints": 100,
+          "nullPointMode": "connected",
+          "nullText": null,
+          "postfix": "",
+          "postfixFontSize": "50%",
+          "prefix": "",
+          "prefixFontSize": "50%",
+          "rangeMaps": [
+            {
+              "from": "null",
+              "text": "N/A",
+              "to": "null"
+            }
+          ],
+          "span": 4,
+          "sparkline": {
+            "fillColor": "rgba(31, 118, 189, 0.18)",
+            "full": false,
+            "lineColor": "rgb(31, 120, 193)",
+            "show": false
+          },
+          "tableColumn": "",
+          "targets": [
+            {
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "orderByTime": "ASC",
+              "policy": "default",
+              "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1518 AND $timeFilter GROUP BY time($__interval) fill(null)",
+              "rawQuery": true,
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "value"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": "",
+          "title": "",
+          "type": "singlestat",
+          "valueFontSize": "80%",
+          "valueMaps": [
+            {
+              "op": "=",
+              "text": "N/A",
+              "value": "null"
+            }
+          ],
+          "valueName": "avg"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": "40px",
+      "panels": [
+        {
+          "content": "<h5 style=\"font-family:Verdana\"> <a style=\"color:#31A7D3\"><a style=\"font: 22px '#31A7D3'\"><center>Latency</center> </a></h5>",
+          "height": "40",
+          "id": 41,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": 250,
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "fill": 1,
+          "height": "300px",
+          "id": 47,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "sortDesc": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "xe0 Latency Avg",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.LatencyAvg.5"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "xe0 Latency Max",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.LatencyMax.5"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "xe0 Latency",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "usec",
+              "logBase": 1,
+              "max": "65000",
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": "0",
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "fill": 1,
+          "height": "300px",
+          "id": 48,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "xe1 Latency Avg",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.LatencyAvg.6"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "xe1 Latency Max",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.LatencyMax.6"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "xe1 Latency",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "usec",
+              "logBase": 1,
+              "max": "65000",
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "fill": 1,
+          "height": "300px",
+          "id": 49,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "xe2 Latency Avg",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.LatencyAvg.7"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "xe2 Latency Max",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.LatencyMax.7"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "xe2 Latency",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "usec",
+              "logBase": 1,
+              "max": "65000",
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "fill": 1,
+          "height": "300px",
+          "id": 50,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "xe3 Latency Avg",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.LatencyAvg.8"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            },
+            {
+              "alias": "xe3 Latency Max",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "null"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "A",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "tg__0.LatencyMax.8"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "mean"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "xe3 Latency",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "usec",
+              "logBase": 1,
+              "max": "65000",
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": null,
+              "show": true
+            }
+          ]
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": "40px",
+      "panels": [
+        {
+          "content": "<h5 style=\"font-family:Verdana\"> <a style=\"color:#31A7D3\"><a style=\"font: 22px '#31A7D3'\"><center>SUT CPU Utilization</center> </a></h5>",
+          "height": "40px",
+          "id": 51,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h6"
+    },
+    {
+      "collapse": false,
+      "height": 250,
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 5,
+          "fill": 1,
+          "height": "300px",
+          "id": 52,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "sortDesc": false,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "CPU 0 Utilization",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "vnf__0.collect_stats.core.cpu.0.percent-user"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "distinct"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "CPU 0 Utilization  - Master Core",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "% Utilization",
+              "logBase": 1,
+              "max": "100",
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": "0",
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 5,
+          "fill": 1,
+          "height": "300px",
+          "id": 53,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "sortDesc": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "CPU 1 Utilization  - L2FWD XE0 to XE1",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "vnf__0.collect_stats.core.cpu.1.percent-user"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "distinct"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "CPU 1 Utilization  - L2FWD XE0 to XE1",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "% Utilization",
+              "logBase": 1,
+              "max": "100",
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": "0",
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 5,
+          "fill": 1,
+          "height": "300px",
+          "id": 54,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "sortDesc": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "CPU 2 Utilization",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "vnf__0.collect_stats.core.cpu.2.percent-user"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "distinct"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "CPU 2 Utilization  - L2FWD XE1 to XE0",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "% Utilization",
+              "logBase": 1,
+              "max": "100",
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": "0",
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 5,
+          "fill": 1,
+          "height": "300px",
+          "id": 55,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "sortDesc": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "CPU 3 Utilization",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "vnf__0.collect_stats.core.cpu.3.percent-user"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "distinct"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "CPU 3 Utilization  - L2FWD XE2 to XE3",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "% Utilization",
+              "logBase": 1,
+              "max": "100",
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": "0",
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 5,
+          "fill": 1,
+          "height": "300px",
+          "id": 56,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "sortDesc": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "CPU 4 Utilization",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "vnf__0.collect_stats.core.cpu.4.percent-user"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "distinct"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "CPU 4 Utilization  - L2FWD XE3 to XE2",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "% Utilization",
+              "logBase": 1,
+              "max": "100",
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": "0",
+              "show": true
+            }
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "dashLength": 10,
+          "dashes": false,
+          "datasource": "${DS_YARDSTICK}",
+          "decimals": 5,
+          "fill": 1,
+          "height": "300px",
+          "id": 57,
+          "legend": {
+            "alignAsTable": true,
+            "avg": true,
+            "current": false,
+            "max": true,
+            "min": true,
+            "show": true,
+            "sortDesc": true,
+            "total": false,
+            "values": true
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "seriesOverrides": [],
+          "spaceLength": 10,
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "alias": "CPU 5 Utilization",
+              "dsType": "influxdb",
+              "groupBy": [
+                {
+                  "params": [
+                    "$__interval"
+                  ],
+                  "type": "time"
+                },
+                {
+                  "params": [
+                    "none"
+                  ],
+                  "type": "fill"
+                }
+              ],
+              "measurement": "tc_prox_baremetal_l2fwd-4",
+              "orderByTime": "ASC",
+              "policy": "default",
+              "refId": "B",
+              "resultFormat": "time_series",
+              "select": [
+                [
+                  {
+                    "params": [
+                      "vnf__0.collect_stats.core.cpu.5.percent-user"
+                    ],
+                    "type": "field"
+                  },
+                  {
+                    "params": [],
+                    "type": "distinct"
+                  }
+                ]
+              ],
+              "tags": []
+            }
+          ],
+          "thresholds": [],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "CPU 5 Utilization",
+          "tooltip": {
+            "shared": true,
+            "sort": 0,
+            "value_type": "individual"
+          },
+          "type": "graph",
+          "xaxis": {
+            "buckets": null,
+            "mode": "time",
+            "name": null,
+            "show": true,
+            "values": []
+          },
+          "yaxes": [
+            {
+              "format": "short",
+              "label": "% Utilization",
+              "logBase": 1,
+              "max": "100",
+              "min": "0",
+              "show": true
+            },
+            {
+              "format": "short",
+              "label": null,
+              "logBase": 1,
+              "max": null,
+              "min": "0",
+              "show": true
+            }
+          ]
+        }
+      ],
+      "repeat": null,
+      "repeatIteration": null,
+      "repeatRowId": null,
+      "showTitle": false,
+      "title": "Dashboard Row",
+      "titleSize": "h6"
+    }
+  ],
+  "schemaVersion": 14,
+  "style": "dark",
+  "tags": [
+    "yardstick",
+    "NSB",
+    "Prox",
+    "L2fwd",
+    "4Port",
+    "BM"
+  ],
+  "templating": {
+    "list": []
+  },
+  "time": {
+    "from": "2018-02-12T15:17:27.733Z",
+    "to": "2018-02-12T16:44:28.270Z"
+  },
+  "timepicker": {
+    "refresh_intervals": [
+      "5s",
+      "10s",
+      "30s",
+      "1m",
+      "5m",
+      "15m",
+      "30m",
+      "1h",
+      "2h",
+      "1d"
+    ],
+    "time_options": [
+      "5m",
+      "15m",
+      "1h",
+      "6h",
+      "12h",
+      "24h",
+      "2d",
+      "7d",
+      "30d"
+    ]
+  },
+  "timezone": "browser",
+  "title": "Prox_BM_L2FWD-4Port_MultiSize",
+  "version": 29
+}
\ No newline at end of file
index ddd8dfa..959315c 100644 (file)
@@ -21,18 +21,16 @@ ENV REPOS_DIR="/home/opnfv/repos" \
 
 # Yardstick repo
 ENV YARDSTICK_REPO_DIR="${REPOS_DIR}/yardstick" \
-    RELENG_REPO_DIR="${REPOS_DIR}/releng" \
     STORPERF_REPO_DIR="${REPOS_DIR}/storperf"
 
 RUN apt-get update && apt-get install -y git python-setuptools python-pip && apt-get -y autoremove && apt-get clean
 RUN easy_install -U setuptools==30.0.0
-RUN pip install appdirs==1.4.0 pyopenssl==17.5.0 python-openstackclient==3.11.0
+RUN pip install appdirs==1.4.0 pyopenssl==17.5.0 python-openstackclient==3.11.0 python-heatclient==1.11.0
 
 RUN mkdir -p ${REPOS_DIR}
 
 RUN git config --global http.sslVerify false
 RUN git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/yardstick ${YARDSTICK_REPO_DIR}
-RUN git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO_DIR}
 RUN git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/storperf ${STORPERF_REPO_DIR}
 
 WORKDIR ${YARDSTICK_REPO_DIR}
index ca93351..24e3952 100644 (file)
@@ -39,7 +39,7 @@ index 2ee5b4c..23e5ea5 100644
 +RUN apt-get update && apt-get install -y git python-setuptools python-pip && apt-get -y autoremove && \
 +    apt-get install -y libssl-dev && apt-get -y install libffi-dev && apt-get clean
  RUN easy_install -U setuptools==30.0.0
- RUN pip install appdirs==1.4.0 pyopenssl==17.5.0
+ RUN pip install appdirs==1.4.0 pyopenssl==17.5.0 python-openstackclient==3.11.0
 
 @@ -43,8 +44,8 @@ RUN echo "daemon off;" >> /etc/nginx/nginx.conf
 
diff --git a/docs/testing/developer/devguide/devguide_nsb_prox.rst b/docs/testing/developer/devguide/devguide_nsb_prox.rst
new file mode 100755 (executable)
index 0000000..fc533b2
--- /dev/null
@@ -0,0 +1,1226 @@
+Introduction
+=============
+
+This document describes the steps to create a new NSB PROX test based on
+existing PROX functionalities. NSB PROX provides a simple approximation
+of an operation and can be used to develop best practices and TCO models
+for Telco customers, investigate the impact of new Intel compute,
+network and storage technologies, characterize performance, and develop
+optimal system architectures and configurations.
+
+.. contents::
+
+Prerequisites
+=============
+
+In order to integrate PROX tests into NSB, the following prerequisites are required.
+
+.. _`dpdk wiki page`: http://dpdk.org/
+.. _`yardstick wiki page`: https://wiki.opnfv.org/display/yardstick/
+.. _`Prox documentation`: https://01.org/intel-data-plane-performance-demonstrators/documentation/prox-documentation
+.. _`openstack wiki page`: https://wiki.openstack.org/wiki/Main_Page
+.. _`grafana getting started`: http://docs.grafana.org/guides/gettingstarted/
+.. _`opnfv grafana dashboard`: https://wiki.opnfv.org/display/yardstick/How+to+work+with+grafana+dashboard
+.. _`Prox command line`: https://01.org/intel-data-plane-performance-demonstrators/documentation/prox-documentation#Command_line_options
+.. _`grafana deployment`: https://wiki.opnfv.org/display/yardstick/How+to+deploy+InfluxDB+and+Grafana+locally
+.. _`Prox options`: https://01.org/intel-data-plane-performance-demonstrators/documentation/prox-documentation#.5Beal_options.5D
+.. _`NSB Installation`: http://artifacts.opnfv.org/yardstick/docs/userguide/index.html#document-09-installation
+
+* A working knowledge of Yardstick. See `yardstick wiki page`_.
+* A working knowledge of PROX. See `Prox documentation`_.
+* Knowledge of Openstack. See `openstack wiki page`_.
+* Knowledge of how to use Grafana. See `grafana getting started`_.
+* How to Deploy InfluxDB & Grafana. See `grafana deployment`_.
+* How to use Grafana in OPNFV/Yardstick. See `opnfv grafana dashboard`_.
+* How to install NSB. See `NSB Installation`_
+
+Sample Prox Test Hardware Architecture
+======================================
+
+The following is a diagram of a sample NSB PROX Hardware Architecture
+for both NSB PROX on Bare metal and on Openstack.
+
+In this example when running yardstick on baremetal, yardstick will
+run on the deployment node, the generator will run on the deployment node
+and the SUT (System Under Test) will run on the Controller Node.
+
+
+.. image:: images/PROX_Hardware_Arch.png
+   :width: 800px
+   :alt: Sample NSB PROX Hard Architecture
+
+Prox Test Architecture
+======================
+
+In order to create a new test, one must understand the architecture of
+the test.
+
+A NSB Prox test architecture is composed of:
+
+* A traffic generator. This provides blocks of data on 1 or more ports
+  to the SUT.
+  The traffic generator also consumes the result packets from the system
+  under test.
+* A SUT consumes the packets generated by the packet
+  generator, and applies one or more tasks to the packets and return the
+  modified packets to the traffic generator.
+
+  This is an example of a sample NSB PROX test architecture.
+
+.. image:: images/PROX_Software_Arch.png
+   :width: 800px
+   :alt: NSB PROX test Architecture
+
+This diagram is of a sample NSB PROX test application.
+
+* Traffic Generator
+
+  * Generator Tasks - Composed of 1 or more tasks (It is possible to
+    have multiple tasks sending packets to same port No. See Tasks Ai and Aii
+    plus Di and Dii)
+
+    * Task Ai - Generates Packets on Port 0 of Traffic Generator
+      and send to Port 0 of SUT Port 0
+    * Task Aii - Generates Packets on Port 0 of Traffic Generator
+      and send to Port 0 of SUT Port 0
+    * Task B - Generates Packets on Port 1 of Traffic Generator
+      and send to Port 1 of SUT Port 1
+    * Task C - Generates Packets on Port 2 of Traffic Generator
+      and send to Port 2 of SUT Port 2
+    * Task Di - Generates Packets on Port 3 of Traffic Generator
+      and send to Port 3 of SUT Port 3
+    * Task Dii - Generates Packets on Port 0 of Traffic Generator
+      and send to Port 0 of SUT Port 0
+
+  * Verifier Tasks - Composed of 1 or more tasks which receives
+    packets from SUT
+
+    * Task E - Receives packets on Port 0 of Traffic Generator sent
+      from Port 0 of SUT Port 0
+    * Task F - Receives packets on Port 1 of Traffic Generator sent
+      from Port 1 of SUT Port 1
+    * Task G - Receives packets on Port 2 of Traffic Generator sent
+      from Port 2 of SUT Port 2
+    * Task H - Receives packets on Port 3 of Traffic Generator sent
+      from Port 3 of SUT Port 3
+
+* SUT
+
+  * Receiver Tasks - Receives packets from generator - Composed of 1 or
+    more tasks which consume the packets sent from Traffic Generator
+
+    * Task A - Receives Packets on Port 0 of System-Under-Test from
+      Traffic Generator Port 0, and forwards packets to Task E
+    * Task B - Receives Packets on Port 1 of System-Under-Test from
+      Traffic Generator Port 1, and forwards packets to Task E
+    * Task C - Receives Packets on Port 2 of System-Under-Test from
+      Traffic Generator Port 2, and forwards packets to Task E
+    * Task D - Receives Packets on Port 3 of System-Under-Test from
+      Traffic Generator Port 3, and forwards packets to Task E
+
+  * Processing Tasks - Composed of multiple tasks in series which carry
+    out some processing on received packets before forwarding to the
+    task.
+
+    * Task E - This receives packets from the Receiver Tasks,
+      carries out some operation on the data and forwards to result
+      packets to the next task in the sequence - Task F
+    * Task F - This receives packets from the previous Task - Task
+      E, carries out some operation on the data and forwards to result
+      packets to the next task in the sequence - Task G
+    * Task G - This receives packets from the previous Task - Task F
+      and distributes the result packages to the Transmitter tasks
+
+  * Transmitter Tasks - Composed of 1 or more tasks which send the
+    processed packets back to the Traffic Generator
+
+    * Task H - Receives Packets from Task G of System-Under-Test and
+      sends packets to Traffic Generator Port 0
+    * Task I - Receives Packets from Task G of System-Under-Test and
+      sends packets to Traffic Generator Port 1
+    * Task J - Receives Packets from Task G of System-Under-Test and
+      sends packets to Traffic Generator Port 2
+    * Task K - Receives Packets From Task G of System-Under-Test and
+      sends packets to Traffic Generator Port 3
+
+NSB Prox Test
+=============
+
+A NSB Prox test is composed of the following components :-
+
+* Test Description File. Usually called
+  ``tc_prox_<context>_<test>-<ports>.yaml`` where
+
+  * <context> is either ``baremetal`` or ``heat_context``
+  * <test> is a one or two word description of the test.
+  * <ports> is the number of ports used
+
+  Example tests ``tc_prox_baremetal_l2fwd-2.yaml`` or
+  ``tc_prox_heat_context_vpe-4.yaml``. This file describes the components
+  of the test, in the case of openstack the network description and
+  server descriptions, in the case of baremetal the hardware
+  description location. It also contains the name of the Traffic Generator, the SUT config file
+  and the traffic profile description, all described below. See nsb-test-description-label_
+
+* Traffic Profile file. Example ``prox_binsearch.yaml``. This describes the packet size, tolerated
+  loss, initial line rate to start traffic at, test interval etc See nsb-traffic-profile-label_
+
+* Traffic Generator Config file. Usually called ``gen_<test>-<ports>.cfg``.
+
+  This describes the activity of the traffic generator
+
+  * What each core of the traffic generator does,
+  * The packet of data sent by a core on a port of the traffic generator
+    to the system under test
+  * What core is used to wait on what port for data from the system
+    under test.
+
+  Example traffic generator config file  ``gen_l2fwd-4.cfg``
+  See nsb-traffic-generator-label_
+
+* SUT Config file. Usually called ``handle_<test>-<ports>.cfg``.
+
+  This describes the activity of the SUTs
+
+  * What each core of the SUT does,
+  * What cores receives packets from what ports
+  * What cores perform operations on the packets and pass the packets onto
+    another core
+  * What cores receives packets from what cores and transmit the packets on
+    the ports to the Traffic Verifier tasks of the Traffic Generator.
+
+  Example traffic generator config file  ``handle_l2fwd-4.cfg``
+  See nsb-sut-generator-label_
+
+* NSB PROX Baremetal Configuration file. Usually called
+  ``prox-baremetal-<ports>.yaml``
+
+  * <ports> is the number of ports used
+
+  This is required for baremetal only.  This describes hardware, NICs,
+  IP addresses, Network drivers, usernames and passwords.
+  See baremetal-config-label_
+
+* Grafana Dashboard. Usually called
+  ``Prox_<context>_<test>-<port>-<DateAndTime>.json`` where
+
+  * <context> Is either ``BM`` or ``heat``
+  * <test> Is a one or two word description of the test.
+  * <port> is the number of ports used express as ``2Port`` or ``4Port``
+  * <DateAndTime> is the Date and Time expressed as a string.
+
+  Example grafana dashboard ``Prox_BM_L2FWD-4Port-1507804504588.json``
+
+Other files may be required. These are test specific files and will be
+covered later.
+
+.. _nsb-test-description-label:
+
+**Test Description File**
+
+Here we will discuss the test description for both
+baremetal and openstack.
+
+*Test Description File for Baremetal*
+-------------------------------------
+
+This section will introduce the meaning of the Test case description
+file. We will use ``tc_prox_baremetal_l2fwd-2.yaml`` as an example to
+show you how to understand the test description file.
+
+.. image:: images/PROX_Test_BM_Script.png
+   :width: 800px
+   :alt: NSB PROX Test Description File
+
+Now let's examine the components of the file in detail
+
+1. ``traffic_profile`` - This specifies the traffic profile for the
+   test. In this case ``prox_binsearch.yaml`` is used. See nsb-traffic-profile-label_
+
+2. ``topology`` - This is either ``prox-tg-topology-1.yaml`` or
+    ``prox-tg-topology-2.yaml`` or ``prox-tg-topology-4.yaml``
+    depending on number of ports required.
+
+3. ``nodes`` - This names the Traffic Generator and the System
+   under Test. Does not need to change.
+
+4. ``prox_path`` - Location of the Prox executable on the traffic
+   generator (Either baremetal or Openstack Virtual Machine)
+
+5. ``prox_config`` - This is the ``SUT Config File``.
+   In this case it is ``handle_l2fwd-2.cfg``
+
+   A number of additional parameters can be added. This example
+   is taken from VPE::
+
+    options:
+      vnf__0:
+        prox_path: /opt/nsb_bin/prox
+        prox_config: ``configs/handle_vpe-4.cfg``
+        prox_args:
+          ``-t``: ````
+        prox_files:
+          ``configs/vpe_ipv4.lua`` : ````
+          ``configs/vpe_dscp.lua`` : ````
+          ``configs/vpe_cpe_table.lua`` : ````
+          ``configs/vpe_user_table.lua`` : ````
+          ``configs/vpe_rules.lua`` : ````
+        prox_generate_parameter: True
+
+   ``prox_files`` - this specifies that a number of additional files
+   need to be provided for the test to run correctly. These files
+   could provide routing information, hashing information or a
+   hashing algorithm and ip/mac information.
+
+   ``prox_generate_parameter`` - this specifies that the NSB application
+   is required to provide information to the nsb Prox in the form
+   of a file called ``parameters.lua``, which contains information
+   retrieved from either the hardware or the openstack configuration.
+
+6. ``prox_args`` - this specifies the command line arguments to start
+   prox. See `prox command line`_.
+
+7. ``prox_config`` - This specifies the Traffic Generator config file.
+
+8. ``runner`` - This is set to ``Duration`` - This specified that the
+   test run for a set duration. Other runner types are available
+   but it is recommend to use ``Duration``
+
+9. ``context`` - This is ``context`` for a 2 port Baremetal configuration.
+   If a 4 port configuration was required then file
+   ``prox-baremetal-4.yaml`` would be used. This is the NSB Prox
+   baremetal configuration file.
+
+.. _nsb-traffic-profile-label:
+
+*Traffic Profile file*
+----------------------
+
+This describes the details of the traffic flow. In this case ``prox_binsearch.yaml`` is used.
+
+.. image:: images/PROX_Traffic_profile.png
+   :width: 800px
+   :alt: NSB PROX Traffic Profile
+
+
+1. ``name`` - The name of the traffic profile. This name should match the name specified in the
+   ``traffic_profile`` field in the Test Description File.
+
+2. ``traffic_type`` - This specifies the type of traffic pattern generated, This name matches
+   class name of the traffic generator See::
+
+      network_services/traffic_profile/prox_binsearch.py class ProxBinSearchProfile(ProxProfile)
+
+   In this case it lowers the traffic rate until the number of packets
+   sent is equal to the number of packets received (plus a
+   tolerated loss). Once it achieves this it increases the traffic
+   rate in order to find the highest rate with no traffic loss.
+
+   Custom traffic types can be created by creating a new traffic profile class.
+
+3. ``tolerated_loss`` - This specifies the percentage of packets that can be lost/dropped before
+   we declare success or failure. Success is Transmitted-Packets from Traffic Generator is greater than or equal to
+   packets received by Traffic Generator plus tolerated loss.
+
+4. ``test_precision`` - This specifies the precision of the test results. For some tests the success criteria
+   may never be achieved because the test precision may be greater than the successful throughput. For finer
+   results increase the precision by making this value smaller.
+
+5. ``packet_sizes`` - This specifies the range of packets size this test is run for.
+
+6. ``duration`` - This specifies the sample duration that the test uses to check for success or failure.
+
+7. ``lower_bound`` - This specifies the test initial lower bound sample rate. On success this value is increased.
+
+8. ``upper_bound`` - This specifies the test initial upper bound sample rate. On success this value is decreased.
+
+Other traffic profiles exist eg prox_ACL.yaml which does not
+compare what is received with what is transmitted. It just
+sends packet at max rate.
+
+It is possible to create custom traffic profiles by
+creating a new file in the same folder as prox_binsearch.yaml.
+See this prox_vpe.yaml as an example::
+
+     schema: ``nsb:traffic_profile:0.1``
+
+     name:            prox_vpe
+     description:     Prox vPE traffic profile
+
+     traffic_profile:
+       traffic_type: ProxBinSearchProfile
+       tolerated_loss: 100.0 #0.001
+       test_precision: 0.01
+     # The minimum size of the Ethernet frame for the vPE test is 68 bytes.
+       packet_sizes: [68]
+       duration: 5
+       lower_bound: 0.0
+       upper_bound: 100.0
+
+*Test Description File for Openstack*
+-------------------------------------
+
+We will use ``tc_prox_heat_context_l2fwd-2.yaml`` as an example to show
+you how to understand the test description file.
+
+.. image:: images/PROX_Test_HEAT_Script.png
+   :width: 800px
+   :alt: NSB PROX Test Description File
+
+Now lets examine the components of the file in detail
+
+Sections 1 to 8 are exactly the same in Baremetal and in Heat. Section
+``9`` is replaced with sections A to F. Section 9 was for a baremetal
+configuration file. This has no place in a heat configuration.
+
+A. ``image`` - yardstick-samplevnfs. This is the name of the image
+   created during the installation of NSB. This is fixed.
+
+B. ``flavor`` - The flavor is created dynamically. However we could
+   use an already existing flavor if required. In that case the
+   flavor would be named::
+
+    flavor: yardstick-flavor
+
+C. ``extra_specs`` - This allows us to specify the number of
+   cores sockets and hyperthreading assigned to it. In this case
+   we have 1 socket with 10 cores and no hyperthreading enabled.
+
+D. ``placement_groups`` - default. Do not change for NSB PROX.
+
+E. ``servers`` - ``tg_0`` is the traffic generator and ``vnf_0``
+   is the system under test.
+
+F. ``networks`` - is composed of a management network labeled ``mgmt``
+   and one uplink network labeled ``uplink_0``  and one downlink
+   network labeled ``downlink_0`` for 2 ports. If this was a 4 port
+   configuration there would be 2 extra downlink ports. See this
+   example from a 4 port l2fwd test.::
+
+    networks:
+      mgmt:
+        cidr: '10.0.1.0/24'
+      uplink_0:
+        cidr: '10.0.2.0/24'
+        gateway_ip: 'null'
+        port_security_enabled: False
+        enable_dhcp: 'false'
+      downlink_0:
+        cidr: '10.0.3.0/24'
+        gateway_ip: 'null'
+        port_security_enabled: False
+        enable_dhcp: 'false'
+      downlink_1:
+        cidr: '10.0.4.0/24'
+        gateway_ip: 'null'
+        port_security_enabled: False
+        enable_dhcp: 'false'
+      downlink_2:
+        cidr: '10.0.5.0/24'
+        gateway_ip: 'null'
+        port_security_enabled: False
+        enable_dhcp: 'false'
+
+.. _nsb-traffic-generator-label:
+
+*Traffic Generator Config file*
+-------------------------------
+
+This section will describe the traffic generator config file.
+This is the same for both baremetal and heat. See this example
+of ``gen_l2fwd_multiflow-2.cfg`` to explain the options.
+
+.. image:: images/PROX_Gen_2port_cfg.png
+   :width: 1400px
+   :alt: NSB PROX Gen Config File
+
+The configuration file is divided into multiple sections, each
+of which is used to define some parameters and options.::
+
+  [eal options]
+  [variables]
+  [port 0]
+  [port 1]
+  [port .]
+  [port Z]
+  [defaults]
+  [global]
+  [core 0]
+  [core 1]
+  [core 2]
+  [core .]
+  [core Z]
+
+See `prox options`_ for details
+
+Now let's examine the components of the file in detail
+
+1. ``[eal options]`` - This specified the EAL (Environmental
+   Abstraction Layer) options. These are default values and
+   are not changed. See `dpdk wiki page`_.
+
+2. ``[variables]`` - This section contains variables, as
+   the name suggests. Variables for Core numbers, mac
+   addresses, ip addresses etc. They are assigned as a
+   ``key = value`` where the key is used in place of the value.
+
+   .. caution::
+     A special case for valuables with a value beginning with
+     ``@@``. These values are dynamically updated by the NSB
+     application at run time. Values like MAC address,
+     IP Address etc.
+
+3. ``[port 0]`` - This section describes the DPDK Port. The number
+   following the keyword ``port`` usually refers to the DPDK Port
+   Id, usually starting from ``0``. Because you can have multiple
+   ports this entry is usually repeated. E.g. for a 2 port setup
+   ``[port0]`` and ``[port 1]`` and for a 4 port setup ``[port 0]``,
+   ``[port 1]``, ``[port 2]`` and ``[port 3]``::
+
+      [port 0]
+      name=p0
+      mac=hardware
+      rx desc=2048
+      tx desc=2048
+      promiscuous=yes
+
+   a. In this example ``name = p0`` assigned the name ``p0`` to the
+      port. Any name can be assigned to a port.
+   b. ``mac=hardware`` sets the MAC address assigned by the hardware
+      to data from this port.
+   c. ``rx desc=2048`` sets the number of available descriptors to
+      allocate for receive packets. This can be changed and can
+      effect performance.
+   d. ``tx desc=2048`` sets the number of available descriptors to
+      allocate for transmit packets. This can be changed and can
+      effect performance.
+   e. ``promiscuous=yes`` this enables promiscuous mode for this port.
+
+4. ``[defaults]`` - Here default operations and settings can be over
+   written. In this example ``mempool size=4K`` the number of mbufs
+   per task is altered. Altering this value could effect
+   performance. See `prox options`_ for details.
+
+5. ``[global]`` - Here application wide setting are supported. Things
+   like application name, start time, duration and memory
+   configurations can be set here. In this example.::
+
+      [global]
+      start time=5
+      name=Basic Gen
+
+    a. ``start time=5`` Time in seconds after which average
+       stats will be started.
+    b. ``name=Basic Gen`` Name of the configuration.
+
+6. ``[core 0]`` - This core is designated the master core. Every
+   Prox application must have a master core. The master mode must
+   be assigned to exactly one task, running alone on one core.::
+
+    [core 0]
+    mode=master
+
+7. ``[core 1]`` - This describes the activity on core 1. Cores can
+   be configured by means of a set of [core #] sections, where
+   # represents either:
+
+   a. an absolute core number: e.g. on a 10-core, dual socket
+      system with hyper-threading,
+      cores are numbered from 0 to 39.
+
+   b. PROX allows a core to be identified by a core number, the
+      letter 's', and a socket number.
+
+      It is possible to write a baremetal and an openstack test which use
+      the same traffic generator config file and SUT config file.
+      In this case it is advisable not to use physical
+      core numbering.
+
+      However it is also possible to write NSB Prox tests that
+      have been optimized for a particular hardware configuration.
+      In this case it is advisable to use the core numbering.
+      It is up to the user to make sure that cores from
+      the right sockets are used (i.e. from the socket on which the NIC
+      is attached to), to ensure good performance (EPA).
+
+   Each core can be assigned with a set of tasks, each running
+   one of the implemented packet processing modes.::
+
+     [core 1]
+     name=p0
+     task=0
+     mode=gen
+     tx port=p0
+     bps=1250000000
+     ; Ethernet + IP + UDP
+     pkt inline=${sut_mac0} 70 00 00 00 00 01 08 00 45 00 00 1c 00 01 00 00 40 11 f7 7d 98 10 64 01 98 10 64 02 13 88 13 88 00 08 55 7b
+     ; src_ip: 152.16.100.0/8
+     random=0000XXX1
+     rand_offset=29
+     ; dst_ip: 152.16.100.0/8
+     random=0000XXX0
+     rand_offset=33
+     random=0001001110001XXX0001001110001XXX
+     rand_offset=34
+
+   a. ``name=p0`` - Name assigned to the core.
+   b. ``task=0`` - Each core can run a set of tasks. Starting with ``0``.
+      Task 1 can be defined later in this core or
+      can be defined in another ``[core 1]`` section with ``task=1``
+      later in configuration file. Sometimes running
+      multiple task related to the same packet on the same physical
+      core improves performance, however sometimes it
+      is optimal to move task to a separate core. This is best
+      decided by checking performance.
+   c. ``mode=gen`` - Specifies the action carried out by this task on
+      this core. Supported modes are: classify, drop, gen, lat, genl4, nop, l2fwd, gredecap,
+      greencap, lbpos, lbnetwork, lbqinq, lb5tuple, ipv6_decap, ipv6_encap,
+      qinqdecapv4, qinqencapv4, qos, routing, impair,
+      mirror, unmpls, tagmpls, nat, decapnsh, encapnsh, police, acl
+      Which are :-
+
+       * Classify
+       * Drop
+       * Basic Forwarding (no touch)
+       * L2 Forwarding (change MAC)
+       * GRE encap/decap
+       * Load balance based on packet fields
+       * Symmetric load balancing
+       * QinQ encap/decap IPv4/IPv6
+       * ARP
+       * QoS
+       * Routing
+       * Unmpls
+       * Nsh encap/decap
+       * Policing
+       * ACL
+
+      In the traffic generator we expect a core to generate packets (``gen``)
+      and to receive packets & calculate latency (``lat``)
+      This core does ``gen``, i.e. it is a traffic generator.
+
+      To understand what each of the modes support please see
+      `prox documentation`_.
+
+   d. ``tx port=p0`` - This specifies that the packets generated are
+      transmitted to port ``p0``
+   e. ``bps=1250000000`` - This indicates Bytes Per Second to
+      generate packets.
+   f. ``; Ethernet + IP + UDP`` - This is a comment. Items starting with
+      ``;`` are ignored.
+   g. ``pkt inline=${sut_mac0} 70 00 00 00 ...`` - Defines the packet
+      format as a sequence of bytes (each
+      expressed in hexadecimal notation). This defines the packet
+      that is generated. This packets begins
+      with the hexadecimal sequence assigned to ``sut_mac`` and the
+      remainder of the bytes in the string.
+      This packet could now be sent or modified by ``random=..``
+      described below before being sent to target.
+   h. ``; src_ip: 152.16.100.0/8`` - Comment
+   i. ``random=0000XXX1`` - This describes a field of the packet
+      containing random data. This string can be
+      8,16,24 or 32 character long and represents 1,2,3 or 4
+      bytes of data. In this case it describes a byte of
+      data. Each character in string can be 0,1 or ``X``. 0 or 1
+      are fixed bit values in the data packet and ``X`` is a
+      random bit. So random=0000XXX1 generates 00000001(1),
+      00000011(3), 00000101(5), 00000111(7),
+      00001001(9), 00001011(11), 00001101(13) and 00001111(15)
+      combinations.
+   j. ``rand_offset=29`` - Defines where to place the previously
+      defined random field.
+   k. ``; dst_ip: 152.16.100.0/8`` - Comment
+   l. ``random=0000XXX0`` - This is another random field which
+      generates a byte of 00000000(0), 00000010(2),
+      00000100(4), 00000110(6), 00001000(8), 00001010(10),
+      00001100(12) and 00001110(14) combinations.
+   m. ``rand_offset=33`` - Defines where to place the previously
+      defined random field.
+   n. ``random=0001001110001XXX0001001110001XXX`` - This is
+      another random field which generates 4 bytes.
+   o. ``rand_offset=34`` - Defines where to place the previously
+      defined 4 byte random field.
+
+   Core 2 executes same scenario as Core 1. The only difference
+   in this case is that the packets are generated
+   for Port 1.
+
+8. ``[core 3]`` - This defines the activities on core 3. The purpose
+   of ``core 3`` and ``core 4`` is to receive packets
+   sent by the SUT.::
+
+     [core 3]
+     name=rec 0
+     task=0
+     mode=lat
+     rx port=p0
+     lat pos=42
+
+   a. ``name=rec 0`` - Name assigned to the core.
+   b. ``task=0`` - Each core can run a set of tasks. Starting with
+      ``0``. Task 1 can be defined later in this core or
+      can be defined in another ``[core 1]`` section with
+      ``task=1`` later in configuration file. Sometimes running
+      multiple task related to the same packet on the same
+      physical core improves performance, however sometimes it
+      is optimal to move task to a separate core. This is
+      best decided by checking performance.
+   c. ``mode=lat`` - Specifies the action carried out by this task on this core. Supported modes are: acl,
+      classify, drop, gredecap, greencap, ipv6_decap, ipv6_encap, l2fwd, lbnetwork, lbpos, lbqinq, nop,
+      police, qinqdecapv4, qinqencapv4, qos, routing, impair, lb5tuple, mirror, unmpls, tagmpls,
+      nat, decapnsh, encapnsh, gen, genl4 and lat. This task(0) per core(3) receives packets on port.
+   d. ``rx port=p0`` - The port to receive packets on ``Port 0``. Core 4 will receive packets on ``Port 1``.
+   e. ``lat pos=42`` - Describes where to put a 4-byte timestamp in the packet. Note that the packet length should
+      be longer than ``lat pos`` + 4 bytes to avoid truncation of the timestamp. It defines where the timestamp is
+      to be read from. Note that the SUT workload might cause the position of the timestamp to change
+      (i.e. due to encapsulation).
+
+.. _nsb-sut-generator-label:
+
+*SUT Config file*
+-------------------------------
+
+This section will describe the SUT (VNF) config file. This is the same for both
+baremetal and heat. See this example of ``handle_l2fwd_multiflow-2.cfg`` to explain the options.
+
+.. image:: images/PROX_Handle_2port_cfg.png
+   :width: 1400px
+   :alt: NSB PROX Handle Config File
+
+See `prox options`_ for details
+
+Now let's examine the components of the file in detail
+
+1. ``[eal options]`` - same as the Generator config file. This specified the EAL (Environmental Abstraction Layer)
+   options. These are default values and are not changed.
+   See `dpdk wiki page`_.
+
+2. ``[port 0]`` - This section describes the DPDK Port. The number following the keyword ``port`` usually refers to the DPDK Port Id, usually starting from ``0``.
+   Because you can have multiple ports this entry is usually repeated. E.g. for a 2 port setup ``[port0]`` and ``[port 1]`` and for a 4 port setup ``[port 0]``, ``[port 1]``,
+   ``[port 2]`` and ``[port 3]``::
+
+      [port 0]
+      name=if0
+      mac=hardware
+      rx desc=2048
+      tx desc=2048
+      promiscuous=yes
+
+   a. In this example ``name =if0`` assigned the name ``if0`` to the port. Any name can be assigned to a port.
+   b. ``mac=hardware`` sets the MAC address assigned by the hardware to data from this port.
+   c. ``rx desc=2048`` sets the number of available descriptors to allocate for receive packets. This can be changed and can effect performance.
+   d. ``tx desc=2048`` sets the number of available descriptors to allocate for transmit packets. This can be changed and can effect performance.
+   e. ``promiscuous=yes`` this enables promiscuous mode for this port.
+
+3. ``[defaults]`` - Here default operations and settings can be over written.::
+
+     [defaults]
+     mempool size=8K
+     memcache size=512
+
+   a. In this example ``mempool size=8K`` the number of mbufs per task is altered. Altering this value could effect performance. See `prox options`_ for details.
+   b. ``memcache size=512`` - number of mbufs cached per core, default is 256 this is the cache_size. Altering this value could effect performance.
+
+4. ``[global]`` - Here application wide setting are supported. Things like application name, start time, duration and memory configurations can be set here.
+   In this example.::
+
+      [global]
+      start time=5
+      name=Handle L2FWD Multiflow (2x)
+
+    a. ``start time=5`` Time in seconds after which average stats will be started.
+    b. ``name=Handle L2FWD Multiflow (2x)`` Name of the configuration.
+
+5. ``[core 0]`` - This core is designated the master core. Every Prox application must have a master core. The master mode must be assigned to
+   exactly one task, running alone on one core.::
+
+    [core 0]
+    mode=master
+
+6. ``[core 1]`` - This describes the activity on core 1. Cores can be configured by means of a set of [core #] sections,   where # represents either:
+
+   a. an absolute core number: e.g. on a 10-core, dual socket system with hyper-threading,
+      cores are numbered from 0 to 39.
+
+   b. PROX allows a core to be identified by a core number, the letter 's', and a socket number.
+      However, since NSB PROX is hardware agnostic (physical and virtual configurations are the same), it
+      is advisable not to use physical core numbering.
+
+   Each core can be assigned with a set of tasks, each running one of the implemented packet processing modes.::
+
+     [core 1]
+     name=none
+     task=0
+     mode=l2fwd
+     dst mac=@@tester_mac1
+     rx port=if0
+     tx port=if1
+
+   a. ``name=none`` - No name assigned to the core.
+   b. ``task=0`` - Each core can run a set of tasks. Starting with ``0``. Task 1 can be defined later in this core or
+      can be defined in another ``[core 1]`` section with ``task=1`` later in configuration file. Sometimes running
+      multiple task related to the same packet on the same physical core improves performance, however sometimes it
+      is optimal to move task to a separate core. This is best decided by checking performance.
+   c. ``mode=l2fwd`` - Specifies the action carried out by this task on this core. Supported modes are: acl,
+      classify, drop, gredecap, greencap, ipv6_decap, ipv6_encap, l2fwd, lbnetwork, lbpos, lbqinq, nop,
+      police, qinqdecapv4, qinqencapv4, qos, routing, impair, lb5tuple, mirror, unmpls, tagmpls,
+      nat, decapnsh, encapnsh, gen, genl4 and lat. This code does ``l2fwd`` .. ie it does the L2FWD.
+
+   d. ``dst mac=@@tester_mac1`` - The destination mac address of the packet will be set to the MAC address of ``Port 1`` of destination device. (The Traffic Generator/Verifier)
+   e. ``rx port=if0`` - This specifies that the packets are received from ``Port 0`` called if0
+   f. ``tx port=if1`` - This specifies that the packets are transmitted to ``Port 1``  called if1
+
+   In this example we receive a packet on a port, carry out an operation on the packet on the core and transmit it on another port, still using the same task on the same core.
+
+   On some implementation you may wish to use multiple tasks, like this.::
+
+     [core 1]
+     name=rx_task
+     task=0
+     mode=l2fwd
+     dst mac=@@tester_p0
+     rx port=if0
+     tx cores=1t1
+     drop=no
+
+     name=l2fwd_if0
+     task=1
+     mode=nop
+     rx ring=yes
+     tx port=if0
+     drop=no
+
+   In this example you can see Core 1/Task 0 called ``rx_task`` receives the packet from if0 and perform the l2fwd. However instead of sending the packet to a
+   port it sends it to a core see ``tx cores=1t1``. In this case it sends it to Core 1/Task 1.
+
+   Core 1/Task 1 called ``l2fwd_if0``, receives the packet, not from a port but from the ring. See ``rx ring=yes``. It does not perform any operation on the packet (see ``mode=nop``)
+   and sends the packets to ``if0``, see ``tx port=if0``.
+
+   It is also possible to implement more complex operations by chaining multiple operations in sequence and using rings to pass packets from one core to another.
+
+   In this example we show a Broadband Network Gateway (BNG) with Quality of Service (QoS). Communication from task to task is via rings.
+
+   .. image:: images/PROX_BNG_QOS.png
+      :width: 1000px
+      :alt: NSB PROX Config File for BNG_QOS
+
+*Baremetal Configuration file*
+------------------------------
+
+.. _baremetal-config-label:
+
+This is required for baremetal testing. It describes the IP address of the various ports, the Network devices drivers and MAC addresses and the network
+configuration.
+
+In this example we will describe a 2 port configuration. This file is the same for all 2 port NSB Prox tests on the same platforms/configuration.
+
+  .. image:: images/PROX_Baremetal_config.png
+     :width: 1000px
+     :alt: NSB PROX Yardstick Config
+
+Now lets describe the sections of the file.
+
+  1. ``TrafficGen`` - This section describes the Traffic Generator node of the test configuration. The name of the node ``trafficgen_1`` must match the node name
+     in the ``Test Description File for Baremetal`` mentioned earlier. The password attribute of the test needs to be configured. All other parameters
+     can remain as default settings.
+  2. ``interfaces`` - This defines the DPDK interfaces on the Traffic Generator.
+  3. ``xe0`` is DPDK Port 0. ``lspci`` and `` ./dpdk-devbind.py -s`` can be used to provide the interface information. ``netmask`` and ``local_ip`` should not be changed
+  4. ``xe1`` is DPDK Port 1. If more than 2 ports are required then ``xe1`` section needs to be repeated and modified accordingly.
+  5. ``vnf`` - This section describes the SUT of the test configuration. The name of the node ``vnf`` must match the node name in the
+     ``Test Description File for Baremetal`` mentioned earlier. The password attribute of the test needs to be configured. All other parameters
+     can remain as default settings
+  6. ``interfaces`` - This defines the DPDK interfaces on the SUT
+  7. ``xe0`` - Same as 3 but for the ``SUT``.
+  8. ``xe1`` - Same as 4 but for the ``SUT`` also.
+  9. ``routing_table`` - All parameters should remain unchanged.
+  10. ``nd_route_tbl`` - All parameters should remain unchanged.
+
+*Grafana Dashboard*
+-------------------
+
+The grafana dashboard visually displays the results of the tests. The steps required to produce a grafana dashboard are described here.
+
+.. _yardstick-config-label:
+
+  a. Configure ``yardstick`` to use influxDB to store test results. See file ``/etc/yardstick/yardstick.conf``.
+
+     .. image:: images/PROX_Yardstick_config.png
+        :width: 1000px
+        :alt: NSB PROX Yardstick Config
+
+     1. Specify the dispatcher to use influxDB to store results.
+     2. "target = .. " - Specify location of influxDB to store results.
+        "db_name = yardstick" - name of database. Do not change
+        "username = root" - username to use to store result. (Many tests are run as root)
+        "password = ... " - Please set to root user password
+
+  b. Deploy InfluxDB & Grafana. See `grafana deployment`_.
+  c. Generate the test data. Run the tests as follows::
+
+       yardstick --debug task start tc_prox_<context>_<test>-ports.yaml
+
+     eg.::
+
+       yardstick --debug task start tc_prox_heat_context_l2fwd-4.yaml
+
+  d. Now build the dashboard for the test you just ran. The easiest way to do this is to copy an existing dashboard and rename the
+     test and the field names. The procedure to do so is described here. See `opnfv grafana dashboard`_.
+
+How to run NSB Prox Test on a baremetal environment
+====================================================
+
+In order to run the NSB PROX test.
+
+  1. Install NSB on Traffic Generator node and Prox in SUT. See `NSB Installation`_
+
+  2. To enter container::
+
+       docker exec -it yardstick /bin/bash
+
+  3. Install baremetal configuration file (POD files)
+
+     a. Go to location of PROX tests in container ::
+
+          cd /home/opnfv/repos/yardstick/samples/vnf_samples/nsut/prox
+
+     b. Install prox-baremetal-2.yaml and prox-baremetal-4.yaml for that topology
+        into this directory as per baremetal-config-label_
+
+     c. Install and configure ``yardstick.conf`` ::
+
+            cd /etc/yardstick/
+
+        Modify /etc/yardstick/yardstick.conf as per yardstick-config-label_
+
+  4. Execute the test. Eg.::
+
+        yardstick --debug task start ./tc_prox_baremetal_l2fwd-4.yaml
+
+How to run NSB Prox Test on an Openstack environment
+====================================================
+
+In order to run the NSB PROX test.
+
+  1. Install NSB on Openstack deployment node. See  `NSB Installation`_
+
+  2. To enter container::
+
+       docker exec -it yardstick /bin/bash
+
+  3. Install configuration file
+
+     a. Go to location of PROX tests in container ::
+
+          cd /home/opnfv/repos/yardstick/samples/vnf_samples/nsut/prox
+
+     b. Install and configure ``yardstick.conf`` ::
+
+            cd /etc/yardstick/
+
+        Modify /etc/yardstick/yardstick.conf as per yardstick-config-label_
+
+
+  4. Execute the test. Eg.::
+
+        yardstick --debug task start ./tc_prox_heat_context_l2fwd-4.yaml
+
+Frequently Asked Questions
+==========================
+
+Here is a list of frequently asked questions.
+
+*NSB Prox does not work on Baremetal, How do I resolve this?*
+-------------------------------------------------------------
+
+If PROX NSB does not work on baremetal, the problem is either in the network configuration or in the test file.
+
+*Solution*
+
+1. Verify network configuration. Execute existing baremetal test.::
+
+       yardstick --debug task start ./tc_prox_baremetal_l2fwd-4.yaml
+
+   If test does not work then error in network configuration.
+
+      a. Check DPDK on Traffic Generator and SUT via:- ::
+
+           /root/dpdk-17./usertools/dpdk-devbind.py
+
+      b. Verify MAC addresses match ``prox-baremetal-<ports>.yaml`` via ``ifconfig`` and ``dpdk-devbind``
+
+      c. Check your eth port is what you expect. You would not be the first person to think that
+         the port your cable is plugged into is ethX when in fact it is ethY. Use
+         ethtool to visually confirm that the eth is where you expect.::
+
+            ethtool -p ethX
+
+         A led should start blinking on port. (On both System-Under-Test and Traffic Generator)
+
+      d. Check cable.
+
+         Install Linux kernel network driver and ensure your ports are
+         ``bound`` to the driver via ``dpdk-devbind``. Bring up port on both
+         SUT and Traffic Generator and check connection.
+
+         i) On SUT and on Traffic Generator::
+
+              ifconfig ethX/enoX up
+
+         ii) Check link
+
+               ethtool ethX/enoX
+
+             See ``Link detected`` if ``yes`` .... Cable is good. If ``no`` you have an issue with your cable/port.
+
+2. If the existing baremetal test works then the issue is with your test. Check the traffic generator gen_<test>-<ports>.cfg to ensure
+   it is producing a valid packet.
+
+*How do I debug NSB Prox on Baremetal?*
+---------------------------------------
+
+*Solution*
+
+1. Execute the test as follows::
+
+     yardstick --debug task start ./tc_prox_baremetal_l2fwd-4.yaml
+
+2. Login to Traffic Generator as ``root``.::
+
+     cd
+     /opt/nsb_bin/prox -f /tmp/gen_<test>-<ports>.cfg
+
+3. Login to SUT as ``root``.::
+
+     cd
+     /opt/nsb_bin/prox -f /tmp/handle_<test>-<ports>.cfg
+
+4. Now let's examine the Generator Output. In this case the output of gen_l2fwd-4.cfg.
+
+     .. image:: images/PROX_Gen_GUI.png
+        :width: 1000px
+        :alt: NSB PROX Traffic Generator GUI
+
+   Now let's examine the output
+
+     1. Indicates the amount of data successfully transmitted on Port 0
+     2. Indicates the amount of data successfully received on port 1
+     3. Indicates the amount of data successfully handled for port 1
+
+   It appears what is transmitted is received.
+
+   .. Caution::
+      The number of packets MAY not exactly match because the ports are read in sequence.
+
+   .. Caution::
+      What is transmitted on PORT X may not always be received on same port. Please check the Test scenario.
+
+5. Now let's examine the SUT Output
+
+     .. image:: images/PROX_SUT_GUI.png
+        :width: 1400px
+        :alt: NSB PROX SUT GUI
+
+   Now let's examine the output
+
+     1. What is received on 0 is transmitted on 1, received on 1 transmitted on 0,
+        received on 2 transmitted on 3 and received on 3 transmitted on 2.
+     2. No packets are Failed.
+     3. No Packets are discarded.
+
+  We can also dump the packets being received or transmitted via the following commands. ::
+
+       dump                   Arguments: <core id> <task id> <nb packets>
+                              Create a hex dump of <nb_packets> from <task_id> on <core_id> showing how
+                              packets have changed between RX and TX.
+       dump_rx                Arguments: <core id> <task id> <nb packets>
+                              Create a hex dump of <nb_packets> from <task_id> on <core_id> at RX
+       dump_tx                Arguments: <core id> <task id> <nb packets>
+                              Create a hex dump of <nb_packets> from <task_id> on <core_id> at TX
+
+  eg.::
+
+       dump_tx 1 0 1
+
+*NSB Prox works on Baremetal but not in Openstack. How do I resolve this?*
+--------------------------------------------------------------------------
+
+NSB Prox on Baremetal is a lot more forgiving than NSB Prox on Openstack. A badly
+formed packet may still work with PROX on Baremetal. However on
+Openstack the packet must be correct and all fields of the header correct.
+Eg A packet with an invalid Protocol ID would still work in Baremetal
+but this packet would be rejected by openstack.
+
+*Solution*
+
+ 1. Check the validity of the packet.
+ 2. Use a known good packet in your test
+ 3. If using ``Random`` fields in the traffic generator, disable them and retry.
+
+
+*How do I debug NSB Prox on Openstack?*
+---------------------------------------
+
+*Solution*
+
+1. Execute the test as follows::
+
+     yardstick --debug task start --keep-deploy ./tc_prox_heat_context_l2fwd-4.yaml
+
+2. Access docker image if required via::
+
+      docker exec -it yardstick /bin/bash
+
+3. Install openstack credentials.
+
+   Depending on your openstack deployment, the location of these credentials may vary.
+   On this platform I do this via::
+
+     scp root@10.237.222.55:/etc/kolla/admin-openrc.sh .
+     source ./admin-openrc.sh
+
+4. List Stack details
+
+   a. Get the name of the Stack.
+
+         .. image:: images/PROX_Openstack_stack_list.png
+            :width: 1000px
+            :alt: NSB PROX openstack stack list
+
+   b. Get the Floating IP of the Traffic Generator & SUT
+
+      This generates a lot of information. Please note the floating IP of the VNF and
+      the Traffic Generator.
+
+         .. image:: images/PROX_Openstack_stack_show_a.png
+            :width: 1000px
+            :alt: NSB PROX openstack stack show (Top)
+
+      From here you can see the floating IP Address of the SUT / VNF
+
+         .. image:: images/PROX_Openstack_stack_show_b.png
+            :width: 1000px
+            :alt: NSB PROX openstack stack show (Bottom)
+
+      From here you can see the floating IP Address of the Traffic Generator
+
+   c. Get ssh identity file
+
+      In the docker container locate the identity file.::
+
+        cd /home/opnfv/repos/yardstick/yardstick/resources/files
+        ls -lt
+
+5. Login to SUT as ``Ubuntu``.::
+
+     ssh -i ./yardstick_key-01029d1d ubuntu@172.16.2.158
+
+   Change to root::
+
+     sudo su
+
+    Now continue as baremetal.
+
+6. Login to Traffic Generator as ``ubuntu``.::
+
+     ssh -i ./yardstick_key-01029d1d ubuntu@172.16.2.156
+
+   Change to root::
+
+     sudo su
+
+    Now continue as baremetal.
+
+*How do I resolve "Quota exceeded for resources"*
+-------------------------------------------------
+
+*Solution*
+
+This usually occurs due to 2 reasons when executing an openstack test.
+
+1. One or more stacks already exists and are consuming all resources. To resolve ::
+
+     openstack stack list
+
+   Response::
+
+     +--------------------------------------+--------------------+-----------------+----------------------+--------------+
+     | ID                                   | Stack Name         | Stack Status    | Creation Time        | Updated Time |
+     +--------------------------------------+--------------------+-----------------+----------------------+--------------+
+     | acb559d7-f575-4266-a2d4-67290b556f15 | yardstick-e05ba5a4 | CREATE_COMPLETE | 2017-12-06T15:00:05Z | None         |
+     | 7edf21ce-8824-4c86-8edb-f7e23801a01b | yardstick-08bda9e3 | CREATE_COMPLETE | 2017-12-06T14:56:43Z | None         |
+     +--------------------------------------+--------------------+-----------------+----------------------+--------------+
+
+   In this case 2 stacks already exist.
+
+   To remove stack::
+
+     openstack stack delete yardstick-08bda9e3
+     Are you sure you want to delete this stack(s) [y/N]? y
+
+2. The openstack configuration quotas are too small.
+
+   The solution is to increase the quota. Use below to query existing quotas::
+
+     openstack quota show
+
+   And to set quota::
+
+     openstack quota set <resource>
+
+*Openstack Cli fails or hangs. How do I resolve this?*
+------------------------------------------------------
+
+*Solution*
+
+If it fails due to ::
+
+   Missing value auth-url required for auth plugin password
+
+Check your shell environment for Openstack variables. One of them should contain the authentication URL ::
+
+
+   OS_AUTH_URL=``https://192.168.72.41:5000/v3``
+
+Or similar. Ensure that openstack configurations are exported. ::
+
+   cat  /etc/kolla/admin-openrc.sh
+
+Result ::
+
+   export OS_PROJECT_DOMAIN_NAME=default
+   export OS_USER_DOMAIN_NAME=default
+   export OS_PROJECT_NAME=admin
+   export OS_TENANT_NAME=admin
+   export OS_USERNAME=admin
+   export OS_PASSWORD=BwwSEZqmUJA676klr9wa052PFjNkz99tOccS9sTc
+   export OS_AUTH_URL=http://193.168.72.41:35357/v3
+   export OS_INTERFACE=internal
+   export OS_IDENTITY_API_VERSION=3
+   export EXTERNAL_NETWORK=yardstick-public
+
+and visible.
+
+If the Openstack Cli appears to hang, then verify that the proxy and no_proxy environment variables are set correctly.
+They should be similar to ::
+
+   FTP_PROXY="http://proxy.ir.intel.com:911/"
+   HTTPS_PROXY="http://proxy.ir.intel.com:911/"
+   HTTP_PROXY="http://proxy.ir.intel.com:911/"
+   NO_PROXY="localhost,127.0.0.1,10.237.222.55,10.237.223.80,10.237.222.134,.ir.intel.com"
+   ftp_proxy="http://proxy.ir.intel.com:911/"
+   http_proxy="http://proxy.ir.intel.com:911/"
+   https_proxy="http://proxy.ir.intel.com:911/"
+   no_proxy="localhost,127.0.0.1,10.237.222.55,10.237.223.80,10.237.222.134,.ir.intel.com"
+
+Where
+
+    1) 10.237.222.55 = IP Address of deployment node
+    2) 10.237.223.80 = IP Address of Controller node
+    3) 10.237.222.134 = IP Address of Compute Node
+    4) ir.intel.com = local no proxy
+
+
+
+
+
+
diff --git a/docs/testing/developer/devguide/images/PROX_BNG_QOS.png b/docs/testing/developer/devguide/images/PROX_BNG_QOS.png
new file mode 100644 (file)
index 0000000..3c72094
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_BNG_QOS.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Baremetal_config.png b/docs/testing/developer/devguide/images/PROX_Baremetal_config.png
new file mode 100644 (file)
index 0000000..5cd9140
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Baremetal_config.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Gen_2port_cfg.png b/docs/testing/developer/devguide/images/PROX_Gen_2port_cfg.png
new file mode 100644 (file)
index 0000000..07731ca
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Gen_2port_cfg.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Gen_GUI.png b/docs/testing/developer/devguide/images/PROX_Gen_GUI.png
new file mode 100644 (file)
index 0000000..e96aea3
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Gen_GUI.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Handle_2port_cfg.png b/docs/testing/developer/devguide/images/PROX_Handle_2port_cfg.png
new file mode 100644 (file)
index 0000000..6505bed
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Handle_2port_cfg.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Hardware_Arch.png b/docs/testing/developer/devguide/images/PROX_Hardware_Arch.png
new file mode 100644 (file)
index 0000000..6e69dd6
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Hardware_Arch.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Openstack_stack_list.png b/docs/testing/developer/devguide/images/PROX_Openstack_stack_list.png
new file mode 100644 (file)
index 0000000..f67d10e
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Openstack_stack_list.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Openstack_stack_show_a.png b/docs/testing/developer/devguide/images/PROX_Openstack_stack_show_a.png
new file mode 100644 (file)
index 0000000..00e7620
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Openstack_stack_show_a.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Openstack_stack_show_b.png b/docs/testing/developer/devguide/images/PROX_Openstack_stack_show_b.png
new file mode 100644 (file)
index 0000000..bbe9b86
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Openstack_stack_show_b.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_SUT_GUI.png b/docs/testing/developer/devguide/images/PROX_SUT_GUI.png
new file mode 100644 (file)
index 0000000..204083d
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_SUT_GUI.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Software_Arch.png b/docs/testing/developer/devguide/images/PROX_Software_Arch.png
new file mode 100644 (file)
index 0000000..c31f1e2
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Software_Arch.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Test_BM_Script.png b/docs/testing/developer/devguide/images/PROX_Test_BM_Script.png
new file mode 100644 (file)
index 0000000..32530eb
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Test_BM_Script.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Test_HEAT_Script.png b/docs/testing/developer/devguide/images/PROX_Test_HEAT_Script.png
new file mode 100644 (file)
index 0000000..754973b
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Test_HEAT_Script.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Traffic_profile.png b/docs/testing/developer/devguide/images/PROX_Traffic_profile.png
new file mode 100644 (file)
index 0000000..660bca3
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Traffic_profile.png differ
diff --git a/docs/testing/developer/devguide/images/PROX_Yardstick_config.png b/docs/testing/developer/devguide/images/PROX_Yardstick_config.png
new file mode 100644 (file)
index 0000000..8d346b0
Binary files /dev/null and b/docs/testing/developer/devguide/images/PROX_Yardstick_config.png differ
index caebecc..cac8146 100644 (file)
@@ -172,13 +172,13 @@ Environment variables in the ``openrc`` file have to include at least::
    OS_AUTH_URL
    OS_USERNAME
    OS_PASSWORD
-   OS_TENANT_NAME
+   OS_PROJECT_NAME
    EXTERNAL_NETWORK
 
 A sample ``openrc`` file may look like this::
 
   export OS_PASSWORD=console
-  export OS_TENANT_NAME=admin
+  export OS_PROJECT_NAME=admin
   export OS_AUTH_URL=http://172.16.1.222:35357/v2.0
   export OS_USERNAME=admin
   export OS_VOLUME_API_VERSION=2
@@ -464,7 +464,7 @@ Thirdly, create and configure Grafana container::
 
    yardstick env grafana
 
-Then you can run a test case and visit http://host_ip:3000
+Then you can run a test case and visit http://host_ip:1948
 (``admin``/``admin``) to see the results.
 
 .. note:: Executing ``yardstick env`` command to deploy InfluxDB and Grafana
@@ -502,9 +502,9 @@ Configure influxDB::
 
 Run Grafana::
 
-   sudo -EH docker run -d --name grafana -p 3000:3000 grafana/grafana
+   sudo -EH docker run -d --name grafana -p 1948:3000 grafana/grafana
 
-Log on http://{YOUR_IP_HERE}:3000 using ``admin``/``admin`` and configure
+Log on http://{YOUR_IP_HERE}:1948 using ``admin``/``admin`` and configure
 database resource to be ``{YOUR_IP_HERE}:8086``.
 
 .. image:: images/Grafana_config.png
index ec0b49f..679ce79 100644 (file)
@@ -49,7 +49,7 @@ environment and other dependencies:
 3. Make sure Jump Host have access to the OpenStack Controller API.
 4. Make sure Jump Host must have internet connectivity for downloading docker image.
 5. You need to know where to get basic openstack Keystone authorization info, such as
-   OS_PASSWORD, OS_TENANT_NAME, OS_AUTH_URL, OS_USERNAME.
+   OS_PASSWORD, OS_PROJECT_NAME, OS_AUTH_URL, OS_USERNAME.
 6. To run a Storperf container, you need to have OpenStack Controller environment
    variables defined and passed to Storperf container. The best way to do this is to
    put environment variables in a "storperf_admin-rc" file. The storperf_admin-rc
@@ -58,8 +58,6 @@ environment and other dependencies:
 * OS_AUTH_URL
 * OS_USERNAME
 * OS_PASSWORD
-* OS_TENANT_ID
-* OS_TENANT_NAME
 * OS_PROJECT_NAME
 * OS_PROJECT_ID
 * OS_USER_DOMAIN_ID
@@ -76,8 +74,9 @@ test/ci/prepare_storperf_admin-rc.sh
   USERNAME=${OS_USERNAME:-admin}
   PASSWORD=${OS_PASSWORD:-console}
 
+  # OS_TENANT_NAME is still present to keep backward compatibility with legacy
+  # deployments, but should be replaced by OS_PROJECT_NAME.
   TENANT_NAME=${OS_TENANT_NAME:-admin}
-  TENANT_ID=`openstack project show admin|grep '\bid\b' |awk -F '|' '{print $3}'|sed -e 's/^[[:space:]]*//'`
   PROJECT_NAME=${OS_PROJECT_NAME:-$TENANT_NAME}
   PROJECT_ID=`openstack project show admin|grep '\bid\b' |awk -F '|' '{print $3}'|sed -e 's/^[[:space:]]*//'`
   USER_DOMAIN_ID=${OS_USER_DOMAIN_ID:-default}
@@ -90,8 +89,6 @@ test/ci/prepare_storperf_admin-rc.sh
   echo "OS_PASSWORD="$PASSWORD >> ~/storperf_admin-rc
   echo "OS_PROJECT_NAME="$PROJECT_NAME >> ~/storperf_admin-rc
   echo "OS_PROJECT_ID="$PROJECT_ID >> ~/storperf_admin-rc
-  echo "OS_TENANT_NAME="$TENANT_NAME >> ~/storperf_admin-rc
-  echo "OS_TENANT_ID="$TENANT_ID >> ~/storperf_admin-rc
   echo "OS_USER_DOMAIN_ID="$USER_DOMAIN_ID >> ~/storperf_admin-rc
 
 
index ff6e622..92fa408 100644 (file)
@@ -252,7 +252,6 @@ Example::
                 "OS_PASSWORD": "console",
                 "OS_PROJECT_DOMAIN_NAME": "default",
                 "OS_PROJECT_NAME": "admin",
-                "OS_TENANT_NAME": "admin",
                 "OS_USERNAME": "admin",
                 "OS_USER_DOMAIN_NAME": "default"
             },
index e6e06df..09b866c 100644 (file)
@@ -10,7 +10,7 @@ Yardstick Test Case Description TC056
 
 +-----------------------------------------------------------------------------+
 |OpenStack Controller Messaging Queue Service High Availability               |
-+==============+==============================================================+
++--------------+--------------------------------------------------------------+
 |test case id  | OPNFV_YARDSTICK_TC056:OpenStack Controller Messaging Queue   |
 |              | Service High Availability                                    |
 +--------------+--------------------------------------------------------------+
index 2a4ce40..bb42b2a 100644 (file)
@@ -10,8 +10,11 @@ Yardstick Test Case Description TC057
 
 +-----------------------------------------------------------------------------+
 |OpenStack Controller Cluster Management Service High Availability            |
-+==============+==============================================================+
-|test case id  |                                                              |
+|                                                                             |
++--------------+--------------------------------------------------------------+
+|test case id  | OPNFV_YARDSTICK_TC057_HA: OpenStack Controller Cluster       |
+|              | Management Service High Availability                         |
+|              |                                                              |
 +--------------+--------------------------------------------------------------+
 |test purpose  | This test case will verify the quorum configuration of the   |
 |              | cluster manager(pacemaker) on controller nodes. When a       |
@@ -53,10 +56,11 @@ Yardstick Test Case Description TC057
 |              | "openstack-cmd" for this monitor.                            |
 |              | 2) command_name: which is the command name used for request  |
 |              |                                                              |
-|              | In this case, the command_name of monitor1 should be services|
-|              | that are managed by the cluster manager. (Since rabbitmq and |
-|              | haproxy are managed by pacemaker, most Openstack Services    |
-|              | can be used to check high availability in this case)         |
+|              | In this case, the command_name of monitor1 should be         |
+|              | services that are managed by the cluster manager.            |
+|              | (Since rabbitmq and haproxy are managed by pacemaker,        |
+|              | most Openstack Services can be used to check high            |
+|              | availability in this case)                                   |
 |              |                                                              |
 |              | (e.g.)                                                       |
 |              | monitor1:                                                    |
@@ -155,8 +159,8 @@ Yardstick Test Case Description TC057
 |              | Result: The test case is passed or not.                      |
 |              |                                                              |
 +--------------+------+----------------------------------+--------------------+
-|post-action   | It is the action when the test cases exist. It will check the|
-|              | status of the cluster messaging process(corosync) on the     |
+|post-action   | It is the action when the test cases exist. It will check    |
+|              | the status of the cluster messaging process(corosync) on the |
 |              | host, and restart the process if it is not running for next  |
 |              | test cases                                                   |
 +--------------+------+----------------------------------+--------------------+
index fb9a4c2..7c323e9 100644 (file)
@@ -10,8 +10,9 @@ Yardstick Test Case Description TC058
 
 +-----------------------------------------------------------------------------+
 |OpenStack Controller Virtual Router Service High Availability                |
-+==============+==============================================================+
-|test case id  | OPNFV_YARDSTICK_TC058:OpenStack Controller Virtual Router    |
+|                                                                             |
++--------------+--------------------------------------------------------------+
+|test case id  | OPNFV_YARDSTICK_TC058: OpenStack Controller Virtual Router   |
 |              | Service High Availability                                    |
 +--------------+--------------------------------------------------------------+
 |test purpose  | This test case will verify the high availability of virtual  |
index ceec83f..dc5c326 100644 (file)
@@ -107,7 +107,7 @@ angular.module('yardStickGui2App')
             $scope.envInfo = [
                 { name: 'OS_USERNAME', value: '' },
                 { name: 'OS_PASSWORD', value: '' },
-                { name: 'OS_TENANT_NAME', value: '' },
+                { name: 'OS_PROJECT_NAME', value: '' },
                 { name: 'EXTERNAL_NETWORK', value: '' }
             ];
 
@@ -298,7 +298,7 @@ angular.module('yardStickGui2App')
                 $scope.envInfo = [
                     { name: 'OS_USERNAME', value: '' },
                     { name: 'OS_PASSWORD', value: '' },
-                    { name: 'OS_TENANT_NAME', value: '' },
+                    { name: 'OS_PROJECT_NAME', value: '' },
                     { name: 'EXTERNAL_NETWORK', value: '' }
                 ];
                 $scope.displayOpenrcFile = null;
index 4a8e4db..50fc017 100755 (executable)
@@ -63,7 +63,7 @@ for i in "${pkg[@]}"; do
     fi
 done
 
-pip install ansible==2.3.2 shade==1.17.0 docker-py==1.10.6
+pip install ansible==2.4.2 shade==1.22.2 docker-py==1.10.6
 
 ANSIBLE_SCRIPTS="ansible"
 
index aacafdf..d45e4b1 100644 (file)
@@ -37,11 +37,13 @@ os-client-config==1.28.0    # OSI Approved  Apache Software License
 osc-lib==1.7.0          # OSI Approved  Apache Software License
 oslo.config==4.11.1     # OSI Approved  Apache Software License
 oslo.i18n==3.17.0       # OSI Approved  Apache Software License
+oslo.privsep===1.22.1   # OSI Approved  Apache Software License
 oslo.serialization==2.20.1  # OSI Approved  Apache Software License
 oslo.utils==3.28.0      # OSI Approved  Apache Software License
 paramiko==2.2.1         # LGPL; OSI Approved  GNU Library or Lesser General Public License (LGPL)
 pbr==3.1.1              # OSI Approved  Apache Software License; Apache License, Version 2.0
 pika==0.10.0            # BSD; OSI Approved  BSD License
+pip==9.0.1;python_version=='2.7'        # MIT
 positional==1.1.2       # OSI Approved  Apache Software License
 pycrypto==2.6.1         # Public Domain
 pyparsing==2.2.0        # MIT License; OSI Approved  MIT License
diff --git a/samples/storage_bottlenecks.yaml b/samples/storage_bottlenecks.yaml
new file mode 100644 (file)
index 0000000..1aa0d7e
--- /dev/null
@@ -0,0 +1,77 @@
+##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+# Sample benchmark task config file
+# measure storage performance using fio
+#
+# For this sample just like running the command below on the test vm and
+# getting benchmark info back to the yardstick.
+#
+# sudo fio -filename=/home/ubuntu/data.raw -bs=4k -iodepth=1 -rw=rw \
+#          -ramp_time=10 -runtime=60 -name=yardstick-fio -ioengine=libaio \
+#          -direct=1 -group_reporting -numjobs=1 -time_based \
+#          --output-format=json
+
+schema: "yardstick:task:0.1"
+run_in_parallel: true
+
+{% set directory = directory or '/FIO_Test' %}
+{% set stack_num = stack_num or 1 %}
+{% set volume_num = volume_num or "1" %}
+{% set rw = rw or "randrw" %}
+{% set bs = bs or "4k" %}
+{% set size = size or "30g" %}
+{% set rwmixwrite = rwmixwrite or "50" %}
+{% set numjobs = numjobs or "1" %}
+{% set direct = direct or "1" %}
+{% set volume_size = volume_size or 50 %}
+
+scenarios:
+{% for num in range(stack_num) %}
+-
+  type: Fio
+  options:
+    filename: {{ directory }}/test
+    directory: {{ directory }}
+    bs: {{bs}}
+    rw: {{rw}}
+    size: {{size}}
+    rwmixwrite: {{rwmixwrite}}
+    numjobs: {{numjobs}}
+    direct: {{direct}}
+    ramp_time: 10
+
+  host: demo.storage_bottlenecks-{{num}}-{{volume_num}}
+
+  runner:
+    type: Duration
+    duration: 60
+    interval: 1
+{% endfor %}
+
+contexts:
+{% for context_num in range(stack_num) %}
+-
+  name: storage_bottlenecks-{{context_num}}-{{volume_num}}
+  image: yardstick-image
+  flavor: yardstick-flavor
+  user: ubuntu
+
+  servers:
+    demo:
+      volume:
+        size: {{volume_size}}
+      volume_mountpoint: "/dev/vdb"
+      floating_ip: true
+
+  networks:
+    test:
+      cidr: "10.0.1.0/24"
+      port_security_enabled: true
+{% endfor %}
\ No newline at end of file
index e7fad98..192f2f8 100644 (file)
@@ -49,7 +49,8 @@ mode=gen
 tx port=p0
 bps=1250000000
 ; Ethernet + IP + UDP
-pkt inline=${sut_mac0} 70 00 00 00 00 01 08 00 45 00 00 1c 00 01 00 00 40 11 f7 7d 00 00 00 01 00 00 00 02 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac0} 3c fd fe 9f a3 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
 
 [core 2]
 name=gen 1
@@ -58,19 +59,20 @@ mode=gen
 tx port=p1
 bps=1250000000
 ; Ethernet + IP + UDP
-pkt inline=${sut_mac1} 70 00 00 00 00 02 08 00 45 00 00 1c 00 01 00 00 40 11 f7 7d 00 00 00 01 00 00 00 03 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac1} 3c fd fe 9f a3 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
 
 [core 3]
 name=rec 0
 task=0
 mode=lat
 rx port=p0
-lat pos=42
+lat pos=38
 
 [core 4]
 name=rec 0
 task=0
 mode=lat
 rx port=p1
-lat pos=42
+lat pos=38
 
index 5b79185..0db21b6 100644 (file)
@@ -61,7 +61,8 @@ mode=gen
 tx port=p0
 bps=1250000000
 ; Ethernet + IP + UDP
-pkt inline=${sut_mac0} 3c fd fe 9f a3 a0 08 a0 00 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac0} 3c fd fe 9f a3 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
 
 [core 2]
 name=gen 1
@@ -70,7 +71,8 @@ mode=gen
 tx port=p1
 bps=1250000000
 ; Ethernet + IP + UDP
-pkt inline=${sut_mac1}  3c fd fe 9f a5 50 08 a0 00 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac1} 3c fd fe 9f a3 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
 
 [core 3]
 name=gen 2
@@ -79,7 +81,8 @@ mode=gen
 tx port=p2
 bps=1250000000
 ; Ethernet + IP + UDP
-pkt inline=${sut_mac2}  3c fd fe 9f a5 50 08 a0 00 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac2} 3c fd fe 9f a5 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
 
 [core 4]
 name=gen 3
@@ -88,28 +91,33 @@ mode=gen
 tx port=p3
 bps=1250000000
 ; Ethernet + IP + UDP
-pkt inline=${sut_mac3}  3c fd fe 9f a5 50 08 a0 00 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac3} 3c fd fe 9f a5 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
 
 [core 5]
 name=rec 0
 task=0
 mode=lat
 rx port=p0
+lat pos=38
 
 [core 6]
 name=rec 1
 task=0
 mode=lat
 rx port=p1
+lat pos=38
 
 [core 7]
 name=rec 2
 task=0
 mode=lat
 rx port=p2
+lat pos=38
 
 [core 8]
 name=rec 3
 task=0
 mode=lat
 rx port=p3
+lat pos=38
\ No newline at end of file
index 37af37d..c190910 100644 (file)
@@ -50,5 +50,5 @@ context:
   type: Node
   name: yardstick
   nfvi_type: baremetal
-  file: /etc/yardstick/nodes/prox-baremetal-4.yml
+  file: prox-baremetal-4.yaml
 
index 2799a7e..2e096a1 100644 (file)
 # limitations under the License.
 
 ---
+{% set provider = provider or none %}
+{% set physical_networks = physical_networks or ['physnet1', 'physnet2'] %}
+{% set segmentation_id = segmentation_id or none %}
+
 schema: yardstick:task:0.1
 scenarios:
 - type: NSPerf
@@ -68,10 +72,24 @@ context:
     xe0:
       cidr: '10.0.2.0/24'
       gateway_ip: 'null'
+      {% if provider %}
+      provider: {{ provider }}
+      physical_network: {{ physical_networks[0] }}
+        {% if segmentation_id %}
+      segmentation_id: {{ segmentation_id }}
+        {% endif %}
+      {% endif %}
       port_security_enabled: False
       enable_dhcp: 'false'
     xe1:
       cidr: '10.0.3.0/24'
       gateway_ip: 'null'
+      {% if provider %}
+      provider: {{ provider }}
+      physical_network: {{ physical_networks[1] }}
+        {% if segmentation_id %}
+      segmentation_id: {{ segmentation_id }}
+        {% endif %}
+      {% endif %}
       port_security_enabled: False
       enable_dhcp: 'false'
diff --git a/samples/vnf_samples/nsut/vfw/tc_heat_rfc2544_ipv4_1rule_1flow_64B_trex_scale-up.yaml b/samples/vnf_samples/nsut/vfw/tc_heat_rfc2544_ipv4_1rule_1flow_64B_trex_scale-up.yaml
new file mode 100644 (file)
index 0000000..eaeee71
--- /dev/null
@@ -0,0 +1,89 @@
+# Copyright (c) 2016-2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+{% set mem = mem or 20480 %}
+{% set vcpus = vcpus or 10 %}
+{% set vports = vports or 2 %}
+---
+schema: yardstick:task:0.1
+scenarios:
+- type: NSPerf
+  traffic_profile: ../../traffic_profiles/ipv4_throughput-scale-up.yaml
+  extra_args:
+    vports: {{ vports }}
+  topology: vfw-tg-topology-scale-up.yaml
+  nodes:
+    tg__0: tg_0.yardstick
+    vnf__0: vnf_0.yardstick
+  options:
+    framesize:
+      uplink: {64B: 100}
+      downlink: {64B: 100}
+    flow:
+      src_ip: [
+{% for vport in range(0,vports,2|int) %}
+       {'tg__0': 'xe{{vport}}'},
+{% endfor %}  ]
+      dst_ip: [
+{% for vport in range(1,vports,2|int) %}
+      {'tg__0': 'xe{{vport}}'},
+{% endfor %}  ]
+      count: 1
+    traffic_type: 4
+    rfc2544:
+      allowed_drop_rate: 0.0001 - 0.0001
+    vnf__0:
+      rules: acl_1rule.yaml
+      vnf_config: {lb_config: 'SW', file: vfw_vnf_pipeline_cores_{{vcpus}}_ports_{{vports}}_lb_1_sw.conf }
+  runner:
+    type: Iteration
+    iterations: 10
+    interval: 35
+context:
+  # put node context first, so we don't HEAT deploy if node has errors
+  name: yardstick
+  image: yardstick-samplevnfs
+  flavor:
+    vcpus: {{ vcpus }}
+    ram: {{ mem }}
+    disk: 6
+    extra_specs:
+      hw:cpu_sockets: 1
+      hw:cpu_cores: {{ vcpus }}
+      hw:cpu_threads: 1
+  user: ubuntu
+  placement_groups:
+    pgrp1:
+      policy: "availability"
+  servers:
+    tg_0:
+      floating_ip: true
+      placement: "pgrp1"
+    vnf_0:
+      floating_ip: true
+      placement: "pgrp1"
+  networks:
+    mgmt:
+      cidr: '10.0.1.0/24'
+{% for vport in range(1,vports,2|int) %}
+    uplink_{{loop.index0}}:
+      cidr: '10.1.{{vport}}.0/24'
+      gateway_ip: 'null'
+      port_security_enabled: False
+      enable_dhcp: 'false'
+    downlink_{{loop.index0}}:
+      cidr: '10.1.{{vport+1}}.0/24'
+      gateway_ip: 'null'
+      port_security_enabled: False
+      enable_dhcp: 'false'
+{% endfor %}
diff --git a/samples/vnf_samples/nsut/vfw/vfw-tg-topology-scale-up.yaml b/samples/vnf_samples/nsut/vfw/vfw-tg-topology-scale-up.yaml
new file mode 100644 (file)
index 0000000..d4bf8d6
--- /dev/null
@@ -0,0 +1,52 @@
+# Copyright (c) 2016-2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+---
+{% set vports = get(extra_args, 'vports', '2') %}
+nsd:nsd-catalog:
+    nsd:
+    -   id: 3tg-topology
+        name: 3tg-topology
+        short-name: 3tg-topology
+        description: 3tg-topology
+        constituent-vnfd:
+        -   member-vnf-index: '1'
+            vnfd-id-ref: tg__0
+            VNF model: ../../vnf_descriptors/tg_rfc2544_tpl.yaml      #VNF type
+        -   member-vnf-index: '2'
+            vnfd-id-ref: vnf__0
+            VNF model: ../../vnf_descriptors/vfw_vnf.yaml      #VNF type
+
+        vld:
+{% for vport in range(0,vports,2|int) %}
+        -   id: uplink_{{loop.index0}}
+            name: tg__0 to vnf__0 link {{vport + 1}}
+            type: ELAN
+            vnfd-connection-point-ref:
+            -   member-vnf-index-ref: '1'
+                vnfd-connection-point-ref: xe{{vport}}
+                vnfd-id-ref: tg__0
+            -   member-vnf-index-ref: '2'
+                vnfd-connection-point-ref: xe{{vport}}
+                vnfd-id-ref: vnf__0
+        -   id: downlink_{{loop.index0}}
+            name: vnf__0 to tg__0 link {{vport + 2}}
+            type: ELAN
+            vnfd-connection-point-ref:
+            -   member-vnf-index-ref: '2'
+                vnfd-connection-point-ref: xe{{vport+1}}
+                vnfd-id-ref: vnf__0
+            -   member-vnf-index-ref: '1'
+                vnfd-connection-point-ref: xe{{vport+1}}
+                vnfd-id-ref: tg__0
+{% endfor %}
diff --git a/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_2_lb_1_sw.conf b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_2_lb_1_sw.conf
new file mode 100644 (file)
index 0000000..b31d054
--- /dev/null
@@ -0,0 +1,52 @@
+[PIPELINE0]
+type = MASTER
+core = 0
+
+[PIPELINE1]
+type = ARPICMP
+core = 0
+pktq_in = SWQ0
+pktq_out = SWQ1
+pktq_in_prv = RXQ1.0
+prv_to_pub_map = (1,0)
+
+[PIPELINE2]
+type = TXRX
+core = 1
+pipeline_txrx_type = RXRX
+dest_if_offset = 176
+pktq_in = RXQ1.0 RXQ0.0
+pktq_out = SWQ2 SWQ3 SWQ0
+
+[PIPELINE3]
+type = LOADB
+core = 2
+pktq_in = SWQ2 SWQ3
+pktq_out = SWQ4 SWQ5
+outport_offset = 136
+n_vnf_threads = 1
+n_lb_tuples = 5
+loadb_debug = 0
+lib_arp_debug = 0
+prv_que_handler = (0,)
+
+[PIPELINE4]
+type = VFW
+core = 3
+pktq_in = SWQ4 SWQ5
+pktq_out = SWQ6 SWQ7
+n_rules = 10
+prv_que_handler = (0)
+n_flows = 2000000
+traffic_type = 4
+pkt_type = ipv4
+tcp_be_liberal = 0
+
+[PIPELINE5]
+type = TXRX
+core = 1
+pipeline_txrx_type = TXTX
+dest_if_offset = 176
+pktq_in = SWQ6 SWQ7 SWQ1
+pktq_out = TXQ1.0 TXQ0.0
+
diff --git a/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_4_lb_1_sw.conf b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_4_lb_1_sw.conf
new file mode 100644 (file)
index 0000000..3bf8dc6
--- /dev/null
@@ -0,0 +1,52 @@
+
+[PIPELINE0]
+type = MASTER
+core = 0
+
+[PIPELINE1]
+type = ARPICMP
+core = 0
+pktq_in = SWQ0
+pktq_out = SWQ1
+pktq_in_prv = RXQ2.0 RXQ3.0
+prv_to_pub_map = (2,0)(3,1)
+
+[PIPELINE2]
+type = TXRX
+core = 1
+pipeline_txrx_type = RXRX
+dest_if_offset = 176
+pktq_in = RXQ2.0 RXQ0.0 RXQ3.0 RXQ1.0
+pktq_out = SWQ2 SWQ3 SWQ4 SWQ5 SWQ0
+
+[PIPELINE3]
+type = LOADB
+core = 2
+pktq_in = SWQ2 SWQ3 SWQ4 SWQ5
+pktq_out = SWQ6 SWQ7 SWQ8 SWQ9
+outport_offset = 136
+n_vnf_threads = 1
+n_lb_tuples = 5
+loadb_debug = 0
+lib_arp_debug = 0
+prv_que_handler = (0,2,)
+
+[PIPELINE4]
+type = VFW
+core = 3
+pktq_in = SWQ6 SWQ7 SWQ8 SWQ9
+pktq_out = SWQ10 SWQ11 SWQ12 SWQ13
+n_rules = 10
+prv_que_handler = (0)
+n_flows = 2000000
+traffic_type = 4
+pkt_type = ipv4
+tcp_be_liberal = 0
+
+[PIPELINE5]
+type = TXRX
+core = 1
+pipeline_txrx_type = TXTX
+dest_if_offset = 176
+pktq_in = SWQ10 SWQ11 SWQ12 SWQ13 SWQ1
+pktq_out = TXQ2.0 TXQ0.0 TXQ3.0 TXQ1.0
diff --git a/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_6_lb_1_sw.conf b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_6_lb_1_sw.conf
new file mode 100644 (file)
index 0000000..1d55d88
--- /dev/null
@@ -0,0 +1,51 @@
+[PIPELINE0]
+type = MASTER
+core = 0
+
+[PIPELINE1]
+type = ARPICMP
+core = 0
+pktq_in = SWQ0
+pktq_out = SWQ1
+pktq_in_prv = RXQ5.0 RXQ3.0 RXQ4.0
+prv_to_pub_map = (5,2)(3,0)(4,1)
+
+[PIPELINE2]
+type = TXRX
+core = 1
+pipeline_txrx_type = RXRX
+dest_if_offset = 176
+pktq_in = RXQ5.0 RXQ2.0 RXQ3.0 RXQ0.0 RXQ4.0 RXQ1.0
+pktq_out = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7 SWQ0
+
+[PIPELINE3]
+type = LOADB
+core = 2
+pktq_in = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7
+pktq_out = SWQ8 SWQ9 SWQ10 SWQ11 SWQ12 SWQ13
+outport_offset = 136
+n_vnf_threads = 1
+n_lb_tuples = 5
+loadb_debug = 0
+lib_arp_debug = 0
+prv_que_handler = (0,2,4,)
+
+[PIPELINE4]
+type = VFW
+core = 3
+pktq_in = SWQ8 SWQ9 SWQ10 SWQ11 SWQ12 SWQ13
+pktq_out = SWQ14 SWQ15 SWQ16 SWQ17 SWQ18 SWQ19
+n_rules = 10
+prv_que_handler = (0)
+n_flows = 2000000
+traffic_type = 4
+pkt_type = ipv4
+tcp_be_liberal = 0
+
+[PIPELINE5]
+type = TXRX
+core = 1
+pipeline_txrx_type = TXTX
+dest_if_offset = 176
+pktq_in = SWQ14 SWQ15 SWQ16 SWQ17 SWQ18 SWQ19 SWQ1
+pktq_out = TXQ5.0 TXQ2.0 TXQ3.0 TXQ0.0 TXQ4.0 TXQ1.0
diff --git a/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_8_lb_1_sw.conf b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_8_lb_1_sw.conf
new file mode 100644 (file)
index 0000000..8434fee
--- /dev/null
@@ -0,0 +1,52 @@
+[PIPELINE0]
+type = MASTER
+core = 0
+
+[PIPELINE1]
+type = ARPICMP
+core = 0
+pktq_in = SWQ0
+pktq_out = SWQ1
+pktq_in_prv = RXQ6.0 RXQ7.0 RXQ4.0 RXQ5.0
+prv_to_pub_map = (6,2)(7,3)(4,0)(5,1)
+
+[PIPELINE2]
+type = TXRX
+core = 1
+pipeline_txrx_type = RXRX
+dest_if_offset = 176
+pktq_in = RXQ6.0 RXQ2.0 RXQ7.0 RXQ3.0 RXQ4.0 RXQ0.0 RXQ5.0 RXQ1.0
+pktq_out = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7 SWQ8 SWQ9 SWQ0
+
+[PIPELINE3]
+type = LOADB
+core = 2
+pktq_in = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7 SWQ8 SWQ9
+pktq_out = SWQ10 SWQ11 SWQ12 SWQ13 SWQ14 SWQ15 SWQ16 SWQ17
+outport_offset = 136
+n_vnf_threads = 1
+n_lb_tuples = 5
+loadb_debug = 0
+lib_arp_debug = 0
+prv_que_handler = (0,2,4,6,)
+
+[PIPELINE4]
+type = VFW
+core = 3
+pktq_in = SWQ10 SWQ11 SWQ12 SWQ13 SWQ14 SWQ15 SWQ16 SWQ17
+pktq_out = SWQ18 SWQ19 SWQ20 SWQ21 SWQ22 SWQ23 SWQ24 SWQ25
+n_rules = 10
+prv_que_handler = (0)
+n_flows = 2000000
+traffic_type = 4
+pkt_type = ipv4
+tcp_be_liberal = 0
+
+[PIPELINE5]
+type = TXRX
+core = 1
+pipeline_txrx_type = TXTX
+dest_if_offset = 176
+pktq_in = SWQ18 SWQ19 SWQ20 SWQ21 SWQ22 SWQ23 SWQ24 SWQ25 SWQ1
+pktq_out = TXQ6.0 TXQ2.0 TXQ7.0 TXQ3.0 TXQ4.0 TXQ0.0 TXQ5.0 TXQ1.0
+
diff --git a/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_8_ports_10_lb_1_sw.conf b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_8_ports_10_lb_1_sw.conf
new file mode 100644 (file)
index 0000000..51d97e0
--- /dev/null
@@ -0,0 +1,52 @@
+[PIPELINE0]
+type = MASTER
+core = 0
+
+[PIPELINE1]
+type = ARPICMP
+core = 0
+pktq_in = SWQ0
+pktq_out = SWQ1
+pktq_in_prv = RXQ7.0 RXQ8.0 RXQ5.0 RXQ6.0 RXQ9.0
+prv_to_pub_map = (7,2)(8,3)(5,0)(6,1)(9,4)
+
+[PIPELINE2]
+type = TXRX
+core = 1
+pipeline_txrx_type = RXRX
+dest_if_offset = 176
+pktq_in = RXQ7.0 RXQ2.0 RXQ8.0 RXQ3.0 RXQ5.0 RXQ0.0 RXQ6.0 RXQ1.0 RXQ9.0 RXQ4.0
+pktq_out = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7 SWQ8 SWQ9 SWQ10 SWQ11 SWQ0
+
+[PIPELINE3]
+type = LOADB
+core = 2
+pktq_in = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7 SWQ8 SWQ9 SWQ10 SWQ11
+pktq_out = SWQ12 SWQ13 SWQ14 SWQ15 SWQ16 SWQ17 SWQ18 SWQ19 SWQ20 SWQ21
+outport_offset = 136
+n_vnf_threads = 1
+n_lb_tuples = 5
+loadb_debug = 0
+lib_arp_debug = 0
+prv_que_handler = (0,2,4,6,8,)
+
+[PIPELINE4]
+type = VFW
+core = 3
+pktq_in = SWQ12 SWQ13 SWQ14 SWQ15 SWQ16 SWQ17 SWQ18 SWQ19 SWQ20 SWQ21
+pktq_out = SWQ22 SWQ23 SWQ24 SWQ25 SWQ26 SWQ27 SWQ28 SWQ29 SWQ30 SWQ31
+n_rules = 10
+prv_que_handler = (0)
+n_flows = 2000000
+traffic_type = 4
+pkt_type = ipv4
+tcp_be_liberal = 0
+
+[PIPELINE5]
+type = TXRX
+core = 1
+pipeline_txrx_type = TXTX
+dest_if_offset = 176
+pktq_in = SWQ22 SWQ23 SWQ24 SWQ25 SWQ26 SWQ27 SWQ28 SWQ29 SWQ30 SWQ31 SWQ1
+pktq_out = TXQ7.0 TXQ2.0 TXQ8.0 TXQ3.0 TXQ5.0 TXQ0.0 TXQ6.0 TXQ1.0 TXQ9.0 TXQ4.0
+
diff --git a/samples/vnf_samples/traffic_profiles/ipv4_throughput-scale-up.yaml b/samples/vnf_samples/traffic_profiles/ipv4_throughput-scale-up.yaml
new file mode 100644 (file)
index 0000000..d2cc18c
--- /dev/null
@@ -0,0 +1,102 @@
+# Copyright (c) 2016-2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# flow definition for ACL tests - 1K flows - ipv4 only
+#
+# the number of flows defines the widest range of parameters
+# for example if srcip_range=1.0.0.1-1.0.0.255 and dst_ip_range=10.0.0.1-10.0.1.255
+# and it should define only 16 flows
+#
+# there is an assumption that the generated packets will have random sequences of the following address pairs
+# in the packets
+# 1. src=1.x.x.x(x.x.x =random from 1..255) dst=10.x.x.x (random from 1..512)
+# 2. src=1.x.x.x(x.x.x =random from 1..255) dst=10.x.x.x (random from 1..512)
+# ...
+# 512. src=1.x.x.x(x.x.x =random from 1..255) dst=10.x.x.x (random from 1..512)
+#
+# not all combination should be filled
+# Any other field with random range will be added to flow definition
+#
+# the example.yaml provides all possibilities for traffic generation
+#
+# the profile defines a public and private side to make limited traffic correlation
+# between private and public side same way as it is made by IXIA solution.
+#
+{% set vports = get(extra_args, 'vports', '2') %}
+---
+schema: "nsb:traffic_profile:0.1"
+
+# This file is a template, it will be filled with values from tc.yaml before passing to the traffic generator
+
+name: rfc2544
+description: Traffic profile to run RFC2544 latency
+traffic_profile:
+  traffic_type: RFC2544Profile # defines traffic behavior - constant or look for highest possible throughput
+  frame_rate: 100  # pc of linerate
+  # that specifies a range (e.g. ipv4 address, port)
+{% set count = 0 %}
+{% for vport in range(vports|int) %}
+uplink_{{vport}}:
+  ipv4:
+    id: {{count + 1 }}
+    outer_l2:
+      framesize:
+        64B: "{{ get(imix, 'imix.uplink.64B', '0') }}"
+        128B: "{{ get(imix, 'imix.uplink.128B', '0') }}"
+        256B: "{{ get(imix, 'imix.uplink.256B', '0') }}"
+        373b: "{{ get(imix, 'imix.uplink.373B', '0') }}"
+        512B: "{{ get(imix, 'imix.uplink.512B', '0') }}"
+        570B: "{{ get(imix, 'imix.uplink.570B', '0') }}"
+        1400B: "{{ get(imix, 'imix.uplink.1400B', '0') }}"
+        1500B: "{{ get(imix, 'imix.uplink.1500B', '0') }}"
+        1518B: "{{ get(imix, 'imix.uplink.1518B', '0') }}"
+    outer_l3v4:
+      proto: "udp"
+      srcip4: "{{ get(flow, 'flow.src_ip_{{vport}}', '1.1.1.1-1.1.255.255') }}"
+      dstip4: "{{ get(flow, 'flow.dst_ip_{{vport}}', '90.90.1.1-90.90.255.255') }}"
+      count: "{{ get(flow, 'flow.count', '1') }}"
+      ttl: 32
+      dscp: 0
+    outer_l4:
+      srcport: "{{ get(flow, 'flow.src_port_{{vport}}', '1234-4321') }}"
+      dstport: "{{ get(flow, 'flow.dst_port_{{vport}}', '2001-4001') }}"
+      count: "{{ get(flow, 'flow.count', '1') }}"
+downlink_{{vport}}:
+  ipv4:
+    id: {{count + 2}}
+    outer_l2:
+      framesize:
+        64B: "{{ get(imix, 'imix.downlink.64B', '0') }}"
+        128B: "{{ get(imix, 'imix.downlink.128B', '0') }}"
+        256B: "{{ get(imix, 'imix.downlink.256B', '0') }}"
+        373b: "{{ get(imix, 'imix.downlink.373B', '0') }}"
+        512B: "{{ get(imix, 'imix.downlink.512B', '0') }}"
+        570B: "{{ get(imix, 'imix.downlink.570B', '0') }}"
+        1400B: "{{ get(imix, 'imix.downlink.1400B', '0') }}"
+        1500B: "{{ get(imix, 'imix.downlink.1500B', '0') }}"
+        1518B: "{{ get(imix, 'imix.downlink.1518B', '0') }}"
+
+    outer_l3v4:
+      proto: "udp"
+      srcip4: "{{ get(flow, 'flow.dst_ip_{{vport}}', '90.90.1.1-90.90.255.255') }}"
+      dstip4: "{{ get(flow, 'flow.src_ip_{{vport}}', '1.1.1.1-1.1.255.255') }}"
+      count: "{{ get(flow, 'flow.count', '1') }}"
+      ttl: 32
+      dscp: 0
+    outer_l4:
+      srcport: "{{ get(flow, 'flow.dst_port_{{vport}}', '1234-4321') }}"
+      dstport: "{{ get(flow, 'flow.src_port_{{vport}}', '2001-4001') }}"
+      count: "{{ get(flow, 'flow.count', '1') }}"
+{% set count = count + 2 %}
+{% endfor %}
\ No newline at end of file
index 805250e..e1a4f59 100644 (file)
@@ -21,9 +21,9 @@ traffic_profile:
   traffic_type: ProxBinSearchProfile
   tolerated_loss: 0.001
   test_precision: 0.1
-#  packet_sizes: [64, 128, 256, 512, 1024, 1280, 1518]
-  packet_sizes: [64]
-  duration: 10
+#  packet_sizes: [64, 128, 256, 512, 1024, 1280, 1518]
+  packet_sizes: [64]
+  duration: 30
   lower_bound: 0.0
   upper_bound: 100.0
 
index ee9815c..4828e98 100644 (file)
@@ -4,6 +4,7 @@
 
 coverage==4.4.2             # Apache 2.0; OSI Approved  Apache Software License; http://www.apache.org/licenses/LICENSE-2.0; http://www.apache.org/licenses/LICENSE-2.0
 fixtures==3.0.0             # OSI Approved  BSD License; OSI Approved  Apache Software License
+oslotest===2.17.1           # OSI Approved  Apache Software License
 packaging==16.8.0           # BSD or Apache License, Version 2.0
 pyflakes==1.0.0             # MIT; OSI Approved  MIT License
 pylint==1.8.1               # GPLv2
index caef8ac..37b72b3 100755 (executable)
@@ -28,34 +28,12 @@ OPENRC=/etc/yardstick/openstack.creds
 INSTALLERS=(apex compass fuel joid)
 
 RC_VAR_EXIST=false
-if [ "${OS_AUTH_URL}" -a "${OS_USERNAME}" -a "${OS_PASSWORD}" -a "${EXTERNAL_NETWORK}" ];then
+if [[ "${OS_AUTH_URL}" && "${OS_USERNAME}" && "${OS_PASSWORD}" && "${EXTERNAL_NETWORK}" ]];then
     RC_VAR_EXIST=true
 fi
 
-if [ "${RC_VAR_EXIST}" = false ]; then
-    if [ ! -f $OPENRC ];then
-        # credentials file is not given, check if environment variables are set
-        # to get the creds using fetch_os_creds.sh later on
-        echo "INFO: Checking environment variables INSTALLER_TYPE and INSTALLER_IP"
-        if [ -z ${INSTALLER_TYPE} ]; then
-            echo "environment variable 'INSTALLER_TYPE' is not defined."
-            exit 1
-        elif [[ ${INSTALLERS[@]} =~ ${INSTALLER_TYPE} ]]; then
-            echo "INSTALLER_TYPE env variable found: ${INSTALLER_TYPE}"
-        else
-            echo "Invalid env variable INSTALLER_TYPE=${INSTALLER_TYPE}"
-            exit 1
-        fi
-
-        if [ "$DEPLOY_TYPE" == "virt" ]; then
-            FETCH_CRED_ARG="-v -d $OPENRC -i ${INSTALLER_TYPE} -a ${INSTALLER_IP}"
-        else
-            FETCH_CRED_ARG="-d $OPENRC -i ${INSTALLER_TYPE} -a ${INSTALLER_IP}"
-        fi
-
-        $RELENG_REPO_DIR/utils/fetch_os_creds.sh $FETCH_CRED_ARG
-    fi
-    source $OPENRC
+if [[ "${RC_VAR_EXIST}" = false && -f ${OPENRC} ]]; then
+    . ${OPENRC}
 fi
 
 export EXTERNAL_NETWORK INSTALLER_TYPE DEPLOY_TYPE NODE_NAME
index 558375e..ef7c229 100755 (executable)
@@ -15,8 +15,9 @@ AUTH_URL=${OS_AUTH_URL}
 USERNAME=${OS_USERNAME:-admin}
 PASSWORD=${OS_PASSWORD:-console}
 
+# OS_TENANT_NAME is still present to keep backward compatibility with legacy
+# deployments, but should be replaced by OS_PROJECT_NAME.
 TENANT_NAME=${OS_TENANT_NAME:-admin}
-TENANT_ID=`openstack project show admin|grep '\bid\b' |awk -F '|' '{print $3}'|sed -e 's/^[[:space:]]*//'`
 PROJECT_NAME=${OS_PROJECT_NAME:-$TENANT_NAME}
 PROJECT_ID=`openstack project show admin|grep '\bid\b' |awk -F '|' '{print $3}'|sed -e 's/^[[:space:]]*//'`
 
@@ -30,8 +31,6 @@ echo "OS_USERNAME="$USERNAME >> ~/storperf_admin-rc
 echo "OS_PASSWORD="$PASSWORD >> ~/storperf_admin-rc
 echo "OS_PROJECT_NAME="$PROJECT_NAME >> ~/storperf_admin-rc
 echo "OS_PROJECT_ID="$PROJECT_ID >> ~/storperf_admin-rc
-echo "OS_TENANT_NAME="$TENANT_NAME >> ~/storperf_admin-rc
-echo "OS_TENANT_ID="$TENANT_ID >> ~/storperf_admin-rc
 echo "OS_USER_DOMAIN_ID="$USER_DOMAIN_ID >> ~/storperf_admin-rc
 echo "OS_PROJECT_DOMAIN_NAME="$OS_PROJECT_DOMAIN_NAME >> ~/storperf_admin-rc
 echo "OS_USER_DOMAIN_NAME="$OS_USER_DOMAIN_NAME >> ~/storperf_admin-rc
diff --git a/tests/opnfv/test_suites/opnfv_os-odl-bgpvpn-noha_daily.yaml b/tests/opnfv/test_suites/opnfv_os-odl-bgpvpn-noha_daily.yaml
new file mode 100644 (file)
index 0000000..e55b833
--- /dev/null
@@ -0,0 +1,23 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+# os-odl-bgpvpn-noha daily task suite
+
+schema: "yardstick:suite:0.1"
+
+name: "os-odl-bgpvpn-noha"
+test_cases_dir: "tests/opnfv/test_cases/"
+test_cases:
+-
+  file_name: opnfv_yardstick_tc002.yaml
+-
+  file_name: opnfv_yardstick_tc005.yaml
+-
+  file_name: opnfv_yardstick_tc012.yaml
index a468b27..5935abb 100644 (file)
 # See the License for the specific language governing permissions and\r
 # limitations under the License.\r
 \r
-from __future__ import absolute_import\r
-import mock\r
+from yardstick import tests\r
 \r
 \r
-STL_MOCKS = {\r
-    'trex_stl_lib': mock.MagicMock(),\r
-    'trex_stl_lib.base64': mock.MagicMock(),\r
-    'trex_stl_lib.binascii': mock.MagicMock(),\r
-    'trex_stl_lib.collections': mock.MagicMock(),\r
-    'trex_stl_lib.copy': mock.MagicMock(),\r
-    'trex_stl_lib.datetime': mock.MagicMock(),\r
-    'trex_stl_lib.functools': mock.MagicMock(),\r
-    'trex_stl_lib.imp': mock.MagicMock(),\r
-    'trex_stl_lib.inspect': mock.MagicMock(),\r
-    'trex_stl_lib.json': mock.MagicMock(),\r
-    'trex_stl_lib.linecache': mock.MagicMock(),\r
-    'trex_stl_lib.math': mock.MagicMock(),\r
-    'trex_stl_lib.os': mock.MagicMock(),\r
-    'trex_stl_lib.platform': mock.MagicMock(),\r
-    'trex_stl_lib.pprint': mock.MagicMock(),\r
-    'trex_stl_lib.random': mock.MagicMock(),\r
-    'trex_stl_lib.re': mock.MagicMock(),\r
-    'trex_stl_lib.scapy': mock.MagicMock(),\r
-    'trex_stl_lib.socket': mock.MagicMock(),\r
-    'trex_stl_lib.string': mock.MagicMock(),\r
-    'trex_stl_lib.struct': mock.MagicMock(),\r
-    'trex_stl_lib.sys': mock.MagicMock(),\r
-    'trex_stl_lib.threading': mock.MagicMock(),\r
-    'trex_stl_lib.time': mock.MagicMock(),\r
-    'trex_stl_lib.traceback': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_async_client': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_client': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_exceptions': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_ext': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_jsonrpc_client': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_packet_builder_interface': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_packet_builder_scapy': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_port': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_stats': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_streams': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_types': mock.MagicMock(),\r
-    'trex_stl_lib.types': mock.MagicMock(),\r
-    'trex_stl_lib.utils': mock.MagicMock(),\r
-    'trex_stl_lib.utils.argparse': mock.MagicMock(),\r
-    'trex_stl_lib.utils.collections': mock.MagicMock(),\r
-    'trex_stl_lib.utils.common': mock.MagicMock(),\r
-    'trex_stl_lib.utils.json': mock.MagicMock(),\r
-    'trex_stl_lib.utils.os': mock.MagicMock(),\r
-    'trex_stl_lib.utils.parsing_opts': mock.MagicMock(),\r
-    'trex_stl_lib.utils.pwd': mock.MagicMock(),\r
-    'trex_stl_lib.utils.random': mock.MagicMock(),\r
-    'trex_stl_lib.utils.re': mock.MagicMock(),\r
-    'trex_stl_lib.utils.string': mock.MagicMock(),\r
-    'trex_stl_lib.utils.sys': mock.MagicMock(),\r
-    'trex_stl_lib.utils.text_opts': mock.MagicMock(),\r
-    'trex_stl_lib.utils.text_tables': mock.MagicMock(),\r
-    'trex_stl_lib.utils.texttable': mock.MagicMock(),\r
-    'trex_stl_lib.warnings': mock.MagicMock(),\r
-    'trex_stl_lib.yaml': mock.MagicMock(),\r
-    'trex_stl_lib.zlib': mock.MagicMock(),\r
-    'trex_stl_lib.zmq': mock.MagicMock(),\r
-}\r
+# NOTE(ralonsoh): to be removed. Replace all occurrences of\r
+# tests.unit.STL_MOCKS with yardstick.tests.STL_MOCKS\r
+STL_MOCKS = tests.STL_MOCKS\r
index d1e56e1..4a17584 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
index f324f62..d4b4ecf 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
index 7ea6bd0..1f9d3f2 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
index e30aee8..367072e 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 
 import mock
 import unittest
+
+import os
+
+from yardstick.error import IncorrectConfig, SSHError
+from yardstick.error import IncorrectNodeSetup
+from yardstick.error import IncorrectSetup
+from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkInterface
+from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkNode
 from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkBindHelper
 from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkBindHelperException
 from yardstick.network_services.helpers.dpdkbindnic_helper import NETWORK_KERNEL
@@ -26,7 +32,269 @@ from yardstick.network_services.helpers.dpdkbindnic_helper import NETWORK_OTHER
 from yardstick.network_services.helpers.dpdkbindnic_helper import CRYPTO_OTHER
 
 
+NAME = "tg_0"
+
+
+class TestDpdkInterface(unittest.TestCase):
+
+    SAMPLE_NETDEVS = {
+        'enp11s0': {
+            'address': '0a:de:ad:be:ef:f5',
+            'device': '0x1533',
+            'driver': 'igb',
+            'ifindex': '2',
+            'interface_name': 'enp11s0',
+            'operstate': 'down',
+            'pci_bus_id': '0000:0b:00.0',
+            'subsystem_device': '0x1533',
+            'subsystem_vendor': '0x15d9',
+            'vendor': '0x8086'
+        },
+        'lan': {
+            'address': '0a:de:ad:be:ef:f4',
+            'device': '0x153a',
+            'driver': 'e1000e',
+            'ifindex': '3',
+            'interface_name': 'lan',
+            'operstate': 'up',
+            'pci_bus_id': '0000:00:19.0',
+            'subsystem_device': '0x153a',
+            'subsystem_vendor': '0x15d9',
+            'vendor': '0x8086'
+        }
+    }
+
+    SAMPLE_VM_NETDEVS = {
+        'eth1': {
+            'address': 'fa:de:ad:be:ef:5b',
+            'device': '0x0001',
+            'driver': 'virtio_net',
+            'ifindex': '3',
+            'interface_name': 'eth1',
+            'operstate': 'down',
+            'pci_bus_id': '0000:00:04.0',
+            'vendor': '0x1af4'
+        }
+    }
+
+    def test_parse_netdev_info(self):
+        output = """\
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/ifindex:2
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/address:0a:de:ad:be:ef:f5
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/operstate:down
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/vendor:0x8086
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/device:0x1533
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/subsystem_vendor:0x15d9
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/subsystem_device:0x1533
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/driver:igb
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/pci_bus_id:0000:0b:00.0
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/ifindex:3
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/address:0a:de:ad:be:ef:f4
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/operstate:up
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/vendor:0x8086
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/device:0x153a
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/subsystem_vendor:0x15d9
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/subsystem_device:0x153a
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/driver:e1000e
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/pci_bus_id:0000:00:19.0
+"""
+        res = DpdkBindHelper.parse_netdev_info(output)
+        self.assertDictEqual(res, self.SAMPLE_NETDEVS)
+
+    def test_parse_netdev_info_virtio(self):
+        output = """\
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/ifindex:3
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/address:fa:de:ad:be:ef:5b
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/operstate:down
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/device/vendor:0x1af4
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/device/device:0x0001
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/driver:virtio_net
+"""
+        res = DpdkBindHelper.parse_netdev_info(output)
+        self.assertDictEqual(res, self.SAMPLE_VM_NETDEVS)
+
+    def test_probe_missing_values(self):
+        mock_dpdk_node = mock.Mock()
+        mock_dpdk_node.netdevs = self.SAMPLE_NETDEVS.copy()
+
+        interface = {'local_mac': '0a:de:ad:be:ef:f5'}
+        dpdk_intf = DpdkInterface(mock_dpdk_node, interface)
+
+        dpdk_intf.probe_missing_values()
+        self.assertEqual(interface['vpci'], '0000:0b:00.0')
+
+        interface['local_mac'] = '0a:de:ad:be:ef:f4'
+        dpdk_intf.probe_missing_values()
+        self.assertEqual(interface['vpci'], '0000:00:19.0')
+
+    def test_probe_missing_values_no_update(self):
+        mock_dpdk_node = mock.Mock()
+        mock_dpdk_node.netdevs = self.SAMPLE_NETDEVS.copy()
+        del mock_dpdk_node.netdevs['enp11s0']['driver']
+        del mock_dpdk_node.netdevs['lan']['driver']
+
+        interface = {'local_mac': '0a:de:ad:be:ef:f5'}
+        dpdk_intf = DpdkInterface(mock_dpdk_node, interface)
+
+        dpdk_intf.probe_missing_values()
+        self.assertNotIn('vpci', interface)
+        self.assertNotIn('driver', interface)
+
+    def test_probe_missing_values_negative(self):
+        mock_dpdk_node = mock.Mock()
+        mock_dpdk_node.netdevs.values.side_effect = IncorrectNodeSetup
+
+        interface = {'local_mac': '0a:de:ad:be:ef:f5'}
+        dpdk_intf = DpdkInterface(mock_dpdk_node, interface)
+
+        with self.assertRaises(IncorrectConfig):
+            dpdk_intf.probe_missing_values()
+
+
+class TestDpdkNode(unittest.TestCase):
+
+    INTERFACES = [
+        {'name': 'name1',
+         'virtual-interface': {
+             'local_mac': 404,
+             'vpci': 'pci10',
+         }},
+        {'name': 'name2',
+         'virtual-interface': {
+             'local_mac': 404,
+             'vpci': 'pci2',
+         }},
+        {'name': 'name3',
+         'virtual-interface': {
+             'local_mac': 404,
+             'vpci': 'some-pci1',
+         }},
+    ]
+
+    def test_probe_dpdk_drivers(self):
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.return_value = 0, '', ''
+
+        interfaces = [
+            {'name': 'name1',
+             'virtual-interface': {
+                 'local_mac': 404,
+                 'vpci': 'pci10',
+             }},
+            {'name': 'name2',
+             'virtual-interface': {
+                 'local_mac': 404,
+                 'vpci': 'pci2',
+             }},
+            {'name': 'name3',
+             'virtual-interface': {
+                 'local_mac': 404,
+                 'vpci': 'some-pci1',
+             }},
+        ]
+
+        dpdk_node = DpdkNode(NAME, interfaces, mock_ssh_helper)
+        dpdk_helper = dpdk_node.dpdk_helper
+
+        dpdk_helper.probe_real_kernel_drivers = mock.Mock()
+        dpdk_helper.real_kernel_interface_driver_map = {
+            'pci1': 'driver1',
+            'pci2': 'driver2',
+            'pci3': 'driver3',
+            'pci4': 'driver1',
+            'pci6': 'driver3',
+        }
+
+        dpdk_node._probe_dpdk_drivers()
+        self.assertNotIn('driver', interfaces[0]['virtual-interface'])
+        self.assertEqual(interfaces[1]['virtual-interface']['driver'], 'driver2')
+        self.assertEqual(interfaces[2]['virtual-interface']['driver'], 'driver1')
+
+    def test_check(self):
+        def update():
+            if not mock_force_rebind.called:
+                raise IncorrectConfig
+
+            interfaces[0]['virtual-interface'].update({
+                'vpci': '0000:01:02.1',
+                'local_ip': '10.20.30.40',
+                'netmask': '255.255.0.0',
+                'driver': 'ixgbe',
+            })
+
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.return_value = 0, '', ''
+
+        interfaces = [
+            {'name': 'name1',
+             'virtual-interface': {
+                 'local_mac': 404,
+             }},
+        ]
+
+        dpdk_node = DpdkNode(NAME, interfaces, mock_ssh_helper)
+        dpdk_node._probe_missing_values = mock_probe_missing = mock.Mock(side_effect=update)
+        dpdk_node._force_rebind = mock_force_rebind = mock.Mock()
+
+        self.assertIsNone(dpdk_node.check())
+        self.assertEqual(mock_probe_missing.call_count, 2)
+
+    @mock.patch('yardstick.network_services.helpers.dpdkbindnic_helper.DpdkInterface')
+    def test_check_negative(self, mock_intf_type):
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.return_value = 0, '', ''
+
+        mock_intf_type().check.side_effect = SSHError
+
+        dpdk_node = DpdkNode(NAME, self.INTERFACES, mock_ssh_helper)
+
+        with self.assertRaises(IncorrectSetup):
+            dpdk_node.check()
+
+    def test_probe_netdevs(self):
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.return_value = 0, '', ''
+
+        expected = {'key1': 500, 'key2': 'hello world'}
+        update = {'key1': 1000, 'key3': []}
+
+        dpdk_node = DpdkNode(NAME, self.INTERFACES, mock_ssh_helper)
+        dpdk_helper = dpdk_node.dpdk_helper
+        dpdk_helper.find_net_devices = mock.Mock(side_effect=[expected, update])
+
+        self.assertDictEqual(dpdk_node.netdevs, {})
+        dpdk_node._probe_netdevs()
+        self.assertDictEqual(dpdk_node.netdevs, expected)
+
+        expected = {'key1': 1000, 'key2': 'hello world', 'key3': []}
+        dpdk_node._probe_netdevs()
+        self.assertDictEqual(dpdk_node.netdevs, expected)
+
+    def test_probe_netdevs_setup_negative(self):
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.return_value = 0, '', ''
+
+        dpdk_node = DpdkNode(NAME, self.INTERFACES, mock_ssh_helper)
+        dpdk_helper = dpdk_node.dpdk_helper
+        dpdk_helper.find_net_devices = mock.Mock(side_effect=DpdkBindHelperException)
+
+        with self.assertRaises(DpdkBindHelperException):
+            dpdk_node._probe_netdevs()
+
+    def test_force_rebind(self):
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.return_value = 0, '', ''
+
+        dpdk_node = DpdkNode(NAME, self.INTERFACES, mock_ssh_helper)
+        dpdk_helper = dpdk_node.dpdk_helper
+        dpdk_helper.force_dpdk_rebind = mock_helper_func = mock.Mock()
+
+        dpdk_node._force_rebind()
+        self.assertEqual(mock_helper_func.call_count, 1)
+
+
 class TestDpdkBindHelper(unittest.TestCase):
+    bin_path = "/opt/nsb_bin"
     EXAMPLE_OUTPUT = """
 
 Network devices using DPDK-compatible driver
@@ -111,13 +379,15 @@ Other crypto devices
     def test___init__(self):
         conn = mock.Mock()
         conn.provision_tool = mock.Mock(return_value='path_to_tool')
+        conn.join_bin_path.return_value = os.path.join(self.bin_path, DpdkBindHelper.DPDK_DEVBIND)
 
         dpdk_bind_helper = DpdkBindHelper(conn)
 
         self.assertEqual(conn, dpdk_bind_helper.ssh_helper)
         self.assertEqual(self.CLEAN_STATUS, dpdk_bind_helper.dpdk_status)
         self.assertIsNone(dpdk_bind_helper.status_nic_row_re)
-        self.assertIsNone(dpdk_bind_helper._dpdk_devbind)
+        self.assertEqual(dpdk_bind_helper.dpdk_devbind,
+                         os.path.join(self.bin_path, dpdk_bind_helper.DPDK_DEVBIND))
         self.assertIsNone(dpdk_bind_helper._status_cmd_attr)
 
     def test__dpdk_execute(self):
@@ -125,8 +395,7 @@ Other crypto devices
         conn.execute = mock.Mock(return_value=(0, 'output', 'error'))
         conn.provision_tool = mock.Mock(return_value='tool_path')
         dpdk_bind_helper = DpdkBindHelper(conn)
-        self.assertEqual((0, 'output', 'error'),
-                         dpdk_bind_helper._dpdk_execute('command'))
+        self.assertEqual((0, 'output', 'error'), dpdk_bind_helper._dpdk_execute('command'))
 
     def test__dpdk_execute_failure(self):
         conn = mock.Mock()
@@ -141,7 +410,7 @@ Other crypto devices
 
         dpdk_bind_helper = DpdkBindHelper(conn)
 
-        dpdk_bind_helper._addline(NETWORK_KERNEL, self.ONE_INPUT_LINE)
+        dpdk_bind_helper._add_line(NETWORK_KERNEL, self.ONE_INPUT_LINE)
 
         self.assertIsNotNone(dpdk_bind_helper.dpdk_status)
         self.assertEqual(self.ONE_INPUT_LINE_PARSED, dpdk_bind_helper.dpdk_status[NETWORK_KERNEL])
@@ -161,11 +430,35 @@ Other crypto devices
 
         dpdk_bind_helper = DpdkBindHelper(conn)
 
-        dpdk_bind_helper.parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
+        dpdk_bind_helper._parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
 
         self.maxDiff = None
         self.assertEqual(self.PARSED_EXAMPLE, dpdk_bind_helper.dpdk_status)
 
+    def test_kernel_bound_pci_addresses(self):
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.return_value = 0, '', ''
+
+        expected = ['a', 'b', 3]
+
+        dpdk_helper = DpdkBindHelper(mock_ssh_helper)
+        dpdk_helper.dpdk_status = {
+            NETWORK_DPDK: [{'vpci': 4}, {'vpci': 5}, {'vpci': 'g'}],
+            NETWORK_KERNEL: [{'vpci': 'a'}, {'vpci': 'b'}, {'vpci': 3}],
+            CRYPTO_DPDK: [],
+        }
+
+        result = dpdk_helper.kernel_bound_pci_addresses
+        self.assertEqual(result, expected)
+
+    def test_find_net_devices_negative(self):
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.return_value = 1, 'error', 'debug'
+
+        dpdk_helper = DpdkBindHelper(mock_ssh_helper)
+
+        self.assertDictEqual(dpdk_helper.find_net_devices(), {})
+
     def test_read_status(self):
         conn = mock.Mock()
         conn.execute = mock.Mock(return_value=(0, self.EXAMPLE_OUTPUT, ''))
@@ -180,7 +473,7 @@ Other crypto devices
 
         dpdk_bind_helper = DpdkBindHelper(conn)
 
-        dpdk_bind_helper.parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
+        dpdk_bind_helper._parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
 
         self.assertEqual(['0000:00:04.0', '0000:00:05.0'],
                           dpdk_bind_helper._get_bound_pci_addresses(NETWORK_DPDK))
@@ -192,18 +485,18 @@ Other crypto devices
 
         dpdk_bind_helper = DpdkBindHelper(conn)
 
-        dpdk_bind_helper.parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
+        dpdk_bind_helper._parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
 
         self.assertEqual({'0000:00:04.0': 'igb_uio',
-                           '0000:00:03.0': 'virtio-pci',
-                           '0000:00:05.0': 'igb_uio',
-                           },
-                          dpdk_bind_helper.interface_driver_map)
+                          '0000:00:03.0': 'virtio-pci',
+                          '0000:00:05.0': 'igb_uio',
+                          },
+                         dpdk_bind_helper.interface_driver_map)
 
     def test_bind(self):
         conn = mock.Mock()
         conn.execute = mock.Mock(return_value=(0, '', ''))
-        conn.provision_tool = mock.Mock(return_value='/opt/nsb_bin/dpdk-devbind.py')
+        conn.join_bin_path.return_value = os.path.join(self.bin_path, DpdkBindHelper.DPDK_DEVBIND)
 
         dpdk_bind_helper = DpdkBindHelper(conn)
         dpdk_bind_helper.read_status = mock.Mock()
@@ -217,7 +510,7 @@ Other crypto devices
     def test_bind_single_pci(self):
         conn = mock.Mock()
         conn.execute = mock.Mock(return_value=(0, '', ''))
-        conn.provision_tool = mock.Mock(return_value='/opt/nsb_bin/dpdk-devbind.py')
+        conn.join_bin_path.return_value = os.path.join(self.bin_path, DpdkBindHelper.DPDK_DEVBIND)
 
         dpdk_bind_helper = DpdkBindHelper(conn)
         dpdk_bind_helper.read_status = mock.Mock()
@@ -257,3 +550,84 @@ Other crypto devices
         }
 
         self.assertDictEqual(expected, dpdk_bind_helper.used_drivers)
+
+    def test_force_dpdk_rebind(self):
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.return_value = 0, '', ''
+
+        dpdk_helper = DpdkBindHelper(mock_ssh_helper, 'driver2')
+        dpdk_helper.dpdk_status = {
+            NETWORK_DPDK: [
+                {
+                    'vpci': 'pci1',
+                },
+                {
+                    'vpci': 'pci3',
+                },
+                {
+                    'vpci': 'pci6',
+                },
+                {
+                    'vpci': 'pci3',
+                },
+            ]
+        }
+        dpdk_helper.real_kernel_interface_driver_map = {
+            'pci1': 'real_driver1',
+            'pci2': 'real_driver2',
+            'pci3': 'real_driver1',
+            'pci4': 'real_driver4',
+            'pci6': 'real_driver6',
+        }
+        dpdk_helper.load_dpdk_driver = mock.Mock()
+        dpdk_helper.read_status = mock.Mock()
+        dpdk_helper.save_real_kernel_interface_driver_map = mock.Mock()
+        dpdk_helper.save_used_drivers = mock.Mock()
+        dpdk_helper.bind = mock_bind = mock.Mock()
+
+        dpdk_helper.force_dpdk_rebind()
+        self.assertEqual(mock_bind.call_count, 2)
+
+    def test_save_real_kernel_drivers(self):
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.return_value = 0, '', ''
+
+        dpdk_helper = DpdkBindHelper(mock_ssh_helper)
+        dpdk_helper.real_kernel_drivers = {
+            'abc': '123',
+        }
+        dpdk_helper.real_kernel_interface_driver_map = {
+            'abc': 'AAA',
+            'def': 'DDD',
+            'abs': 'AAA',
+            'ghi': 'GGG',
+        }
+
+        # save_used_drivers must be called before save_real_kernel_drivers can be
+        with self.assertRaises(AttributeError):
+            dpdk_helper.save_real_kernel_drivers()
+
+        dpdk_helper.save_used_drivers()
+
+        expected_used_drivers = {
+            'AAA': ['abc', 'abs'],
+            'DDD': ['def'],
+            'GGG': ['ghi'],
+        }
+        dpdk_helper.save_real_kernel_drivers()
+        self.assertDictEqual(dpdk_helper.used_drivers, expected_used_drivers)
+        self.assertDictEqual(dpdk_helper.real_kernel_drivers, {})
+
+    def test_get_real_kernel_driver(self):
+        mock_ssh_helper = mock.Mock()
+        mock_ssh_helper.execute.side_effect = [
+            (0, 'non-matching text', ''),
+            (0, 'pre Kernel modules: real_driver1', ''),
+            (0, 'before Ethernet middle Virtio network device after', ''),
+        ]
+
+        dpdk_helper = DpdkBindHelper(mock_ssh_helper)
+
+        self.assertIsNone(dpdk_helper.get_real_kernel_driver('abc'))
+        self.assertEqual(dpdk_helper.get_real_kernel_driver('abc'), 'real_driver1')
+        self.assertEqual(dpdk_helper.get_real_kernel_driver('abc'), DpdkBindHelper.VIRTIO_DRIVER)
index 05acdfa..3b6c89d 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # limitations under the License.
 #
 
-from __future__ import absolute_import
-from __future__ import division
-
-import unittest
-
 import mock
+import os
+import six
+import unittest
 
-from yardstick.network_services.helpers.samplevnf_helper import MultiPortConfig, PortPairs
+from yardstick.network_services.helpers import samplevnf_helper
 from yardstick.network_services.vnf_generic.vnf.base import VnfdHelper
 
 
@@ -30,31 +26,32 @@ class TestPortPairs(unittest.TestCase):
     def test_port_pairs_list(self):
         vnfd = TestMultiPortConfig.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         interfaces = vnfd['vdu'][0]['external-interface']
-        port_pairs = PortPairs(interfaces)
+        port_pairs = samplevnf_helper.PortPairs(interfaces)
         self.assertEqual(port_pairs.port_pair_list, [("xe0", "xe1")])
 
     def test_valid_networks(self):
         vnfd = TestMultiPortConfig.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         interfaces = vnfd['vdu'][0]['external-interface']
-        port_pairs = PortPairs(interfaces)
-        self.assertEqual(port_pairs.valid_networks, [("uplink_0", "downlink_0")])
+        port_pairs = samplevnf_helper.PortPairs(interfaces)
+        self.assertEqual(port_pairs.valid_networks, [
+                         ("uplink_0", "downlink_0")])
 
     def test_all_ports(self):
         vnfd = TestMultiPortConfig.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         interfaces = vnfd['vdu'][0]['external-interface']
-        port_pairs = PortPairs(interfaces)
+        port_pairs = samplevnf_helper.PortPairs(interfaces)
         self.assertEqual(set(port_pairs.all_ports), {"xe0", "xe1"})
 
     def test_uplink_ports(self):
         vnfd = TestMultiPortConfig.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         interfaces = vnfd['vdu'][0]['external-interface']
-        port_pairs = PortPairs(interfaces)
+        port_pairs = samplevnf_helper.PortPairs(interfaces)
         self.assertEqual(port_pairs.uplink_ports, ["xe0"])
 
     def test_downlink_ports(self):
         vnfd = TestMultiPortConfig.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         interfaces = vnfd['vdu'][0]['external-interface']
-        port_pairs = PortPairs(interfaces)
+        port_pairs = samplevnf_helper.PortPairs(interfaces)
         self.assertEqual(port_pairs.downlink_ports, ["xe1"])
 
 
@@ -63,14 +60,14 @@ class TestMultiPortConfig(unittest.TestCase):
     VNFD_0 = {'short-name': 'VpeVnf',
               'vdu':
                   [{'routing_table':
-                        [{'network': '152.16.100.20',
-                          'netmask': '255.255.255.0',
-                          'gateway': '152.16.100.20',
-                          'if': 'xe0'},
-                         {'network': '152.16.40.20',
-                          'netmask': '255.255.255.0',
-                          'gateway': '152.16.40.20',
-                          'if': 'xe1'}],
+                    [{'network': '152.16.100.20',
+                      'netmask': '255.255.255.0',
+                      'gateway': '152.16.100.20',
+                      'if': 'xe0'},
+                     {'network': '152.16.40.20',
+                      'netmask': '255.255.255.0',
+                      'gateway': '152.16.40.20',
+                      'if': 'xe1'}],
                     'description': 'VPE approximation using DPDK',
                     'name': 'vpevnf-baremetal',
                     'nd_route_tbl':
@@ -121,7 +118,7 @@ class TestMultiPortConfig(unittest.TestCase):
                                 },
                                 'vnfd-connection-point-ref': 'xe1',
                                 'name': 'xe1'}
-                        ]}],
+                    ]}],
               'description': 'Vpe approximation using DPDK',
               'mgmt-interface':
                   {'vdu-id': 'vpevnf-baremetal',
@@ -143,67 +140,85 @@ class TestMultiPortConfig(unittest.TestCase):
         }
     }
 
+    def setUp(self):
+        self._mock_open = mock.patch.object(six.moves.builtins, 'open')
+        self.mock_open = self._mock_open.start()
+        self._mock_os = mock.patch.object(os, 'path')
+        self.mock_os = self._mock_os.start()
+        self._mock_config_parser = mock.patch.object(
+            samplevnf_helper, 'ConfigParser')
+        self.mock_config_parser = self._mock_config_parser.start()
+
+        self.addCleanup(self._cleanup)
+
+    def _cleanup(self):
+        self._mock_open.stop()
+        self._mock_os.stop()
+        self._mock_config_parser.stop()
+
     def test_validate_ip_and_prefixlen(self):
-        ip_addr, prefix_len = MultiPortConfig.validate_ip_and_prefixlen('10.20.30.40', '16')
+        ip_addr, prefix_len = (
+            samplevnf_helper.MultiPortConfig.validate_ip_and_prefixlen(
+                '10.20.30.40', '16'))
         self.assertEqual(ip_addr, '10.20.30.40')
         self.assertEqual(prefix_len, 16)
 
-        ip_addr, prefix_len = MultiPortConfig.validate_ip_and_prefixlen('::1', '40')
+        ip_addr, prefix_len = (
+            samplevnf_helper.MultiPortConfig.validate_ip_and_prefixlen(
+            '::1', '40'))
         self.assertEqual(ip_addr, '0000:0000:0000:0000:0000:0000:0000:0001')
         self.assertEqual(prefix_len, 40)
 
     def test_validate_ip_and_prefixlen_negative(self):
         with self.assertRaises(AttributeError):
-            MultiPortConfig.validate_ip_and_prefixlen('', '')
+            samplevnf_helper.MultiPortConfig.validate_ip_and_prefixlen('', '')
 
         with self.assertRaises(AttributeError):
-            MultiPortConfig.validate_ip_and_prefixlen('10.20.30.400', '16')
+            samplevnf_helper.MultiPortConfig.validate_ip_and_prefixlen(
+                '10.20.30.400', '16')
 
         with self.assertRaises(AttributeError):
-            MultiPortConfig.validate_ip_and_prefixlen('10.20.30.40', '33')
+            samplevnf_helper.MultiPortConfig.validate_ip_and_prefixlen(
+                '10.20.30.40', '33')
 
         with self.assertRaises(AttributeError):
-            MultiPortConfig.validate_ip_and_prefixlen('::1', '129')
+            samplevnf_helper.MultiPortConfig.validate_ip_and_prefixlen(
+                '::1', '129')
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test___init__(self, mock_open, mock_os, ConfigParser):
+    def test___init__(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         self.assertEqual(0, opnfv_vnf.swq)
-        mock_os.path = mock.MagicMock()
-        mock_os.path.isfile = mock.Mock(return_value=False)
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        self.mock_os.path = mock.MagicMock()
+        self.mock_os.path.isfile = mock.Mock(return_value=False)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         self.assertEqual(0, opnfv_vnf.swq)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_update_timer(self, mock_open, mock_os, ConfigParser):
+    def test_update_timer(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.update_write_parser = mock.MagicMock()
-        self.assertEqual(None, opnfv_vnf.update_timer())
+        self.assertIsNone(opnfv_vnf.update_timer())
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_generate_script(self, mock_open, mock_os, ConfigParser):
+    def test_generate_script(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = VnfdHelper(self.VNFD_0)
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -218,15 +233,13 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.lb_config = 'HW'
         self.assertIsNotNone(opnfv_vnf.generate_script(self.VNFD))
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_generate_script_data(self, mock_open, mock_os, ConfigParser):
+    def test_generate_script_data(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -240,15 +253,13 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.generate_rule_config = mock.Mock()
         self.assertIsNotNone(opnfv_vnf.generate_script_data())
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_generate_rule_config(self, mock_open, mock_os, ConfigParser):
+    def test_generate_rule_config(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -261,9 +272,11 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.get_port_pairs = mock.Mock()
         opnfv_vnf.vnf_type = 'ACL'
         opnfv_vnf.get_ports_gateway = mock.Mock(return_value=u'1.1.1.1')
-        opnfv_vnf.get_netmask_gateway = mock.Mock(return_value=u'255.255.255.0')
+        opnfv_vnf.get_netmask_gateway = mock.Mock(
+            return_value=u'255.255.255.0')
         opnfv_vnf.get_ports_gateway6 = mock.Mock(return_value=u'1.1.1.1')
-        opnfv_vnf.get_netmask_gateway6 = mock.Mock(return_value=u'255.255.255.0')
+        opnfv_vnf.get_netmask_gateway6 = mock.Mock(
+            return_value=u'255.255.255.0')
         opnfv_vnf.txrx_pipeline = ''
         opnfv_vnf.vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         opnfv_vnf.interfaces = opnfv_vnf.vnfd['vdu'][0]['external-interface']
@@ -272,15 +285,13 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.rules = 'new'
         self.assertIsNotNone(opnfv_vnf.generate_rule_config())
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_generate_action_config(self, mock_open, mock_os, ConfigParser):
+    def test_generate_action_config(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -293,22 +304,22 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.get_port_pairs = mock.Mock()
         opnfv_vnf.vnf_type = 'VFW'
         opnfv_vnf.get_ports_gateway = mock.Mock(return_value=u'1.1.1.1')
-        opnfv_vnf.get_netmask_gateway = mock.Mock(return_value=u'255.255.255.0')
+        opnfv_vnf.get_netmask_gateway = mock.Mock(
+            return_value=u'255.255.255.0')
         opnfv_vnf.get_ports_gateway6 = mock.Mock(return_value=u'1.1.1.1')
-        opnfv_vnf.get_netmask_gateway6 = mock.Mock(return_value=u'255.255.255.0')
+        opnfv_vnf.get_netmask_gateway6 = mock.Mock(
+            return_value=u'255.255.255.0')
         opnfv_vnf.txrx_pipeline = ''
         opnfv_vnf.rules = ''
         self.assertIsNotNone(opnfv_vnf.generate_action_config())
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_generate_arp_config6(self, mock_open, mock_os, ConfigParser):
+    def test_generate_arp_config6(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -321,24 +332,24 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.get_port_pairs = mock.Mock()
         opnfv_vnf.vnf_type = 'VFW'
         opnfv_vnf.get_ports_gateway = mock.Mock(return_value=u'1.1.1.1')
-        opnfv_vnf.get_netmask_gateway = mock.Mock(return_value=u'255.255.255.0')
+        opnfv_vnf.get_netmask_gateway = mock.Mock(
+            return_value=u'255.255.255.0')
         opnfv_vnf.get_ports_gateway6 = mock.Mock(return_value=u'1.1.1.1')
-        opnfv_vnf.get_netmask_gateway6 = mock.Mock(return_value=u'255.255.255.0')
+        opnfv_vnf.get_netmask_gateway6 = mock.Mock(
+            return_value=u'255.255.255.0')
         opnfv_vnf.txrx_pipeline = ''
         opnfv_vnf.rules = ''
         opnfv_vnf.interfaces = mock.MagicMock()
         opnfv_vnf.get_ports_gateway6 = mock.Mock()
         self.assertIsNotNone(opnfv_vnf.generate_arp_config6())
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_generate_arp_config(self, mock_open, mock_os, ConfigParser):
+    def test_generate_arp_config(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -351,24 +362,24 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.get_port_pairs = mock.Mock()
         opnfv_vnf.vnf_type = 'VFW'
         opnfv_vnf.get_ports_gateway = mock.Mock(return_value=u'1.1.1.1')
-        opnfv_vnf.get_netmask_gateway = mock.Mock(return_value=u'255.255.255.0')
+        opnfv_vnf.get_netmask_gateway = mock.Mock(
+            return_value=u'255.255.255.0')
         opnfv_vnf.get_ports_gateway6 = mock.Mock(return_value=u'1.1.1.1')
-        opnfv_vnf.get_netmask_gateway6 = mock.Mock(return_value=u'255.255.255.0')
+        opnfv_vnf.get_netmask_gateway6 = mock.Mock(
+            return_value=u'255.255.255.0')
         opnfv_vnf.txrx_pipeline = ''
         opnfv_vnf.rules = ''
         opnfv_vnf.interfaces = mock.MagicMock()
         opnfv_vnf.get_ports_gateway6 = mock.Mock()
         self.assertIsNotNone(opnfv_vnf.generate_arp_config())
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_get_ports_gateway(self, mock_open, mock_os, ConfigParser):
+    def test_get_ports_gateway(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -387,15 +398,13 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         self.assertIsNotNone(opnfv_vnf.get_ports_gateway('xe0'))
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_get_ports_gateway6(self, mock_open, mock_os, ConfigParser):
+    def test_get_ports_gateway6(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -414,15 +423,13 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         self.assertIsNotNone(opnfv_vnf.get_ports_gateway6('xe0'))
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_get_netmask_gateway(self, mock_open, mock_os, ConfigParser):
+    def test_get_netmask_gateway(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -441,15 +448,13 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         self.assertIsNotNone(opnfv_vnf.get_netmask_gateway('xe0'))
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_get_netmask_gateway6(self, mock_open, mock_os, ConfigParser):
+    def test_get_netmask_gateway6(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -468,16 +473,14 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         self.assertIsNotNone(opnfv_vnf.get_netmask_gateway6('xe0'))
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_generate_link_config(self, mock_open, mock_os, ConfigParser):
+    def test_generate_link_config(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
 
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -495,20 +498,19 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         opnfv_vnf.interfaces = opnfv_vnf.vnfd['vdu'][0]['external-interface']
         opnfv_vnf.all_ports = ['32', '1', '987']
-        opnfv_vnf.validate_ip_and_prefixlen = mock.Mock(return_value=('10.20.30.40', 16))
+        opnfv_vnf.validate_ip_and_prefixlen = mock.Mock(
+            return_value=('10.20.30.40', 16))
 
         result = opnfv_vnf.generate_link_config()
         self.assertEqual(len(result.splitlines()), 9)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_generate_config(self, mock_open, mock_os, ConfigParser):
+    def test_generate_config(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.get_config_tpl_data = mock.MagicMock()
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
@@ -531,17 +533,13 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.is_openstack = False
         self.assertIsNone(opnfv_vnf.generate_config())
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_get_config_tpl_data(self, mock_open, mock_os, ConfigParser,
-                                 OrderedDict):
+    def test_get_config_tpl_data(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -555,17 +553,13 @@ class TestMultiPortConfig(unittest.TestCase):
 
         self.assertIsNotNone(opnfv_vnf.get_config_tpl_data('filename'))
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_get_txrx_tpl_data(self, mock_open, mock_os, ConfigParser,
-                               OrderedDict):
+    def test_get_txrx_tpl_data(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -579,17 +573,13 @@ class TestMultiPortConfig(unittest.TestCase):
 
         self.assertIsNotNone(opnfv_vnf.get_txrx_tpl_data('filename'))
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_init_write_parser_template(self, mock_open, mock_os, ConfigParser,
-                                        OrderedDict):
+    def test_init_write_parser_template(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -608,17 +598,13 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.write_parser.set = mock.Mock()
         self.assertIsNone(opnfv_vnf.init_write_parser_template('filename'))
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_init_write_parser_template_2(self, mock_open, mock_os, ConfigParser,
-                                          OrderedDict):
+    def test_init_write_parser_template_2(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -632,17 +618,13 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.read_parser.items = mock.MagicMock()
         self.assertIsNone(opnfv_vnf.init_write_parser_template('filename'))
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_update_write_parser(self, mock_open, mock_os, ConfigParser,
-                                 OrderedDict):
+    def test_update_write_parser(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -658,17 +640,13 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.pipeline_counter = 0
         self.assertIsNone(opnfv_vnf.update_write_parser({'filename': 1}))
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_get_worker_threads(self, mock_open, mock_os, ConfigParser,
-                                OrderedDict):
+    def test_get_worker_threads(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -692,17 +670,14 @@ class TestMultiPortConfig(unittest.TestCase):
         result = opnfv_vnf.get_worker_threads(3)
         self.assertEqual(2, result)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_generate_next_core_id(self, mock_open, mock_os, ConfigParser,
-                                   OrderedDict):
+    # TODO(elfoley): Split this test into smaller tests
+    def test_generate_next_core_id(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -719,26 +694,22 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.worker_config = '1t'
         opnfv_vnf.start_core = 0
         result = opnfv_vnf.generate_next_core_id()
-        self.assertEqual(None, result)
+        self.assertIsNone(result)
         opnfv_vnf.worker_config = '2t'
         opnfv_vnf.start_core = 'a'
         self.assertRaises(ValueError, opnfv_vnf.generate_next_core_id)
         opnfv_vnf.worker_config = '2t'
         opnfv_vnf.start_core = 1
         result = opnfv_vnf.generate_next_core_id()
-        self.assertEqual(None, result)
+        self.assertIsNone(result)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_generate_lb_to_port_pair_mapping(self, mock_open, mock_os, ConfigParser,
-                                              OrderedDict):
+    def test_generate_lb_to_port_pair_mapping(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = VnfdHelper(self.VNFD_0)
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.txrx_pipeline = ''
@@ -754,24 +725,20 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.worker_config = '1t'
         opnfv_vnf.start_core = 0
         opnfv_vnf.lb_count = 1
-        opnfv_vnf._port_pairs = PortPairs(vnfd_mock.interfaces)
+        opnfv_vnf._port_pairs = samplevnf_helper.PortPairs(vnfd_mock.interfaces)
         opnfv_vnf.port_pair_list = opnfv_vnf._port_pairs.port_pair_list
         result = opnfv_vnf.generate_lb_to_port_pair_mapping()
-        self.assertEqual(None, result)
+        self.assertIsNone(result)
         result = opnfv_vnf.set_priv_to_pub_mapping()
         self.assertEqual('(0,1)', result)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_set_priv_que_handler(self, mock_open, mock_os, ConfigParser,
-                                  OrderedDict):
+    def test_set_priv_que_handler(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = VnfdHelper(self.VNFD_0)
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -790,11 +757,10 @@ class TestMultiPortConfig(unittest.TestCase):
         opnfv_vnf.start_core = 0
         opnfv_vnf.lb_count = 1
         result = opnfv_vnf.set_priv_que_handler()
-        self.assertEqual(None, result)
+        self.assertIsNone(result)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    def test_generate_arp_route_tbl(self, *_):
+    def test_generate_arp_route_tbl(self):
+        # ELF: could do this in setup
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = ""
@@ -821,7 +787,8 @@ class TestMultiPortConfig(unittest.TestCase):
             },
         ]
 
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.all_ports = [3, 2, 5]
 
         expected = 'routeadd net 32 10.20.30.40 0xfffff000\n' \
@@ -830,17 +797,13 @@ class TestMultiPortConfig(unittest.TestCase):
         result = opnfv_vnf.generate_arp_route_tbl()
         self.assertEqual(result, expected)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_generate_arpicmp_data(self, mock_open, mock_os, ConfigParser,
-                                   OrderedDict):
+    def test_generate_arpicmp_data(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -875,17 +838,13 @@ class TestMultiPortConfig(unittest.TestCase):
         result = opnfv_vnf.generate_arpicmp_data()
         self.assertIsNotNone(result)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_generate_final_txrx_data(self, mock_open, mock_os, ConfigParser,
-                                      OrderedDict):
+    def test_generate_final_txrx_data(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -917,17 +876,13 @@ class TestMultiPortConfig(unittest.TestCase):
         result = opnfv_vnf.generate_final_txrx_data()
         self.assertIsNotNone(result)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_generate_initial_txrx_data(self, mock_open, mock_os,
-                                        ConfigParser, OrderedDict):
+    def test_generate_initial_txrx_data(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -971,17 +926,13 @@ class TestMultiPortConfig(unittest.TestCase):
         result = opnfv_vnf.generate_initial_txrx_data()
         self.assertIsNotNone(result)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_generate_lb_data(self, mock_open, mock_os, ConfigParser,
-                              OrderedDict):
+    def test_generate_lb_data(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -1008,17 +959,13 @@ class TestMultiPortConfig(unittest.TestCase):
         result = opnfv_vnf.generate_lb_data()
         self.assertIsNotNone(result)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_generate_vnf_data(self, mock_open, mock_os, ConfigParser,
-                               OrderedDict):
+    def test_generate_vnf_data(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -1057,17 +1004,13 @@ class TestMultiPortConfig(unittest.TestCase):
         result = opnfv_vnf.generate_vnf_data()
         self.assertIsNotNone(result)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_generate_config_data(self, mock_open, mock_os, ConfigParser,
-                                  OrderedDict):
+    def test_generate_config_data(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = VnfdHelper(self.VNFD_0)
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
@@ -1123,17 +1066,13 @@ class TestMultiPortConfig(unittest.TestCase):
         result = opnfv_vnf.generate_config_data()
         self.assertIsNone(result)
 
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.open')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.os')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.ConfigParser')
-    @mock.patch('yardstick.network_services.helpers.samplevnf_helper.OrderedDict')
-    def test_init_eal(self, mock_open, mock_os, ConfigParser,
-                      OrderedDict):
+    def test_init_eal(self):
         topology_file = mock.Mock()
         config_tpl = mock.Mock()
         tmp_file = mock.Mock()
         vnfd_mock = mock.MagicMock()
-        opnfv_vnf = MultiPortConfig(topology_file, config_tpl, tmp_file, vnfd_mock)
+        opnfv_vnf = samplevnf_helper.MultiPortConfig(
+            topology_file, config_tpl, tmp_file, vnfd_mock)
         opnfv_vnf.socket = 0
         opnfv_vnf.start_core = 0
         opnfv_vnf.port_pair_list = [("xe0", "xe1")]
index 3f374fb..2a97048 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -36,7 +34,7 @@ class TestIxNextgen(unittest.TestCase):
         self.assertIsNone(ixnet_gen._bidir)
 
     @mock.patch("yardstick.network_services.libs.ixia_libs.IxNet.IxNet.sys")
-    def test_connect(self, mock_sys):
+    def test_connect(self, *args):
 
         ixnet_gen = IxNextgen()
         ixnet_gen.get_config = mock.MagicMock()
index 866c31d..0ae1756 100644 (file)
@@ -34,7 +34,7 @@ class TestAmqpConsumer(unittest.TestCase):
         self.amqp_consumer._connection.add_on_close_callback = \
             mock.Mock(return_value=0)
         self.amqp_consumer._connection.channel = mock.Mock(return_value=0)
-        self.assertEqual(None, self.amqp_consumer.on_connection_open(10))
+        self.assertIsNone(self.amqp_consumer.on_connection_open(10))
 
     def test_on_connection_closed(self):
         self.amqp_consumer._connection = mock.Mock(autospec=AmqpConsumer)
@@ -42,13 +42,11 @@ class TestAmqpConsumer(unittest.TestCase):
         self.amqp_consumer._connection.ioloop.stop = mock.Mock(return_value=0)
         self.amqp_consumer._connection.add_timeout = mock.Mock(return_value=0)
         self.amqp_consumer._closing = True
-        self.assertEqual(None,
-                         self.amqp_consumer.on_connection_closed("", 404,
-                                                                 "Not Found"))
+        self.assertIsNone(
+            self.amqp_consumer.on_connection_closed("", 404, "Not Found"))
         self.amqp_consumer._closing = False
-        self.assertEqual(None,
-                         self.amqp_consumer.on_connection_closed("", 404,
-                                                                 "Not Found"))
+        self.assertIsNone(
+            self.amqp_consumer.on_connection_closed("", 404, "Not Found"))
 
     def test_reconnect(self):
         self.amqp_consumer._connection = mock.Mock(autospec=AmqpConsumer)
@@ -56,7 +54,7 @@ class TestAmqpConsumer(unittest.TestCase):
         self.amqp_consumer._connection.ioloop.stop = mock.Mock(return_value=0)
         self.amqp_consumer.connect = mock.Mock(return_value=0)
         self.amqp_consumer._closing = True
-        self.assertEqual(None, self.amqp_consumer.reconnect())
+        self.assertIsNone(self.amqp_consumer.reconnect())
 
     def test_on_channel_open(self):
         self.amqp_consumer._connection = mock.Mock(autospec=AmqpConsumer)
@@ -66,9 +64,8 @@ class TestAmqpConsumer(unittest.TestCase):
         self.amqp_consumer.add_on_channel_close_callback = mock.Mock()
         self.amqp_consumer._channel.exchange_declare = \
             mock.Mock(return_value=0)
-        self.assertEqual(None,
-                         self.amqp_consumer.on_channel_open(
-                             self.amqp_consumer._channel))
+        self.assertIsNone(
+            self.amqp_consumer.on_channel_open(self.amqp_consumer._channel))
 
     def test_add_on_channel_close_callback(self):
         self.amqp_consumer._connection = mock.Mock(autospec=AmqpConsumer)
@@ -76,46 +73,44 @@ class TestAmqpConsumer(unittest.TestCase):
             mock.Mock(return_value=0)
         self.amqp_consumer._channel = mock.Mock()
         self.amqp_consumer._channel.add_on_close_callback = mock.Mock()
-        self.assertEqual(None,
-                         self.amqp_consumer.add_on_channel_close_callback())
+        self.assertIsNone(self.amqp_consumer.add_on_channel_close_callback())
 
     def test_on_channel_closed(self):
         self.amqp_consumer._connection = mock.Mock(autospec=AmqpConsumer)
         self.amqp_consumer._connection.close = mock.Mock(return_value=0)
         _channel = mock.Mock()
-        self.assertEqual(None,
-                         self.amqp_consumer.on_channel_closed(_channel,
-                                                              "", ""))
+        self.assertIsNone(
+            self.amqp_consumer.on_channel_closed(_channel, "", ""))
 
     def test_ion_exchange_declareok(self):
         self.amqp_consumer.setup_queue = mock.Mock(return_value=0)
-        self.assertEqual(None, self.amqp_consumer.on_exchange_declareok(10))
+        self.assertIsNone(self.amqp_consumer.on_exchange_declareok(10))
 
     def test_setup_queue(self):
         self.amqp_consumer._channel = mock.Mock()
         self.amqp_consumer._channel.add_on_close_callback = mock.Mock()
-        self.assertEqual(None, self.amqp_consumer.setup_queue("collectd"))
+        self.assertIsNone(self.amqp_consumer.setup_queue("collectd"))
 
     def test_on_queue_declareok(self):
         self.amqp_consumer._channel = mock.Mock()
         self.amqp_consumer._channel.queue_bind = mock.Mock()
-        self.assertEqual(None, self.amqp_consumer.on_queue_declareok(10))
+        self.assertIsNone(self.amqp_consumer.on_queue_declareok(10))
 
     def test__on_bindok(self):
         self.amqp_consumer._channel = mock.Mock()
         self.amqp_consumer._channel.basic_consume = mock.Mock()
         self.amqp_consumer.add_on_cancel_callback = mock.Mock()
-        self.assertEqual(None, self.amqp_consumer._on_bindok(10))
+        self.assertIsNone(self.amqp_consumer._on_bindok(10))
 
     def test_add_on_cancel_callback(self):
         self.amqp_consumer._channel = mock.Mock()
         self.amqp_consumer._channel.add_on_cancel_callback = mock.Mock()
-        self.assertEqual(None, self.amqp_consumer.add_on_cancel_callback())
+        self.assertIsNone(self.amqp_consumer.add_on_cancel_callback())
 
     def test_on_consumer_cancelled(self):
         self.amqp_consumer._channel = mock.Mock()
         self.amqp_consumer._channel.close = mock.Mock()
-        self.assertEqual(None, self.amqp_consumer.on_consumer_cancelled(10))
+        self.assertIsNone(self.amqp_consumer.on_consumer_cancelled(10))
 
     def test_on_message(self):
         body = "msg {} cpu/cpu-0/ipc 101010:10"
@@ -123,25 +118,24 @@ class TestAmqpConsumer(unittest.TestCase):
         basic_deliver = mock.Mock()
         basic_deliver.delivery_tag = mock.Mock(return_value=0)
         self.amqp_consumer.ack_message = mock.Mock()
-        self.assertEqual(None,
-                         self.amqp_consumer.on_message(10, basic_deliver,
-                                                       properties, body))
+        self.assertIsNone(
+            self.amqp_consumer.on_message(10, basic_deliver, properties, body))
 
     def test_ack_message(self):
         self.amqp_consumer._channel = mock.Mock()
         self.amqp_consumer._channel.basic_ack = mock.Mock()
-        self.assertEqual(None, self.amqp_consumer.ack_message(10))
+        self.assertIsNone(self.amqp_consumer.ack_message(10))
 
     def test_on_cancelok(self):
         self.amqp_consumer._channel = mock.Mock()
         self.amqp_consumer._channel.close = mock.Mock()
-        self.assertEqual(None, self.amqp_consumer.on_cancelok(10))
+        self.assertIsNone(self.amqp_consumer.on_cancelok(10))
 
     def test_run(self):
         self.amqp_consumer._connection = mock.Mock(autospec=AmqpConsumer)
         self.amqp_consumer.connect = mock.Mock()
         self.amqp_consumer._connection.ioloop.start = mock.Mock()
-        self.assertEqual(None, self.amqp_consumer.run())
+        self.assertIsNone(self.amqp_consumer.run())
 
     def test_stop(self):
         self.amqp_consumer._connection = mock.Mock(autospec=AmqpConsumer)
@@ -149,9 +143,9 @@ class TestAmqpConsumer(unittest.TestCase):
         self.amqp_consumer._connection.ioloop.start = mock.Mock()
         self.amqp_consumer._channel = mock.Mock()
         self.amqp_consumer._channel.basic_cancel = mock.Mock()
-        self.assertEqual(None, self.amqp_consumer.stop())
+        self.assertIsNone(self.amqp_consumer.stop())
 
     def test_close_connection(self):
         self.amqp_consumer._connection = mock.Mock(autospec=AmqpConsumer)
         self.amqp_consumer._connection.close = mock.Mock()
-        self.assertEqual(None, self.amqp_consumer.close_connection())
+        self.assertIsNone(self.amqp_consumer.close_connection())
index 7ad1662..f5f7f0f 100644 (file)
@@ -245,7 +245,7 @@ class TestResourceProfile(unittest.TestCase):
         self.resource_profile.run_collectd_amqp = \
             mock.Mock(return_value=0)
         res = self.resource_profile.amqp_process_for_nfvi_kpi()
-        self.assertEqual(None, res)
+        self.assertIsNone(res)
 
     def test_amqp_collect_nfvi_kpi(self):
         self.resource_profile.amqp_client = \
@@ -271,6 +271,3 @@ class TestResourceProfile(unittest.TestCase):
         # TODO(efoley): Fix this incorrect test.
         # Should check that we don't try to stop amqp when it's not running
         self.assertIsNone(self.resource_profile.stop())
-
-if __name__ == '__main__':
-    unittest.main()
index 2906103..3b88049 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
 
-# Unittest for yardstick.network_services.traffic_profile.test_base
+import sys
 
-from __future__ import absolute_import
-import unittest
 import mock
+import unittest
 
-from yardstick.network_services.traffic_profile.base import \
-    TrafficProfile, DummyProfile
+from yardstick.common import exceptions
+from yardstick.network_services import traffic_profile as tprofile_package
+from yardstick.network_services.traffic_profile import base
+from yardstick import tests as y_tests
 
 
 class TestTrafficProfile(unittest.TestCase):
@@ -43,20 +41,33 @@ class TestTrafficProfile(unittest.TestCase):
             return _mock
 
     def test___init__(self):
-        traffic_profile = TrafficProfile(self.TRAFFIC_PROFILE)
+        traffic_profile = base.TrafficProfile(self.TRAFFIC_PROFILE)
         self.assertEqual(self.TRAFFIC_PROFILE, traffic_profile.params)
 
     def test_execute(self):
-        traffic_profile = TrafficProfile(self.TRAFFIC_PROFILE)
-        self.assertRaises(NotImplementedError, traffic_profile.execute_traffic, {})
+        traffic_profile = base.TrafficProfile(self.TRAFFIC_PROFILE)
+        self.assertRaises(NotImplementedError,
+                          traffic_profile.execute_traffic, {})
+
+    def test_get_existing_traffic_profile(self):
+        traffic_profile_list = [
+            'RFC2544Profile', 'FixedProfile', 'TrafficProfileGenericHTTP',
+            'IXIARFC2544Profile', 'ProxACLProfile', 'ProxBinSearchProfile',
+            'ProxProfile', 'ProxRampProfile']
+        with mock.patch.dict(sys.modules, y_tests.STL_MOCKS):
+            tprofile_package.register_modules()
+
+            for tp in traffic_profile_list:
+                traffic_profile = base.TrafficProfile.get(
+                    {'traffic_profile': {'traffic_type': tp}})
+                self.assertEqual(tp, traffic_profile.__class__.__name__)
 
-    def test_get(self):
-        traffic_profile = TrafficProfile(self.TRAFFIC_PROFILE)
-        self.assertRaises(RuntimeError, traffic_profile.get,
-                          self.TRAFFIC_PROFILE)
+    def test_get_non_existing_traffic_profile(self):
+        self.assertRaises(exceptions.TrafficProfileNotImplemented,
+                          base.TrafficProfile.get, self.TRAFFIC_PROFILE)
 
 
 class TestDummyProfile(unittest.TestCase):
     def test_execute(self):
-        dummy_profile = DummyProfile(TrafficProfile)
+        dummy_profile = base.DummyProfile(base.TrafficProfile)
         self.assertIsNone(dummy_profile.execute({}))
index eb182a2..dec9496 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -119,4 +117,4 @@ class TestFixedProfile(unittest.TestCase):
         fixed_profile = FixedProfile(self.TRAFFIC_PROFILE)
         fixed_profile.params = self.TRAFFIC_PROFILE
         fixed_profile.first_run = True
-        self.assertEqual(None, fixed_profile.execute(traffic_generator))
+        self.assertIsNone(fixed_profile.execute(traffic_generator))
index e818a05..5d8029e 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -33,13 +31,11 @@ class TestTrafficProfileGenericHTTP(unittest.TestCase):
         traffic_profile_generic_htt_p = \
                 TrafficProfileGenericHTTP(TrafficProfile)
         traffic_generator = {}
-        self.assertEqual(None,
-                         traffic_profile_generic_htt_p.execute(
-                             traffic_generator))
+        self.assertIsNone(
+            traffic_profile_generic_htt_p.execute(traffic_generator))
 
     def test__send_http_request(self):
         traffic_profile_generic_htt_p = \
                 TrafficProfileGenericHTTP(TrafficProfile)
-        self.assertEqual(None,
-                         traffic_profile_generic_htt_p._send_http_request(
+        self.assertIsNone(traffic_profile_generic_htt_p._send_http_request(
                              "10.1.1.1", "250", "/req"))
index 616921e..e8910d6 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -29,7 +27,7 @@ stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
 stl_patch.start()
 
 if stl_patch:
-    from yardstick.network_services.traffic_profile.traffic_profile \
+    from yardstick.network_services.traffic_profile.trex_traffic_profile \
         import TrexProfile
     from yardstick.network_services.traffic_profile.ixia_rfc2544 import \
         IXIARFC2544Profile
@@ -179,9 +177,7 @@ class TestIXIARFC2544Profile(unittest.TestCase):
                "dst_mac_0": "00:00:00:00:00:03",
                "dst_mac_1": "00:00:00:00:00:04",
                "dst_mac_2": "00:00:00:00:00:04"}
-        result = r_f_c2544_profile._get_ixia_traffic_profile(
-            self.PROFILE, mac, xfile="tmp",
-            static_traffic=STATIC_TRAFFIC)
+        result = r_f_c2544_profile._get_ixia_traffic_profile(self.PROFILE, mac)
         self.assertIsNotNone(result)
 
     def test_get_ixia_traffic_profile(self):
@@ -225,7 +221,6 @@ class TestIXIARFC2544Profile(unittest.TestCase):
                     "proto": "udp",
                     "srcip4": "152.16.40.20",
                     "ttl": 32,
-                    "count": "1"
                 },
                 "outer_l4": {
                     "dstport": "2001",
@@ -260,7 +255,6 @@ class TestIXIARFC2544Profile(unittest.TestCase):
                     "proto": "udp",
                     "srcip4": "152.16.40.20",
                     "ttl": 32,
-                    "count": "1"
                 },
                 "outer_l3v6": {
                     "count": 1024,
@@ -269,7 +263,6 @@ class TestIXIARFC2544Profile(unittest.TestCase):
                     "proto": "udp",
                     "srcip4": "152.16.40.20",
                     "ttl": 32,
-                    "count": "1"
                 },
                 "outer_l4": {
                     "dstport": "1234",
@@ -289,12 +282,11 @@ class TestIXIARFC2544Profile(unittest.TestCase):
                "dst_mac_0": "00:00:00:00:00:03",
                "dst_mac_1": "00:00:00:00:00:04",
                "dst_mac_2": "00:00:00:00:00:04"}
-        result = r_f_c2544_profile._get_ixia_traffic_profile(
-            self.PROFILE, mac, xfile="tmp", static_traffic=STATIC_TRAFFIC)
+        result = r_f_c2544_profile._get_ixia_traffic_profile(self.PROFILE, mac)
         self.assertIsNotNone(result)
 
     @mock.patch("yardstick.network_services.traffic_profile.ixia_rfc2544.open")
-    def test_get_ixia_traffic_profile_v6(self, mock_open):
+    def test_get_ixia_traffic_profile_v6(self, *args):
         traffic_generator = mock.Mock(autospec=TrexProfile)
         traffic_generator.my_ports = [0, 1]
         traffic_generator.uplink_ports = [-1]
@@ -435,8 +427,7 @@ class TestIXIARFC2544Profile(unittest.TestCase):
                                      'outer_l4': {'dstport': '2001',
                                                   'srcport': '1234'}}},
                         'schema': 'isb:traffic_profile:0.1'}
-        result = r_f_c2544_profile._get_ixia_traffic_profile(
-            profile_data, mac, static_traffic=STATIC_TRAFFIC)
+        result = r_f_c2544_profile._get_ixia_traffic_profile(profile_data, mac)
         self.assertIsNotNone(result)
 
     def test__get_ixia_traffic_profile_default_args(self):
@@ -459,8 +450,7 @@ class TestIXIARFC2544Profile(unittest.TestCase):
         ixia_obj = mock.MagicMock()
         r_f_c2544_profile = IXIARFC2544Profile(self.TRAFFIC_PROFILE)
         r_f_c2544_profile.rate = 100
-        result = r_f_c2544_profile._ixia_traffic_generate(traffic_generator,
-                                                          traffic, ixia_obj)
+        result = r_f_c2544_profile._ixia_traffic_generate(traffic, ixia_obj)
         self.assertIsNone(result)
 
     def test_execute(self):
@@ -482,7 +472,7 @@ class TestIXIARFC2544Profile(unittest.TestCase):
         r_f_c2544_profile.get_multiplier = mock.Mock()
         r_f_c2544_profile._ixia_traffic_generate = mock.Mock()
         ixia_obj = mock.MagicMock()
-        self.assertEqual(None, r_f_c2544_profile.execute_traffic(traffic_generator, ixia_obj))
+        self.assertIsNone(r_f_c2544_profile.execute_traffic(traffic_generator, ixia_obj))
 
     def test_update_traffic_profile(self):
         traffic_generator = mock.Mock(autospec=TrexProfile)
@@ -511,13 +501,6 @@ class TestIXIARFC2544Profile(unittest.TestCase):
         self.assertEqual(r_f_c2544_profile.ports, ports_expected)
 
     def test_get_drop_percentage(self):
-        traffic_generator = mock.Mock(autospec=TrexProfile)
-        traffic_generator.networks = {
-            "uplink_0": ["xe0"],
-            "downlink_0": ["xe1"],
-        }
-        traffic_generator.client = \
-            mock.Mock(return_value=True)
         r_f_c2544_profile = IXIARFC2544Profile(self.TRAFFIC_PROFILE)
         r_f_c2544_profile.params = self.PROFILE
         ixia_obj = mock.MagicMock()
@@ -541,17 +524,11 @@ class TestIXIARFC2544Profile(unittest.TestCase):
                              "out_packets": 1000}
         tol_min = 100.0
         tolerance = 0.0
-        self.assertIsNotNone(r_f_c2544_profile.get_drop_percentage(
-                             traffic_generator, samples,
-                             tol_min, tolerance, ixia_obj))
+        self.assertIsNotNone(
+            r_f_c2544_profile.get_drop_percentage(samples, tol_min, tolerance,
+                                                  ixia_obj))
 
     def test_get_drop_percentage_update(self):
-        traffic_generator = mock.Mock(autospec=TrexProfile)
-        traffic_generator.my_ports = [0, 1]
-        traffic_generator.uplink_ports = [0]
-        traffic_generator.downlink_ports = [1]
-        traffic_generator.client = \
-            mock.Mock(return_value=True)
         r_f_c2544_profile = IXIARFC2544Profile(self.TRAFFIC_PROFILE)
         r_f_c2544_profile.params = self.PROFILE
         ixia_obj = mock.MagicMock()
@@ -575,17 +552,11 @@ class TestIXIARFC2544Profile(unittest.TestCase):
                              "out_packets": 1002}
         tol_min = 0.0
         tolerance = 1.0
-        self.assertIsNotNone(r_f_c2544_profile.get_drop_percentage(
-                             traffic_generator, samples,
-                             tol_min, tolerance, ixia_obj))
+        self.assertIsNotNone(
+            r_f_c2544_profile.get_drop_percentage(samples, tol_min, tolerance,
+                                                  ixia_obj))
 
     def test_get_drop_percentage_div_zero(self):
-        traffic_generator = mock.Mock(autospec=TrexProfile)
-        traffic_generator.my_ports = [0, 1]
-        traffic_generator.uplink_ports = [0]
-        traffic_generator.downlink_ports = [1]
-        traffic_generator.client = \
-            mock.Mock(return_value=True)
         r_f_c2544_profile = IXIARFC2544Profile(self.TRAFFIC_PROFILE)
         r_f_c2544_profile.params = self.PROFILE
         ixia_obj = mock.MagicMock()
@@ -610,9 +581,9 @@ class TestIXIARFC2544Profile(unittest.TestCase):
         tol_min = 0.0
         tolerance = 0.0
         r_f_c2544_profile.tmp_throughput = 0
-        self.assertIsNotNone(r_f_c2544_profile.get_drop_percentage(
-                             traffic_generator, samples,
-                             tol_min, tolerance, ixia_obj))
+        self.assertIsNotNone(
+            r_f_c2544_profile.get_drop_percentage(samples, tol_min, tolerance,
+                                                  ixia_obj))
 
     def test_get_multiplier(self):
         r_f_c2544_profile = IXIARFC2544Profile(self.TRAFFIC_PROFILE)
@@ -636,11 +607,5 @@ class TestIXIARFC2544Profile(unittest.TestCase):
             mock.Mock(return_value={})
         r_f_c2544_profile.full_profile = {}
         r_f_c2544_profile._ixia_traffic_generate = mock.Mock()
-        self.assertEqual(
-            None,
-            r_f_c2544_profile.start_ixia_latency(traffic_generator,
-                                                 ixia_obj))
-
-
-if __name__ == '__main__':
-    unittest.main()
+        self.assertIsNone(
+            r_f_c2544_profile.start_ixia_latency(traffic_generator, ixia_obj))
index c1f1c82..1b4189b 100644 (file)
@@ -32,7 +32,7 @@ if stl_patch:
 class TestProxBinSearchProfile(unittest.TestCase):
 
     def test_execute_1(self):
-        def target(*args, **kwargs):
+        def target(*args, **_):
             runs.append(args[2])
             if args[2] < 0 or args[2] > 100:
                 raise RuntimeError(' '.join([str(args), str(runs)]))
@@ -43,6 +43,8 @@ class TestProxBinSearchProfile(unittest.TestCase):
         tp_config = {
             'traffic_profile': {
                 'packet_sizes': [200],
+                'test_precision': 2.0,
+                'tolerated_loss': 0.001,
             },
         }
 
@@ -61,11 +63,47 @@ class TestProxBinSearchProfile(unittest.TestCase):
 
         profile.execute_traffic(traffic_generator)
         self.assertEqual(round(profile.current_lower, 2), 74.69)
-        self.assertEqual(round(profile.current_upper, 2), 75.39)
-        self.assertEqual(len(runs), 8)
+        self.assertEqual(round(profile.current_upper, 2), 76.09)
+        self.assertEqual(len(runs), 7)
+
+        # Result Samples inc theor_max
+        result_tuple = {"Result_Actual_throughput": 7.5e-07,
+                        "Result_theor_max_throughput": 0.00012340000000000002,
+                        "Result_pktSize": 200}
+        profile.queue.put.assert_called_with(result_tuple)
+
+        success_result_tuple = {"Success_CurrentDropPackets": 0.5,
+                                "Success_DropPackets": 0.5,
+                                "Success_LatencyAvg": 5.3,
+                                "Success_LatencyMax": 5.2,
+                                "Success_LatencyMin": 5.1,
+                                "Success_PktSize": 200,
+                                "Success_RxThroughput": 7.5e-07,
+                                "Success_Throughput": 7.5e-07,
+                                "Success_TxThroughput": 0.00012340000000000002}
+
+        calls = profile.queue.put(success_result_tuple)
+        profile.queue.put.assert_has_calls(calls)
+
+        success_result_tuple2 = {"Success_CurrentDropPackets": 0.5,
+                                "Success_DropPackets": 0.5,
+                                "Success_LatencyAvg": 5.3,
+                                "Success_LatencyMax": 5.2,
+                                "Success_LatencyMin": 5.1,
+                                "Success_PktSize": 200,
+                                "Success_RxThroughput": 7.5e-07,
+                                "Success_Throughput": 7.5e-07,
+                                "Success_TxThroughput": 123.4,
+                                "Success_can_be_lost": 409600,
+                                "Success_drop_total": 20480,
+                                "Success_rx_total": 4075520,
+                                "Success_tx_total": 4096000}
+
+        calls = profile.queue.put(success_result_tuple2)
+        profile.queue.put.assert_has_calls(calls)
 
     def test_execute_2(self):
-        def target(*args, **kwargs):
+        def target(*args, **_):
             runs.append(args[2])
             if args[2] < 0 or args[2] > 100:
                 raise RuntimeError(' '.join([str(args), str(runs)]))
@@ -77,6 +115,7 @@ class TestProxBinSearchProfile(unittest.TestCase):
             'traffic_profile': {
                 'packet_sizes': [200],
                 'test_precision': 2.0,
+                'tolerated_loss': 0.001,
             },
         }
 
@@ -97,3 +136,50 @@ class TestProxBinSearchProfile(unittest.TestCase):
         self.assertEqual(round(profile.current_lower, 2), 24.06)
         self.assertEqual(round(profile.current_upper, 2), 25.47)
         self.assertEqual(len(runs), 7)
+
+    def test_execute_3(self):
+        def target(*args, **_):
+            runs.append(args[2])
+            if args[2] < 0 or args[2] > 100:
+                raise RuntimeError(' '.join([str(args), str(runs)]))
+            if args[2] > 75.0:
+                return fail_tuple, {}
+            return success_tuple, {}
+
+        tp_config = {
+            'traffic_profile': {
+                'packet_sizes': [200],
+                'test_precision': 2.0,
+                'tolerated_loss': 0.001,
+            },
+        }
+
+        runs = []
+        success_tuple = ProxTestDataTuple(10.0, 1, 2, 3, 4, [5.1, 5.2, 5.3], 995, 1000, 123.4)
+        fail_tuple = ProxTestDataTuple(10.0, 1, 2, 3, 4, [5.6, 5.7, 5.8], 850, 1000, 123.4)
+
+        traffic_generator = mock.MagicMock()
+
+        profile_helper = mock.MagicMock()
+        profile_helper.run_test = target
+
+        profile = ProxBinSearchProfile(tp_config)
+        profile.init(mock.MagicMock())
+        profile._profile_helper = profile_helper
+
+        profile.upper_bound = 100.0
+        profile.lower_bound = 99.0
+        profile.execute_traffic(traffic_generator)
+
+
+        # Result Samples
+        result_tuple = {"Result_theor_max_throughput": 0, "Result_pktSize": 200}
+        profile.queue.put.assert_called_with(result_tuple)
+
+        # Check for success_ tuple (None expected)
+        calls = profile.queue.put.mock_calls
+        for call in calls:
+            for call_detail in call[1]:
+                for k in call_detail:
+                    if "Success_" in k:
+                        self.assertRaises(AttributeError)
index 2212337..21c8f6d 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,9 +13,6 @@
 # limitations under the License.
 #
 
-from __future__ import absolute_import
-from __future__ import division
-
 import unittest
 import mock
 
@@ -29,7 +24,7 @@ stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
 stl_patch.start()
 
 if stl_patch:
-    from yardstick.network_services.traffic_profile.traffic_profile \
+    from yardstick.network_services.traffic_profile.trex_traffic_profile \
         import TrexProfile
     from yardstick.network_services.traffic_profile.rfc2544 import \
         RFC2544Profile
@@ -51,29 +46,29 @@ class TestRFC2544Profile(unittest.TestCase):
                'traffic_profile': {'traffic_type': 'RFC2544Profile',
                                    'frame_rate': 100},
                'downlink_0': {'ipv4':
-                          {'outer_l2': {'framesize':
-                                        {'64B': '100', '1518B': '0',
-                                         '128B': '0', '1400B': '0',
-                                         '256B': '0', '373b': '0',
-                                         '570B': '0'}},
-                           'outer_l3v4': {'dstip4': '1.1.1.1-1.15.255.255',
-                                          'proto': 'udp',
-                                          'srcip4': '90.90.1.1-90.105.255.255',
-                                          'dscp': 0, 'ttl': 32, 'count': 1},
-                           'outer_l4': {'srcport': '2001',
-                               'dsrport': '1234', 'count': 1}}},
+                              {'outer_l2': {'framesize':
+                                            {'64B': '100', '1518B': '0',
+                                             '128B': '0', '1400B': '0',
+                                             '256B': '0', '373b': '0',
+                                             '570B': '0'}},
+                               'outer_l3v4': {'dstip4': '1.1.1.1-1.15.255.255',
+                                              'proto': 'udp',
+                                              'srcip4': '90.90.1.1-90.105.255.255',
+                                              'dscp': 0, 'ttl': 32, 'count': 1},
+                               'outer_l4': {'srcport': '2001',
+                                            'dsrport': '1234', 'count': 1}}},
                'uplink_0': {'ipv4':
-                           {'outer_l2': {'framesize':
-                                         {'64B': '100', '1518B': '0',
-                                          '128B': '0', '1400B': '0',
-                                          '256B': '0', '373b': '0',
-                                          '570B': '0'}},
-                            'outer_l3v4': {'dstip4': '9.9.1.1-90.105.255.255',
-                                           'proto': 'udp',
-                                           'srcip4': '1.1.1.1-1.15.255.255',
-                                           'dscp': 0, 'ttl': 32, 'count': 1},
-                            'outer_l4': {'dstport': '2001',
-                                'srcport': '1234', 'count': 1}}},
+                            {'outer_l2': {'framesize':
+                                          {'64B': '100', '1518B': '0',
+                                           '128B': '0', '1400B': '0',
+                                           '256B': '0', '373b': '0',
+                                           '570B': '0'}},
+                             'outer_l3v4': {'dstip4': '9.9.1.1-90.105.255.255',
+                                            'proto': 'udp',
+                                            'srcip4': '1.1.1.1-1.15.255.255',
+                                            'dscp': 0, 'ttl': 32, 'count': 1},
+                             'outer_l4': {'dstport': '2001',
+                                          'srcport': '1234', 'count': 1}}},
                'schema': 'isb:traffic_profile:0.1'}
 
     def test___init__(self):
@@ -86,12 +81,11 @@ class TestRFC2544Profile(unittest.TestCase):
             "uplink_0": ["xe0"],
             "downlink_0": ["xe1"],
         }
-        traffic_generator.client = \
-            mock.Mock(return_value=True)
+        traffic_generator.client.return_value = True
         r_f_c2544_profile = RFC2544Profile(self.TRAFFIC_PROFILE)
         r_f_c2544_profile.params = self.PROFILE
         r_f_c2544_profile.first_run = True
-        self.assertEqual(None, r_f_c2544_profile.execute_traffic(traffic_generator))
+        self.assertIsNone(r_f_c2544_profile.execute_traffic(traffic_generator))
 
     def test_get_drop_percentage(self):
         traffic_generator = mock.Mock(autospec=TrexProfile)
@@ -99,7 +93,7 @@ class TestRFC2544Profile(unittest.TestCase):
             "uplink_0": ["xe0"],
             "downlink_0": ["xe1"],
         }
-        traffic_generator.client = mock.Mock(return_value=True)
+        traffic_generator.client.return_value = True
 
         r_f_c2544_profile = RFC2544Profile(self.TRAFFIC_PROFILE)
         r_f_c2544_profile.params = self.PROFILE
@@ -133,7 +127,7 @@ class TestRFC2544Profile(unittest.TestCase):
                 'rx_throughput_fps': 20,
             },
         }
-        traffic_generator.generate_samples = mock.MagicMock(return_value=samples)
+        traffic_generator.generate_samples.return_value = samples
         traffic_generator.RUN_DURATION = 30
         traffic_generator.rfc2544_helper.tolerance_low = 0.0001
         traffic_generator.rfc2544_helper.tolerance_high = 0.0001
@@ -164,8 +158,6 @@ class TestRFC2544Profile(unittest.TestCase):
                 "in_packets": 1000,
                 "out_packets": 1002,
             }
-        tol_min = 0.0
-        tolerance = 1.0
         expected = {
             'DropPercentage': 0.1996,
             'RxThroughput': 33.333333333333336,
@@ -181,7 +173,8 @@ class TestRFC2544Profile(unittest.TestCase):
                 'rx_throughput_fps': 20,
             },
         }
-        traffic_generator.generate_samples = mock.MagicMock(return_value=samples)
+        traffic_generator.generate_samples = mock.MagicMock(
+            return_value=samples)
         traffic_generator.RUN_DURATION = 30
         traffic_generator.rfc2544_helper.tolerance_low = 0.0001
         traffic_generator.rfc2544_helper.tolerance_high = 0.0001
@@ -198,7 +191,7 @@ class TestRFC2544Profile(unittest.TestCase):
             mock.Mock(return_value=True)
         r_f_c2544_profile = RFC2544Profile(self.TRAFFIC_PROFILE)
         r_f_c2544_profile.params = self.PROFILE
-        self.assertEqual(None, r_f_c2544_profile.execute_traffic(traffic_generator))
+        self.assertIsNone(r_f_c2544_profile.execute_traffic(traffic_generator))
         samples = {}
         for ifname in range(1):
             name = "xe{}".format(ifname)
@@ -208,8 +201,6 @@ class TestRFC2544Profile(unittest.TestCase):
                              "tx_throughput_mbps": 10,
                              "in_packets": 1000,
                              "out_packets": 0}
-        tol_min = 0.0
-        tolerance = 0.0
         r_f_c2544_profile.throughput_max = 0
         expected = {
             'DropPercentage': 100.0, 'RxThroughput': 100 / 3.0,
@@ -221,7 +212,7 @@ class TestRFC2544Profile(unittest.TestCase):
                 'tx_throughput_mbps': 10, 'rx_throughput_fps': 20
             }
         }
-        traffic_generator.generate_samples = mock.MagicMock(return_value=samples)
+        traffic_generator.generate_samples = mock.Mock(return_value=samples)
         traffic_generator.RUN_DURATION = 30
         traffic_generator.rfc2544_helper.tolerance_low = 0.0001
         traffic_generator.rfc2544_helper.tolerance_high = 0.0001
@@ -281,10 +272,5 @@ class TestRFC2544Profile(unittest.TestCase):
         r_f_c2544_profile.calculate_pps = mock.Mock(return_value=[2274546.67,
                                                                   1.0])
 
-        self.assertEqual(None,
-                         r_f_c2544_profile.execute_latency(traffic_generator,
-                                                           samples))
-
-
-if __name__ == '__main__':
-    unittest.main()
+        self.assertIsNone(r_f_c2544_profile.execute_latency(traffic_generator,
+                                                            samples))
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
 
-from __future__ import absolute_import
+import ipaddress
 
-import unittest
 import mock
+import six
+import unittest
 
 from tests.unit import STL_MOCKS
+from yardstick.common import exceptions as y_exc
 
 STLClient = mock.MagicMock()
 stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
@@ -28,16 +27,16 @@ stl_patch.start()
 
 if stl_patch:
     from yardstick.network_services.traffic_profile.base import TrafficProfile
-    from yardstick.network_services.traffic_profile.traffic_profile import TrexProfile
-    from yardstick.network_services.traffic_profile.traffic_profile import SRC
-    from yardstick.network_services.traffic_profile.traffic_profile import DST
-    from yardstick.network_services.traffic_profile.traffic_profile import ETHERNET
-    from yardstick.network_services.traffic_profile.traffic_profile import IP
-    from yardstick.network_services.traffic_profile.traffic_profile import IPv6
-    from yardstick.network_services.traffic_profile.traffic_profile import UDP
-    from yardstick.network_services.traffic_profile.traffic_profile import SRC_PORT
-    from yardstick.network_services.traffic_profile.traffic_profile import DST_PORT
-    from yardstick.network_services.traffic_profile.traffic_profile import TYPE_OF_SERVICE
+    from yardstick.network_services.traffic_profile.trex_traffic_profile import TrexProfile
+    from yardstick.network_services.traffic_profile.trex_traffic_profile import SRC
+    from yardstick.network_services.traffic_profile.trex_traffic_profile import DST
+    from yardstick.network_services.traffic_profile.trex_traffic_profile import ETHERNET
+    from yardstick.network_services.traffic_profile.trex_traffic_profile import IP
+    from yardstick.network_services.traffic_profile.trex_traffic_profile import IPv6
+    from yardstick.network_services.traffic_profile.trex_traffic_profile import UDP
+    from yardstick.network_services.traffic_profile.trex_traffic_profile import SRC_PORT
+    from yardstick.network_services.traffic_profile.trex_traffic_profile import DST_PORT
+    from yardstick.network_services.traffic_profile.trex_traffic_profile import TYPE_OF_SERVICE
 
 
 class TestTrexProfile(unittest.TestCase):
@@ -152,11 +151,11 @@ class TestTrexProfile(unittest.TestCase):
 
         trex_profile = \
             TrexProfile(TrafficProfile)
-        self.assertEqual(None, trex_profile.set_qinq(qinq))
+        self.assertIsNone(trex_profile.set_qinq(qinq))
 
         qinq = {"S-VLAN": {"id": "128-130", "priority": 0, "cfi": 0},
                 "C-VLAN": {"id": "512-515", "priority": 0, "cfi": 0}}
-        self.assertEqual(None, trex_profile.set_qinq(qinq))
+        self.assertIsNone(trex_profile.set_qinq(qinq))
 
     def test__set_outer_l2_fields(self):
         trex_profile = \
@@ -165,14 +164,14 @@ class TestTrexProfile(unittest.TestCase):
                 "C-VLAN": {"id": 512, "priority": 0, "cfi": 0}}
         outer_l2 = self.PROFILE[TrafficProfile.UPLINK]['ipv4']['outer_l2']
         outer_l2['QinQ'] = qinq
-        self.assertEqual(None, trex_profile._set_outer_l2_fields(outer_l2))
+        self.assertIsNone(trex_profile._set_outer_l2_fields(outer_l2))
 
     def test__set_outer_l3v4_fields(self):
         trex_profile = \
             TrexProfile(TrafficProfile)
         outer_l3v4 = self.PROFILE[TrafficProfile.UPLINK]['ipv4']['outer_l3v4']
         outer_l3v4['proto'] = 'tcp'
-        self.assertEqual(None, trex_profile._set_outer_l3v4_fields(outer_l3v4))
+        self.assertIsNone(trex_profile._set_outer_l3v4_fields(outer_l3v4))
 
     def test__set_outer_l3v6_fields(self):
         trex_profile = \
@@ -181,13 +180,13 @@ class TestTrexProfile(unittest.TestCase):
         outer_l3v6['proto'] = 'tcp'
         outer_l3v6['tc'] = 1
         outer_l3v6['hlim'] = 10
-        self.assertEqual(None, trex_profile._set_outer_l3v6_fields(outer_l3v6))
+        self.assertIsNone(trex_profile._set_outer_l3v6_fields(outer_l3v6))
 
     def test__set_outer_l4_fields(self):
         trex_profile = \
             TrexProfile(TrafficProfile)
         outer_l4 = self.PROFILE[TrafficProfile.UPLINK]['ipv4']['outer_l4']
-        self.assertEqual(None, trex_profile._set_outer_l4_fields(outer_l4))
+        self.assertIsNone(trex_profile._set_outer_l4_fields(outer_l4))
 
     def test_get_streams(self):
         trex_profile = \
@@ -215,11 +214,27 @@ class TestTrexProfile(unittest.TestCase):
             TrexProfile(TrafficProfile)
         self.assertEqual({}, trex_profile.generate_imix_data(False))
 
-    def test__get_start_end_ipv6(self):
-        trex_profile = \
-            TrexProfile(TrafficProfile)
-        self.assertRaises(SystemExit, trex_profile._get_start_end_ipv6,
-                          "1.1.1.3", "1.1.1.1")
+    def test__count_ip_ipv4(self):
+        start, end, count = TrexProfile._count_ip('1.1.1.1', '1.2.3.4')
+        self.assertEqual('1.1.1.1', str(start))
+        self.assertEqual('1.2.3.4', str(end))
+        diff = (int(ipaddress.IPv4Address(six.u('1.2.3.4'))) -
+                int(ipaddress.IPv4Address(six.u('1.1.1.1'))))
+        self.assertEqual(diff, count)
+
+    def test__count_ip_ipv6(self):
+        start_ip = '0064:ff9b:0:0:0:0:9810:6414'
+        end_ip = '0064:ff9b:0:0:0:0:9810:6420'
+        start, end, count = TrexProfile._count_ip(start_ip, end_ip)
+        self.assertEqual(0x98106414, start)
+        self.assertEqual(0x98106420, end)
+        self.assertEqual(0x98106420 - 0x98106414, count)
+
+    def test__count_ip_ipv6_exception(self):
+        start_ip = '0064:ff9b:0:0:0:0:9810:6420'
+        end_ip = '0064:ff9b:0:0:0:0:9810:6414'
+        with self.assertRaises(y_exc.IPv6RangeError):
+            TrexProfile._count_ip(start_ip, end_ip)
 
     def test__dscp_range_action_partial_actual_count_zero(self):
         traffic_profile = TrexProfile(TrafficProfile)
@@ -258,13 +273,17 @@ class TestTrexProfile(unittest.TestCase):
     def test__general_single_action_partial(self):
         trex_profile = TrexProfile(TrafficProfile)
 
-        trex_profile._general_single_action_partial(ETHERNET)(SRC)(self.EXAMPLE_ETHERNET_ADDR)
-        self.assertEqual(self.EXAMPLE_ETHERNET_ADDR, trex_profile.ether_packet.src)
+        trex_profile._general_single_action_partial(ETHERNET)(SRC)(
+            self.EXAMPLE_ETHERNET_ADDR)
+        self.assertEqual(self.EXAMPLE_ETHERNET_ADDR,
+                         trex_profile.ether_packet.src)
 
-        trex_profile._general_single_action_partial(IP)(DST)(self.EXAMPLE_IP_ADDR)
+        trex_profile._general_single_action_partial(IP)(DST)(
+            self.EXAMPLE_IP_ADDR)
         self.assertEqual(self.EXAMPLE_IP_ADDR, trex_profile.ip_packet.dst)
 
-        trex_profile._general_single_action_partial(IPv6)(DST)(self.EXAMPLE_IPv6_ADDR)
+        trex_profile._general_single_action_partial(IPv6)(DST)(
+            self.EXAMPLE_IPv6_ADDR)
         self.assertEqual(self.EXAMPLE_IPv6_ADDR, trex_profile.ip6_packet.dst)
 
         trex_profile._general_single_action_partial(UDP)(SRC_PORT)(5060)
index ee881c9..2ab1412 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
index f9a1014..2971b55 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,6 +19,7 @@ import os
 
 from tests.unit import STL_MOCKS
 from tests.unit.network_services.vnf_generic.vnf.test_base import mock_ssh
+from yardstick.common import utils
 
 
 STLClient = mock.MagicMock()
@@ -312,7 +311,7 @@ class TestAclApproxVnf(unittest.TestCase):
         acl_approx_vnf.ssh_helper.run.assert_called_once()
 
     @mock.patch("yardstick.network_services.vnf_generic.vnf.acl_vnf.YangModel")
-    @mock.patch("yardstick.network_services.vnf_generic.vnf.acl_vnf.find_relative_file")
+    @mock.patch.object(utils, 'find_relative_file')
     @mock.patch("yardstick.network_services.vnf_generic.vnf.sample_vnf.Context")
     @mock.patch(SSH_HELPER)
     def test_instantiate(self, ssh, *args):
@@ -345,4 +344,4 @@ class TestAclApproxVnf(unittest.TestCase):
         acl_approx_vnf.vnf_execute = mock.MagicMock()
         acl_approx_vnf.dpdk_devbind = "dpdk-devbind.py"
         acl_approx_vnf._resource_collect_stop = mock.Mock()
-        self.assertEqual(None, acl_approx_vnf.terminate())
+        self.assertIsNone(acl_approx_vnf.terminate())
index e9488f7..664373f 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -195,7 +193,7 @@ class TestQueueFileWrapper(unittest.TestCase):
     def test_close(self):
         queue_file_wrapper = \
             base.QueueFileWrapper(self.q_in, self.q_out, self.prompt)
-        self.assertEqual(None, queue_file_wrapper.close())
+        self.assertIsNone(queue_file_wrapper.close())
 
     def test_read(self):
         queue_file_wrapper = \
index 62b3c74..edaa0ad 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
index 0ac46c6..faceeb6 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,6 +22,7 @@ import mock
 import unittest
 
 from tests.unit import STL_MOCKS
+from yardstick.common import utils
 from yardstick.network_services.vnf_generic.vnf.base import VnfdHelper
 
 
@@ -962,7 +961,7 @@ class TestProxDpdkVnfSetupEnvHelper(unittest.TestCase):
         result = setup_helper.prox_config_data
         self.assertEqual(result, expected)
 
-    @mock.patch('yardstick.network_services.vnf_generic.vnf.prox_helpers.find_relative_file')
+    @mock.patch.object(utils, 'find_relative_file')
     def test_build_config_file_no_additional_file(self, mock_find_path):
         vnf1 = {
             'prox_args': {'-c': ""},
@@ -996,7 +995,7 @@ class TestProxDpdkVnfSetupEnvHelper(unittest.TestCase):
         self.assertEqual(helper._prox_config_data, '4')
         self.assertEqual(helper.remote_path, '5')
 
-    @mock.patch('yardstick.network_services.vnf_generic.vnf.prox_helpers.find_relative_file')
+    @mock.patch.object(utils, 'find_relative_file')
     def test_build_config_file_additional_file_string(self, mock_find_path):
         vnf1 = {
             'prox_args': {'-c': ""},
@@ -1028,7 +1027,7 @@ class TestProxDpdkVnfSetupEnvHelper(unittest.TestCase):
         helper.build_config_file()
         self.assertDictEqual(helper.additional_files, expected)
 
-    @mock.patch('yardstick.network_services.vnf_generic.vnf.prox_helpers.find_relative_file')
+    @mock.patch.object(utils, 'find_relative_file')
     def test_build_config_file_additional_file(self, mock_find_path):
         vnf1 = {
             'prox_args': {'-c': ""},
@@ -1730,7 +1729,7 @@ class TestProxProfileHelper(unittest.TestCase):
         }
 
         self.assertIsNone(helper._test_cores)
-        expected = [12, 23]
+        expected = [3, 4]
         result = helper.test_cores
         self.assertEqual(result, expected)
         self.assertIs(result, helper._test_cores)
@@ -1787,7 +1786,7 @@ class TestProxProfileHelper(unittest.TestCase):
         }
 
         self.assertIsNone(helper._latency_cores)
-        expected = [12, 23]
+        expected = [3, 4]
         result = helper.latency_cores
         self.assertEqual(result, expected)
         self.assertIs(result, helper._latency_cores)
@@ -1842,7 +1841,7 @@ class TestProxProfileHelper(unittest.TestCase):
             }
         }
 
-        expected = [7, 8]
+        expected = [3, 4]
         result = helper.get_cores(helper.PROX_CORE_GEN_MODE)
         self.assertEqual(result, expected)
 
@@ -1984,8 +1983,8 @@ class TestProxMplsProfileHelper(unittest.TestCase):
             }
         }
 
-        expected_tagged = [7]
-        expected_plain = [8]
+        expected_tagged = [3]
+        expected_plain = [4]
         self.assertIsNone(helper._cores_tuple)
         self.assertEqual(helper.tagged_cores, expected_tagged)
         self.assertEqual(helper.plain_cores, expected_plain)
@@ -2060,10 +2059,10 @@ class TestProxBngProfileHelper(unittest.TestCase):
             }
         }
 
-        expected_cpe = [7]
-        expected_inet = [8]
-        expected_arp = [4, 3]
-        expected_arp_task = [0, 4]
+        expected_cpe = [3]
+        expected_inet = [4]
+        expected_arp = [6, 9]
+        expected_arp_task = [0, 6]
         expected_combined = (expected_cpe, expected_inet, expected_arp, expected_arp_task)
 
         self.assertIsNone(helper._cores_tuple)
@@ -2131,8 +2130,8 @@ class TestProxVpeProfileHelper(unittest.TestCase):
             }
         }
 
-        expected_cpe = [7]
-        expected_inet = [8]
+        expected_cpe = [3]
+        expected_inet = [4]
         expected_combined = (expected_cpe, expected_inet)
 
         self.assertIsNone(helper._cores_tuple)
@@ -2245,8 +2244,8 @@ class TestProxlwAFTRProfileHelper(unittest.TestCase):
             }
         }
 
-        expected_tun = [7]
-        expected_inet = [8]
+        expected_tun = [3]
+        expected_inet = [4]
         expected_combined = (expected_tun, expected_inet)
 
         self.assertIsNone(helper._cores_tuple)
index 7692790..159b1f7 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -131,6 +129,8 @@ class TestProxApproxVnf(unittest.TestCase):
                 'packets_in',
                 'packets_fwd',
                 'packets_dropped',
+                'curr_packets_fwd',
+                'curr_packets_in'
             ],
         },
         'connection-point': [
@@ -329,7 +329,7 @@ class TestProxApproxVnf(unittest.TestCase):
             'packets_in': 0,
             'packets_dropped': 0,
             'packets_fwd': 0,
-            'collect_stats': {'core': {}},
+            'collect_stats': {'core': {}}
         }
         result = prox_approx_vnf.collect_kpi()
         self.assertEqual(result, expected)
@@ -352,7 +352,11 @@ class TestProxApproxVnf(unittest.TestCase):
             'collect_stats': {'core': {'result': 234}},
         }
         result = prox_approx_vnf.collect_kpi()
-        self.assertEqual(result, expected)
+        self.assertEqual(result['packets_in'], expected['packets_in'])
+        self.assertEqual(result['packets_dropped'], expected['packets_dropped'])
+        self.assertEqual(result['packets_fwd'], expected['packets_fwd'])
+        self.assertNotEqual(result['packets_fwd'], 0)
+        self.assertNotEqual(result['packets_in'], 0)
 
     @mock.patch(SSH_HELPER)
     def test_collect_kpi_error(self, ssh, *args):
@@ -373,6 +377,25 @@ class TestProxApproxVnf(unittest.TestCase):
         file_path = os.path.join(curr_path, filename)
         return file_path
 
+    @mock.patch('yardstick.common.utils.open', create=True)
+    @mock.patch('yardstick.benchmark.scenarios.networking.vnf_generic.open', create=True)
+    @mock.patch('yardstick.network_services.helpers.iniparser.open', create=True)
+    @mock.patch(SSH_HELPER)
+    def test_run_prox(self, ssh, *_):
+        mock_ssh(ssh)
+
+        prox_approx_vnf = ProxApproxVnf(NAME, self.VNFD0)
+        prox_approx_vnf.scenario_helper.scenario_cfg = self.SCENARIO_CFG
+        prox_approx_vnf.ssh_helper.join_bin_path.return_value = '/tool_path12/tool_file34'
+        prox_approx_vnf.setup_helper.remote_path = 'configs/file56.cfg'
+
+        expected = "sudo bash -c 'cd /tool_path12; " \
+                   "/tool_path12/tool_file34 -o cli -t  -f /tmp/l3-swap-2.cfg '"
+
+        prox_approx_vnf._run()
+        result = prox_approx_vnf.ssh_helper.run.call_args[0][0]
+        self.assertEqual(result, expected)
+
     @mock.patch(SSH_HELPER)
     def bad_test_instantiate(self, *args):
         prox_approx_vnf = ProxApproxVnf(NAME, self.VNFD0)
index 9ed6fd5..c6292f2 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -261,7 +259,4 @@ class TestRouterVNF(unittest.TestCase):
         router_vnf = RouterVNF(name, vnfd)
         router_vnf._vnf_process = mock.MagicMock()
         router_vnf._vnf_process.terminate = mock.Mock()
-        self.assertEqual(None, router_vnf.terminate())
-
-if __name__ == '__main__':
-    unittest.main()
+        self.assertIsNone(router_vnf.terminate())
index af941c0..c7d2abc 100644 (file)
@@ -1,6 +1,4 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2017 Intel Corporation
+# Copyright (c) 2017-2018 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # limitations under the License.
 #
 
+from copy import deepcopy
+
 import unittest
 import mock
-from copy import deepcopy
+import six
 
 from tests.unit.network_services.vnf_generic.vnf.test_base import mock_ssh
 from tests.unit import STL_MOCKS
 from yardstick.benchmark.contexts.base import Context
 from yardstick.common import exceptions as y_exceptions
+from yardstick.common import utils
 from yardstick.network_services.nfvi.resource import ResourceProfile
 from yardstick.network_services.vnf_generic.vnf.base import VnfdHelper
 
@@ -36,7 +37,8 @@ stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
 stl_patch.start()
 
 if stl_patch:
-    from yardstick.network_services.vnf_generic.vnf.sample_vnf import VnfSshHelper
+    from yardstick.network_services.vnf_generic.vnf import sample_vnf
+    from yardstick.network_services.vnf_generic.vnf.vnf_ssh_helper import VnfSshHelper
     from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNFDeployHelper
     from yardstick.network_services.vnf_generic.vnf.sample_vnf import ScenarioHelper
     from yardstick.network_services.vnf_generic.vnf.sample_vnf import ResourceHelper
@@ -528,46 +530,25 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
         result = DpdkVnfSetupEnvHelper._update_traffic_type(ip_pipeline_cfg, traffic_options)
         self.assertEqual(result, expected)
 
-    def test__setup_hugepages(self):
-        vnfd_helper = VnfdHelper(self.VNFD_0)
+    @mock.patch.object(six, 'BytesIO', return_value=six.BytesIO(b'100\n'))
+    @mock.patch.object(utils, 'read_meminfo',
+                       return_value={'Hugepagesize': '2048'})
+    def test__setup_hugepages(self, mock_meminfo, *args):
         ssh_helper = mock.Mock()
-        ssh_helper.execute.return_value = 0, '', ''
-        scenario_helper = mock.Mock()
-        dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
-
-        result = dpdk_setup_helper._setup_hugepages()
-        expect_start_list = ['awk', 'awk', 'echo']
-        expect_in_list = ['meminfo', 'nr_hugepages', '16']
-        call_args_iter = (args[0][0] for args in ssh_helper.execute.call_args_list)
-        self.assertIsNone(result)
-        self.assertEqual(ssh_helper.execute.call_count, 3)
-        for expect_start, expect_in, arg0 in zip(expect_start_list, expect_in_list,
-                                                 call_args_iter):
-            self.assertTrue(arg0.startswith(expect_start))
-            self.assertIn(expect_in, arg0)
-
-    def test__setup_hugepages_2_mb(self):
-        vnfd_helper = VnfdHelper(self.VNFD_0)
-        ssh_helper = mock.Mock()
-        ssh_helper.execute.return_value = 0, '2048kB  ', ''
-        scenario_helper = mock.Mock()
-        dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
-
-        result = dpdk_setup_helper._setup_hugepages()
-        expect_start_list = ['awk', 'awk', 'echo']
-        expect_in_list = ['meminfo', 'nr_hugepages', '8192']
-        call_args_iter = (args[0][0] for args in ssh_helper.execute.call_args_list)
-        self.assertIsNone(result)
-        self.assertEqual(ssh_helper.execute.call_count, 3)
-        for expect_start, expect_in, arg0 in zip(expect_start_list, expect_in_list,
-                                                 call_args_iter):
-            self.assertTrue(arg0.startswith(expect_start))
-            self.assertIn(expect_in, arg0)
+        dpdk_setup_helper = DpdkVnfSetupEnvHelper(
+            mock.ANY, ssh_helper, mock.ANY)
+        with mock.patch.object(sample_vnf.LOG, 'info') as mock_info:
+            dpdk_setup_helper._setup_hugepages()
+            mock_info.assert_called_once_with(
+                'Hugepages size (kB): %s, number claimed: %s, number set: '
+                '%s', 2048, 8192, 100)
+        mock_meminfo.assert_called_once_with(ssh_helper)
 
     @mock.patch('yardstick.network_services.vnf_generic.vnf.sample_vnf.open')
-    @mock.patch('yardstick.network_services.vnf_generic.vnf.sample_vnf.find_relative_file')
+    @mock.patch.object(utils, 'find_relative_file')
     @mock.patch('yardstick.network_services.vnf_generic.vnf.sample_vnf.MultiPortConfig')
-    def test_build_config(self, mock_multi_port_config_class, mock_find, *args):
+    @mock.patch.object(utils, 'open_relative_file')
+    def test_build_config(self, mock_open_rf, mock_multi_port_config_class, mock_find, *args):
         mock_multi_port_config = mock_multi_port_config_class()
         vnfd_helper = VnfdHelper(self.VNFD_0)
         ssh_helper = mock.Mock()
@@ -584,6 +565,20 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
         self.assertGreaterEqual(mock_multi_port_config.generate_config.call_count, 1)
         self.assertGreaterEqual(mock_multi_port_config.generate_script.call_count, 1)
 
+        scenario_helper.vnf_cfg = {'file': 'fake_file'}
+        dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
+        mock_open_rf.side_effect = mock.mock_open(read_data='fake_data')
+        dpdk_setup_helper.PIPELINE_COMMAND = expected = 'pipeline command'
+
+        result = dpdk_setup_helper.build_config()
+
+        mock_open_rf.assert_called_once()
+        self.assertEqual(result, expected)
+        self.assertGreaterEqual(ssh_helper.upload_config_file.call_count, 2)
+        self.assertGreaterEqual(mock_find.call_count, 1)
+        self.assertGreaterEqual(mock_multi_port_config.generate_config.call_count, 1)
+        self.assertGreaterEqual(mock_multi_port_config.generate_script.call_count, 1)
+
     def test__build_pipeline_kwargs(self):
         vnfd_helper = VnfdHelper(self.VNFD_0)
         ssh_helper = mock.Mock()
@@ -619,42 +614,29 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
         ssh_helper = mock.Mock()
         ssh_helper.execute = execute
 
-        dpdk_vnf_setup_env_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, mock.Mock())
-        dpdk_vnf_setup_env_helper._validate_cpu_cfg = mock.Mock(return_value=[])
-
-        self.assertIsInstance(dpdk_vnf_setup_env_helper.setup_vnf_environment(), ResourceProfile)
-
-    def test__setup_dpdk_early_success(self):
-        vnfd_helper = VnfdHelper(self.VNFD_0)
-        ssh_helper = mock.Mock()
-        ssh_helper.execute.return_value = 0, 'output', ''
-        ssh_helper.join_bin_path.return_value = 'joined_path'
-        ssh_helper.provision_tool.return_value = 'provision string'
         scenario_helper = mock.Mock()
-        dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
-        dpdk_setup_helper._setup_hugepages = mock.Mock()
+        scenario_helper.nodes = [None, None]
+        dpdk_vnf_setup_env_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
+        dpdk_vnf_setup_env_helper._validate_cpu_cfg = mock.Mock(return_value=[])
 
-        self.assertIsNone(dpdk_setup_helper._setup_dpdk())
-        self.assertEqual(dpdk_setup_helper.ssh_helper.execute.call_count, 2)
+        with mock.patch.object(dpdk_vnf_setup_env_helper, '_setup_dpdk'):
+            self.assertIsInstance(
+                dpdk_vnf_setup_env_helper.setup_vnf_environment(),
+                ResourceProfile)
 
-    @mock.patch('yardstick.ssh.SSH')
-    def test__setup_dpdk_short(self, _):
-        def execute_side(cmd):
-            if 'joined_path' in cmd:
-                return 0, 'output', ''
-            return 1, 'bad output', 'error output'
-
-        vnfd_helper = VnfdHelper(self.VNFD_0)
+    def test__setup_dpdk(self):
         ssh_helper = mock.Mock()
-        ssh_helper.execute.side_effect = execute_side
-        ssh_helper.join_bin_path.return_value = 'joined_path'
-        ssh_helper.provision_tool.return_value = 'provision string'
-        scenario_helper = mock.Mock()
-        dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
-        dpdk_setup_helper._setup_hugepages = mock.Mock()
-
-        self.assertIsNone(dpdk_setup_helper._setup_dpdk())
-        self.assertEqual(dpdk_setup_helper.ssh_helper.execute.call_count, 3)
+        ssh_helper.execute = mock.Mock()
+        ssh_helper.execute.return_value = (0, 0, 0)
+        dpdk_setup_helper = DpdkVnfSetupEnvHelper(mock.ANY, ssh_helper, mock.ANY)
+        with mock.patch.object(dpdk_setup_helper, '_setup_hugepages') as \
+                mock_setup_hp:
+            dpdk_setup_helper._setup_dpdk()
+        mock_setup_hp.assert_called_once()
+        ssh_helper.execute.assert_has_calls([
+            mock.call('sudo modprobe uio && sudo modprobe igb_uio'),
+            mock.call('lsmod | grep -i igb_uio')
+        ])
 
     @mock.patch('yardstick.ssh.SSH')
     def test__setup_resources(self, _):
@@ -693,6 +675,7 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
         # ssh_helper.execute = mock.Mock(return_value = (0, 'text', ''))
         # ssh_helper.execute.return_value = 0, 'output', ''
         scenario_helper = mock.Mock()
+        scenario_helper.nodes = [None, None]
         rv = ['0000:05:00.1', '0000:05:00.0']
 
         dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
@@ -711,6 +694,7 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
         vnfd_helper = VnfdHelper(self.VNFD_0)
         ssh_helper = mock.Mock()
         scenario_helper = mock.Mock()
+        scenario_helper.nodes = [None, None]
         dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
         dpdk_setup_helper.dpdk_bind_helper.bind = mock.Mock()
         dpdk_setup_helper.dpdk_bind_helper.used_drivers = {
@@ -1389,7 +1373,7 @@ class TestSampleVNFDeployHelper(unittest.TestCase):
 
     @mock.patch('yardstick.network_services.vnf_generic.vnf.sample_vnf.time')
     @mock.patch('subprocess.check_output')
-    def test_deploy_vnfs_disabled(self, *args):
+    def test_deploy_vnfs_disabled(self, *_):
         vnfd_helper = mock.Mock()
         ssh_helper = mock.Mock()
         ssh_helper.join_bin_path.return_value = 'joined_path'
index d770681..d831ddd 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # limitations under the License.
 #
 
-from __future__ import absolute_import
+import subprocess
 
-import unittest
 import mock
-import subprocess
+import unittest
+import six
 
 from tests.unit import STL_MOCKS
+from yardstick import ssh
+from yardstick.common import utils
 
 
 STLClient = mock.MagicMock()
@@ -145,13 +145,13 @@ class TestIxLoadTrafficGen(unittest.TestCase):
             ssh.from_node.return_value = ssh_mock
             vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
             ixload_traffic_gen = IxLoadTrafficGen(NAME, vnfd)
-            self.assertEqual(None, ixload_traffic_gen.listen_traffic({}))
+            self.assertIsNone(ixload_traffic_gen.listen_traffic({}))
 
-    @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.makedirs")
+    @mock.patch.object(utils, 'find_relative_file')
+    @mock.patch.object(utils, 'makedirs')
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.shutil")
-    def test_instantiate(self, call, shutil, mock_makedirs):
-        # pylint: disable=unused-argument
+    def test_instantiate(self, shutil, *args):
         with mock.patch("yardstick.ssh.SSH") as ssh:
             ssh_mock = mock.Mock(autospec=ssh.SSH)
             ssh_mock.execute = \
@@ -175,19 +175,18 @@ class TestIxLoadTrafficGen(unittest.TestCase):
                                                                        '1C/1T',
                                                                        'worker_threads': 1}}
                                              }})
-            with mock.patch('yardstick.benchmark.scenarios.networking.vnf_generic.open',
-                            create=True) as mock_open:
+            with mock.patch.object(six.moves.builtins, 'open',
+                                   create=True) as mock_open:
                 mock_open.return_value = mock.MagicMock()
                 ixload_traffic_gen.instantiate(scenario_cfg, {})
 
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
-    @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.shutil")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.open")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.min")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.max")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.len")
-    def test_run_traffic(self, call, shutil, main_open, min, max, len):
-        # pylint: disable=unused-argument
+    @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.shutil")
+    def test_run_traffic(self, shutil, *args):
         mock_traffic_profile = mock.Mock(autospec=TrafficProfile)
         mock_traffic_profile.get_traffic_definition.return_value = "64"
         mock_traffic_profile.params = self.TRAFFIC_PROFILE
@@ -213,13 +212,12 @@ class TestIxLoadTrafficGen(unittest.TestCase):
             self.assertIsNone(result)
 
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
-    @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.shutil")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.open")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.min")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.max")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.len")
-    def test_run_traffic_csv(self, call, shutil, main_open, min, max, len):
-        # pylint: disable=unused-argument
+    @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.shutil")
+    def test_run_traffic_csv(self, shutil, *args):
         mock_traffic_profile = mock.Mock(autospec=TrafficProfile)
         mock_traffic_profile.get_traffic_definition.return_value = "64"
         mock_traffic_profile.params = self.TRAFFIC_PROFILE
@@ -247,20 +245,15 @@ class TestIxLoadTrafficGen(unittest.TestCase):
             self.assertIsNone(result)
 
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
+    @mock.patch.object(ssh, 'SSH')
     def test_terminate(self, *args):
-        with mock.patch("yardstick.ssh.SSH") as ssh:
-            vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
-            ssh_mock = mock.Mock(autospec=ssh.SSH)
-            ssh_mock.execute = \
-                mock.Mock(return_value=(0, "", ""))
-            ssh.from_node.return_value = ssh_mock
-            ixload_traffic_gen = IxLoadTrafficGen(NAME, vnfd)
-            self.assertEqual(None, ixload_traffic_gen.terminate())
+        vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
+        ixload_traffic_gen = IxLoadTrafficGen(NAME, vnfd)
+        self.assertIsNone(ixload_traffic_gen.terminate())
 
-    @mock.patch("yardstick.ssh.SSH")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
-    def test_parse_csv_read(self, mock_call, mock_ssh):
-        # pylint: disable=unused-argument
+    @mock.patch.object(ssh, 'SSH')
+    def test_parse_csv_read(self, mock_ssh, *args):
         vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         kpi_data = {
             'HTTP Total Throughput (Kbps)': 1,
@@ -282,10 +275,9 @@ class TestIxLoadTrafficGen(unittest.TestCase):
         for key_left, key_right in IxLoadResourceHelper.KPI_LIST.items():
             self.assertEqual(result[key_left][-1], int(kpi_data[key_right]))
 
-    @mock.patch("yardstick.ssh.SSH")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
-    def test_parse_csv_read_value_error(self, mock_call, mock_ssh):
-        # pylint: disable=unused-argument
+    @mock.patch.object(ssh, 'SSH')
+    def test_parse_csv_read_value_error(self, mock_ssh, *args):
         vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         http_reader = [{
             'HTTP Total Throughput (Kbps)': 1,
@@ -305,10 +297,9 @@ class TestIxLoadTrafficGen(unittest.TestCase):
         ixload_traffic_gen.resource_helper.parse_csv_read(http_reader)
         self.assertDictEqual(ixload_traffic_gen.resource_helper.result, init_value)
 
-    @mock.patch("yardstick.ssh.SSH")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
-    def test_parse_csv_read_error(self, mock_call, mock_ssh):
-        # pylint: disable=unused-argument
+    @mock.patch.object(ssh, 'SSH')
+    def test_parse_csv_read_error(self, mock_ssh, *args):
         vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
         http_reader = [{
             'HTTP Total Throughput (Kbps)': 1,
index fb26f20..91a353d 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -36,7 +34,7 @@ if stl_patch:
     from yardstick.network_services.vnf_generic.vnf.tg_ping import PingTrafficGen
     from yardstick.network_services.vnf_generic.vnf.tg_ping import PingResourceHelper
     from yardstick.network_services.vnf_generic.vnf.tg_ping import PingSetupEnvHelper
-    from yardstick.network_services.vnf_generic.vnf.sample_vnf import VnfSshHelper
+    from yardstick.network_services.vnf_generic.vnf.vnf_ssh_helper import VnfSshHelper
 
 
 class TestPingResourceHelper(unittest.TestCase):
index 0104e7f..2151a32 100644 (file)
@@ -423,4 +423,4 @@ class TestProxTrafficGen(unittest.TestCase):
         prox_traffic_gen._vnf_wrapper.setup_helper = mock.MagicMock()
         prox_traffic_gen._vnf_wrapper._vnf_process = mock.MagicMock()
         prox_traffic_gen._vnf_wrapper.resource_helper = mock.MagicMock()
-        self.assertEqual(None, prox_traffic_gen.terminate())
+        self.assertIsNone(prox_traffic_gen.terminate())
index e9f718c..61fc012 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 #
 
 import os
-import unittest
+
 import mock
+import six
+import unittest
 
 from tests.unit import STL_MOCKS
 
@@ -170,7 +170,7 @@ class TestIXIATrafficGen(unittest.TestCase):
             ssh.from_node.return_value = ssh_mock
             vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
             ixnet_traffic_gen = IxiaTrafficGen(NAME, vnfd)
-            self.assertEqual(None, ixnet_traffic_gen.listen_traffic({}))
+            self.assertIsNone(ixnet_traffic_gen.listen_traffic({}))
 
     def test_instantiate(self, *args):
         with mock.patch("yardstick.ssh.SSH") as ssh:
@@ -228,7 +228,7 @@ class TestIXIATrafficGen(unittest.TestCase):
             ixnet_traffic_gen._ixia_traffic_gen.ix_stop_traffic = mock.Mock()
             ixnet_traffic_gen._traffic_process = mock.MagicMock()
             ixnet_traffic_gen._traffic_process.terminate = mock.Mock()
-            self.assertEqual(None, ixnet_traffic_gen.terminate())
+            self.assertIsNone(ixnet_traffic_gen.terminate())
 
     def _get_file_abspath(self, filename):
         curr_path = os.path.dirname(os.path.abspath(__file__))
@@ -341,7 +341,7 @@ class TestIXIATrafficGen(unittest.TestCase):
             'task_path': '/path/to/task'
         }
 
-        @mock.patch('yardstick.benchmark.scenarios.networking.vnf_generic.open', create=True)
+        @mock.patch.object(six.moves.builtins, 'open', create=True)
         @mock.patch('yardstick.network_services.vnf_generic.vnf.tg_rfc2544_ixia.open',
                     mock.mock_open(), create=True)
         @mock.patch('yardstick.network_services.vnf_generic.vnf.tg_rfc2544_ixia.LOG.exception')
index 7342cfc..b9a95a9 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
index 6180715..f80d1f9 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -483,7 +481,3 @@ class TestTrexTrafficGen(unittest.TestCase):
         client = mock.Mock(autospec=STLClient)
         client.connect = mock.Mock(return_value=0)
         self.assertIsNotNone(trex_traffic_gen.resource_helper._connect(client))
-
-
-if __name__ == '__main__':
-    unittest.main()
index 472052b..4cf4320 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
index f0a5666..48fc87e 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -22,6 +20,9 @@ import os
 from tests.unit import STL_MOCKS
 from tests.unit.network_services.vnf_generic.vnf.test_base import mock_ssh
 
+from yardstick.common import utils
+
+
 STLClient = mock.MagicMock()
 stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
 stl_patch.start()
@@ -331,7 +332,7 @@ pipeline>
         vfw_approx_vnf._run()
         vfw_approx_vnf.ssh_helper.run.assert_called_once()
 
-    @mock.patch("yardstick.network_services.vnf_generic.vnf.vfw_vnf.find_relative_file")
+    @mock.patch.object(utils, 'find_relative_file')
     @mock.patch("yardstick.network_services.vnf_generic.vnf.vfw_vnf.YangModel")
     @mock.patch("yardstick.network_services.vnf_generic.vnf.sample_vnf.Context")
     @mock.patch(SSH_HELPER)
index c074dfb..8c45d97 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/tox.ini b/tox.ini
index 822ffda..313f1ec 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -6,6 +6,8 @@ envlist = py{27,3},pep8,functional{,-py3},coverage
 [testenv]
 usedevelop=True
 passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY BRANCH
+setenv =
+   VIRTUAL_ENV={envdir}
 deps =
     -r{toxinidir}/requirements.txt
     -r{toxinidir}/test-requirements.txt
index c9b5b51..ae8319e 100644 (file)
@@ -6,17 +6,41 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-from __future__ import absolute_import
 import abc
 import six
 
 import yardstick.common.utils as utils
 
 
+class Flags(object):
+    """Class to represent the status of the flags in a context"""
+
+    _FLAGS = {'no_setup': False,
+              'no_teardown': False}
+
+    def __init__(self, **kwargs):
+        for name, value in self._FLAGS.items():
+            setattr(self, name, value)
+
+        for name, value in ((name, value) for (name, value) in kwargs.items()
+                            if name in self._FLAGS):
+            setattr(self, name, value)
+
+    def parse(self, **kwargs):
+        """Read in values matching the flags stored in this object"""
+        if not kwargs:
+            return
+
+        for name, value in ((name, value) for (name, value) in kwargs.items()
+                            if name in self._FLAGS):
+            setattr(self, name, value)
+
+
 @six.add_metaclass(abc.ABCMeta)
 class Context(object):
     """Class that represents a context in the logical model"""
     list = []
+    SHORT_TASK_ID_LEN = 8
 
     @staticmethod
     def split_name(name, sep='.'):
@@ -29,10 +53,28 @@ class Context(object):
 
     def __init__(self):
         Context.list.append(self)
+        self._flags = Flags()
+        self._name = None
+        self._task_id = None
 
-    @abc.abstractmethod
     def init(self, attrs):
-        """Initiate context."""
+        """Initiate context"""
+        self._name = attrs['name']
+        self._task_id = attrs['task_id']
+        self._flags.parse(**attrs.get('flags', {}))
+        self._name_task_id = '{}-{}'.format(
+            self._name, self._task_id[:self.SHORT_TASK_ID_LEN])
+
+    @property
+    def name(self):
+        if self._flags.no_setup or self._flags.no_teardown:
+            return self._name
+        else:
+            return self._name_task_id
+
+    @property
+    def assigned_name(self):
+        return self._name
 
     @staticmethod
     def get_cls(context_type):
index 8ae4b65..a9e4564 100644 (file)
@@ -7,33 +7,25 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-from __future__ import absolute_import
-import logging
-
 from yardstick.benchmark.contexts.base import Context
 
 
-LOG = logging.getLogger(__name__)
-
-
 class DummyContext(Context):
-    """Class that handle dummy info"""
-
-    __context_type__ = "Dummy"
+    """Class that handle dummy info.
 
-    def __init__(self):
-        super(DummyContext, self).__init__()
+    This class is also used to test the abstract class Context because it
+    provides a minimal concrete implementation of a subclass.
+    """
 
-    def init(self, attrs):
-        pass
+    __context_type__ = "Dummy"
 
     def deploy(self):
-        """don't need to deploy"""
+        """Don't need to deploy"""
         pass
 
     def undeploy(self):
-        """don't need to undeploy"""
-        super(DummyContext, self).undeploy()
+        """Don't need to undeploy"""
+        pass
 
     def _get_server(self, attr_name):
         return None
index 4ba543b..77ac248 100644 (file)
@@ -13,7 +13,6 @@ from __future__ import print_function
 import collections
 import logging
 import os
-import uuid
 import errno
 from collections import OrderedDict
 
@@ -25,9 +24,12 @@ from yardstick.benchmark.contexts.model import Network
 from yardstick.benchmark.contexts.model import PlacementGroup, ServerGroup
 from yardstick.benchmark.contexts.model import Server
 from yardstick.benchmark.contexts.model import update_scheduler_hints
+from yardstick.common import exceptions as y_exc
 from yardstick.common.openstack_utils import get_neutron_client
-from yardstick.orchestrator.heat import HeatTemplate, get_short_key_uuid
+from yardstick.orchestrator.heat import HeatStack
+from yardstick.orchestrator.heat import HeatTemplate
 from yardstick.common import constants as consts
+from yardstick.common import utils
 from yardstick.common.utils import source_env
 from yardstick.ssh import SSH
 
@@ -50,7 +52,6 @@ class HeatContext(Context):
     __context_type__ = "Heat"
 
     def __init__(self):
-        self.name = None
         self.stack = None
         self.networks = OrderedDict()
         self.heat_timeout = None
@@ -68,13 +69,8 @@ class HeatContext(Context):
         self.template_file = None
         self.heat_parameters = None
         self.neutron_client = None
-        # generate an uuid to identify yardstick_key
-        # the first 8 digits of the uuid will be used
-        self.key_uuid = uuid.uuid4()
         self.heat_timeout = None
-        self.key_filename = ''.join(
-            [consts.YARDSTICK_ROOT_PATH, 'yardstick/resources/files/yardstick_key-',
-             get_short_key_uuid(self.key_uuid)])
+        self.key_filename = None
         super(HeatContext, self).__init__()
 
     @staticmethod
@@ -95,13 +91,15 @@ class HeatContext(Context):
         return sorted_networks
 
     def init(self, attrs):
-        self.check_environment()
-        """initializes itself from the supplied arguments"""
-        self.name = attrs["name"]
+        """Initializes itself from the supplied arguments"""
+        super(HeatContext, self).init(attrs)
 
+        self.check_environment()
         self._user = attrs.get("user")
 
         self.template_file = attrs.get("heat_template")
+
+        self.heat_timeout = attrs.get("timeout", DEFAULT_HEAT_TIMEOUT)
         if self.template_file:
             self.heat_parameters = attrs.get("heat_parameters")
             return
@@ -113,8 +111,6 @@ class HeatContext(Context):
 
         self._flavor = attrs.get("flavor")
 
-        self.heat_timeout = attrs.get("timeout", DEFAULT_HEAT_TIMEOUT)
-
         self.placement_groups = [PlacementGroup(name, self, pg_attrs["policy"])
                                  for name, pg_attrs in attrs.get(
                                  "placement_groups", {}).items()]
@@ -137,7 +133,16 @@ class HeatContext(Context):
             self._server_map[server.dn] = server
 
         self.attrs = attrs
-        SSH.gen_keys(self.key_filename)
+
+        self.key_filename = ''.join(
+            [consts.YARDSTICK_ROOT_PATH,
+             'yardstick/resources/files/yardstick_key-',
+             self.name])
+        # Permissions may have changed since creation; this can be fixed. If we
+        # overwrite the file, we lose future access to VMs using this key.
+        # As long as the file exists, even if it is unreadable, keep it intact
+        if not os.path.exists(self.key_filename):
+            SSH.gen_keys(self.key_filename)
 
     def check_environment(self):
         try:
@@ -176,7 +181,7 @@ class HeatContext(Context):
                 template.add_flavor(**self.flavor)
                 self.flavors.add(flavor)
 
-        template.add_keypair(self.keypair_name, self.key_uuid)
+        template.add_keypair(self.keypair_name, self.name)
         template.add_security_group(self.secgroup_name)
 
         for network in self.networks.values():
@@ -298,6 +303,25 @@ class HeatContext(Context):
                     network.network_type = neutron_net.get('provider:network_type')
                     network.neutron_info = neutron_net
 
+    def _create_new_stack(self, heat_template):
+        try:
+            return heat_template.create(block=True,
+                                        timeout=self.heat_timeout)
+        except KeyboardInterrupt:
+            raise y_exc.StackCreationInterrupt
+        except:
+            LOG.exception("stack failed")
+            # let the other failures happen, we want stack trace
+            raise
+
+    def _retrieve_existing_stack(self, stack_name):
+        stack = HeatStack(stack_name)
+        if stack.get():
+            return stack
+        else:
+            LOG.warning("Stack %s does not exist", self.name)
+            return None
+
     def deploy(self):
         """deploys template into a stack using cloud"""
         LOG.info("Deploying context '%s' START", self.name)
@@ -308,15 +332,14 @@ class HeatContext(Context):
         if self.template_file is None:
             self._add_resources_to_template(heat_template)
 
-        try:
-            self.stack = heat_template.create(block=True,
-                                              timeout=self.heat_timeout)
-        except KeyboardInterrupt:
-            raise SystemExit("\nStack create interrupted")
-        except:
-            LOG.exception("stack failed")
-            # let the other failures happen, we want stack trace
-            raise
+        if self._flags.no_setup:
+            # Try to get an existing stack, returns a stack or None
+            self.stack = self._retrieve_existing_stack(self.name)
+            if not self.stack:
+                self.stack = self._create_new_stack(heat_template)
+
+        else:
+            self.stack = self._create_new_stack(heat_template)
 
         # TODO: use Neutron to get segmentation-id
         self.get_neutron_info()
@@ -380,20 +403,27 @@ class HeatContext(Context):
             "local_ip": private_ip,
         }
 
+    def _delete_key_file(self):
+        try:
+            utils.remove_file(self.key_filename)
+            utils.remove_file(self.key_filename + ".pub")
+        except OSError:
+            LOG.exception("There was an error removing the key file %s",
+                          self.key_filename)
+
     def undeploy(self):
         """undeploys stack from cloud"""
+        if self._flags.no_teardown:
+            LOG.info("Undeploying context '%s' SKIP", self.name)
+            return
+
         if self.stack:
             LOG.info("Undeploying context '%s' START", self.name)
             self.stack.delete()
             self.stack = None
             LOG.info("Undeploying context '%s' DONE", self.name)
 
-        if os.path.exists(self.key_filename):
-            try:
-                os.remove(self.key_filename)
-                os.remove(self.key_filename + ".pub")
-            except OSError:
-                LOG.exception("Key filename %s", self.key_filename)
+            self._delete_key_file()
 
         super(HeatContext, self).undeploy()
 
@@ -429,13 +459,17 @@ class HeatContext(Context):
             server.private_ip = self.stack.outputs.get(
                 attr_name.get("private_ip_attr", object()), None)
         else:
-            server = self._server_map.get(attr_name, None)
+            try:
+                server = self._server_map[attr_name]
+            except KeyError:
+                attr_name_no_suffix = attr_name.split("-")[0]
+                server = self._server_map.get(attr_name_no_suffix, None)
             if server is None:
                 return None
 
         pkey = pkg_resources.resource_string(
             'yardstick.resources',
-            h_join('files/yardstick_key', get_short_key_uuid(self.key_uuid))).decode('utf-8')
+            h_join('files/yardstick_key', self.name)).decode('utf-8')
 
         result = {
             "user": server.context.user,
index 2334e50..4bea991 100644 (file)
@@ -29,7 +29,6 @@ class KubernetesContext(Context):
     __context_type__ = "Kubernetes"
 
     def __init__(self):
-        self.name = ''
         self.ssh_key = ''
         self.key_path = ''
         self.public_key_path = ''
@@ -38,7 +37,7 @@ class KubernetesContext(Context):
         super(KubernetesContext, self).__init__()
 
     def init(self, attrs):
-        self.name = attrs.get('name', '')
+        super(KubernetesContext, self).init(attrs)
 
         template_cfg = attrs.get('servers', {})
         self.template = KubernetesTemplate(self.name, template_cfg)
index ffc82c8..fa619a9 100644 (file)
@@ -35,7 +35,6 @@ class NodeContext(Context):
     __context_type__ = "Node"
 
     def __init__(self):
-        self.name = None
         self.file_path = None
         self.nodes = []
         self.networks = {}
@@ -60,7 +59,8 @@ class NodeContext(Context):
 
     def init(self, attrs):
         """initializes itself from the supplied arguments"""
-        self.name = attrs["name"]
+        super(NodeContext, self).init(attrs)
+
         self.file_path = file_path = attrs.get("file", "pod.yaml")
 
         try:
@@ -157,7 +157,7 @@ class NodeContext(Context):
         except StopIteration:
             pass
         else:
-            raise ValueError("Duplicate nodes!!! Nodes: %s %s",
+            raise ValueError("Duplicate nodes!!! Nodes: %s %s" %
                              (node, duplicate))
 
         node["name"] = attr_name
@@ -204,7 +204,7 @@ class NodeContext(Context):
         self.client._put_file_shell(script_file, '~/{}'.format(script))
 
         cmd = 'sudo bash {} {}'.format(script, options)
-        status, stdout, stderr = self.client.execute(cmd)
+        status, _, stderr = self.client.execute(cmd)
         if status:
             raise RuntimeError(stderr)
 
index c931d85..a18b42e 100644 (file)
@@ -59,7 +59,6 @@ class OvsDpdkContext(Context):
         self.first_run = True
         self.dpdk_devbind = ''
         self.vm_names = []
-        self.name = None
         self.nfvi_host = []
         self.nodes = []
         self.networks = {}
@@ -74,8 +73,8 @@ class OvsDpdkContext(Context):
 
     def init(self, attrs):
         """initializes itself from the supplied arguments"""
+        super(OvsDpdkContext, self).init(attrs)
 
-        self.name = attrs["name"]
         self.file_path = attrs.get("file", "pod.yaml")
 
         self.nodes, self.nfvi_host, self.host_mgmt = \
index 9cca3e1..d762055 100644 (file)
@@ -43,7 +43,6 @@ class SriovContext(Context):
         self.first_run = True
         self.dpdk_devbind = ''
         self.vm_names = []
-        self.name = None
         self.nfvi_host = []
         self.nodes = []
         self.networks = {}
@@ -57,8 +56,8 @@ class SriovContext(Context):
 
     def init(self, attrs):
         """initializes itself from the supplied arguments"""
+        super(SriovContext, self).init(attrs)
 
-        self.name = attrs["name"]
         self.file_path = attrs.get("file", "pod.yaml")
 
         self.nodes, self.nfvi_host, self.host_mgmt = \
index 3e3aa99..3914e32 100644 (file)
@@ -23,6 +23,7 @@ class Param(object):
         self.task_args_file = kwargs.get('task-args-file')
         self.keep_deploy = kwargs.get('keep-deploy')
         self.parse_only = kwargs.get('parse-only')
+        self.render_only = kwargs.get('render-only')
         self.output_file = kwargs.get('output-file', '/tmp/yardstick.out')
         self.suite = kwargs.get('suite')
         self.task_id = kwargs.get('task_id')
index 9b1b3f8..7f6309a 100644 (file)
@@ -7,10 +7,6 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-""" Handler for yardstick command 'task' """
-
-from __future__ import absolute_import
-from __future__ import print_function
 import sys
 import os
 from collections import OrderedDict
@@ -31,9 +27,10 @@ from yardstick.benchmark.runners import base as base_runner
 from yardstick.common.constants import CONF_FILE
 from yardstick.common.yaml_loader import yaml_load
 from yardstick.dispatcher.base import Base as DispatcherBase
-from yardstick.common.task_template import TaskTemplate
-from yardstick.common import utils
 from yardstick.common import constants
+from yardstick.common import exceptions as y_exc
+from yardstick.common import task_template
+from yardstick.common import utils
 from yardstick.common.html_template import report_template
 
 output_file_default = "/tmp/yardstick.out"
@@ -57,7 +54,7 @@ class Task(object):     # pragma: no cover
         out_types = [s.strip() for s in dispatchers.split(',')]
         output_config['DEFAULT']['dispatcher'] = out_types
 
-    def start(self, args, **kwargs):
+    def start(self, args, **kwargs):  # pylint: disable=unused-argument
         """Start a benchmark scenario."""
 
         atexit.register(self.atexit_handler)
@@ -69,7 +66,7 @@ class Task(object):     # pragma: no cover
 
         try:
             output_config = utils.parse_ini_file(CONF_FILE)
-        except Exception:
+        except Exception:  # pylint: disable=broad-except
             # all error will be ignore, the default value is {}
             output_config = {}
 
@@ -89,8 +86,7 @@ class Task(object):     # pragma: no cover
 
         if args.suite:
             # 1.parse suite, return suite_params info
-            task_files, task_args, task_args_fnames = \
-                parser.parse_suite()
+            task_files, task_args, task_args_fnames = parser.parse_suite()
         else:
             task_files = [parser.path]
             task_args = [args.task_args]
@@ -103,32 +99,33 @@ class Task(object):     # pragma: no cover
             sys.exit(0)
 
         testcases = {}
-        # parse task_files
-        for i in range(0, len(task_files)):
-            one_task_start_time = time.time()
-            parser.path = task_files[i]
-            scenarios, run_in_parallel, meet_precondition, contexts = \
-                parser.parse_task(self.task_id, task_args[i],
-                                  task_args_fnames[i])
+        tasks = self._parse_tasks(parser, task_files, args, task_args,
+                                  task_args_fnames)
 
-            self.contexts.extend(contexts)
-
-            if not meet_precondition:
-                LOG.info("meet_precondition is %s, please check envrionment",
-                         meet_precondition)
+        # Execute task files.
+        for i, _ in enumerate(task_files):
+            one_task_start_time = time.time()
+            self.contexts.extend(tasks[i]['contexts'])
+            if not tasks[i]['meet_precondition']:
+                LOG.info('"meet_precondition" is %s, please check environment',
+                         tasks[i]['meet_precondition'])
                 continue
 
-            case_name = os.path.splitext(os.path.basename(task_files[i]))[0]
             try:
-                data = self._run(scenarios, run_in_parallel, args.output_file)
+                data = self._run(tasks[i]['scenarios'],
+                                 tasks[i]['run_in_parallel'],
+                                 output_config)
             except KeyboardInterrupt:
                 raise
-            except Exception:
-                LOG.error('Testcase: "%s" FAILED!!!', case_name, exc_info=True)
-                testcases[case_name] = {'criteria': 'FAIL', 'tc_data': []}
+            except Exception:  # pylint: disable=broad-except
+                LOG.error('Testcase: "%s" FAILED!!!', tasks[i]['case_name'],
+                          exc_info=True)
+                testcases[tasks[i]['case_name']] = {'criteria': 'FAIL',
+                                                    'tc_data': []}
             else:
-                LOG.info('Testcase: "%s" SUCCESS!!!', case_name)
-                testcases[case_name] = {'criteria': 'PASS', 'tc_data': data}
+                LOG.info('Testcase: "%s" SUCCESS!!!', tasks[i]['case_name'])
+                testcases[tasks[i]['case_name']] = {'criteria': 'PASS',
+                                                    'tc_data': data}
 
             if args.keep_deploy:
                 # keep deployment, forget about stack
@@ -151,9 +148,8 @@ class Task(object):     # pragma: no cover
         LOG.info("Total finished in %d secs",
                  total_end_time - total_start_time)
 
-        scenario = scenarios[0]
-        LOG.info("To generate report, execute command "
-                 "'yardstick report generate %(task_id)s %(tc)s'", scenario)
+        LOG.info('To generate report, execute command "yardstick report '
+                 'generate %s <yaml_name>"', self.task_id)
         LOG.info("Task ALL DONE, exiting")
         return result
 
@@ -232,11 +228,12 @@ class Task(object):     # pragma: no cover
 
     def _do_output(self, output_config, result):
         dispatchers = DispatcherBase.get(output_config)
+        dispatchers = (d for d in dispatchers if d.__dispatcher_type__ != 'Influxdb')
 
         for dispatcher in dispatchers:
             dispatcher.flush_result_data(result)
 
-    def _run(self, scenarios, run_in_parallel, output_file):
+    def _run(self, scenarios, run_in_parallel, output_config):
         """Deploys context and calls runners"""
         for context in self.contexts:
             context.deploy()
@@ -247,14 +244,14 @@ class Task(object):     # pragma: no cover
         # Start all background scenarios
         for scenario in filter(_is_background_scenario, scenarios):
             scenario["runner"] = dict(type="Duration", duration=1000000000)
-            runner = self.run_one_scenario(scenario, output_file)
+            runner = self.run_one_scenario(scenario, output_config)
             background_runners.append(runner)
 
         runners = []
         if run_in_parallel:
             for scenario in scenarios:
                 if not _is_background_scenario(scenario):
-                    runner = self.run_one_scenario(scenario, output_file)
+                    runner = self.run_one_scenario(scenario, output_config)
                     runners.append(runner)
 
             # Wait for runners to finish
@@ -263,12 +260,12 @@ class Task(object):     # pragma: no cover
                 if status != 0:
                     raise RuntimeError(
                         "{0} runner status {1}".format(runner.__execution_type__, status))
-                LOG.info("Runner ended, output in %s", output_file)
+                LOG.info("Runner ended")
         else:
             # run serially
             for scenario in scenarios:
                 if not _is_background_scenario(scenario):
-                    runner = self.run_one_scenario(scenario, output_file)
+                    runner = self.run_one_scenario(scenario, output_config)
                     status = runner_join(runner, background_runners, self.outputs, result)
                     if status != 0:
                         LOG.error('Scenario NO.%s: "%s" ERROR!',
@@ -276,7 +273,7 @@ class Task(object):     # pragma: no cover
                                   scenario.get('type'))
                         raise RuntimeError(
                             "{0} runner status {1}".format(runner.__execution_type__, status))
-                    LOG.info("Runner ended, output in %s", output_file)
+                    LOG.info("Runner ended")
 
         # Abort background runners
         for runner in background_runners:
@@ -313,10 +310,34 @@ class Task(object):     # pragma: no cover
         else:
             return op
 
-    def run_one_scenario(self, scenario_cfg, output_file):
+    def _parse_tasks(self, parser, task_files, args, task_args,
+                     task_args_fnames):
+        tasks = []
+
+        # Parse task_files.
+        for i, _ in enumerate(task_files):
+            parser.path = task_files[i]
+            tasks.append(parser.parse_task(self.task_id, task_args[i],
+                                           task_args_fnames[i]))
+            tasks[i]['case_name'] = os.path.splitext(
+                os.path.basename(task_files[i]))[0]
+
+        if args.render_only:
+            utils.makedirs(args.render_only)
+            for idx, task in enumerate(tasks):
+                output_file_name = os.path.abspath(os.path.join(
+                    args.render_only,
+                    '{0:03d}-{1}.yml'.format(idx, task['case_name'])))
+                utils.write_file(output_file_name, task['rendered'])
+
+            sys.exit(0)
+
+        return tasks
+
+    def run_one_scenario(self, scenario_cfg, output_config):
         """run one scenario using context"""
         runner_cfg = scenario_cfg["runner"]
-        runner_cfg['output_filename'] = output_file
+        runner_cfg['output_config'] = output_config
 
         options = scenario_cfg.get('options', {})
         scenario_cfg['options'] = self._parse_options(options)
@@ -344,7 +365,7 @@ class Task(object):     # pragma: no cover
             try:
                 config_context_target(item)
             except KeyError:
-                pass
+                LOG.debug("Got a KeyError in config_context_target(%s)", item)
             else:
                 break
 
@@ -478,33 +499,42 @@ class TaskParser(object):       # pragma: no cover
 
         return valid_task_files, valid_task_args, valid_task_args_fnames
 
-    def parse_task(self, task_id, task_args=None, task_args_file=None):
-        """parses the task file and return an context and scenario instances"""
-        LOG.info("Parsing task config: %s", self.path)
+    def _render_task(self, task_args, task_args_file):
+        """Render the input task with the given arguments
 
+        :param task_args: (dict) arguments to render the task
+        :param task_args_file: (str) file containing the arguments to render
+                               the task
+        :return: (str) task file rendered
+        """
         try:
             kw = {}
             if task_args_file:
                 with open(task_args_file) as f:
-                    kw.update(parse_task_args("task_args_file", f.read()))
-            kw.update(parse_task_args("task_args", task_args))
+                    kw.update(parse_task_args('task_args_file', f.read()))
+            kw.update(parse_task_args('task_args', task_args))
         except TypeError:
-            raise TypeError()
+            raise y_exc.TaskRenderArgumentError()
 
+        input_task = None
         try:
             with open(self.path) as f:
-                try:
-                    input_task = f.read()
-                    rendered_task = TaskTemplate.render(input_task, **kw)
-                except Exception as e:
-                    LOG.exception('Failed to render template:\n%s\n', input_task)
-                    raise e
-                LOG.debug("Input task is:\n%s\n", rendered_task)
-
-                cfg = yaml_load(rendered_task)
-        except IOError as ioerror:
-            sys.exit(ioerror)
+                input_task = f.read()
+            rendered_task = task_template.TaskTemplate.render(input_task, **kw)
+            LOG.debug('Input task is:\n%s', rendered_task)
+            parsed_task = yaml_load(rendered_task)
+        except (IOError, OSError):
+            raise y_exc.TaskReadError(task_file=self.path)
+        except Exception:
+            raise y_exc.TaskRenderError(input_task=input_task)
 
+        return parsed_task, rendered_task
+
+    def parse_task(self, task_id, task_args=None, task_args_file=None):
+        """parses the task file and return an context and scenario instances"""
+        LOG.info("Parsing task config: %s", self.path)
+
+        cfg, rendered = self._render_task(task_args, task_args_file)
         self._check_schema(cfg["schema"], "task")
         meet_precondition = self._check_precondition(cfg)
 
@@ -518,17 +548,15 @@ class TaskParser(object):       # pragma: no cover
             context_cfgs = [{"type": "Dummy"}]
 
         contexts = []
-        name_suffix = '-{}'.format(task_id[:8])
         for cfg_attrs in context_cfgs:
-            try:
-                cfg_attrs['name'] = '{}{}'.format(cfg_attrs['name'],
-                                                  name_suffix)
-            except KeyError:
-                pass
+
+            cfg_attrs['task_id'] = task_id
             # default to Heat context because we are testing OpenStack
             context_type = cfg_attrs.get("type", "Heat")
             context = Context.get(context_type)
             context.init(cfg_attrs)
+            # Update the name in case the context has used the name_suffix
+            cfg_attrs['name'] = context.name
             contexts.append(context)
 
         run_in_parallel = cfg.get("run_in_parallel", False)
@@ -542,16 +570,74 @@ class TaskParser(object):       # pragma: no cover
             # relative to task path
             scenario["task_path"] = os.path.dirname(self.path)
 
-            change_server_name(scenario, name_suffix)
-
-            try:
-                for node in scenario['nodes']:
-                    scenario['nodes'][node] += name_suffix
-            except KeyError:
-                pass
+            self._change_node_names(scenario, contexts)
 
         # TODO we need something better here, a class that represent the file
-        return cfg["scenarios"], run_in_parallel, meet_precondition, contexts
+        return {'scenarios': cfg['scenarios'],
+                'run_in_parallel': run_in_parallel,
+                'meet_precondition': meet_precondition,
+                'contexts': contexts,
+                'rendered': rendered}
+
+    @staticmethod
+    def _change_node_names(scenario, contexts):
+        """Change the node names in a scenario, depending on the context config
+
+        The nodes (VMs or physical servers) are referred in the context section
+        with the name of the server and the name of the context:
+            <server name>.<context name>
+
+        If the context is going to be undeployed at the end of the test, the
+        task ID is suffixed to the name to avoid interferences with previous
+        deployments. If the context needs to be deployed at the end of the
+        test, the name assigned is kept.
+
+        There are several places where a node name could appear in the scenario
+        configuration:
+        scenario:
+          host: athena.demo
+          target: kratos.demo
+          targets:
+            - athena.demo
+            - kratos.demo
+
+        scenario:
+          options:
+            server_name:  # JIRA: YARDSTICK-810
+              host: athena.demo
+              target: kratos.demo
+
+        scenario:
+          nodes:
+            tg__0: tg_0.yardstick
+            vnf__0: vnf_0.yardstick
+        """
+        def qualified_name(name):
+            node_name, context_name = name.split('.')
+            try:
+                ctx = next((context for context in contexts
+                       if context.assigned_name == context_name))
+            except StopIteration:
+                raise y_exc.ScenarioConfigContextNameNotFound(
+                    context_name=context_name)
+
+            return '{}.{}'.format(node_name, ctx.name)
+
+        if 'host' in scenario:
+            scenario['host'] = qualified_name(scenario['host'])
+        if 'target' in scenario:
+            scenario['target'] = qualified_name(scenario['target'])
+        server_name = scenario.get('options', {}).get('server_name', {})
+        if 'host' in server_name:
+            server_name['host'] = qualified_name(server_name['host'])
+        if 'target' in server_name:
+            server_name['target'] = qualified_name(server_name['target'])
+        if 'targets' in scenario:
+            for idx, target in enumerate(scenario['targets']):
+                scenario['targets'][idx] = qualified_name(target)
+        if 'nodes' in scenario:
+            for scenario_node, target in scenario['nodes'].items():
+                scenario['nodes'][scenario_node] = qualified_name(target)
 
     def _check_schema(self, cfg_schema, schema_type):
         """Check if config file is using the correct schema type"""
@@ -685,30 +771,3 @@ def parse_task_args(src_name, args):
               % {"src": src_name, "src_type": type(kw)})
         raise TypeError()
     return kw
-
-
-def change_server_name(scenario, suffix):
-
-    def add_suffix(cfg, key):
-        try:
-            value = cfg[key]
-        except KeyError:
-            pass
-        else:
-            try:
-                value['name'] += suffix
-            except TypeError:
-                cfg[key] += suffix
-
-    server_name = scenario.get('options', {}).get('server_name', {})
-
-    add_suffix(scenario, 'host')
-    add_suffix(scenario, 'target')
-    add_suffix(server_name, 'host')
-    add_suffix(server_name, 'target')
-
-    try:
-        key = 'targets'
-        scenario[key] = ['{}{}'.format(a, suffix) for a in scenario[key]]
-    except KeyError:
-        pass
index a887fa5..99386a4 100755 (executable)
@@ -23,6 +23,7 @@ import multiprocessing
 import subprocess
 import time
 import traceback
+from subprocess import CalledProcessError
 
 import importlib
 
@@ -30,6 +31,7 @@ from six.moves.queue import Empty
 
 import yardstick.common.utils as utils
 from yardstick.benchmark.scenarios import base as base_scenario
+from yardstick.dispatcher.base import Base as DispatcherBase
 
 log = logging.getLogger(__name__)
 
@@ -39,7 +41,7 @@ def _execute_shell_command(command):
     exitcode = 0
     try:
         output = subprocess.check_output(command, shell=True)
-    except Exception:
+    except CalledProcessError:
         exitcode = -1
         output = traceback.format_exc()
         log.error("exec command '%s' error:\n ", command)
@@ -137,6 +139,8 @@ class Runner(object):
             Runner.release(runner)
 
     def __init__(self, config):
+        self.task_id = None
+        self.case_name = None
         self.config = config
         self.periodic_action_process = None
         self.output_queue = multiprocessing.Queue()
@@ -170,6 +174,8 @@ class Runner(object):
         cls = getattr(module, path_split[-1])
 
         self.config['object'] = class_name
+        self.case_name = scenario_cfg['tc']
+        self.task_id = scenario_cfg['task_id']
         self.aborted.clear()
 
         # run a potentially configured pre-start action
@@ -245,10 +251,24 @@ class Runner(object):
 
     def get_result(self):
         result = []
+
+        dispatcher = self.config['output_config']['DEFAULT']['dispatcher']
+        output_in_influxdb = 'influxdb' in dispatcher
+
         while not self.result_queue.empty():
             log.debug("result_queue size %s", self.result_queue.qsize())
             try:
-                result.append(self.result_queue.get(True, 1))
+                one_record = self.result_queue.get(True, 1)
             except Empty:
                 pass
+            else:
+                if output_in_influxdb:
+                    self._output_to_influxdb(one_record)
+
+                result.append(one_record)
         return result
+
+    def _output_to_influxdb(self, record):
+        dispatchers = DispatcherBase.get(self.config['output_config'])
+        dispatcher = next((d for d in dispatchers if d.__dispatcher_type__ == 'Influxdb'))
+        dispatcher.upload_one_record(record, self.case_name, '', task_id=self.task_id)
index e8796bf..2e8b595 100644 (file)
@@ -7,14 +7,12 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-from __future__ import print_function
-from __future__ import absolute_import
-
 import logging
 
 from yardstick.benchmark.scenarios import base
 import yardstick.common.openstack_utils as op_utils
 
+
 LOG = logging.getLogger(__name__)
 
 
@@ -30,7 +28,7 @@ class DeleteNetwork(base.Scenario):
 
         self.network_id = self.options.get("network_id", None)
 
-        self.neutron_client = op_utils.get_neutron_client()
+        self.shade_client = op_utils.get_shade_client()
 
         self.setup_done = False
 
@@ -45,7 +43,7 @@ class DeleteNetwork(base.Scenario):
         if not self.setup_done:
             self.setup()
 
-        status = op_utils.delete_neutron_net(self.neutron_client,
+        status = op_utils.delete_neutron_net(self.shade_client,
                                              network_id=self.network_id)
         if status:
             result.update({"delete_network": 1})
@@ -53,3 +51,4 @@ class DeleteNetwork(base.Scenario):
         else:
             result.update({"delete_network": 0})
             LOG.error("Delete network failed!")
+        return status
index d5feabb..aaab213 100644 (file)
@@ -34,11 +34,13 @@ def get_credentials(service):  # pragma: no cover
 
     # The most common way to pass these info to the script is to do it through
     # environment variables.
+    # NOTE(ralonsoh): OS_TENANT_NAME is deprecated.
+    project_name = os.environ.get('OS_PROJECT_NAME', 'admin')
     creds.update({
         "username": os.environ.get('OS_USERNAME', "admin"),
         password: os.environ.get("OS_PASSWORD", 'admin'),
         "auth_url": os.environ.get("OS_AUTH_URL"),
-        tenant: os.environ.get("OS_TENANT_NAME", "admin"),
+        tenant: os.environ.get("OS_TENANT_NAME", project_name),
     })
     cacert = os.environ.get("OS_CACERT")
     if cacert is not None:
@@ -59,7 +61,7 @@ def get_instances(nova_client):  # pragma: no cover
     try:
         instances = nova_client.servers.list(search_opts={'all_tenants': 1})
         return instances
-    except Exception as e:
+    except Exception as e:  # pylint: disable=broad-except
         print("Error [get_instances(nova_client)]:", e)
         return None
 
@@ -72,7 +74,7 @@ def get_SFs(nova_client):  # pragma: no cover
             if "sfc_test" not in instance.name:
                 SFs.append(instance)
         return SFs
-    except Exception as e:
+    except Exception as e:  # pylint: disable=broad-except
         print("Error [get_SFs(nova_client)]:", e)
         return None
 
@@ -93,7 +95,7 @@ def create_floating_ips(neutron_client):  # pragma: no cover
             ip_json = neutron_client.create_floatingip({'floatingip': props})
             fip_addr = ip_json['floatingip']['floating_ip_address']
             ips.append(fip_addr)
-    except Exception as e:
+    except Exception as e:  # pylint: disable=broad-except
         print("Error [create_floating_ip(neutron_client)]:", e)
         return None
     return ips
@@ -106,7 +108,7 @@ def floatIPtoSFs(SFs, floatips):  # pragma: no cover
             SF.add_floating_ip(floatips[i])
             i = i + 1
         return True
-    except Exception as e:
+    except Exception as e:  # pylint: disable=broad-except
         print(("Error [add_floating_ip(nova_client, '%s', '%s')]:" %
                (SF, floatips[i]), e))
         return False
@@ -122,7 +124,3 @@ def get_an_IP():  # pragma: no cover
     floatips = create_floating_ips(neutron_client)
     floatIPtoSFs(SFs, floatips)
     return floatips
-
-
-if __name__ == '__main__':  # pragma: no cover
-    get_an_IP()
index b94bfc9..0e47852 100644 (file)
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-""" NSPerf specific scenario definition """
-
-from __future__ import absolute_import
 
+import copy
 import logging
-import errno
 
 import ipaddress
-
-import copy
+from itertools import chain
 import os
 import sys
-import re
-from itertools import chain
 
 import six
 import yaml
-from collections import defaultdict
 
-from yardstick.benchmark.scenarios import base
+from yardstick.benchmark.scenarios import base as scenario_base
+from yardstick.error import IncorrectConfig
 from yardstick.common.constants import LOG_DIR
 from yardstick.common.process import terminate_children
-from yardstick.common.utils import import_modules_from_package, itersubclasses
-from yardstick.common.yaml_loader import yaml_load
+from yardstick.common import utils
 from yardstick.network_services.collector.subscriber import Collector
 from yardstick.network_services.vnf_generic import vnfdgen
 from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
-from yardstick.network_services.traffic_profile.base import TrafficProfile
+from yardstick.network_services import traffic_profile
+from yardstick.network_services.traffic_profile import base as tprofile_base
 from yardstick.network_services.utils import get_nsb_option
 from yardstick import ssh
 
-
-LOG = logging.getLogger(__name__)
-
-
-class SSHError(Exception):
-    """Class handles ssh connection error exception"""
-    pass
-
-
-class SSHTimeout(SSHError):
-    """Class handles ssh connection timeout exception"""
-    pass
-
-
-class IncorrectConfig(Exception):
-    """Class handles incorrect configuration during setup"""
-    pass
-
-
-class IncorrectSetup(Exception):
-    """Class handles incorrect setup during setup"""
-    pass
+traffic_profile.register_modules()
 
 
-class SshManager(object):
-    def __init__(self, node, timeout=120):
-        super(SshManager, self).__init__()
-        self.node = node
-        self.conn = None
-        self.timeout = timeout
-
-    def __enter__(self):
-        """
-        args -> network device mappings
-        returns -> ssh connection ready to be used
-        """
-        try:
-            self.conn = ssh.SSH.from_node(self.node)
-            self.conn.wait(timeout=self.timeout)
-        except SSHError as error:
-            LOG.info("connect failed to %s, due to %s", self.node["ip"], error)
-        # self.conn defaults to None
-        return self.conn
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        if self.conn:
-            self.conn.close()
-
-
-def find_relative_file(path, task_path):
-    """
-    Find file in one of places: in abs of path or
-    relative to TC scenario file. In this order.
-
-    :param path:
-    :param task_path:
-    :return str: full path to file
-    """
-    # fixme: create schema to validate all fields have been provided
-    for lookup in [os.path.abspath(path), os.path.join(task_path, path)]:
-        try:
-            with open(lookup):
-                return lookup
-        except IOError:
-            pass
-    raise IOError(errno.ENOENT, 'Unable to find {} file'.format(path))
-
-
-def open_relative_file(path, task_path):
-    try:
-        return open(path)
-    except IOError as e:
-        if e.errno == errno.ENOENT:
-            return open(os.path.join(task_path, path))
-        raise
+LOG = logging.getLogger(__name__)
 
 
-class NetworkServiceTestCase(base.Scenario):
+class NetworkServiceTestCase(scenario_base.Scenario):
     """Class handles Generic framework to do pre-deployment VNF &
        Network service testing  """
 
@@ -130,16 +53,12 @@ class NetworkServiceTestCase(base.Scenario):
         self.scenario_cfg = scenario_cfg
         self.context_cfg = context_cfg
 
-        # fixme: create schema to validate all fields have been provided
-        with open_relative_file(scenario_cfg["topology"],
-                                scenario_cfg['task_path']) as stream:
-            topology_yaml = yaml_load(stream)
-
-        self.topology = topology_yaml["nsd:nsd-catalog"]["nsd"][0]
+        self._render_topology()
         self.vnfs = []
         self.collector = None
         self.traffic_profile = None
         self.node_netdevs = {}
+        self.bin_path = get_nsb_option('bin_path', '')
 
     def _get_ip_flow_range(self, ip_start_range):
 
@@ -211,37 +130,47 @@ class NetworkServiceTestCase(base.Scenario):
     def _get_traffic_profile(self):
         profile = self.scenario_cfg["traffic_profile"]
         path = self.scenario_cfg["task_path"]
-        with open_relative_file(profile, path) as infile:
+        with utils.open_relative_file(profile, path) as infile:
+            return infile.read()
+
+    def _get_topology(self):
+        topology = self.scenario_cfg["topology"]
+        path = self.scenario_cfg["task_path"]
+        with utils.open_relative_file(topology, path) as infile:
             return infile.read()
 
     def _fill_traffic_profile(self):
-        traffic_mapping = self._get_traffic_profile()
-        traffic_map_data = {
+        tprofile = self._get_traffic_profile()
+        extra_args = self.scenario_cfg.get('extra_args', {})
+        tprofile_data = {
             'flow': self._get_traffic_flow(),
             'imix': self._get_traffic_imix(),
-            TrafficProfile.UPLINK: {},
-            TrafficProfile.DOWNLINK: {},
+            tprofile_base.TrafficProfile.UPLINK: {},
+            tprofile_base.TrafficProfile.DOWNLINK: {},
+            'extra_args': extra_args
         }
 
-        traffic_vnfd = vnfdgen.generate_vnfd(traffic_mapping, traffic_map_data)
-        self.traffic_profile = TrafficProfile.get(traffic_vnfd)
-        return self.traffic_profile
+        traffic_vnfd = vnfdgen.generate_vnfd(tprofile, tprofile_data)
+        self.traffic_profile = tprofile_base.TrafficProfile.get(traffic_vnfd)
+
+    def _render_topology(self):
+        topology = self._get_topology()
+        topology_args = self.scenario_cfg.get('extra_args', {})
+        topology_data = {
+            'extra_args': topology_args
+        }
+        topology_yaml = vnfdgen.generate_vnfd(topology, topology_data)
+        self.topology = topology_yaml["nsd:nsd-catalog"]["nsd"][0]
 
     def _find_vnf_name_from_id(self, vnf_id):
         return next((vnfd["vnfd-id-ref"]
                      for vnfd in self.topology["constituent-vnfd"]
                      if vnf_id == vnfd["member-vnf-index"]), None)
 
-    @staticmethod
-    def get_vld_networks(networks):
-        # network name is vld_id
-        vld_map = {}
-        for name, n in networks.items():
-            try:
-                vld_map[n['vld_id']] = n
-            except KeyError:
-                vld_map[name] = n
-        return vld_map
+    def _find_vnfd_from_vnf_idx(self, vnf_id):
+        return next((vnfd
+                     for vnfd in self.topology["constituent-vnfd"]
+                     if vnf_id == vnfd["member-vnf-index"]), None)
 
     @staticmethod
     def find_node_if(nodes, name, if_name, vld_id):
@@ -293,7 +222,9 @@ class NetworkServiceTestCase(base.Scenario):
                 node1_if["peer_ifname"] = node0_if_name
 
                 # just load the network
-                vld_networks = self.get_vld_networks(self.context_cfg["networks"])
+                vld_networks = {n.get('vld_id', name): n for name, n in
+                                self.context_cfg["networks"].items()}
+
                 node0_if["network"] = vld_networks.get(vld["id"], {})
                 node1_if["network"] = vld_networks.get(vld["id"], {})
 
@@ -332,10 +263,6 @@ class NetworkServiceTestCase(base.Scenario):
             node0_if["peer_intf"] = node1_copy
             node1_if["peer_intf"] = node0_copy
 
-    def _find_vnfd_from_vnf_idx(self, vnf_idx):
-        return next((vnfd for vnfd in self.topology["constituent-vnfd"]
-                     if vnf_idx == vnfd["member-vnf-index"]), None)
-
     def _update_context_with_topology(self):
         for vnfd in self.topology["constituent-vnfd"]:
             vnf_idx = vnfd["member-vnf-index"]
@@ -343,43 +270,6 @@ class NetworkServiceTestCase(base.Scenario):
             vnfd = self._find_vnfd_from_vnf_idx(vnf_idx)
             self.context_cfg["nodes"][vnf_name].update(vnfd)
 
-    def _probe_netdevs(self, node, node_dict, timeout=120):
-        try:
-            return self.node_netdevs[node]
-        except KeyError:
-            pass
-
-        netdevs = {}
-        cmd = "PATH=$PATH:/sbin:/usr/sbin ip addr show"
-
-        with SshManager(node_dict, timeout=timeout) as conn:
-            if conn:
-                exit_status = conn.execute(cmd)[0]
-                if exit_status != 0:
-                    raise IncorrectSetup("Node's %s lacks ip tool." % node)
-                exit_status, stdout, _ = conn.execute(
-                    self.FIND_NETDEVICE_STRING)
-                if exit_status != 0:
-                    raise IncorrectSetup(
-                        "Cannot find netdev info in sysfs" % node)
-                netdevs = node_dict['netdevs'] = self.parse_netdev_info(stdout)
-
-        self.node_netdevs[node] = netdevs
-        return netdevs
-
-    @classmethod
-    def _probe_missing_values(cls, netdevs, network):
-
-        mac_lower = network['local_mac'].lower()
-        for netdev in netdevs.values():
-            if netdev['address'].lower() != mac_lower:
-                continue
-            network.update({
-                'driver': netdev['driver'],
-                'vpci': netdev['pci_bus_id'],
-                'ifindex': netdev['ifindex'],
-            })
-
     def _generate_pod_yaml(self):
         context_yaml = os.path.join(LOG_DIR, "pod-{}.yaml".format(self.scenario_cfg['task_id']))
         # convert OrderedDict to a list
@@ -405,82 +295,16 @@ class NetworkServiceTestCase(base.Scenario):
             pass
         return new_node
 
-    TOPOLOGY_REQUIRED_KEYS = frozenset({
-        "vpci", "local_ip", "netmask", "local_mac", "driver"})
-
     def map_topology_to_infrastructure(self):
         """ This method should verify if the available resources defined in pod.yaml
         match the topology.yaml file.
 
         :return: None. Side effect: context_cfg is updated
         """
-        num_nodes = len(self.context_cfg["nodes"])
-        # OpenStack instance creation time is probably proportional to the number
-        # of instances
-        timeout = 120 * num_nodes
-        for node, node_dict in self.context_cfg["nodes"].items():
-
-            for network in node_dict["interfaces"].values():
-                missing = self.TOPOLOGY_REQUIRED_KEYS.difference(network)
-                if not missing:
-                    continue
-
-                # only ssh probe if there are missing values
-                # ssh probe won't work on Ixia, so we had better define all our values
-                try:
-                    netdevs = self._probe_netdevs(node, node_dict, timeout=timeout)
-                except (SSHError, SSHTimeout):
-                    raise IncorrectConfig(
-                        "Unable to probe missing interface fields '%s', on node %s "
-                        "SSH Error" % (', '.join(missing), node))
-                try:
-                    self._probe_missing_values(netdevs, network)
-                except KeyError:
-                    pass
-                else:
-                    missing = self.TOPOLOGY_REQUIRED_KEYS.difference(
-                        network)
-                if missing:
-                    raise IncorrectConfig(
-                        "Require interface fields '%s' not found, topology file "
-                        "corrupted" % ', '.join(missing))
-
-        # we have to generate pod.yaml here so we have vpci and driver
-        self._generate_pod_yaml()
         # 3. Use topology file to find connections & resolve dest address
         self._resolve_topology()
         self._update_context_with_topology()
 
-    FIND_NETDEVICE_STRING = r"""find /sys/devices/pci* -type d -name net -exec sh -c '{ grep -sH ^ \
-$1/ifindex $1/address $1/operstate $1/device/vendor $1/device/device \
-$1/device/subsystem_vendor $1/device/subsystem_device ; \
-printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
-' sh  \{\}/* \;
-"""
-    BASE_ADAPTER_RE = re.compile(
-        '^/sys/devices/(.*)/net/([^/]*)/([^:]*):(.*)$', re.M)
-
-    @classmethod
-    def parse_netdev_info(cls, stdout):
-        network_devices = defaultdict(dict)
-        matches = cls.BASE_ADAPTER_RE.findall(stdout)
-        for bus_path, interface_name, name, value in matches:
-            dirname, bus_id = os.path.split(bus_path)
-            if 'virtio' in bus_id:
-                # for some stupid reason VMs include virtio1/
-                # in PCI device path
-                bus_id = os.path.basename(dirname)
-            # remove extra 'device/' from 'device/vendor,
-            # device/subsystem_vendor', etc.
-            if 'device/' in name:
-                name = name.split('/')[1]
-            network_devices[interface_name][name] = value
-            network_devices[interface_name][
-                'interface_name'] = interface_name
-            network_devices[interface_name]['pci_bus_id'] = bus_id
-        # convert back to regular dict
-        return dict(network_devices)
-
     @classmethod
     def get_vnf_impl(cls, vnf_model_id):
         """ Find the implementing class from vnf_model["vnf"]["name"] field
@@ -488,13 +312,14 @@ printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
         :param vnf_model_id: parsed vnfd model ID field
         :return: subclass of GenericVNF
         """
-        import_modules_from_package(
+        utils.import_modules_from_package(
             "yardstick.network_services.vnf_generic.vnf")
         expected_name = vnf_model_id
         classes_found = []
 
         def impl():
-            for name, class_ in ((c.__name__, c) for c in itersubclasses(GenericVNF)):
+            for name, class_ in ((c.__name__, c) for c in
+                                 utils.itersubclasses(GenericVNF)):
                 if name == expected_name:
                     yield class_
                 classes_found.append(name)
@@ -547,7 +372,7 @@ printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
             context_cfg = self.context_cfg
 
         vnfs = []
-        # we assume OrderedDict for consistenct in instantiation
+        # we assume OrderedDict for consistency in instantiation
         for node_name, node in context_cfg["nodes"].items():
             LOG.debug(node)
             try:
@@ -556,7 +381,7 @@ printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
                 LOG.debug("no model for %s, skipping", node_name)
                 continue
             file_path = scenario_cfg['task_path']
-            with open_relative_file(file_name, file_path) as stream:
+            with utils.open_relative_file(file_name, file_path) as stream:
                 vnf_model = stream.read()
             vnfd = vnfdgen.generate_vnfd(vnf_model, node)
             # TODO: here add extra context_cfg["nodes"] regardless of template
@@ -606,6 +431,9 @@ printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
                 vnf.terminate()
             raise
 
+        # we have to generate pod.yaml here after VNF has probed so we know vpci and driver
+        self._generate_pod_yaml()
+
         # 3. Run experiment
         # Start listeners first to avoid losing packets
         for traffic_gen in traffic_runners:
index e2e8bf6..a3488a2 100644 (file)
@@ -7,10 +7,6 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-""" Handler for yardstick command 'task' """
-from __future__ import print_function
-from __future__ import absolute_import
-
 import logging
 
 from yardstick.benchmark.core.task import Task
@@ -42,6 +38,8 @@ class TaskCommands(object):     # pragma: no cover
              action="store_true")
     @cliargs("--parse-only", help="parse the config file and exit",
              action="store_true")
+    @cliargs("--render-only", help="Render the tasks files, store the result "
+             "in the directory given and exit", type=str, dest="render_only")
     @cliargs("--output-file", help="file where output is stored, default %s" %
              output_file_default, default=output_file_default)
     @cliargs("--suite", help="process test suite file instead of a task file",
@@ -54,9 +52,8 @@ class TaskCommands(object):     # pragma: no cover
         LOG.info('Task START')
         try:
             result = Task().start(param, **kwargs)
-        except Exception as e:
+        except Exception as e:  # pylint: disable=broad-except
             self._write_error_data(e)
-            LOG.exception("")
 
         if result.get('result', {}).get('criteria') == 'PASS':
             LOG.info('Task SUCCESS')
index 9a4426b..be262c2 100644 (file)
@@ -33,7 +33,7 @@ from six import StringIO
 from chainmap import ChainMap
 
 from yardstick.common.utils import Timer
-
+from yardstick.common import constants as consts
 
 cgitb.enable(format="text")
 
@@ -435,6 +435,7 @@ class AnsibleCommon(object):
         ansible_dict = dict(os.environ, **{
             "ANSIBLE_LOG_PATH": os.path.join(directory, log_file),
             "ANSIBLE_LOG_BASE": directory,
+            "ANSIBLE_ROLES_PATH": consts.ANSIBLE_ROLES_PATH,
             # # required for SSH to work
             # "ANSIBLE_SSH_ARGS": "-o UserKnownHostsFile=/dev/null "
             #                     "-o GSSAPIAuthentication=no "
@@ -516,7 +517,7 @@ class AnsibleCommon(object):
         #  playbook dir: use include to point to files in  consts.ANSIBLE_DIR
 
         if not os.path.isdir(directory):
-            raise OSError("Not a directory, %s", directory)
+            raise OSError("Not a directory, %s" % directory)
         timeout = self.get_timeout(timeout, self.default_timeout)
 
         self.counter += 1
index 646a1f2..43c2c19 100644 (file)
@@ -83,6 +83,7 @@ YARDSTICK_ROOT_PATH = dirname(
 TASK_LOG_DIR = get_param('dir.tasklog', '/var/log/yardstick/')
 CONF_SAMPLE_DIR = join(REPOS_DIR, 'etc/yardstick/')
 ANSIBLE_DIR = join(REPOS_DIR, 'ansible')
+ANSIBLE_ROLES_PATH = join(REPOS_DIR, 'ansible/roles/')
 SAMPLE_CASE_DIR = join(REPOS_DIR, 'samples')
 TESTCASE_DIR = join(YARDSTICK_ROOT_PATH, 'tests/opnfv/test_cases/')
 TESTSUITE_DIR = join(YARDSTICK_ROOT_PATH, 'tests/opnfv/test_suites/')
index 8e6077b..9946bf1 100644 (file)
@@ -68,3 +68,35 @@ class HeatTemplateError(YardstickException):
     """Error in Heat during the stack deployment"""
     message = ('Error in Heat during the creation of the OpenStack stack '
                '"%(stack_name)"')
+
+
+class IPv6RangeError(YardstickException):
+    message = 'Start IP "%(start_ip)s" is greater than end IP "%(end_ip)s"'
+
+
+class TrafficProfileNotImplemented(YardstickException):
+    message = 'No implementation for traffic profile %(profile_class)s.'
+
+
+class DPDKSetupDriverError(YardstickException):
+    message = '"igb_uio" driver is not loaded'
+
+
+class ScenarioConfigContextNameNotFound(YardstickException):
+    message = 'Context name "%(context_name)s" not found'
+
+
+class StackCreationInterrupt(YardstickException):
+    message = 'Stack create interrupted.'
+
+
+class TaskRenderArgumentError(YardstickException):
+    message = 'Error reading the task input arguments'
+
+
+class TaskReadError(YardstickException):
+    message = 'Failed to read task %(task_file)s'
+
+
+class TaskRenderError(YardstickException):
+    message = 'Failed to render template:\n%(input_task)s'
index c5b17c2..e3f67ba 100644 (file)
@@ -15,6 +15,7 @@ import logging
 from keystoneauth1 import loading
 from keystoneauth1 import session
 import shade
+from shade import exc
 
 from cinderclient import client as cinderclient
 from novaclient import client as novaclient
@@ -32,38 +33,22 @@ DEFAULT_API_VERSION = '2'
 #   CREDENTIALS
 # *********************************************
 def get_credentials():
-    """Returns a creds dictionary filled with parsed from env"""
-    creds = {}
-
-    keystone_api_version = os.getenv('OS_IDENTITY_API_VERSION')
-
-    if keystone_api_version is None or keystone_api_version == '2':
-        keystone_v3 = False
-        tenant_env = 'OS_TENANT_NAME'
-        tenant = 'tenant_name'
-    else:
-        keystone_v3 = True
-        tenant_env = 'OS_PROJECT_NAME'
-        tenant = 'project_name'
-
-    # The most common way to pass these info to the script is to do it
-    # through environment variables.
-    creds.update({
-        "username": os.environ.get("OS_USERNAME"),
-        "password": os.environ.get("OS_PASSWORD"),
-        "auth_url": os.environ.get("OS_AUTH_URL"),
-        tenant: os.environ.get(tenant_env)
-    })
-
-    if keystone_v3:
-        if os.getenv('OS_USER_DOMAIN_NAME') is not None:
-            creds.update({
-                "user_domain_name": os.getenv('OS_USER_DOMAIN_NAME')
-            })
-        if os.getenv('OS_PROJECT_DOMAIN_NAME') is not None:
-            creds.update({
-                "project_domain_name": os.getenv('OS_PROJECT_DOMAIN_NAME')
-            })
+    """Returns a creds dictionary filled with values parsed from the environment
+
+    Keystone API version used is 3; v2 was deprecated in 2014 (Icehouse). Along
+    with this deprecation, environment variable 'OS_TENANT_NAME' is replaced by
+    'OS_PROJECT_NAME'.
+    """
+    creds = {'username': os.environ.get('OS_USERNAME'),
+             'password': os.environ.get('OS_PASSWORD'),
+             'auth_url': os.environ.get('OS_AUTH_URL'),
+             'project_name': os.environ.get('OS_PROJECT_NAME')
+             }
+
+    if os.getenv('OS_USER_DOMAIN_NAME'):
+        creds['user_domain_name'] = os.getenv('OS_USER_DOMAIN_NAME')
+    if os.getenv('OS_PROJECT_DOMAIN_NAME'):
+        creds['project_domain_name'] = os.getenv('OS_PROJECT_DOMAIN_NAME')
 
     return creds
 
@@ -174,6 +159,7 @@ def get_glance_client():    # pragma: no cover
 def get_shade_client():
     return shade.openstack_cloud()
 
+
 # *********************************************
 #   NOVA
 # *********************************************
@@ -272,7 +258,8 @@ def create_aggregate_with_host(nova_client, aggregate_name, av_zone,
 def create_keypair(name, key_path=None):    # pragma: no cover
     try:
         with open(key_path) as fpubkey:
-            keypair = get_nova_client().keypairs.create(name=name, public_key=fpubkey.read())
+            keypair = get_nova_client().keypairs.create(
+                name=name, public_key=fpubkey.read())
             return keypair
     except Exception:  # pylint: disable=broad-except
         log.exception("Error [create_keypair(nova_client)]")
@@ -291,8 +278,7 @@ def create_instance_and_wait_for_active(json_body):    # pragma: no cover
     VM_BOOT_TIMEOUT = 180
     nova_client = get_nova_client()
     instance = create_instance(json_body)
-    count = VM_BOOT_TIMEOUT / SLEEP
-    for _ in range(count, -1, -1):
+    for _ in range(int(VM_BOOT_TIMEOUT / SLEEP)):
         status = get_instance_status(nova_client, instance)
         if status.lower() == "active":
             return instance
@@ -304,9 +290,11 @@ def create_instance_and_wait_for_active(json_body):    # pragma: no cover
     return None
 
 
-def attach_server_volume(server_id, volume_id, device=None):    # pragma: no cover
+def attach_server_volume(server_id, volume_id,
+                         device=None):    # pragma: no cover
     try:
-        get_nova_client().volumes.create_server_volume(server_id, volume_id, device)
+        get_nova_client().volumes.create_server_volume(server_id,
+                                                       volume_id, device)
     except Exception:  # pylint: disable=broad-except
         log.exception("Error [attach_server_volume(nova_client, '%s', '%s')]",
                       server_id, volume_id)
@@ -370,7 +358,8 @@ def get_server_by_name(name):   # pragma: no cover
 
 def create_flavor(name, ram, vcpus, disk, **kwargs):   # pragma: no cover
     try:
-        return get_nova_client().flavors.create(name, ram, vcpus, disk, **kwargs)
+        return get_nova_client().flavors.create(name, ram, vcpus,
+                                                disk, **kwargs)
     except Exception:  # pylint: disable=broad-except
         log.exception("Error [create_flavor(nova_client, %s, %s, %s, %s, %s)]",
                       name, ram, disk, vcpus, kwargs['is_public'])
@@ -455,13 +444,11 @@ def create_neutron_net(neutron_client, json_body):      # pragma: no cover
         raise Exception("operation error")
 
 
-def delete_neutron_net(neutron_client, network_id):      # pragma: no cover
+def delete_neutron_net(shade_client, network_id):
     try:
-        neutron_client.delete_network(network_id)
-        return True
-    except Exception:  # pylint: disable=broad-except
-        log.error("Error [delete_neutron_net(neutron_client, '%s')]",
-                  network_id)
+        return shade_client.delete_network(network_id)
+    except exc.OpenStackCloudException:
+        log.error("Error [delete_neutron_net(shade_client, '%s')]", network_id)
         return False
 
 
@@ -558,7 +545,8 @@ def get_security_group_id(neutron_client, sg_name):      # pragma: no cover
     return id
 
 
-def create_security_group(neutron_client, sg_name, sg_description):      # pragma: no cover
+def create_security_group(neutron_client, sg_name,
+                          sg_description):      # pragma: no cover
     json_body = {'security_group': {'name': sg_name,
                                     'description': sg_description}}
     try:
@@ -611,8 +599,8 @@ def create_secgroup_rule(neutron_client, sg_id, direction, protocol,
         return False
 
 
-def create_security_group_full(neutron_client,
-                               sg_name, sg_description):      # pragma: no cover
+def create_security_group_full(neutron_client, sg_name,
+                               sg_description):      # pragma: no cover
     sg_id = get_security_group_id(neutron_client, sg_name)
     if sg_id != '':
         log.info("Using existing security group '%s'...", sg_name)
@@ -670,22 +658,18 @@ def create_image(glance_client, image_name, file_path, disk_format,
         else:
             log.info("Creating image '%s' from '%s'...", image_name, file_path)
 
-            image = glance_client.images.create(name=image_name,
-                                                visibility=public,
-                                                disk_format=disk_format,
-                                                container_format=container_format,
-                                                min_disk=min_disk,
-                                                min_ram=min_ram,
-                                                tags=tag,
-                                                protected=protected,
-                                                **kwargs)
+            image = glance_client.images.create(
+                name=image_name, visibility=public, disk_format=disk_format,
+                container_format=container_format, min_disk=min_disk,
+                min_ram=min_ram, tags=tag, protected=protected, **kwargs)
             image_id = image.id
             with open(file_path) as image_data:
                 glance_client.images.upload(image_id, image_data)
         return image_id
     except Exception:  # pylint: disable=broad-except
-        log.error("Error [create_glance_image(glance_client, '%s', '%s', '%s')]",
-                  image_name, file_path, public)
+        log.error(
+            "Error [create_glance_image(glance_client, '%s', '%s', '%s')]",
+            image_name, file_path, public)
         return None
 
 
@@ -725,7 +709,8 @@ def create_volume(cinder_client, volume_name, volume_size,
         return None
 
 
-def delete_volume(cinder_client, volume_id, forced=False):      # pragma: no cover
+def delete_volume(cinder_client, volume_id,
+                  forced=False):      # pragma: no cover
     try:
         if forced:
             try:
diff --git a/yardstick/common/packages.py b/yardstick/common/packages.py
new file mode 100644 (file)
index 0000000..f20217f
--- /dev/null
@@ -0,0 +1,87 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import re
+
+import pip
+from pip import exceptions as pip_exceptions
+from pip.operations import freeze
+
+from yardstick.common import privsep
+
+
+LOG = logging.getLogger(__name__)
+
+ACTION_INSTALL = 'install'
+ACTION_UNINSTALL = 'uninstall'
+
+
+@privsep.yardstick_root.entrypoint
+def _pip_main(package, action, target=None):
+    if action == ACTION_UNINSTALL:
+        cmd = [action, package, '-y']
+    elif action == ACTION_INSTALL:
+        cmd = [action, package, '--upgrade']
+        if target:
+            cmd.append('--target=%s' % target)
+    return pip.main(cmd)
+
+
+def _pip_execute_action(package, action=ACTION_INSTALL, target=None):
+    """Execute an action with a PIP package.
+
+    According to [1], a package could be a URL, a local directory, a local dist
+    file or a requirements file.
+
+    [1] https://pip.pypa.io/en/stable/reference/pip_install/#argument-handling
+    """
+    try:
+        status = _pip_main(package, action, target)
+    except pip_exceptions.PipError:
+        status = 1
+
+    if not status:
+        LOG.info('Action "%s" executed, package %s', action, package)
+    else:
+        LOG.info('Error executing action "%s", package %s', action, package)
+    return status
+
+
+def pip_remove(package):
+    """Remove an installed PIP package"""
+    return _pip_execute_action(package, action=ACTION_UNINSTALL)
+
+
+def pip_install(package, target=None):
+    """Install a PIP package"""
+    return _pip_execute_action(package, action=ACTION_INSTALL, target=target)
+
+
+def pip_list(pkg_name=None):
+    """Dict of installed PIP packages with version.
+
+    If 'pkg_name' is not None, will return only those packages matching the
+    name."""
+    pip_regex = re.compile(r"(?P<name>.*)==(?P<version>[\w\.]+)")
+    git_regex = re.compile(r".*@(?P<version>[\w]+)#egg=(?P<name>[\w]+)")
+
+    pkg_dict = {}
+    for _pkg in freeze.freeze(local_only=True):
+        match = pip_regex.match(_pkg) or git_regex.match(_pkg)
+        if match and (not pkg_name or (
+                pkg_name and match.group('name').find(pkg_name) != -1)):
+            pkg_dict[match.group('name')] = match.group('version')
+
+    return pkg_dict
diff --git a/yardstick/common/privsep.py b/yardstick/common/privsep.py
new file mode 100644 (file)
index 0000000..4ae5104
--- /dev/null
@@ -0,0 +1,23 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from oslo_privsep import capabilities as c
+from oslo_privsep import priv_context
+
+yardstick_root = priv_context.PrivContext(
+    "yardstick",
+    cfg_section="yardstick_privileged",
+    pypath=__name__ + ".yardstick_root",
+    capabilities=[c.CAP_SYS_ADMIN]
+)
index 8604e90..357f66b 100644 (file)
@@ -22,6 +22,7 @@ import ipaddress
 import logging
 import os
 import random
+import re
 import socket
 import subprocess
 import sys
@@ -30,6 +31,7 @@ import six
 from flask import jsonify
 from six.moves import configparser
 from oslo_serialization import jsonutils
+from oslo_utils import encodeutils
 
 import yardstick
 
@@ -64,7 +66,7 @@ def itersubclasses(cls, _seen=None):
                 yield sub
 
 
-def import_modules_from_package(package):
+def import_modules_from_package(package, raise_exception=False):
     """Import modules given a package name
 
     :param: package - Full package name. For example: rally.deploy.engines
@@ -85,10 +87,27 @@ def import_modules_from_package(package):
         for module_name in missing_modules:
             try:
                 importlib.import_module(module_name)
-            except (ImportError, SyntaxError):
+            except (ImportError, SyntaxError) as exc:
+                if raise_exception:
+                    raise exc
                 logger.exception('Unable to import module %s', module_name)
 
 
+NON_NONE_DEFAULT = object()
+
+
+def get_key_with_default(data, key, default=NON_NONE_DEFAULT):
+    value = data.get(key, default)
+    if value is NON_NONE_DEFAULT:
+        raise KeyError(key)
+    return value
+
+
+def make_dict_from_map(data, key_map):
+    return {dest_key: get_key_with_default(data, src_key, default)
+            for dest_key, (src_key, default) in key_map.items()}
+
+
 def makedirs(d):
     try:
         os.makedirs(d)
@@ -105,13 +124,12 @@ def remove_file(path):
             raise
 
 
-def execute_command(cmd):
+def execute_command(cmd, **kwargs):
     exec_msg = "Executing command: '%s'" % cmd
     logger.debug(exec_msg)
 
-    output = subprocess.check_output(cmd.split()).split(os.linesep)
-
-    return output
+    output = subprocess.check_output(cmd.split(), **kwargs)
+    return encodeutils.safe_decode(output, incoming='utf-8').split(os.linesep)
 
 
 def source_env(env_file):
@@ -395,3 +413,45 @@ class Timer(object):
 
     def __getattr__(self, item):
         return getattr(self.delta, item)
+
+
+def read_meminfo(ssh_client):
+    """Read "/proc/meminfo" file and parse all keys and values"""
+
+    cpuinfo = six.BytesIO()
+    ssh_client.get_file_obj('/proc/meminfo', cpuinfo)
+    lines = cpuinfo.getvalue().decode('utf-8')
+    matches = re.findall(r"([\w\(\)]+):\s+(\d+)( kB)*", lines)
+    output = {}
+    for match in matches:
+        output[match[0]] = match[1]
+
+    return output
+
+
+def find_relative_file(path, task_path):
+    """
+    Find a file either at its absolute path or relative to a directory path,
+    in that order.
+
+    :param path:
+    :param task_path:
+    :return str: full path to file
+    """
+    # fixme: create schema to validate all fields have been provided
+    for lookup in [os.path.abspath(path), os.path.join(task_path, path)]:
+        try:
+            with open(lookup):
+                return lookup
+        except IOError:
+            pass
+    raise IOError(errno.ENOENT, 'Unable to find {} file'.format(path))
+
+
+def open_relative_file(path, task_path):
+    try:
+        return open(path)
+    except IOError as e:
+        if e.errno == errno.ENOENT:
+            return open(os.path.join(task_path, path))
+        raise
index 632b433..e8c7cf5 100644 (file)
@@ -11,8 +11,10 @@ from __future__ import absolute_import
 
 import logging
 import time
+import os
 
 import requests
+from requests import ConnectionError
 
 from yardstick.common import utils
 from third_party.influxdb.influxdb_line_protocol import make_lines
@@ -38,7 +40,8 @@ class InfluxdbDispatcher(DispatchBase):
 
         self.influxdb_url = "%s/write?db=%s" % (self.target, self.db_name)
 
-        self.task_id = -1
+        self.task_id = None
+        self.tags = None
 
     def flush_result_data(self, data):
         LOG.debug('Test result all : %s', data)
@@ -57,28 +60,41 @@ class InfluxdbDispatcher(DispatchBase):
             for record in data['tc_data']:
                 # skip results with no data because we influxdb encode empty dicts
                 if record.get("data"):
-                    self._upload_one_record(record, case, tc_criteria)
+                    self.upload_one_record(record, case, tc_criteria)
 
         return 0
 
-    def _upload_one_record(self, data, case, tc_criteria):
+    def upload_one_record(self, data, case, tc_criteria, task_id=None):
+        if task_id:
+            self.task_id = task_id
+
+        line = self._data_to_line_protocol(data, case, tc_criteria)
+        LOG.debug('Test result line format : %s', line)
+
         try:
-            line = self._data_to_line_protocol(data, case, tc_criteria)
-            LOG.debug('Test result line format : %s', line)
             res = requests.post(self.influxdb_url,
                                 data=line,
                                 auth=(self.username, self.password),
                                 timeout=self.timeout)
+        except ConnectionError as err:
+            LOG.exception('Failed to record result data: %s', err)
+        else:
             if res.status_code != 204:
                 LOG.error('Test result posting finished with status code'
                           ' %d.', res.status_code)
                 LOG.error(res.text)
 
-        except Exception as err:
-            LOG.exception('Failed to record result data: %s', err)
-
     def _data_to_line_protocol(self, data, case, criteria):
         msg = {}
+
+        if not self.tags:
+            self.tags = {
+                'deploy_scenario': os.environ.get('DEPLOY_SCENARIO', 'unknown'),
+                'installer': os.environ.get('INSTALLER_TYPE', 'unknown'),
+                'pod_name': os.environ.get('NODE_NAME', 'unknown'),
+                'version': os.environ.get('YARDSTICK_BRANCH', 'unknown')
+            }
+
         point = {
             "measurement": case,
             "fields": utils.flatten_dict_key(data["data"]),
@@ -93,7 +109,7 @@ class InfluxdbDispatcher(DispatchBase):
     def _get_nano_timestamp(self, results):
         try:
             timestamp = results["timestamp"]
-        except Exception:
+        except KeyError:
             timestamp = time.time()
 
         return str(int(float(timestamp) * 1000000000))
diff --git a/yardstick/error.py b/yardstick/error.py
new file mode 100644 (file)
index 0000000..9b84de1
--- /dev/null
@@ -0,0 +1,48 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class SSHError(Exception):
+    """Class handles ssh connection error exception"""
+    pass
+
+
+class SSHTimeout(SSHError):
+    """Class handles ssh connection timeout exception"""
+    pass
+
+
+class IncorrectConfig(Exception):
+    """Class handles incorrect configuration during setup"""
+    pass
+
+
+class IncorrectSetup(Exception):
+    """Class handles incorrect setup during setup"""
+    pass
+
+
+class IncorrectNodeSetup(IncorrectSetup):
+    """Class handles incorrect setup during setup"""
+    pass
+
+
+class ErrorClass(object):
+
+    def __init__(self, *args, **kwargs):
+        if 'test' not in kwargs:
+            raise RuntimeError
+
+    def __getattr__(self, item):
+        raise AttributeError
diff --git a/yardstick/network_services/constants.py b/yardstick/network_services/constants.py
new file mode 100644 (file)
index 0000000..79951e3
--- /dev/null
@@ -0,0 +1,17 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+REMOTE_TMP = "/tmp"
+DEFAULT_VNF_TIMEOUT = 3600
+PROCESS_JOIN_TIMEOUT = 3
index 8c44b26..05b822c 100644 (file)
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2017 Intel Corporation
+# Copyright (c) 2016-2018 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
+import os
 
 import re
-import itertools
+from collections import defaultdict
+from itertools import chain
 
-import six
+from yardstick.common.utils import validate_non_string_sequence
+from yardstick.error import IncorrectConfig
+from yardstick.error import IncorrectSetup
+from yardstick.error import IncorrectNodeSetup
+from yardstick.error import SSHTimeout
+from yardstick.error import SSHError
 
 NETWORK_KERNEL = 'network_kernel'
 NETWORK_DPDK = 'network_dpdk'
@@ -25,7 +32,6 @@ CRYPTO_KERNEL = 'crypto_kernel'
 CRYPTO_DPDK = 'crypto_dpdk'
 CRYPTO_OTHER = 'crypto_other'
 
-
 LOG = logging.getLogger(__name__)
 
 
@@ -33,6 +39,166 @@ class DpdkBindHelperException(Exception):
     pass
 
 
+class DpdkInterface(object):
+    TOPOLOGY_REQUIRED_KEYS = frozenset({
+        "vpci", "local_ip", "netmask", "local_mac", "driver"})
+
+    def __init__(self, dpdk_node, interface):
+        super(DpdkInterface, self).__init__()
+        self.dpdk_node = dpdk_node
+        self.interface = interface
+
+        try:
+            assert self.local_mac
+        except (AssertionError, KeyError):
+            raise IncorrectConfig
+
+    @property
+    def local_mac(self):
+        return self.interface['local_mac']
+
+    @property
+    def mac_lower(self):
+        return self.local_mac.lower()
+
+    @property
+    def missing_fields(self):
+        return self.TOPOLOGY_REQUIRED_KEYS.difference(self.interface)
+
+    @staticmethod
+    def _detect_socket(netdev):
+        try:
+            socket = netdev['numa_node']
+        except KeyError:
+            # Where is this documented?
+            # It seems for dual-sockets systems the second socket PCI bridge
+            # will have an address > 0x0f, e.g.
+            # Bridge PCI->PCI (P#524320 busid=0000:80:02.0 id=8086:6f04
+            if netdev['pci_bus_id'][5] == "0":
+                socket = 0
+            else:
+                # this doesn't handle quad-sockets
+                # TODO: fix this for quad-socket
+                socket = 1
+        return socket
+
+    def probe_missing_values(self):
+        try:
+            for netdev in self.dpdk_node.netdevs.values():
+                if netdev['address'].lower() == self.mac_lower:
+                    socket = self._detect_socket(netdev)
+                    self.interface.update({
+                        'vpci': netdev['pci_bus_id'],
+                        'driver': netdev['driver'],
+                        'socket': socket,
+                        # don't need ifindex
+                    })
+
+        except KeyError:
+            # if we don't find all the keys then don't update
+            pass
+
+        except (IncorrectNodeSetup, SSHError, SSHTimeout):
+            raise IncorrectConfig(
+                "Unable to probe missing interface fields '%s', on node %s "
+                "SSH Error" % (', '.join(self.missing_fields), self.dpdk_node.node_key))
+
+
+class DpdkNode(object):
+
+    def __init__(self, node_name, interfaces, ssh_helper, timeout=120):
+        super(DpdkNode, self).__init__()
+        self.interfaces = interfaces
+        self.ssh_helper = ssh_helper
+        self.node_key = node_name
+        self.timeout = timeout
+        self._dpdk_helper = None
+        self.netdevs = {}
+
+        try:
+            self.dpdk_interfaces = {intf['name']: DpdkInterface(self, intf['virtual-interface'])
+                                    for intf in self.interfaces}
+        except IncorrectConfig:
+            template = "MAC address is required for all interfaces, missing on: {}"
+            errors = (intf['name'] for intf in self.interfaces if
+                      'local_mac' not in intf['virtual-interface'])
+            raise IncorrectSetup(template.format(", ".join(errors)))
+
+    @property
+    def dpdk_helper(self):
+        if not isinstance(self._dpdk_helper, DpdkBindHelper):
+            self._dpdk_helper = DpdkBindHelper(self.ssh_helper)
+        return self._dpdk_helper
+
+    @property
+    def _interface_missing_iter(self):
+        return chain.from_iterable(self._interface_missing_map.values())
+
+    @property
+    def _interface_missing_map(self):
+        return {name: intf.missing_fields for name, intf in self.dpdk_interfaces.items()}
+
+    def _probe_netdevs(self):
+        self.netdevs.update(self.dpdk_helper.find_net_devices())
+
+    def _force_rebind(self):
+        return self.dpdk_helper.force_dpdk_rebind()
+
+    def _probe_dpdk_drivers(self):
+        self.dpdk_helper.probe_real_kernel_drivers()
+        for pci, driver in self.dpdk_helper.real_kernel_interface_driver_map.items():
+            for intf in self.interfaces:
+                vintf = intf['virtual-interface']
+                # stupid substring matches
+                # don't use netdev use interface
+                if vintf['vpci'].endswith(pci):
+                    vintf['driver'] = driver
+                    # we can't update netdevs because we may not have netdev info
+
+    def _probe_missing_values(self):
+        for intf in self.dpdk_interfaces.values():
+            intf.probe_missing_values()
+
+    def check(self):
+        # only ssh probe if there are missing values
+        # ssh probe won't work on Ixia, so we had better define all our values
+        try:
+            missing_fields_set = set(self._interface_missing_iter)
+
+            # if we are only missing driver then maybe we can get kernel module
+            # this requires vpci
+            if missing_fields_set == {'driver'}:
+                self._probe_dpdk_drivers()
+                # we can't reprobe missing values because we may not have netdev info
+
+            # if there are any other missing then we have to netdev probe
+            if missing_fields_set.difference({'driver'}):
+                self._probe_netdevs()
+                try:
+                    self._probe_missing_values()
+                except IncorrectConfig:
+                    # ignore for now
+                    pass
+
+                # check again and verify we have all the fields
+                if set(self._interface_missing_iter):
+                    # last chance fallback, rebind everything and probe
+                    # this probably won't work
+                    self._force_rebind()
+                    self._probe_netdevs()
+                    self._probe_missing_values()
+
+            errors = ("{} missing: {}".format(name, ", ".join(missing_fields)) for
+                      name, missing_fields in self._interface_missing_map.items() if
+                      missing_fields)
+            errors = "\n".join(errors)
+            if errors:
+                raise IncorrectSetup(errors)
+
+        finally:
+            self._dpdk_helper = None
+
+
 class DpdkBindHelper(object):
     DPDK_STATUS_CMD = "{dpdk_devbind} --status"
     DPDK_BIND_CMD = "sudo {dpdk_devbind} {force} -b {driver} {vpci}"
@@ -42,6 +208,8 @@ class DpdkBindHelper(object):
     SKIP_RE = re.compile('(====|<none>|^$)')
     NIC_ROW_FIELDS = ['vpci', 'dev_type', 'iface', 'driver', 'unused', 'active']
 
+    UIO_DRIVER = "uio"
+
     HEADER_DICT_PAIRS = [
         (re.compile('^Network.*DPDK.*$'), NETWORK_DPDK),
         (re.compile('^Network.*kernel.*$'), NETWORK_KERNEL),
@@ -51,6 +219,42 @@ class DpdkBindHelper(object):
         (re.compile('^Other crypto.*$'), CRYPTO_OTHER),
     ]
 
+    FIND_NETDEVICE_STRING = r"""\
+find /sys/devices/pci* -type d -name net -exec sh -c '{ grep -sH ^ \
+$1/ifindex $1/address $1/operstate $1/device/vendor $1/device/device \
+$1/device/subsystem_vendor $1/device/subsystem_device $1/device/numa_node ; \
+printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
+' sh  \{\}/* \;
+"""
+
+    BASE_ADAPTER_RE = re.compile('^/sys/devices/(.*)/net/([^/]*)/([^:]*):(.*)$', re.M)
+    DPDK_DEVBIND = "dpdk-devbind.py"
+
+    @classmethod
+    def parse_netdev_info(cls, stdout):
+        network_devices = defaultdict(dict)
+        match_iter = (match.groups() for match in cls.BASE_ADAPTER_RE.finditer(stdout))
+        for bus_path, interface_name, name, value in match_iter:
+            dir_name, bus_id = os.path.split(bus_path)
+            if 'virtio' in bus_id:
+                # some VMs insert an extra virtio1/ component into the
+                # PCI device path; use the parent directory instead
+                bus_id = os.path.basename(dir_name)
+
+            # remove extra 'device/' from 'device/vendor,
+            # device/subsystem_vendor', etc.
+            if 'device' in name:
+                name = name.split('/')[1]
+
+            network_devices[interface_name].update({
+                name: value,
+                'interface_name': interface_name,
+                'pci_bus_id': bus_id,
+            })
+
+        # convert back to regular dict
+        return dict(network_devices)
+
     def clean_status(self):
         self.dpdk_status = {
             NETWORK_KERNEL: [],
@@ -61,11 +265,17 @@ class DpdkBindHelper(object):
             CRYPTO_OTHER: [],
         }
 
-    def __init__(self, ssh_helper):
+    # TODO: add support for driver other than igb_uio
+    def __init__(self, ssh_helper, dpdk_driver="igb_uio"):
+        self.ssh_helper = ssh_helper
+        self.real_kernel_interface_driver_map = {}
+        self.dpdk_driver = dpdk_driver
         self.dpdk_status = None
         self.status_nic_row_re = None
-        self._dpdk_devbind = None
+        self.dpdk_devbind = self.ssh_helper.join_bin_path(self.DPDK_DEVBIND)
         self._status_cmd_attr = None
+        self.used_drivers = None
+        self.real_kernel_drivers = {}
 
         self.ssh_helper = ssh_helper
         self.clean_status()
@@ -73,15 +283,16 @@ class DpdkBindHelper(object):
     def _dpdk_execute(self, *args, **kwargs):
         res = self.ssh_helper.execute(*args, **kwargs)
         if res[0] != 0:
-            raise DpdkBindHelperException('{} command failed with rc={}'.format(
-                self.dpdk_devbind, res[0]))
+            template = '{} command failed with rc={}'
+            raise DpdkBindHelperException(template.format(self.dpdk_devbind, res[0]))
         return res
 
-    @property
-    def dpdk_devbind(self):
-        if self._dpdk_devbind is None:
-            self._dpdk_devbind = self.ssh_helper.provision_tool(tool_file="dpdk-devbind.py")
-        return self._dpdk_devbind
+    def load_dpdk_driver(self):
+        cmd_template = "sudo modprobe {} && sudo modprobe {}"
+        self.ssh_helper.execute(cmd_template.format(self.UIO_DRIVER, self.dpdk_driver))
+
+    def check_dpdk_driver(self):
+        return self.ssh_helper.execute("lsmod | grep -i {}".format(self.dpdk_driver))[0]
 
     @property
     def _status_cmd(self):
@@ -89,12 +300,14 @@ class DpdkBindHelper(object):
             self._status_cmd_attr = self.DPDK_STATUS_CMD.format(dpdk_devbind=self.dpdk_devbind)
         return self._status_cmd_attr
 
-    def _addline(self, active_list, line):
+    def _add_line(self, active_list, line):
         if active_list is None:
             return
+
         res = self.NIC_ROW_RE.match(line)
         if res is None:
             return
+
         new_data = {k: v for k, v in zip(self.NIC_ROW_FIELDS, res.groups())}
         new_data['active'] = bool(new_data['active'])
         self.dpdk_status[active_list].append(new_data)
@@ -106,14 +319,14 @@ class DpdkBindHelper(object):
                 return a_dict
         return active_dict
 
-    def parse_dpdk_status_output(self, input):
+    def _parse_dpdk_status_output(self, output):
         active_dict = None
         self.clean_status()
-        for a_row in input.splitlines():
+        for a_row in output.splitlines():
             if self.SKIP_RE.match(a_row):
                 continue
             active_dict = self._switch_active_dict(a_row, active_dict)
-            self._addline(active_dict, a_row)
+            self._add_line(active_dict, a_row)
         return self.dpdk_status
 
     def _get_bound_pci_addresses(self, active_dict):
@@ -130,31 +343,85 @@ class DpdkBindHelper(object):
     @property
     def interface_driver_map(self):
         return {interface['vpci']: interface['driver']
-                for interface in itertools.chain.from_iterable(self.dpdk_status.values())}
+                for interface in chain.from_iterable(self.dpdk_status.values())}
 
     def read_status(self):
-        return self.parse_dpdk_status_output(self._dpdk_execute(self._status_cmd)[1])
+        return self._parse_dpdk_status_output(self._dpdk_execute(self._status_cmd)[1])
+
+    def find_net_devices(self):
+        exit_status, stdout, _ = self.ssh_helper.execute(self.FIND_NETDEVICE_STRING)
+        if exit_status != 0:
+            return {}
+
+        return self.parse_netdev_info(stdout)
 
     def bind(self, pci_addresses, driver, force=True):
-        # accept single PCI or list of PCI
-        if isinstance(pci_addresses, six.string_types):
-            pci_addresses = [pci_addresses]
+        # accept single PCI or sequence of PCI
+        pci_addresses = validate_non_string_sequence(pci_addresses, [pci_addresses])
+
         cmd = self.DPDK_BIND_CMD.format(dpdk_devbind=self.dpdk_devbind,
                                         driver=driver,
                                         vpci=' '.join(list(pci_addresses)),
                                         force='--force' if force else '')
         LOG.debug(cmd)
         self._dpdk_execute(cmd)
+
         # update the inner status dict
         self.read_status()
 
+    def probe_real_kernel_drivers(self):
+        self.read_status()
+        self.save_real_kernel_interface_driver_map()
+
+    def force_dpdk_rebind(self):
+        self.load_dpdk_driver()
+        self.read_status()
+        self.save_real_kernel_interface_driver_map()
+        self.save_used_drivers()
+
+        real_driver_map = {}
+        # only rebind devices that are bound to DPDK
+        for pci in self.dpdk_bound_pci_addresses:
+            # messy
+            real_driver = self.real_kernel_interface_driver_map[pci]
+            real_driver_map.setdefault(real_driver, []).append(pci)
+        for real_driver, pcis in real_driver_map.items():
+            self.bind(pcis, real_driver, force=True)
+
     def save_used_drivers(self):
         # invert the map, so we can bind by driver type
         self.used_drivers = {}
-        # sort for stabililty
+        # sort for stability
         for vpci, driver in sorted(self.interface_driver_map.items()):
             self.used_drivers.setdefault(driver, []).append(vpci)
 
+    KERNEL_DRIVER_RE = re.compile(r"Kernel modules: (\S+)", re.M)
+    VIRTIO_DRIVER_RE = re.compile(r"Ethernet.*Virtio network device", re.M)
+    VIRTIO_DRIVER = "virtio-pci"
+
+    def save_real_kernel_drivers(self):
+        # invert the map, so we can bind by driver type
+        self.real_kernel_drivers = {}
+        # sort for stability
+        for vpci, driver in sorted(self.real_kernel_interface_driver_map.items()):
+            self.real_kernel_drivers.setdefault(driver, []).append(vpci)
+
+    def get_real_kernel_driver(self, pci):
+        out = self.ssh_helper.execute('lspci -k -s %s' % pci)[1]
+        match = self.KERNEL_DRIVER_RE.search(out)
+        if match:
+            return match.group(1)
+
+        match = self.VIRTIO_DRIVER_RE.search(out)
+        if match:
+            return self.VIRTIO_DRIVER
+
+        return None
+
+    def save_real_kernel_interface_driver_map(self):
+        iter1 = ((pci, self.get_real_kernel_driver(pci)) for pci in self.interface_driver_map)
+        self.real_kernel_interface_driver_map = {pci: driver for pci, driver in iter1 if driver}
+
     def rebind_drivers(self, force=True):
         for driver, vpcis in self.used_drivers.items():
             self.bind(vpcis, driver, force)
index e69de29..356b36b 100644 (file)
@@ -0,0 +1,33 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import importlib
+
+
+def register_modules():
+    modules = [
+        'yardstick.network_services.traffic_profile.trex_traffic_profile',
+        'yardstick.network_services.traffic_profile.fixed',
+        'yardstick.network_services.traffic_profile.http',
+        'yardstick.network_services.traffic_profile.http_ixload',
+        'yardstick.network_services.traffic_profile.ixia_rfc2544',
+        'yardstick.network_services.traffic_profile.prox_ACL',
+        'yardstick.network_services.traffic_profile.prox_binsearch',
+        'yardstick.network_services.traffic_profile.prox_profile',
+        'yardstick.network_services.traffic_profile.prox_ramp',
+        'yardstick.network_services.traffic_profile.rfc2544',
+    ]
+
+    for module in modules:
+        importlib.import_module(module)
index ad256b4..162bab2 100644 (file)
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-""" Base class for the generic traffic profile implementation """
 
-from __future__ import absolute_import
-from yardstick.common.utils import import_modules_from_package, itersubclasses
+from yardstick.common import exceptions
+from yardstick.common import utils
 
 
 class TrafficProfile(object):
@@ -33,13 +32,12 @@ class TrafficProfile(object):
         :return:
         """
         profile_class = tp_config["traffic_profile"]["traffic_type"]
-        import_modules_from_package(
-            "yardstick.network_services.traffic_profile")
         try:
-            return next(c for c in itersubclasses(TrafficProfile)
+            return next(c for c in utils.itersubclasses(TrafficProfile)
                         if c.__name__ == profile_class)(tp_config)
         except StopIteration:
-            raise RuntimeError("No implementation for %s", profile_class)
+            raise exceptions.TrafficProfileNotImplemented(
+                profile_class=profile_class)
 
     def __init__(self, tp_config):
         # e.g. RFC2544 start_ip, stop_ip, drop_rate,
index 7881131..7f04722 100644 (file)
@@ -15,7 +15,7 @@
 from __future__ import absolute_import
 import logging
 
-from yardstick.network_services.traffic_profile.traffic_profile import \
+from yardstick.network_services.traffic_profile.trex_traffic_profile import \
     TrexProfile
 
 LOG = logging.getLogger(__name__)
@@ -26,7 +26,7 @@ class IXIARFC2544Profile(TrexProfile):
     UPLINK = 'uplink'
     DOWNLINK = 'downlink'
 
-    def _get_ixia_traffic_profile(self, profile_data, mac=None, xfile=None, static_traffic=None):
+    def _get_ixia_traffic_profile(self, profile_data, mac=None):
         if mac is None:
             mac = {}
 
@@ -74,12 +74,12 @@ class IXIARFC2544Profile(TrexProfile):
                     },
                     'outer_l4': value['outer_l4'],
                 }
-            except Exception:
+            except KeyError:
                 continue
 
         return result
 
-    def _ixia_traffic_generate(self, traffic_generator, traffic, ixia_obj):
+    def _ixia_traffic_generate(self, traffic, ixia_obj):
         for key, value in traffic.items():
             if key.startswith((self.UPLINK, self.DOWNLINK)):
                 value["iload"] = str(self.rate)
@@ -106,7 +106,7 @@ class IXIARFC2544Profile(TrexProfile):
 
         self.ports = [port for port in port_generator()]
 
-    def execute_traffic(self, traffic_generator, ixia_obj, mac=None, xfile=None):
+    def execute_traffic(self, traffic_generator, ixia_obj, mac=None):
         if mac is None:
             mac = {}
         if self.first_run:
@@ -114,28 +114,27 @@ class IXIARFC2544Profile(TrexProfile):
             self.pg_id = 0
             self.update_traffic_profile(traffic_generator)
             traffic = \
-                self._get_ixia_traffic_profile(self.full_profile, mac, xfile)
+                self._get_ixia_traffic_profile(self.full_profile, mac)
             self.max_rate = self.rate
             self.min_rate = 0
             self.get_multiplier()
-            self._ixia_traffic_generate(traffic_generator, traffic, ixia_obj)
+            self._ixia_traffic_generate(traffic, ixia_obj)
 
     def get_multiplier(self):
         self.rate = round((self.max_rate + self.min_rate) / 2.0, 2)
         multiplier = round(self.rate / self.pps, 2)
         return str(multiplier)
 
-    def start_ixia_latency(self, traffic_generator, ixia_obj,
-                           mac=None, xfile=None):
+    def start_ixia_latency(self, traffic_generator, ixia_obj, mac=None):
         if mac is None:
             mac = {}
         self.update_traffic_profile(traffic_generator)
         traffic = \
-            self._get_ixia_traffic_profile(self.full_profile, mac, xfile)
-        self._ixia_traffic_generate(traffic_generator, traffic, ixia_obj)
+            self._get_ixia_traffic_profile(self.full_profile, mac)
+        self._ixia_traffic_generate(traffic, ixia_obj)
 
-    def get_drop_percentage(self, traffic_generator, samples, tol_min,
-                            tolerance, ixia_obj, mac=None, xfile=None):
+    def get_drop_percentage(self, samples, tol_min, tolerance, ixia_obj,
+                            mac=None):
         if mac is None:
             mac = {}
         status = 'Running'
@@ -179,6 +178,6 @@ class IXIARFC2544Profile(TrexProfile):
             samples['DropPercentage'] = drop_percent
             return status, samples
         self.get_multiplier()
-        traffic = self._get_ixia_traffic_profile(self.full_profile, mac, xfile)
-        self._ixia_traffic_generate(traffic_generator, traffic, ixia_obj)
+        traffic = self._get_ixia_traffic_profile(self.full_profile, mac)
+        self._ixia_traffic_generate(traffic, ixia_obj)
         return status, samples
index 1fd6ec4..5700f98 100644 (file)
@@ -16,6 +16,8 @@
 from __future__ import absolute_import
 
 import logging
+import datetime
+import time
 
 from yardstick.network_services.traffic_profile.prox_profile import ProxProfile
 
@@ -81,19 +83,66 @@ class ProxBinSearchProfile(ProxProfile):
         # success, the binary search will complete on an integer multiple
         # of the precision, rather than on a fraction of it.
 
+        theor_max_thruput = 0
+
+        result_samples = {}
+
+        # Store one time only value in influxdb
+        single_samples = {
+            "test_duration" : traffic_gen.scenario_helper.scenario_cfg["runner"]["duration"],
+            "test_precision" : self.params["traffic_profile"]["test_precision"],
+            "tolerated_loss" : self.params["traffic_profile"]["tolerated_loss"],
+            "duration" : duration
+        }
+        self.queue.put(single_samples)
+        self.prev_time = time.time()
+
         # throughput and packet loss from the most recent successful test
         successful_pkt_loss = 0.0
         for test_value in self.bounds_iterator(LOG):
             result, port_samples = self._profile_helper.run_test(pkt_size, duration,
                                                                  test_value, self.tolerated_loss)
+            self.curr_time = time.time()
+            diff_time = self.curr_time - self.prev_time
+            self.prev_time = self.curr_time
 
             if result.success:
                 LOG.debug("Success! Increasing lower bound")
                 self.current_lower = test_value
                 successful_pkt_loss = result.pkt_loss
+                samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)
+                samples["TxThroughput"] = samples["TxThroughput"] * 1000 * 1000
+
+                # store results with success tag in influxdb
+                success_samples = {'Success_' + key: value for key, value in samples.items()}
+
+                success_samples["Success_rx_total"] = int(result.rx_total / diff_time)
+                success_samples["Success_tx_total"] = int(result.tx_total / diff_time)
+                success_samples["Success_can_be_lost"] = int(result.can_be_lost / diff_time)
+                success_samples["Success_drop_total"] = int(result.drop_total / diff_time)
+                self.queue.put(success_samples)
+
+                # Store Actual throughput for result samples
+                result_samples["Result_Actual_throughput"] = \
+                    success_samples["Success_RxThroughput"]
             else:
                 LOG.debug("Failure... Decreasing upper bound")
                 self.current_upper = test_value
+                samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)
+
+            # normalize nested per-port counters to per-second rates
+            for k, per_port in samples.items():
+                if isinstance(per_port, dict):
+                    for k2 in per_port:
+                        samples[k][k2] = int(samples[k][k2] / diff_time)
 
-            samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)
+            if theor_max_thruput < samples["TxThroughput"]:
+                theor_max_thruput = samples['TxThroughput']
+                self.queue.put({'theor_max_throughput': theor_max_thruput})
+
+            LOG.debug("Collect TG KPIs %s %s", datetime.datetime.now(), samples)
             self.queue.put(samples)
+
+        result_samples["Result_pktSize"] = pkt_size
+        result_samples["Result_theor_max_throughput"] = theor_max_thruput/ (1000 * 1000)
+        self.queue.put(result_samples)
index b1ca8a3..83020c8 100644 (file)
@@ -21,7 +21,7 @@ from trex_stl_lib.trex_stl_client import STLStream
 from trex_stl_lib.trex_stl_streams import STLFlowLatencyStats
 from trex_stl_lib.trex_stl_streams import STLTXCont
 
-from yardstick.network_services.traffic_profile.traffic_profile \
+from yardstick.network_services.traffic_profile.trex_traffic_profile \
     import TrexProfile
 
 LOGGING = logging.getLogger(__name__)
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-""" Trex Traffic Profile definitions """
 
-from __future__ import absolute_import
 import struct
 import socket
 import logging
 from random import SystemRandom
-import six
 import ipaddress
 
-from yardstick.network_services.traffic_profile.base import TrafficProfile
+import six
+
+from yardstick.common import exceptions as y_exc
+from yardstick.network_services.traffic_profile import base
 from trex_stl_lib.trex_stl_client import STLStream
 from trex_stl_lib.trex_stl_streams import STLFlowLatencyStats
 from trex_stl_lib.trex_stl_streams import STLTXCont
@@ -48,7 +48,7 @@ TYPE_OF_SERVICE = 'tos'
 LOG = logging.getLogger(__name__)
 
 
-class TrexProfile(TrafficProfile):
+class TrexProfile(base.TrafficProfile):
     """ This class handles Trex Traffic profile generation and execution """
 
     PROTO_MAP = {
@@ -78,31 +78,32 @@ class TrexProfile(TrafficProfile):
                                            op='inc',
                                            step=1)
             self.vm_flow_vars.append(stl_vm_flow_var)
-            stl_vm_wr_flow_var = STLVmWrFlowVar(fv_name='mac_{}'.format(direction),
-                                                pkt_offset='Ether.{}'.format(direction))
+            stl_vm_wr_flow_var = STLVmWrFlowVar(
+                fv_name='mac_{}'.format(direction),
+                pkt_offset='Ether.{}'.format(direction))
             self.vm_flow_vars.append(stl_vm_wr_flow_var)
         return partial
 
     def _ip_range_action_partial(self, direction, count=1):
         # pylint: disable=unused-argument
         def partial(min_value, max_value, count):
-            ip1 = int(ipaddress.IPv4Address(min_value))
-            ip2 = int(ipaddress.IPv4Address(max_value))
-            actual_count = (ip2 - ip1)
+            _, _, actual_count = self._count_ip(min_value, max_value)
             if not actual_count:
                 count = 1
             elif actual_count < int(count):
                 count = actual_count
 
-            stl_vm_flow_var = STLVmFlowVarRepeatableRandom(name="ip4_{}".format(direction),
-                                                           min_value=min_value,
-                                                           max_value=max_value,
-                                                           size=4,
-                                                           limit=int(count),
-                                                           seed=0x1235)
+            stl_vm_flow_var = STLVmFlowVarRepeatableRandom(
+                name="ip4_{}".format(direction),
+                min_value=min_value,
+                max_value=max_value,
+                size=4,
+                limit=int(count),
+                seed=0x1235)
             self.vm_flow_vars.append(stl_vm_flow_var)
-            stl_vm_wr_flow_var = STLVmWrFlowVar(fv_name='ip4_{}'.format(direction),
-                                                pkt_offset='IP.{}'.format(direction))
+            stl_vm_wr_flow_var = STLVmWrFlowVar(
+                fv_name='ip4_{}'.format(direction),
+                pkt_offset='IP.{}'.format(direction))
             self.vm_flow_vars.append(stl_vm_wr_flow_var)
             stl_vm_fix_ipv4 = STLVmFixIpv4(offset="IP")
             self.vm_flow_vars.append(stl_vm_fix_ipv4)
@@ -111,7 +112,7 @@ class TrexProfile(TrafficProfile):
     def _ip6_range_action_partial(self, direction, _):
         def partial(min_value, max_value, count):
             # pylint: disable=unused-argument
-            min_value, max_value = self._get_start_end_ipv6(min_value, max_value)
+            min_value, max_value, _ = self._count_ip(min_value, max_value)
             stl_vm_flow_var = STLVmFlowVar(name="ip6_{}".format(direction),
                                            min_value=min_value,
                                            max_value=max_value,
@@ -119,13 +120,14 @@ class TrexProfile(TrafficProfile):
                                            op='random',
                                            step=1)
             self.vm_flow_vars.append(stl_vm_flow_var)
-            stl_vm_wr_flow_var = STLVmWrFlowVar(fv_name='ip6_{}'.format(direction),
-                                                pkt_offset='IPv6.{}'.format(direction),
-                                                offset_fixup=8)
+            stl_vm_wr_flow_var = STLVmWrFlowVar(
+                fv_name='ip6_{}'.format(direction),
+                pkt_offset='IPv6.{}'.format(direction),
+                offset_fixup=8)
             self.vm_flow_vars.append(stl_vm_wr_flow_var)
         return partial
 
-    def _dscp_range_action_partial(self, *_):
+    def _dscp_range_action_partial(self, *args):
         def partial(min_value, max_value, count):
             # pylint: disable=unused-argument
             stl_vm_flow_var = STLVmFlowVar(name="dscp",
@@ -149,15 +151,17 @@ class TrexProfile(TrafficProfile):
             elif int(count) > actual_count:
                 count = actual_count
 
-            stl_vm_flow_var = STLVmFlowVarRepeatableRandom(name="port_{}".format(field),
-                                                           min_value=min_value,
-                                                           max_value=max_value,
-                                                           size=2,
-                                                           limit=int(count),
-                                                           seed=0x1235)
+            stl_vm_flow_var = STLVmFlowVarRepeatableRandom(
+                name="port_{}".format(field),
+                min_value=min_value,
+                max_value=max_value,
+                size=2,
+                limit=int(count),
+                seed=0x1235)
             self.vm_flow_vars.append(stl_vm_flow_var)
-            stl_vm_wr_flow_var = STLVmWrFlowVar(fv_name='port_{}'.format(field),
-                                                pkt_offset=self.udp[field])
+            stl_vm_wr_flow_var = STLVmWrFlowVar(
+                fv_name='port_{}'.format(field),
+                pkt_offset=self.udp[field])
             self.vm_flow_vars.append(stl_vm_wr_flow_var)
         return partial
 
@@ -448,20 +452,18 @@ class TrexProfile(TrafficProfile):
         self.profile = STLProfile(self.streams)
 
     @classmethod
-    def _get_start_end_ipv6(cls, start_ip, end_ip):
-        try:
-            ip1 = socket.inet_pton(socket.AF_INET6, start_ip)
-            ip2 = socket.inet_pton(socket.AF_INET6, end_ip)
-            hi1, lo1 = struct.unpack('!QQ', ip1)
-            hi2, lo2 = struct.unpack('!QQ', ip2)
-            if ((hi1 << 64) | lo1) > ((hi2 << 64) | lo2):
-                raise SystemExit("IPv6: start_ip is greater then end_ip")
-            max_p1 = abs(int(lo1) - int(lo2))
-            base_p1 = lo1
-        except Exception as ex_error:
-            raise SystemExit(ex_error)
-        else:
-            return base_p1, max_p1 + base_p1
+    def _count_ip(cls, start_ip, end_ip):
+        start = ipaddress.ip_address(six.u(start_ip))
+        end = ipaddress.ip_address(six.u(end_ip))
+        if start.version == 4:
+            return start, end, int(end) - int(start)
+        elif start.version == 6:
+            if int(start) > int(end):
+                raise y_exc.IPv6RangeError(start_ip=str(start),
+                                           end_ip=str(end))
+            _, lo1 = struct.unpack('!QQ', start.packed)
+            _, lo2 = struct.unpack('!QQ', end.packed)
+            return lo1, lo2, lo2 - lo1
 
     @classmethod
     def _get_random_value(cls, min_port, max_port):
index 7a1815e..4b987fa 100644 (file)
@@ -121,7 +121,6 @@ def provision_tool(connection, tool_path, tool_file=None):
         tool_path = get_nsb_option('tool_path')
     if tool_file:
         tool_path = os.path.join(tool_path, tool_file)
-    bin_path = get_nsb_option("bin_path")
     exit_status = connection.execute("which %s > /dev/null 2>&1" % tool_path)[0]
     if exit_status == 0:
         return encodeutils.safe_decode(tool_path, incoming='utf-8').rstrip()
index 1390dd0..f3cafef 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import absolute_import
-from __future__ import print_function
 import logging
 
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
+from yardstick.common import utils
 from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNF, DpdkVnfSetupEnvHelper
 from yardstick.network_services.yang_model import YangModel
 
@@ -62,8 +60,9 @@ class AclApproxVnf(SampleVNF):
         self.acl_rules = None
 
     def _start_vnf(self):
-        yang_model_path = find_relative_file(self.scenario_helper.options['rules'],
-                                             self.scenario_helper.task_path)
+        yang_model_path = utils.find_relative_file(
+            self.scenario_helper.options['rules'],
+            self.scenario_helper.task_path)
         yang_model = YangModel(yang_model_path)
         self.acl_rules = yang_model.get_rules()
         super(AclApproxVnf, self)._start_vnf()
index 285ead3..29f9c7b 100644 (file)
@@ -30,7 +30,6 @@ import six
 from six.moves import cStringIO
 from six.moves import zip, StringIO
 
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
 from yardstick.common import utils
 from yardstick.common.utils import SocketTopology, join_non_strings, try_int
 from yardstick.network_services.helpers.iniparser import ConfigParser
@@ -798,7 +797,7 @@ class ProxDpdkVnfSetupEnvHelper(DpdkVnfSetupEnvHelper):
         options = self.scenario_helper.options
         config_path = options['prox_config']
         config_file = os.path.basename(config_path)
-        config_path = find_relative_file(config_path, task_path)
+        config_path = utils.find_relative_file(config_path, task_path)
         self.additional_files = {}
 
         try:
@@ -815,7 +814,7 @@ class ProxDpdkVnfSetupEnvHelper(DpdkVnfSetupEnvHelper):
             prox_files = [prox_files]
         for key_prox_file in prox_files:
             base_prox_file = os.path.basename(key_prox_file)
-            key_prox_path = find_relative_file(key_prox_file, task_path)
+            key_prox_path = utils.find_relative_file(key_prox_file, task_path)
             remote_prox_file = self.copy_to_target(key_prox_path, base_prox_file)
             self.additional_files[base_prox_file] = remote_prox_file
 
@@ -929,6 +928,7 @@ class ProxResourceHelper(ClientResourceHelper):
         func = getattr(self.sut, cmd, None)
         if func:
             return func(*args, **kwargs)
+        return None
 
     def _connect(self, client=None):
         """Run and connect to prox on the remote system """
@@ -1005,11 +1005,18 @@ class ProxDataHelper(object):
     def samples(self):
         samples = {}
         for port_name, port_num in self.vnfd_helper.ports_iter():
-            port_rx_total, port_tx_total = self.sut.port_stats([port_num])[6:8]
-            samples[port_name] = {
-                "in_packets": port_rx_total,
-                "out_packets": port_tx_total,
-            }
+            try:
+                port_rx_total, port_tx_total = self.sut.port_stats([port_num])[6:8]
+                samples[port_name] = {
+                    "in_packets": port_rx_total,
+                    "out_packets": port_tx_total,
+                }
+            except (KeyError, TypeError, NameError, MemoryError, ValueError,
+                    SystemError, BufferError):
+                samples[port_name] = {
+                    "in_packets": 0,
+                    "out_packets": 0,
+                }
         return samples
 
     def __enter__(self):
@@ -1127,7 +1134,7 @@ class ProxProfileHelper(object):
             for key, value in section:
                 if key == "mode" and value == mode:
                     core_tuple = CoreSocketTuple(section_name)
-                    core = core_tuple.find_in_topology(self.cpu_topology)
+                    core = core_tuple.core_id
                     cores.append(core)
 
         return cores
@@ -1149,6 +1156,10 @@ class ProxProfileHelper(object):
         :return: return lat_min, lat_max, lat_avg
         :rtype: list
         """
+
+        if not self._latency_cores:
+            self._latency_cores = self.get_cores(self.PROX_CORE_LAT_MODE)
+
         if self._latency_cores:
             return self.sut.lat_stats(self._latency_cores)
         return []
@@ -1198,12 +1209,12 @@ class ProxMplsProfileHelper(ProxProfileHelper):
 
                 if item_value.startswith("tag"):
                     core_tuple = CoreSocketTuple(section_name)
-                    core_tag = core_tuple.find_in_topology(self.cpu_topology)
+                    core_tag = core_tuple.core_id
                     cores_tagged.append(core_tag)
 
                 elif item_value.startswith("udp"):
                     core_tuple = CoreSocketTuple(section_name)
-                    core_udp = core_tuple.find_in_topology(self.cpu_topology)
+                    core_udp = core_tuple.core_id
                     cores_plain.append(core_udp)
 
         return cores_tagged, cores_plain
@@ -1276,23 +1287,23 @@ class ProxBngProfileHelper(ProxProfileHelper):
 
                 if item_value.startswith("cpe"):
                     core_tuple = CoreSocketTuple(section_name)
-                    cpe_core = core_tuple.find_in_topology(self.cpu_topology)
+                    cpe_core = core_tuple.core_id
                     cpe_cores.append(cpe_core)
 
                 elif item_value.startswith("inet"):
                     core_tuple = CoreSocketTuple(section_name)
-                    inet_core = core_tuple.find_in_topology(self.cpu_topology)
+                    inet_core = core_tuple.core_id
                     inet_cores.append(inet_core)
 
                 elif item_value.startswith("arp"):
                     core_tuple = CoreSocketTuple(section_name)
-                    arp_core = core_tuple.find_in_topology(self.cpu_topology)
+                    arp_core = core_tuple.core_id
                     arp_cores.append(arp_core)
 
                 # We check the tasks/core separately
                 if item_value.startswith("arp_task"):
                     core_tuple = CoreSocketTuple(section_name)
-                    arp_task_core = core_tuple.find_in_topology(self.cpu_topology)
+                    arp_task_core = core_tuple.core_id
                     arp_tasks_core.append(arp_task_core)
 
         return cpe_cores, inet_cores, arp_cores, arp_tasks_core
@@ -1455,12 +1466,12 @@ class ProxVpeProfileHelper(ProxProfileHelper):
 
                 if item_value.startswith("cpe"):
                     core_tuple = CoreSocketTuple(section_name)
-                    core_tag = core_tuple.find_in_topology(self.cpu_topology)
+                    core_tag = core_tuple.core_id
                     cpe_cores.append(core_tag)
 
                 elif item_value.startswith("inet"):
                     core_tuple = CoreSocketTuple(section_name)
-                    inet_core = core_tuple.find_in_topology(self.cpu_topology)
+                    inet_core = core_tuple.core_id
                     inet_cores.append(inet_core)
 
         return cpe_cores, inet_cores
@@ -1639,7 +1650,7 @@ class ProxlwAFTRProfileHelper(ProxProfileHelper):
                 continue
 
             core_tuple = CoreSocketTuple(section_name)
-            core_tag = core_tuple.find_in_topology(self.cpu_topology)
+            core_tag = core_tuple.core_id
             for item_value in (v for k, v in section if k == 'name'):
                 if item_value.startswith('tun'):
                     tun_cores.append(core_tag)
index b7d295e..2cdb3f9 100644 (file)
 
 import errno
 import logging
+import datetime
+import time
 
 
 from yardstick.common.process import check_if_process_failed
 from yardstick.network_services.vnf_generic.vnf.prox_helpers import ProxDpdkVnfSetupEnvHelper
 from yardstick.network_services.vnf_generic.vnf.prox_helpers import ProxResourceHelper
-from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNF, PROCESS_JOIN_TIMEOUT
+from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNF
+from yardstick.network_services.constants import PROCESS_JOIN_TIMEOUT
 
 LOG = logging.getLogger(__name__)
 
@@ -39,6 +42,9 @@ class ProxApproxVnf(SampleVNF):
         if resource_helper_type is None:
             resource_helper_type = ProxResourceHelper
 
+        self.prev_packets_in = 0
+        self.prev_packets_sent = 0
+        self.prev_time = time.time()
         super(ProxApproxVnf, self).__init__(name, vnfd, setup_env_helper_type,
                                             resource_helper_type)
 
@@ -79,12 +85,13 @@ class ProxApproxVnf(SampleVNF):
             raise RuntimeError("Failed ..Invalid no of ports .. "
                                "1, 2 or 4 ports only supported at this time")
 
-        port_stats = self.vnf_execute('port_stats', range(port_count))
+        self.port_stats = self.vnf_execute('port_stats', range(port_count))
+        curr_time = time.time()
         try:
-            rx_total = port_stats[6]
-            tx_total = port_stats[7]
+            rx_total = self.port_stats[6]
+            tx_total = self.port_stats[7]
         except IndexError:
-            LOG.error("port_stats parse fail %s", port_stats)
+            LOG.debug("port_stats parse fail ")
             # return empty dict so we don't mess up existing KPIs
             return {}
 
@@ -96,7 +103,17 @@ class ProxApproxVnf(SampleVNF):
             # collectd KPIs here and not TG KPIs, so use a different method name
             "collect_stats": self.resource_helper.collect_collectd_kpi(),
         }
-        LOG.debug("%s collect KPIs %s", self.APP_NAME, result)
+        curr_packets_in = int((rx_total - self.prev_packets_in) / (curr_time - self.prev_time))
+        curr_packets_fwd = int((tx_total - self.prev_packets_sent) / (curr_time - self.prev_time))
+
+        result["curr_packets_in"] = curr_packets_in
+        result["curr_packets_fwd"] = curr_packets_fwd
+
+        self.prev_packets_in = rx_total
+        self.prev_packets_sent = tx_total
+        self.prev_time = curr_time
+
+        LOG.debug("%s collect KPIs %s %s", self.APP_NAME, datetime.datetime.now(), result)
         return result
 
     def _tear_down(self):
index fbaaa0c..f16b414 100644 (file)
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2017 Intel Corporation
+# Copyright (c) 2016-2018 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 from collections import Mapping
 import logging
 from multiprocessing import Queue, Value, Process
+
 import os
 import posixpath
 import re
-from six.moves import cStringIO
 import subprocess
 import time
 
+import six
+
 from trex_stl_lib.trex_stl_client import LoggerApi
 from trex_stl_lib.trex_stl_client import STLClient
 from trex_stl_lib.trex_stl_exceptions import STLError
 from yardstick.benchmark.contexts.base import Context
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
 from yardstick.common import exceptions as y_exceptions
 from yardstick.common.process import check_if_process_failed
-from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkBindHelper
-from yardstick.network_services.helpers.samplevnf_helper import PortPairs
+from yardstick.common import utils
+from yardstick.network_services.constants import DEFAULT_VNF_TIMEOUT
+from yardstick.network_services.constants import PROCESS_JOIN_TIMEOUT
+from yardstick.network_services.constants import REMOTE_TMP
+from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkBindHelper, DpdkNode
 from yardstick.network_services.helpers.samplevnf_helper import MultiPortConfig
+from yardstick.network_services.helpers.samplevnf_helper import PortPairs
 from yardstick.network_services.nfvi.resource import ResourceProfile
 from yardstick.network_services.utils import get_nsb_option
-from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
 from yardstick.network_services.vnf_generic.vnf.base import GenericTrafficGen
+from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
 from yardstick.network_services.vnf_generic.vnf.base import QueueFileWrapper
-from yardstick.ssh import AutoConnectSSH
+from yardstick.network_services.vnf_generic.vnf.vnf_ssh_helper import VnfSshHelper
 
 
-DPDK_VERSION = "dpdk-16.07"
-
 LOG = logging.getLogger(__name__)
 
 
-REMOTE_TMP = "/tmp"
-DEFAULT_VNF_TIMEOUT = 3600
-PROCESS_JOIN_TIMEOUT = 3
-
-
-class VnfSshHelper(AutoConnectSSH):
-
-    def __init__(self, node, bin_path, wait=None):
-        self.node = node
-        kwargs = self.args_from_node(self.node)
-        if wait:
-            kwargs.setdefault('wait', wait)
-
-        super(VnfSshHelper, self).__init__(**kwargs)
-        self.bin_path = bin_path
-
-    @staticmethod
-    def get_class():
-        # must return static class name, anything else refers to the calling class
-        # i.e. the subclass, not the superclass
-        return VnfSshHelper
-
-    def copy(self):
-        # this copy constructor is different from SSH classes, since it uses node
-        return self.get_class()(self.node, self.bin_path)
-
-    def upload_config_file(self, prefix, content):
-        cfg_file = os.path.join(REMOTE_TMP, prefix)
-        LOG.debug(content)
-        file_obj = cStringIO(content)
-        self.put_file_obj(file_obj, cfg_file)
-        return cfg_file
-
-    def join_bin_path(self, *args):
-        return os.path.join(self.bin_path, *args)
-
-    def provision_tool(self, tool_path=None, tool_file=None):
-        if tool_path is None:
-            tool_path = self.bin_path
-        return super(VnfSshHelper, self).provision_tool(tool_path, tool_file)
-
-
 class SetupEnvHelper(object):
 
     CFG_CONFIG = os.path.join(REMOTE_TMP, "sample_config")
@@ -119,6 +80,8 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
 
     APP_NAME = 'DpdkVnf'
     FIND_NET_CMD = "find /sys/class/net -lname '*{}*' -printf '%f'"
+    NR_HUGEPAGES_PATH = '/proc/sys/vm/nr_hugepages'
+    HUGEPAGES_KB = 1024 * 1024 * 16
 
     @staticmethod
     def _update_packet_type(ip_pipeline_cfg, traffic_options):
@@ -155,24 +118,22 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
         self.dpdk_bind_helper = DpdkBindHelper(ssh_helper)
 
     def _setup_hugepages(self):
-        cmd = "awk '/Hugepagesize/ { print $2$3 }' < /proc/meminfo"
-        hugepages = self.ssh_helper.execute(cmd)[1].rstrip()
-
-        memory_path = \
-            '/sys/kernel/mm/hugepages/hugepages-%s/nr_hugepages' % hugepages
-        self.ssh_helper.execute("awk -F: '{ print $1 }' < %s" % memory_path)
-
-        if hugepages == "2048kB":
-            pages = 8192
-        else:
-            pages = 16
-
-        self.ssh_helper.execute("echo %s | sudo tee %s" % (pages, memory_path))
+        meminfo = utils.read_meminfo(self.ssh_helper)
+        hp_size_kb = int(meminfo['Hugepagesize'])
+        nr_hugepages = int(abs(self.HUGEPAGES_KB / hp_size_kb))
+        self.ssh_helper.execute('echo %s | sudo tee %s' %
+                                (nr_hugepages, self.NR_HUGEPAGES_PATH))
+        hp = six.BytesIO()
+        self.ssh_helper.get_file_obj(self.NR_HUGEPAGES_PATH, hp)
+        nr_hugepages_set = int(hp.getvalue().decode('utf-8').splitlines()[0])
+        LOG.info('Hugepages size (kB): %s, number claimed: %s, number set: %s',
+                 hp_size_kb, nr_hugepages, nr_hugepages_set)
 
     def build_config(self):
         vnf_cfg = self.scenario_helper.vnf_cfg
         task_path = self.scenario_helper.task_path
 
+        config_file = vnf_cfg.get('file')
         lb_count = vnf_cfg.get('lb_count', 3)
         lb_config = vnf_cfg.get('lb_config', 'SW')
         worker_config = vnf_cfg.get('worker_config', '1C/1T')
@@ -185,7 +146,8 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
             'vnf_type': self.VNF_TYPE,
         }
 
-        config_tpl_cfg = find_relative_file(self.DEFAULT_CONFIG_TPL_CFG, task_path)
+        config_tpl_cfg = utils.find_relative_file(self.DEFAULT_CONFIG_TPL_CFG,
+                                                  task_path)
         config_basename = posixpath.basename(self.CFG_CONFIG)
         script_basename = posixpath.basename(self.CFG_SCRIPT)
         multiport = MultiPortConfig(self.scenario_helper.topology,
@@ -200,12 +162,20 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
                                     self.socket)
 
         multiport.generate_config()
-        with open(self.CFG_CONFIG) as handle:
-            new_config = handle.read()
-
-        new_config = self._update_traffic_type(new_config, traffic_options)
-        new_config = self._update_packet_type(new_config, traffic_options)
-
+        if config_file:
+            with utils.open_relative_file(config_file, task_path) as infile:
+                new_config = ['[EAL]']
+                vpci = []
+                for port in self.vnfd_helper.port_pairs.all_ports:
+                    interface = self.vnfd_helper.find_interface(name=port)
+                    vpci.append(interface['virtual-interface']["vpci"])
+                new_config.extend('w = {0}'.format(item) for item in vpci)
+                new_config = '\n'.join(new_config) + '\n' + infile.read()
+        else:
+            with open(self.CFG_CONFIG) as handle:
+                new_config = handle.read()
+            new_config = self._update_traffic_type(new_config, traffic_options)
+            new_config = self._update_packet_type(new_config, traffic_options)
         self.ssh_helper.upload_config_file(config_basename, new_config)
         self.ssh_helper.upload_config_file(script_basename,
                                            multiport.generate_script(self.vnfd_helper))
@@ -234,7 +204,6 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
 
     def setup_vnf_environment(self):
         self._setup_dpdk()
-        self.bound_pci = [v['virtual-interface']["vpci"] for v in self.vnfd_helper.interfaces]
         self.kill_vnf()
         # bind before _setup_resources so we can use dpdk_port_num
         self._detect_and_bind_drivers()
@@ -250,21 +219,14 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
         self.ssh_helper.execute("sudo killall %s" % self.APP_NAME)
 
     def _setup_dpdk(self):
-        """ setup dpdk environment needed for vnf to run """
-
+        """Setup DPDK environment needed for VNF to run"""
         self._setup_hugepages()
-        self.ssh_helper.execute("sudo modprobe uio && sudo modprobe igb_uio")
+        self.dpdk_bind_helper.load_dpdk_driver()
 
-        exit_status = self.ssh_helper.execute("lsmod | grep -i igb_uio")[0]
+        exit_status = self.dpdk_bind_helper.check_dpdk_driver()
         if exit_status == 0:
             return
 
-        dpdk = self.ssh_helper.join_bin_path(DPDK_VERSION)
-        dpdk_setup = self.ssh_helper.provision_tool(tool_file="nsb_setup.sh")
-        exit_status = self.ssh_helper.execute("which {} >/dev/null 2>&1".format(dpdk))[0]
-        if exit_status != 0:
-            self.ssh_helper.execute("bash %s dpdk >/dev/null 2>&1" % dpdk_setup)
-
     def get_collectd_options(self):
         options = self.scenario_helper.all_options.get("collectd", {})
         # override with specific node settings
@@ -290,9 +252,22 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
                                plugins=plugins, interval=collectd_options.get("interval"),
                                timeout=self.scenario_helper.timeout)
 
+    def _check_interface_fields(self):
+        num_nodes = len(self.scenario_helper.nodes)
+        # OpenStack instance creation time is probably proportional to the number
+        # of instances
+        timeout = 120 * num_nodes
+        dpdk_node = DpdkNode(self.scenario_helper.name, self.vnfd_helper.interfaces,
+                             self.ssh_helper, timeout)
+        dpdk_node.check()
+
     def _detect_and_bind_drivers(self):
         interfaces = self.vnfd_helper.interfaces
 
+        self._check_interface_fields()
+        # check for bound after probe
+        self.bound_pci = [v['virtual-interface']["vpci"] for v in interfaces]
+
         self.dpdk_bind_helper.read_status()
         self.dpdk_bind_helper.save_used_drivers()
 
index 3ab30b5..02e7803 100644 (file)
@@ -22,10 +22,10 @@ import shutil
 from collections import OrderedDict
 from subprocess import call
 
-from yardstick.common.utils import makedirs
+from yardstick.common import utils
 from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNFTrafficGen
 from yardstick.network_services.vnf_generic.vnf.sample_vnf import ClientResourceHelper
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
+
 
 LOG = logging.getLogger(__name__)
 
@@ -93,9 +93,10 @@ class IxLoadResourceHelper(ClientResourceHelper):
     def setup(self):
         # NOTE: fixup scenario_helper to hanlde ixia
         self.resource_file_name = \
-            find_relative_file(self.scenario_helper.scenario_cfg['ixia_profile'],
-                               self.scenario_helper.scenario_cfg["task_path"])
-        makedirs(self.RESULTS_MOUNT)
+            utils.find_relative_file(
+                self.scenario_helper.scenario_cfg['ixia_profile'],
+                self.scenario_helper.scenario_cfg["task_path"])
+        utils.makedirs(self.RESULTS_MOUNT)
         cmd = MOUNT_CMD.format(self.vnfd_helper.mgmt_interface, self)
         LOG.debug(cmd)
 
@@ -103,7 +104,7 @@ class IxLoadResourceHelper(ClientResourceHelper):
             call(cmd, shell=True)
 
         shutil.rmtree(self.RESULTS_MOUNT, ignore_errors=True)
-        makedirs(self.RESULTS_MOUNT)
+        utils.makedirs(self.RESULTS_MOUNT)
         shutil.copy(self.resource_file_name, self.RESULTS_MOUNT)
 
     def make_aggregates(self):
index 630c8b9..265d0b7 100644 (file)
@@ -19,11 +19,11 @@ import os
 import logging
 import sys
 
-from yardstick.common.utils import ErrorClass
+from yardstick.common import utils
+from yardstick import error
 from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNFTrafficGen
 from yardstick.network_services.vnf_generic.vnf.sample_vnf import ClientResourceHelper
 from yardstick.network_services.vnf_generic.vnf.sample_vnf import Rfc2544ResourceHelper
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
 
 LOG = logging.getLogger(__name__)
 
@@ -36,7 +36,7 @@ sys.path.append(IXNET_LIB)
 try:
     from IxNet import IxNextgen
 except ImportError:
-    IxNextgen = ErrorClass
+    IxNextgen = error.ErrorClass
 
 
 class IxiaRfc2544Helper(Rfc2544ResourceHelper):
@@ -122,8 +122,9 @@ class IxiaResourceHelper(ClientResourceHelper):
 
         # we don't know client_file_name until runtime as instantiate
         client_file_name = \
-            find_relative_file(self.scenario_helper.scenario_cfg['ixia_profile'],
-                               self.scenario_helper.scenario_cfg["task_path"])
+            utils.find_relative_file(
+                self.scenario_helper.scenario_cfg['ixia_profile'],
+                self.scenario_helper.scenario_cfg["task_path"])
         self.client.ix_load_config(client_file_name)
         time.sleep(WAIT_AFTER_CFG_LOAD)
 
@@ -149,7 +150,7 @@ class IxiaResourceHelper(ClientResourceHelper):
                 self.client.ix_stop_traffic()
                 samples = self.generate_samples(traffic_profile.ports)
                 self._queue.put(samples)
-                status, samples = traffic_profile.get_drop_percentage(self, samples, min_tol,
+                status, samples = traffic_profile.get_drop_percentage(samples, min_tol,
                                                                       max_tol, self.client, mac)
 
                 current = samples['CurrentDropPercentage']
index 6c95648..61e9985 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import absolute_import
 import logging
 
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
+from yardstick.common import utils
 from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNF, DpdkVnfSetupEnvHelper
 from yardstick.network_services.yang_model import YangModel
 
@@ -60,8 +59,9 @@ class FWApproxVnf(SampleVNF):
         self.vfw_rules = None
 
     def _start_vnf(self):
-        yang_model_path = find_relative_file(self.scenario_helper.options['rules'],
-                                             self.scenario_helper.task_path)
+        yang_model_path = utils.find_relative_file(
+            self.scenario_helper.options['rules'],
+            self.scenario_helper.task_path)
         yang_model = YangModel(yang_model_path)
         self.vfw_rules = yang_model.get_rules()
         super(FWApproxVnf, self)._start_vnf()
diff --git a/yardstick/network_services/vnf_generic/vnf/vnf_ssh_helper.py b/yardstick/network_services/vnf_generic/vnf/vnf_ssh_helper.py
new file mode 100644 (file)
index 0000000..8e02cf3
--- /dev/null
@@ -0,0 +1,61 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+
+from six.moves import StringIO
+
+from yardstick.network_services.constants import REMOTE_TMP
+from yardstick.ssh import AutoConnectSSH
+
+LOG = logging.getLogger(__name__)
+
+
+class VnfSshHelper(AutoConnectSSH):
+
+    def __init__(self, node, bin_path, wait=None):
+        self.node = node
+        kwargs = self.args_from_node(self.node)
+        if wait:
+            # if wait is defined here we want to override
+            kwargs['wait'] = wait
+
+        super(VnfSshHelper, self).__init__(**kwargs)
+        self.bin_path = bin_path
+
+    @staticmethod
+    def get_class():
+        # must return static class name, anything else refers to the calling class
+        # i.e. the subclass, not the superclass
+        return VnfSshHelper
+
+    def copy(self):
+        # this copy constructor is different from SSH classes, since it uses node
+        return self.get_class()(self.node, self.bin_path)
+
+    def upload_config_file(self, prefix, content):
+        cfg_file = os.path.join(REMOTE_TMP, prefix)
+        LOG.debug(content)
+        file_obj = StringIO(content)
+        self.put_file_obj(file_obj, cfg_file)
+        return cfg_file
+
+    def join_bin_path(self, *args):
+        return os.path.join(self.bin_path, *args)
+
+    def provision_tool(self, tool_path=None, tool_file=None):
+        if tool_path is None:
+            tool_path = self.bin_path
+        return super(VnfSshHelper, self).provision_tool(tool_path, tool_file)
index 754482e..20be89f 100644 (file)
@@ -30,17 +30,11 @@ from yardstick.common import template_format
 log = logging.getLogger(__name__)
 
 
-HEAT_KEY_UUID_LENGTH = 8
-
 PROVIDER_SRIOV = "sriov"
 
 _DEPLOYED_STACKS = {}
 
 
-def get_short_key_uuid(uuid):
-    return str(uuid)[:HEAT_KEY_UUID_LENGTH]
-
-
 class HeatStack(object):
     """Represents a Heat stack (deployed template) """
 
@@ -50,6 +44,13 @@ class HeatStack(object):
         self._cloud = shade.openstack_cloud()
         self._stack = None
 
+    def _update_stack_tracking(self):
+        outputs = self._stack.outputs
+        self.outputs = {output['output_key']: output['output_value'] for output
+                        in outputs}
+        if self.uuid:
+            _DEPLOYED_STACKS[self.uuid] = self._stack
+
     def create(self, template, heat_parameters, wait, timeout):
         """Creates an OpenStack stack from a template"""
         with tempfile.NamedTemporaryFile('wb', delete=False) as template_file:
@@ -58,11 +59,21 @@ class HeatStack(object):
             self._stack = self._cloud.create_stack(
                 self.name, template_file=template_file.name, wait=wait,
                 timeout=timeout, **heat_parameters)
-        outputs = self._stack.outputs
-        self.outputs = {output['output_key']: output['output_value'] for output
-                        in outputs}
-        if self.uuid:
-            _DEPLOYED_STACKS[self.uuid] = self._stack
+
+        self._update_stack_tracking()
+
+    def get(self):
+        """Retrieves an existing stack from the target cloud
+
+        Returns a bool indicating whether the stack exists in the target cloud
+        If the stack exists, it will be stored as self._stack
+        """
+        self._stack = self._cloud.get_stack(self.name)
+        if not self._stack:
+            return False
+
+        self._update_stack_tracking()
+        return True
 
     @staticmethod
     def stacks_exist():
@@ -74,7 +85,14 @@ class HeatStack(object):
         if self.uuid is None:
             return
 
-        ret = self._cloud.delete_stack(self.uuid, wait=wait)
+        try:
+            ret = self._cloud.delete_stack(self.uuid, wait=wait)
+        except TypeError:
+            # NOTE(ralonsoh): this exception catch solves a bug in Shade, which
+            # tries to retrieve and read the stack status when it's already
+            # deleted.
+            ret = True
+
         _DEPLOYED_STACKS.pop(self.uuid)
         self._stack = None
         return ret
@@ -406,7 +424,7 @@ name (i.e. %s).
             }
         }
 
-    def add_keypair(self, name, key_uuid):
+    def add_keypair(self, name, key_id):
         """add to the template a Nova KeyPair"""
         log.debug("adding Nova::KeyPair '%s'", name)
         self.resources[name] = {
@@ -418,7 +436,7 @@ name (i.e. %s).
                     pkg_resources.resource_string(
                         'yardstick.resources',
                         'files/yardstick_key-' +
-                        get_short_key_uuid(key_uuid) + '.pub'),
+                        key_id + '.pub'),
                     'utf-8')
             }
         }
@@ -473,7 +491,36 @@ name (i.e. %s).
                      'port_range_max': '65535'},
                     {'remote_ip_prefix': '::/0',
                      'ethertype': 'IPv6',
-                     'protocol': 'ipv6-icmp'}
+                     'protocol': 'ipv6-icmp'},
+                    {'remote_ip_prefix': '0.0.0.0/0',
+                     'direction': 'egress',
+                     'protocol': 'tcp',
+                     'port_range_min': '1',
+                     'port_range_max': '65535'},
+                    {'remote_ip_prefix': '0.0.0.0/0',
+                     'direction': 'egress',
+                     'protocol': 'udp',
+                     'port_range_min': '1',
+                     'port_range_max': '65535'},
+                    {'remote_ip_prefix': '0.0.0.0/0',
+                     'direction': 'egress',
+                     'protocol': 'icmp'},
+                    {'remote_ip_prefix': '::/0',
+                     'direction': 'egress',
+                     'ethertype': 'IPv6',
+                     'protocol': 'tcp',
+                     'port_range_min': '1',
+                     'port_range_max': '65535'},
+                    {'remote_ip_prefix': '::/0',
+                     'direction': 'egress',
+                     'ethertype': 'IPv6',
+                     'protocol': 'udp',
+                     'port_range_min': '1',
+                     'port_range_max': '65535'},
+                    {'remote_ip_prefix': '::/0',
+                     'direction': 'egress',
+                     'ethertype': 'IPv6',
+                     'protocol': 'ipv6-icmp'},
                 ]
             }
         }
index 6ddf327..d7adc0d 100644 (file)
@@ -78,7 +78,7 @@ from oslo_utils import encodeutils
 from scp import SCPClient
 import six
 
-from yardstick.common.utils import try_int
+from yardstick.common.utils import try_int, NON_NONE_DEFAULT, make_dict_from_map
 from yardstick.network_services.utils import provision_tool
 
 
@@ -102,6 +102,7 @@ class SSH(object):
     """Represent ssh connection."""
 
     SSH_PORT = paramiko.config.SSH_PORT
+    DEFAULT_WAIT_TIMEOUT = 120
 
     @staticmethod
     def gen_keys(key_filename, bit_count=2048):
@@ -120,6 +121,18 @@ class SSH(object):
         # i.e. the subclass, not the superclass
         return SSH
 
+    @classmethod
+    def get_arg_key_map(cls):
+        return {
+            'user': ('user', NON_NONE_DEFAULT),
+            'host': ('ip', NON_NONE_DEFAULT),
+            'port': ('ssh_port', cls.SSH_PORT),
+            'pkey': ('pkey', None),
+            'key_filename': ('key_filename', None),
+            'password': ('password', None),
+            'name': ('name', None),
+        }
+
     def __init__(self, user, host, port=None, pkey=None,
                  key_filename=None, password=None, name=None):
         """Initialize SSH client.
@@ -137,6 +150,7 @@ class SSH(object):
         else:
             self.log = logging.getLogger(__name__)
 
+        self.wait_timeout = self.DEFAULT_WAIT_TIMEOUT
         self.user = user
         self.host = host
         # everybody wants to debug this in the caller, do it here instead
@@ -162,16 +176,9 @@ class SSH(object):
             overrides = {}
         if defaults is None:
             defaults = {}
+
         params = ChainMap(overrides, node, defaults)
-        return {
-            'user': params['user'],
-            'host': params['ip'],
-            'port': params.get('ssh_port', cls.SSH_PORT),
-            'pkey': params.get('pkey'),
-            'key_filename': params.get('key_filename'),
-            'password': params.get('password'),
-            'name': params.get('name'),
-        }
+        return make_dict_from_map(params, cls.get_arg_key_map())
 
     @classmethod
     def from_node(cls, node, overrides=None, defaults=None):
@@ -186,7 +193,7 @@ class SSH(object):
                 return key_class.from_private_key(key)
             except paramiko.SSHException as e:
                 errors.append(e)
-        raise SSHError("Invalid pkey: %s" % (errors))
+        raise SSHError("Invalid pkey: %s" % errors)
 
     @property
     def is_connected(self):
@@ -287,7 +294,7 @@ class SSH(object):
 
         while True:
             # Block until data can be read/write.
-            r, w, e = select.select([session], writes, [session], 1)
+            e = select.select([session], writes, [session], 1)[2]
 
             if session.recv_ready():
                 data = encodeutils.safe_decode(session.recv(4096), 'utf-8')
@@ -361,17 +368,20 @@ class SSH(object):
         stderr.seek(0)
         return exit_status, stdout.read(), stderr.read()
 
-    def wait(self, timeout=120, interval=1):
+    def wait(self, timeout=None, interval=1):
         """Wait for the host will be available via ssh."""
-        start_time = time.time()
+        if timeout is None:
+            timeout = self.wait_timeout
+
+        end_time = time.time() + timeout
         while True:
             try:
                 return self.execute("uname")
             except (socket.error, SSHError) as e:
                 self.log.debug("Ssh is still unavailable: %r", e)
                 time.sleep(interval)
-            if time.time() > (start_time + timeout):
-                raise SSHTimeout("Timeout waiting for '%s'", self.host)
+            if time.time() > end_time:
+                raise SSHTimeout("Timeout waiting for '%s'" % self.host)
 
     def put(self, files, remote_path=b'.', recursive=False):
         client = self._get_client()
@@ -447,24 +457,40 @@ class SSH(object):
 
 class AutoConnectSSH(SSH):
 
+    @classmethod
+    def get_arg_key_map(cls):
+        arg_key_map = super(AutoConnectSSH, cls).get_arg_key_map()
+        arg_key_map['wait'] = ('wait', True)
+        return arg_key_map
+
     # always wait or we will get OpenStack SSH errors
     def __init__(self, user, host, port=None, pkey=None,
                  key_filename=None, password=None, name=None, wait=True):
         super(AutoConnectSSH, self).__init__(user, host, port, pkey, key_filename, password, name)
-        self._wait = wait
+        if wait and wait is not True:
+            self.wait_timeout = int(wait)
 
     def _make_dict(self):
         data = super(AutoConnectSSH, self)._make_dict()
         data.update({
-            'wait': self._wait
+            'wait': self.wait_timeout
         })
         return data
 
     def _connect(self):
         if not self.is_connected:
-            self._get_client()
-            if self._wait:
-                self.wait()
+            interval = 1
+            timeout = self.wait_timeout
+
+            end_time = time.time() + timeout
+            while True:
+                try:
+                    return self._get_client()
+                except (socket.error, SSHError) as e:
+                    self.log.debug("Ssh is still unavailable: %r", e)
+                    time.sleep(interval)
+                if time.time() > end_time:
+                    raise SSHTimeout("Timeout waiting for '%s'" % self.host)
 
     def drop_connection(self):
         """ Don't close anything, just force creation of a new client """
index e69de29..56e3106 100644 (file)
@@ -0,0 +1,75 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+
+
+STL_MOCKS = {
+    'trex_stl_lib': mock.MagicMock(),
+    'trex_stl_lib.base64': mock.MagicMock(),
+    'trex_stl_lib.binascii': mock.MagicMock(),
+    'trex_stl_lib.collections': mock.MagicMock(),
+    'trex_stl_lib.copy': mock.MagicMock(),
+    'trex_stl_lib.datetime': mock.MagicMock(),
+    'trex_stl_lib.functools': mock.MagicMock(),
+    'trex_stl_lib.imp': mock.MagicMock(),
+    'trex_stl_lib.inspect': mock.MagicMock(),
+    'trex_stl_lib.json': mock.MagicMock(),
+    'trex_stl_lib.linecache': mock.MagicMock(),
+    'trex_stl_lib.math': mock.MagicMock(),
+    'trex_stl_lib.os': mock.MagicMock(),
+    'trex_stl_lib.platform': mock.MagicMock(),
+    'trex_stl_lib.pprint': mock.MagicMock(),
+    'trex_stl_lib.random': mock.MagicMock(),
+    'trex_stl_lib.re': mock.MagicMock(),
+    'trex_stl_lib.scapy': mock.MagicMock(),
+    'trex_stl_lib.socket': mock.MagicMock(),
+    'trex_stl_lib.string': mock.MagicMock(),
+    'trex_stl_lib.struct': mock.MagicMock(),
+    'trex_stl_lib.sys': mock.MagicMock(),
+    'trex_stl_lib.threading': mock.MagicMock(),
+    'trex_stl_lib.time': mock.MagicMock(),
+    'trex_stl_lib.traceback': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_async_client': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_client': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_exceptions': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_ext': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_jsonrpc_client': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_packet_builder_interface': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_packet_builder_scapy': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_port': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_stats': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_streams': mock.MagicMock(),
+    'trex_stl_lib.trex_stl_types': mock.MagicMock(),
+    'trex_stl_lib.types': mock.MagicMock(),
+    'trex_stl_lib.utils': mock.MagicMock(),
+    'trex_stl_lib.utils.argparse': mock.MagicMock(),
+    'trex_stl_lib.utils.collections': mock.MagicMock(),
+    'trex_stl_lib.utils.common': mock.MagicMock(),
+    'trex_stl_lib.utils.json': mock.MagicMock(),
+    'trex_stl_lib.utils.os': mock.MagicMock(),
+    'trex_stl_lib.utils.parsing_opts': mock.MagicMock(),
+    'trex_stl_lib.utils.pwd': mock.MagicMock(),
+    'trex_stl_lib.utils.random': mock.MagicMock(),
+    'trex_stl_lib.utils.re': mock.MagicMock(),
+    'trex_stl_lib.utils.string': mock.MagicMock(),
+    'trex_stl_lib.utils.sys': mock.MagicMock(),
+    'trex_stl_lib.utils.text_opts': mock.MagicMock(),
+    'trex_stl_lib.utils.text_tables': mock.MagicMock(),
+    'trex_stl_lib.utils.texttable': mock.MagicMock(),
+    'trex_stl_lib.warnings': mock.MagicMock(),
+    'trex_stl_lib.yaml': mock.MagicMock(),
+    'trex_stl_lib.zlib': mock.MagicMock(),
+    'trex_stl_lib.zmq': mock.MagicMock(),
+}
diff --git a/yardstick/tests/fixture.py b/yardstick/tests/fixture.py
new file mode 100644 (file)
index 0000000..94d20eb
--- /dev/null
@@ -0,0 +1,47 @@
+# Copyright 2017 Intel Corporation
+# All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import fixtures
+import mock
+import six
+
+from yardstick.common import task_template
+
+
+class PluginParserFixture(fixtures.Fixture):
+    """PluginParser fixture.
+
+    This class is intended to be used as a fixture within unit tests and
+    therefore consumers must register it using useFixture() within their
+    unit test class.
+    """
+
+    def __init__(self, rendered_plugin):
+        super(PluginParserFixture, self).__init__()
+        self._rendered_plugin = rendered_plugin
+
+    def _setUp(self):
+        self.addCleanup(self._restore)
+        self._mock_tasktemplate_render = mock.patch.object(
+            task_template.TaskTemplate, 'render')
+        self.mock_tasktemplate_render = self._mock_tasktemplate_render.start()
+        self.mock_tasktemplate_render.return_value = self._rendered_plugin
+        self._mock_open = mock.patch.object(six.moves.builtins, 'open', create=True)
+        self.mock_open = self._mock_open.start()
+        self.mock_open.side_effect = mock.mock_open()
+
+    def _restore(self):
+        self._mock_tasktemplate_render.stop()
+        self._mock_open.stop()
diff --git a/yardstick/tests/functional/base.py b/yardstick/tests/functional/base.py
new file mode 100644 (file)
index 0000000..51be013
--- /dev/null
@@ -0,0 +1,46 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import abc
+import six
+
+from oslo_config import cfg
+from oslotest import base
+
+
+CONF = cfg.CONF
+
+
+@six.add_metaclass(abc.ABCMeta)
+class BaseFunctionalTestCase(base.BaseTestCase):
+    """Base class for functional tests."""
+
+    def setUp(self):
+        super(BaseFunctionalTestCase, self).setUp()
+
+    def config(self, **kw):
+        """Override some configuration values.
+
+        The keyword arguments are the names of configuration options to
+        override and their values.
+
+        If a group argument is supplied, the overrides are applied to
+        the specified configuration option group.
+
+        All overrides are automatically cleared at the end of the current
+        test by the fixtures cleanup process.
+        """
+        group = kw.pop('group', None)
+        for k, v in kw.items():
+            CONF.set_override(k, v, group)
diff --git a/yardstick/tests/functional/benchmark/__init__.py b/yardstick/tests/functional/benchmark/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/yardstick/tests/functional/benchmark/scenarios/__init__.py b/yardstick/tests/functional/benchmark/scenarios/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/yardstick/tests/functional/benchmark/scenarios/networking/__init__.py b/yardstick/tests/functional/benchmark/scenarios/networking/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/yardstick/tests/functional/benchmark/scenarios/networking/test_vnf_generic.py b/yardstick/tests/functional/benchmark/scenarios/networking/test_vnf_generic.py
new file mode 100644 (file)
index 0000000..38f1a97
--- /dev/null
@@ -0,0 +1,195 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import sys
+
+import mock
+import unittest
+
+from yardstick import tests as y_tests
+from yardstick.common import utils
+
+
+with mock.patch.dict(sys.modules, y_tests.STL_MOCKS):
+    from yardstick.benchmark.scenarios.networking import vnf_generic
+
+
+TRAFFIC_PROFILE_1 = """
+schema: nsb:traffic_profile:0.1
+name: rfc2544
+description: Traffic profile to run RFC2544 latency
+traffic_profile:
+  traffic_type : RFC2544Profile
+  frame_rate : 100
+uplink_0:
+  ipv4:
+    id: 1
+    outer_l2:
+      framesize:
+        64B: "{{get(imix, 'imix.uplink.64B', '0') }}"
+        128B: "{{get(imix, 'imix.uplink.128B', '0') }}"
+"""
+
+TRAFFIC_PROFILE_2 = """
+{% set vports = get(extra_args, 'vports', 1) %}
+traffic_profile:
+  traffic_type : RFC2544Profile
+{% for vport in range(vports|int) %}
+uplink_{{vport}}:
+  ipv4: 192.168.0.{{vport}}
+{% endfor %}
+"""
+
+TOPOLOGY_PROFILE = """
+{% set vports = get(extra_args, 'vports', 2) %}
+nsd:nsd-catalog:
+    nsd:
+    -   id: 3tg-topology
+        vld:
+{% for vport in range(0,vports,2|int) %}
+        -   id: uplink_{{loop.index0}}
+            name: tg__0 to vnf__0 link {{vport + 1}}
+            vnfd-connection-point-ref:
+            -   vnfd-connection-point-ref: xe{{vport}}
+        -   id: downlink_{{loop.index0}}
+            name: vnf__0 to tg__0 link {{vport + 2}}
+            vnfd-connection-point-ref:
+            -   vnfd-connection-point-ref: xe{{vport+1}}
+{% endfor %}
+"""
+
+class VnfGenericTestCase(unittest.TestCase):
+
+    def setUp(self):
+        scenario_cfg = {'topology': 'fake_topology',
+                        'task_path': 'fake_path',
+                        'traffic_profile': 'fake_fprofile_path'}
+        context_cfg = {}
+        topology_yaml = {'nsd:nsd-catalog': {'nsd': [mock.Mock()]}}
+
+        with mock.patch.object(utils, 'open_relative_file') as mock_open_path:
+            mock_open_path.side_effect = mock.mock_open(read_data=str(topology_yaml))
+            self.ns_testcase = vnf_generic.NetworkServiceTestCase(scenario_cfg,
+                                                                  context_cfg)
+        self.ns_testcase._get_traffic_profile = mock.Mock()
+        self.ns_testcase._get_topology = mock.Mock()
+
+    def test__fill_traffic_profile_no_args(self):
+        traffic_profile = copy.deepcopy(TRAFFIC_PROFILE_1)
+        self.ns_testcase._get_traffic_profile.return_value = traffic_profile
+
+        self.ns_testcase._fill_traffic_profile()
+        config = self.ns_testcase.traffic_profile.params
+        self.assertEqual('nsb:traffic_profile:0.1', config['schema'])
+        self.assertEqual('rfc2544', config['name'])
+        self.assertEqual('Traffic profile to run RFC2544 latency',
+                         config['description'])
+        t_profile = {'traffic_type': 'RFC2544Profile',
+                     'frame_rate': 100}
+        self.assertEqual(t_profile, config['traffic_profile'])
+        uplink_0 = {
+            'ipv4': {'id': 1,
+                     'outer_l2': {'framesize': {'128B': '0', '64B': '0'}}
+                     }
+        }
+        self.assertEqual(uplink_0, config['uplink_0'])
+
+    def test__fill_traffic_profile_with_args(self):
+        traffic_profile = copy.deepcopy(TRAFFIC_PROFILE_2)
+        self.ns_testcase._get_traffic_profile.return_value = traffic_profile
+        self.ns_testcase.scenario_cfg['extra_args'] = {'vports': 3}
+
+        self.ns_testcase._fill_traffic_profile()
+        config = self.ns_testcase.traffic_profile.params
+        self.assertEqual({'ipv4': '192.168.0.0'}, config['uplink_0'])
+        self.assertEqual({'ipv4': '192.168.0.1'}, config['uplink_1'])
+        self.assertEqual({'ipv4': '192.168.0.2'}, config['uplink_2'])
+        self.assertNotIn('uplink_3', config)
+
+    def test__fill_traffic_profile_incorrect_args(self):
+        traffic_profile = copy.deepcopy(TRAFFIC_PROFILE_2)
+        self.ns_testcase._get_traffic_profile.return_value = traffic_profile
+        self.ns_testcase.scenario_cfg['extra_args'] = {'incorrect_vports': 3}
+
+        self.ns_testcase._fill_traffic_profile()
+        config = self.ns_testcase.traffic_profile.params
+        self.assertEqual({'ipv4': '192.168.0.0'}, config['uplink_0'])
+        self.assertNotIn('uplink_1', config)
+
+    def test__render_topology_with_args(self):
+        topology_profile = copy.deepcopy(TOPOLOGY_PROFILE)
+        self.ns_testcase._get_topology.return_value = topology_profile
+        self.ns_testcase.scenario_cfg['extra_args'] = {'vports': 6}
+
+        self.ns_testcase._render_topology()
+        topology = self.ns_testcase.topology
+        self.assertEqual("3tg-topology", topology['id'])
+        vld = self.ns_testcase.topology['vld']
+        self.assertEqual(len(vld), 6)
+        for index, vport in enumerate(range(0, 6, 2)):
+            self.assertEqual('uplink_{}'.format(index), vld[vport]['id'])
+            self.assertEqual('tg__0 to vnf__0 link {}'.format(vport + 1), vld[vport]['name'])
+            self.assertEqual('xe{}'.format(vport),
+                             vld[vport]['vnfd-connection-point-ref'][0]
+                             ['vnfd-connection-point-ref'])
+
+            self.assertEqual('downlink_{}'.format(index), vld[vport + 1]['id'])
+            self.assertEqual('vnf__0 to tg__0 link {}'.format(vport + 2), vld[vport + 1]['name'])
+            self.assertEqual('xe{}'.format(vport + 1),
+                             vld[vport + 1]['vnfd-connection-point-ref'][0]
+                             ['vnfd-connection-point-ref'])
+
+    def test__render_topology_incorrect_args(self):
+        topology_profile = copy.deepcopy(TOPOLOGY_PROFILE)
+        self.ns_testcase._get_topology.return_value = topology_profile
+        self.ns_testcase.scenario_cfg['extra_args'] = {'fake_vports': 5}
+
+        self.ns_testcase._render_topology()
+
+        topology = self.ns_testcase.topology
+        self.assertEqual("3tg-topology", topology['id'])
+        vld = self.ns_testcase.topology['vld']
+        self.assertEqual(len(vld), 2)
+
+        self.assertEqual('uplink_0', vld[0]['id'])
+        self.assertEqual('tg__0 to vnf__0 link 1', vld[0]['name'])
+        self.assertEqual('xe0',
+                         vld[0]['vnfd-connection-point-ref'][0]['vnfd-connection-point-ref'])
+
+        self.assertEqual('downlink_0', vld[1]['id'])
+        self.assertEqual('vnf__0 to tg__0 link 2', vld[1]['name'])
+        self.assertEqual('xe1',
+                         vld[1]['vnfd-connection-point-ref'][0]['vnfd-connection-point-ref'])
+
+    def test__render_topology_no_args(self):
+        topology_profile = copy.deepcopy(TOPOLOGY_PROFILE)
+        self.ns_testcase._get_topology.return_value = topology_profile
+
+        self.ns_testcase._render_topology()
+
+        topology = self.ns_testcase.topology
+        self.assertEqual("3tg-topology", topology['id'])
+        vld = self.ns_testcase.topology['vld']
+        self.assertEqual(len(vld), 2)
+
+        self.assertEqual('uplink_0', vld[0]['id'])
+        self.assertEqual('tg__0 to vnf__0 link 1', vld[0]['name'])
+        self.assertEqual('xe0',
+                         vld[0]['vnfd-connection-point-ref'][0]['vnfd-connection-point-ref'])
+
+        self.assertEqual('downlink_0', vld[1]['id'])
+        self.assertEqual('vnf__0 to tg__0 link 2', vld[1]['name'])
+        self.assertEqual('xe1',
+                         vld[1]['vnfd-connection-point-ref'][0]['vnfd-connection-point-ref'])
diff --git a/yardstick/tests/functional/common/fake_directory_package/README.md b/yardstick/tests/functional/common/fake_directory_package/README.md
new file mode 100644 (file)
index 0000000..689e470
--- /dev/null
@@ -0,0 +1,2 @@
+# yardstick_new_plugin
+Yardstick plugin
diff --git a/yardstick/tests/functional/common/fake_directory_package/setup.py b/yardstick/tests/functional/common/fake_directory_package/setup.py
new file mode 100644 (file)
index 0000000..cf938ef
--- /dev/null
@@ -0,0 +1,29 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from setuptools import setup, find_packages
+
+setup(
+    name='yardstick_new_plugin_2',
+    version='1.0.0',
+    packages=find_packages(),
+    include_package_data=True,
+    url='https://www.opnfv.org',
+    entry_points={
+        'yardstick.scenarios': [
+            'Dummy2 = yardstick_new_plugin.benchmark.scenarios.dummy2.dummy2:'
+            'Dummy2',
+        ]
+    },
+)
diff --git a/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/__init__.py b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/__init__.py b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/__init__.py b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/__init__.py b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/dummy2.py b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/dummy2.py
new file mode 100644 (file)
index 0000000..a2211ec
--- /dev/null
@@ -0,0 +1,40 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from yardstick.benchmark.scenarios import base
+
+
+LOG = logging.getLogger(__name__)
+
+
+class Dummy2(base.Scenario):
+    """Execute Dummy (v2!) echo"""
+    __scenario_type__ = "Dummy2"
+
+    def __init__(self, scenario_cfg, context_cfg):
+        self.scenario_cfg = scenario_cfg
+        self.context_cfg = context_cfg
+        self.setup_done = False
+
+    def setup(self):
+        self.setup_done = True
+
+    def run(self, result):
+        if not self.setup_done:
+            self.setup()
+
+        result["hello"] = "yardstick"
+        LOG.info("Dummy (v2!) echo hello yardstick!")
diff --git a/yardstick/tests/functional/common/fake_pip_package/yardstick_new_plugin-1.0.0.tar.gz b/yardstick/tests/functional/common/fake_pip_package/yardstick_new_plugin-1.0.0.tar.gz
new file mode 100644 (file)
index 0000000..e5379a7
Binary files /dev/null and b/yardstick/tests/functional/common/fake_pip_package/yardstick_new_plugin-1.0.0.tar.gz differ
diff --git a/yardstick/tests/functional/common/test_packages.py b/yardstick/tests/functional/common/test_packages.py
new file mode 100644 (file)
index 0000000..5dead4e
--- /dev/null
@@ -0,0 +1,94 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from os import path
+import re
+
+from yardstick.common import packages
+from yardstick.common import utils
+from yardstick.tests.functional import base
+
+
+class PipPackagesTestCase(base.BaseFunctionalTestCase):
+
+    TMP_FOLDER = '/tmp/pip_packages/'
+    PYTHONPATH = 'PYTHONPATH=%s' % TMP_FOLDER
+
+    def setUp(self):
+        super(PipPackagesTestCase, self).setUp()
+        privsep_helper = os.path.join(
+            os.getenv('VIRTUAL_ENV'), 'bin', 'privsep-helper')
+        self.config(
+            helper_command=' '.join(['sudo', '-EH', privsep_helper]),
+            group='yardstick_privileged')
+        self.addCleanup(self._cleanup)
+
+    def _cleanup(self):
+        utils.execute_command('sudo rm -rf %s' % self.TMP_FOLDER)
+
+    def _remove_package(self, package):
+        os.system('%s pip uninstall %s -y' % (self.PYTHONPATH, package))
+
+    def _list_packages(self):
+        pip_list_regex = re.compile(
+            r"(?P<name>[\w\.-]+) \((?P<version>[\w\d_\.\-]+),*.*\)")
+        pkg_dict = {}
+        pkgs = utils.execute_command('pip list',
+                                     env={'PYTHONPATH': self.TMP_FOLDER})
+        for line in pkgs:
+            match = pip_list_regex.match(line)
+            if match and match.group('name'):
+                pkg_dict[match.group('name')] = match.group('version')
+        return pkg_dict
+
+    def test_install_from_folder(self):
+        dirname = path.dirname(__file__)
+        package_dir = dirname + '/fake_directory_package'
+        package_name = 'yardstick-new-plugin-2'
+        self.addCleanup(self._remove_package, package_name)
+        self._remove_package(package_name)
+        self.assertFalse(package_name in self._list_packages())
+
+        self.assertEqual(0, packages.pip_install(package_dir, self.TMP_FOLDER))
+        self.assertTrue(package_name in self._list_packages())
+
+    def test_install_from_pip_package(self):
+        dirname = path.dirname(__file__)
+        package_path = (dirname +
+                        '/fake_pip_package/yardstick_new_plugin-1.0.0.tar.gz')
+        package_name = 'yardstick-new-plugin'
+        self.addCleanup(self._remove_package, package_name)
+        self._remove_package(package_name)
+        self.assertFalse(package_name in self._list_packages())
+
+        self.assertEqual(0, packages.pip_install(package_path, self.TMP_FOLDER))
+        self.assertTrue(package_name in self._list_packages())
+
+    # NOTE(ralonsoh): a stable test plugin project is needed in the OPNFV git
+    # server in order to execute this test.
+    # def test_install_from_url(self):
+
+    def test_pip_freeze(self):
+        # NOTE(ralonsoh): versions are taken from the requirements.txt file.
+        # The best way to test this function is to parse requirements.txt and
+        # test-requirements.txt and check all packages.
+        pkgs_ref = {'Babel': '2.3.4',
+                    'SQLAlchemy': '1.1.12',
+                    'influxdb': '4.1.1',
+                    'netifaces': '0.10.6',
+                    'unicodecsv': '0.14.1'}
+        pkgs = packages.pip_list()
+        for name, version in (pkgs_ref.items()):
+            self.assertEqual(version, pkgs[name])
index a468b27..c05f91c 100644 (file)
 # See the License for the specific language governing permissions and\r
 # limitations under the License.\r
 \r
-from __future__ import absolute_import\r
+import sys\r
+\r
 import mock\r
 \r
+from yardstick import tests\r
+\r
 \r
-STL_MOCKS = {\r
-    'trex_stl_lib': mock.MagicMock(),\r
-    'trex_stl_lib.base64': mock.MagicMock(),\r
-    'trex_stl_lib.binascii': mock.MagicMock(),\r
-    'trex_stl_lib.collections': mock.MagicMock(),\r
-    'trex_stl_lib.copy': mock.MagicMock(),\r
-    'trex_stl_lib.datetime': mock.MagicMock(),\r
-    'trex_stl_lib.functools': mock.MagicMock(),\r
-    'trex_stl_lib.imp': mock.MagicMock(),\r
-    'trex_stl_lib.inspect': mock.MagicMock(),\r
-    'trex_stl_lib.json': mock.MagicMock(),\r
-    'trex_stl_lib.linecache': mock.MagicMock(),\r
-    'trex_stl_lib.math': mock.MagicMock(),\r
-    'trex_stl_lib.os': mock.MagicMock(),\r
-    'trex_stl_lib.platform': mock.MagicMock(),\r
-    'trex_stl_lib.pprint': mock.MagicMock(),\r
-    'trex_stl_lib.random': mock.MagicMock(),\r
-    'trex_stl_lib.re': mock.MagicMock(),\r
-    'trex_stl_lib.scapy': mock.MagicMock(),\r
-    'trex_stl_lib.socket': mock.MagicMock(),\r
-    'trex_stl_lib.string': mock.MagicMock(),\r
-    'trex_stl_lib.struct': mock.MagicMock(),\r
-    'trex_stl_lib.sys': mock.MagicMock(),\r
-    'trex_stl_lib.threading': mock.MagicMock(),\r
-    'trex_stl_lib.time': mock.MagicMock(),\r
-    'trex_stl_lib.traceback': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_async_client': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_client': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_exceptions': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_ext': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_jsonrpc_client': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_packet_builder_interface': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_packet_builder_scapy': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_port': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_stats': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_streams': mock.MagicMock(),\r
-    'trex_stl_lib.trex_stl_types': mock.MagicMock(),\r
-    'trex_stl_lib.types': mock.MagicMock(),\r
-    'trex_stl_lib.utils': mock.MagicMock(),\r
-    'trex_stl_lib.utils.argparse': mock.MagicMock(),\r
-    'trex_stl_lib.utils.collections': mock.MagicMock(),\r
-    'trex_stl_lib.utils.common': mock.MagicMock(),\r
-    'trex_stl_lib.utils.json': mock.MagicMock(),\r
-    'trex_stl_lib.utils.os': mock.MagicMock(),\r
-    'trex_stl_lib.utils.parsing_opts': mock.MagicMock(),\r
-    'trex_stl_lib.utils.pwd': mock.MagicMock(),\r
-    'trex_stl_lib.utils.random': mock.MagicMock(),\r
-    'trex_stl_lib.utils.re': mock.MagicMock(),\r
-    'trex_stl_lib.utils.string': mock.MagicMock(),\r
-    'trex_stl_lib.utils.sys': mock.MagicMock(),\r
-    'trex_stl_lib.utils.text_opts': mock.MagicMock(),\r
-    'trex_stl_lib.utils.text_tables': mock.MagicMock(),\r
-    'trex_stl_lib.utils.texttable': mock.MagicMock(),\r
-    'trex_stl_lib.warnings': mock.MagicMock(),\r
-    'trex_stl_lib.yaml': mock.MagicMock(),\r
-    'trex_stl_lib.zlib': mock.MagicMock(),\r
-    'trex_stl_lib.zmq': mock.MagicMock(),\r
-}\r
+mock_stl = mock.patch.dict(sys.modules, tests.STL_MOCKS)\r
+mock_stl.start()\r
index cf646a2..6578416 100644 (file)
@@ -33,11 +33,3 @@ class EnvTestCase(APITestCase):
         time.sleep(0)
 
         self.assertIn(u'status', resp)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 883608b..dce6c1c 100644 (file)
@@ -50,11 +50,3 @@ class QueryTestCase(unittest.TestCase):
             influx.query(sql)
         except Exception as e:  # pylint: disable=broad-except
             self.assertIsInstance(e, RuntimeError)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
diff --git a/yardstick/tests/unit/base.py b/yardstick/tests/unit/base.py
new file mode 100644 (file)
index 0000000..b943efc
--- /dev/null
@@ -0,0 +1,23 @@
+# Copyright 2018 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import abc
+
+import six
+import unittest
+
+
+@six.add_metaclass(abc.ABCMeta)
+class BaseUnitTestCase(unittest.TestCase):
+    """Base class for unit tests"""
index 02a8552..0223fd3 100644 (file)
@@ -26,14 +26,6 @@ class OvsDpdkContextTestCase(unittest.TestCase):
     NODES_ovs_dpdk_SAMPLE = "nodes_ovs_dpdk_sample.yaml"
     NODES_DUPLICATE_SAMPLE = "nodes_duplicate_sample.yaml"
 
-    ATTRS = {
-        'name': 'StandaloneOvsDpdk',
-        'file': 'pod',
-        'flavor': {},
-        'servers': {},
-        'networks': {},
-    }
-
     NETWORKS = {
         'mgmt': {'cidr': '152.16.100.10/24'},
         'private_0': {
@@ -55,7 +47,17 @@ class OvsDpdkContextTestCase(unittest.TestCase):
     }
 
     def setUp(self):
+        self.attrs = {
+            'name': 'foo',
+            'task_id': '1234567890',
+            'file': self._get_file_abspath(self.NODES_ovs_dpdk_SAMPLE)
+        }
         self.ovs_dpdk = ovs_dpdk.OvsDpdkContext()
+        self.addCleanup(self._remove_contexts)
+
+    def _remove_contexts(self):
+        if self.ovs_dpdk in self.ovs_dpdk.list:
+            self.ovs_dpdk._delete_context()
 
     @mock.patch('yardstick.benchmark.contexts.standalone.model.Server')
     @mock.patch('yardstick.benchmark.contexts.standalone.model.StandaloneContextHelper')
@@ -66,9 +68,18 @@ class OvsDpdkContextTestCase(unittest.TestCase):
         self.assertTrue(self.ovs_dpdk.first_run)
 
     def test_init(self):
+        ATTRS = {
+            'name': 'StandaloneOvsDpdk',
+            'task_id': '1234567890',
+            'file': 'pod',
+            'flavor': {},
+            'servers': {},
+            'networks': {},
+        }
+
         self.ovs_dpdk.helper.parse_pod_file = mock.Mock(
             return_value=[{}, {}, {}])
-        self.assertIsNone(self.ovs_dpdk.init(self.ATTRS))
+        self.assertIsNone(self.ovs_dpdk.init(ATTRS))
 
     def test_setup_ovs(self):
         with mock.patch("yardstick.ssh.SSH") as ssh:
@@ -186,12 +197,7 @@ class OvsDpdkContextTestCase(unittest.TestCase):
 
     def test__get_server_with_dic_attr_name(self):
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_ovs_dpdk_SAMPLE)
-        }
-
-        self.ovs_dpdk.init(attrs)
+        self.ovs_dpdk.init(self.attrs)
 
         attr_name = {'name': 'foo.bar'}
         result = self.ovs_dpdk._get_server(attr_name)
@@ -200,14 +206,9 @@ class OvsDpdkContextTestCase(unittest.TestCase):
 
     def test__get_server_not_found(self):
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_ovs_dpdk_SAMPLE)
-        }
-
         self.ovs_dpdk.helper.parse_pod_file = mock.Mock(
             return_value=[{}, {}, {}])
-        self.ovs_dpdk.init(attrs)
+        self.ovs_dpdk.init(self.attrs)
 
         attr_name = 'bar.foo'
         result = self.ovs_dpdk._get_server(attr_name)
@@ -216,12 +217,7 @@ class OvsDpdkContextTestCase(unittest.TestCase):
 
     def test__get_server_mismatch(self):
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_ovs_dpdk_SAMPLE)
-        }
-
-        self.ovs_dpdk.init(attrs)
+        self.ovs_dpdk.init(self.attrs)
 
         attr_name = 'bar.foo1'
         result = self.ovs_dpdk._get_server(attr_name)
@@ -230,31 +226,23 @@ class OvsDpdkContextTestCase(unittest.TestCase):
 
     def test__get_server_duplicate(self):
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_DUPLICATE_SAMPLE)
-        }
+        self.attrs['file'] = self._get_file_abspath(self.NODES_DUPLICATE_SAMPLE)
 
-        self.ovs_dpdk.init(attrs)
+        self.ovs_dpdk.init(self.attrs)
 
-        attr_name = 'node1.foo'
+        attr_name = 'node1.foo-12345678'
         with self.assertRaises(ValueError):
             self.ovs_dpdk._get_server(attr_name)
 
     def test__get_server_found(self):
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_ovs_dpdk_SAMPLE)
-        }
-
-        self.ovs_dpdk.init(attrs)
+        self.ovs_dpdk.init(self.attrs)
 
-        attr_name = 'node1.foo'
+        attr_name = 'node1.foo-12345678'
         result = self.ovs_dpdk._get_server(attr_name)
 
         self.assertEqual(result['ip'], '10.229.47.137')
-        self.assertEqual(result['name'], 'node1.foo')
+        self.assertEqual(result['name'], 'node1.foo-12345678')
         self.assertEqual(result['user'], 'root')
         self.assertEqual(result['key_filename'], '/root/.yardstick_key')
 
index f323fcd..f0953ef 100644 (file)
@@ -29,6 +29,7 @@ class SriovContextTestCase(unittest.TestCase):
 
     ATTRS = {
         'name': 'StandaloneSriov',
+        'task_id': '1234567890',
         'file': 'pod',
         'flavor': {},
         'servers': {},
@@ -56,7 +57,17 @@ class SriovContextTestCase(unittest.TestCase):
     }
 
     def setUp(self):
+        self.attrs = {
+            'name': 'foo',
+            'task_id': '1234567890',
+            'file': self._get_file_abspath(self.NODES_SRIOV_SAMPLE)
+        }
         self.sriov = sriov.SriovContext()
+        self.addCleanup(self._remove_contexts)
+
+    def _remove_contexts(self):
+        if self.sriov in self.sriov.list:
+            self.sriov._delete_context()
 
     @mock.patch('yardstick.benchmark.contexts.standalone.sriov.Libvirt')
     @mock.patch('yardstick.benchmark.contexts.standalone.model.StandaloneContextHelper')
@@ -105,12 +116,7 @@ class SriovContextTestCase(unittest.TestCase):
 
     def test__get_server_with_dic_attr_name(self):
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SRIOV_SAMPLE)
-        }
-
-        self.sriov.init(attrs)
+        self.sriov.init(self.attrs)
 
         attr_name = {'name': 'foo.bar'}
         result = self.sriov._get_server(attr_name)
@@ -119,13 +125,8 @@ class SriovContextTestCase(unittest.TestCase):
 
     def test__get_server_not_found(self):
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SRIOV_SAMPLE)
-        }
-
         self.sriov.helper.parse_pod_file = mock.Mock(return_value=[{}, {}, {}])
-        self.sriov.init(attrs)
+        self.sriov.init(self.attrs)
 
         attr_name = 'bar.foo'
         result = self.sriov._get_server(attr_name)
@@ -134,12 +135,7 @@ class SriovContextTestCase(unittest.TestCase):
 
     def test__get_server_mismatch(self):
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SRIOV_SAMPLE)
-        }
-
-        self.sriov.init(attrs)
+        self.sriov.init(self.attrs)
 
         attr_name = 'bar.foo1'
         result = self.sriov._get_server(attr_name)
@@ -148,25 +144,29 @@ class SriovContextTestCase(unittest.TestCase):
 
     def test__get_server_duplicate(self):
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_DUPLICATE_SAMPLE)
-        }
+        self.attrs['file'] = self._get_file_abspath(self.NODES_DUPLICATE_SAMPLE)
 
-        self.sriov.init(attrs)
+        self.sriov.init(self.attrs)
 
-        attr_name = 'node1.foo'
+        attr_name = 'node1.foo-12345678'
         with self.assertRaises(ValueError):
             self.sriov._get_server(attr_name)
 
     def test__get_server_found(self):
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SRIOV_SAMPLE)
-        }
+        self.sriov.init(self.attrs)
+
+        attr_name = 'node1.foo-12345678'
+        result = self.sriov._get_server(attr_name)
+
+        self.assertEqual(result['ip'], '10.229.47.137')
+        self.assertEqual(result['name'], 'node1.foo-12345678')
+        self.assertEqual(result['user'], 'root')
+        self.assertEqual(result['key_filename'], '/root/.yardstick_key')
 
-        self.sriov.init(attrs)
+    def test__get_server_no_task_id(self):
+        self.attrs['flags'] = {'no_setup': True}
+        self.sriov.init(self.attrs)
 
         attr_name = 'node1.foo'
         result = self.sriov._get_server(attr_name)
diff --git a/yardstick/tests/unit/benchmark/contexts/test_base.py b/yardstick/tests/unit/benchmark/contexts/test_base.py
new file mode 100644 (file)
index 0000000..153c6a5
--- /dev/null
@@ -0,0 +1,43 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from yardstick.benchmark.contexts import base
+
+
+class FlagsTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.flags = base.Flags()
+
+    def test___init__(self):
+        self.assertFalse(self.flags.no_setup)
+        self.assertFalse(self.flags.no_teardown)
+
+    def test___init__with_flags(self):
+        flags = base.Flags(no_setup=True)
+        self.assertTrue(flags.no_setup)
+        self.assertFalse(flags.no_teardown)
+
+    def test_parse(self):
+        self.flags.parse(no_setup=True, no_teardown="False")
+
+        self.assertTrue(self.flags.no_setup)
+        self.assertEqual(self.flags.no_teardown, "False")
+
+    def test_parse_forbidden_flags(self):
+        self.flags.parse(foo=42)
+        with self.assertRaises(AttributeError):
+            _ = self.flags.foo
index 1a54035..e393001 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
 #
@@ -9,9 +7,6 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.contexts.dummy
-
-from __future__ import absolute_import
 import unittest
 
 from yardstick.benchmark.contexts import dummy
@@ -20,10 +15,55 @@ from yardstick.benchmark.contexts import dummy
 class DummyContextTestCase(unittest.TestCase):
 
     def setUp(self):
+        self.attrs = {
+            'name': 'foo',
+            'task_id': '1234567890',
+        }
         self.test_context = dummy.DummyContext()
+        self.addCleanup(self.test_context._delete_context)
+
+    def test___init__(self):
+        self.assertFalse(self.test_context._flags.no_setup)
+        self.assertFalse(self.test_context._flags.no_teardown)
+        self.assertIsNone(self.test_context._name)
+        self.assertIsNone(self.test_context._task_id)
+
+    def test_init(self):
+        self.test_context.init(self.attrs)
+        self.assertEqual(self.test_context._name, 'foo')
+        self.assertEqual(self.test_context._task_id, '1234567890')
+        self.assertFalse(self.test_context._flags.no_setup)
+        self.assertFalse(self.test_context._flags.no_teardown)
+
+        self.assertEqual(self.test_context.name, 'foo-12345678')
+        self.assertEqual(self.test_context.assigned_name, 'foo')
+
+    def test_init_flags_no_setup(self):
+        self.attrs['flags'] = {'no_setup': True, 'no_teardown': False}
+
+        self.test_context.init(self.attrs)
+
+        self.assertEqual(self.test_context._name, 'foo')
+        self.assertEqual(self.test_context._task_id, '1234567890')
+        self.assertTrue(self.test_context._flags.no_setup)
+        self.assertFalse(self.test_context._flags.no_teardown)
+
+        self.assertEqual(self.test_context.name, 'foo')
+        self.assertEqual(self.test_context.assigned_name, 'foo')
+
+    def test_init_flags_no_teardown(self):
+        self.attrs['flags'] = {'no_setup': False, 'no_teardown': True}
+
+        self.test_context.init(self.attrs)
+
+        self.assertFalse(self.test_context._flags.no_setup)
+        self.assertTrue(self.test_context._flags.no_teardown)
+
+        self.assertEqual(self.test_context.name, 'foo')
+        self.assertEqual(self.test_context.assigned_name, 'foo')
 
     def test__get_server(self):
-        self.test_context.init(None)
+        self.test_context.init(self.attrs)
         self.test_context.deploy()
 
         result = self.test_context._get_server(None)
index 4348bb0..f48d6f3 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Ericsson AB and others.
 #
 from collections import OrderedDict
 from itertools import count
 import logging
-import os
-import uuid
 
 import mock
 import unittest
 
+import shade
+
+from yardstick.benchmark.contexts import base
 from yardstick.benchmark.contexts import heat
 from yardstick.benchmark.contexts import model
+from yardstick.common import exceptions as y_exc
+from yardstick.orchestrator import heat as orch_heat
+from yardstick import ssh
 
 
 LOG = logging.getLogger(__name__)
@@ -33,10 +35,18 @@ class HeatContextTestCase(unittest.TestCase):
 
     def setUp(self):
         self.test_context = heat.HeatContext()
+        self.addCleanup(self._remove_contexts)
         self.mock_context = mock.Mock(spec=heat.HeatContext())
 
+    def _remove_contexts(self):
+        if self.test_context in self.test_context.list:
+            self.test_context._delete_context()
+
     def test___init__(self):
-        self.assertIsNone(self.test_context.name)
+        self.assertIsNone(self.test_context._name)
+        self.assertIsNone(self.test_context._task_id)
+        self.assertFalse(self.test_context._flags.no_setup)
+        self.assertFalse(self.test_context._flags.no_teardown)
         self.assertIsNone(self.test_context.stack)
         self.assertEqual(self.test_context.networks, OrderedDict())
         self.assertEqual(self.test_context.servers, [])
@@ -50,20 +60,21 @@ class HeatContextTestCase(unittest.TestCase):
         self.assertIsNone(self.test_context._user)
         self.assertIsNone(self.test_context.template_file)
         self.assertIsNone(self.test_context.heat_parameters)
-        self.assertIsNotNone(self.test_context.key_uuid)
-        self.assertIsNotNone(self.test_context.key_filename)
+        self.assertIsNone(self.test_context.key_filename)
 
+    @mock.patch.object(ssh.SSH, 'gen_keys')
     @mock.patch('yardstick.benchmark.contexts.heat.PlacementGroup')
     @mock.patch('yardstick.benchmark.contexts.heat.ServerGroup')
     @mock.patch('yardstick.benchmark.contexts.heat.Network')
     @mock.patch('yardstick.benchmark.contexts.heat.Server')
-    def test_init(self, mock_server, mock_network, mock_sg, mock_pg):
+    def test_init(self, mock_server, mock_network, mock_sg, mock_pg, mock_ssh_gen_keys):
 
         pgs = {'pgrp1': {'policy': 'availability'}}
         sgs = {'servergroup1': {'policy': 'affinity'}}
         networks = {'bar': {'cidr': '10.0.1.0/24'}}
         servers = {'baz': {'floating_ip': True, 'placement': 'pgrp1'}}
         attrs = {'name': 'foo',
+                 'task_id': '1234567890',
                  'placement_groups': pgs,
                  'server_groups': sgs,
                  'networks': networks,
@@ -71,9 +82,13 @@ class HeatContextTestCase(unittest.TestCase):
 
         self.test_context.init(attrs)
 
-        self.assertEqual(self.test_context.name, "foo")
-        self.assertEqual(self.test_context.keypair_name, "foo-key")
-        self.assertEqual(self.test_context.secgroup_name, "foo-secgroup")
+        self.assertFalse(self.test_context._flags.no_setup)
+        self.assertFalse(self.test_context._flags.no_teardown)
+        self.assertEqual(self.test_context._name, "foo")
+        self.assertEqual(self.test_context._task_id, '1234567890')
+        self.assertEqual(self.test_context.name, "foo-12345678")
+        self.assertEqual(self.test_context.keypair_name, "foo-12345678-key")
+        self.assertEqual(self.test_context.secgroup_name, "foo-12345678-secgroup")
 
         mock_pg.assert_called_with('pgrp1', self.test_context,
                                    pgs['pgrp1']['policy'])
@@ -90,40 +105,79 @@ class HeatContextTestCase(unittest.TestCase):
                                        servers['baz'])
         self.assertEqual(len(self.test_context.servers), 1)
 
-        if os.path.exists(self.test_context.key_filename):
-            try:
-                os.remove(self.test_context.key_filename)
-                os.remove(self.test_context.key_filename + ".pub")
-            except OSError:
-                LOG.exception("key_filename: %s",
-                              self.test_context.key_filename)
+        mock_ssh_gen_keys.assert_called()
+
+    def test_init_no_name_or_task_id(self):
+        attrs = {}
+        self.assertRaises(KeyError, self.test_context.init, attrs)
+
+    def test_name(self):
+        self.test_context._name = 'foo'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id[:8])
+        self.assertEqual(self.test_context.name, 'foo-12345678')
+        self.assertEqual(self.test_context.assigned_name, 'foo')
+
+    def test_name_flags(self):
+        self.test_context._flags = base.Flags(
+            **{"no_setup": True, "no_teardown": True})
+        self.test_context._name = 'foo'
+        self.test_context._task_id = '1234567890'
+
+        self.assertEqual(self.test_context.name, 'foo')
+        self.assertEqual(self.test_context.assigned_name, 'foo')
+
+    @mock.patch('yardstick.ssh.SSH.gen_keys')
+    def test_init_no_setup_no_teardown(self, *args):
+
+        attrs = {'name': 'foo',
+                 'task_id': '1234567890',
+                 'placement_groups': {},
+                 'server_groups': {},
+                 'networks': {},
+                 'servers': {},
+                 'flags': {
+                     'no_setup': True,
+                     'no_teardown': True,
+                     },
+                }
+
+        self.test_context.init(attrs)
+        self.assertTrue(self.test_context._flags.no_setup)
+        self.assertTrue(self.test_context._flags.no_teardown)
 
     @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
     def test__add_resources_to_template_no_servers(self, mock_template):
-
-        self.test_context.keypair_name = "foo-key"
-        self.test_context.secgroup_name = "foo-secgroup"
+        self.test_context._name = 'ctx'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id[:8])
+        self.test_context.keypair_name = "ctx-key"
+        self.test_context.secgroup_name = "ctx-secgroup"
         self.test_context.key_uuid = "2f2e4997-0a8e-4eb7-9fa4-f3f8fbbc393b"
-        netattrs = {'cidr': '10.0.0.0/24', 'provider': None, 'external_network': 'ext_net'}
-        self.mock_context.name = 'bar'
+        netattrs = {'cidr': '10.0.0.0/24', 'provider': None,
+                    'external_network': 'ext_net'}
+
         self.test_context.networks = OrderedDict(
-            {"fool-network": model.Network("fool-network", self.mock_context,
+            {"mynet": model.Network("mynet", self.test_context,
                                            netattrs)})
 
         self.test_context._add_resources_to_template(mock_template)
         mock_template.add_keypair.assert_called_with(
-            "foo-key",
-            "2f2e4997-0a8e-4eb7-9fa4-f3f8fbbc393b")
-        mock_template.add_security_group.assert_called_with("foo-secgroup")
-#        mock_template.add_network.assert_called_with("bar-fool-network", 'physnet1', None)
+            "ctx-key",
+            "ctx-12345678")
+        mock_template.add_security_group.assert_called_with("ctx-secgroup")
+        mock_template.add_network.assert_called_with(
+            "ctx-12345678-mynet", 'physnet1', None, None, None, None)
         mock_template.add_router.assert_called_with(
-            "bar-fool-network-router",
+            "ctx-12345678-mynet-router",
             netattrs["external_network"],
-            "bar-fool-network-subnet")
+            "ctx-12345678-mynet-subnet")
         mock_template.add_router_interface.assert_called_with(
-            "bar-fool-network-router-if0",
-            "bar-fool-network-router",
-            "bar-fool-network-subnet")
+            "ctx-12345678-mynet-router-if0",
+            "ctx-12345678-mynet-router",
+            "ctx-12345678-mynet-subnet")
 
     @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
     def test_attrs_get(self, *args):
@@ -148,23 +202,103 @@ class HeatContextTestCase(unittest.TestCase):
         with self.assertRaises(AttributeError):
             self.test_context.user = 'foo'
 
+    def test__create_new_stack(self):
+        template = mock.Mock()
+        self.test_context._create_new_stack(template)
+        template.create.assert_called_once()
+
+    def test__create_new_stack_stack_create_failed(self):
+        template = mock.Mock()
+        template.create.side_effect = y_exc.HeatTemplateError
+
+        self.assertRaises(y_exc.HeatTemplateError,
+                          self.test_context._create_new_stack,
+                          template)
+
+    def test__create_new_stack_keyboard_interrupt(self):
+        template = mock.Mock()
+        template.create.side_effect = KeyboardInterrupt
+        self.assertRaises(y_exc.StackCreationInterrupt,
+                          self.test_context._create_new_stack,
+                          template)
+
+    @mock.patch.object(orch_heat.HeatTemplate, 'add_keypair')
+    @mock.patch.object(heat.HeatContext, '_create_new_stack')
+    def test_deploy_stack_creation_failed(self, mock_create, *args):
+        self.test_context._name = 'foo'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = 'foo-12345678'
+        mock_create.side_effect = y_exc.HeatTemplateError
+        self.assertRaises(y_exc.HeatTemplateError,
+                          self.test_context.deploy)
+
     @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
     def test_deploy(self, mock_template):
-        self.test_context.name = 'foo'
+        self.test_context._name = 'foo'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id[:8])
         self.test_context.template_file = '/bar/baz/some-heat-file'
         self.test_context.heat_parameters = {'image': 'cirros'}
         self.test_context.get_neutron_info = mock.MagicMock()
         self.test_context.deploy()
 
-        mock_template.assert_called_with('foo',
+        mock_template.assert_called_with('foo-12345678',
                                          '/bar/baz/some-heat-file',
                                          {'image': 'cirros'})
         self.assertIsNotNone(self.test_context.stack)
 
+    # TODO: patch objects
+    @mock.patch.object(heat, 'HeatTemplate')
+    @mock.patch.object(heat.HeatContext, '_retrieve_existing_stack')
+    @mock.patch.object(heat.HeatContext, '_create_new_stack')
+    def test_deploy_no_setup(self, mock_create_new_stack, mock_retrieve_existing_stack, *args):
+        self.test_context._name = 'foo'
+        self.test_context._task_id = '1234567890'
+        # Might be able to get rid of these
+        self.test_context.template_file = '/bar/baz/some-heat-file'
+        self.test_context.heat_parameters = {'image': 'cirros'}
+        self.test_context.get_neutron_info = mock.MagicMock()
+        self.test_context._flags.no_setup = True
+        self.test_context.deploy()
+
+        # check that heat client is called...
+        mock_create_new_stack.assert_not_called()
+        mock_retrieve_existing_stack.assert_called_with(self.test_context.name)
+        self.assertIsNotNone(self.test_context.stack)
+
+    @mock.patch.object(shade, 'openstack_cloud')
+    @mock.patch.object(heat.HeatTemplate, 'add_keypair')
+    @mock.patch.object(heat.HeatContext, '_create_new_stack')
+    @mock.patch.object(heat.HeatStack, 'get')
+    def test_deploy_try_retrieve_context_does_not_exist(self,
+                                                        mock_get_stack,
+                                                        mock_create_new_stack,
+                                                        *args):
+        self.test_context._name = 'demo'
+        self.test_context._task_id = '1234567890'
+        self.test_context._flags.no_setup = True
+        self.test_context.get_neutron_info = mock.MagicMock()
+
+        # TODO: Check if this is the right value to return; should it be None instead?
+        mock_get_stack.return_value = []
+
+        self.test_context.deploy()
+
+        mock_get_stack.assert_called()
+        mock_create_new_stack.assert_called()
+
+    def test_check_for_context(self):
+        pass
+        # check that the context exists
+
     def test_add_server_port(self):
         network1 = mock.MagicMock()
         network2 = mock.MagicMock()
-        self.test_context.name = 'foo'
+        self.test_context._name = 'foo'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id[:8])
         self.test_context.stack = mock.MagicMock()
         self.test_context.networks = {
             'a': network1,
@@ -173,15 +307,15 @@ class HeatContextTestCase(unittest.TestCase):
         self.test_context.stack.outputs = {
             u'b': u'10.20.30.45',
             u'b-subnet_id': 1,
-            u'foo-a-subnet-cidr': u'10.20.0.0/15',
-            u'foo-a-subnet-gateway_ip': u'10.20.30.1',
+            u'foo-12345678-a-subnet-cidr': u'10.20.0.0/15',
+            u'foo-12345678-a-subnet-gateway_ip': u'10.20.30.1',
             u'b-mac_address': u'00:01',
             u'b-device_id': u'dev21',
             u'b-network_id': u'net789',
             u'd': u'40.30.20.15',
             u'd-subnet_id': 2,
-            u'foo-c-subnet-cidr': u'40.30.0.0/18',
-            u'foo-c-subnet-gateway_ip': u'40.30.20.254',
+            u'foo-12345678-c-subnet-cidr': u'40.30.0.0/18',
+            u'foo-12345678-c-subnet-gateway_ip': u'40.30.20.254',
             u'd-mac_address': u'00:10',
             u'd-device_id': u'dev43',
             u'd-network_id': u'net987',
@@ -218,17 +352,41 @@ class HeatContextTestCase(unittest.TestCase):
         self.assertEqual(len(server.interfaces), 3)
         self.assertDictEqual(server.interfaces['port_a'], expected)
 
+    @mock.patch('yardstick.benchmark.contexts.heat.os')
+    @mock.patch.object(heat.HeatContext, '_delete_key_file')
     @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
-    def test_undeploy(self, mock_template):
+    def test_undeploy(self, mock_template, mock_delete_key, *args):
         self.test_context.stack = mock_template
+        self.test_context._name = 'foo'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id[:8])
+        # mock_os.path.exists.return_value = True
+        self.test_context.key_filename = 'foo/bar/foobar'
         self.test_context.undeploy()
+        mock_delete_key.assert_called()
         self.assertTrue(mock_template.delete.called)
 
+    @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+    def test_undeploy_no_teardown(self, mock_template):
+        self.test_context.stack = mock_template
+        self.test_context._name = 'foo'
+        self.test_context._task_id = '1234567890'
+        self.test_context._flags.no_teardown = True
+        self.test_context.undeploy()
+
+        mock_template.delete.assert_not_called()
+
     @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
     @mock.patch('yardstick.benchmark.contexts.heat.os')
     def test_undeploy_key_filename(self, mock_os, mock_template):
         self.test_context.stack = mock_template
+        self.test_context._name = 'foo'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id)
         mock_os.path.exists.return_value = True
+        self.test_context.key_filename = 'foo/bar/foobar'
         self.assertIsNone(self.test_context.undeploy())
 
     @mock.patch("yardstick.benchmark.contexts.heat.pkg_resources")
@@ -249,24 +407,27 @@ class HeatContextTestCase(unittest.TestCase):
         baz3_server.public_ip = '127.0.0.3'
         baz3_server.context.user = 'zab'
 
-        self.test_context.name = 'bar'
+        self.test_context._name = 'bar'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id[:8])
         self.test_context._user = 'bot'
         self.test_context.stack = mock.Mock()
         self.test_context.stack.outputs = {
             'private_ip': '10.0.0.1',
             'public_ip': '127.0.0.1',
         }
-        self.test_context.key_uuid = uuid.uuid4()
         self.test_context._server_map = {
             'baz3': baz3_server,
             'foo2': foo2_server,
         }
 
         attr_name = {
-            'name': 'foo.bar',
+            'name': 'foo.bar-12345678',
             'private_ip_attr': 'private_ip',
             'public_ip_attr': 'public_ip',
         }
+        self.test_context.key_uuid = 'foo-42'
         result = self.test_context._get_server(attr_name)
         self.assertEqual(result['user'], 'bot')
         self.assertEqual(result['ip'], '127.0.0.1')
@@ -288,22 +449,26 @@ class HeatContextTestCase(unittest.TestCase):
         baz3_server.public_ip = '127.0.0.3'
         baz3_server.context.user = 'zab'
 
-        self.test_context.name = 'bar'
+        self.test_context._name = 'bar'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id[:8])
         self.test_context._user = 'bot'
         self.test_context.stack = mock.Mock()
         self.test_context.stack.outputs = {
             'private_ip': '10.0.0.1',
             'public_ip': '127.0.0.1',
         }
-        self.test_context.key_uuid = uuid.uuid4()
         self.test_context._server_map = {
             'baz3': baz3_server,
             'foo2': foo2_server,
         }
 
         attr_name = {
-            'name': 'foo.bar',
+            'name': 'foo.bar-12345678',
         }
+
+        self.test_context.key_uuid = 'foo-42'
         result = self.test_context._get_server(attr_name)
         self.assertEqual(result['user'], 'bot')
         # no private ip attr mapping in the map results in None value in the result
@@ -327,13 +492,14 @@ class HeatContextTestCase(unittest.TestCase):
         baz3_server.public_ip = None
         baz3_server.context.user = 'zab'
 
-        self.test_context.name = 'bar1'
+        self.test_context._name = 'bar1'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = 'bar1-12345678'
         self.test_context.stack = mock.Mock()
         self.test_context.stack.outputs = {
             'private_ip': '10.0.0.1',
             'public_ip': '127.0.0.1',
         }
-        self.test_context.key_uuid = uuid.uuid4()
         self.test_context.generate_routing_table = mock.MagicMock(return_value=[])
 
         self.test_context._server_map = {
@@ -365,19 +531,19 @@ class HeatContextTestCase(unittest.TestCase):
         baz3_server.public_ip = None
         baz3_server.context.user = 'zab'
 
-        self.test_context.name = 'bar1'
+        self.test_context._name = 'bar1'
         self.test_context.stack = mock.Mock()
         self.test_context.stack.outputs = {
             'private_ip': '10.0.0.1',
             'public_ip': '127.0.0.1',
         }
-        self.test_context.key_uuid = uuid.uuid4()
         self.test_context._server_map = {
             'baz3': baz3_server,
             'foo2': foo2_server,
             'wow4': None,
         }
 
+        self.test_context.key_uuid = 'foo-42'
         attr_name = 'wow4'
         result = self.test_context._get_server(attr_name)
         self.assertIsNone(result)
@@ -398,18 +564,21 @@ class HeatContextTestCase(unittest.TestCase):
         baz3_server.public_ip = None
         baz3_server.context.user = 'zab'
 
-        self.test_context.name = 'bar1'
+        self.test_context._name = 'bar1'
+        self.test_context._task_id = '1235467890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id[:8])
         self.test_context.stack = mock.Mock()
         self.test_context.stack.outputs = {
             'private_ip': '10.0.0.1',
             'public_ip': '127.0.0.1',
         }
-        self.test_context.key_uuid = uuid.uuid4()
         self.test_context._server_map = {
             'baz3': baz3_server,
             'foo2': foo2_server,
         }
 
+        self.test_context.key_uuid = 'foo-42'
         attr_name = {
             'name': 'foo.wow4',
             'private_ip_attr': 'private_ip',
@@ -434,18 +603,18 @@ class HeatContextTestCase(unittest.TestCase):
         baz3_server.public_ip = None
         baz3_server.context.user = 'zab'
 
-        self.mock_context.name = 'bar1'
+        self.mock_context._name = 'bar1'
         self.test_context.stack = mock.Mock()
         self.mock_context.stack.outputs = {
             'private_ip': '10.0.0.1',
             'public_ip': '127.0.0.1',
         }
-        self.mock_context.key_uuid = uuid.uuid4()
         self.mock_context._server_map = {
             'baz3': baz3_server,
             'foo2': foo2_server,
         }
 
+        self.test_context.key_uuid = 'foo-42'
         attr_name = 'foo.wow4'
         result = self.test_context._get_server(attr_name)
         self.assertIsNone(result)
index e149e0d..22153e4 100644 (file)
 import mock
 import unittest
 
-from yardstick.benchmark.contexts.base import Context
 from yardstick.benchmark.contexts import kubernetes
 
 
 context_cfg = {
     'type': 'Kubernetes',
     'name': 'k8s',
+    'task_id': '1234567890',
     'servers': {
         'host': {
             'image': 'openretriever/yardstick',
@@ -40,11 +40,12 @@ class KubernetesTestCase(unittest.TestCase):
 
     def setUp(self):
         self.k8s_context = kubernetes.KubernetesContext()
+        self.addCleanup(self._remove_contexts)
         self.k8s_context.init(context_cfg)
 
-    def tearDown(self):
-        # clear kubernetes contexts from global list so we don't break other tests
-        Context.list = []
+    def _remove_contexts(self):
+        if self.k8s_context in self.k8s_context.list:
+            self.k8s_context._delete_context()
 
     @mock.patch.object(kubernetes.KubernetesContext, '_delete_services')
     @mock.patch.object(kubernetes.KubernetesContext, '_delete_ssh_key')
@@ -166,11 +167,3 @@ class KubernetesTestCase(unittest.TestCase):
     def test_delete_services(self, mock_delete):
         self.k8s_context._delete_services()
         self.assertTrue(mock_delete.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 28011d4..76c4da5 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Ericsson AB and others.
 #
index 5329d30..9761f6d 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015-2017 Huawei Technologies Co.,Ltd and others.
 #
@@ -9,9 +7,6 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.contexts.node
-
-from __future__ import absolute_import
 import os
 import unittest
 import errno
@@ -21,10 +16,6 @@ from yardstick.common import constants as consts
 from yardstick.benchmark.contexts import node
 
 
-# pylint: disable=unused-argument
-# disable this for now because I keep forgetting mock patch arg ordering
-
-
 class NodeContextTestCase(unittest.TestCase):
 
     PREFIX = 'yardstick.benchmark.contexts.node'
@@ -34,7 +25,17 @@ class NodeContextTestCase(unittest.TestCase):
 
     def setUp(self):
         self.test_context = node.NodeContext()
+        self.addCleanup(self._remove_contexts)
         self.os_path_join = os.path.join
+        self.attrs = {
+            'name': 'foo',
+            'task_id': '1234567890',
+            'file': self._get_file_abspath(self.NODES_SAMPLE)
+        }
+
+    def _remove_contexts(self):
+        if self.test_context in self.test_context.list:
+            self.test_context._delete_context()
 
     def _get_file_abspath(self, filename):
         curr_path = os.path.dirname(os.path.abspath(__file__))
@@ -42,7 +43,7 @@ class NodeContextTestCase(unittest.TestCase):
         return file_path
 
     def test___init__(self):
-        self.assertIsNone(self.test_context.name)
+        self.assertIsNone(self.test_context._name)
         self.assertIsNone(self.test_context.file_path)
         self.assertEqual(self.test_context.nodes, [])
         self.assertEqual(self.test_context.controllers, [])
@@ -74,6 +75,7 @@ class NodeContextTestCase(unittest.TestCase):
 
         attrs = {
             'name': 'foo',
+            'task_id': '1234567890',
             'file': error_path,
         }
         read_mock.side_effect = IOError(errno.EBUSY, 'busy')
@@ -97,37 +99,19 @@ class NodeContextTestCase(unittest.TestCase):
         self.assertEqual(str(raised.exception), str(read_mock.side_effect))
 
     def test_read_config_file(self):
-
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SAMPLE)
-        }
-
-        self.test_context.init(attrs)
+        self.test_context.init(self.attrs)
 
         self.assertIsNotNone(self.test_context.read_config_file())
 
     def test__dispatch_script(self):
-
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SAMPLE)
-        }
-
-        self.test_context.init(attrs)
+        self.test_context.init(self.attrs)
 
         self.test_context.env = {'bash': [{'script': 'dummy'}]}
         self.test_context._execute_script = mock.Mock()
         self.assertEqual(self.test_context._dispatch_script('bash'), None)
 
     def test__dispatch_ansible(self):
-
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SAMPLE)
-        }
-
-        self.test_context.init(attrs)
+        self.test_context.init(self.attrs)
 
         self.test_context.env = {'ansible': [{'script': 'dummy'}]}
         self.test_context._do_ansible_job = mock.Mock()
@@ -136,19 +120,13 @@ class NodeContextTestCase(unittest.TestCase):
         self.assertEqual(self.test_context._dispatch_ansible('ansible'), None)
 
     @mock.patch("{}.AnsibleCommon".format(PREFIX))
-    def test__do_ansible_job(self, mock_ansible):
-        self.assertEqual(None, self.test_context._do_ansible_job('dummy'))
-
-    def test_successful_init(self):
-
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SAMPLE)
-        }
+    def test__do_ansible_job(self, *args):
+        self.assertIsNone(self.test_context._do_ansible_job('dummy'))
 
-        self.test_context.init(attrs)
+    def test_init(self):
+        self.test_context.init(self.attrs)
 
-        self.assertEqual(self.test_context.name, "foo")
+        self.assertEqual(self.test_context.name, "foo-12345678")
         self.assertEqual(len(self.test_context.nodes), 4)
         self.assertEqual(len(self.test_context.controllers), 2)
         self.assertEqual(len(self.test_context.computes), 1)
@@ -156,81 +134,44 @@ class NodeContextTestCase(unittest.TestCase):
         self.assertEqual(len(self.test_context.baremetals), 1)
         self.assertEqual(self.test_context.baremetals[0]["name"], "node4")
 
-    def test__get_server_with_dic_attr_name(self):
-
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SAMPLE)
-        }
-
-        self.test_context.init(attrs)
-
-        attr_name = {'name': 'foo.bar'}
-        result = self.test_context._get_server(attr_name)
+    def test__get_server_with_dict_attr_name(self):
+        self.test_context.init(self.attrs)
+        result = self.test_context._get_server({'name': 'node1.foo-12345678'})
 
-        self.assertEqual(result, None)
+        self.assertIsNone(result)
 
     def test__get_server_not_found(self):
+        self.test_context.init(self.attrs)
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SAMPLE)
-        }
-
-        self.test_context.init(attrs)
-
-        attr_name = 'bar.foo'
-        result = self.test_context._get_server(attr_name)
-
-        self.assertEqual(result, None)
+        self.assertIsNone(self.test_context._get_server('bar.foo-12345678'))
 
     def test__get_server_mismatch(self):
+        self.test_context.init(self.attrs)
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SAMPLE)
-        }
-
-        self.test_context.init(attrs)
-
-        attr_name = 'bar.foo1'
-        result = self.test_context._get_server(attr_name)
-
-        self.assertEqual(result, None)
+        self.assertIsNone(self.test_context._get_server('bar.foo1'))
 
     def test__get_server_duplicate(self):
+        self.attrs['file'] = self._get_file_abspath(
+            self.NODES_DUPLICATE_SAMPLE)
+        self.test_context.init(self.attrs)
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_DUPLICATE_SAMPLE)
-        }
-
-        self.test_context.init(attrs)
-
-        attr_name = 'node1.foo'
         with self.assertRaises(ValueError):
-            self.test_context._get_server(attr_name)
+            self.test_context._get_server('node1.foo-12345678')
 
     def test__get_server_found(self):
+        self.test_context.init(self.attrs)
 
-        attrs = {
-            'name': 'foo',
-            'file': self._get_file_abspath(self.NODES_SAMPLE)
-        }
-
-        self.test_context.init(attrs)
-
-        attr_name = 'node1.foo'
-        result = self.test_context._get_server(attr_name)
+        result = self.test_context._get_server('node1.foo-12345678')
 
         self.assertEqual(result['ip'], '10.229.47.137')
-        self.assertEqual(result['name'], 'node1.foo')
+        self.assertEqual(result['name'], 'node1.foo-12345678')
         self.assertEqual(result['user'], 'root')
         self.assertEqual(result['key_filename'], '/root/.yardstick_key')
 
     @mock.patch('{}.NodeContext._dispatch_script'.format(PREFIX))
     def test_deploy(self, dispatch_script_mock):
         obj = node.NodeContext()
+        self.addCleanup(obj._delete_context)
         obj.env = {
             'type': 'script'
         }
@@ -240,6 +181,7 @@ class NodeContextTestCase(unittest.TestCase):
     @mock.patch('{}.NodeContext._dispatch_ansible'.format(PREFIX))
     def test_deploy_anisible(self, dispatch_ansible_mock):
         obj = node.NodeContext()
+        self.addCleanup(obj._delete_context)
         obj.env = {
             'type': 'ansible'
         }
@@ -268,6 +210,7 @@ class NodeContextTestCase(unittest.TestCase):
     @mock.patch('{}.ssh.SSH.execute'.format(PREFIX))
     def test_execute_remote_script(self, execute_mock, put_file_mock):
         obj = node.NodeContext()
+        self.addCleanup(obj._delete_context)
         obj.env = {'prefix': 'yardstick.benchmark.scenarios.compute'}
         node_name_args = 'node5'
         obj.nodes = [{
@@ -288,14 +231,18 @@ class NodeContextTestCase(unittest.TestCase):
     def test_execute_script_local(self, local_execute_mock):
         node_name = 'local'
         info = {}
-        node.NodeContext()._execute_script(node_name, info)
+        obj = node.NodeContext()
+        self.addCleanup(obj._delete_context)
+        obj._execute_script(node_name, info)
         self.assertTrue(local_execute_mock.called)
 
     @mock.patch('{}.NodeContext._execute_remote_script'.format(PREFIX))
     def test_execute_script_remote(self, remote_execute_mock):
         node_name = 'node5'
         info = {}
-        node.NodeContext()._execute_script(node_name, info)
+        obj = node.NodeContext()
+        self.addCleanup(obj._delete_context)
+        obj._execute_script(node_name, info)
         self.assertTrue(remote_execute_mock.called)
 
     def test_get_script(self):
@@ -303,13 +250,16 @@ class NodeContextTestCase(unittest.TestCase):
         info_args = {
             'script': script_args
         }
-        script, options = node.NodeContext()._get_script(info_args)
+        obj = node.NodeContext()
+        self.addCleanup(obj._delete_context)
+        script, options = obj._get_script(info_args)
         self.assertEqual(script_args, script)
         self.assertEqual('', options)
 
     def test_node_info(self):
         node_name_args = 'node5'
         obj = node.NodeContext()
+        self.addCleanup(obj._delete_context)
         obj.nodes = [{'name': node_name_args, 'check': node_name_args}]
         node_info = obj._get_node_info(node_name_args)
         self.assertEqual(node_info.get('check'), node_name_args)
@@ -318,6 +268,7 @@ class NodeContextTestCase(unittest.TestCase):
     def test_get_client(self, wait_mock):
         node_name_args = 'node5'
         obj = node.NodeContext()
+        self.addCleanup(obj._delete_context)
         obj.nodes = [{
             'name': node_name_args,
             'user': 'ubuntu',
@@ -328,26 +279,38 @@ class NodeContextTestCase(unittest.TestCase):
         self.assertTrue(wait_mock.called)
 
     def test_get_server(self):
-        self.test_context.name = 'vnf1'
-        self.test_context.nodes = [{'name': 'my', 'value': 100}]
+        self.test_context.init(self.attrs)
+        self.test_context._name = 'foo'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id[:8])
+        self.assertEqual('foo-12345678', self.test_context.name)
+        self.assertIsNotNone(self.test_context._task_id)
 
-        with self.assertRaises(ValueError):
-            self.test_context.get_server('my.vnf2')
+        result = self.test_context.get_server('node1.foo-12345678')
 
-        expected = {'name': 'my.vnf1', 'value': 100, 'interfaces': {}}
-        result = self.test_context.get_server('my.vnf1')
-        self.assertDictEqual(result, expected)
+        self.assertEqual(result['ip'], '10.229.47.137')
+        self.assertEqual(result['name'], 'node1.foo-12345678')
+        self.assertEqual(result['user'], 'root')
+        self.assertEqual(result['key_filename'], '/root/.yardstick_key')
+
+    def test_get_server_server_not_in_context(self):
+        self.test_context.init(self.attrs)
+
+        with self.assertRaises(ValueError):
+            self.test_context.get_server('my2.foo-12345678')
 
     def test_get_context_from_server(self):
-        self.test_context.name = 'vnf1'
+        self.test_context._name = 'vnf1'
+        self.test_context._task_id = '1234567890'
+        self.test_context._name_task_id = '{}-{}'.format(
+            self.test_context._name, self.test_context._task_id[:8])
         self.test_context.nodes = [{'name': 'my', 'value': 100}]
         self.test_context.attrs = {'attr1': 200}
 
-        with self.assertRaises(ValueError):
-            self.test_context.get_context_from_server('my.vnf2')
-
-        result = self.test_context.get_context_from_server('my.vnf1')
-        self.assertIs(result, self.test_context)
+        self.assertIs(
+            self.test_context.get_context_from_server('my.vnf1-12345678'),
+            self.test_context)
 
     # TODO: Split this into more granular tests
     def test__get_network(self):
@@ -393,11 +356,3 @@ class NodeContextTestCase(unittest.TestCase):
         expected = network1
         result = self.test_context._get_network(attr_name)
         self.assertDictEqual(result, expected)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 1d6e805..0d14e4e 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
 #
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.core.plugin
-from __future__ import absolute_import
+import copy
 import os
-from os.path import dirname as dirname
+import pkg_resources
 
-try:
-    from unittest import mock
-except ImportError:
-    import mock
-import unittest
+import mock
+import testtools
 
+from yardstick import ssh
 from yardstick.benchmark.core import plugin
+from yardstick.tests import fixture
+
 
+class PluginTestCase(testtools.TestCase):
 
-class Arg(object):
+    FILE = """
+schema: "yardstick:plugin:0.1"
 
-    def __init__(self):
-        # self.input_file = ('plugin/sample_config.yaml',)
-        self.input_file = [
-            os.path.join(os.path.abspath(
-                dirname(dirname(dirname(dirname(dirname(dirname(__file__))))))),
-                'plugin/sample_config.yaml')]
+plugins:
+    name: sample
 
+deployment:
+    ip: 10.1.0.50
+    user: root
+    password: root
+"""
 
-@mock.patch('yardstick.benchmark.core.plugin.ssh')
-class pluginTestCase(unittest.TestCase):
+    NAME = 'sample'
+    DEPLOYMENT = {'ip': '10.1.0.50', 'user': 'root', 'password': 'root'}
 
     def setUp(self):
-        self.result = {}
-
-    def test_install(self, mock_ssh):
-        p = plugin.Plugin()
-        mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
-        input_file = Arg()
-        p.install(input_file)
-        expected_result = {}
-        self.assertEqual(self.result, expected_result)
-
-    def test_remove(self, mock_ssh):
-        p = plugin.Plugin()
-        mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
-        input_file = Arg()
-        p.remove(input_file)
-        expected_result = {}
-        self.assertEqual(self.result, expected_result)
-
-    def test_install_setup_run(self, mock_ssh):
-        p = plugin.Plugin()
-        mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
-        plugins = {
-            "name": "sample"
-        }
-        deployment = {
-            "ip": "10.1.0.50",
-            "user": "root",
-            "password": "root"
-        }
-        plugin_name = plugins.get("name")
-        p._install_setup(plugin_name, deployment)
-        self.assertIsNotNone(p.client)
-
-        p._run(plugin_name)
-        expected_result = {}
-        self.assertEqual(self.result, expected_result)
-
-    def test_remove_setup_run(self, mock_ssh):
-        p = plugin.Plugin()
-        mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
-        plugins = {
-            "name": "sample"
-        }
-        deployment = {
-            "ip": "10.1.0.50",
-            "user": "root",
-            "password": "root"
-        }
-        plugin_name = plugins.get("name")
-        p._remove_setup(plugin_name, deployment)
-        self.assertIsNotNone(p.client)
-
-        p._run(plugin_name)
-        expected_result = {}
-        self.assertEqual(self.result, expected_result)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
+        super(PluginTestCase, self).setUp()
+        self.plugin_parser = plugin.PluginParser(mock.Mock())
+        self.plugin = plugin.Plugin()
+        self.useFixture(fixture.PluginParserFixture(PluginTestCase.FILE))
+
+        self._mock_ssh_from_node = mock.patch.object(ssh.SSH, 'from_node')
+        self.mock_ssh_from_node = self._mock_ssh_from_node.start()
+        self.mock_ssh_obj = mock.Mock()
+        self.mock_ssh_from_node.return_value = self.mock_ssh_obj
+        self.mock_ssh_obj.wait = mock.Mock()
+        self.mock_ssh_obj._put_file_shell = mock.Mock()
+
+        self.addCleanup(self._cleanup)
+
+    def _cleanup(self):
+        self._mock_ssh_from_node.stop()
+
+    def test_install(self):
+        args = mock.Mock()
+        args.input_file = [mock.Mock()]
+        with mock.patch.object(self.plugin, '_install_setup') as \
+                mock_install, \
+                mock.patch.object(self.plugin, '_run') as mock_run:
+            self.plugin.install(args)
+            mock_install.assert_called_once_with(PluginTestCase.NAME,
+                                                 PluginTestCase.DEPLOYMENT)
+            mock_run.assert_called_once_with(PluginTestCase.NAME)
+
+    def test_remove(self):
+        args = mock.Mock()
+        args.input_file = [mock.Mock()]
+        with mock.patch.object(self.plugin, '_remove_setup') as \
+                mock_remove, \
+                mock.patch.object(self.plugin, '_run') as mock_run:
+            self.plugin.remove(args)
+            mock_remove.assert_called_once_with(PluginTestCase.NAME,
+                                                PluginTestCase.DEPLOYMENT)
+            mock_run.assert_called_once_with(PluginTestCase.NAME)
+
+    @mock.patch.object(pkg_resources, 'resource_filename',
+                       return_value='script')
+    def test__install_setup(self, mock_resource_filename):
+        plugin_name = 'plugin_name'
+        self.plugin._install_setup(plugin_name, PluginTestCase.DEPLOYMENT)
+        mock_resource_filename.assert_called_once_with(
+            'yardstick.resources', 'scripts/install/' + plugin_name + '.bash')
+        self.mock_ssh_from_node.assert_called_once_with(
+            PluginTestCase.DEPLOYMENT)
+        self.mock_ssh_obj.wait.assert_called_once_with(timeout=600)
+        self.mock_ssh_obj._put_file_shell.assert_called_once_with(
+            'script', '~/{0}.sh'.format(plugin_name))
+
+    @mock.patch.object(pkg_resources, 'resource_filename',
+                       return_value='script')
+    @mock.patch.object(os, 'environ', return_value='1.2.3.4')
+    def test__install_setup_with_ip_local(self, mock_os_environ,
+                                          mock_resource_filename):
+        plugin_name = 'plugin_name'
+        deployment = copy.deepcopy(PluginTestCase.DEPLOYMENT)
+        deployment['ip'] = 'local'
+        self.plugin._install_setup(plugin_name, deployment)
+        mock_os_environ.__getitem__.assert_called_once_with('JUMP_HOST_IP')
+        mock_resource_filename.assert_called_once_with(
+            'yardstick.resources',
+            'scripts/install/' + plugin_name + '.bash')
+        self.mock_ssh_from_node.assert_called_once_with(
+            deployment, overrides={'ip': os.environ["JUMP_HOST_IP"]})
+        self.mock_ssh_obj.wait.assert_called_once_with(timeout=600)
+        self.mock_ssh_obj._put_file_shell.assert_called_once_with(
+            'script', '~/{0}.sh'.format(plugin_name))
+
+    @mock.patch.object(pkg_resources, 'resource_filename',
+                       return_value='script')
+    def test__remove_setup(self, mock_resource_filename):
+        plugin_name = 'plugin_name'
+        self.plugin._remove_setup(plugin_name, PluginTestCase.DEPLOYMENT)
+        mock_resource_filename.assert_called_once_with(
+            'yardstick.resources',
+            'scripts/remove/' + plugin_name + '.bash')
+        self.mock_ssh_from_node.assert_called_once_with(
+            PluginTestCase.DEPLOYMENT)
+        self.mock_ssh_obj.wait.assert_called_once_with(timeout=600)
+        self.mock_ssh_obj._put_file_shell.assert_called_once_with(
+            'script', '~/{0}.sh'.format(plugin_name))
+
+    @mock.patch.object(pkg_resources, 'resource_filename',
+                       return_value='script')
+    @mock.patch.object(os, 'environ', return_value='1.2.3.4')
+    def test__remove_setup_with_ip_local(self, mock_os_environ,
+                                         mock_resource_filename):
+        plugin_name = 'plugin_name'
+        deployment = copy.deepcopy(PluginTestCase.DEPLOYMENT)
+        deployment['ip'] = 'local'
+        self.plugin._remove_setup(plugin_name, deployment)
+        mock_os_environ.__getitem__.assert_called_once_with('JUMP_HOST_IP')
+        mock_resource_filename.assert_called_once_with(
+            'yardstick.resources',
+            'scripts/remove/' + plugin_name + '.bash')
+        self.mock_ssh_from_node.assert_called_once_with(
+            deployment, overrides={'ip': os.environ["JUMP_HOST_IP"]})
+        self.mock_ssh_obj.wait.assert_called_once_with(timeout=600)
+        self.mock_ssh_obj._put_file_shell.assert_called_once_with(
+            'script', '~/{0}.sh'.format(plugin_name))
index 3d9a503..a684ad7 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2017 Rajesh Kudaka.
 #
index 3d9a10d..82a90b1 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
 #
@@ -9,43 +7,41 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.core.task
-
-from __future__ import print_function
-
-from __future__ import absolute_import
+import copy
+import io
 import os
-import unittest
-
-try:
-    from unittest import mock
-except ImportError:
-    import mock
+import sys
 
+import mock
+import six
+import unittest
+import uuid
 
+from yardstick.benchmark.contexts import dummy
 from yardstick.benchmark.core import task
 from yardstick.common import constants as consts
-
-
-# pylint: disable=unused-argument
-# disable this for now because I keep forgetting mock patch arg ordering
+from yardstick.common import exceptions
+from yardstick.common import task_template
+from yardstick.common import utils
 
 
 class TaskTestCase(unittest.TestCase):
 
-    @mock.patch('yardstick.benchmark.core.task.Context')
-    def test_parse_nodes_host_target_same_context(self, mock_context):
-        nodes = {
-            "host": "node1.LF",
-            "target": "node2.LF"
+    @mock.patch.object(task, 'Context')
+    def test_parse_nodes_with_context_same_context(self, mock_context):
+        scenario_cfg = {
+            "nodes": {
+                "host": "node1.LF",
+                "target": "node2.LF"
+            }
         }
-        scenario_cfg = {"nodes": nodes}
         server_info = {
             "ip": "10.20.0.3",
             "user": "root",
             "key_filename": "/root/.ssh/id_rsa"
         }
         mock_context.get_server.return_value = server_info
+
         context_cfg = task.parse_nodes_with_context(scenario_cfg)
 
         self.assertEqual(context_cfg["host"], server_info)
@@ -57,15 +53,22 @@ class TaskTestCase(unittest.TestCase):
         t._set_dispatchers(output_config)
         self.assertEqual(output_config, output_config)
 
-    @mock.patch('yardstick.benchmark.core.task.DispatcherBase')
+    @mock.patch.object(task, 'DispatcherBase')
     def test__do_output(self, mock_dispatcher):
         t = task.Task()
         output_config = {"DEFAULT": {"dispatcher": "file, http"}}
-        mock_dispatcher.get = mock.MagicMock(return_value=[mock.MagicMock(),
-                                                           mock.MagicMock()])
-        self.assertEqual(None, t._do_output(output_config, {}))
 
-    @mock.patch('yardstick.benchmark.core.task.Context')
+        dispatcher1 = mock.MagicMock()
+        dispatcher1.__dispatcher_type__ = 'file'
+
+        dispatcher2 = mock.MagicMock()
+        dispatcher2.__dispatcher_type__ = 'http'
+
+        mock_dispatcher.get = mock.MagicMock(return_value=[dispatcher1,
+                                                           dispatcher2])
+        self.assertIsNone(t._do_output(output_config, {}))
+
+    @mock.patch.object(task, 'Context')
     def test_parse_networks_from_nodes(self, mock_context):
         nodes = {
             'node1': {
@@ -129,9 +132,9 @@ class TaskTestCase(unittest.TestCase):
         self.assertEqual(mock_context.get_network.call_count, expected_get_network_calls)
         self.assertDictEqual(networks, expected)
 
-    @mock.patch('yardstick.benchmark.core.task.Context')
-    @mock.patch('yardstick.benchmark.core.task.base_runner')
-    def test_run(self, mock_base_runner, mock_ctx):
+    @mock.patch.object(task, 'Context')
+    @mock.patch.object(task, 'base_runner')
+    def test_run(self, mock_base_runner, *args):
         scenario = {
             'host': 'athena.demo',
             'target': 'ares.demo',
@@ -152,8 +155,8 @@ class TaskTestCase(unittest.TestCase):
         t._run([scenario], False, "yardstick.out")
         self.assertTrue(runner.run.called)
 
-    @mock.patch('yardstick.benchmark.core.task.os')
-    def test_check_precondition(self, mock_os):
+    @mock.patch.object(os, 'environ')
+    def test_check_precondition(self, mock_os_environ):
         cfg = {
             'precondition': {
                 'installer_type': 'compass',
@@ -163,7 +166,7 @@ class TaskTestCase(unittest.TestCase):
         }
 
         t = task.TaskParser('/opt')
-        mock_os.environ.get.side_effect = ['compass',
+        mock_os_environ.get.side_effect = ['compass',
                                            'os-nosdn',
                                            'huawei-pod1']
         result = t._check_precondition(cfg)
@@ -172,82 +175,74 @@ class TaskTestCase(unittest.TestCase):
     def test_parse_suite_no_constraint_no_args(self):
         SAMPLE_SCENARIO_PATH = "no_constraint_no_args_scenario_sample.yaml"
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
-        with mock.patch('yardstick.benchmark.core.task.os.environ',
+        with mock.patch.object(os, 'environ',
                         new={'NODE_NAME': 'huawei-pod1', 'INSTALLER_TYPE': 'compass'}):
             task_files, task_args, task_args_fnames = t.parse_suite()
-        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-                                                task_args_fnames))
+
         self.assertEqual(task_files[0], self.change_to_abspath(
                          'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml'))
         self.assertEqual(task_files[1], self.change_to_abspath(
                          'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml'))
-        self.assertEqual(task_args[0], None)
-        self.assertEqual(task_args[1], None)
-        self.assertEqual(task_args_fnames[0], None)
-        self.assertEqual(task_args_fnames[1], None)
+        self.assertIsNone(task_args[0])
+        self.assertIsNone(task_args[1])
+        self.assertIsNone(task_args_fnames[0])
+        self.assertIsNone(task_args_fnames[1])
 
-    @mock.patch('yardstick.benchmark.core.task.os.environ')
-    def test_parse_suite_no_constraint_with_args(self, mock_environ):
+    def test_parse_suite_no_constraint_with_args(self):
         SAMPLE_SCENARIO_PATH = "no_constraint_with_args_scenario_sample.yaml"
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
-        with mock.patch('yardstick.benchmark.core.task.os.environ',
+        with mock.patch.object(os, 'environ',
                         new={'NODE_NAME': 'huawei-pod1', 'INSTALLER_TYPE': 'compass'}):
             task_files, task_args, task_args_fnames = t.parse_suite()
-        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-                                                task_args_fnames))
+
         self.assertEqual(task_files[0], self.change_to_abspath(
                          'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml'))
         self.assertEqual(task_files[1], self.change_to_abspath(
                          'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml'))
-        self.assertEqual(task_args[0], None)
+        self.assertIsNone(task_args[0])
         self.assertEqual(task_args[1],
                          '{"host": "node1.LF","target": "node2.LF"}')
-        self.assertEqual(task_args_fnames[0], None)
-        self.assertEqual(task_args_fnames[1], None)
+        self.assertIsNone(task_args_fnames[0])
+        self.assertIsNone(task_args_fnames[1])
 
-    @mock.patch('yardstick.benchmark.core.task.os.environ')
-    def test_parse_suite_with_constraint_no_args(self, mock_environ):
+    def test_parse_suite_with_constraint_no_args(self):
         SAMPLE_SCENARIO_PATH = "with_constraint_no_args_scenario_sample.yaml"
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
-        with mock.patch('yardstick.benchmark.core.task.os.environ',
+        with mock.patch.object(os, 'environ',
                         new={'NODE_NAME': 'huawei-pod1', 'INSTALLER_TYPE': 'compass'}):
             task_files, task_args, task_args_fnames = t.parse_suite()
-        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-                                                task_args_fnames))
         self.assertEqual(task_files[0], self.change_to_abspath(
                          'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml'))
         self.assertEqual(task_files[1], self.change_to_abspath(
                          'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml'))
-        self.assertEqual(task_args[0], None)
-        self.assertEqual(task_args[1], None)
-        self.assertEqual(task_args_fnames[0], None)
-        self.assertEqual(task_args_fnames[1], None)
+        self.assertIsNone(task_args[0])
+        self.assertIsNone(task_args[1])
+        self.assertIsNone(task_args_fnames[0])
+        self.assertIsNone(task_args_fnames[1])
 
-    @mock.patch('yardstick.benchmark.core.task.os.environ')
-    def test_parse_suite_with_constraint_with_args(self, mock_environ):
+    def test_parse_suite_with_constraint_with_args(self):
         SAMPLE_SCENARIO_PATH = "with_constraint_with_args_scenario_sample.yaml"
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
-        with mock.patch('yardstick.benchmark.core.task.os.environ',
+        with mock.patch('os.environ',
                         new={'NODE_NAME': 'huawei-pod1', 'INSTALLER_TYPE': 'compass'}):
             task_files, task_args, task_args_fnames = t.parse_suite()
-        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-                                                task_args_fnames))
+
         self.assertEqual(task_files[0], self.change_to_abspath(
                          'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml'))
         self.assertEqual(task_files[1], self.change_to_abspath(
                          'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml'))
-        self.assertEqual(task_args[0], None)
+        self.assertIsNone(task_args[0])
         self.assertEqual(task_args[1],
                          '{"host": "node1.LF","target": "node2.LF"}')
-        self.assertEqual(task_args_fnames[0], None)
-        self.assertEqual(task_args_fnames[1], None)
+        self.assertIsNone(task_args_fnames[0])
+        self.assertIsNone(task_args_fnames[1])
 
     def test_parse_options(self):
         options = {
             'openstack': {
                 'EXTERNAL_NETWORK': '$network'
             },
-            'ndoes': ['node1', '$node'],
+            'nodes': ['node1', '$node'],
             'host': '$host'
         }
 
@@ -258,48 +253,57 @@ class TaskTestCase(unittest.TestCase):
             'host': 'server.yardstick'
         }
 
-        idle_result = {
+        expected_result = {
             'openstack': {
                 'EXTERNAL_NETWORK': 'ext-net'
             },
-            'ndoes': ['node1', 'node2'],
+            'nodes': ['node1', 'node2'],
+            'host': 'server.yardstick'
+        }
+
+        actual_result = t._parse_options(options)
+        self.assertEqual(expected_result, actual_result)
+
+    def test_parse_options_no_teardown(self):
+        options = {
+            'openstack': {
+                'EXTERNAL_NETWORK': '$network'
+            },
+            'nodes': ['node1', '$node'],
+            'host': '$host',
+            'contexts' : {'name': "my-context",
+                          'no_teardown': True}
+        }
+
+        t = task.Task()
+        t.outputs = {
+            'network': 'ext-net',
+            'node': 'node2',
             'host': 'server.yardstick'
         }
 
+        expected_result = {
+            'openstack': {
+                'EXTERNAL_NETWORK': 'ext-net'
+            },
+            'nodes': ['node1', 'node2'],
+            'host': 'server.yardstick',
+            'contexts': {'name': 'my-context',
+                         'no_teardown': True,
+                        }
+        }
+
         actual_result = t._parse_options(options)
-        self.assertEqual(idle_result, actual_result)
-
-    def test_change_server_name_host_str(self):
-        scenario = {'host': 'demo'}
-        suffix = '-8'
-        task.change_server_name(scenario, suffix)
-        self.assertTrue(scenario['host'], 'demo-8')
-
-    def test_change_server_name_host_dict(self):
-        scenario = {'host': {'name': 'demo'}}
-        suffix = '-8'
-        task.change_server_name(scenario, suffix)
-        self.assertTrue(scenario['host']['name'], 'demo-8')
-
-    def test_change_server_name_target_str(self):
-        scenario = {'target': 'demo'}
-        suffix = '-8'
-        task.change_server_name(scenario, suffix)
-        self.assertTrue(scenario['target'], 'demo-8')
-
-    def test_change_server_name_target_dict(self):
-        scenario = {'target': {'name': 'demo'}}
-        suffix = '-8'
-        task.change_server_name(scenario, suffix)
-        self.assertTrue(scenario['target']['name'], 'demo-8')
-
-    @mock.patch('yardstick.benchmark.core.task.utils')
-    @mock.patch('yardstick.benchmark.core.task.logging')
-    def test_set_log(self, mock_logging, mock_utils):
+        self.assertEqual(expected_result, actual_result)
+
+    @mock.patch('six.moves.builtins.open', side_effect=mock.mock_open())
+    @mock.patch.object(task, 'utils')
+    @mock.patch('logging.root')
+    def test_set_log(self, mock_logging_root, *args):
         task_obj = task.Task()
         task_obj.task_id = 'task_id'
         task_obj._set_log()
-        self.assertTrue(mock_logging.root.addHandler.called)
+        mock_logging_root.addHandler.assert_called()
 
     def _get_file_abspath(self, filename):
         curr_path = os.path.dirname(os.path.abspath(__file__))
@@ -310,9 +314,219 @@ class TaskTestCase(unittest.TestCase):
         return os.path.join(consts.YARDSTICK_ROOT_PATH, filepath)
 
 
-def main():
-    unittest.main()
+class TaskParserTestCase(unittest.TestCase):
+
+    TASK = """
+{% set value1 = value1 or 'var1' %}
+{% set value2 = value2 or 'var2' %}
+key1: {{ value1 }}
+key2:
+    - {{ value2 }}"""
 
+    TASK_RENDERED_1 = u"""
 
-if __name__ == '__main__':
-    main()
+
+key1: var1
+key2:
+    - var2"""
+
+    TASK_RENDERED_2 = u"""
+
+
+key1: var3
+key2:
+    - var4"""
+
+    def setUp(self):
+        self.parser = task.TaskParser('fake/path')
+        self.scenario = {
+            'host': 'athena.demo',
+            'target': 'kratos.demo',
+            'targets': [
+                'ares.demo', 'mars.demo'
+                ],
+            'options': {
+                'server_name': {
+                    'host': 'jupiter.demo',
+                    'target': 'saturn.demo',
+                    },
+                },
+            'nodes': {
+                'tg__0': 'tg_0.demo',
+                'vnf__0': 'vnf_0.demo',
+                }
+            }
+
+    def test__change_node_names(self):
+
+        ctx_attrs = {
+            'name': 'demo',
+            'task_id': '1234567890',
+            'servers': [
+                'athena', 'kratos',
+                'ares', 'mars',
+                'jupiter', 'saturn',
+                'tg_0', 'vnf_0'
+                ]
+            }
+
+        my_context = dummy.DummyContext()
+        my_context.init(ctx_attrs)
+
+        expected_scenario = {
+            'host': 'athena.demo-12345678',
+            'target': 'kratos.demo-12345678',
+            'targets': [
+                'ares.demo-12345678', 'mars.demo-12345678'
+                ],
+            'options': {
+                'server_name': {
+                    'host': 'jupiter.demo-12345678',
+                    'target': 'saturn.demo-12345678',
+                    },
+                },
+            'nodes': {
+                'tg__0': 'tg_0.demo-12345678',
+                'vnf__0': 'vnf_0.demo-12345678',
+                }
+            }
+
+        scenario = copy.deepcopy(self.scenario)
+
+        self.parser._change_node_names(scenario, [my_context])
+        self.assertEqual(scenario, expected_scenario)
+
+    def test__change_node_names_context_not_found(self):
+        scenario = copy.deepcopy(self.scenario)
+        self.assertRaises(exceptions.ScenarioConfigContextNameNotFound,
+                          self.parser._change_node_names,
+                          scenario, [])
+
+    def test__change_node_names_context_name_unchanged(self):
+        ctx_attrs = {
+            'name': 'demo',
+            'task_id': '1234567890',
+            'flags': {
+                'no_setup': True,
+                'no_teardown': True
+                }
+            }
+
+        my_context = dummy.DummyContext()
+        my_context.init(ctx_attrs)
+
+        scenario = copy.deepcopy(self.scenario)
+        expected_scenario = copy.deepcopy(self.scenario)
+
+        self.parser._change_node_names(scenario, [my_context])
+        self.assertEqual(scenario, expected_scenario)
+
+    def test__parse_tasks(self):
+        task_obj = task.Task()
+        _uuid = uuid.uuid4()
+        task_obj.task_id = _uuid
+        task_files = ['/directory/task_file_name.yml']
+        mock_parser = mock.Mock()
+        mock_parser.parse_task.return_value = {'rendered': 'File content'}
+        mock_args = mock.Mock()
+        mock_args.render_only = False
+
+        tasks = task_obj._parse_tasks(mock_parser, task_files, mock_args,
+                                      ['arg1'], ['file_arg1'])
+        self.assertEqual(
+            [{'rendered': 'File content', 'case_name': 'task_file_name'}],
+            tasks)
+        mock_parser.parse_task.assert_called_once_with(
+            _uuid, 'arg1', 'file_arg1')
+
+    @mock.patch.object(sys, 'exit')
+    @mock.patch.object(utils, 'write_file')
+    @mock.patch.object(utils, 'makedirs')
+    def test__parse_tasks_render_only(self, mock_makedirs, mock_write_file,
+                                      mock_exit):
+        task_obj = task.Task()
+        _uuid = uuid.uuid4()
+        task_obj.task_id = _uuid
+        task_files = ['/directory/task_file_name.yml']
+        mock_parser = mock.Mock()
+        mock_parser.parse_task.return_value = {'rendered': 'File content'}
+        mock_args = mock.Mock()
+        mock_args.render_only = '/output_directory'
+
+        task_obj._parse_tasks(mock_parser, task_files, mock_args,
+                              ['arg1'], ['file_arg1'])
+        mock_makedirs.assert_called_once_with('/output_directory')
+        mock_write_file.assert_called_once_with(
+            '/output_directory/000-task_file_name.yml', 'File content')
+        mock_exit.assert_called_once_with(0)
+
+    def test__render_task_no_args(self):
+        task_parser = task.TaskParser('task_file')
+        task_str = io.StringIO(six.text_type(self.TASK))
+        with mock.patch.object(six.moves.builtins, 'open',
+                               return_value=task_str) as mock_open:
+            parsed, rendered = task_parser._render_task(None, None)
+
+        self.assertEqual(self.TASK_RENDERED_1, rendered)
+        self.assertEqual({'key1': 'var1', 'key2': ['var2']}, parsed)
+        mock_open.assert_called_once_with('task_file')
+
+    def test__render_task_arguments(self):
+        task_parser = task.TaskParser('task_file')
+        task_str = io.StringIO(six.text_type(self.TASK))
+        with mock.patch.object(six.moves.builtins, 'open',
+                               return_value=task_str) as mock_open:
+            parsed, rendered = task_parser._render_task('value1: "var1"', None)
+
+        self.assertEqual(self.TASK_RENDERED_1, rendered)
+        self.assertEqual({'key1': 'var1', 'key2': ['var2']}, parsed)
+        mock_open.assert_called_once_with('task_file')
+
+    def test__render_task_file_arguments(self):
+        task_parser = task.TaskParser('task_file')
+        with mock.patch.object(six.moves.builtins, 'open') as mock_open:
+            mock_open.side_effect = (
+                io.StringIO(six.text_type('value2: var4')),
+                io.StringIO(six.text_type(self.TASK))
+            )
+            parsed, rendered = task_parser._render_task('value1: "var3"',
+                                                        'args_file')
+
+        self.assertEqual(self.TASK_RENDERED_2, rendered)
+        self.assertEqual({'key1': 'var3', 'key2': ['var4']}, parsed)
+        mock_open.assert_has_calls([mock.call('args_file'),
+                                    mock.call('task_file')])
+
+    def test__render_task_error_arguments(self):
+        with self.assertRaises(exceptions.TaskRenderArgumentError):
+            task.TaskParser('task_file')._render_task('value1="var3"', None)
+
+    def test__render_task_error_task_file(self):
+        task_parser = task.TaskParser('task_file')
+        with mock.patch.object(six.moves.builtins, 'open') as mock_open:
+            mock_open.side_effect = (
+                io.StringIO(six.text_type('value2: var4')),
+                IOError()
+            )
+            with self.assertRaises(exceptions.TaskReadError):
+                task_parser._render_task('value1: "var3"', 'args_file')
+
+        mock_open.assert_has_calls([mock.call('args_file'),
+                                    mock.call('task_file')])
+
+    def test__render_task_render_error(self):
+        task_parser = task.TaskParser('task_file')
+        with mock.patch.object(six.moves.builtins, 'open') as mock_open, \
+                mock.patch.object(task_template.TaskTemplate, 'render',
+                                  side_effect=TypeError) as mock_render:
+            mock_open.side_effect = (
+                io.StringIO(six.text_type('value2: var4')),
+                io.StringIO(six.text_type(self.TASK))
+            )
+            with self.assertRaises(exceptions.TaskRenderError):
+                task_parser._render_task('value1: "var3"', 'args_file')
+
+        mock_open.assert_has_calls([mock.call('args_file'),
+                                    mock.call('task_file')])
+        mock_render.assert_has_calls(
+            [mock.call(self.TASK, value1='var3', value2='var4')])
index 1f5aad7..1194658 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
 #
@@ -35,11 +33,3 @@ class TestcaseUT(unittest.TestCase):
         casename = Arg()
         result = t.show(casename)
         self.assertTrue(result)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 0fdc423..727207f 100644 (file)
@@ -11,6 +11,8 @@ import time
 
 import mock
 import unittest
+from subprocess import CalledProcessError
+
 
 from yardstick.benchmark.runners import base
 from yardstick.benchmark.runners import iteration
@@ -20,19 +22,19 @@ class ActionTestCase(unittest.TestCase):
 
     @mock.patch("yardstick.benchmark.runners.base.subprocess")
     def test__execute_shell_command(self, mock_subprocess):
-        mock_subprocess.check_output.side_effect = Exception()
+        mock_subprocess.check_output.side_effect = CalledProcessError(-1, '')
 
         self.assertEqual(base._execute_shell_command("")[0], -1)
 
     @mock.patch("yardstick.benchmark.runners.base.subprocess")
     def test__single_action(self, mock_subprocess):
-        mock_subprocess.check_output.side_effect = Exception()
+        mock_subprocess.check_output.side_effect = CalledProcessError(-1, '')
 
         base._single_action(0, "echo", mock.MagicMock())
 
     @mock.patch("yardstick.benchmark.runners.base.subprocess")
     def test__periodic_action(self, mock_subprocess):
-        mock_subprocess.check_output.side_effect = Exception()
+        mock_subprocess.check_output.side_effect = CalledProcessError(-1, '')
 
         base._periodic_action(0, "echo", mock.MagicMock())
 
@@ -40,7 +42,14 @@ class ActionTestCase(unittest.TestCase):
 class RunnerTestCase(unittest.TestCase):
 
     def setUp(self):
-        self.runner = iteration.IterationRunner({})
+        config = {
+            'output_config': {
+                'DEFAULT': {
+                    'dispatcher': 'file'
+                }
+            }
+        }
+        self.runner = iteration.IterationRunner(config)
 
     @mock.patch("yardstick.benchmark.runners.iteration.multiprocessing")
     def test_get_output(self, *args):
@@ -81,11 +90,3 @@ class RunnerTestCase(unittest.TestCase):
 
         with self.assertRaises(NotImplementedError):
             runner._run_benchmark(mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock())
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 1bc0744..4e5b4fe 100644 (file)
@@ -17,15 +17,8 @@ import time
 import mock
 import unittest
 
-from yardstick.tests.unit import STL_MOCKS
-
-STLClient = mock.MagicMock()
-stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
-stl_patch.start()
-
-if stl_patch:
-    from yardstick.benchmark.runners.search import SearchRunner
-    from yardstick.benchmark.runners.search import SearchRunnerHelper
+from yardstick.benchmark.runners.search import SearchRunner
+from yardstick.benchmark.runners.search import SearchRunnerHelper
 
 
 class TestSearchRunnerHelper(unittest.TestCase):
index f0921c0..d5c95a0 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
 #
index 612b5a6..c1b3c0d 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Juan Qiu and others
 # juan_ qiu@tongji.edu.cn
index 0a8e832..2e9f1c6 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
 #
index 9bc04eb..ce97277 100644 (file)
@@ -113,7 +113,3 @@ class BaseMonitorTestCase(unittest.TestCase):
         except Exception:  # pylint: disable=broad-except
             pass
         self.assertIsNone(cls)
-
-
-if __name__ == "__main__":
-    unittest.main()
index ae74d24..d4df028 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huan Li and others
 # lihuansse@tongji.edu.cn
index 72ce7b0..e49544e 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huan Li and others
 # lihuansse@tongji.edu.cn
index 7022ea6..5907c8b 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huan Li and others
 # lihuansse@tongji.edu.cn
index 0d61d9b..e9c6802 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huan Li and others
 # lihuansse@tongji.edu.cn
index 41ce544..a6d2ca3 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
 #
index a965f7f..2b09c03 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huan Li and others
 # lihuansse@tongji.edu.cn
index 234adcb..324a5bd 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huan Li and others
 # lihuansse@tongji.edu.cn
index 548efe9..4d97585 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Kanglin Yin and others
 # 14_ykl@tongji.edu.cn
index b0ddfc6..6f66c30 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
 #
index 7b9a5ad..4bef589 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
 #
index 840ac78..da6e6a2 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Ericsson AB and others.
 #
index 51ffd24..f24ec24 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and other.
 #
@@ -166,10 +164,3 @@ class CyclictestTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, c.run, result)
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index b3152d1..9640ce0 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Ericsson AB and others.
 #
@@ -193,10 +191,3 @@ class LmbenchTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, l.run, self.result)
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index ebae999..8213d44 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
 #
@@ -109,11 +107,3 @@ class MEMLoadTestCase(unittest.TestCase):
         with open(output) as f:
             sample_output = f.read()
         return sample_output
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 680f6ad..8753017 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
 #
index 26a26cd..03003d0 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and other.
 #
@@ -157,11 +155,3 @@ class QemuMigrateTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, q.run, result)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 4f71fbb..dcc0e81 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
 #
@@ -235,10 +233,3 @@ class RamspeedTestCase(unittest.TestCase):
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'No such type_id: 30 for \
                                                Ramspeed scenario')
         self.assertRaises(RuntimeError, r.run, self.result)
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index 74612d7..643e1ea 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
 #
@@ -75,11 +73,3 @@ class SpecCPUTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, s.run, self.result)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index c428e1f..74ef576 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
 #
@@ -76,9 +74,3 @@ class SpecCPUforVMTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, s.run, self.result)
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index fec355b..6339a2d 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and other.
 #
@@ -162,10 +160,3 @@ class UnixbenchTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, u.run, result)
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index bc51318..875302d 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
 #
index bda07f7..98d967f 100644 (file)
@@ -55,11 +55,3 @@ class AddMemoryLoadTestCase(unittest.TestCase):
         obj = AddMemoryLoad(scenario_cfg, context_cfg)
         obj.run({})
         self.assertTrue(mock_from_node.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 25b911d..a61195f 100644 (file)
@@ -24,11 +24,3 @@ class AttachVolumeTestCase(unittest.TestCase):
         obj = AttachVolume(args, {})
         obj.run({})
         self.assertTrue(mock_attach_server_volume.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 7188c29..a48353a 100644 (file)
@@ -76,11 +76,3 @@ class CheckConnectivityTestCase(unittest.TestCase):
         obj.setup()
 
         mock_ssh.SSH.execute.return_value = (0, '100', '')
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index f983f9c..a50e752 100644 (file)
@@ -74,11 +74,3 @@ class CheckNumaInfoTestCase(unittest.TestCase):
         obj = CheckNumaInfo(scenario_cfg, {})
         status = obj._check_vm2_status(info1, info2)
         self.assertFalse(status)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 5a40e7d..7a2324b 100644 (file)
@@ -33,10 +33,3 @@ class CheckValueTestCase(unittest.TestCase):
         obj = CheckValue(scenario_cfg, {})
         self.assertRaises(AssertionError, obj.run, self.result)
         self.assertEqual({}, self.result)
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 036ae95..663ca5d 100644 (file)
@@ -27,11 +27,3 @@ class CreateFlavorTestCase(unittest.TestCase):
         obj = CreateFlavor(args, {})
         obj.run({})
         self.assertTrue(mock_create_flavor.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index b269579..639cf29 100644 (file)
@@ -33,11 +33,3 @@ class CreateImageTestCase(unittest.TestCase):
         obj.run({})
         mock_create_image.assert_called_once()
         mock_get_glance_client.assert_called_once()
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 10e351b..1c3d6ce 100644 (file)
@@ -25,11 +25,3 @@ class CreateKeypairTestCase(unittest.TestCase):
         obj = create_keypair.CreateKeypair(args, {})
         obj.run({})
         mock_op_utils.create_keypair.assert_called_once()
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index e038285..ad4adee 100644 (file)
@@ -28,11 +28,3 @@ class CreateNetworkTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_neutron_client.called)
         self.assertTrue(mock_create_neutron_net.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 0f15058..9a1611c 100644 (file)
@@ -25,11 +25,3 @@ class CreatePortTestCase(unittest.TestCase):
         obj = CreatePort(args, {})
         obj.run({})
         self.assertTrue(mock_get_neutron_client.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 8f3914b..1079214 100644 (file)
@@ -28,11 +28,3 @@ class CreateRouterTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_neutron_client.called)
         self.assertTrue(mock_create_neutron_router.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index c1c137c..b557673 100644 (file)
@@ -28,11 +28,3 @@ class CreateSecGroupTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_neutron_client.called)
         self.assertTrue(mock_create_security_group_full.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 74003b9..faee98f 100644 (file)
@@ -32,11 +32,3 @@ class CreateServerTestCase(unittest.TestCase):
         self.assertTrue(mock_get_glance_client.called)
         self.assertTrue(mock_get_neutron_client.called)
         self.assertTrue(mock_create_instance_and_wait_for_active.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index b7f29df..1536e83 100644 (file)
@@ -30,11 +30,3 @@ class CreateSubnetTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_neutron_client.called)
         self.assertTrue(mock_create_neutron_subnet.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index ca055db..4bfec32 100644 (file)
@@ -98,11 +98,3 @@ class CreateVolumeTestCase(unittest.TestCase):
         self.assertTrue(mock_image_id.called)
         self.assertTrue(mock_get_glance_client.called)
         self.assertTrue(mock_get_cinder_client.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 4a91b89..e345afe 100644 (file)
@@ -25,11 +25,3 @@ class DeleteFlavorTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_nova_client.called)
         self.assertTrue(mock_delete_flavor.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index df23212..be99719 100644 (file)
@@ -25,11 +25,3 @@ class DeleteFloatingIpTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_nova_client.called)
         self.assertTrue(mock_delete_floating_ip.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 9edc2ff..eb3f9fc 100644 (file)
@@ -27,11 +27,3 @@ class DeleteImageTestCase(unittest.TestCase):
         self.assertTrue(mock_delete_image.called)
         self.assertTrue(mock_image_id.called)
         self.assertTrue(mock_get_glance_client.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 73894a9..38cc929 100644 (file)
@@ -25,11 +25,3 @@ class DeleteKeypairTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_nova_client.called)
         self.assertTrue(mock_delete_keypair.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 5f11713..aef99ee 100644 (file)
@@ -6,30 +6,44 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+
+from oslo_utils import uuidutils
 import unittest
 import mock
 
-from yardstick.benchmark.scenarios.lib.delete_network import DeleteNetwork
+import yardstick.common.openstack_utils as op_utils
+from yardstick.benchmark.scenarios.lib import delete_network
 
 
 class DeleteNetworkTestCase(unittest.TestCase):
 
-    @mock.patch('yardstick.common.openstack_utils.get_neutron_client')
-    @mock.patch('yardstick.common.openstack_utils.delete_neutron_net')
-    def test_delete_network(self, mock_get_neutron_client, mock_delete_neutron_net):
-        options = {
-            'network_id': '123-123-123'
-        }
-        args = {"options": options}
-        obj = DeleteNetwork(args, {})
-        obj.run({})
-        self.assertTrue(mock_get_neutron_client.called)
-        self.assertTrue(mock_delete_neutron_net.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
+    def setUp(self):
+        self._mock_delete_neutron_net = mock.patch.object(
+            op_utils, 'delete_neutron_net')
+        self.mock_delete_neutron_net = self._mock_delete_neutron_net.start()
+        self._mock_get_shade_client = mock.patch.object(
+            op_utils, 'get_shade_client')
+        self.mock_get_shade_client = self._mock_get_shade_client.start()
+        self._mock_log = mock.patch.object(delete_network, 'LOG')
+        self.mock_log = self._mock_log.start()
+        _uuid = uuidutils.generate_uuid()
+        self.args = {'options': {'network_id': _uuid}}
+        self._del_obj = delete_network.DeleteNetwork(self.args, mock.ANY)
+
+        self.addCleanup(self._stop_mock)
+
+    def _stop_mock(self):
+        self._mock_delete_neutron_net.stop()
+        self._mock_get_shade_client.stop()
+        self._mock_log.stop()
+
+    def test_run(self):
+        self.mock_delete_neutron_net.return_value = True
+        self.assertTrue(self._del_obj.run({}))
+        self.mock_log.info.assert_called_once_with(
+            "Delete network successful!")
+
+    def test_run_fail(self):
+        self.mock_delete_neutron_net.return_value = False
+        self.assertFalse(self._del_obj.run({}))
+        self.mock_log.error.assert_called_once_with("Delete network failed!")
index de3179b..008ed91 100644 (file)
@@ -23,11 +23,3 @@ class DeletePortTestCase(unittest.TestCase):
         obj = DeletePort(args, {})
         obj.run({})
         self.assertTrue(mock_get_neutron_client.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 73cb812..9b31566 100644 (file)
@@ -25,11 +25,3 @@ class DeleteRouterTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_neutron_client.called)
         self.assertTrue(mock_delete_neutron_router.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 3cfc4ed..e19c38d 100644 (file)
@@ -25,11 +25,3 @@ class DeleteRouterGatewayTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_neutron_client.called)
         self.assertTrue(mock_remove_gateway_router.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 67aff10..6c4fdd5 100644 (file)
@@ -26,11 +26,3 @@ class DeleteRouterInterfaceTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_neutron_client.called)
         self.assertTrue(mock_remove_interface_router.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 622ead5..dedce2d 100644 (file)
@@ -25,11 +25,3 @@ class DeleteServerTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_nova_client.called)
         self.assertTrue(mock_delete_instance.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 9438b07..2ea82e2 100644 (file)
@@ -25,11 +25,3 @@ class DeleteVolumeTestCase(unittest.TestCase):
         obj.run({})
         self.assertTrue(mock_get_cinder_client.called)
         self.assertTrue(mock_delete_volume.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 87af63a..34fbac6 100644 (file)
@@ -24,11 +24,3 @@ class DetachVolumeTestCase(unittest.TestCase):
         obj = DetachVolume(args, {})
         obj.run({})
         self.assertTrue(mock_detach_volume.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index bf12e0a..e9025f3 100644 (file)
@@ -23,11 +23,3 @@ class GetFlavorTestCase(unittest.TestCase):
         obj = GetFlavor(args, {})
         obj.run({})
         self.assertTrue(mock_get_flavor_by_name.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index f046c92..aa9f63e 100644 (file)
@@ -41,11 +41,3 @@ class GetMigrateTargetHostTestCase(unittest.TestCase):
         host = obj._get_migrate_host('host5')
         self.assertTrue(mock_get_nova_client.called)
         self.assertEqual(host, 'host4')
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 50d5238..4b2132c 100644 (file)
@@ -101,11 +101,3 @@ class GetNumaInfoTestCase(unittest.TestCase):
         obj = GetNumaInfo(scenario_cfg, {})
         result = obj._get_current_host_name('1')
         self.assertEqual(result, 'host5')
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index aebbf54..97b81ed 100644 (file)
@@ -40,11 +40,3 @@ class GetServerTestCase(unittest.TestCase):
         obj = GetServer(scenario_cfg, {})
         obj.run({})
         self.assertTrue(mock_get_nova_client.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 3d20d54..04fca16 100644 (file)
@@ -31,11 +31,3 @@ class GetServerIpTestCase(unittest.TestCase):
         obj = GetServerIp(scenario_cfg, {})
         result = obj.run({})
         self.assertEqual(result, {'ip': '127.0.0.1'})
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 4d37452..d663638 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Ericsson AB and others.
 #
@@ -193,10 +191,3 @@ class IperfTestCase(unittest.TestCase):
         with open(output) as f:
             sample_output = f.read()
         return sample_output
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index d82a009..5907562 100755 (executable)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
 #
@@ -121,10 +119,3 @@ class NetperfTestCase(unittest.TestCase):
         with open(output) as f:
             sample_output = f.read()
         return sample_output
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index 8be9bb9..956a9c0 100755 (executable)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
 #
@@ -121,10 +119,3 @@ class NetperfNodeTestCase(unittest.TestCase):
         with open(output) as f:
             sample_output = f.read()
         return sample_output
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index 1227e05..4cdfde6 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and other.
 #
index 3e7a3c5..36e8c8a 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
 #
index 7dd5351..b02d584 100644 (file)
@@ -103,11 +103,3 @@ class NstatTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, n.run, result)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 0635324..4adfab1 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Ericsson AB and others.
 #
@@ -92,10 +90,3 @@ class PingTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, p.run, result)
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index d2be6f5..4662c85 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Ericsson AB and others.
 #
@@ -116,10 +114,3 @@ class PingTestCase(unittest.TestCase):
         mock_ssh.SSH.from_node().execute.side_effect = [
             (0, 'host1', ''), (1, '', 'FOOBAR')]
         self.assertRaises(RuntimeError, p.run, result)
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index acd9027..6aea03a 100644 (file)
@@ -665,11 +665,3 @@ class PktgenTestCase(unittest.TestCase):
         expected_result["packets_received"] = 149300
         expected_result["packetsize"] = 60
         self.assertEqual(result, expected_result)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 99399ab..9760871 100644 (file)
@@ -176,11 +176,3 @@ class PktgenDPDKLatencyTestCase(unittest.TestCase):
 
         self.mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, p.run, result)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 1b12bd5..e90fb07 100644 (file)
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
 ##############################################################################
 # Copyright (c) 2017 Nokia and others.
 #
@@ -192,11 +191,3 @@ class PktgenDPDKTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.assert_called_with(
             "sudo /dpdk/destdir/bin/dpdk-procinfo -- --stats-reset > /dev/null 2>&1")
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 78c0352..a5e5e39 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Ericsson AB and others.
 #
@@ -68,10 +66,3 @@ class SfcTestCase(unittest.TestCase):
         self.sfc.setup()
         self.sfc.run(result)
         self.sfc.teardown()
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index fb55b5e..ec22d61 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
-
-# Unittest for yardstick.benchmark.scenarios.networking.test_vnf_generic
-
-from __future__ import absolute_import
 
+from copy import deepcopy
 import os
-import errno
-import unittest
-import mock
+import sys
 
-from copy import deepcopy
+import mock
+import unittest
 
-from yardstick.tests.unit import STL_MOCKS
-from yardstick.benchmark.scenarios.networking.vnf_generic import \
-    SshManager, NetworkServiceTestCase, IncorrectConfig, \
-    open_relative_file
+from yardstick import tests
+from yardstick.common import utils
 from yardstick.network_services.collector.subscriber import Collector
-from yardstick.network_services.vnf_generic.vnf.base import \
-    GenericTrafficGen, GenericVNF
+from yardstick.network_services.traffic_profile import base
+from yardstick.network_services.vnf_generic import vnfdgen
+from yardstick.error import IncorrectConfig
+from yardstick.network_services.vnf_generic.vnf.base import GenericTrafficGen
+from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
+
+
+stl_patch = mock.patch.dict(sys.modules, tests.STL_MOCKS)
+stl_patch.start()
 
+if stl_patch:
+    from yardstick.benchmark.scenarios.networking import vnf_generic
 
 # pylint: disable=unused-argument
 # disable this for now because I keep forgetting mock patch arg ordering
@@ -317,6 +317,7 @@ class TestNetworkServiceTestCase(unittest.TestCase):
             'task_id': 'a70bdf4a-8e67-47a3-9dc1-273c14506eb7',
             'tc': 'tc_ipv4_1Mflow_64B_packetsize',
             'traffic_profile': 'ipv4_throughput_vpe.yaml',
+            'extra_args': {'arg1': 'value1', 'arg2': 'value2'},
             'type': 'ISB',
             'tc_options': {
                 'rfc2544': {
@@ -345,23 +346,14 @@ class TestNetworkServiceTestCase(unittest.TestCase):
             },
         }
 
-        self.s = NetworkServiceTestCase(self.scenario_cfg, self.context_cfg)
+        self.s = vnf_generic.NetworkServiceTestCase(self.scenario_cfg,
+                                                    self.context_cfg)
 
     def _get_file_abspath(self, filename):
         curr_path = os.path.dirname(os.path.abspath(__file__))
         file_path = os.path.join(curr_path, filename)
         return file_path
 
-    def test_ssh_manager(self):
-        with mock.patch("yardstick.ssh.SSH") as ssh:
-            ssh_mock = mock.Mock(autospec=ssh.SSH)
-            ssh_mock.execute = \
-                mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
-            ssh.from_node.return_value = ssh_mock
-            for _, node_dict in self.context_cfg["nodes"].items():
-                with SshManager(node_dict) as conn:
-                    self.assertIsNotNone(conn)
-
     def test___init__(self):
         assert self.topology
 
@@ -415,7 +407,10 @@ class TestNetworkServiceTestCase(unittest.TestCase):
                 'public_ip': ['1.1.1.1'],
             },
         }
-
+        # NOTE(ralonsoh): check the expected output. This test could be
+        # incorrect
+        # result = {'flow': {'dst_ip0': '152.16.40.2-152.16.40.254',
+        #                    'src_ip0': '152.16.100.2-152.16.100.254'}}
         self.assertEqual({'flow': {}}, self.s._get_traffic_flow())
 
     def test___get_traffic_flow_error(self):
@@ -425,16 +420,16 @@ class TestNetworkServiceTestCase(unittest.TestCase):
 
     def test_get_vnf_imp(self):
         vnfd = COMPLETE_TREX_VNFD['vnfd:vnfd-catalog']['vnfd'][0]['class-name']
-        with mock.patch.dict("sys.modules", STL_MOCKS):
+        with mock.patch.dict(sys.modules, tests.STL_MOCKS):
             self.assertIsNotNone(self.s.get_vnf_impl(vnfd))
 
-            with self.assertRaises(IncorrectConfig) as raised:
-                self.s.get_vnf_impl('NonExistentClass')
+        with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+            self.s.get_vnf_impl('NonExistentClass')
 
-            exc_str = str(raised.exception)
-            print(exc_str)
-            self.assertIn('No implementation', exc_str)
-            self.assertIn('found in', exc_str)
+        exc_str = str(raised.exception)
+        print(exc_str)
+        self.assertIn('No implementation', exc_str)
+        self.assertIn('found in', exc_str)
 
     def test_load_vnf_models_invalid(self):
         self.context_cfg["nodes"]['tg__1']['VNF model'] = \
@@ -456,39 +451,37 @@ class TestNetworkServiceTestCase(unittest.TestCase):
             self.s.load_vnf_models(self.scenario_cfg, self.context_cfg))
 
     def test_map_topology_to_infrastructure(self):
-        with mock.patch("yardstick.ssh.SSH") as ssh:
-            ssh_mock = mock.Mock(autospec=ssh.SSH)
-            ssh_mock.execute = \
-                mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
-            ssh.from_node.return_value = ssh_mock
-            self.s.map_topology_to_infrastructure()
+        self.s.map_topology_to_infrastructure()
 
         nodes = self.context_cfg["nodes"]
-        self.assertEqual(
-            "../../vnf_descriptors/tg_rfc2544_tpl.yaml", nodes['tg__1']['VNF model'])
-        self.assertEqual("../../vnf_descriptors/vpe_vnf.yaml",
+        self.assertEqual('../../vnf_descriptors/tg_rfc2544_tpl.yaml',
+                         nodes['tg__1']['VNF model'])
+        self.assertEqual('../../vnf_descriptors/vpe_vnf.yaml',
                          nodes['vnf__1']['VNF model'])
 
     def test_map_topology_to_infrastructure_insufficient_nodes(self):
-        del self.context_cfg['nodes']['vnf__1']
-        with mock.patch("yardstick.ssh.SSH") as ssh:
-            ssh_mock = mock.Mock(autospec=ssh.SSH)
-            ssh_mock.execute = \
-                mock.Mock(return_value=(1, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
-            ssh.from_node.return_value = ssh_mock
+        cfg = deepcopy(self.context_cfg)
+        del cfg['nodes']['vnf__1']
 
+        cfg_patch = mock.patch.object(self.s, 'context_cfg', cfg)
+        with cfg_patch:
             with self.assertRaises(IncorrectConfig):
                 self.s.map_topology_to_infrastructure()
 
     def test_map_topology_to_infrastructure_config_invalid(self):
-        cfg = dict(self.context_cfg)
+        ssh_mock = mock.Mock()
+        ssh_mock.execute.return_value = 0, SYS_CLASS_NET + IP_ADDR_SHOW, ""
+
+        cfg = deepcopy(self.s.context_cfg)
+
+        # delete all, we don't know which will come first
         del cfg['nodes']['vnf__1']['interfaces']['xe0']['local_mac']
-        with mock.patch("yardstick.ssh.SSH") as ssh:
-            ssh_mock = mock.Mock(autospec=ssh.SSH)
-            ssh_mock.execute = \
-                mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
-            ssh.from_node.return_value = ssh_mock
+        del cfg['nodes']['vnf__1']['interfaces']['xe1']['local_mac']
+        del cfg['nodes']['tg__1']['interfaces']['xe0']['local_mac']
+        del cfg['nodes']['tg__1']['interfaces']['xe1']['local_mac']
 
+        config_patch = mock.patch.object(self.s, 'context_cfg', cfg)
+        with config_patch:
             with self.assertRaises(IncorrectConfig):
                 self.s.map_topology_to_infrastructure()
 
@@ -503,10 +496,8 @@ class TestNetworkServiceTestCase(unittest.TestCase):
             for interface in self.tg__1['interfaces'].values():
                 del interface['local_mac']
 
-            with mock.patch(
-                    "yardstick.benchmark.scenarios.networking.vnf_generic.LOG"):
-                with self.assertRaises(IncorrectConfig) as raised:
-                    self.s._resolve_topology()
+            with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+                self.s._resolve_topology()
 
             self.assertIn('not found', str(raised.exception))
 
@@ -518,10 +509,8 @@ class TestNetworkServiceTestCase(unittest.TestCase):
             self.s.topology["vld"][0]['vnfd-connection-point-ref'].append(
                 self.s.topology["vld"][0]['vnfd-connection-point-ref'][0])
 
-            with mock.patch(
-                    "yardstick.benchmark.scenarios.networking.vnf_generic.LOG"):
-                with self.assertRaises(IncorrectConfig) as raised:
-                    self.s._resolve_topology()
+            with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+                self.s._resolve_topology()
 
             self.assertIn('wrong endpoint count', str(raised.exception))
 
@@ -529,10 +518,8 @@ class TestNetworkServiceTestCase(unittest.TestCase):
             self.s.topology["vld"][0]['vnfd-connection-point-ref'] = \
                 self.s.topology["vld"][0]['vnfd-connection-point-ref'][:1]
 
-            with mock.patch(
-                    "yardstick.benchmark.scenarios.networking.vnf_generic.LOG"):
-                with self.assertRaises(IncorrectConfig) as raised:
-                    self.s._resolve_topology()
+            with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+                self.s._resolve_topology()
 
             self.assertIn('wrong endpoint count', str(raised.exception))
 
@@ -578,7 +565,7 @@ class TestNetworkServiceTestCase(unittest.TestCase):
             self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
             self.s._fill_traffic_profile = \
                 mock.Mock(return_value=TRAFFIC_PROFILE)
-            self.assertEqual(None, self.s.setup())
+            self.assertIsNone(self.s.setup())
 
     def test_setup_exception(self):
         with mock.patch("yardstick.ssh.SSH") as ssh:
@@ -625,15 +612,48 @@ class TestNetworkServiceTestCase(unittest.TestCase):
             self.assertEqual({'imix': {'64B': 100}},
                              self.s._get_traffic_imix())
 
-    def test__fill_traffic_profile(self):
-        with mock.patch.dict("sys.modules", STL_MOCKS):
-            self.scenario_cfg["traffic_profile"] = \
-                self._get_file_abspath("ipv4_throughput_vpe.yaml")
-            self.scenario_cfg["traffic_options"]["flow"] = \
-                self._get_file_abspath("ipv4_1flow_Packets_vpe.yaml")
-            self.scenario_cfg["traffic_options"]["imix"] = \
-                self._get_file_abspath("imix_voice.yaml")
-            self.assertIsNotNone(self.s._fill_traffic_profile())
+    @mock.patch.object(base.TrafficProfile, 'get')
+    @mock.patch.object(vnfdgen, 'generate_vnfd')
+    def test__fill_traffic_profile(self, mock_generate, mock_tprofile_get):
+        fake_tprofile = mock.Mock()
+        fake_vnfd = mock.Mock()
+        with mock.patch.object(self.s, '_get_traffic_profile',
+                               return_value=fake_tprofile) as mock_get_tp:
+            mock_generate.return_value = fake_vnfd
+            self.s._fill_traffic_profile()
+            mock_get_tp.assert_called_once()
+            mock_generate.assert_called_once_with(
+                fake_tprofile,
+                {'downlink': {},
+                 'extra_args': {'arg1': 'value1', 'arg2': 'value2'},
+                 'flow': {'flow': {}},
+                 'imix': {'imix': {'64B': 100}},
+                 'uplink': {}}
+            )
+            mock_tprofile_get.assert_called_once_with(fake_vnfd)
+
+    @mock.patch.object(utils, 'open_relative_file')
+    def test__get_topology(self, mock_open_path):
+        self.s.scenario_cfg['topology'] = 'fake_topology'
+        self.s.scenario_cfg['task_path'] = 'fake_path'
+        mock_open_path.side_effect = mock.mock_open(read_data='fake_data')
+        self.assertEqual('fake_data', self.s._get_topology())
+        mock_open_path.assert_called_once_with('fake_topology', 'fake_path')
+
+    @mock.patch.object(vnfdgen, 'generate_vnfd')
+    def test__render_topology(self, mock_generate):
+        fake_topology = 'fake_topology'
+        mock_generate.return_value = {'nsd:nsd-catalog': {'nsd': ['fake_nsd']}}
+        with mock.patch.object(self.s, '_get_topology',
+                               return_value=fake_topology) as mock_get_topology:
+            self.s._render_topology()
+            mock_get_topology.assert_called_once()
+
+        mock_generate.assert_called_once_with(
+            fake_topology,
+            {'extra_args': {'arg1': 'value1', 'arg2': 'value2'}}
+        )
+        self.assertEqual(self.s.topology, 'fake_nsd')
 
     def test_teardown(self):
         vnf = mock.Mock(autospec=GenericVNF)
@@ -658,141 +678,3 @@ class TestNetworkServiceTestCase(unittest.TestCase):
             mock.Mock(return_value=True)
         with self.assertRaises(RuntimeError):
             self.s.teardown()
-
-    SAMPLE_NETDEVS = {
-        'enp11s0': {
-            'address': '0a:de:ad:be:ef:f5',
-            'device': '0x1533',
-            'driver': 'igb',
-            'ifindex': '2',
-            'interface_name': 'enp11s0',
-            'operstate': 'down',
-            'pci_bus_id': '0000:0b:00.0',
-            'subsystem_device': '0x1533',
-            'subsystem_vendor': '0x15d9',
-            'vendor': '0x8086'
-        },
-        'lan': {
-            'address': '0a:de:ad:be:ef:f4',
-            'device': '0x153a',
-            'driver': 'e1000e',
-            'ifindex': '3',
-            'interface_name': 'lan',
-            'operstate': 'up',
-            'pci_bus_id': '0000:00:19.0',
-            'subsystem_device': '0x153a',
-            'subsystem_vendor': '0x15d9',
-            'vendor': '0x8086'
-        }
-    }
-
-    SAMPLE_VM_NETDEVS = {
-        'eth1': {
-            'address': 'fa:de:ad:be:ef:5b',
-            'device': '0x0001',
-            'driver': 'virtio_net',
-            'ifindex': '3',
-            'interface_name': 'eth1',
-            'operstate': 'down',
-            'pci_bus_id': '0000:00:04.0',
-            'vendor': '0x1af4'
-        }
-    }
-
-    def test_parse_netdev_info(self):
-        output = """\
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/ifindex:2
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/address:0a:de:ad:be:ef:f5
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/operstate:down
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/vendor:0x8086
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/device:0x1533
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/subsystem_vendor:0x15d9
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/subsystem_device:0x1533
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/driver:igb
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/pci_bus_id:0000:0b:00.0
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/ifindex:3
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/address:0a:de:ad:be:ef:f4
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/operstate:up
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/vendor:0x8086
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/device:0x153a
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/subsystem_vendor:0x15d9
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/subsystem_device:0x153a
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/driver:e1000e
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/pci_bus_id:0000:00:19.0
-"""
-        res = NetworkServiceTestCase.parse_netdev_info(output)
-        assert res == self.SAMPLE_NETDEVS
-
-    def test_parse_netdev_info_virtio(self):
-        output = """\
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/ifindex:3
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/address:fa:de:ad:be:ef:5b
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/operstate:down
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/device/vendor:0x1af4
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/device/device:0x0001
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/driver:virtio_net
-"""
-        res = NetworkServiceTestCase.parse_netdev_info(output)
-        assert res == self.SAMPLE_VM_NETDEVS
-
-    def test_probe_missing_values(self):
-        netdevs = self.SAMPLE_NETDEVS.copy()
-        network = {'local_mac': '0a:de:ad:be:ef:f5'}
-        NetworkServiceTestCase._probe_missing_values(netdevs, network)
-        assert network['vpci'] == '0000:0b:00.0'
-
-        network = {'local_mac': '0a:de:ad:be:ef:f4'}
-        NetworkServiceTestCase._probe_missing_values(netdevs, network)
-        assert network['vpci'] == '0000:00:19.0'
-
-    # TODO: Split this into several tests, for different IOError sub-types
-    def test_open_relative_path(self):
-        mock_open = mock.mock_open()
-        mock_open_result = mock_open()
-        mock_open_call_count = 1  # initial call to get result
-
-        module_name = \
-            'yardstick.benchmark.scenarios.networking.vnf_generic.open'
-
-        # test
-        with mock.patch(module_name, mock_open, create=True):
-            self.assertEqual(open_relative_file(
-                'foo', 'bar'), mock_open_result)
-
-            mock_open_call_count += 1  # one more call expected
-            self.assertEqual(mock_open.call_count, mock_open_call_count)
-            self.assertIn('foo', mock_open.call_args_list[-1][0][0])
-            self.assertNotIn('bar', mock_open.call_args_list[-1][0][0])
-
-            def open_effect(*args, **kwargs):
-                if kwargs.get('name', args[0]) == os.path.join('bar', 'foo'):
-                    return mock_open_result
-                raise IOError(errno.ENOENT, 'not found')
-
-            mock_open.side_effect = open_effect
-            self.assertEqual(open_relative_file(
-                'foo', 'bar'), mock_open_result)
-
-            mock_open_call_count += 2  # two more calls expected
-            self.assertEqual(mock_open.call_count, mock_open_call_count)
-            self.assertIn('foo', mock_open.call_args_list[-1][0][0])
-            self.assertIn('bar', mock_open.call_args_list[-1][0][0])
-
-            # test an IOError of type ENOENT
-            mock_open.side_effect = IOError(errno.ENOENT, 'not found')
-            with self.assertRaises(IOError):
-                # the second call still raises
-                open_relative_file('foo', 'bar')
-
-            mock_open_call_count += 2  # two more calls expected
-            self.assertEqual(mock_open.call_count, mock_open_call_count)
-            self.assertIn('foo', mock_open.call_args_list[-1][0][0])
-            self.assertIn('bar', mock_open.call_args_list[-1][0][0])
-
-            # test an IOError other than ENOENT
-            mock_open.side_effect = IOError(errno.EBUSY, 'busy')
-            with self.assertRaises(IOError):
-                open_relative_file('foo', 'bar')
-
-            mock_open_call_count += 1  # one more call expected
-            self.assertEqual(mock_open.call_count, mock_open_call_count)
index be8ac55..419605b 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright 2016 Intel Corporation.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -126,11 +124,3 @@ class VsperfTestCase(unittest.TestCase):
 
         result = {}
         self.assertRaises(RuntimeError, p.run, result)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 1923960..1d2278e 100644 (file)
@@ -211,11 +211,3 @@ class VsperfDPDKTestCase(unittest.TestCase):
 
         result = {}
         self.assertRaises(RuntimeError, self.scenario.run, result)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index ee2bbc0..9fd5cce 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and other.
 #
@@ -70,10 +68,3 @@ class ParserTestCase(unittest.TestCase):
         self.mock_call.return_value = 0
         self.scenario.teardown()
         self.assertTrue(self.scenario.teardown_done)
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index b98dcea..d785065 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
 #
@@ -9,8 +7,6 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.storage.bonnie.Bonnie
-
 from __future__ import absolute_import
 
 import unittest
@@ -65,11 +61,3 @@ class BonnieTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, b.run, self.result)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 0cffea2..f47d1ca 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Ericsson AB and others.
 #
@@ -263,11 +261,3 @@ class FioTestCase(unittest.TestCase):
         with open(output) as f:
             sample_output = f.read()
         return sample_output
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 095674f..c1c731b 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
 #
@@ -99,10 +97,3 @@ class StorageCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, c.run, self.result)
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index 52786d7..5844746 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2016 Huawei Technologies Co.,Ltd.
 #
@@ -233,10 +231,3 @@ class StorPerfTestCase(unittest.TestCase):
         s = storperf.StorPerf(args, self.ctx)
 
         self.assertRaises(AssertionError, s.teardown(), self.result)
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index a95e6bc..9853385 100644 (file)
 #    License for the specific language governing permissions and limitations
 #    under the License.
 
-import unittest
-
 from yardstick.benchmark.scenarios import base
+from yardstick.tests.unit import base as ut_base
 
 
-class ScenarioTestCase(unittest.TestCase):
+class ScenarioTestCase(ut_base.BaseUnitTestCase):
 
     def test_get_scenario_type(self):
         scenario_type = 'dummy scenario'
@@ -87,7 +86,7 @@ class ScenarioTestCase(unittest.TestCase):
                          str(exc.exception))
 
 
-class IterScenarioClassesTestCase(unittest.TestCase):
+class IterScenarioClassesTestCase(ut_base.BaseUnitTestCase):
 
     def test_no_scenario_type_defined(self):
         some_existing_scenario_class_names = [
index 89ea128..b01195f 100644 (file)
@@ -99,7 +99,7 @@ class AnsibleNodeTestCase(unittest.TestCase):
 
     def test_ansible_node_getattr(self):
         a = ansible_common.AnsibleNode({"name": "name"})
-        self.assertEqual(getattr(a, "nosuch", None), None)
+        self.assertIsNone(getattr(a, "nosuch", None))
 
 
 class AnsibleNodeDictTestCase(unittest.TestCase):
@@ -145,7 +145,7 @@ class AnsibleNodeDictTestCase(unittest.TestCase):
 
 class AnsibleCommonTestCase(unittest.TestCase):
     def test_get_timeouts(self):
-        self.assertAlmostEquals(ansible_common.AnsibleCommon.get_timeout(-100), 1200.0)
+        self.assertAlmostEqual(ansible_common.AnsibleCommon.get_timeout(-100), 1200.0)
 
     def test__init__(self):
         ansible_common.AnsibleCommon({})
index eb09d1a..12a8be3 100644 (file)
@@ -33,11 +33,3 @@ class HttpClientTestCase(unittest.TestCase):
         url = 'http://localhost:5000/hello'
         httpClient.HttpClient().get(url)
         mock_requests.get.assert_called_with(url)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index b685e63..4863f05 100644 (file)
@@ -11,6 +11,7 @@ from oslo_utils import uuidutils
 import unittest
 import mock
 
+from shade import exc
 from yardstick.common import openstack_utils
 
 
@@ -53,4 +54,32 @@ class GetNetworkIdTestCase(unittest.TestCase):
 
         output = openstack_utils.get_network_id(mock_shade_client,
                                                 'network_name')
-        self.assertEqual(None, output)
+        self.assertIsNone(output)
+
+
+class DeleteNeutronNetTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.mock_shade_client = mock.Mock()
+        self.mock_shade_client.delete_network = mock.Mock()
+
+    def test_delete_neutron_net(self):
+        self.mock_shade_client.delete_network.return_value = True
+        output = openstack_utils.delete_neutron_net(self.mock_shade_client,
+                                                    'network_id')
+        self.assertTrue(output)
+
+    def test_delete_neutron_net_fail(self):
+        self.mock_shade_client.delete_network.return_value = False
+        output = openstack_utils.delete_neutron_net(self.mock_shade_client,
+                                                    'network_id')
+        self.assertFalse(output)
+
+    @mock.patch.object(openstack_utils, 'log')
+    def test_delete_neutron_net_exception(self, mock_logger):
+        self.mock_shade_client.delete_network.side_effect = (
+            exc.OpenStackCloudException('error message'))
+        output = openstack_utils.delete_neutron_net(self.mock_shade_client,
+                                                    'network_id')
+        self.assertFalse(output)
+        mock_logger.error.assert_called_once()
diff --git a/yardstick/tests/unit/common/test_packages.py b/yardstick/tests/unit/common/test_packages.py
new file mode 100644 (file)
index 0000000..ba59a30
--- /dev/null
@@ -0,0 +1,88 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+from pip import exceptions as pip_exceptions
+from pip.operations import freeze
+import unittest
+
+from yardstick.common import packages
+
+
+class PipExecuteActionTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self._mock_pip_main = mock.patch.object(packages, '_pip_main')
+        self.mock_pip_main = self._mock_pip_main.start()
+        self.mock_pip_main.return_value = 0
+        self._mock_freeze = mock.patch.object(freeze, 'freeze')
+        self.mock_freeze = self._mock_freeze.start()
+        self.addCleanup(self._cleanup)
+
+    def _cleanup(self):
+        self._mock_pip_main.stop()
+        self._mock_freeze.stop()
+
+    def test_pip_execute_action(self):
+        self.assertEqual(0, packages._pip_execute_action('test_package'))
+
+    def test_remove(self):
+        self.assertEqual(0, packages._pip_execute_action('test_package',
+                                                         action='uninstall'))
+
+    def test_install(self):
+        self.assertEqual(0, packages._pip_execute_action(
+            'test_package', action='install', target='temp_dir'))
+
+    def test_pip_execute_action_error(self):
+        self.mock_pip_main.return_value = 1
+        self.assertEqual(1, packages._pip_execute_action('test_package'))
+
+    def test_pip_execute_action_exception(self):
+        self.mock_pip_main.side_effect = pip_exceptions.PipError
+        self.assertEqual(1, packages._pip_execute_action('test_package'))
+
+    def test_pip_list(self):
+        pkg_input = [
+            'XStatic-Rickshaw==1.5.0.0',
+            'xvfbwrapper==0.2.9',
+            '-e git+https://git.opnfv.org/yardstick@50773a24afc02c9652b662ecca'
+            '2fc5621ea6097a#egg=yardstick',
+            'zope.interface==4.4.3'
+        ]
+        pkg_dict = {
+            'XStatic-Rickshaw': '1.5.0.0',
+            'xvfbwrapper': '0.2.9',
+            'yardstick': '50773a24afc02c9652b662ecca2fc5621ea6097a',
+            'zope.interface': '4.4.3'
+        }
+        self.mock_freeze.return_value = pkg_input
+
+        pkg_output = packages.pip_list()
+        for pkg_name, pkg_version in pkg_output.items():
+            self.assertEqual(pkg_dict.get(pkg_name), pkg_version)
+
+    def test_pip_list_single_package(self):
+        pkg_input = [
+            'XStatic-Rickshaw==1.5.0.0',
+            'xvfbwrapper==0.2.9',
+            '-e git+https://git.opnfv.org/yardstick@50773a24afc02c9652b662ecca'
+            '2fc5621ea6097a#egg=yardstick',
+            'zope.interface==4.4.3'
+        ]
+        self.mock_freeze.return_value = pkg_input
+
+        pkg_output = packages.pip_list(pkg_name='xvfbwrapper')
+        self.assertEqual(1, len(pkg_output))
+        self.assertEqual(pkg_output.get('xvfbwrapper'), '0.2.9')
index 44aa803..56253ef 100644 (file)
@@ -45,10 +45,3 @@ class TemplateFormatTestCase(unittest.TestCase):
                 "Resources: {}\n" \
                 "Outputs: {}"
         self.assertRaises(ValueError, template_format.parse, yaml2)
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index 033bb02..e71d0ff 100644 (file)
@@ -19,6 +19,8 @@ from six.moves import configparser
 import unittest
 
 import yardstick
+from yardstick import ssh
+import yardstick.error
 from yardstick.common import utils
 from yardstick.common import constants
 
@@ -125,6 +127,63 @@ class CommonUtilTestCase(unittest.TestCase):
             ("=".join(item) for item in sorted(flattened_data.items())))
         self.assertEqual(result, line)
 
+    def test_get_key_with_default_negative(self):
+        with self.assertRaises(KeyError):
+            utils.get_key_with_default({}, 'key1')
+
+    @mock.patch('yardstick.common.utils.open', create=True)
+    def test_find_relative_file_not_found(self, mock_open):
+        mock_open.side_effect = IOError
+
+        with self.assertRaises(IOError):
+            utils.find_relative_file('my/path', 'task/path')
+
+        self.assertEqual(mock_open.call_count, 2)
+
+    @mock.patch('yardstick.common.utils.open', create=True)
+    def test_open_relative_path(self, mock_open):
+        mock_open_result = mock_open()
+        mock_open_call_count = 1  # initial call to get result
+
+        self.assertEqual(utils.open_relative_file('foo', 'bar'), mock_open_result)
+
+        mock_open_call_count += 1  # one more call expected
+        self.assertEqual(mock_open.call_count, mock_open_call_count)
+        self.assertIn('foo', mock_open.call_args_list[-1][0][0])
+        self.assertNotIn('bar', mock_open.call_args_list[-1][0][0])
+
+        def open_effect(*args, **kwargs):
+            if kwargs.get('name', args[0]) == os.path.join('bar', 'foo'):
+                return mock_open_result
+            raise IOError(errno.ENOENT, 'not found')
+
+        mock_open.side_effect = open_effect
+        self.assertEqual(utils.open_relative_file('foo', 'bar'), mock_open_result)
+
+        mock_open_call_count += 2  # two more calls expected
+        self.assertEqual(mock_open.call_count, mock_open_call_count)
+        self.assertIn('foo', mock_open.call_args_list[-1][0][0])
+        self.assertIn('bar', mock_open.call_args_list[-1][0][0])
+
+        # test an IOError of type ENOENT
+        mock_open.side_effect = IOError(errno.ENOENT, 'not found')
+        with self.assertRaises(IOError):
+            # the second call still raises
+            utils.open_relative_file('foo', 'bar')
+
+        mock_open_call_count += 2  # two more calls expected
+        self.assertEqual(mock_open.call_count, mock_open_call_count)
+        self.assertIn('foo', mock_open.call_args_list[-1][0][0])
+        self.assertIn('bar', mock_open.call_args_list[-1][0][0])
+
+        # test an IOError other than ENOENT
+        mock_open.side_effect = IOError(errno.EBUSY, 'busy')
+        with self.assertRaises(IOError):
+            utils.open_relative_file('foo', 'bar')
+
+        mock_open_call_count += 1  # one more call expected
+        self.assertEqual(mock_open.call_count, mock_open_call_count)
+
 
 class TestMacAddressToHex(unittest.TestCase):
 
@@ -930,9 +989,9 @@ class TestUtils(unittest.TestCase):
 
     def test_error_class(self):
         with self.assertRaises(RuntimeError):
-            utils.ErrorClass()
+            yardstick.error.ErrorClass()
 
-        error_instance = utils.ErrorClass(test='')
+        error_instance = yardstick.error.ErrorClass(test='')
         with self.assertRaises(AttributeError):
             error_instance.get_name()
 
@@ -1075,8 +1134,27 @@ class SafeDecodeUtf8TestCase(unittest.TestCase):
         self.assertEqual('this is a byte array', out)
 
 
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
+class ReadMeminfoTestCase(unittest.TestCase):
+
+    MEMINFO = (b'MemTotal:       65860500 kB\n'
+               b'MemFree:        28690900 kB\n'
+               b'MemAvailable:   52873764 kB\n'
+               b'Active(anon):    3015676 kB\n'
+               b'HugePages_Total:       8\n'
+               b'Hugepagesize:    1048576 kB')
+    MEMINFO_DICT = {'MemTotal': '65860500',
+                    'MemFree': '28690900',
+                    'MemAvailable': '52873764',
+                    'Active(anon)': '3015676',
+                    'HugePages_Total': '8',
+                    'Hugepagesize': '1048576'}
+
+    def test_read_meminfo(self):
+        ssh_client = ssh.SSH('user', 'host')
+        with mock.patch.object(ssh_client, 'get_file_obj') as \
+                mock_get_client, \
+                mock.patch.object(six, 'BytesIO',
+                                  return_value=six.BytesIO(self.MEMINFO)):
+            output = utils.read_meminfo(ssh_client)
+            mock_get_client.assert_called_once_with('/proc/meminfo', mock.ANY)
+        self.assertEqual(self.MEMINFO_DICT, output)
index 6c2beb4..e621dcb 100644 (file)
@@ -12,7 +12,6 @@
 
 # yardstick: this file is copied from python-heatclient and slightly modified
 
-from __future__ import absolute_import
 import unittest
 
 from yardstick.common import yaml_loader
@@ -23,10 +22,3 @@ class TemplateFormatTestCase(unittest.TestCase):
     def test_parse_to_value_exception(self):
 
         self.assertEqual(yaml_loader.yaml_load("string"), u"string")
-
-
-def main():
-    unittest.main()
-
-if __name__ == '__main__':
-    main()
index bca94e3..c844d4b 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
 #
@@ -104,11 +102,3 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
         mock_time.time.return_value = 1451461248.925574
         self.assertEqual(influxdb._get_nano_timestamp(results),
                          '1451461248925574144')
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index f53c9b7..9ab8740 100644 (file)
@@ -9,6 +9,7 @@
 
 import tempfile
 
+import munch
 import mock
 from oslo_serialization import jsonutils
 from oslo_utils import uuidutils
@@ -40,12 +41,16 @@ class HeatStackTestCase(unittest.TestCase):
         self._mock_stack_delete = mock.patch.object(self.heatstack._cloud,
                                                     'delete_stack')
         self.mock_stack_delete = self._mock_stack_delete.start()
+        self._mock_stack_get = mock.patch.object(self.heatstack._cloud,
+                                                 'get_stack')
+        self.mock_stack_get = self._mock_stack_get.start()
 
         self.addCleanup(self._cleanup)
 
     def _cleanup(self):
         self._mock_stack_create.stop()
         self._mock_stack_delete.stop()
+        self._mock_stack_get.stop()
         heat._DEPLOYED_STACKS = {}
 
     def test_create(self):
@@ -89,6 +94,100 @@ class HeatStackTestCase(unittest.TestCase):
         self.assertFalse(heat._DEPLOYED_STACKS)
         self.mock_stack_delete.assert_called_once_with(id, wait=True)
 
+    def test_delete_bug_in_shade(self):
+        stack_id = uuidutils.generate_uuid()
+        self.heatstack._stack = FakeStack(
+            outputs=mock.Mock(), status=mock.Mock(), id=stack_id)
+        heat._DEPLOYED_STACKS[stack_id] = self.heatstack._stack
+        self.mock_stack_delete.side_effect = TypeError()
+
+        ret = self.heatstack.delete(wait=True)
+        self.assertTrue(ret)
+        self.assertFalse(heat._DEPLOYED_STACKS)
+        self.mock_stack_delete.assert_called_once_with(stack_id, wait=True)
+
+    def test_get(self):
+        # make sure shade/get_stack is called with the appropriate vars
+        self.mock_stack_get.return_value = munch.Munch(
+            id="my-existing-stack-id",
+            outputs=[
+                {
+                 u'output_value': u'b734d06a-dec7-...',
+                 u'output_key': u'ares.demo-test-port-network_id',
+                 u'description': u''
+                },
+                {u'output_value': u'b08da78c-2218-...',
+                 u'output_key': u'ares.demo-test-port-subnet_id',
+                 u'description': u''
+                },
+                {u'output_value': u'10.0.1.0/24',
+                 u'output_key': u'demo-test-subnet-cidr',
+                 u'description': u''
+                },
+                {u'output_value': u'b08da78c-2218-...',
+                 u'output_key': u'demo-test-subnet',
+                 u'description': u''
+                },
+                {u'output_value': u'b1a03624-aefc-...',
+                 u'output_key': u'ares.demo',
+                 u'description': u''
+                },
+                {u'output_value': u'266a8088-c630-...',
+                 u'output_key': u'demo-secgroup',
+                 u'description': u''
+                },
+                {u'output_value': u'10.0.1.5',
+                 u'output_key': u'ares.demo-test-port',
+                 u'description': u''
+                },
+                {u'output_value': u'10.0.1.1',
+                 u'output_key': u'demo-test-subnet-gateway_ip',
+                 u'description': u''
+                },
+                {u'output_value': u'',
+                 u'output_key': u'ares.demo-test-port-device_id',
+                 u'description': u''
+                },
+                {u'output_value': u'172.24.4.7',
+                 u'output_key': u'ares.demo-fip',
+                 u'description': u''
+                },
+                {u'output_value': u'fa:16:3e:6c:c3:0f',
+                 u'output_key': u'ares.demo-test-port-mac_address',
+                 u'description': u''}
+            ]
+        )
+        expected_outputs = {
+            'ares.demo-test-port-network_id': 'b734d06a-dec7-...',
+            'ares.demo-test-port-subnet_id': 'b08da78c-2218-...',
+            'demo-test-subnet-cidr': '10.0.1.0/24',
+            'demo-test-subnet': 'b08da78c-2218-...',
+            'ares.demo': 'b1a03624-aefc-...',
+            'demo-secgroup': '266a8088-c630-...',
+            'ares.demo-test-port': '10.0.1.5',
+            'demo-test-subnet-gateway_ip': '10.0.1.1',
+            'ares.demo-test-port-device_id': '',
+            'ares.demo-fip': '172.24.4.7',
+            'ares.demo-test-port-mac_address': 'fa:16:3e:6c:c3:0f',
+        }
+
+        stack_id = "my-existing-stack-id"
+        self.heatstack.name = "my-existing-stack"
+        self.heatstack.get()
+
+        self.mock_stack_get.assert_called_once_with(self.heatstack.name)
+        self.assertEqual(expected_outputs, self.heatstack.outputs)
+        self.assertEqual(1, len(heat._DEPLOYED_STACKS))
+        self.assertEqual(self.heatstack._stack,
+                         heat._DEPLOYED_STACKS[stack_id])
+
+    def test_get_invalid_name(self):
+        # No context matching this name exists
+        self.mock_stack_get.return_value = []
+        self.heatstack.name = 'not-a-stack'
+        self.heatstack.get()
+        self.assertEqual(0, len(heat._DEPLOYED_STACKS))
+
 
 class HeatTemplateTestCase(unittest.TestCase):
 
@@ -148,7 +247,8 @@ class HeatTemplateTestCase(unittest.TestCase):
 
     def test__add_resources_to_template_raw(self):
         test_context = node.NodeContext()
-        test_context.name = 'foo'
+        self.addCleanup(test_context._delete_context)
+        test_context._name = 'foo'
         test_context.template_file = '/tmp/some-heat-file'
         test_context.heat_parameters = {'image': 'cirros'}
         test_context.key_filename = "/tmp/1234"
index 33fa1dc..f2bc5b0 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 ##############################################################################
 # Copyright (c) 2017 Intel Corporation
 #
@@ -104,11 +102,3 @@ service ssh restart;while true ; do sleep 10000; done']
         mock_get_pod_list.return_value.items = []
         pods = k8s_template.get_rc_pods()
         self.assertEqual(pods, [])
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 13c3ed4..1156b66 100644 (file)
@@ -63,11 +63,3 @@ class EnvCommandTestCase(unittest.TestCase):
         except Exception as e:  # pylint: disable=broad-except
             # NOTE(ralonsoh): try to reduce the scope of this exception.
             self.assertIsInstance(e, IndexError)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 7ef157c..f6f842e 100644 (file)
@@ -19,11 +19,3 @@ class TestcaseCommandsUT(unittest.TestCase):
         mock_client.get.return_value = {'result': []}
         TestcaseCommands().do_list({})
         self.assertTrue(mock_print.called)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()
index 483e82a..d64b0c5 100644 (file)
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
 # Copyright (c) 2016-2017 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -91,17 +89,17 @@ class TestYardstickNSCli(unittest.TestCase):
         subprocess.check_output = mock.Mock(return_value=0)
         args = {"vnf": "vpe",
                 "test": "tc_baremetal_rfc2544_ipv4_1flow_1518B.yaml"}
-        self.assertEqual(None, yardstick_ns_cli.run_test(args, test_path))
+        self.assertIsNone(yardstick_ns_cli.run_test(args, test_path))
         os.chdir(cur_dir)
         args = {"vnf": "vpe1"}
-        self.assertEqual(None, yardstick_ns_cli.run_test(args, test_path))
+        self.assertIsNone(yardstick_ns_cli.run_test(args, test_path))
         os.chdir(cur_dir)
         args = {"vnf": "vpe",
                 "test": "tc_baremetal_rfc2544_ipv4_1flow_1518B.yaml."}
-        self.assertEqual(None, yardstick_ns_cli.run_test(args, test_path))
+        self.assertIsNone(yardstick_ns_cli.run_test(args, test_path))
         os.chdir(cur_dir)
         args = []
-        self.assertEqual(None, yardstick_ns_cli.run_test(args, test_path))
+        self.assertIsNone(yardstick_ns_cli.run_test(args, test_path))
         os.chdir(cur_dir)
 
     def test_terminate_if_less_options(self):
index dbaae8c..f922900 100644 (file)
@@ -21,12 +21,13 @@ import os
 import socket
 import unittest
 from io import StringIO
+from itertools import count
 
 import mock
 from oslo_utils import encodeutils
 
 from yardstick import ssh
-from yardstick.ssh import SSHError
+from yardstick.ssh import SSHError, SSHTimeout
 from yardstick.ssh import SSH
 from yardstick.ssh import AutoConnectSSH
 
@@ -508,13 +509,45 @@ class SSHRunTestCase(unittest.TestCase):
 
 class TestAutoConnectSSH(unittest.TestCase):
 
-    def test__connect_with_wait(self):
-        auto_connect_ssh = AutoConnectSSH('user1', 'host1', wait=True)
-        auto_connect_ssh._get_client = mock.Mock()
-        auto_connect_ssh.wait = mock_wait = mock.Mock()
+    def test__connect_loop(self):
+        auto_connect_ssh = AutoConnectSSH('user1', 'host1', wait=0)
+        auto_connect_ssh._get_client = mock__get_client = mock.Mock()
 
         auto_connect_ssh._connect()
-        self.assertEqual(mock_wait.call_count, 1)
+        self.assertEqual(mock__get_client.call_count, 1)
+
+    def test___init___negative(self):
+        with self.assertRaises(TypeError):
+            AutoConnectSSH('user1', 'host1', wait=['wait'])
+
+        with self.assertRaises(ValueError):
+            AutoConnectSSH('user1', 'host1', wait='wait')
+
+    @mock.patch('yardstick.ssh.time')
+    def test__connect_loop_ssh_error(self, mock_time):
+        mock_time.time.side_effect = count()
+
+        auto_connect_ssh = AutoConnectSSH('user1', 'host1', wait=10)
+        auto_connect_ssh._get_client = mock__get_client = mock.Mock()
+        mock__get_client.side_effect = SSHError
+
+        with self.assertRaises(SSHTimeout):
+            auto_connect_ssh._connect()
+
+        self.assertEqual(mock_time.time.call_count, 12)
+
+    def test_get_file_obj(self):
+        auto_connect_ssh = AutoConnectSSH('user1', 'host1', wait=10)
+        auto_connect_ssh._get_client = mock__get_client = mock.Mock()
+        mock_client = mock__get_client()
+        mock_open_sftp = mock_client.open_sftp()
+        mock_sftp = mock.Mock()
+        mock_open_sftp.__enter__ = mock.Mock(return_value=mock_sftp)
+        mock_open_sftp.__exit__ = mock.Mock()
+
+        auto_connect_ssh.get_file_obj('remote/path', mock.Mock())
+
+        self.assertEqual(mock_sftp.getfo.call_count, 1)
 
     def test__make_dict(self):
         auto_connect_ssh = AutoConnectSSH('user1', 'host1')
@@ -527,7 +560,7 @@ class TestAutoConnectSSH(unittest.TestCase):
             'key_filename': None,
             'password': None,
             'name': None,
-            'wait': True,
+            'wait': AutoConnectSSH.DEFAULT_WAIT_TIMEOUT,
         }
         result = auto_connect_ssh._make_dict()
         self.assertDictEqual(result, expected)
@@ -537,6 +570,13 @@ class TestAutoConnectSSH(unittest.TestCase):
 
         self.assertEqual(auto_connect_ssh.get_class(), AutoConnectSSH)
 
+    def test_drop_connection(self):
+        auto_connect_ssh = AutoConnectSSH('user1', 'host1')
+        self.assertFalse(auto_connect_ssh._client)
+        auto_connect_ssh._client = True
+        auto_connect_ssh.drop_connection()
+        self.assertFalse(auto_connect_ssh._client)
+
     @mock.patch('yardstick.ssh.SCPClient')
     def test_put(self, mock_scp_client_type):
         auto_connect_ssh = AutoConnectSSH('user1', 'host1')
@@ -562,11 +602,3 @@ class TestAutoConnectSSH(unittest.TestCase):
 
         auto_connect_ssh.put_file('a', 'b')
         self.assertEqual(mock_put_sftp.call_count, 1)
-
-
-def main():
-    unittest.main()
-
-
-if __name__ == '__main__':
-    main()