Merge "Adding python package requirement for VNF testing."
authorJing Lu <lvjing5@huawei.com>
Tue, 17 Jan 2017 02:48:34 +0000 (02:48 +0000)
committerGerrit Code Review <gerrit@opnfv.org>
Tue, 17 Jan 2017 02:48:34 +0000 (02:48 +0000)
218 files changed:
api/base.py
api/conf.py
api/database/__init__.py
api/database/handler.py [new file with mode: 0644]
api/database/handlers.py [new file with mode: 0644]
api/database/models.py
api/resources/asynctask.py [new file with mode: 0644]
api/resources/env_action.py
api/resources/release_action.py
api/resources/results.py
api/resources/samples_action.py
api/resources/testsuites_action.py [new file with mode: 0644]
api/server.py
api/swagger/docs/release_action.yaml [moved from api/swagger/docs/testcases.yaml with 100% similarity]
api/swagger/docs/testsuites_action.yaml [new file with mode: 0644]
api/swagger/models.py
api/urls.py
api/utils/common.py
api/utils/daemonthread.py
api/utils/influx.py
api/views.py
ez_setup.py
requirements.txt
samples/tosca.yaml
setup.py
tests/functional/test_cli_runner.py
tests/functional/test_cli_scenario.py
tests/functional/utils.py
tests/unit/api/utils/test_common.py
tests/unit/api/utils/test_influx.py
tests/unit/benchmark/contexts/test_dummy.py
tests/unit/benchmark/contexts/test_heat.py
tests/unit/benchmark/contexts/test_model.py
tests/unit/benchmark/contexts/test_node.py
tests/unit/benchmark/core/__init__.py [new file with mode: 0644]
tests/unit/benchmark/core/no_constraint_no_args_scenario_sample.yaml [moved from tests/unit/cmd/commands/no_constraint_no_args_scenario_sample.yaml with 100% similarity]
tests/unit/benchmark/core/no_constraint_with_args_scenario_sample.yaml [moved from tests/unit/cmd/commands/no_constraint_with_args_scenario_sample.yaml with 100% similarity]
tests/unit/benchmark/core/test_plugin.py [moved from tests/unit/cmd/commands/test_plugin.py with 68% similarity]
tests/unit/benchmark/core/test_task.py [moved from tests/unit/cmd/commands/test_task.py with 59% similarity]
tests/unit/benchmark/core/test_testcase.py [moved from tests/unit/cmd/commands/test_testcase.py with 61% similarity]
tests/unit/benchmark/core/with_constraint_no_args_scenario_sample.yaml [moved from tests/unit/cmd/commands/with_constraint_no_args_scenario_sample.yaml with 100% similarity]
tests/unit/benchmark/core/with_constraint_with_args_scenario_sample.yaml [moved from tests/unit/cmd/commands/with_constraint_with_args_scenario_sample.yaml with 100% similarity]
tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
tests/unit/benchmark/scenarios/availability/test_attacker_general.py
tests/unit/benchmark/scenarios/availability/test_attacker_process.py
tests/unit/benchmark/scenarios/availability/test_basemonitor.py
tests/unit/benchmark/scenarios/availability/test_baseoperation.py
tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py
tests/unit/benchmark/scenarios/availability/test_director.py
tests/unit/benchmark/scenarios/availability/test_monitor_command.py
tests/unit/benchmark/scenarios/availability/test_monitor_general.py
tests/unit/benchmark/scenarios/availability/test_monitor_process.py
tests/unit/benchmark/scenarios/availability/test_operation_general.py
tests/unit/benchmark/scenarios/availability/test_result_checker_general.py
tests/unit/benchmark/scenarios/availability/test_scenario_general.py
tests/unit/benchmark/scenarios/availability/test_serviceha.py
tests/unit/benchmark/scenarios/compute/test_cachestat.py
tests/unit/benchmark/scenarios/compute/test_computecapacity.py
tests/unit/benchmark/scenarios/compute/test_cpuload.py
tests/unit/benchmark/scenarios/compute/test_cyclictest.py
tests/unit/benchmark/scenarios/compute/test_lmbench.py
tests/unit/benchmark/scenarios/compute/test_memload.py
tests/unit/benchmark/scenarios/compute/test_plugintest.py
tests/unit/benchmark/scenarios/compute/test_ramspeed.py
tests/unit/benchmark/scenarios/compute/test_unixbench.py
tests/unit/benchmark/scenarios/dummy/test_dummy.py
tests/unit/benchmark/scenarios/networking/test_iperf3.py
tests/unit/benchmark/scenarios/networking/test_netperf.py
tests/unit/benchmark/scenarios/networking/test_netperf_node.py
tests/unit/benchmark/scenarios/networking/test_netutilization.py
tests/unit/benchmark/scenarios/networking/test_networkcapacity.py
tests/unit/benchmark/scenarios/networking/test_ping.py
tests/unit/benchmark/scenarios/networking/test_ping6.py
tests/unit/benchmark/scenarios/networking/test_pktgen.py
tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
tests/unit/benchmark/scenarios/networking/test_sfc.py
tests/unit/benchmark/scenarios/networking/test_vsperf.py
tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py
tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py
tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py
tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py
tests/unit/benchmark/scenarios/parser/test_parser.py
tests/unit/benchmark/scenarios/storage/test_fio.py
tests/unit/benchmark/scenarios/storage/test_storagecapacity.py
tests/unit/benchmark/scenarios/storage/test_storperf.py
tests/unit/cmd/commands/test_env.py
tests/unit/common/test_httpClient.py
tests/unit/common/test_openstack_utils.py
tests/unit/common/test_template_format.py
tests/unit/common/test_utils.py
tests/unit/dispatcher/test_influxdb.py
tests/unit/dispatcher/test_influxdb_line_protocol.py
tests/unit/orchestrator/test_heat.py [new file with mode: 0644]
tests/unit/test_ssh.py
third_party/influxdb/influxdb_line_protocol.py
yardstick/__init__.py
yardstick/benchmark/__init__.py
yardstick/benchmark/contexts/base.py
yardstick/benchmark/contexts/dummy.py
yardstick/benchmark/contexts/heat.py
yardstick/benchmark/contexts/model.py
yardstick/benchmark/contexts/node.py
yardstick/benchmark/core/__init__.py [new file with mode: 0644]
yardstick/benchmark/core/plugin.py [new file with mode: 0644]
yardstick/benchmark/core/runner.py [new file with mode: 0644]
yardstick/benchmark/core/scenario.py [new file with mode: 0644]
yardstick/benchmark/core/task.py [new file with mode: 0644]
yardstick/benchmark/core/testcase.py [new file with mode: 0644]
yardstick/benchmark/runners/arithmetic.py
yardstick/benchmark/runners/base.py
yardstick/benchmark/runners/duration.py
yardstick/benchmark/runners/iteration.py
yardstick/benchmark/runners/sequence.py
yardstick/benchmark/scenarios/availability/actionrollbackers.py
yardstick/benchmark/scenarios/availability/attacker/attacker_baremetal.py
yardstick/benchmark/scenarios/availability/attacker/attacker_general.py
yardstick/benchmark/scenarios/availability/attacker/attacker_process.py
yardstick/benchmark/scenarios/availability/attacker/baseattacker.py
yardstick/benchmark/scenarios/availability/director.py
yardstick/benchmark/scenarios/availability/monitor/basemonitor.py
yardstick/benchmark/scenarios/availability/monitor/monitor_command.py
yardstick/benchmark/scenarios/availability/monitor/monitor_general.py
yardstick/benchmark/scenarios/availability/monitor/monitor_process.py
yardstick/benchmark/scenarios/availability/operation/baseoperation.py
yardstick/benchmark/scenarios/availability/operation/operation_general.py
yardstick/benchmark/scenarios/availability/result_checker/baseresultchecker.py
yardstick/benchmark/scenarios/availability/result_checker/result_checker_general.py
yardstick/benchmark/scenarios/availability/scenario_general.py
yardstick/benchmark/scenarios/availability/serviceha.py
yardstick/benchmark/scenarios/base.py
yardstick/benchmark/scenarios/compute/cachestat.py
yardstick/benchmark/scenarios/compute/computecapacity.py
yardstick/benchmark/scenarios/compute/cpuload.py
yardstick/benchmark/scenarios/compute/cyclictest.py
yardstick/benchmark/scenarios/compute/lmbench.py
yardstick/benchmark/scenarios/compute/memload.py
yardstick/benchmark/scenarios/compute/perf.py
yardstick/benchmark/scenarios/compute/plugintest.py
yardstick/benchmark/scenarios/compute/ramspeed.py
yardstick/benchmark/scenarios/compute/unixbench.py
yardstick/benchmark/scenarios/dummy/dummy.py
yardstick/benchmark/scenarios/networking/iperf3.py
yardstick/benchmark/scenarios/networking/netperf.py
yardstick/benchmark/scenarios/networking/netperf_node.py
yardstick/benchmark/scenarios/networking/netutilization.py
yardstick/benchmark/scenarios/networking/networkcapacity.py
yardstick/benchmark/scenarios/networking/ping.py
yardstick/benchmark/scenarios/networking/ping6.py
yardstick/benchmark/scenarios/networking/pktgen.py
yardstick/benchmark/scenarios/networking/pktgen_dpdk.py
yardstick/benchmark/scenarios/networking/sfc.py
yardstick/benchmark/scenarios/networking/sfc_openstack.py
yardstick/benchmark/scenarios/networking/vsperf.py
yardstick/benchmark/scenarios/networking/vtc_instantiation_validation.py
yardstick/benchmark/scenarios/networking/vtc_instantiation_validation_noisy.py
yardstick/benchmark/scenarios/networking/vtc_throughput.py
yardstick/benchmark/scenarios/networking/vtc_throughput_noisy.py
yardstick/benchmark/scenarios/parser/parser.py
yardstick/benchmark/scenarios/storage/fio.py
yardstick/benchmark/scenarios/storage/storagecapacity.py
yardstick/benchmark/scenarios/storage/storperf.py
yardstick/cmd/__init__.py
yardstick/cmd/cli.py
yardstick/cmd/commands/__init__.py
yardstick/cmd/commands/env.py
yardstick/cmd/commands/plugin.py
yardstick/cmd/commands/runner.py
yardstick/cmd/commands/scenario.py
yardstick/cmd/commands/task.py
yardstick/cmd/commands/testcase.py
yardstick/common/constants.py
yardstick/common/httpClient.py
yardstick/common/openstack_utils.py
yardstick/common/task_template.py
yardstick/common/template_format.py
yardstick/common/utils.py
yardstick/definitions.py
yardstick/dispatcher/__init__.py
yardstick/dispatcher/base.py
yardstick/dispatcher/file.py
yardstick/dispatcher/http.py
yardstick/dispatcher/influxdb.py
yardstick/main.py
yardstick/orchestrator/heat.py
yardstick/plot/plotter.py
yardstick/ssh.py
yardstick/vTC/apexlake/experimental_framework/__init__.py
yardstick/vTC/apexlake/experimental_framework/api.py
yardstick/vTC/apexlake/experimental_framework/benchmarking_unit.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/benchmark_base_class.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/instantiation_validation_benchmark.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/instantiation_validation_noisy_neighbors_benchmark.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/multi_tenancy_throughput_benchmark.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/rfc2544_throughput_benchmark.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/test_benchmark.py
yardstick/vTC/apexlake/experimental_framework/common.py
yardstick/vTC/apexlake/experimental_framework/constants/framework_parameters.py
yardstick/vTC/apexlake/experimental_framework/deployment_unit.py
yardstick/vTC/apexlake/experimental_framework/heat_manager.py
yardstick/vTC/apexlake/experimental_framework/heat_template_generation.py
yardstick/vTC/apexlake/experimental_framework/packet_generators/base_packet_generator.py
yardstick/vTC/apexlake/experimental_framework/packet_generators/dpdk_packet_generator.py
yardstick/vTC/apexlake/setup.py
yardstick/vTC/apexlake/tests/api_test.py
yardstick/vTC/apexlake/tests/base_packet_generator_test.py
yardstick/vTC/apexlake/tests/benchmark_base_class_test.py
yardstick/vTC/apexlake/tests/benchmarking_unit_test.py
yardstick/vTC/apexlake/tests/common_test.py
yardstick/vTC/apexlake/tests/conf_file_sections_test.py
yardstick/vTC/apexlake/tests/deployment_unit_test.py
yardstick/vTC/apexlake/tests/dpdk_packet_generator_test.py
yardstick/vTC/apexlake/tests/generates_template_test.py
yardstick/vTC/apexlake/tests/heat_manager_test.py
yardstick/vTC/apexlake/tests/instantiation_validation_bench_test.py
yardstick/vTC/apexlake/tests/instantiation_validation_noisy_bench_test.py
yardstick/vTC/apexlake/tests/multi_tenancy_throughput_benchmark_test.py
yardstick/vTC/apexlake/tests/rfc2544_throughput_benchmark_test.py
yardstick/vTC/apexlake/tests/tree_node_test.py

index 7671527..5270085 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import re
 import importlib
 import logging
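
Most files in this change simply gain `from __future__ import absolute_import`, which makes bare imports absolute on Python 2 as they already are on Python 3. A minimal sketch of the difference (the bare `import conf` line is illustrative):

    # api/base.py, where a sibling module api/conf.py also exists
    from __future__ import absolute_import

    import conf              # now always a top-level 'conf', never api/conf.py
    from api import conf     # the explicit style used throughout this change
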
index df44042..abaf34a 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 from pyroute2 import IPDB
 
 
@@ -24,4 +25,6 @@ TEST_CASE_PRE = 'opnfv_yardstick_'
 
 TEST_SUITE_PATH = '../tests/opnfv/test_suites/'
 
+TEST_SUITE_PRE = 'opnfv_'
+
 OUTPUT_CONFIG_FILE_PATH = '/etc/yardstick/yardstick.conf'
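
TEST_SUITE_PRE mirrors the existing TEST_CASE_PRE prefix; testsuites_action.py below resolves a suite file by joining the path, the prefix, and the suite name:

    import os

    TEST_SUITE_PATH = '../tests/opnfv/test_suites/'
    TEST_SUITE_PRE = 'opnfv_'

    suite_file = os.path.join(TEST_SUITE_PATH, TEST_SUITE_PRE + 'smoke' + '.yaml')
    # -> '../tests/opnfv/test_suites/opnfv_smoke.yaml'
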
index bc2708b..d7cf4f9 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
 from sqlalchemy import create_engine
@@ -21,9 +22,3 @@ db_session = scoped_session(sessionmaker(autocommit=False,
                                          bind=engine))
 Base = declarative_base()
 Base.query = db_session.query_property()
-
-
-def init_db():
-    subclasses = [subclass.__name__ for subclass in Base.__subclasses__()]
-    logger.debug('Import models: %s', subclasses)
-    Base.metadata.create_all(bind=engine)
diff --git a/api/database/handler.py b/api/database/handler.py
new file mode 100644 (file)
index 0000000..f6a2257
--- /dev/null
@@ -0,0 +1,30 @@
+# ############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+# ############################################################################
+from api.database import db_session
+from api.database.models import AsyncTasks
+
+
+class AsyncTaskHandler(object):
+    def insert(self, kwargs):
+        task = AsyncTasks(**kwargs)
+        db_session.add(task)
+        db_session.commit()
+        return task
+
+    def update_status(self, task, status):
+        task.status = status
+        db_session.commit()
+
+    def update_error(self, task, error):
+        task.error = error
+        db_session.commit()
+
+    def get_task_by_taskid(self, task_id):
+        task = AsyncTasks.query.filter_by(task_id=task_id).first()
+        return task
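
For reference, a usage sketch of the handler life cycle; the status convention (0 = running, 1 = finished, 2 = error) follows how env_action.py below drives it:

    from api.database.handler import AsyncTaskHandler

    def do_work():
        pass  # hypothetical stand-in for a long-running job

    handler = AsyncTaskHandler()
    task = handler.insert({'task_id': 'some-uuid', 'status': 0})  # running
    try:
        do_work()
        handler.update_status(task, 1)   # finished
    except Exception as e:
        handler.update_status(task, 2)   # error
        handler.update_error(task, str(e))
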
diff --git a/api/database/handlers.py b/api/database/handlers.py
new file mode 100644 (file)
index 0000000..42979b5
--- /dev/null
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from api.database import db_session
+from api.database.models import Tasks
+
+
+class TasksHandler(object):
+
+    def insert(self, kwargs):
+        task = Tasks(**kwargs)
+        db_session.add(task)
+        db_session.commit()
+        return task
+
+    def update_status(self, task, status):
+        task.status = status
+        db_session.commit()
+
+    def update_error(self, task, error):
+        task.error = error
+        db_session.commit()
+
+    def get_task_by_taskid(self, task_id):
+        task = Tasks.query.filter_by(task_id=task_id).first()
+        return task
index 25e3238..2270de9 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 from sqlalchemy import Column
 from sqlalchemy import Integer
 from sqlalchemy import String
@@ -23,3 +24,14 @@ class Tasks(Base):
 
     def __repr__(self):
         return '<Task %r>' % Tasks.task_id
+
+
+class AsyncTasks(Base):
+    __tablename__ = 'asynctasks'
+    id = Column(Integer, primary_key=True)
+    task_id = Column(String(30))
+    status = Column(Integer)
+    error = Column(String(120))
+
+    def __repr__(self):
+        return '<Task %r>' % self.task_id
diff --git a/api/resources/asynctask.py b/api/resources/asynctask.py
new file mode 100644 (file)
index 0000000..dd2a710
--- /dev/null
@@ -0,0 +1,35 @@
+# ############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+# ############################################################################
+import uuid
+
+from api.utils import common as common_utils
+from api.database.models import AsyncTasks
+
+
+def default(args):
+    return _get_status(args)
+
+
+def _get_status(args):
+    try:
+        task_id = args['task_id']
+        uuid.UUID(task_id)
+    except KeyError:
+        message = 'task_id must be provided'
+        return common_utils.error_handler(message)
+
+    asynctask = AsyncTasks.query.filter_by(task_id=task_id).first()
+
+    try:
+        status = asynctask.status
+        error = asynctask.error if asynctask.error else []
+
+        return common_utils.result_handler(status, error)
+    except AttributeError:
+        return common_utils.error_handler('no such task')
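
The resource is exposed as GET /yardstick/asynctask in urls.py below. A polling sketch (host and port are assumptions, and the response shape follows common_utils.result_handler):

    import requests

    task_id = 'your-task-id'  # UUID returned by one of the async POST actions
    r = requests.get('http://localhost:5000/yardstick/asynctask',
                     params={'task_id': task_id})
    print(r.json())  # e.g. {'status': 1, 'result': []} once the task finished
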
index 59a1692..8955f3c 100644 (file)
@@ -6,34 +6,44 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
+import errno
+import json
 import logging
-import threading
+import os
 import subprocess
+import threading
 import time
-import json
-import os
-import errno
-import ConfigParser
+import uuid
 
-from docker import Client
+from six.moves import configparser
 
-from yardstick.common import constants as config
-from yardstick.common import utils as yardstick_utils
-from yardstick.common.httpClient import HttpClient
 from api import conf as api_conf
+from api.database.handler import AsyncTaskHandler
 from api.utils import influx
 from api.utils.common import result_handler
+from docker import Client
+from yardstick.common import constants as config
+from yardstick.common import utils as yardstick_utils
+from yardstick.common.httpClient import HttpClient
 
 logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
 
 
 def createGrafanaContainer(args):
-    thread = threading.Thread(target=_create_grafana)
+    task_id = str(uuid.uuid4())
+
+    thread = threading.Thread(target=_create_grafana, args=(task_id,))
     thread.start()
-    return result_handler('success', [])
 
+    return result_handler('success', {'task_id': task_id})
+
+
+def _create_grafana(task_id):
+    _create_task(task_id)
 
-def _create_grafana():
     client = Client(base_url=config.DOCKER_URL)
 
     try:
@@ -48,7 +58,10 @@ def _create_grafana():
         _create_data_source()
 
         _create_dashboard()
+
+        _update_task_status(task_id)
     except Exception as e:
+        _update_task_error(task_id, str(e))
         logger.debug('Error: %s', e)
 
 
@@ -96,12 +109,17 @@ def _check_image_exist(client, t):
 
 
 def createInfluxDBContainer(args):
-    thread = threading.Thread(target=_create_influxdb)
+    task_id = str(uuid.uuid4())
+
+    thread = threading.Thread(target=_create_influxdb, args=(task_id,))
     thread.start()
-    return result_handler('success', [])
+
+    return result_handler('success', {'task_id': task_id})
 
 
-def _create_influxdb():
+def _create_influxdb(task_id):
+    _create_task(task_id)
+
     client = Client(base_url=config.DOCKER_URL)
 
     try:
@@ -116,7 +134,10 @@ def _create_influxdb():
         time.sleep(5)
 
         _config_influxdb()
+
+        _update_task_status(task_id)
     except Exception as e:
+        _update_task_error(task_id, str(e))
         logger.debug('Error: %s', e)
 
 
@@ -148,7 +169,7 @@ def _config_influxdb():
 def _change_output_to_influxdb():
     yardstick_utils.makedirs(config.YARDSTICK_CONFIG_DIR)
 
-    parser = ConfigParser.ConfigParser()
+    parser = configparser.ConfigParser()
     parser.read(config.YARDSTICK_CONFIG_SAMPLE_FILE)
 
     parser.set('DEFAULT', 'dispatcher', 'influxdb')
@@ -160,34 +181,44 @@ def _change_output_to_influxdb():
 
 
 def prepareYardstickEnv(args):
-    thread = threading.Thread(target=_prepare_env_daemon)
+    task_id = str(uuid.uuid4())
+
+    thread = threading.Thread(target=_prepare_env_daemon, args=(task_id,))
     thread.start()
-    return result_handler('success', [])
+
+    return result_handler('success', {'task_id': task_id})
 
 
-def _prepare_env_daemon():
+def _prepare_env_daemon(task_id):
+    _create_task(task_id)
 
     installer_ip = os.environ.get('INSTALLER_IP', 'undefined')
     installer_type = os.environ.get('INSTALLER_TYPE', 'undefined')
 
-    _check_variables(installer_ip, installer_type)
+    try:
+        _check_variables(installer_ip, installer_type)
 
-    _create_directories()
+        _create_directories()
 
-    rc_file = config.OPENSTACK_RC_FILE
+        rc_file = config.OPENSTACK_RC_FILE
 
-    _get_remote_rc_file(rc_file, installer_ip, installer_type)
+        _get_remote_rc_file(rc_file, installer_ip, installer_type)
 
-    _source_file(rc_file)
+        _source_file(rc_file)
 
-    _append_external_network(rc_file)
+        _append_external_network(rc_file)
 
-    # update the external_network
-    _source_file(rc_file)
+        # update the external_network
+        _source_file(rc_file)
 
-    _clean_images()
+        _clean_images()
 
-    _load_images()
+        _load_images()
+
+        _update_task_status(task_id)
+    except Exception as e:
+        _update_task_error(task_id, str(e))
+        logger.debug('Error: %s', e)
 
 
 def _check_variables(installer_ip, installer_type):
@@ -257,3 +288,27 @@ def _load_images():
                          cwd=config.YARDSTICK_REPOS_DIR)
     output = p.communicate()[0]
     logger.debug('The result is: %s', output)
+
+
+def _create_task(task_id):
+    async_handler = AsyncTaskHandler()
+    task_dict = {
+        'task_id': task_id,
+        'status': 0
+    }
+    async_handler.insert(task_dict)
+
+
+def _update_task_status(task_id):
+    async_handler = AsyncTaskHandler()
+
+    task = async_handler.get_task_by_taskid(task_id)
+    async_handler.update_status(task, 1)
+
+
+def _update_task_error(task_id, error):
+    async_handler = AsyncTaskHandler()
+
+    task = async_handler.get_task_by_taskid(task_id)
+    async_handler.update_status(task, 2)
+    async_handler.update_error(task, error)
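
Each env action now returns its task_id immediately and records progress through AsyncTaskHandler, so clients can poll the new asynctask endpoint instead of assuming success. A client-side sketch of the resulting flow (host, port, and request/response shapes are assumptions based on urls.py, result_handler, and the other action endpoints):

    import time
    import requests

    base = 'http://localhost:5000/yardstick'

    # kick off the long-running environment preparation
    resp = requests.post(base + '/env/action',
                         json={'action': 'prepareYardstickEnv', 'args': {}})
    task_id = resp.json()['result']['task_id']

    # poll until the status leaves 0 (running): 1 = finished, 2 = error
    while True:
        status = requests.get(base + '/asynctask',
                              params={'task_id': task_id}).json()['status']
        if status != 0:
            break
        time.sleep(5)
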
index fda0ffd..c5aa20a 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import uuid
 import os
 import logging
@@ -23,8 +24,8 @@ def runTestCase(args):
     except KeyError:
         return common_utils.error_handler('Lack of testcase argument')
 
-    testcase = os.path.join(conf.TEST_CASE_PATH,
-                            conf.TEST_CASE_PRE + testcase + '.yaml')
+    testcase_name = conf.TEST_CASE_PRE + testcase
+    testcase = os.path.join(conf.TEST_CASE_PATH, testcase_name + '.yaml')
 
     task_id = str(uuid.uuid4())
 
@@ -33,6 +34,10 @@ def runTestCase(args):
     logger.debug('The command_list is: %s', command_list)
 
     logger.debug('Start to execute command list')
-    common_utils.exec_command_task(command_list, task_id)
+    task_dict = {
+        'task_id': task_id,
+        'details': testcase_name
+    }
+    common_utils.exec_command_task(command_list, task_dict)
 
     return common_utils.result_handler('success', task_id)
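
The task row now records the test case name in its details column, which results.py below uses to pick the InfluxDB measurement to query. Invoking the endpoint, sketched with an assumed host and an illustrative test case name:

    import requests

    resp = requests.post(
        'http://localhost:5000/yardstick/testcases/release/action',
        json={'action': 'runTestCase', 'args': {'testcase': 'tc002'}})
    task_id = resp.json()['result']
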
index 3de09fd..86fc251 100644 (file)
@@ -6,13 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import uuid
-import re
 
 from api.utils import influx as influx_utils
 from api.utils import common as common_utils
-from api import conf
+from api.database.handlers import TasksHandler
 
 logger = logging.getLogger(__name__)
 
@@ -23,39 +23,36 @@ def default(args):
 
 def getResult(args):
     try:
-        measurement = args['measurement']
         task_id = args['task_id']
 
-        if re.search("[^a-zA-Z0-9_-]", measurement):
-            raise ValueError('invalid measurement parameter')
-
         uuid.UUID(task_id)
     except KeyError:
-        message = 'measurement and task_id must be provided'
+        message = 'task_id must be provided'
         return common_utils.error_handler(message)
 
-    query_template = "select * from %s where task_id='%s'"
-    query_sql = query_template % ('tasklist', task_id)
-    data = common_utils.translate_to_str(influx_utils.query(query_sql))
+    task = TasksHandler().get_task_by_taskid(task_id)
 
     def _unfinished():
         return common_utils.result_handler(0, [])
 
     def _finished():
-        query_sql = query_template % (conf.TEST_CASE_PRE + measurement,
-                                      task_id)
-        data = common_utils.translate_to_str(influx_utils.query(query_sql))
-        if not data:
-            query_sql = query_template % (measurement, task_id)
+        testcases = task.details.split(',')
+
+        def get_data(testcase):
+            query_template = "select * from %s where task_id='%s'"
+            query_sql = query_template % (testcase, task_id)
             data = common_utils.translate_to_str(influx_utils.query(query_sql))
+            return data
+
+        result = {k: get_data(k) for k in testcases}
 
-        return common_utils.result_handler(1, data)
+        return common_utils.result_handler(1, result)
 
     def _error():
-        return common_utils.result_handler(2, data[0]['error'])
+        return common_utils.result_handler(2, task.error)
 
     try:
-        status = data[0]['status']
+        status = task.status
 
         switcher = {
             0: _unfinished,
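
getResult now reads the status straight from the Tasks row and dispatches on it: 0 maps to _unfinished, 1 to _finished (one InfluxDB query per test case listed in task.details), and 2 to _error. A client-side sketch (host and port are assumptions):

    import requests

    task_id = 'your-task-id'  # UUID returned by one of the POST actions
    r = requests.get('http://localhost:5000/yardstick/results',
                     params={'task_id': task_id}).json()
    # status 0 -> still running, result is []
    # status 1 -> result maps each test case in task.details to its rows
    # status 2 -> result carries the recorded error string
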
index 545447a..490e48b 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import uuid
 import os
 import logging
@@ -19,11 +20,11 @@ logger = logging.getLogger(__name__)
 def runTestCase(args):
     try:
         opts = args.get('opts', {})
-        testcase = args['testcase']
+        testcase_name = args['testcase']
     except KeyError:
         return common_utils.error_handler('Lack of testcase argument')
 
-    testcase = os.path.join(conf.SAMPLE_PATH, testcase + '.yaml')
+    testcase = os.path.join(conf.SAMPLE_PATH, testcase_name + '.yaml')
 
     task_id = str(uuid.uuid4())
 
@@ -32,6 +33,10 @@ def runTestCase(args):
     logger.debug('The command_list is: %s', command_list)
 
     logger.debug('Start to execute command list')
-    common_utils.exec_command_task(command_list, task_id)
+    task_dict = {
+        'task_id': task_id,
+        'details': testcase_name
+    }
+    common_utils.exec_command_task(command_list, task_dict)
 
     return common_utils.result_handler('success', task_id)
diff --git a/api/resources/testsuites_action.py b/api/resources/testsuites_action.py
new file mode 100644 (file)
index 0000000..f833dc2
--- /dev/null
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+"""Yardstick test suite api action"""
+
+from __future__ import absolute_import
+import uuid
+import os
+import logging
+
+from api import conf
+from api.utils import common as common_utils
+
+logger = logging.getLogger(__name__)
+
+
+def runTestSuite(args):
+    try:
+        opts = args.get('opts', {})
+        testsuite = args['testsuite']
+    except KeyError:
+        return common_utils.error_handler('Lack of testsuite argument')
+
+    if 'suite' not in opts:
+        opts['suite'] = 'true'
+
+    suite_name = conf.TEST_SUITE_PRE + testsuite
+    testsuite = os.path.join(conf.TEST_SUITE_PATH, suite_name + '.yaml')
+
+    task_id = str(uuid.uuid4())
+
+    command_list = ['task', 'start']
+    command_list = common_utils.get_command_list(command_list, opts, testsuite)
+    logger.debug('The command_list is: %s', command_list)
+
+    logger.debug('Start to execute command list')
+    task_dict = {
+        'task_id': task_id,
+        'details': suite_name
+    }
+    common_utils.exec_command_task(command_list, task_dict)
+
+    return common_utils.result_handler('success', task_id)
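
A sketch of invoking the new endpoint (host is an assumption; 'smoke' is the default suite name from the swagger document below):

    import requests

    resp = requests.post(
        'http://localhost:5000/yardstick/testsuites/action',
        json={'action': 'runTestSuite', 'args': {'testsuite': 'smoke'}})
    task_id = resp.json()['result']
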
index fac821b..5bac1ba 100644 (file)
@@ -6,14 +6,21 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
+import inspect
 import logging
+from functools import reduce
+from six.moves import filter
 
+from flasgger import Swagger
 from flask import Flask
 from flask_restful import Api
-from flasgger import Swagger
 
-from api.database import init_db
+from api.database import Base
 from api.database import db_session
+from api.database import engine
+from api.database import models
 from api.urls import urlpatterns
 from yardstick import _init_logging
 
@@ -21,8 +28,6 @@ logger = logging.getLogger(__name__)
 
 app = Flask(__name__)
 
-init_db()
-
 Swagger(app)
 
 api = Api(app)
@@ -33,6 +38,21 @@ def shutdown_session(exception=None):
     db_session.remove()
 
 
+def init_db():
+    def func(a):
+        try:
+            if issubclass(a[1], Base):
+                return True
+        except TypeError:
+            pass
+        return False
+
+    subclasses = filter(func, inspect.getmembers(models, inspect.isclass))
+    logger.debug('Import models: %s', [a[1] for a in subclasses])
+    Base.metadata.create_all(bind=engine)
+
+
+init_db()
 reduce(lambda a, b: a.add_resource(b.resource, b.url,
                                    endpoint=b.endpoint) or a, urlpatterns, api)
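
init_db now discovers model classes by inspecting api.database.models instead of relying on Base.__subclasses__(), which only works once the subclasses have been imported. The reduce call is a compact fold over urlpatterns; an equivalent plain loop, for clarity:

    for pattern in urlpatterns:
        api.add_resource(pattern.resource, pattern.url,
                         endpoint=pattern.endpoint)
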
 
diff --git a/api/swagger/docs/testsuites_action.yaml b/api/swagger/docs/testsuites_action.yaml
new file mode 100644 (file)
index 0000000..ebf01e4
--- /dev/null
@@ -0,0 +1,50 @@
+TestSuites Actions
+
+This API offers several actions; currently it supports runTestSuite.
+
+action: runTestSuite
+This API provides an interface to run a test suite in yardstick.
+It returns a task_id for querying; use the returned task_id to get
+the result data.
+---
+tags:
+  - Testsuite Action
+parameters:
+  - in: body
+    name: body
+    description: this is the input json dict
+    schema:
+      id: TestSuiteActionModel
+      required:
+        - action
+        - args
+      properties:
+        action:
+          type: string
+          description: the action for the test suite
+          default: runTestSuite
+        args:
+          schema:
+            id: TestSuiteActionArgsModel
+            required:
+              - testsuite
+            properties:
+              testsuite:
+                type: string
+                description: this is the test suite name
+                default: smoke
+              opts:
+                schema:
+                  id: TestSuiteActionArgsOptsModel
+responses:
+  200:
+    description: A result json dict
+    schema:
+      id: result
+      properties:
+        status:
+          type: string
+          default: success
+        result:
+          type: string
+          description: task_id of this task
index 7c65fbb..d3c7a9b 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 from flask_restful import fields
 from flask_restful_swagger import swagger
 
@@ -42,6 +43,37 @@ class TestCaseActionModel:
     }
 
 
+# for testsuite/action runTestSuite action
+@swagger.model
+class TestSuiteActionArgsOptsTaskArgModel:
+    resource_fields = {
+    }
+
+
+@swagger.model
+class TestSuiteActionArgsOptsModel:
+    resource_fields = {
+        'task-args': TestSuiteActionArgsOptsTaskArgModel,
+        'keep-deploy': fields.String,
+        'suite': fields.String
+    }
+
+
+@swagger.model
+class TestSuiteActionArgsModel:
+    resource_fields = {
+        'testsuite': fields.String,
+        'opts': TestSuiteActionArgsOptsModel
+    }
+
+
+@swagger.model
+class TestSuiteActionModel:
+    resource_fields = {
+        'action': fields.String,
+        'args': TestSuiteActionArgsModel
+    }
+
+
 # for results
 @swagger.model
 class ResultModel:
index 0fffd12..04b7485 100644 (file)
@@ -6,13 +6,16 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 from api import views
 from api.utils.common import Url
 
 
 urlpatterns = [
+    Url('/yardstick/asynctask', views.Asynctask, 'asynctask'),
     Url('/yardstick/testcases/release/action', views.ReleaseAction, 'release'),
     Url('/yardstick/testcases/samples/action', views.SamplesAction, 'samples'),
+    Url('/yardstick/testsuites/action', views.TestsuitesAction, 'testsuites'),
     Url('/yardstick/results', views.Results, 'results'),
     Url('/yardstick/env/action', views.EnvAction, 'env')
 ]
index e3e64a7..1c800ce 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import collections
 import logging
 
@@ -13,18 +14,19 @@ from flask import jsonify
 
 from api.utils.daemonthread import DaemonThread
 from yardstick.cmd.cli import YardstickCLI
+import six
 
 logger = logging.getLogger(__name__)
 
 
-def translate_to_str(object):
-    if isinstance(object, collections.Mapping):
-        return {str(k): translate_to_str(v) for k, v in object.items()}
-    elif isinstance(object, list):
-        return [translate_to_str(ele) for ele in object]
-    elif isinstance(object, unicode):
-        return str(object)
-    return object
+def translate_to_str(obj):
+    if isinstance(obj, collections.Mapping):
+        return {str(k): translate_to_str(v) for k, v in obj.items()}
+    elif isinstance(obj, list):
+        return [translate_to_str(ele) for ele in obj]
+    elif isinstance(obj, six.text_type):
+        return str(obj)
+    return obj
 
 
 def get_command_list(command_list, opts, args):
@@ -40,8 +42,8 @@ def get_command_list(command_list, opts, args):
     return command_list
 
 
-def exec_command_task(command_list, task_id):   # pragma: no cover
-    daemonthread = DaemonThread(YardstickCLI().api, (command_list, task_id))
+def exec_command_task(command_list, task_dict):   # pragma: no cover
+    daemonthread = DaemonThread(YardstickCLI().api, (command_list, task_dict))
     daemonthread.start()
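
translate_to_str now keys on six.text_type, so the same code normalizes unicode strings to native str on both Python 2 and Python 3, for example:

    >>> translate_to_str({u'status': [u'success', 1]})
    {'status': ['success', 1]}
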
 
 
index 47c0b91..0049834 100644 (file)
@@ -6,13 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import threading
 import os
-import datetime
 import errno
 
 from api import conf
-from api.utils.influx import write_data_tasklist
+from api.database.handlers import TasksHandler
 
 
 class DaemonThread(threading.Thread):
@@ -21,19 +21,24 @@ class DaemonThread(threading.Thread):
         super(DaemonThread, self).__init__(target=method, args=args)
         self.method = method
         self.command_list = args[0]
-        self.task_id = args[1]
+        self.task_dict = args[1]
 
     def run(self):
-        timestamp = datetime.datetime.now()
+        self.task_dict['status'] = 0
+        task_id = self.task_dict['task_id']
 
         try:
-            write_data_tasklist(self.task_id, timestamp, 0)
-            self.method(self.command_list, self.task_id)
-            write_data_tasklist(self.task_id, timestamp, 1)
+            task_handler = TasksHandler()
+            task = task_handler.insert(self.task_dict)
+
+            self.method(self.command_list, task_id)
+
+            task_handler.update_status(task, 1)
         except Exception as e:
-            write_data_tasklist(self.task_id, timestamp, 2, error=str(e))
+            task_handler.update_status(task, 2)
+            task_handler.update_error(task, str(e))
         finally:
-            _handle_testsuite_file(self.task_id)
+            _handle_testsuite_file(task_id)
 
 
 def _handle_testsuite_file(task_id):
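
DaemonThread now persists task state through TasksHandler rather than writing a 'tasklist' measurement to InfluxDB. A wiring sketch (the target function and the details value are illustrative):

    from api.utils.daemonthread import DaemonThread

    def run_task(command_list, task_id):
        pass  # hypothetical stand-in for YardstickCLI().api

    command_list = ['task', 'start']
    task_dict = {'task_id': 'some-uuid', 'details': 'opnfv_yardstick_tc002'}
    DaemonThread(run_task, (command_list, task_dict)).start()
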
index 9366ed3..275c63a 100644 (file)
@@ -6,11 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
 import logging
-from urlparse import urlsplit
 
+import six.moves.configparser as ConfigParser
+from six.moves.urllib.parse import urlsplit
 from influxdb import InfluxDBClient
-import ConfigParser
 
 from api import conf
 
@@ -21,46 +23,26 @@ def get_data_db_client():
     parser = ConfigParser.ConfigParser()
     try:
         parser.read(conf.OUTPUT_CONFIG_FILE_PATH)
-        dispatcher = parser.get('DEFAULT', 'dispatcher')
 
-        if 'influxdb' != dispatcher:
+        if 'influxdb' != parser.get('DEFAULT', 'dispatcher'):
             raise RuntimeError
 
-        ip = _get_ip(parser.get('dispatcher_influxdb', 'target'))
-        username = parser.get('dispatcher_influxdb', 'username')
-        password = parser.get('dispatcher_influxdb', 'password')
-        db_name = parser.get('dispatcher_influxdb', 'db_name')
-        return InfluxDBClient(ip, conf.PORT, username, password, db_name)
+        return _get_client(parser)
     except ConfigParser.NoOptionError:
         logger.error('can not find the key')
         raise
 
 
-def _get_ip(url):
-    return urlsplit(url).hostname
-
-
-def _write_data(measurement, field, timestamp, tags):
-    point = {
-        'measurement': measurement,
-        'fields': field,
-        'time': timestamp,
-        'tags': tags
-    }
-
-    try:
-        client = get_data_db_client()
-
-        logger.debug('Start to write data: %s', point)
-        client.write_points([point])
-    except RuntimeError:
-        logger.debug('dispatcher is not influxdb')
+def _get_client(parser):
+    ip = _get_ip(parser.get('dispatcher_influxdb', 'target'))
+    username = parser.get('dispatcher_influxdb', 'username')
+    password = parser.get('dispatcher_influxdb', 'password')
+    db_name = parser.get('dispatcher_influxdb', 'db_name')
+    return InfluxDBClient(ip, conf.PORT, username, password, db_name)
 
 
-def write_data_tasklist(task_id, timestamp, status, error=''):
-    field = {'status': status, 'error': error}
-    tags = {'task_id': task_id}
-    _write_data('tasklist', field, timestamp, tags)
+def _get_ip(url):
+    return urlsplit(url).hostname
 
 
 def query(query_sql):
index ee13b47..0c39bfa 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import os
 
@@ -24,13 +25,32 @@ TestCaseActionArgsOptsModel = models.TestCaseActionArgsOptsModel
 TestCaseActionArgsOptsTaskArgModel = models.TestCaseActionArgsOptsTaskArgModel
 
 
+class Asynctask(ApiResource):
+    def get(self):
+        return self._dispatch_get()
+
+
 class ReleaseAction(ApiResource):
-    @swag_from(os.getcwd() + '/swagger/docs/testcases.yaml')
+    @swag_from(os.getcwd() + '/swagger/docs/release_action.yaml')
     def post(self):
         return self._dispatch_post()
 
 
 class SamplesAction(ApiResource):
+
+    def post(self):
+        return self._dispatch_post()
+
+
+TestSuiteActionModel = models.TestSuiteActionModel
+TestSuiteActionArgsModel = models.TestSuiteActionArgsModel
+TestSuiteActionArgsOptsModel = models.TestSuiteActionArgsOptsModel
+TestSuiteActionArgsOptsTaskArgModel = \
+    models.TestSuiteActionArgsOptsTaskArgModel
+
+
+class TestsuitesAction(ApiResource):
+    @swag_from(os.getcwd() + '/swagger/docs/testsuites_action.yaml')
     def post(self):
         return self._dispatch_post()
 
@@ -39,11 +59,13 @@ ResultModel = models.ResultModel
 
 
 class Results(ApiResource):
+
     @swag_from(os.getcwd() + '/swagger/docs/results.yaml')
     def get(self):
         return self._dispatch_get()
 
 
 class EnvAction(ApiResource):
+
     def post(self):
         return self._dispatch_post()
index a693849..6771f36 100644 (file)
@@ -13,6 +13,7 @@ the appropriate options to ``use_setuptools()``.
 
 This file can also be run as a script to install or upgrade setuptools.
 """
+from __future__ import absolute_import
 import os
 import shutil
 import sys
@@ -21,7 +22,6 @@ import zipfile
 import optparse
 import subprocess
 import platform
-import textwrap
 import contextlib
 
 from distutils import log
@@ -29,7 +29,7 @@ from distutils import log
 try:
     from urllib.request import urlopen
 except ImportError:
-    from urllib2 import urlopen
+    from six.moves.urllib.request import urlopen
 
 try:
     from site import USER_SITE
@@ -39,6 +39,7 @@ except ImportError:
 DEFAULT_VERSION = "6.1"
 DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
 
+
 def _python_cmd(*args):
     """
     Return True if the command succeeded.
@@ -130,7 +131,7 @@ def _do_download(version, download_base, to_dir, download_delay):
 
 
 def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-        to_dir=os.curdir, download_delay=15):
+                   to_dir=os.curdir, download_delay=15):
     to_dir = os.path.abspath(to_dir)
     rep_modules = 'pkg_resources', 'setuptools'
     imported = set(sys.modules).intersection(rep_modules)
@@ -145,14 +146,14 @@ def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
         return _do_download(version, download_base, to_dir, download_delay)
     except pkg_resources.VersionConflict as VC_err:
         if imported:
-            msg = textwrap.dedent("""
-                The required version of setuptools (>={version}) is not available,
-                and can't be installed while this script is running. Please
-                install a more recent version first, using
-                'easy_install -U setuptools'.
-
-                (Currently using {VC_err.args[0]!r})
-                """).format(VC_err=VC_err, version=version)
+            msg = """\
+The required version of setuptools (>={version}) is not available,
+and can't be installed while this script is running. Please
+install a more recent version first, using
+'easy_install -U setuptools'.
+
+(Currently using {VC_err.args[0]!r})
+""".format(VC_err=VC_err, version=version)
             sys.stderr.write(msg)
             sys.exit(2)
 
@@ -160,6 +161,7 @@ def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
         del pkg_resources, sys.modules['pkg_resources']
         return _do_download(version, download_base, to_dir, download_delay)
 
+
 def _clean_check(cmd, target):
     """
     Run the command to download target. If the command fails, clean up before
@@ -172,6 +174,7 @@ def _clean_check(cmd, target):
             os.unlink(target)
         raise
 
+
 def download_file_powershell(url, target):
     """
     Download the file at url to target using Powershell (which will validate
@@ -191,6 +194,7 @@ def download_file_powershell(url, target):
     ]
     _clean_check(cmd, target)
 
+
 def has_powershell():
     if platform.system() != 'Windows':
         return False
@@ -202,12 +206,15 @@ def has_powershell():
             return False
     return True
 
+
 download_file_powershell.viable = has_powershell
 
+
 def download_file_curl(url, target):
     cmd = ['curl', url, '--silent', '--output', target]
     _clean_check(cmd, target)
 
+
 def has_curl():
     cmd = ['curl', '--version']
     with open(os.path.devnull, 'wb') as devnull:
@@ -217,12 +224,15 @@ def has_curl():
             return False
     return True
 
+
 download_file_curl.viable = has_curl
 
+
 def download_file_wget(url, target):
     cmd = ['wget', url, '--quiet', '--output-document', target]
     _clean_check(cmd, target)
 
+
 def has_wget():
     cmd = ['wget', '--version']
     with open(os.path.devnull, 'wb') as devnull:
@@ -232,8 +242,10 @@ def has_wget():
             return False
     return True
 
+
 download_file_wget.viable = has_wget
 
+
 def download_file_insecure(url, target):
     """
     Use Python to download the file, even though it cannot authenticate the
@@ -250,8 +262,10 @@ def download_file_insecure(url, target):
     with open(target, "wb") as dst:
         dst.write(data)
 
+
 download_file_insecure.viable = lambda: True
 
+
 def get_best_downloader():
     downloaders = (
         download_file_powershell,
@@ -262,8 +276,10 @@ def get_best_downloader():
     viable_downloaders = (dl for dl in downloaders if dl.viable())
     return next(viable_downloaders, None)
 
+
 def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-        to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader):
+                        to_dir=os.curdir, delay=15,
+                        downloader_factory=get_best_downloader):
     """
     Download setuptools from a specified location and return its filename
 
@@ -287,12 +303,14 @@ def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
         downloader(url, saveto)
     return os.path.realpath(saveto)
 
+
 def _build_install_args(options):
     """
     Build the arguments to 'python setup.py install' on the setuptools package
     """
     return ['--user'] if options.user_install else []
 
+
 def _parse_args():
     """
     Parse the command line for options
@@ -318,6 +336,7 @@ def _parse_args():
     # positional arguments are ignored
     return options
 
+
 def main():
     """Install or upgrade setuptools and EasyInstall"""
     options = _parse_args()
@@ -328,5 +347,6 @@ def main():
     )
     return _install(archive, _build_install_args(options))
 
+
 if __name__ == '__main__':
     sys.exit(main())
index d5f3a78..1f9d851 100644 (file)
@@ -19,7 +19,7 @@ extras==0.0.3
 fixtures==1.4.0
 flake8==2.5.4
 funcsigs==0.4
-functools32==3.2.3.post2
+functools32==3.2.3.post2; python_version <= '2.7'
 futures==3.0.5
 iso8601==0.1.11
 Jinja2==2.8
@@ -31,7 +31,8 @@ linecache2==1.0.0
 lxml==3.5.0
 MarkupSafe==0.23
 mccabe==0.4.0
-mock==1.3.0
+# upgrade to version 2.0.0 to match python3 unittest.mock features
+mock==2.0.0
 monotonic==1.0
 msgpack-python==0.4.7
 netaddr==0.7.18
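
The functools32 pin now carries a PEP 508 environment marker, so pip installs the Python 2 backport only where it is needed:

    # requirements.txt: pip evaluates the marker per interpreter
    functools32==3.2.3.post2; python_version <= '2.7'   # Python 2 only
    mock==2.0.0                                         # installed everywhere
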
index 4472f7e..21c7891 100644 (file)
@@ -5,145 +5,147 @@ import:
        
 
 metadata:
-        ID:clearwater
-        Vendor:HP
+         ID: clearwater
+         Vendor: HP
 dsl_definitions:
-       compute_props_host_ellis:&compute_props_host_ellis
-                num_cpu:4
-                mem_size:4096
-       compute_props_host_bono:&compute_props_host_bono
-                num_cpu:3
-                mem_size:2048
+        compute_props_host_ellis: &compute_props_host_ellis
+                 num_cpu: 4
+                 mem_size: 4096
+        compute_props_host_bono: &compute_props_host_bono
+                 num_cpu: 3
+                 mem_size: 2048
 node_types:
-       tosca.nodes.compute.ellis:
-               derived_from:tosca.nodes.compute
+        tosca.nodes.compute.ellis:
+                derived_from: tosca.nodes.compute
 
-       tosca.nodes.compute.bono:
-               derived_from:tosca.nodes.compute
+        tosca.nodes.compute.bono:
+                derived_from: tosca.nodes.compute
 
 topology_template:
-               # a description of the topology template
-               description:>
-                       Vdus used in a vnfd
-       inputs:
-               storage_size:
-                       type:scalar-unit.size
-                       default:2048
-                       description:The required storage resource
-               storage_location:
-                       type:string
-                       description:>
-                               Block storage mount point (filesystem path).
-       node_templates:
+                # A description of the topology template
+                description: >
+                        Vdus used in a vnfd
+        inputs:
+                storage_size:
+                        type: scalar-unit.size
+                        default: 2048
+                        description: The required storage resource
+                storage_location:
+                        type: string
+                        description: >
+                                Block storage mount point (filesystem path).
+        node_templates:
                ellis:
-                       type:tosca.nodes.Compute
-                       capabilities:
-                               os:
-                                       properties:
-                                               architecture:
-                                               type:
-                                               distribution:
-                                               version:
-                               host:
-                                       properties:*compute_props_host_ellis
-                               scalable:
-                                       properties:
-                                               min_instances:1
-                                               default_instances:1
-                       requirements:
-                               - local_storage:
-                                       node:ellis_BlockStorage
-                                               relationship:
-                                                       type:AttachesTo
-                                                       properties:
-                                                               location:{ get_input:storage_location }
-                       interfaces:
-                               Standard:
-                                       start:
-                                               implementation:start.sh
-                                       delete:
-                                               implementaion:stop.sh
-                                       stop:
-                                               implementaion:shutdown.sh
+                        type: tosca.nodes.Compute
+                        capabilities:
+                                os:
+                                        properties:
+                                                architecture:
+                                                type:
+                                                distribution:
+                                                version:
+                                host:
+                                        properties: *compute_props_host_ellis
+                                scalable:
+                                        properties:
+                                                min_instances: 1
+                                                default_instances: 1
+                        requirements:
+                                - local_storage:
+                                        node: ellis_BlockStorage
+                                                relationship:
+                                                        type: AttachesTo
+                                                        properties:
+                                                                location: { get_input: storage_location }
+                        interfaces:
+                                Standard:
+                                        start:
+                                                implementation: start.sh
+                                        delete:
+                                                implementation: stop.sh
+                                        stop:
+                                                implementation: shutdown.sh
                ellis_BlockStorage:
-                       type:tosca.nodes.BlockStorage
-                       properties:
-                               size:{ get_input:storage_size }
+                        type: tosca.nodes.BlockStorage
+                        properties:
+                                size: { get_input: storage_size }
                bono:
-                       type:tosca.nodes.Compute
-                       capabilities:
-                               os:
-                                       properties:
-                                               architecture:
-                                               type:
-                                               distribution:
-                                               version:
-                               host:
-                                       properties:*compute_props_host_bono
-                               scalable:
-                                       properties:
-                                               min_instances:3
-                                               default_instances:3
-                       requirements:
-                               - local_storage:
-                                       node:bono_BlockStorage
-                                               relationship:
-                                                       type:AttachesTo
-                                                       properties:
-                                                               location:{ get_input:storage_location }
-                       interfaces:
-                               Standard:
-                                       start:
-                                               implementation:start.sh
-                                       delete:
-                                               implementaion:stop.sh
-                                       stop:
-                                               implementaion:shutdown.sh
+                        type: tosca.nodes.Compute
+                        capabilities:
+                                os:
+                                        properties:
+                                                architecture:
+                                                type:
+                                                distribution:
+                                                version:
+                                host:
+                                        properties: *compute_props_host_bono
+                                scalable:
+                                        properties:
+                                                min_instances: 3
+                                                default_instances: 3
+                        requirements:
+                                - local_storage:
+                                        node: bono_BlockStorage
+                                        relationship:
+                                                type: AttachesTo
+                                                properties:
+                                                        location: { get_input: storage_location }
+                        interfaces:
+                                Standard:
+                                        start:
+                                                implementation: start.sh
+                                        delete:
+                                                implementation: stop.sh
+                                        stop:
+                                                implementation: shutdown.sh
                bono_BlockStorage:
-                       type:tosca.nodes.BlockStorage
-                       properties:
-                               size:{ get_input:storage_size }
+                        type: tosca.nodes.BlockStorage
+                        properties:
+                                size: { get_input: storage_size }
        clearwater_network1:
-                       type:tosca.nodes.network.Network
-                       properties:
-                       ip_version:4
-       ellis_port1:
-                       type:tosca.nodes.network.Port
-                       requirements:
-                               - binding:
-                                       node:ellis
-                               - link:
-                                        node:clearwater_network1
+                        type: tosca.nodes.network.Network
+                        properties:
+                                ip_version: 4
+        ellis_port1:
+                        type: tosca.nodes.network.Port
+                        requirements:
+                                - binding:
+                                        node: ellis
+                                - link:
+                                        node: clearwater_network1
        clearwater_network2:
-                       type:tosca.nodes.network.Network
-                       properties:
-                       ip_version:4
-       ellis_port2:
-                       type:tosca.nodes.network.Port
-                       requirements:
-                               - binding:
-                                       node:ellis
-                               - link:
-                                        node:clearwater_network2
+                        type: tosca.nodes.network.Network
+                        properties:
+                                ip_version: 4
+        ellis_port2:
+                        type: tosca.nodes.network.Port
+                        requirements:
+                                - binding:
+                                        node: ellis
+                                - link:
+                                        node: clearwater_network2
        clearwater_network1:
-                       type:tosca.nodes.network.Network
-                       properties:
-                       ip_version:4
-       bono_port1:
-                       type:tosca.nodes.network.Port
-                       requirements:
-                               - binding:
-                                       node:bono
-                               - link:
-                                        node:clearwater_network1
+                        type: tosca.nodes.network.Network
+                        properties:
+                                ip_version: 4
+        bono_port1:
+                        type: tosca.nodes.network.Port
+                        requirements:
+                                - binding:
+                                        node: bono
+                                - link:
+                                        node: clearwater_network1
        clearwater_network2:
-                       type:tosca.nodes.network.Network
-                       properties:
-                       ip_version:4
-       bono_port2:
-                       type:tosca.nodes.network.Port
-                       requirements:
-                               - binding:
-                                       node:bono
-                               - link:
-                                        node:clearwater_network2
\ No newline at end of file
+                        type: tosca.nodes.network.Network
+                        properties:
+                                ip_version: 4
+        bono_port2:
+                        type: tosca.nodes.network.Port
+                        requirements:
+                                - binding:
+                                        node: bono
+                                - link:
+                                        node: clearwater_network2
\ No newline at end of file
index 0100b46..315ab67 100755 (executable)
--- a/setup.py
+++ b/setup.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from setuptools import setup, find_packages
 
 
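Note on the recurring "from __future__ import absolute_import" lines added throughout this change: on Python 2 a bare import inside a package prefers a sibling module over the standard library, which breaks when the code also runs on Python 3. A minimal sketch (not part of the patch) of what the future import changes:

    from __future__ import absolute_import  # no-op on Python 3; changes Python 2 import semantics

    import json

    # Under Python 2 without the future import, a sibling module named
    # json.py in the same package would shadow the stdlib module here;
    # with it, "import json" is always absolute.
    print(json.dumps({"ok": True}))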
index 195b572..620edc3 100755 (executable)
@@ -8,6 +8,7 @@
 ##############################################################################
 
 
+from __future__ import absolute_import
 import unittest
 
 from tests.functional import utils
@@ -46,4 +47,3 @@ class RunnerTestCase(unittest.TestCase):
         res = self.yardstick("runner show Sequence")
         sequence = "sequence - list of values which are executed" in res
         self.assertTrue(sequence)
-
index 8779737..4741e82 100755 (executable)
@@ -8,6 +8,7 @@
 ##############################################################################
 
 
+from __future__ import absolute_import
 import unittest
 
 from tests.functional import utils
@@ -59,4 +60,3 @@ class ScenarioTestCase(unittest.TestCase):
         res = self.yardstick("scenario show Pktgen")
         pktgen = "Execute pktgen between two hosts" in res
         self.assertTrue(pktgen)
-
index aaaaaac..b96d2dd 100755 (executable)
@@ -7,13 +7,13 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
+
 import copy
-import json
 import os
-import shutil
 import subprocess
 
-
+from oslo_serialization import jsonutils
 from oslo_utils import encodeutils
 
 
@@ -40,11 +40,11 @@ class Yardstick(object):
         """Call yardstick in the shell
 
         :param cmd: yardstick command
-        :param getjson: in cases, when yardstick prints JSON, you can catch output
-            deserialized
+        :param getjson: when yardstick prints JSON, the output can be
+         caught and returned deserialized
         TO DO:
-        :param report_path: if present, yardstick command and its output will be
-            written to file with passed file name
+        :param report_path: if present, the yardstick command and its
+         output will be written to a file with the given name
         :param raw: don't write command itself to report file. Only output
             will be written
         """
@@ -53,11 +53,11 @@ class Yardstick(object):
             cmd = cmd.split(" ")
         try:
             output = encodeutils.safe_decode(subprocess.check_output(
-                self.args + cmd, stderr=subprocess.STDOUT, env=self.env))
+                self.args + cmd, stderr=subprocess.STDOUT, env=self.env),
+                'utf-8')
 
             if getjson:
-                return json.loads(output)
+                return jsonutils.loads(output)
             return output
         except subprocess.CalledProcessError as e:
             raise e
-
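For context on the utils.py hunk above, a minimal sketch (assuming oslo.serialization and oslo.utils are installed) of the bytes-safe decode-then-parse pattern the commit switches to:

    from oslo_serialization import jsonutils
    from oslo_utils import encodeutils

    raw = b'{"status": 0}'                        # e.g. a subprocess.check_output() result
    text = encodeutils.safe_decode(raw, 'utf-8')  # bytes -> text on both Python 2 and 3
    print(jsonutils.loads(text)["status"])        # prints: 0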
index 5d17740..acf6e41 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import unittest
 
 from api.utils import common
index 0852da2..aff0cab 100644 (file)
@@ -6,19 +6,23 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import unittest
 import mock
-import uuid
-import datetime
 
 from api.utils import influx
 
+import six.moves.configparser as ConfigParser
+
 
 class GetDataDbClientTestCase(unittest.TestCase):
 
     @mock.patch('api.utils.influx.ConfigParser')
     def test_get_data_db_client_dispatcher_not_influxdb(self, mock_parser):
         mock_parser.ConfigParser().get.return_value = 'file'
+        # reset exception to avoid
+        # TypeError: catching classes that do not inherit from BaseException
+        mock_parser.NoOptionError = ConfigParser.NoOptionError
         try:
             influx.get_data_db_client()
         except Exception as e:
@@ -35,38 +39,14 @@ class GetIpTestCase(unittest.TestCase):
         self.assertEqual(result, output)
 
 
-class WriteDataTestCase(unittest.TestCase):
-
-    @mock.patch('api.utils.influx.get_data_db_client')
-    def test_write_data(self, mock_get_client):
-        measurement = 'tasklist'
-        field = {'status': 1}
-        timestamp = datetime.datetime.now()
-        tags = {'task_id': str(uuid.uuid4())}
-
-        influx._write_data(measurement, field, timestamp, tags)
-        mock_get_client.assert_called_with()
-
-
-class WriteDataTasklistTestCase(unittest.TestCase):
-
-    @mock.patch('api.utils.influx._write_data')
-    def test_write_data_tasklist(self, mock_write_data):
-        task_id = str(uuid.uuid4())
-        timestamp = datetime.datetime.now()
-        status = 1
-        influx.write_data_tasklist(task_id, timestamp, status)
-
-        field = {'status': status, 'error': ''}
-        tags = {'task_id': task_id}
-        mock_write_data.assert_called_with('tasklist', field, timestamp, tags)
-
-
 class QueryTestCase(unittest.TestCase):
 
     @mock.patch('api.utils.influx.ConfigParser')
     def test_query_dispatcher_not_influxdb(self, mock_parser):
         mock_parser.ConfigParser().get.return_value = 'file'
+        # reset exception to avoid
+        # TypeError: catching classes that do not inherit from BaseException
+        mock_parser.NoOptionError = ConfigParser.NoOptionError
         try:
             sql = 'select * form tasklist'
             influx.query(sql)
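A minimal sketch of the pitfall the two new "reset exception" comments guard against: mock.patch on a module replaces every attribute with a Mock, including exception classes, so an except-clause in the code under test would fail.

    import mock
    import six.moves.configparser as ConfigParser

    # "except mock_parser.NoOptionError" would otherwise raise
    # "TypeError: catching classes that do not inherit from BaseException";
    # restoring the real exception class keeps except-clauses valid.
    with mock.patch('api.utils.influx.ConfigParser') as mock_parser:
        mock_parser.NoOptionError = ConfigParser.NoOptionError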
index 5214e66..1a54035 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.contexts.dummy
 
+from __future__ import absolute_import
 import unittest
 
 from yardstick.benchmark.contexts import dummy
index f891b0a..f8f3492 100644 (file)
 
 # Unittest for yardstick.benchmark.contexts.heat
 
-import mock
+from __future__ import absolute_import
+
+import logging
+import os
 import unittest
+import uuid
+
+import mock
 
-from yardstick.benchmark.contexts import model
 from yardstick.benchmark.contexts import heat
 
 
+LOG = logging.getLogger(__name__)
+
+
 class HeatContextTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -39,6 +47,8 @@ class HeatContextTestCase(unittest.TestCase):
         self.assertIsNone(self.test_context._user)
         self.assertIsNone(self.test_context.template_file)
         self.assertIsNone(self.test_context.heat_parameters)
+        self.assertIsNotNone(self.test_context.key_uuid)
+        self.assertIsNotNone(self.test_context.key_filename)
 
     @mock.patch('yardstick.benchmark.contexts.heat.PlacementGroup')
     @mock.patch('yardstick.benchmark.contexts.heat.Network')
@@ -55,6 +65,7 @@ class HeatContextTestCase(unittest.TestCase):
 
         self.test_context.init(attrs)
 
+        self.assertEqual(self.test_context.name, "foo")
         self.assertEqual(self.test_context.keypair_name, "foo-key")
         self.assertEqual(self.test_context.secgroup_name, "foo-secgroup")
 
@@ -66,17 +77,29 @@ class HeatContextTestCase(unittest.TestCase):
             'bar', self.test_context, networks['bar'])
         self.assertTrue(len(self.test_context.networks) == 1)
 
-        mock_server.assert_called_with('baz', self.test_context, servers['baz'])
+        mock_server.assert_called_with('baz', self.test_context,
+                                       servers['baz'])
         self.assertTrue(len(self.test_context.servers) == 1)
 
+        if os.path.exists(self.test_context.key_filename):
+            try:
+                os.remove(self.test_context.key_filename)
+                os.remove(self.test_context.key_filename + ".pub")
+            except OSError:
+                LOG.exception("key_filename: %s",
+                              self.test_context.key_filename)
+
     @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
     def test__add_resources_to_template_no_servers(self, mock_template):
 
         self.test_context.keypair_name = "foo-key"
         self.test_context.secgroup_name = "foo-secgroup"
+        self.test_context.key_uuid = "2f2e4997-0a8e-4eb7-9fa4-f3f8fbbc393b"
 
         self.test_context._add_resources_to_template(mock_template)
-        mock_template.add_keypair.assert_called_with("foo-key")
+        mock_template.add_keypair.assert_called_with(
+            "foo-key",
+            "2f2e4997-0a8e-4eb7-9fa4-f3f8fbbc393b")
         mock_template.add_security_group.assert_called_with("foo-secgroup")
 
     @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
@@ -105,6 +128,8 @@ class HeatContextTestCase(unittest.TestCase):
         self.mock_context.name = 'bar'
         self.mock_context.stack.outputs = {'public_ip': '127.0.0.1',
                                            'private_ip': '10.0.0.1'}
+        self.mock_context.key_uuid = uuid.uuid4()
+
         attr_name = {'name': 'foo.bar',
                      'public_ip_attr': 'public_ip',
                      'private_ip_attr': 'private_ip'}
index a1978e3..537a8c0 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.contexts.model
 
+from __future__ import absolute_import
 import mock
 import unittest
 
@@ -119,7 +120,8 @@ class NetworkTestCase(unittest.TestCase):
 
         attrs = {'external_network': 'ext_net'}
         test_network = model.Network('foo', self.mock_context, attrs)
-        exp_router = model.Router('router', 'foo', self.mock_context, 'ext_net')
+        exp_router = model.Router('router', 'foo', self.mock_context,
+                                  'ext_net')
 
         self.assertEqual(test_network.router.stack_name, exp_router.stack_name)
         self.assertEqual(test_network.router.stack_if_name,
@@ -219,4 +221,3 @@ class ServerTestCase(unittest.TestCase):
             user=self.mock_context.user,
             key_name=self.mock_context.keypair_name,
             scheduler_hints='hints')
-
index 6939b85..de5ba70 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.contexts.node
 
+from __future__ import absolute_import
 import os
 import unittest
 
@@ -21,6 +22,7 @@ class NodeContextTestCase(unittest.TestCase):
 
     NODES_SAMPLE = "nodes_sample.yaml"
     NODES_DUPLICATE_SAMPLE = "nodes_duplicate_sample.yaml"
+
     def setUp(self):
         self.test_context = node.NodeContext()
 
diff --git a/tests/unit/benchmark/core/__init__.py b/tests/unit/benchmark/core/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
similarity index 68%
rename from tests/unit/cmd/commands/test_plugin.py
rename to tests/unit/benchmark/core/test_plugin.py
index 2e823fd..edc1034 100644 (file)
@@ -9,43 +9,54 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.cmd.commands.plugin
+# Unittest for yardstick.benchmark.core.plugin
+from __future__ import absolute_import
+import os
+from os.path import dirname as dirname
 
-import mock
+try:
+    from unittest import mock
+except ImportError:
+    import mock
 import unittest
 
-from yardstick.cmd.commands import plugin
+from yardstick.benchmark.core import plugin
 
 
 class Arg(object):
+
     def __init__(self):
-        self.input_file = ('plugin/sample_config.yaml',)
+        # self.input_file = ('plugin/sample_config.yaml',)
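+        # five nested dirname() calls climb from tests/unit/benchmark/core/
+        # up to the repository root before joining the sample-config path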
+        self.input_file = [
+            os.path.join(os.path.abspath(
+                dirname(dirname(dirname(dirname(dirname(__file__)))))),
+                'plugin/sample_config.yaml')]
 
 
-@mock.patch('yardstick.cmd.commands.plugin.ssh')
-class pluginCommandsTestCase(unittest.TestCase):
+@mock.patch('yardstick.benchmark.core.plugin.ssh')
+class pluginTestCase(unittest.TestCase):
 
     def setUp(self):
         self.result = {}
 
-    def test_do_install(self, mock_ssh):
-        p = plugin.PluginCommands()
+    def test_install(self, mock_ssh):
+        p = plugin.Plugin()
         mock_ssh.SSH().execute.return_value = (0, '', '')
         input_file = Arg()
-        p.do_install(input_file)
+        p.install(input_file)
         expected_result = {}
         self.assertEqual(self.result, expected_result)
 
-    def test_do_remove(self, mock_ssh):
-        p = plugin.PluginCommands()
+    def test_remove(self, mock_ssh):
+        p = plugin.Plugin()
         mock_ssh.SSH().execute.return_value = (0, '', '')
         input_file = Arg()
-        p.do_remove(input_file)
+        p.remove(input_file)
         expected_result = {}
         self.assertEqual(self.result, expected_result)
 
     def test_install_setup_run(self, mock_ssh):
-        p = plugin.PluginCommands()
+        p = plugin.Plugin()
         mock_ssh.SSH().execute.return_value = (0, '', '')
         plugins = {
             "name": "sample"
@@ -64,7 +75,7 @@ class pluginCommandsTestCase(unittest.TestCase):
         self.assertEqual(self.result, expected_result)
 
     def test_remove_setup_run(self, mock_ssh):
-        p = plugin.PluginCommands()
+        p = plugin.Plugin()
         mock_ssh.SSH().execute.return_value = (0, '', '')
         plugins = {
             "name": "sample"
@@ -81,3 +92,11 @@ class pluginCommandsTestCase(unittest.TestCase):
         p._run(plugin_name)
         expected_result = {}
         self.assertEqual(self.result, expected_result)
+
+
+def main():
+    unittest.main()
+
+
+if __name__ == '__main__':
+    main()
similarity index 59%
rename from tests/unit/cmd/commands/test_task.py
rename to tests/unit/benchmark/core/test_task.py
index 0177fd0..5dd32ea 100644 (file)
@@ -9,18 +9,26 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.cmd.commands.task
+# Unittest for yardstick.benchmark.core.task
 
+from __future__ import print_function
+
+from __future__ import absolute_import
 import os
-import mock
 import unittest
 
-from yardstick.cmd.commands import task
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
+
+from yardstick.benchmark.core import task
 
 
-class TaskCommandsTestCase(unittest.TestCase):
+class TaskTestCase(unittest.TestCase):
 
-    @mock.patch('yardstick.cmd.commands.task.Context')
+    @mock.patch('yardstick.benchmark.core.task.Context')
     def test_parse_nodes_host_target_same_context(self, mock_context):
         nodes = {
             "host": "node1.LF",
@@ -28,9 +36,9 @@ class TaskCommandsTestCase(unittest.TestCase):
         }
         scenario_cfg = {"nodes": nodes}
         server_info = {
-           "ip": "10.20.0.3",
-           "user": "root",
-           "key_filename": "/root/.ssh/id_rsa"
+            "ip": "10.20.0.3",
+            "user": "root",
+            "key_filename": "/root/.ssh/id_rsa"
         }
         mock_context.get_server.return_value = server_info
         context_cfg = task.parse_nodes_with_context(scenario_cfg)
@@ -38,108 +46,111 @@ class TaskCommandsTestCase(unittest.TestCase):
         self.assertEqual(context_cfg["host"], server_info)
         self.assertEqual(context_cfg["target"], server_info)
 
-    @mock.patch('yardstick.cmd.commands.task.Context')
-    @mock.patch('yardstick.cmd.commands.task.base_runner')
+    @mock.patch('yardstick.benchmark.core.task.Context')
+    @mock.patch('yardstick.benchmark.core.task.base_runner')
     def test_run(self, mock_base_runner, mock_ctx):
-        scenario = \
-            {'host': 'athena.demo',
-             'target': 'ares.demo',
-             'runner':
-                 {'duration': 60,
-                  'interval': 1,
-                  'type': 'Duration'
-                 },
-                 'type': 'Ping'}
-
-        t = task.TaskCommands()
+        scenario = {
+            'host': 'athena.demo',
+            'target': 'ares.demo',
+            'runner': {
+                'duration': 60,
+                'interval': 1,
+                'type': 'Duration'
+            },
+            'type': 'Ping'
+        }
+
+        t = task.Task()
         runner = mock.Mock()
         runner.join.return_value = 0
         mock_base_runner.Runner.get.return_value = runner
         t._run([scenario], False, "yardstick.out")
         self.assertTrue(runner.run.called)
 
-    @mock.patch('yardstick.cmd.commands.task.os')
+    @mock.patch('yardstick.benchmark.core.task.os')
     def test_check_precondition(self, mock_os):
-        cfg = \
-            {'precondition':
-                 {'installer_type': 'compass',
-                  'deploy_scenarios': 'os-nosdn',
-                  'pod_name': 'huawei-pod1'
-                 }
+        cfg = {
+            'precondition': {
+                'installer_type': 'compass',
+                'deploy_scenarios': 'os-nosdn',
+                'pod_name': 'huawei-pod1'
             }
+        }
 
         t = task.TaskParser('/opt')
-        mock_os.environ.get.side_effect = ['compass', 'os-nosdn', 'huawei-pod1']
+        mock_os.environ.get.side_effect = ['compass',
+                                           'os-nosdn',
+                                           'huawei-pod1']
         result = t._check_precondition(cfg)
         self.assertTrue(result)
 
-    @mock.patch('yardstick.cmd.commands.task.os.environ')
+    @mock.patch('yardstick.benchmark.core.task.os.environ')
     def test_parse_suite_no_constraint_no_args(self, mock_environ):
         SAMPLE_SCENARIO_PATH = "no_constraint_no_args_scenario_sample.yaml"
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
         mock_environ.get.side_effect = ['huawei-pod1', 'compass']
         task_files, task_args, task_args_fnames = t.parse_suite()
-        print ("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-               task_args_fnames))
+        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
+                                                task_args_fnames))
         self.assertEqual(task_files[0],
-            'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
+                         'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
         self.assertEqual(task_files[1],
-            'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml')
+                         'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml')
         self.assertEqual(task_args[0], None)
         self.assertEqual(task_args[1], None)
         self.assertEqual(task_args_fnames[0], None)
         self.assertEqual(task_args_fnames[1], None)
 
-    @mock.patch('yardstick.cmd.commands.task.os.environ')
+    @mock.patch('yardstick.benchmark.core.task.os.environ')
     def test_parse_suite_no_constraint_with_args(self, mock_environ):
         SAMPLE_SCENARIO_PATH = "no_constraint_with_args_scenario_sample.yaml"
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
         mock_environ.get.side_effect = ['huawei-pod1', 'compass']
         task_files, task_args, task_args_fnames = t.parse_suite()
-        print ("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-               task_args_fnames))
+        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
+                                                task_args_fnames))
         self.assertEqual(task_files[0],
-            'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
+                         'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
         self.assertEqual(task_files[1],
-            'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml')
+                         'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml')
         self.assertEqual(task_args[0], None)
         self.assertEqual(task_args[1],
-                        '{"host": "node1.LF","target": "node2.LF"}')
+                         '{"host": "node1.LF","target": "node2.LF"}')
         self.assertEqual(task_args_fnames[0], None)
         self.assertEqual(task_args_fnames[1], None)
 
-    @mock.patch('yardstick.cmd.commands.task.os.environ')
+    @mock.patch('yardstick.benchmark.core.task.os.environ')
     def test_parse_suite_with_constraint_no_args(self, mock_environ):
         SAMPLE_SCENARIO_PATH = "with_constraint_no_args_scenario_sample.yaml"
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
         mock_environ.get.side_effect = ['huawei-pod1', 'compass']
         task_files, task_args, task_args_fnames = t.parse_suite()
-        print ("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-               task_args_fnames))
+        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
+                                                task_args_fnames))
         self.assertEqual(task_files[0],
-            'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
+                         'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
         self.assertEqual(task_files[1],
-            'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml')
+                         'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml')
         self.assertEqual(task_args[0], None)
         self.assertEqual(task_args[1], None)
         self.assertEqual(task_args_fnames[0], None)
         self.assertEqual(task_args_fnames[1], None)
 
-    @mock.patch('yardstick.cmd.commands.task.os.environ')
+    @mock.patch('yardstick.benchmark.core.task.os.environ')
     def test_parse_suite_with_constraint_with_args(self, mock_environ):
         SAMPLE_SCENARIO_PATH = "with_constraint_with_args_scenario_sample.yaml"
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
         mock_environ.get.side_effect = ['huawei-pod1', 'compass']
         task_files, task_args, task_args_fnames = t.parse_suite()
-        print ("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-               task_args_fnames))
+        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
+                                                task_args_fnames))
         self.assertEqual(task_files[0],
-            'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
+                         'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
         self.assertEqual(task_files[1],
-            'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml')
+                         'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml')
         self.assertEqual(task_args[0], None)
         self.assertEqual(task_args[1],
-                        '{"host": "node1.LF","target": "node2.LF"}')
+                         '{"host": "node1.LF","target": "node2.LF"}')
         self.assertEqual(task_args_fnames[0], None)
         self.assertEqual(task_args_fnames[1], None)
 
@@ -148,3 +159,10 @@ class TaskCommandsTestCase(unittest.TestCase):
         file_path = os.path.join(curr_path, filename)
         return file_path
 
+
+def main():
+    unittest.main()
+
+
+if __name__ == '__main__':
+    main()
similarity index 61%
rename from tests/unit/cmd/commands/test_testcase.py
rename to tests/unit/benchmark/core/test_testcase.py
index c55c367..c7da2de 100644 (file)
 
 # Unittest for yardstick.cmd.commands.testcase
 
-import mock
+from __future__ import absolute_import
 import unittest
 
-from yardstick.cmd.commands import testcase
-from yardstick.cmd.commands.testcase import TestcaseCommands
+from yardstick.benchmark.core import testcase
+
 
 class Arg(object):
+
     def __init__(self):
-        self.casename=('opnfv_yardstick_tc001',)
+        self.casename = ('opnfv_yardstick_tc001',)
+
 
-class TestcaseCommandsUT(unittest.TestCase):
+class TestcaseUT(unittest.TestCase):
 
-    def test_do_list(self):
-        t = testcase.TestcaseCommands()
-        result = t.do_list("")
+    def test_list_all(self):
+        t = testcase.Testcase()
+        result = t.list_all("")
         self.assertEqual(result, True)
 
-    def test_do_show(self):
-        t = testcase.TestcaseCommands()
+    def test_show(self):
+        t = testcase.Testcase()
         casename = Arg()
-        result = t.do_show(casename)
+        result = t.show(casename)
         self.assertEqual(result, True)
 
+
+def main():
+    unittest.main()
+
+
+if __name__ == '__main__':
+    main()
index 340f94c..9e2e8b1 100644 (file)
@@ -9,15 +9,20 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal
+# Unittest for
+# yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal
 
+from __future__ import absolute_import
 import mock
 import unittest
 
-from yardstick.benchmark.scenarios.availability.attacker import baseattacker
-from yardstick.benchmark.scenarios.availability.attacker import attacker_baremetal
+from yardstick.benchmark.scenarios.availability.attacker import \
+    attacker_baremetal
 
-@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal.subprocess')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal'
+    '.subprocess')
 class ExecuteShellTestCase(unittest.TestCase):
 
     def test__fun_execute_shell_command_successful(self, mock_subprocess):
@@ -26,34 +31,37 @@ class ExecuteShellTestCase(unittest.TestCase):
         exitcode, output = attacker_baremetal._execute_shell_command(cmd)
         self.assertEqual(exitcode, 0)
 
-    def test__fun_execute_shell_command_fail_cmd_exception(self, mock_subprocess):
+    def test__fun_execute_shell_command_fail_cmd_exception(self,
+                                                           mock_subprocess):
         cmd = "env"
         mock_subprocess.check_output.side_effect = RuntimeError
         exitcode, output = attacker_baremetal._execute_shell_command(cmd)
         self.assertEqual(exitcode, -1)
 
 
-@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal.ssh')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal'
+    '.ssh')
 class AttackerBaremetalTestCase(unittest.TestCase):
 
     def setUp(self):
-        host = { 
-            "ipmi_ip": "10.20.0.5", 
-            "ipmi_user": "root", 
-            "ipmi_pwd": "123456", 
-            "ip": "10.20.0.5", 
-            "user": "root", 
-            "key_filename": "/root/.ssh/id_rsa" 
-        } 
-        self.context = {"node1": host} 
-        self.attacker_cfg = { 
-            'fault_type': 'bear-metal-down', 
-            'host': 'node1', 
-        } 
+        host = {
+            "ipmi_ip": "10.20.0.5",
+            "ipmi_user": "root",
+            "ipmi_pwd": "123456",
+            "ip": "10.20.0.5",
+            "user": "root",
+            "key_filename": "/root/.ssh/id_rsa"
+        }
+        self.context = {"node1": host}
+        self.attacker_cfg = {
+            'fault_type': 'bear-metal-down',
+            'host': 'node1',
+        }
 
     def test__attacker_baremetal_all_successful(self, mock_ssh):
-
-        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg, self.context)
+        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+                                                   self.context)
 
         mock_ssh.SSH().execute.return_value = (0, "running", '')
         ins.setup()
@@ -61,8 +69,8 @@ class AttackerBaremetalTestCase(unittest.TestCase):
         ins.recover()
 
     def test__attacker_baremetal_check_failuer(self, mock_ssh):
-
-        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg, self.context)
+        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+                                                   self.context)
         mock_ssh.SSH().execute.return_value = (0, "error check", '')
         ins.setup()
 
@@ -70,7 +78,8 @@ class AttackerBaremetalTestCase(unittest.TestCase):
 
         self.attacker_cfg["jump_host"] = 'node1'
         self.context["node1"]["pwd"] = "123456"
-        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg, self.context)
+        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+                                                   self.context)
 
         mock_ssh.SSH().execute.return_value = (0, "running", '')
         ins.setup()
index aa2e0cc..322b583 100644 (file)
 # Unittest for yardstick.benchmark.scenarios.availability.attacker
 # .attacker_general
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.attacker import baseattacker
 
+
 @mock.patch('yardstick.benchmark.scenarios.availability.attacker.'
             'attacker_general.ssh')
 class GeneralAttackerServiceTestCase(unittest.TestCase):
@@ -30,10 +32,10 @@ class GeneralAttackerServiceTestCase(unittest.TestCase):
         self.context = {"node1": host}
         self.attacker_cfg = {
             'fault_type': 'general-attacker',
-            'action_parameter':{'process_name':'nova_api'},
-            'rollback_parameter':{'process_name':'nova_api'},
-            'key':'stop-service',
-            'attack_key':'stop-service',
+            'action_parameter': {'process_name': 'nova_api'},
+            'rollback_parameter': {'process_name': 'nova_api'},
+            'key': 'stop-service',
+            'attack_key': 'stop-service',
             'host': 'node1',
         }
 
index eb0cce7..d7771bd 100644 (file)
@@ -9,14 +9,18 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.attacker.attacker_process
+# Unittest for
+# yardstick.benchmark.scenarios.availability.attacker.attacker_process
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.attacker import baseattacker
 
-@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_process.ssh')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.attacker.attacker_process.ssh')
 class AttackerServiceTestCase(unittest.TestCase):
 
     def setUp(self):
index a20cf81..7030c78 100644 (file)
@@ -9,21 +9,25 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.monitor.monitor_command
+# Unittest for
+# yardstick.benchmark.scenarios.availability.monitor.monitor_command
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.monitor import basemonitor
 
 
-@mock.patch('yardstick.benchmark.scenarios.availability.monitor.basemonitor.BaseMonitor')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.monitor.basemonitor'
+    '.BaseMonitor')
 class MonitorMgrTestCase(unittest.TestCase):
 
     def setUp(self):
         config = {
             'monitor_type': 'openstack-api',
-            'key' : 'service-status'
+            'key': 'service-status'
         }
 
         self.monitor_configs = []
@@ -42,10 +46,12 @@ class MonitorMgrTestCase(unittest.TestCase):
         monitorMgr.init_monitors(self.monitor_configs, None)
         monitorIns = monitorMgr['service-status']
 
+
 class BaseMonitorTestCase(unittest.TestCase):
 
     class MonitorSimple(basemonitor.BaseMonitor):
         __monitor_type__ = "MonitorForTest"
+
         def setup(self):
             self.monitor_result = False
 
@@ -65,14 +71,15 @@ class BaseMonitorTestCase(unittest.TestCase):
         ins.start_monitor()
         ins.wait_monitor()
 
-
     def test__basemonitor_all_successful(self):
         ins = self.MonitorSimple(self.monitor_cfg, None)
         ins.setup()
         ins.run()
         ins.verify_SLA()
 
-    @mock.patch('yardstick.benchmark.scenarios.availability.monitor.basemonitor.multiprocessing')
+    @mock.patch(
+        'yardstick.benchmark.scenarios.availability.monitor.basemonitor'
+        '.multiprocessing')
     def test__basemonitor_func_false(self, mock_multiprocess):
         ins = self.MonitorSimple(self.monitor_cfg, None)
         ins.setup()
@@ -87,4 +94,3 @@ class BaseMonitorTestCase(unittest.TestCase):
         except Exception:
             pass
         self.assertIsNone(cls)
-
index d85f1e1..03ec149 100644 (file)
@@ -9,26 +9,31 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.operation.baseoperation
+# Unittest for
+# yardstick.benchmark.scenarios.availability.operation.baseoperation
 
+from __future__ import absolute_import
 import mock
 import unittest
 
-from yardstick.benchmark.scenarios.availability.operation import  baseoperation
+from yardstick.benchmark.scenarios.availability.operation import baseoperation
 
-@mock.patch('yardstick.benchmark.scenarios.availability.operation.baseoperation.BaseOperation')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.operation.baseoperation'
+    '.BaseOperation')
 class OperationMgrTestCase(unittest.TestCase):
 
     def setUp(self):
         config = {
             'operation_type': 'general-operation',
-            'key' : 'service-status'
+            'key': 'service-status'
         }
 
         self.operation_configs = []
         self.operation_configs.append(config)
 
-    def  test_all_successful(self, mock_operation):
+    def test_all_successful(self, mock_operation):
         mgr_ins = baseoperation.OperationMgr()
         mgr_ins.init_operations(self.operation_configs, None)
         operation_ins = mgr_ins["service-status"]
@@ -59,7 +64,7 @@ class BaseOperationTestCase(unittest.TestCase):
     def setUp(self):
         self.config = {
             'operation_type': 'general-operation',
-            'key' : 'service-status'
+            'key': 'service-status'
         }
 
     def test_all_successful(self):
@@ -70,7 +75,7 @@ class BaseOperationTestCase(unittest.TestCase):
 
     def test_get_script_fullpath(self):
         base_ins = baseoperation.BaseOperation(self.config, None)
-        base_ins.get_script_fullpath("ha_tools/test.bash");
+        base_ins.get_script_fullpath("ha_tools/test.bash")
 
     def test_get_operation_cls_successful(self):
         base_ins = baseoperation.BaseOperation(self.config, None)
index 9972d6b..36ce900 100644 (file)
 # Unittest for yardstick.benchmark.scenarios.availability.result_checker
 # .baseresultchecker
 
+from __future__ import absolute_import
 import mock
 import unittest
 
-from yardstick.benchmark.scenarios.availability.result_checker import baseresultchecker
+from yardstick.benchmark.scenarios.availability.result_checker import \
+    baseresultchecker
 
 
 @mock.patch('yardstick.benchmark.scenarios.availability.result_checker'
-    '.baseresultchecker.BaseResultChecker')
+            '.baseresultchecker.BaseResultChecker')
 class ResultCheckerMgrTestCase(unittest.TestCase):
 
     def setUp(self):
         config = {
             'checker_type': 'general-result-checker',
-            'key' : 'process-checker'
+            'key': 'process-checker'
         }
 
         self.checker_configs = []
@@ -52,6 +54,7 @@ class BaseResultCheckerTestCase(unittest.TestCase):
 
     class ResultCheckeSimple(baseresultchecker.BaseResultChecker):
         __result_checker__type__ = "ResultCheckeForTest"
+
         def setup(self):
             self.success = False
 
@@ -61,7 +64,7 @@ class BaseResultCheckerTestCase(unittest.TestCase):
     def setUp(self):
         self.checker_cfg = {
             'checker_type': 'general-result-checker',
-            'key' : 'process-checker'
+            'key': 'process-checker'
         }
 
     def test_baseresultchecker_setup_verify_successful(self):
@@ -81,8 +84,10 @@ class BaseResultCheckerTestCase(unittest.TestCase):
         path = ins.get_script_fullpath("test.bash")
 
     def test_get_resultchecker_cls_successful(self):
-        baseresultchecker.BaseResultChecker.get_resultchecker_cls("ResultCheckeForTest")
+        baseresultchecker.BaseResultChecker.get_resultchecker_cls(
+            "ResultCheckeForTest")
 
     def test_get_resultchecker_cls_fail(self):
         with self.assertRaises(RuntimeError):
-            baseresultchecker.BaseResultChecker.get_resultchecker_cls("ResultCheckeNotExist")
+            baseresultchecker.BaseResultChecker.get_resultchecker_cls(
+                "ResultCheckeNotExist")
index 0611672..d01a60e 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.availability.director
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.director import Director
-from yardstick.benchmark.scenarios.availability import  actionplayers
 
 
 @mock.patch('yardstick.benchmark.scenarios.availability.director.basemonitor')
 @mock.patch('yardstick.benchmark.scenarios.availability.director.baseattacker')
-@mock.patch('yardstick.benchmark.scenarios.availability.director.baseoperation')
-@mock.patch('yardstick.benchmark.scenarios.availability.director.baseresultchecker')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.director.baseoperation')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.director.baseresultchecker')
 class DirectorTestCase(unittest.TestCase):
 
     def setUp(self):
         self.scenario_cfg = {
             'type': "general_scenario",
             'options': {
-                'attackers':[{
+                'attackers': [{
                     'fault_type': "general-attacker",
                     'key': "kill-process"}],
                 'monitors': [{
@@ -36,11 +38,11 @@ class DirectorTestCase(unittest.TestCase):
                     'key': "service-status"}],
                 'operations': [{
                     'operation_type': 'general-operation',
-                    'key' : 'service-status'}],
+                    'key': 'service-status'}],
                 'resultCheckers': [{
                     'checker_type': 'general-result-checker',
-                    'key' : 'process-checker',}],
-                'steps':[
+                    'key': 'process-checker', }],
+                'steps': [
                     {
                         'actionKey': "service-status",
                         'actionType': "operation",
@@ -57,7 +59,7 @@ class DirectorTestCase(unittest.TestCase):
                         'actionKey': "service-status",
                         'actionType': "monitor",
                         'index': 4},
-                    ]
+                ]
             }
         }
         host = {
@@ -67,15 +69,19 @@ class DirectorTestCase(unittest.TestCase):
         }
         self.ctx = {"nodes": {"node1": host}}
 
-    def test_director_all_successful(self, mock_checer, mock_opertion, mock_attacker, mock_monitor):
+    def test_director_all_successful(self, mock_checer, mock_opertion,
+                                     mock_attacker, mock_monitor):
         ins = Director(self.scenario_cfg, self.ctx)
         opertion_action = ins.createActionPlayer("operation", "service-status")
         attacker_action = ins.createActionPlayer("attacker", "kill-process")
-        checker_action = ins.createActionPlayer("resultchecker", "process-checker")
+        checker_action = ins.createActionPlayer("resultchecker",
+                                                "process-checker")
         monitor_action = ins.createActionPlayer("monitor", "service-status")
 
-        opertion_rollback = ins.createActionRollbacker("operation", "service-status")
-        attacker_rollback = ins.createActionRollbacker("attacker", "kill-process")
+        opertion_rollback = ins.createActionRollbacker("operation",
+                                                       "service-status")
+        attacker_rollback = ins.createActionRollbacker("attacker",
+                                                       "kill-process")
         ins.executionSteps.append(opertion_rollback)
         ins.executionSteps.append(attacker_rollback)
 
@@ -91,13 +97,8 @@ class DirectorTestCase(unittest.TestCase):
         ins.verify()
         ins.knockoff()
 
-    def test_director_get_wrong_item(self, mock_checer, mock_opertion, mock_attacker, mock_monitor):
+    def test_director_get_wrong_item(self, mock_checer, mock_opertion,
+                                     mock_attacker, mock_monitor):
         ins = Director(self.scenario_cfg, self.ctx)
         ins.createActionPlayer("wrong_type", "wrong_key")
         ins.createActionRollbacker("wrong_type", "wrong_key")
-
-
-
-
-
-
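A side note on the four mock arguments in DirectorTestCase above: stacked @mock.patch decorators are applied bottom-up, so the lowest decorator supplies the first argument after self. A minimal, self-contained illustration (hypothetical targets):

    import mock
    import unittest

    class OrderDemo(unittest.TestCase):
        @mock.patch('os.getcwd')   # applied last  -> bound to the last mock argument
        @mock.patch('os.listdir')  # applied first -> bound to the first mock argument
        def test_order(self, mock_listdir, mock_getcwd):
            self.assertIsNot(mock_listdir, mock_getcwd)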
index c8cda7d..a84bfd2 100644 (file)
@@ -9,14 +9,19 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.monitor.monitor_command
+# Unittest for
+# yardstick.benchmark.scenarios.availability.monitor.monitor_command
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.monitor import monitor_command
 
-@mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.subprocess')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.monitor.monitor_command'
+    '.subprocess')
 class ExecuteShellTestCase(unittest.TestCase):
 
     def test__fun_execute_shell_command_successful(self, mock_subprocess):
@@ -25,13 +30,17 @@ class ExecuteShellTestCase(unittest.TestCase):
         exitcode, output = monitor_command._execute_shell_command(cmd)
         self.assertEqual(exitcode, 0)
 
-    def test__fun_execute_shell_command_fail_cmd_exception(self, mock_subprocess):
+    def test__fun_execute_shell_command_fail_cmd_exception(self,
+                                                           mock_subprocess):
         cmd = "env"
         mock_subprocess.check_output.side_effect = RuntimeError
         exitcode, output = monitor_command._execute_shell_command(cmd)
         self.assertEqual(exitcode, -1)
 
-@mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.subprocess')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.monitor.monitor_command'
+    '.subprocess')
 class MonitorOpenstackCmdTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -48,7 +57,6 @@ class MonitorOpenstackCmdTestCase(unittest.TestCase):
             'sla': {'max_outage_time': 5}
         }
 
-
     def test__monitor_command_monitor_func_successful(self, mock_subprocess):
 
         instance = monitor_command.MonitorOpenstackCmd(self.config, None)
@@ -69,11 +77,15 @@ class MonitorOpenstackCmdTestCase(unittest.TestCase):
         instance._result = {"outage_time": 10}
         instance.verify_SLA()
 
-    @mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.ssh')
-    def test__monitor_command_ssh_monitor_successful(self, mock_ssh, mock_subprocess):
+    @mock.patch(
+        'yardstick.benchmark.scenarios.availability.monitor.monitor_command'
+        '.ssh')
+    def test__monitor_command_ssh_monitor_successful(self, mock_ssh,
+                                                     mock_subprocess):
 
         self.config["host"] = "node1"
-        instance = monitor_command.MonitorOpenstackCmd(self.config, self.context)
+        instance = monitor_command.MonitorOpenstackCmd(
+            self.config, self.context)
         instance.setup()
         mock_ssh.SSH().execute.return_value = (0, "0", '')
         ret = instance.monitor_func()
index de7d26c..369f6f4 100644 (file)
@@ -12,6 +12,7 @@
 # Unittest for yardstick.benchmark.scenarios.availability.monitor
 # .monitor_general
 
+from __future__ import absolute_import
 import mock
 import unittest
 from yardstick.benchmark.scenarios.availability.monitor import monitor_general
@@ -22,6 +23,7 @@ from yardstick.benchmark.scenarios.availability.monitor import monitor_general
 @mock.patch('yardstick.benchmark.scenarios.availability.monitor.'
             'monitor_general.open')
 class GeneralMonitorServiceTestCase(unittest.TestCase):
+
     def setUp(self):
         host = {
             "ip": "10.20.0.5",
@@ -53,23 +55,26 @@ class GeneralMonitorServiceTestCase(unittest.TestCase):
         ins.setup()
         mock_ssh.SSH().execute.return_value = (0, "running", '')
         ins.monitor_func()
-        ins._result = {'outage_time' : 0}
+        ins._result = {'outage_time': 0}
         ins.verify_SLA()
 
-    def test__monitor_general_all_successful_noparam(self, mock_open, mock_ssh):
-        ins = monitor_general.GeneralMonitor(self.monitor_cfg_noparam, self.context)
+    def test__monitor_general_all_successful_noparam(self, mock_open,
+                                                     mock_ssh):
+        ins = monitor_general.GeneralMonitor(
+            self.monitor_cfg_noparam, self.context)
 
         ins.setup()
         mock_ssh.SSH().execute.return_value = (0, "running", '')
         ins.monitor_func()
-        ins._result = {'outage_time' : 0}
+        ins._result = {'outage_time': 0}
         ins.verify_SLA()
 
     def test__monitor_general_failure(self, mock_open, mock_ssh):
-        ins = monitor_general.GeneralMonitor(self.monitor_cfg_noparam, self.context)
+        ins = monitor_general.GeneralMonitor(
+            self.monitor_cfg_noparam, self.context)
 
         ins.setup()
         mock_ssh.SSH().execute.return_value = (1, "error", 'error')
         ins.monitor_func()
-        ins._result = {'outage_time' : 2}
+        ins._result = {'outage_time': 2}
         ins.verify_SLA()
index dda104b..8270405 100644 (file)
@@ -9,14 +9,18 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.monitor.monitor_process
+# Unittest for
+# yardstick.benchmark.scenarios.availability.monitor.monitor_process
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.monitor import monitor_process
 
-@mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_process.ssh')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.monitor.monitor_process.ssh')
 class MonitorProcessTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -53,4 +57,3 @@ class MonitorProcessTestCase(unittest.TestCase):
         ins.monitor_func()
         ins._result = {"outage_time": 10}
         ins.verify_SLA()
-
index 26cd3f7..2c6dc16 100644 (file)
 # Unittest for yardstick.benchmark.scenarios.availability.operation
 # .operation_general
 
+from __future__ import absolute_import
 import mock
 import unittest
-from yardstick.benchmark.scenarios.availability.operation import operation_general
+from yardstick.benchmark.scenarios.availability.operation import \
+    operation_general
+
 
 @mock.patch('yardstick.benchmark.scenarios.availability.operation.'
             'operation_general.ssh')
@@ -46,7 +49,7 @@ class GeneralOperaionTestCase(unittest.TestCase):
 
     def test__operation_successful(self, mock_open, mock_ssh):
         ins = operation_general.GeneralOperaion(self.operation_cfg,
-            self.context);
+                                                self.context)
         mock_ssh.SSH().execute.return_value = (0, "success", '')
         ins.setup()
         ins.run()
@@ -54,7 +57,7 @@ class GeneralOperaionTestCase(unittest.TestCase):
 
     def test__operation_successful_noparam(self, mock_open, mock_ssh):
         ins = operation_general.GeneralOperaion(self.operation_cfg_noparam,
-            self.context);
+                                                self.context)
         mock_ssh.SSH().execute.return_value = (0, "success", '')
         ins.setup()
         ins.run()
@@ -62,7 +65,7 @@ class GeneralOperaionTestCase(unittest.TestCase):
 
     def test__operation_fail(self, mock_open, mock_ssh):
         ins = operation_general.GeneralOperaion(self.operation_cfg,
-            self.context);
+                                                self.context)
         mock_ssh.SSH().execute.return_value = (1, "failed", '')
         ins.setup()
         ins.run()
index bbadf0a..c5451fa 100644 (file)
 # Unittest for yardstick.benchmark.scenarios.availability.result_checker
 # .result_checker_general
 
+from __future__ import absolute_import
 import mock
 import unittest
 import copy
 
-from yardstick.benchmark.scenarios.availability.result_checker import  result_checker_general
+from yardstick.benchmark.scenarios.availability.result_checker import \
+    result_checker_general
 
 
 @mock.patch('yardstick.benchmark.scenarios.availability.result_checker.'
@@ -35,16 +37,16 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         self.checker_cfg = {
             'parameter': {'processname': 'process'},
             'checker_type': 'general-result-checker',
-            'condition' : 'eq',
-            'expectedValue' : 1,
-            'key' : 'process-checker',
-            'checker_key' : 'process-checker',
+            'condition': 'eq',
+            'expectedValue': 1,
+            'key': 'process-checker',
+            'checker_key': 'process-checker',
             'host': 'node1'
         }
 
     def test__result_checker_eq(self, mock_open, mock_ssh):
         ins = result_checker_general.GeneralResultChecker(self.checker_cfg,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "1", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -53,7 +55,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config['condition'] = 'gt'
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "2", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -62,7 +64,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config['condition'] = 'gt_eq'
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "1", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -71,7 +73,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config['condition'] = 'lt'
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "0", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -80,7 +82,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config['condition'] = 'lt_eq'
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "1", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -90,7 +92,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config['condition'] = 'in'
         config['expectedValue'] = "value"
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "value return", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -99,7 +101,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config['condition'] = 'wrong'
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "1", '')
         ins.setup()
         self.assertFalse(ins.verify())
@@ -108,7 +110,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config.pop('parameter')
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (1, "fail", '')
         ins.setup()
-        ins.verify()
\ No newline at end of file
+        ins.verify()
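
The checker tests above walk every comparison the result checker supports (eq, gt, gt_eq, lt, lt_eq, in, plus an unknown key). A hedged sketch of the dispatch they imply follows; the real GeneralResultChecker may implement it differently:

    import operator

    CONDITIONS = {
        'eq': operator.eq,
        'gt': operator.gt,
        'gt_eq': operator.ge,
        'lt': operator.lt,
        'lt_eq': operator.le,
        'in': lambda actual, expected: expected in actual,
    }


    def verify(condition, actual, expected):
        # Unknown conditions verify as False, matching
        # test__result_checker_wrong above.
        try:
            return CONDITIONS[condition](actual, expected)
        except KeyError:
            return False

    assert verify('eq', 1, 1)
    assert verify('in', 'value return', 'value')
    assert not verify('wrong', 1, 1)
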
index bab9d62..593fc77 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.availability.scenario_general
 
+from __future__ import absolute_import
 import mock
 import unittest
 
-from yardstick.benchmark.scenarios.availability.scenario_general import ScenarioGeneral
+from yardstick.benchmark.scenarios.availability.scenario_general import \
+    ScenarioGeneral
 
 
-@mock.patch('yardstick.benchmark.scenarios.availability.scenario_general.Director')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.scenario_general.Director')
 class ScenarioGeneralTestCase(unittest.TestCase):
 
     def setUp(self):
         self.scenario_cfg = {
             'type': "general_scenario",
             'options': {
-                'attackers':[{
+                'attackers': [{
                     'fault_type': "general-attacker",
                     'key': "kill-process"}],
                 'monitors': [{
                     'monitor_type': "general-monitor",
                     'key': "service-status"}],
-                'steps':[
+                'steps': [
                     {
                         'actionKey': "kill-process",
                         'actionType': "attacker",
index 6e58b6e..4ae5089 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.availability.serviceha
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability import serviceha
 
+
 @mock.patch('yardstick.benchmark.scenarios.availability.serviceha.basemonitor')
-@mock.patch('yardstick.benchmark.scenarios.availability.serviceha.baseattacker')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.serviceha.baseattacker')
 class ServicehaTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -48,7 +51,8 @@ class ServicehaTestCase(unittest.TestCase):
         sla = {"outage_time": 5}
         self.args = {"options": options, "sla": sla}
 
-    def test__serviceha_setup_run_successful(self, mock_attacker, mock_monitor):
+    def test__serviceha_setup_run_successful(self, mock_attacker,
+                                             mock_monitor):
         p = serviceha.ServiceHA(self.args, self.ctx)
 
         p.setup()
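
The serviceha hunk above also shows why the test signature lists mock_attacker before mock_monitor: stacked @mock.patch decorators are applied bottom-up, so the decorator nearest the class supplies the first mock argument. A self-contained illustration with stand-in targets:

    import unittest
    from unittest import mock  # the external 'mock' package on Python 2


    @mock.patch('os.path.isdir')   # applied second -> second argument
    @mock.patch('os.path.isfile')  # applied first  -> first argument
    class PatchOrderTestCase(unittest.TestCase):

        def test_order(self, mock_isfile, mock_isdir):
            mock_isfile.return_value = True
            mock_isdir.return_value = False
            self.assertTrue(mock_isfile('x'))
            self.assertFalse(mock_isdir('x'))
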
index f5a6b5f..8a06c75 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.compute.cachestat.CACHEstat
 
+from __future__ import absolute_import
 import mock
 import unittest
 import os
@@ -72,11 +73,19 @@ class CACHEstatTestCase(unittest.TestCase):
         output = self._read_file("cachestat_sample_output.txt")
         mock_ssh.SSH().execute.return_value = (0, output, '')
         result = c._get_cache_usage()
-        expected_result = {"cachestat": {"cache0": {"HITS": "6462",\
- "DIRTIES": "29", "RATIO": "100.0%", "MISSES": "0", "BUFFERS_MB": "1157",\
- "CACHE_MB": "66782"}}, "average": {"HITS": 6462, "DIRTIES": 29, "RATIO": "100.0%",\
- "MISSES": 0, "BUFFERS_MB":1157, "CACHE_MB": 66782}, "max": {"HITS": 6462,\
- "DIRTIES": 29, "RATIO": 100.0, "MISSES": 0, "BUFFERS_MB": 1157, "CACHE_MB": 66782}}
+        expected_result = {"cachestat": {"cache0": {"HITS": "6462",
+                                                    "DIRTIES": "29",
+                                                    "RATIO": "100.0%",
+                                                    "MISSES": "0",
+                                                    "BUFFERS_MB": "1157",
+                                                    "CACHE_MB": "66782"}},
+                           "average": {"HITS": 6462, "DIRTIES": 29,
+                                       "RATIO": "100.0%",
+                                       "MISSES": 0, "BUFFERS_MB": 1157,
+                                       "CACHE_MB": 66782},
+                           "max": {"HITS": 6462,
+                                   "DIRTIES": 29, "RATIO": 100.0, "MISSES": 0,
+                                   "BUFFERS_MB": 1157, "CACHE_MB": 66782}}
 
         self.assertEqual(result, expected_result)
 
index da06b5d..4efa669 100644 (file)
@@ -9,12 +9,15 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.compute.computecapacity.ComputeCapacity
+# Unittest for
+# yardstick.benchmark.scenarios.compute.computecapacity.ComputeCapacity
+
+from __future__ import absolute_import
 
-import mock
 import unittest
-import os
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import computecapacity
 
@@ -53,7 +56,7 @@ class ComputeCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.return_value = (0, SAMPLE_OUTPUT, '')
         c.run(self.result)
-        expected_result = json.loads(SAMPLE_OUTPUT)
+        expected_result = jsonutils.loads(SAMPLE_OUTPUT)
         self.assertEqual(self.result, expected_result)
 
     def test_capacity_unsuccessful_script_error(self, mock_ssh):
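
The json-to-jsonutils swap above recurs throughout this change. A minimal sketch of the difference (the payload is hypothetical, not Yardstick output):

    # jsonutils.loads mirrors json.loads but also accepts bytes on
    # Python 3, so raw SSH output can be passed through unchanged.
    from oslo_serialization import jsonutils

    sample_output = b'{"Cpu_number": "2", "Core_number": "2"}'
    assert jsonutils.loads(sample_output) == {"Cpu_number": "2",
                                              "Core_number": "2"}
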
index 77f2a02..ffa7812 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.compute.lmbench.Lmbench
 
+from __future__ import absolute_import
 import mock
 import unittest
 import os
@@ -208,7 +209,7 @@ class CPULoadTestCase(unittest.TestCase):
                      '%nice': '0.03'}}}
 
         self.assertDictEqual(result, expected_result)
-    
+
     def test_run_proc_stat(self, mock_ssh):
         options = {
             "interval": 1,
index 8074290..04ca2ab 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.compute.cyclictest.Cyclictest
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import cyclictest
 
@@ -85,17 +88,17 @@ class CyclictestTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         c.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(result, expected_result)
 
     def test_cyclictest_successful_sla(self, mock_ssh):
         result = {}
         self.scenario_cfg.update({"sla": {
-                "action": "monitor",
-                "max_min_latency": 100,
-                "max_avg_latency": 500,
-                "max_max_latency": 1000
-            }
+            "action": "monitor",
+            "max_min_latency": 100,
+            "max_avg_latency": 500,
+            "max_max_latency": 1000
+        }
         })
         c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
         mock_ssh.SSH().execute.return_value = (0, '', '')
@@ -106,7 +109,7 @@ class CyclictestTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         c.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(result, expected_result)
 
     def test_cyclictest_unsuccessful_sla_min_latency(self, mock_ssh):
index 6be1163..5b72ef7 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.compute.lmbench.Lmbench
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import lmbench
 
@@ -65,7 +68,8 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = '[{"latency": 4.944, "size": 0.00049}]'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = json.loads('{"latencies": ' + sample_output + "}")
+        expected_result = jsonutils.loads(
+            '{"latencies": ' + sample_output + "}")
         self.assertEqual(self.result, expected_result)
 
     def test_successful_bandwidth_run_no_sla(self, mock_ssh):
@@ -82,7 +86,7 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_successful_latency_run_sla(self, mock_ssh):
@@ -101,7 +105,8 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = '[{"latency": 4.944, "size": 0.00049}]'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = json.loads('{"latencies": ' + sample_output + "}")
+        expected_result = jsonutils.loads(
+            '{"latencies": ' + sample_output + "}")
         self.assertEqual(self.result, expected_result)
 
     def test_successful_bandwidth_run_sla(self, mock_ssh):
@@ -121,7 +126,7 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_unsuccessful_latency_run_sla(self, mock_ssh):
@@ -163,7 +168,7 @@ class LmbenchTestCase(unittest.TestCase):
 
         options = {
             "test_type": "latency_for_cache",
-            "repetition":1,
+            "repetition": 1,
             "warmup": 0
         }
         args = {
@@ -175,7 +180,7 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = "{\"L1cache\": 1.6}"
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_unsuccessful_script_error(self, mock_ssh):
index cdf518d..76625ef 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.compute.memload.MEMLoad
 
+from __future__ import absolute_import
 import mock
 import unittest
 import os
@@ -74,15 +75,17 @@ class MEMLoadTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, output, '')
         result = m._get_mem_usage()
         expected_result = {"max": {"used": 76737332, "cached": 67252400,
-                           "free": 187016644, "shared": 2844,
-                           "total": 263753976, "buffers": 853528},
+                                   "free": 187016644, "shared": 2844,
+                                   "total": 263753976, "buffers": 853528},
                            "average": {"used": 76737332, "cached": 67252400,
-                           "free": 187016644, "shared": 2844,
-                           "total": 263753976, "buffers": 853528},
+                                       "free": 187016644, "shared": 2844,
+                                       "total": 263753976, "buffers": 853528},
                            "free": {"memory0": {"used": "76737332",
-                           "cached": "67252400", "free": "187016644",
-                           "shared": "2844", "total": "263753976",
-                           "buffers": "853528"}}}
+                                                "cached": "67252400",
+                                                "free": "187016644",
+                                                "shared": "2844",
+                                                "total": "263753976",
+                                                "buffers": "853528"}}}
         self.assertEqual(result, expected_result)
 
     def _read_file(self, filename):
@@ -91,4 +94,3 @@ class MEMLoadTestCase(unittest.TestCase):
         with open(output) as f:
             sample_output = f.read()
         return sample_output
-
index 94f5273..a5331ca 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.compute.plugintest.PluginTest
 
-import mock
-import json
+from __future__ import absolute_import
+
 import unittest
-import os
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import plugintest
 
@@ -50,7 +52,7 @@ class PluginTestTestCase(unittest.TestCase):
         sample_output = '{"Test Output": "Hello world!"}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         s.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_sample_unsuccessful_script_error(self, mock_ssh):
index 100102d..82cc938 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.compute.ramspeed.Ramspeed
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import ramspeed
 
@@ -69,12 +72,12 @@ class RamspeedTestCase(unittest.TestCase):
  "Bandwidth(MBps)": 14756.45}, {"Test_type": "INTEGER & WRITING",\
  "Block_size(kb)": 4096, "Bandwidth(MBps)": 14604.44}, {"Test_type":\
  "INTEGER & WRITING", "Block_size(kb)": 8192, "Bandwidth(MBps)": 14159.86},\
- {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384, "Bandwidth(MBps)":\
14128.94}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 32768,\
- "Bandwidth(MBps)": 8340.85}]}'
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384,\
"Bandwidth(MBps)": 14128.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_successful_run_sla(self, mock_ssh):
@@ -105,12 +108,12 @@ class RamspeedTestCase(unittest.TestCase):
  "Bandwidth(MBps)": 14756.45}, {"Test_type": "INTEGER & WRITING",\
  "Block_size(kb)": 4096, "Bandwidth(MBps)": 14604.44}, {"Test_type":\
  "INTEGER & WRITING", "Block_size(kb)": 8192, "Bandwidth(MBps)": 14159.86},\
- {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384, "Bandwidth(MBps)":\
14128.94}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 32768,\
- "Bandwidth(MBps)": 8340.85}]}'
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384,\
"Bandwidth(MBps)": 14128.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_unsuccessful_run_sla(self, mock_ssh):
@@ -176,7 +179,7 @@ class RamspeedTestCase(unittest.TestCase):
  "Bandwidth(MBps)": 9401.58}]}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_mem_successful_run_sla(self, mock_ssh):
@@ -197,7 +200,7 @@ class RamspeedTestCase(unittest.TestCase):
  "Bandwidth(MBps)": 9401.58}]}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_mem_unsuccessful_run_sla(self, mock_ssh):
index 0935bca..747bda1 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.compute.unixbench.Unixbench
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import unixbench
 
@@ -57,7 +60,7 @@ class UnixbenchTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         u.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(result, expected_result)
 
     def test_unixbench_successful_in_quiet_mode(self, mock_ssh):
@@ -65,7 +68,7 @@ class UnixbenchTestCase(unittest.TestCase):
         options = {
             "test_type": 'dhry2reg',
             "run_mode": 'quiet',
-            "copies":1
+            "copies": 1
         }
         args = {
             "options": options,
@@ -79,10 +82,9 @@ class UnixbenchTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         u.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(result, expected_result)
 
-
     def test_unixbench_successful_sla(self, mock_ssh):
 
         options = {
@@ -106,7 +108,7 @@ class UnixbenchTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         u.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(result, expected_result)
 
     def test_unixbench_unsuccessful_sla_single_score(self, mock_ssh):
index 1f9b729..560675d 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.dummy.dummy
 
+from __future__ import absolute_import
 import unittest
 
 from yardstick.benchmark.scenarios.dummy import dummy
index 91f800b..ea53cb9 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
 
-import mock
-import unittest
+from __future__ import absolute_import
+
 import os
-import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.networking import iperf3
 
@@ -78,7 +81,7 @@ class IperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output(self.output_name_tcp)
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
@@ -97,7 +100,7 @@ class IperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output(self.output_name_tcp)
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
@@ -119,8 +122,7 @@ class IperfTestCase(unittest.TestCase):
         self.assertRaises(AssertionError, p.run, result)
 
     def test_iperf_successful_sla_jitter(self, mock_ssh):
-
-        options = {"udp":"udp","bandwidth":"20m"}
+        options = {"udp": "udp", "bandwidth": "20m"}
         args = {
             'options': options,
             'sla': {'jitter': 10}
@@ -133,13 +135,12 @@ class IperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output(self.output_name_udp)
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
     def test_iperf_unsuccessful_sla_jitter(self, mock_ssh):
-
-        options = {"udp":"udp","bandwidth":"20m"}
+        options = {"udp": "udp", "bandwidth": "20m"}
         args = {
             'options': options,
             'sla': {'jitter': 0.0001}
@@ -167,7 +168,7 @@ class IperfTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, p.run, result)
 
-    def _read_sample_output(self,filename):
+    def _read_sample_output(self, filename):
         curr_path = os.path.dirname(os.path.abspath(__file__))
         output = os.path.join(curr_path, filename)
         with open(output) as f:
index 3f22473..1b5dd64 100755 (executable)
 
 # Unittest for yardstick.benchmark.scenarios.networking.netperf.Netperf
 
-import mock
-import unittest
+from __future__ import absolute_import
+
 import os
-import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.networking import netperf
 
@@ -59,7 +62,7 @@ class NetperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output()
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
@@ -78,7 +81,7 @@ class NetperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output()
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
index 1c39b29..29a7edf 100755 (executable)
 # Unittest for
 # yardstick.benchmark.scenarios.networking.netperf_node.NetperfNode
 
-import mock
-import unittest
+from __future__ import absolute_import
+
 import os
-import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.networking import netperf_node
 
@@ -59,7 +62,7 @@ class NetperfNodeTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output()
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
@@ -78,7 +81,7 @@ class NetperfNodeTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output()
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
index eb6626f..7c04f5e 100644 (file)
@@ -9,8 +9,10 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.networking.netutilization.NetUtilization
+# Unittest for
+# yardstick.benchmark.scenarios.networking.netutilization.NetUtilization
 
+from __future__ import absolute_import
 import mock
 import unittest
 import os
index e42832f..3f8d84e 100644 (file)
@@ -9,27 +9,32 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.networking.networkcapacity.NetworkCapacity
+# Unittest for
+# yardstick.benchmark.scenarios.networking.networkcapacity.NetworkCapacity
+
+from __future__ import absolute_import
 
-import mock
 import unittest
-import os
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.networking import networkcapacity
 
-SAMPLE_OUTPUT = '{"Number of connections":"308","Number of frames received": "166503"}'
+SAMPLE_OUTPUT = \
+    '{"Number of connections":"308","Number of frames received": "166503"}'
+
 
 @mock.patch('yardstick.benchmark.scenarios.networking.networkcapacity.ssh')
 class NetworkCapacityTestCase(unittest.TestCase):
 
     def setUp(self):
         self.ctx = {
-                'host': {
-                    'ip': '172.16.0.137',
-                    'user': 'cirros',
-                    'password': "root"
-                },
+            'host': {
+                'ip': '172.16.0.137',
+                'user': 'cirros',
+                'password': "root"
+            },
         }
 
         self.result = {}
@@ -46,7 +51,7 @@ class NetworkCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.return_value = (0, SAMPLE_OUTPUT, '')
         c.run(self.result)
-        expected_result = json.loads(SAMPLE_OUTPUT)
+        expected_result = jsonutils.loads(SAMPLE_OUTPUT)
         self.assertEqual(self.result, expected_result)
 
     def test_capacity_unsuccessful_script_error(self, mock_ssh):
index 8d35b84..5535a79 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.networking.ping.Ping
 
+from __future__ import absolute_import
 import mock
 import unittest
 
@@ -37,7 +38,7 @@ class PingTestCase(unittest.TestCase):
         args = {
             'options': {'packetsize': 200},
             'target': 'ares.demo'
-            }
+        }
         result = {}
 
         p = ping.Ping(args, self.ctx)
@@ -53,7 +54,7 @@ class PingTestCase(unittest.TestCase):
             'options': {'packetsize': 200},
             'sla': {'max_rtt': 150},
             'target': 'ares.demo'
-            }
+        }
         result = {}
 
         p = ping.Ping(args, self.ctx)
index 0b8fba2..e22cacb 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.networking.ping.Ping
 
+from __future__ import absolute_import
 import mock
 import unittest
 
@@ -21,37 +22,37 @@ class PingTestCase(unittest.TestCase):
 
     def setUp(self):
         self.ctx = {
-            'nodes':{
-            'host1': {
-                'ip': '172.16.0.137',
-                'user': 'cirros',
-                'role': "Controller",
-                'key_filename': "mykey.key",
-                'password': "root"
+            'nodes': {
+                'host1': {
+                    'ip': '172.16.0.137',
+                    'user': 'cirros',
+                    'role': "Controller",
+                    'key_filename': "mykey.key",
+                    'password': "root"
                 },
-            'host2': {
-                "ip": "172.16.0.138",
-                "key_filename": "/root/.ssh/id_rsa",
-                "role": "Compute",
-                "name": "node3.IPV6",
-                "user": "root"
+                'host2': {
+                    "ip": "172.16.0.138",
+                    "key_filename": "/root/.ssh/id_rsa",
+                    "role": "Compute",
+                    "name": "node3.IPV6",
+                    "user": "root"
                 },
             }
         }
 
     def test_get_controller_node(self):
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
             'sla': {'max_rtt': 50}
         }
         p = ping6.Ping6(args, self.ctx)
-        controller_node = p._get_controller_node(['host1','host2'])
+        controller_node = p._get_controller_node(['host1', 'host2'])
         self.assertEqual(controller_node, 'host1')
 
     @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
     def test_ping_successful_setup(self, mock_ssh):
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
             'sla': {'max_rtt': 50}
         }
         p = ping6.Ping6(args, self.ctx)
@@ -63,58 +64,57 @@ class PingTestCase(unittest.TestCase):
     @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
     def test_ping_successful_no_sla(self, mock_ssh):
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
 
         }
         result = {}
 
         p = ping6.Ping6(args, self.ctx)
         p.client = mock_ssh.SSH()
-        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(0, 100, '')]
+        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
         p.run(result)
         self.assertEqual(result, {'rtt': 100.0})
 
     @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
     def test_ping_successful_sla(self, mock_ssh):
-
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
             'sla': {'max_rtt': 150}
         }
         result = {}
 
         p = ping6.Ping6(args, self.ctx)
         p.client = mock_ssh.SSH()
-        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(0, 100, '')]
+        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
         p.run(result)
         self.assertEqual(result, {'rtt': 100.0})
 
     @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
     def test_ping_unsuccessful_sla(self, mock_ssh):
-
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
             'sla': {'max_rtt': 50}
         }
         result = {}
 
         p = ping6.Ping6(args, self.ctx)
         p.client = mock_ssh.SSH()
-        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(0, 100, '')]
+        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
         self.assertRaises(AssertionError, p.run, result)
 
     @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
     def test_ping_unsuccessful_script_error(self, mock_ssh):
 
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
             'sla': {'max_rtt': 150}
         }
         result = {}
 
         p = ping6.Ping6(args, self.ctx)
         p.client = mock_ssh.SSH()
-        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(1, '', 'FOOBAR')]
+        mock_ssh.SSH().execute.side_effect = [
+            (0, 'host1', ''), (1, '', 'FOOBAR')]
         self.assertRaises(RuntimeError, p.run, result)
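
The ping6 tests above script a two-step SSH exchange by assigning a list to side_effect: each call to the mocked execute() consumes the next tuple. In isolation:

    from unittest import mock  # the external 'mock' package on Python 2

    ssh = mock.Mock()
    ssh.execute.side_effect = [(0, 'host1', ''), (0, 100, '')]

    assert ssh.execute('find controller node') == (0, 'host1', '')
    assert ssh.execute('run ping6') == (0, 100, '')
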
 
 
index 13a4c1b..f50fa10 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.pktgen.Pktgen
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.networking import pktgen
 
@@ -133,7 +136,7 @@ class PktgenTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         p.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         expected_result["packets_received"] = 149300
         self.assertEqual(result, expected_result)
 
@@ -159,7 +162,7 @@ class PktgenTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         p.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         expected_result["packets_received"] = 149300
         self.assertEqual(result, expected_result)
 
index afc87ab..7ba4db9 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.pktgen.Pktgen
 
-import mock
+from __future__ import absolute_import
 import unittest
-import json
+
+import mock
 
 from yardstick.benchmark.scenarios.networking import pktgen_dpdk
 
+
 @mock.patch('yardstick.benchmark.scenarios.networking.pktgen_dpdk.ssh')
 class PktgenDPDKLatencyTestCase(unittest.TestCase):
 
@@ -116,7 +118,11 @@ class PktgenDPDKLatencyTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         p.run(result)
-        self.assertEqual(result, {"avg_latency": 132})
+        # with Python 3 we get a float, likely due to true division:
+        # AssertionError: {'avg_latency': 132.33333333333334} != {
+        # 'avg_latency': 132}
+        delta = result['avg_latency'] - 132
+        self.assertLessEqual(delta, 1)
 
     def test_pktgen_dpdk_successful_sla(self, mock_ssh):
 
@@ -169,5 +175,6 @@ class PktgenDPDKLatencyTestCase(unittest.TestCase):
 def main():
     unittest.main()
 
+
 if __name__ == '__main__':
     main()
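
The relaxed avg_latency assertion above works around the Python 2/3 division split. A quick illustration with hypothetical samples:

    # Python 3's '/' between ints performs true division and yields a
    # float, where Python 2 truncated; a tolerance check sidesteps that.
    latencies = [132, 132, 133]
    avg = sum(latencies) / len(latencies)
    assert abs(avg - 132) <= 1                       # 132.33... on py3
    assert sum(latencies) // len(latencies) == 132   # '//' floors on both
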
index 618efc3..224a43b 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.networking.sfc
 
+from __future__ import absolute_import
 import mock
 import unittest
 
@@ -27,7 +28,7 @@ class SfcTestCase(unittest.TestCase):
         context_cfg['target'] = dict()
         context_cfg['target']['user'] = 'root'
         context_cfg['target']['password'] = 'opnfv'
-        context_cfg['target']['ip'] = '127.0.0.1' 
+        context_cfg['target']['ip'] = '127.0.0.1'
 
         # Used in Sfc.run()
         context_cfg['host'] = dict()
@@ -58,7 +59,8 @@ class SfcTestCase(unittest.TestCase):
     @mock.patch('yardstick.benchmark.scenarios.networking.sfc.subprocess')
     def test2_run_for_success(self, mock_subprocess, mock_openstack, mock_ssh):
         # Mock a successful SSH in Sfc.setup() and Sfc.run()
-        mock_ssh.SSH().execute.return_value = (0, 'vxlan_tool.py', 'succeeded timed out')
+        mock_ssh.SSH().execute.return_value = (
+            0, 'vxlan_tool.py', 'succeeded timed out')
         mock_openstack.get_an_IP.return_value = "127.0.0.1"
         mock_subprocess.call.return_value = 'mocked!'
 
index 25d5221..76d2afd 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.vsperf.Vsperf
 
-import mock
+from __future__ import absolute_import
+try:
+    from unittest import mock
+except ImportError:
+    import mock
 import unittest
-import os
-import subprocess
 
 from yardstick.benchmark.scenarios.networking import vsperf
 
 
 @mock.patch('yardstick.benchmark.scenarios.networking.vsperf.subprocess')
 @mock.patch('yardstick.benchmark.scenarios.networking.vsperf.ssh')
-@mock.patch("__builtin__.open", return_value=None)
+@mock.patch("yardstick.benchmark.scenarios.networking.vsperf.open",
+            mock.mock_open())
 class VsperfTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -58,7 +61,7 @@ class VsperfTestCase(unittest.TestCase):
             }
         }
 
-    def test_vsperf_setup(self, mock_open, mock_ssh, mock_subprocess):
+    def test_vsperf_setup(self, mock_ssh, mock_subprocess):
         p = vsperf.Vsperf(self.args, self.ctx)
         mock_ssh.SSH().execute.return_value = (0, '', '')
         mock_subprocess.call().execute.return_value = None
@@ -67,7 +70,7 @@ class VsperfTestCase(unittest.TestCase):
         self.assertIsNotNone(p.client)
         self.assertEqual(p.setup_done, True)
 
-    def test_vsperf_teardown(self, mock_open, mock_ssh, mock_subprocess):
+    def test_vsperf_teardown(self, mock_ssh, mock_subprocess):
         p = vsperf.Vsperf(self.args, self.ctx)
 
         # setup() specific mocks
@@ -81,7 +84,7 @@ class VsperfTestCase(unittest.TestCase):
         p.teardown()
         self.assertEqual(p.setup_done, False)
 
-    def test_vsperf_run_ok(self, mock_open, mock_ssh, mock_subprocess):
+    def test_vsperf_run_ok(self, mock_ssh, mock_subprocess):
         p = vsperf.Vsperf(self.args, self.ctx)
 
         # setup() specific mocks
@@ -90,14 +93,16 @@ class VsperfTestCase(unittest.TestCase):
 
         # run() specific mocks
         mock_ssh.SSH().execute.return_value = (0, '', '')
-        mock_ssh.SSH().execute.return_value = (0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+        mock_ssh.SSH().execute.return_value = (
+            0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
 
         result = {}
         p.run(result)
 
         self.assertEqual(result['throughput_rx_fps'], '14797660.000')
 
-    def test_vsperf_run_falied_vsperf_execution(self, mock_open, mock_ssh, mock_subprocess):
+    def test_vsperf_run_falied_vsperf_execution(self, mock_ssh,
+                                                mock_subprocess):
         p = vsperf.Vsperf(self.args, self.ctx)
 
         # setup() specific mocks
@@ -110,7 +115,7 @@ class VsperfTestCase(unittest.TestCase):
         result = {}
         self.assertRaises(RuntimeError, p.run, result)
 
-    def test_vsperf_run_falied_csv_report(self, mock_open, mock_ssh, mock_subprocess):
+    def test_vsperf_run_falied_csv_report(self, mock_ssh, mock_subprocess):
         p = vsperf.Vsperf(self.args, self.ctx)
 
         # setup() specific mocks
@@ -128,5 +133,6 @@ class VsperfTestCase(unittest.TestCase):
 def main():
     unittest.main()
 
+
 if __name__ == '__main__':
     main()
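
The vsperf hunk replaces the Python-2-only '__builtin__.open' patch with one against the module under test, and passes mock.mock_open() as the replacement so patch stops injecting an extra mock argument. A sketch of the same pattern with illustrative names:

    from unittest import mock


    def read_config(path):  # stand-in for the code under test
        with open(path) as f:
            return f.read()


    # Patch 'open' where it is looked up; since Python 3.5 mock creates
    # the attribute for builtins automatically, no create=True needed.
    @mock.patch('__main__.open', mock.mock_open(read_data='key=value'))
    def test_read_config():
        assert read_config('/etc/fake.conf') == 'key=value'

    test_read_config()  # passes without touching the filesystem
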
index 418dd39..07b3da9 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
 
-import mock
+from __future__ import absolute_import
 import unittest
 
-from yardstick.benchmark.scenarios.networking import vtc_instantiation_validation
+from yardstick.benchmark.scenarios.networking import \
+    vtc_instantiation_validation
 
 
 class VtcInstantiationValidationTestCase(unittest.TestCase):
@@ -34,7 +35,8 @@ class VtcInstantiationValidationTestCase(unittest.TestCase):
         scenario['options']['vlan_sender'] = ''
         scenario['options']['vlan_receiver'] = ''
 
-        self.vt = vtc_instantiation_validation.VtcInstantiationValidation(scenario, '')
+        self.vt = vtc_instantiation_validation.VtcInstantiationValidation(
+            scenario, '')
 
     def test_run_for_success(self):
         result = {}
index e0a4624..34f3610 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
 
-import mock
+from __future__ import absolute_import
 import unittest
 
-from yardstick.benchmark.scenarios.networking import vtc_instantiation_validation_noisy
+from yardstick.benchmark.scenarios.networking import \
+    vtc_instantiation_validation_noisy
 
 
 class VtcInstantiationValidationiNoisyTestCase(unittest.TestCase):
@@ -37,7 +38,9 @@ class VtcInstantiationValidationiNoisyTestCase(unittest.TestCase):
         scenario['options']['amount_of_ram'] = '1G'
         scenario['options']['number_of_cores'] = '1'
 
-        self.vt = vtc_instantiation_validation_noisy.VtcInstantiationValidationNoisy(scenario, '')
+        self.vt = \
+            vtc_instantiation_validation_noisy.VtcInstantiationValidationNoisy(
+                scenario, '')
 
     def test_run_for_success(self):
         result = {}
index ecdf555..a73fad5 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
 
+from __future__ import absolute_import
 import mock
 import unittest
 
index 98957b1..e1b162c 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
 
+from __future__ import absolute_import
 import mock
 import unittest
 
index d11a6d5..59b98a0 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.parser.Parser
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.parser import parser
 
+
 @mock.patch('yardstick.benchmark.scenarios.parser.parser.subprocess')
 class ParserTestCase(unittest.TestCase):
 
@@ -32,8 +36,8 @@ class ParserTestCase(unittest.TestCase):
 
     def test_parser_successful(self, mock_subprocess):
         args = {
-            'options': {'yangfile':'/root/yardstick/samples/yang.yaml',
-            'toscafile':'/root/yardstick/samples/tosca.yaml'},
+            'options': {'yangfile': '/root/yardstick/samples/yang.yaml',
+                        'toscafile': '/root/yardstick/samples/tosca.yaml'},
         }
         p = parser.Parser(args, {})
         result = {}
@@ -41,7 +45,7 @@ class ParserTestCase(unittest.TestCase):
         sample_output = '{"yangtotosca": "success"}'
 
         p.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
 
     def test_parser_teardown_successful(self, mock_subprocess):
 
index 153d150..603ff38 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.storage.fio.Fio
 
-import mock
-import unittest
-import json
+from __future__ import absolute_import
+
 import os
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.storage import fio
 
@@ -74,7 +77,7 @@ class FioTestCase(unittest.TestCase):
         expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
             '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
             '"write_lat": 233.55}'
-        expected_result = json.loads(expected_result)
+        expected_result = jsonutils.loads(expected_result)
         self.assertEqual(result, expected_result)
 
     def test_fio_successful_read_no_sla(self, mock_ssh):
@@ -98,7 +101,7 @@ class FioTestCase(unittest.TestCase):
 
         expected_result = '{"read_bw": 36113, "read_iops": 9028,' \
             '"read_lat": 108.7}'
-        expected_result = json.loads(expected_result)
+        expected_result = jsonutils.loads(expected_result)
         self.assertEqual(result, expected_result)
 
     def test_fio_successful_write_no_sla(self, mock_ssh):
@@ -122,7 +125,7 @@ class FioTestCase(unittest.TestCase):
 
         expected_result = '{"write_bw": 35107, "write_iops": 8776,'\
             '"write_lat": 111.74}'
-        expected_result = json.loads(expected_result)
+        expected_result = jsonutils.loads(expected_result)
         self.assertEqual(result, expected_result)
 
     def test_fio_successful_lat_sla(self, mock_ssh):
@@ -150,10 +153,9 @@ class FioTestCase(unittest.TestCase):
         expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
             '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
             '"write_lat": 233.55}'
-        expected_result = json.loads(expected_result)
+        expected_result = jsonutils.loads(expected_result)
         self.assertEqual(result, expected_result)
 
-
     def test_fio_unsuccessful_lat_sla(self, mock_ssh):
 
         options = {
@@ -200,7 +202,7 @@ class FioTestCase(unittest.TestCase):
         expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
             '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
             '"write_lat": 233.55}'
-        expected_result = json.loads(expected_result)
+        expected_result = jsonutils.loads(expected_result)
         self.assertEqual(result, expected_result)
 
     def test_fio_unsuccessful_bw_iops_sla(self, mock_ssh):
@@ -248,8 +250,10 @@ class FioTestCase(unittest.TestCase):
             sample_output = f.read()
         return sample_output
 
+
 def main():
     unittest.main()
 
+
 if __name__ == '__main__':
     main()
index ace0ca3..6fb5f56 100644 (file)
@@ -9,35 +9,41 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.storage.storagecapacity.StorageCapacity
+# Unittest for
+# yardstick.benchmark.scenarios.storage.storagecapacity.StorageCapacity
+
+from __future__ import absolute_import
 
-import mock
 import unittest
-import os
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.storage import storagecapacity
 
-DISK_SIZE_SAMPLE_OUTPUT = '{"Numberf of devides": "2", "Total disk size in bytes": "1024000000"}'
+DISK_SIZE_SAMPLE_OUTPUT = \
+    '{"Numberf of devides": "2", "Total disk size in bytes": "1024000000"}'
 BLOCK_SIZE_SAMPLE_OUTPUT = '{"/dev/sda": 1024, "/dev/sdb": 4096}'
 DISK_UTIL_RAW_OUTPUT = "vda 10.00\nvda 0.00"
-DISK_UTIL_SAMPLE_OUTPUT = '{"vda": {"avg_util": 5.0, "max_util": 10.0, "min_util": 0.0}}'
+DISK_UTIL_SAMPLE_OUTPUT = \
+    '{"vda": {"avg_util": 5.0, "max_util": 10.0, "min_util": 0.0}}'
+
 
 @mock.patch('yardstick.benchmark.scenarios.storage.storagecapacity.ssh')
 class StorageCapacityTestCase(unittest.TestCase):
 
     def setUp(self):
         self.scn = {
-               "options": {
-                   'test_type': 'disk_size'
-               }
+            "options": {
+                'test_type': 'disk_size'
+            }
         }
         self.ctx = {
-                "host": {
-                    'ip': '172.16.0.137',
-                    'user': 'cirros',
-                    'password': "root"
-                }
+            "host": {
+                'ip': '172.16.0.137',
+                'user': 'cirros',
+                'password': "root"
+            }
         }
         self.result = {}
 
@@ -54,7 +60,8 @@ class StorageCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.return_value = (0, DISK_SIZE_SAMPLE_OUTPUT, '')
         c.run(self.result)
-        expected_result = json.loads(DISK_SIZE_SAMPLE_OUTPUT)
+        expected_result = jsonutils.loads(
+            DISK_SIZE_SAMPLE_OUTPUT)
         self.assertEqual(self.result, expected_result)
 
     def test_capacity_block_size_successful(self, mock_ssh):
@@ -67,7 +74,8 @@ class StorageCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.return_value = (0, BLOCK_SIZE_SAMPLE_OUTPUT, '')
         c.run(self.result)
-        expected_result = json.loads(BLOCK_SIZE_SAMPLE_OUTPUT)
+        expected_result = jsonutils.loads(
+            BLOCK_SIZE_SAMPLE_OUTPUT)
         self.assertEqual(self.result, expected_result)
 
     def test_capacity_disk_utilization_successful(self, mock_ssh):
@@ -82,7 +90,8 @@ class StorageCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.return_value = (0, DISK_UTIL_RAW_OUTPUT, '')
         c.run(self.result)
-        expected_result = json.loads(DISK_UTIL_SAMPLE_OUTPUT)
+        expected_result = jsonutils.loads(
+            DISK_UTIL_SAMPLE_OUTPUT)
         self.assertEqual(self.result, expected_result)
 
     def test_capacity_unsuccessful_script_error(self, mock_ssh):
@@ -91,6 +100,7 @@ class StorageCapacityTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, c.run, self.result)
 
+
 def main():
     unittest.main()
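
A hedged sketch of the aggregation the disk-utilization test expects: raw "device value" lines are grouped per device and reduced to avg/max/min (the real scenario code may differ in detail):

    from collections import defaultdict

    raw = "vda 10.00\nvda 0.00"
    samples = defaultdict(list)
    for line in raw.splitlines():
        dev, util = line.split()
        samples[dev].append(float(util))

    result = {dev: {"avg_util": sum(v) / len(v),
                    "max_util": max(v),
                    "min_util": min(v)}
              for dev, v in samples.items()}
    assert result == {"vda": {"avg_util": 5.0,
                              "max_util": 10.0,
                              "min_util": 0.0}}
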
 
index 8fc97d2..adc9d47 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.storage.storperf.StorPerf
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import requests
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.storage import storperf
 
 
 def mocked_requests_config_post(*args, **kwargs):
     class MockResponseConfigPost:
+
         def __init__(self, json_data, status_code):
             self.content = json_data
             self.status_code = status_code
 
-    return MockResponseConfigPost('{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622","stack_created": "false"}', 200)
+    return MockResponseConfigPost(
+        '{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622",'
+        '"stack_created": "false"}',
+        200)
 
 
 def mocked_requests_config_get(*args, **kwargs):
     class MockResponseConfigGet:
+
         def __init__(self, json_data, status_code):
             self.content = json_data
             self.status_code = status_code
 
-    return MockResponseConfigGet('{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622","stack_created": "true"}', 200)
+    return MockResponseConfigGet(
+        '{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622",'
+        '"stack_created": "true"}',
+        200)
 
 
 def mocked_requests_job_get(*args, **kwargs):
     class MockResponseJobGet:
+
         def __init__(self, json_data, status_code):
             self.content = json_data
             self.status_code = status_code
 
-    return MockResponseJobGet('{"status": "completed", "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}', 200)
+    return MockResponseJobGet(
+        '{"status": "completed",\
+         "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}',
+        200)
 
 
 def mocked_requests_job_post(*args, **kwargs):
     class MockResponseJobPost:
+
         def __init__(self, json_data, status_code):
             self.content = json_data
             self.status_code = status_code
@@ -58,6 +73,7 @@ def mocked_requests_job_post(*args, **kwargs):
 
 def mocked_requests_job_delete(*args, **kwargs):
     class MockResponseJobDelete:
+
         def __init__(self, json_data, status_code):
             self.content = json_data
             self.status_code = status_code
@@ -67,6 +83,7 @@ def mocked_requests_job_delete(*args, **kwargs):
 
 def mocked_requests_delete(*args, **kwargs):
     class MockResponseDelete:
+
         def __init__(self, json_data, status_code):
             self.json_data = json_data
             self.status_code = status_code
@@ -76,6 +93,7 @@ def mocked_requests_delete(*args, **kwargs):
 
 def mocked_requests_delete_failed(*args, **kwargs):
     class MockResponseDeleteFailed:
+
         def __init__(self, json_data, status_code):
             self.json_data = json_data
             self.status_code = status_code
@@ -130,8 +148,9 @@ class StorPerfTestCase(unittest.TestCase):
                 side_effect=mocked_requests_job_post)
     @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.get',
                 side_effect=mocked_requests_job_get)
-    @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.delete',
-                side_effect=mocked_requests_job_delete)
+    @mock.patch(
+        'yardstick.benchmark.scenarios.storage.storperf.requests.delete',
+        side_effect=mocked_requests_job_delete)
     def test_successful_run(self, mock_post, mock_get, mock_delete):
         options = {
             "agent_count": 8,
@@ -152,15 +171,18 @@ class StorPerfTestCase(unittest.TestCase):
         s = storperf.StorPerf(args, self.ctx)
         s.setup_done = True
 
-        sample_output = '{"status": "completed", "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}'
+        sample_output = '{"status": "completed",\
+         "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}'
 
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
 
         s.run(self.result)
 
         self.assertEqual(self.result, expected_result)
 
-    @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.delete', side_effect=mocked_requests_delete)
+    @mock.patch(
+        'yardstick.benchmark.scenarios.storage.storperf.requests.delete',
+        side_effect=mocked_requests_delete)
     def test_successful_teardown(self, mock_delete):
         options = {
             "agent_count": 8,
@@ -184,7 +206,9 @@ class StorPerfTestCase(unittest.TestCase):
 
         self.assertFalse(s.setup_done)
 
-    @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.delete', side_effect=mocked_requests_delete_failed)
+    @mock.patch(
+        'yardstick.benchmark.scenarios.storage.storperf.requests.delete',
+        side_effect=mocked_requests_delete_failed)
     def test_failed_teardown(self, mock_delete):
         options = {
             "agent_count": 8,
index af1ab80..c6e0e1d 100644 (file)
@@ -6,19 +6,59 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import unittest
 import mock
+import uuid
 
 from yardstick.cmd.commands.env import EnvCommand
 
 
 class EnvCommandTestCase(unittest.TestCase):
 
-    @mock.patch('yardstick.cmd.commands.env.HttpClient')
-    def test_do_influxdb(self, mock_http_client):
+    @mock.patch('yardstick.cmd.commands.env.EnvCommand._start_async_task')
+    @mock.patch('yardstick.cmd.commands.env.EnvCommand._check_status')
+    def test_do_influxdb(self, check_status_mock, start_async_task_mock):
         env = EnvCommand()
         env.do_influxdb({})
-        self.assertTrue(mock_http_client().post.called)
+        self.assertTrue(start_async_task_mock.called)
+        self.assertTrue(check_status_mock.called)
+
+    @mock.patch('yardstick.cmd.commands.env.EnvCommand._start_async_task')
+    @mock.patch('yardstick.cmd.commands.env.EnvCommand._check_status')
+    def test_do_grafana(self, check_status_mock, start_async_task_mock):
+        env = EnvCommand()
+        env.do_grafana({})
+        self.assertTrue(start_async_task_mock.called)
+        self.assertTrue(check_status_mock.called)
+
+    @mock.patch('yardstick.cmd.commands.env.EnvCommand._start_async_task')
+    @mock.patch('yardstick.cmd.commands.env.EnvCommand._check_status')
+    def test_do_prepare(self, check_status_mock, start_async_task_mock):
+        env = EnvCommand()
+        env.do_prepare({})
+        self.assertTrue(start_async_task_mock.called)
+        self.assertTrue(check_status_mock.called)
+
+    @mock.patch('yardstick.cmd.commands.env.HttpClient.post')
+    def test_start_async_task(self, post_mock):
+        data = {'action': 'createGrafanaContainer'}
+        EnvCommand()._start_async_task(data)
+        self.assertTrue(post_mock.called)
+
+    @mock.patch('yardstick.cmd.commands.env.HttpClient.get')
+    @mock.patch('yardstick.cmd.commands.env.EnvCommand._print_status')
+    def test_check_status(self, print_mock, get_mock):
+        task_id = str(uuid.uuid4())
+        get_mock.return_value = {'status': 2, 'result': 'error'}
+        status = EnvCommand()._check_status(task_id, 'hello world')
+        self.assertEqual(status, 2)
+
+    def test_print_status(self):
+        try:
+            EnvCommand()._print_status('hello', 'word')
+        except Exception as e:
+            self.assertIsInstance(e, IndexError)
 
 
 def main():
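
A hedged sketch of the polling these env-command tests imply: _start_async_task posts an action, then _check_status polls an async-task endpoint until it reports done or error. The endpoint path and the status codes are assumptions read off the test expectations, not a confirmed API:

    import time


    def check_status(client, task_id, retries=60):
        # hypothetical endpoint; status codes assumed: 0 running,
        # 1 done, 2 error (the test stubs {'status': 2} and expects 2)
        url = '/api/v2/yardstick/asynctask?task_id=%s' % task_id
        for _ in range(retries):
            data = client.get(url)
            if data['status'] in (1, 2):
                return data['status']
            time.sleep(1)
        return 2
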
index b39dc23..eb09d1a 100644 (file)
@@ -6,9 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
 import unittest
+
 import mock
-import json
+from oslo_serialization import jsonutils
 
 from yardstick.common import httpClient
 
@@ -21,8 +24,15 @@ class HttpClientTestCase(unittest.TestCase):
         data = {'hello': 'world'}
         headers = {'Content-Type': 'application/json'}
         httpClient.HttpClient().post(url, data)
-        mock_requests.post.assert_called_with(url, data=json.dumps(data),
-                                              headers=headers)
+        mock_requests.post.assert_called_with(
+            url, data=jsonutils.dump_as_bytes(data),
+            headers=headers)
+
+    @mock.patch('yardstick.common.httpClient.requests')
+    def test_get(self, mock_requests):
+        url = 'http://localhost:5000/hello'
+        httpClient.HttpClient().get(url)
+        mock_requests.get.assert_called_with(url)
 
 
 def main():
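
Why the POST test now expects dump_as_bytes: requests wants a str or bytes body, and jsonutils.dump_as_bytes encodes to bytes on both Python 2 and 3, whereas json.dumps returns text:

    from oslo_serialization import jsonutils

    body = jsonutils.dump_as_bytes({'hello': 'world'})
    assert body == b'{"hello": "world"}'
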
index ef619aa..d610e18 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.common.openstack_utils
 
+from __future__ import absolute_import
 import unittest
 import mock
 
index 0e1a1a5..2a7d80b 100644 (file)
@@ -12,6 +12,7 @@
 
 # yardstick: this file is copied from python-heatclient and slightly modified
 
+from __future__ import absolute_import
 import mock
 import unittest
 import yaml
index a64c1f1..267c713 100644 (file)
@@ -9,6 +9,7 @@
 
 # Unittest for yardstick.common.utils
 
+from __future__ import absolute_import
 import os
 import mock
 import unittest
@@ -17,9 +18,10 @@ from yardstick.common import utils
 
 
 class IterSubclassesTestCase(unittest.TestCase):
-# Disclaimer: this class is a modified copy from
-# rally/tests/unit/common/plugin/test_discover.py
-# Copyright 2015: Mirantis Inc.
+    # Disclaimer: this class is a modified copy from
+    # rally/tests/unit/common/plugin/test_discover.py
+    # Copyright 2015: Mirantis Inc.
+
     def test_itersubclasses(self):
         class A(object):
             pass
index 5553c86..b84389e 100644 (file)
 
 # Unittest for yardstick.dispatcher.influxdb
 
-import mock
+from __future__ import absolute_import
 import unittest
 
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
 from yardstick.dispatcher.influxdb import InfluxdbDispatcher
 
+
 class InfluxdbDispatcherTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -24,7 +30,9 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
             "context_cfg": {
                 "host": {
                     "ip": "10.229.43.154",
-                    "key_filename": "/root/yardstick/yardstick/resources/files/yardstick_key",
+                    "key_filename":
+                        "/root/yardstick/yardstick/resources/files"
+                        "/yardstick_key",
                     "name": "kvm.LF",
                     "user": "root"
                 },
@@ -35,7 +43,8 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
             "scenario_cfg": {
                 "runner": {
                     "interval": 1,
-                    "object": "yardstick.benchmark.scenarios.networking.ping.Ping",
+                    "object": "yardstick.benchmark.scenarios.networking.ping"
+                              ".Ping",
                     "output_filename": "/tmp/yardstick.out",
                     "runner_id": 8921,
                     "duration": 10,
@@ -63,7 +72,7 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
             },
             "runner_id": 8921
         }
-        self.data3 ={
+        self.data3 = {
             "benchmark": {
                 "data": {
                     "mpstat": {
@@ -99,26 +108,35 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
         self.assertEqual(influxdb.flush_result_data(), 0)
 
     def test__dict_key_flatten(self):
-        line = 'mpstat.loadavg1=0.29,rtt=1.03,mpstat.loadavg0=1.09,mpstat.cpu0.%idle=99.00,mpstat.cpu0.%sys=0.00'
+        line = 'mpstat.loadavg1=0.29,rtt=1.03,mpstat.loadavg0=1.09,' \
+               'mpstat.cpu0.%idle=99.00,mpstat.cpu0.%sys=0.00'
+        # need to sort for assert to work
+        line = ",".join(sorted(line.split(',')))
         influxdb = InfluxdbDispatcher(None)
-        flattened_data = influxdb._dict_key_flatten(self.data3['benchmark']['data'])
-        result = ",".join([k+"="+v for k, v in flattened_data.items()])
+        flattened_data = influxdb._dict_key_flatten(
+            self.data3['benchmark']['data'])
+        result = ",".join(
+            [k + "=" + v for k, v in sorted(flattened_data.items())])
         self.assertEqual(result, line)
 
     def test__get_nano_timestamp(self):
         influxdb = InfluxdbDispatcher(None)
         results = {'benchmark': {'timestamp': '1451461248.925574'}}
-        self.assertEqual(influxdb._get_nano_timestamp(results), '1451461248925574144')
+        self.assertEqual(influxdb._get_nano_timestamp(results),
+                         '1451461248925574144')
 
     @mock.patch('yardstick.dispatcher.influxdb.time')
     def test__get_nano_timestamp_except(self, mock_time):
         results = {}
         influxdb = InfluxdbDispatcher(None)
         mock_time.time.return_value = 1451461248.925574
-        self.assertEqual(influxdb._get_nano_timestamp(results), '1451461248925574144')
+        self.assertEqual(influxdb._get_nano_timestamp(results),
+                         '1451461248925574144')
+
 
 def main():
     unittest.main()
 
+
 if __name__ == '__main__':
     main()
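
Sorting both the expected line and the flattened items is what makes the `_dict_key_flatten` assertion deterministic: the method returns a plain dict, and dict iteration order is not guaranteed on the interpreters this code targets. A reduced illustration:

    flattened = {'rtt': '1.03', 'mpstat.loadavg1': '0.29'}
    expected = 'mpstat.loadavg1=0.29,rtt=1.03'
    result = ",".join(k + "=" + v for k, v in sorted(flattened.items()))
    assert result == expected  # holds regardless of insertion order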
index 42553c4..debb199 100644 (file)
@@ -3,6 +3,7 @@
 # yardstick comment: this file is a modified copy of
 # influxdb-python/influxdb/tests/test_line_protocol.py
 
+from __future__ import absolute_import
 import unittest
 from third_party.influxdb.influxdb_line_protocol import make_lines
 
diff --git a/tests/unit/orchestrator/test_heat.py b/tests/unit/orchestrator/test_heat.py
new file mode 100644 (file)
index 0000000..97314c2
--- /dev/null
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2017 Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.orchestrator.heat
+
+import unittest
+import uuid
+
+from yardstick.orchestrator import heat
+
+
+class HeatContextTestCase(unittest.TestCase):
+
+    def test_get_short_key_uuid(self):
+        u = uuid.uuid4()
+        k = heat.get_short_key_uuid(u)
+        self.assertEqual(heat.HEAT_KEY_UUID_LENGTH, len(k))
+        self.assertIn(k, str(u))
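
The new test pins down only the contract: the short key is `HEAT_KEY_UUID_LENGTH` characters long and appears inside the UUID's string form. A sketch consistent with both assertions and with the "first 8 digits of the uuid" comment in the heat context below; this is not the actual `yardstick.orchestrator.heat` code:

    import uuid

    HEAT_KEY_UUID_LENGTH = 8  # assumed value

    def get_short_key_uuid(key_uuid):
        # e.g. '1b2c3d4e' from '1b2c3d4e-...'
        return str(key_uuid)[:HEAT_KEY_UUID_LENGTH]

    u = uuid.uuid4()
    k = get_short_key_uuid(u)
    assert len(k) == HEAT_KEY_UUID_LENGTH and k in str(u)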
index 045ac0f..1c63c00 100644 (file)
 # yardstick comment: this file is a modified copy of
 # rally/tests/unit/common/test_sshutils.py
 
+from __future__ import absolute_import
 import os
 import socket
 import unittest
-from cStringIO import StringIO
+from io import StringIO
 
 import mock
+from oslo_utils import encodeutils
 
 from yardstick import ssh
 
@@ -274,7 +276,9 @@ class SSHRunTestCase(unittest.TestCase):
         fake_stdin.close = mock.Mock(side_effect=close)
         self.test_client.run("cmd", stdin=fake_stdin)
         call = mock.call
-        send_calls = [call("line1"), call("line2"), call("e2")]
+        send_calls = [call(encodeutils.safe_encode("line1", "utf-8")),
+                      call(encodeutils.safe_encode("line2", "utf-8")),
+                      call(encodeutils.safe_encode("e2", "utf-8"))]
         self.assertEqual(send_calls, self.fake_session.send.mock_calls)
 
     @mock.patch("yardstick.ssh.select")
@@ -288,10 +292,10 @@ class SSHRunTestCase(unittest.TestCase):
         self.fake_session.exit_status_ready.side_effect = [0, 0, 0, True]
         self.fake_session.send_ready.return_value = True
         self.fake_session.send.side_effect = len
-        fake_stdin = StringIO("line1\nline2\n")
+        fake_stdin = StringIO(u"line1\nline2\n")
         self.test_client.run("cmd", stdin=fake_stdin, keep_stdin_open=True)
         call = mock.call
-        send_calls = [call("line1\nline2\n")]
+        send_calls = [call(encodeutils.safe_encode("line1\nline2\n", "utf-8"))]
         self.assertEqual(send_calls, self.fake_session.send.mock_calls)
 
     @mock.patch("yardstick.ssh.select")
@@ -393,5 +397,6 @@ class SSHRunTestCase(unittest.TestCase):
 def main():
     unittest.main()
 
+
 if __name__ == '__main__':
     main()
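
Every expected payload in these SSH tests is now wrapped in `oslo_utils.encodeutils.safe_encode`, because on Python 3 the paramiko session sends bytes, not text. `safe_encode` leaves data that is already bytes untouched:

    from oslo_utils import encodeutils

    print(encodeutils.safe_encode("line1", "utf-8"))   # b'line1'
    print(encodeutils.safe_encode(b"line1", "utf-8"))  # b'line1', unchanged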
index eee9821..12b010c 100644 (file)
 # yardstick comment: this file is a modified copy of
 # influxdb-python/influxdb/line_protocol.py
 
+from __future__ import absolute_import
 from __future__ import unicode_literals
+
 from copy import copy
 
+from oslo_utils import encodeutils
 from six import binary_type, text_type, integer_types
 
 
@@ -64,7 +67,7 @@ def _get_unicode(data, force=False):
     Try to return a text aka unicode object from the given data.
     """
     if isinstance(data, binary_type):
-        return data.decode('utf-8')
+        return encodeutils.safe_decode(data, 'utf-8')
     elif data is None:
         return ''
     elif force:
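
`encodeutils.safe_decode` is the mirror of the encoding change above: it decodes bytes to text but passes text through unchanged, which is what `_get_unicode` needs when callers may hand it either type:

    from oslo_utils import encodeutils

    print(encodeutils.safe_decode(b'ping', 'utf-8'))  # 'ping'
    print(encodeutils.safe_decode('ping', 'utf-8'))   # 'ping', already text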
index 5c279c8..3ae915c 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 import os
 import sys
index 8b292ac..898013f 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import yardstick.common.utils as utils
 
 utils.import_modules_from_package("yardstick.benchmark.contexts")
index 76a8288..054ce42 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import abc
 import six
 
index 6901b26..0e76b5a 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 
 from yardstick.benchmark.contexts.base import Context
index fcbe825..0b2fbdc 100644 (file)
@@ -7,19 +7,28 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
+from __future__ import print_function
+
+import collections
+import logging
 import os
 import sys
-import pkg_resources
+import uuid
+
 import paramiko
+import pkg_resources
 
 from yardstick.benchmark.contexts.base import Context
-from yardstick.benchmark.contexts.model import Server
-from yardstick.benchmark.contexts.model import PlacementGroup
 from yardstick.benchmark.contexts.model import Network
+from yardstick.benchmark.contexts.model import PlacementGroup
+from yardstick.benchmark.contexts.model import Server
 from yardstick.benchmark.contexts.model import update_scheduler_hints
-from yardstick.orchestrator.heat import HeatTemplate
+from yardstick.orchestrator.heat import HeatTemplate, get_short_key_uuid
 from yardstick.definitions import YARDSTICK_ROOT_PATH
 
+LOG = logging.getLogger(__name__)
+
 
 class HeatContext(Context):
     '''Class that represents a context in the logical model'''
@@ -40,8 +49,12 @@ class HeatContext(Context):
         self._user = None
         self.template_file = None
         self.heat_parameters = None
-        self.key_filename = YARDSTICK_ROOT_PATH + \
-            'yardstick/resources/files/yardstick_key'
+        # generate a uuid to identify yardstick_key;
+        # the first 8 digits of the uuid will be used
+        self.key_uuid = uuid.uuid4()
+        self.key_filename = ''.join(
+            [YARDSTICK_ROOT_PATH, 'yardstick/resources/files/yardstick_key-',
+             get_short_key_uuid(self.key_uuid)])
         super(self.__class__, self).__init__()
 
     def init(self, attrs):
@@ -79,16 +92,12 @@ class HeatContext(Context):
             self.servers.append(server)
             self._server_map[server.dn] = server
 
-        print "Generating RSA host key ..."
         rsa_key = paramiko.RSAKey.generate(bits=2048, progress_func=None)
-        print "Writing yardstick_key ..."
         rsa_key.write_private_key_file(self.key_filename)
-        print "Writing yardstick_key.pub ..."
         open(self.key_filename + ".pub", "w").write("%s %s\n" %
                                                     (rsa_key.get_name(),
                                                      rsa_key.get_base64()))
         del rsa_key
-        print "... done!"
 
     @property
     def image(self):
@@ -107,7 +116,7 @@ class HeatContext(Context):
 
     def _add_resources_to_template(self, template):
         '''add to the template the resources represented by this context'''
-        template.add_keypair(self.keypair_name)
+        template.add_keypair(self.keypair_name, self.key_uuid)
         template.add_security_group(self.secgroup_name)
 
         for network in self.networks:
@@ -192,7 +201,7 @@ class HeatContext(Context):
 
     def deploy(self):
         '''deploys template into a stack using cloud'''
-        print "Deploying context '%s'" % self.name
+        print("Deploying context '%s'" % self.name)
 
         heat_template = HeatTemplate(self.name, self.template_file,
                                      self.heat_parameters)
@@ -213,29 +222,29 @@ class HeatContext(Context):
         for server in self.servers:
             if len(server.ports) > 0:
                 # TODO(hafe) can only handle one internal network for now
-                port = server.ports.values()[0]
+                port = list(server.ports.values())[0]
                 server.private_ip = self.stack.outputs[port["stack_name"]]
 
             if server.floating_ip:
                 server.public_ip = \
                     self.stack.outputs[server.floating_ip["stack_name"]]
 
-        print "Context '%s' deployed" % self.name
+        print("Context '%s' deployed" % self.name)
 
     def undeploy(self):
         '''undeploys stack from cloud'''
         if self.stack:
-            print "Undeploying context '%s'" % self.name
+            print("Undeploying context '%s'" % self.name)
             self.stack.delete()
             self.stack = None
-            print "Context '%s' undeployed" % self.name
+            print("Context '%s' undeployed" % self.name)
 
         if os.path.exists(self.key_filename):
             try:
                 os.remove(self.key_filename)
                 os.remove(self.key_filename + ".pub")
-            except OSError, e:
-                print ("Error: %s - %s." % (e.key_filename, e.strerror))
+            except OSError:
+                LOG.exception("Key filename %s", self.key_filename)
 
     def _get_server(self, attr_name):
         '''lookup server info by name from context
@@ -243,9 +252,10 @@ class HeatContext(Context):
         with attribute name mapping when using external heat templates
         '''
         key_filename = pkg_resources.resource_filename(
-            'yardstick.resources', 'files/yardstick_key')
+            'yardstick.resources',
+            'files/yardstick_key-' + get_short_key_uuid(self.key_uuid))
 
-        if type(attr_name) is dict:
+        if isinstance(attr_name, collections.Mapping):
             cname = attr_name["name"].split(".")[1]
             if cname != self.name:
                 return None
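
Replacing `type(attr_name) is dict` with an `isinstance` check against `collections.Mapping` accepts any mapping, including dict subclasses and `OrderedDict`, not just exact `dict` instances (on newer Python the ABC lives in `collections.abc`):

    import collections

    class AttrDict(dict):
        pass

    attrs = AttrDict(name='stack.server1')
    print(type(attrs) is dict)                     # False
    print(isinstance(attrs, collections.Mapping))  # True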
index d31f4af..1d0a5a1 100644 (file)
 """ Logical model
 
 """
+from __future__ import absolute_import
+from six.moves import range
 
 
 class Object(object):
     '''Base class for classes in the logical model
     Contains common attributes and methods
     '''
+
     def __init__(self, name, context):
         # model identities and reference
         self.name = name
@@ -61,6 +64,7 @@ class PlacementGroup(Object):
 
 class Router(Object):
     '''Class that represents a router in the logical model'''
+
     def __init__(self, name, network_name, context, external_gateway_info):
         super(self.__class__, self).__init__(name, context)
 
index 78bce82..6db51cc 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import sys
 import os
 import yaml
@@ -49,11 +50,11 @@ class NodeContext(Context):
 
         self.nodes.extend(cfg["nodes"])
         self.controllers.extend([node for node in cfg["nodes"]
-                                if node["role"] == "Controller"])
+                                 if node["role"] == "Controller"])
         self.computes.extend([node for node in cfg["nodes"]
-                             if node["role"] == "Compute"])
+                              if node["role"] == "Compute"])
         self.baremetals.extend([node for node in cfg["nodes"]
-                               if node["role"] == "Baremetal"])
+                                if node["role"] == "Baremetal"])
         LOG.debug("Nodes: %r", self.nodes)
         LOG.debug("Controllers: %r", self.controllers)
         LOG.debug("Computes: %r", self.computes)
diff --git a/yardstick/benchmark/core/__init__.py b/yardstick/benchmark/core/__init__.py
new file mode 100644 (file)
index 0000000..12c83f8
--- /dev/null
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from __future__ import print_function
+
+
+class Param(object):
+
+    def __init__(self, kwargs):
+        # task command options
+        self.inputfile = kwargs.get('inputfile')
+        self.task_args = kwargs.get('task-args')
+        self.task_args_file = kwargs.get('task-args-file')
+        self.keep_deploy = kwargs.get('keep-deploy')
+        self.parse_only = kwargs.get('parse-only')
+        self.output_file = kwargs.get('output-file', '/tmp/yardstick.out')
+        self.suite = kwargs.get('suite')
+
+        # list
+        self.input_file = kwargs.get('input_file')
+
+        # list
+        self.casename = kwargs.get('casename')
+
+        # list
+        self.type = kwargs.get('type')
+
+
+def print_hbar(barlen):
+    '''print to stdout a horizontal bar'''
+    print("+"),
+    print("-" * barlen),
+    print("+")
diff --git a/yardstick/benchmark/core/plugin.py b/yardstick/benchmark/core/plugin.py
new file mode 100644 (file)
index 0000000..3080f5d
--- /dev/null
@@ -0,0 +1,213 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+""" Handler for yardstick command 'plugin' """
+
+from __future__ import print_function
+from __future__ import absolute_import
+import os
+import sys
+import yaml
+import time
+import logging
+import pkg_resources
+import yardstick.ssh as ssh
+
+from yardstick.common.task_template import TaskTemplate
+
+LOG = logging.getLogger(__name__)
+
+
+class Plugin(object):
+    """Plugin commands.
+
+       Set of commands to manage plugins.
+    """
+
+    def install(self, args):
+        """Install a plugin."""
+
+        total_start_time = time.time()
+        parser = PluginParser(args.input_file[0])
+
+        plugins, deployment = parser.parse_plugin()
+        plugin_name = plugins.get("name")
+        print("Installing plugin: %s" % plugin_name)
+
+        LOG.info("Executing _install_setup()")
+        self._install_setup(plugin_name, deployment)
+
+        LOG.info("Executing _run()")
+        self._run(plugin_name)
+
+        total_end_time = time.time()
+        LOG.info("total finished in %d secs",
+                 total_end_time - total_start_time)
+
+        print("Done, exiting")
+
+    def remove(self, args):
+        """Remove a plugin."""
+
+        total_start_time = time.time()
+        parser = PluginParser(args.input_file[0])
+
+        plugins, deployment = parser.parse_plugin()
+        plugin_name = plugins.get("name")
+        print("Removing plugin: %s" % plugin_name)
+
+        LOG.info("Executing _remove_setup()")
+        self._remove_setup(plugin_name, deployment)
+
+        LOG.info("Executing _run()")
+        self._run(plugin_name)
+
+        total_end_time = time.time()
+        LOG.info("total finished in %d secs",
+                 total_end_time - total_start_time)
+
+        print("Done, exiting")
+
+    def _install_setup(self, plugin_name, deployment):
+        """Deployment environment setup"""
+        target_script = plugin_name + ".bash"
+        self.script = pkg_resources.resource_filename(
+            'yardstick.resources', 'scripts/install/' + target_script)
+
+        deployment_user = deployment.get("user")
+        deployment_ssh_port = deployment.get("ssh_port", ssh.DEFAULT_PORT)
+        deployment_ip = deployment.get("ip", None)
+        deployment_password = deployment.get("password", None)
+        deployment_key_filename = deployment.get("key_filename",
+                                                 "/root/.ssh/id_rsa")
+
+        if deployment_ip == "local":
+            installer_ip = os.environ.get("INSTALLER_IP", None)
+
+            if deployment_password is not None:
+                self._login_via_password(deployment_user, installer_ip,
+                                         deployment_password,
+                                         deployment_ssh_port)
+            else:
+                self._login_via_key(deployment_user, installer_ip,
+                                    deployment_key_filename,
+                                    deployment_ssh_port)
+        else:
+            if deployment_password is not None:
+                self._login_via_password(deployment_user, deployment_ip,
+                                         deployment_password,
+                                         deployment_ssh_port)
+            else:
+                self._login_via_key(deployment_user, deployment_ip,
+                                    deployment_key_filename,
+                                    deployment_ssh_port)
+        # copy script to host
+        remotepath = '~/%s.sh' % plugin_name
+
+        LOG.info("copying script to host: %s", remotepath)
+        self.client._put_file_shell(self.script, remotepath)
+
+    def _remove_setup(self, plugin_name, deployment):
+        """Deployment environment setup"""
+        target_script = plugin_name + ".bash"
+        self.script = pkg_resources.resource_filename(
+            'yardstick.resources', 'scripts/remove/' + target_script)
+
+        deployment_user = deployment.get("user")
+        deployment_ssh_port = deployment.get("ssh_port", ssh.DEFAULT_PORT)
+        deployment_ip = deployment.get("ip", None)
+        deployment_password = deployment.get("password", None)
+        deployment_key_filename = deployment.get("key_filename",
+                                                 "/root/.ssh/id_rsa")
+
+        if deployment_ip == "local":
+            installer_ip = os.environ.get("INSTALLER_IP", None)
+
+            if deployment_password is not None:
+                self._login_via_password(deployment_user, installer_ip,
+                                         deployment_password,
+                                         deployment_ssh_port)
+            else:
+                self._login_via_key(deployment_user, installer_ip,
+                                    deployment_key_filename,
+                                    deployment_ssh_port)
+        else:
+            if deployment_password is not None:
+                self._login_via_password(deployment_user, deployment_ip,
+                                         deployment_password,
+                                         deployment_ssh_port)
+            else:
+                self._login_via_key(deployment_user, deployment_ip,
+                                    deployment_key_filename,
+                                    deployment_ssh_port)
+
+        # copy script to host
+        remotepath = '~/%s.sh' % plugin_name
+
+        LOG.info("copying script to host: %s", remotepath)
+        self.client._put_file_shell(self.script, remotepath)
+
+    def _login_via_password(self, user, ip, password, ssh_port):
+        LOG.info("Log in via pw, user:%s, host:%s", user, ip)
+        self.client = ssh.SSH(user, ip, password=password, port=ssh_port)
+        self.client.wait(timeout=600)
+
+    def _login_via_key(self, user, ip, key_filename, ssh_port):
+        LOG.info("Log in via key, user:%s, host:%s", user, ip)
+        self.client = ssh.SSH(user, ip, key_filename=key_filename,
+                              port=ssh_port)
+        self.client.wait(timeout=600)
+
+    def _run(self, plugin_name):
+        """Run the plugin install/remove script"""
+        cmd = "sudo bash %s.sh" % plugin_name
+
+        LOG.info("Executing command: %s", cmd)
+        status, stdout, stderr = self.client.execute(cmd)
+
+
+class PluginParser(object):
+    """Parser for plugin configration files in yaml format"""
+
+    def __init__(self, path):
+        self.path = path
+
+    def parse_plugin(self):
+        """parses the plugin file and return a plugins instance
+           and a deployment instance
+        """
+
+        print("Parsing plugin config:", self.path)
+
+        try:
+            kw = {}
+            with open(self.path) as f:
+                try:
+                    input_plugin = f.read()
+                    rendered_plugin = TaskTemplate.render(input_plugin, **kw)
+                except Exception as e:
+                    print("Failed to render template:\n%(plugin)s\n%(err)s\n"
+                          % {"plugin": input_plugin, "err": e})
+                    raise e
+                print("Input plugin is:\n%s\n" % rendered_plugin)
+
+                cfg = yaml.load(rendered_plugin)
+        except IOError as ioerror:
+            sys.exit(ioerror)
+
+        self._check_schema(cfg["schema"], "plugin")
+
+        return cfg["plugins"], cfg["deployment"]
+
+    def _check_schema(self, cfg_schema, schema_type):
+        """Check if configration file is using the correct schema type"""
+
+        if cfg_schema != "yardstick:" + schema_type + ":0.1":
+            sys.exit("error: file %s has unknown schema %s" % (self.path,
+                                                               cfg_schema))
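
`PluginParser._check_schema` pins the schema string to `yardstick:plugin:0.1`, and `_install_setup`/`_remove_setup` read the deployment keys shown below. Expressed as the Python structures `parse_plugin` returns (all values illustrative):

    plugins = {'name': 'sample_plugin'}
    # name resolves to yardstick/resources/scripts/install/sample_plugin.bash

    deployment = {
        'ip': '192.168.1.10',   # or 'local' to read $INSTALLER_IP instead
        'user': 'root',
        'password': 'root',     # omit to fall back to key_filename
        # 'key_filename': '/root/.ssh/id_rsa',  # default when password absent
        # 'ssh_port': 22,                       # ssh.DEFAULT_PORT by default
    }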
diff --git a/yardstick/benchmark/core/runner.py b/yardstick/benchmark/core/runner.py
new file mode 100644 (file)
index 0000000..5f8132d
--- /dev/null
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+""" Handler for yardstick command 'runner' """
+
+from __future__ import absolute_import
+from __future__ import print_function
+from yardstick.benchmark.runners.base import Runner
+from yardstick.benchmark.core import print_hbar
+
+
+class Runners(object):
+    '''Runner commands.
+
+       Set of commands to discover and display runner types.
+    '''
+
+    def list_all(self, args):
+        '''List existing runner types'''
+        types = Runner.get_types()
+        print_hbar(78)
+        print("| %-16s | %-60s" % ("Type", "Description"))
+        print_hbar(78)
+        for rtype in types:
+            print("| %-16s | %-60s" % (rtype.__execution_type__,
+                                       rtype.__doc__.split("\n")[0]))
+        print_hbar(78)
+
+    def show(self, args):
+        '''Show details of a specific runner type'''
+        rtype = Runner.get_cls(args.type[0])
+        print(rtype.__doc__)
diff --git a/yardstick/benchmark/core/scenario.py b/yardstick/benchmark/core/scenario.py
new file mode 100644 (file)
index 0000000..15335af
--- /dev/null
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+""" Handler for yardstick command 'scenario' """
+
+from __future__ import absolute_import
+from __future__ import print_function
+from yardstick.benchmark.scenarios.base import Scenario
+from yardstick.benchmark.core import print_hbar
+
+
+class Scenarios(object):
+    '''Scenario commands.
+
+       Set of commands to discover and display scenario types.
+    '''
+
+    def list_all(self, args):
+        '''List existing scenario types'''
+        types = Scenario.get_types()
+        print_hbar(78)
+        print("| %-16s | %-60s" % ("Type", "Description"))
+        print_hbar(78)
+        for stype in types:
+            print("| %-16s | %-60s" % (stype.__scenario_type__,
+                                       stype.__doc__.split("\n")[0]))
+        print_hbar(78)
+
+    def show(self, args):
+        '''Show details of a specific scenario type'''
+        stype = Scenario.get_cls(args.type[0])
+        print(stype.__doc__)
diff --git a/yardstick/benchmark/core/task.py b/yardstick/benchmark/core/task.py
new file mode 100644 (file)
index 0000000..d9a8576
--- /dev/null
@@ -0,0 +1,514 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+""" Handler for yardstick command 'task' """
+
+from __future__ import absolute_import
+from __future__ import print_function
+import sys
+import os
+import yaml
+import atexit
+import ipaddress
+import time
+import logging
+import uuid
+import errno
+from six.moves import filter
+
+from yardstick.benchmark.contexts.base import Context
+from yardstick.benchmark.runners import base as base_runner
+from yardstick.common.task_template import TaskTemplate
+from yardstick.common.utils import source_env
+from yardstick.common import constants
+
+output_file_default = "/tmp/yardstick.out"
+test_cases_dir_default = "tests/opnfv/test_cases/"
+LOG = logging.getLogger(__name__)
+
+
+class Task(object):     # pragma: no cover
+    '''Task commands.
+
+       Set of commands to manage benchmark tasks.
+    '''
+
+    def start(self, args, **kwargs):
+        '''Start a benchmark scenario.'''
+
+        atexit.register(atexit_handler)
+
+        self.task_id = kwargs.get('task_id', str(uuid.uuid4()))
+
+        check_environment()
+
+        total_start_time = time.time()
+        parser = TaskParser(args.inputfile[0])
+
+        if args.suite:
+            # 1.parse suite, return suite_params info
+            task_files, task_args, task_args_fnames = \
+                parser.parse_suite()
+        else:
+            task_files = [parser.path]
+            task_args = [args.task_args]
+            task_args_fnames = [args.task_args_file]
+
+        LOG.info("\ntask_files:%s, \ntask_args:%s, \ntask_args_fnames:%s",
+                 task_files, task_args, task_args_fnames)
+
+        if args.parse_only:
+            sys.exit(0)
+
+        if os.path.isfile(args.output_file):
+            os.remove(args.output_file)
+        # parse task_files
+        for i in range(0, len(task_files)):
+            one_task_start_time = time.time()
+            parser.path = task_files[i]
+            scenarios, run_in_parallel, meet_precondition = parser.parse_task(
+                self.task_id, task_args[i], task_args_fnames[i])
+
+            if not meet_precondition:
+                LOG.info("meet_precondition is %s, please check envrionment",
+                         meet_precondition)
+                continue
+
+            self._run(scenarios, run_in_parallel, args.output_file)
+
+            if args.keep_deploy:
+                # keep deployment, forget about stack
+                # (hide it for exit handler)
+                Context.list = []
+            else:
+                for context in Context.list:
+                    context.undeploy()
+                Context.list = []
+            one_task_end_time = time.time()
+            LOG.info("task %s finished in %d secs", task_files[i],
+                     one_task_end_time - one_task_start_time)
+
+        total_end_time = time.time()
+        LOG.info("total finished in %d secs",
+                 total_end_time - total_start_time)
+
+        print("Done, exiting")
+
+    def _run(self, scenarios, run_in_parallel, output_file):
+        '''Deploys context and calls runners'''
+        for context in Context.list:
+            context.deploy()
+
+        background_runners = []
+
+        # Start all background scenarios
+        for scenario in filter(_is_background_scenario, scenarios):
+            scenario["runner"] = dict(type="Duration", duration=1000000000)
+            runner = run_one_scenario(scenario, output_file)
+            background_runners.append(runner)
+
+        runners = []
+        if run_in_parallel:
+            for scenario in scenarios:
+                if not _is_background_scenario(scenario):
+                    runner = run_one_scenario(scenario, output_file)
+                    runners.append(runner)
+
+            # Wait for runners to finish
+            for runner in runners:
+                runner_join(runner)
+                print("Runner ended, output in", output_file)
+        else:
+            # run serially
+            for scenario in scenarios:
+                if not _is_background_scenario(scenario):
+                    runner = run_one_scenario(scenario, output_file)
+                    runner_join(runner)
+                    print("Runner ended, output in", output_file)
+
+        # Abort background runners
+        for runner in background_runners:
+            runner.abort()
+
+        # Wait for background runners to finish
+        for runner in background_runners:
+            if runner.join(timeout=60) is None:
+                # Nuke if it did not stop nicely
+                base_runner.Runner.terminate(runner)
+                runner_join(runner)
+            else:
+                base_runner.Runner.release(runner)
+            print("Background task ended")
+
+
+# TODO: Move stuff below into TaskCommands class !?
+
+
+class TaskParser(object):       # pragma: no cover
+    '''Parser for task config files in yaml format'''
+
+    def __init__(self, path):
+        self.path = path
+
+    def _meet_constraint(self, task, cur_pod, cur_installer):
+        if "constraint" in task:
+            constraint = task.get('constraint', None)
+            if constraint is not None:
+                tc_fit_pod = constraint.get('pod', None)
+                tc_fit_installer = constraint.get('installer', None)
+                LOG.info("cur_pod:%s, cur_installer:%s,tc_constraints:%s",
+                         cur_pod, cur_installer, constraint)
+                if cur_pod and tc_fit_pod and cur_pod not in tc_fit_pod:
+                    return False
+                if cur_installer and tc_fit_installer and \
+                        cur_installer not in tc_fit_installer:
+                    return False
+        return True
+
+    def _get_task_para(self, task, cur_pod):
+        task_args = task.get('task_args', None)
+        if task_args is not None:
+            task_args = task_args.get(cur_pod, None)
+        task_args_fnames = task.get('task_args_fnames', None)
+        if task_args_fnames is not None:
+            task_args_fnames = task_args_fnames.get(cur_pod, None)
+        return task_args, task_args_fnames
+
+    def parse_suite(self):
+        '''parse the suite file and return a list of task config file paths
+           and lists of optional parameters if present'''
+        LOG.info("\nParsing suite file:%s", self.path)
+
+        try:
+            with open(self.path) as stream:
+                cfg = yaml.load(stream)
+        except IOError as ioerror:
+            sys.exit(ioerror)
+
+        self._check_schema(cfg["schema"], "suite")
+        LOG.info("\nStarting scenario:%s", cfg["name"])
+
+        test_cases_dir = cfg.get("test_cases_dir", test_cases_dir_default)
+        if test_cases_dir[-1] != os.sep:
+            test_cases_dir += os.sep
+
+        cur_pod = os.environ.get('NODE_NAME', None)
+        cur_installer = os.environ.get('INSTALLER_TYPE', None)
+
+        valid_task_files = []
+        valid_task_args = []
+        valid_task_args_fnames = []
+
+        for task in cfg["test_cases"]:
+            # 1.check file_name
+            if "file_name" in task:
+                task_fname = task.get('file_name', None)
+                if task_fname is None:
+                    continue
+            else:
+                continue
+            # 2.check constraint
+            if self._meet_constraint(task, cur_pod, cur_installer):
+                valid_task_files.append(test_cases_dir + task_fname)
+            else:
+                continue
+            # 3.fetch task parameters
+            task_args, task_args_fnames = self._get_task_para(task, cur_pod)
+            valid_task_args.append(task_args)
+            valid_task_args_fnames.append(task_args_fnames)
+
+        return valid_task_files, valid_task_args, valid_task_args_fnames
+
+    def parse_task(self, task_id, task_args=None, task_args_file=None):
+        '''parses the task file and returns context and scenario instances'''
+        print("Parsing task config:", self.path)
+
+        try:
+            kw = {}
+            if task_args_file:
+                with open(task_args_file) as f:
+                    kw.update(parse_task_args("task_args_file", f.read()))
+            kw.update(parse_task_args("task_args", task_args))
+        except TypeError:
+            # re-raise with the original message and traceback intact
+            raise
+
+        try:
+            with open(self.path) as f:
+                try:
+                    input_task = f.read()
+                    rendered_task = TaskTemplate.render(input_task, **kw)
+                except Exception as e:
+                    print("Failed to render template:\n%(task)s\n%(err)s\n"
+                          % {"task": input_task, "err": e})
+                    raise e
+                print("Input task is:\n%s\n" % rendered_task)
+
+                cfg = yaml.load(rendered_task)
+        except IOError as ioerror:
+            sys.exit(ioerror)
+
+        self._check_schema(cfg["schema"], "task")
+        meet_precondition = self._check_precondition(cfg)
+
+        # TODO: support one or many contexts? Many would be simpler
+        # and more precise
+        # TODO: support hybrid context type
+        if "context" in cfg:
+            context_cfgs = [cfg["context"]]
+        elif "contexts" in cfg:
+            context_cfgs = cfg["contexts"]
+        else:
+            context_cfgs = [{"type": "Dummy"}]
+
+        name_suffix = '-{}'.format(task_id[:8])
+        for cfg_attrs in context_cfgs:
+            cfg_attrs['name'] = '{}{}'.format(cfg_attrs['name'], name_suffix)
+            context_type = cfg_attrs.get("type", "Heat")
+            if "Heat" == context_type and "networks" in cfg_attrs:
+                # bugfix: if there is more than one network, only set
+                # "external_network" on the first one. Network names are
+                # expected to follow the pattern test, test2, test3 ...,
+                # so a plain sort puts the first network first.
+                sorted_networks = sorted(cfg_attrs["networks"])
+                # config external_network based on env var
+                cfg_attrs["networks"][sorted_networks[0]]["external_network"] \
+                    = os.environ.get("EXTERNAL_NETWORK", "net04_ext")
+
+            context = Context.get(context_type)
+            context.init(cfg_attrs)
+
+        run_in_parallel = cfg.get("run_in_parallel", False)
+
+        # add tc and task id for influxdb extended tags
+        for scenario in cfg["scenarios"]:
+            task_name = os.path.splitext(os.path.basename(self.path))[0]
+            scenario["tc"] = task_name
+            scenario["task_id"] = task_id
+
+            change_server_name(scenario, name_suffix)
+
+            try:
+                change_server_name(scenario['nodes'], name_suffix)
+            except KeyError:
+                pass
+
+        # TODO we need something better here, a class that represents the file
+        return cfg["scenarios"], run_in_parallel, meet_precondition
+
+    def _check_schema(self, cfg_schema, schema_type):
+        '''Check if config file is using the correct schema type'''
+
+        if cfg_schema != "yardstick:" + schema_type + ":0.1":
+            sys.exit("error: file %s has unknown schema %s" % (self.path,
+                                                               cfg_schema))
+
+    def _check_precondition(self, cfg):
+        '''Check if the environment meets the precondition'''
+
+        if "precondition" in cfg:
+            precondition = cfg["precondition"]
+            installer_type = precondition.get("installer_type", None)
+            deploy_scenarios = precondition.get("deploy_scenarios", None)
+            tc_fit_pods = precondition.get("pod_name", None)
+            installer_type_env = os.environ.get('INSTALL_TYPE', None)
+            deploy_scenario_env = os.environ.get('DEPLOY_SCENARIO', None)
+            pod_name_env = os.environ.get('NODE_NAME', None)
+
+            LOG.info("installer_type: %s, installer_type_env: %s",
+                     installer_type, installer_type_env)
+            LOG.info("deploy_scenarios: %s, deploy_scenario_env: %s",
+                     deploy_scenarios, deploy_scenario_env)
+            LOG.info("tc_fit_pods: %s, pod_name_env: %s",
+                     tc_fit_pods, pod_name_env)
+            if installer_type and installer_type_env:
+                if installer_type_env not in installer_type:
+                    return False
+            if deploy_scenarios and deploy_scenario_env:
+                deploy_scenarios_list = deploy_scenarios.split(',')
+                for deploy_scenario in deploy_scenarios_list:
+                    if deploy_scenario_env.startswith(deploy_scenario):
+                        return True
+                return False
+            if tc_fit_pods and pod_name_env:
+                if pod_name_env not in tc_fit_pods:
+                    return False
+        return True
+
+
+def atexit_handler():
+    '''handler for process termination'''
+    base_runner.Runner.terminate_all()
+
+    if len(Context.list) > 0:
+        print("Undeploying all contexts")
+        for context in Context.list:
+            context.undeploy()
+
+
+def is_ip_addr(addr):
+    '''check if string addr is an IP address'''
+    try:
+        ipaddress.ip_address(addr.encode('utf-8'))
+        return True
+    except ValueError:
+        return False
+
+
+def _is_same_heat_context(host_attr, target_attr):
+    '''check if two servers are in the same heat context
+    host_attr: either a name for a server created by yardstick or a dict
+    with attribute name mapping when using external heat templates
+    target_attr: either a name for a server created by yardstick or a dict
+    with attribute name mapping when using external heat templates
+    '''
+    host = None
+    target = None
+    for context in Context.list:
+        if context.__context_type__ != "Heat":
+            continue
+
+        host = context._get_server(host_attr)
+        if host is None:
+            continue
+
+        target = context._get_server(target_attr)
+        if target is None:
+            return False
+
+        # Both host and target are not None, so they are in the
+        # same heat context.
+        return True
+
+    return False
+
+
+def _is_background_scenario(scenario):
+    if "run_in_background" in scenario:
+        return scenario["run_in_background"]
+    else:
+        return False
+
+
+def run_one_scenario(scenario_cfg, output_file):
+    '''run one scenario using context'''
+    runner_cfg = scenario_cfg["runner"]
+    runner_cfg['output_filename'] = output_file
+
+    # TODO: support fetching info for multiple hosts/VMs
+    context_cfg = {}
+    if "host" in scenario_cfg:
+        context_cfg['host'] = Context.get_server(scenario_cfg["host"])
+
+    if "target" in scenario_cfg:
+        if is_ip_addr(scenario_cfg["target"]):
+            context_cfg['target'] = {}
+            context_cfg['target']["ipaddr"] = scenario_cfg["target"]
+        else:
+            context_cfg['target'] = Context.get_server(scenario_cfg["target"])
+            if _is_same_heat_context(scenario_cfg["host"],
+                                     scenario_cfg["target"]):
+                context_cfg["target"]["ipaddr"] = \
+                    context_cfg["target"]["private_ip"]
+            else:
+                context_cfg["target"]["ipaddr"] = \
+                    context_cfg["target"]["ip"]
+
+    if "targets" in scenario_cfg:
+        ip_list = []
+        for target in scenario_cfg["targets"]:
+            if is_ip_addr(target):
+                ip_list.append(target)
+                context_cfg['target'] = {}
+            else:
+                context_cfg['target'] = Context.get_server(target)
+                if _is_same_heat_context(scenario_cfg["host"], target):
+                    ip_list.append(context_cfg["target"]["private_ip"])
+                else:
+                    ip_list.append(context_cfg["target"]["ip"])
+        context_cfg['target']['ipaddr'] = ','.join(ip_list)
+
+    if "nodes" in scenario_cfg:
+        context_cfg["nodes"] = parse_nodes_with_context(scenario_cfg)
+    runner = base_runner.Runner.get(runner_cfg)
+
+    print("Starting runner of type '%s'" % runner_cfg["type"])
+    runner.run(scenario_cfg, context_cfg)
+
+    return runner
+
+
+def parse_nodes_with_context(scenario_cfg):
+    '''parse the 'nodes' fields in the scenario'''
+    nodes = scenario_cfg["nodes"]
+
+    nodes_cfg = {}
+    for nodename in nodes:
+        nodes_cfg[nodename] = Context.get_server(nodes[nodename])
+
+    return nodes_cfg
+
+
+def runner_join(runner):
+    '''join (wait for) a runner, exit process at runner failure'''
+    status = runner.join()
+    base_runner.Runner.release(runner)
+    if status != 0:
+        sys.exit("Runner failed")
+
+
+def print_invalid_header(source_name, args):
+    print("Invalid %(source)s passed:\n\n %(args)s\n"
+          % {"source": source_name, "args": args})
+
+
+def parse_task_args(src_name, args):
+    try:
+        kw = args and yaml.safe_load(args)
+        kw = {} if kw is None else kw
+    except yaml.parser.ParserError as e:
+        print_invalid_header(src_name, args)
+        print("%(source)s has to be YAML. Details:\n\n%(err)s\n"
+              % {"source": src_name, "err": e})
+        raise TypeError()
+
+    if not isinstance(kw, dict):
+        print_invalid_header(src_name, args)
+        print("%(src)s had to be dict, actually %(src_type)s\n"
+              % {"src": src_name, "src_type": type(kw)})
+        raise TypeError()
+    return kw
+
+
+def check_environment():
+    auth_url = os.environ.get('OS_AUTH_URL', None)
+    if not auth_url:
+        try:
+            source_env(constants.OPENSTACK_RC_FILE)
+        except IOError as e:
+            if e.errno != errno.ENOENT:
+                raise
+            LOG.debug('OPENRC file not found')
+
+
+def change_server_name(scenario, suffix):
+    try:
+        scenario['host'] += suffix
+    except KeyError:
+        pass
+
+    try:
+        scenario['target'] += suffix
+    except KeyError:
+        pass
+
+    try:
+        key = 'targets'
+        scenario[key] = ['{}{}'.format(a, suffix) for a in scenario[key]]
+    except KeyError:
+        pass
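
`parse_task_args` accepts inline YAML for the task arguments and insists the parsed result is a mapping; anything else raises `TypeError` after printing a diagnostic. Illustrative calls (the argument values are made up):

    parse_task_args("task_args", 'image: cirros-0.3.5')
    # -> {'image': 'cirros-0.3.5'}

    parse_task_args("task_args", None)
    # -> {}  (empty args yield an empty dict)

    parse_task_args("task_args", '[1, 2, 3]')
    # raises TypeError: a YAML list is not a dict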
diff --git a/yardstick/benchmark/core/testcase.py b/yardstick/benchmark/core/testcase.py
new file mode 100644 (file)
index 0000000..7430485
--- /dev/null
@@ -0,0 +1,115 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+""" Handler for yardstick command 'testcase' """
+from __future__ import absolute_import
+from __future__ import print_function
+import os
+import yaml
+import sys
+
+from yardstick.benchmark.core import print_hbar
+from yardstick.common.task_template import TaskTemplate
+from yardstick.definitions import YARDSTICK_ROOT_PATH
+
+
+class Testcase(object):
+    '''Testcase commands.
+
+       Set of commands to discover and display test cases.
+    '''
+
+    def __init__(self):
+        self.test_case_path = YARDSTICK_ROOT_PATH + 'tests/opnfv/test_cases/'
+        self.testcase_list = []
+
+    def list_all(self, args):
+        '''List existing test cases'''
+
+        try:
+            testcase_files = os.listdir(self.test_case_path)
+        except Exception as e:
+            print("Failed to list dir:\n%(path)s\n%(err)s\n"
+                  % {"path": self.test_case_path, "err": e})
+            raise e
+        testcase_files.sort()
+
+        for testcase_file in testcase_files:
+            record = self._get_record(testcase_file)
+            self.testcase_list.append(record)
+
+        self._format_print(self.testcase_list)
+        return True
+
+    def show(self, args):
+        '''Show details of a specific test case'''
+        testcase_name = args.casename[0]
+        testcase_path = self.test_case_path + testcase_name + ".yaml"
+        try:
+            with open(testcase_path) as f:
+                try:
+                    testcase_info = f.read()
+                    print(testcase_info)
+
+                except Exception as e:
+                    print("Failed to load test cases:"
+                          "\n%(testcase_file)s\n%(err)s\n"
+                          % {"testcase_file": testcase_path, "err": e})
+                    raise e
+        except IOError as ioerror:
+            sys.exit(ioerror)
+        return True
+
+    def _get_record(self, testcase_file):
+
+        try:
+            with open(self.test_case_path + testcase_file) as f:
+                try:
+                    testcase_info = f.read()
+                except Exception as e:
+                    print("Failed to load test cases:"
+                          "\n%(testcase_file)s\n%(err)s\n"
+                          % {"testcase_file": testcase_file, "err": e})
+                    raise e
+                description, installer, deploy_scenarios = \
+                    self._parse_testcase(testcase_info)
+
+                record = {'Name': testcase_file.split(".")[0],
+                          'Description': description,
+                          'installer': installer,
+                          'deploy_scenarios': deploy_scenarios}
+                return record
+        except IOError as ioerror:
+            sys.exit(ioerror)
+
+    def _parse_testcase(self, testcase_info):
+
+        kw = {}
+        rendered_testcase = TaskTemplate.render(testcase_info, **kw)
+        testcase_cfg = yaml.load(rendered_testcase)
+        test_precondition = testcase_cfg.get('precondition', None)
+        installer_type = 'all'
+        deploy_scenarios = 'all'
+        if test_precondition is not None:
+            installer_type = test_precondition.get('installer_type', 'all')
+            deploy_scenarios = test_precondition.get('deploy_scenarios', 'all')
+
+        description = testcase_info.split("\n")[2][1:].strip()
+        return description, installer_type, deploy_scenarios
+
+    def _format_print(self, testcase_list):
+        '''format output'''
+
+        print_hbar(88)
+        print("| %-21s | %-60s" % ("Testcase Name", "Description"))
+        print_hbar(88)
+        for testcase_record in testcase_list:
+            print("| %-16s | %-60s" % (testcase_record['Name'],
+                                       testcase_record['Description']))
+        print_hbar(88)
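
`_parse_testcase` pulls the description from the third line of the file (`split("\n")[2]`, with the leading `#` stripped), so test case files are expected to begin with a comment header. A reduced demonstration with a made-up header:

    testcase_info = ("###\n"
                     "# opnfv_yardstick_tc002 (hypothetical header)\n"
                     "# measure network latency using ping\n")
    description = testcase_info.split("\n")[2][1:].strip()
    # -> 'measure network latency using ping'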
index 69ea915..956c3ff 100755 (executable)
@@ -24,12 +24,17 @@ until the end of the shortest list is reached (optimally all lists should be
 defined with the same number of values when using such iter_type).
 '''
 
-import os
-import multiprocessing
+from __future__ import absolute_import
+
+import itertools
 import logging
-import traceback
+import multiprocessing
+import os
 import time
-import itertools
+import traceback
+
+import six
+from six.moves import range
 
 from yardstick.benchmark.runners import base
 
@@ -71,8 +76,8 @@ def _worker_process(queue, cls, method_name, scenario_cfg,
         return -1 if start > stop else 1
 
     param_iters = \
-        [xrange(d['start'], d['stop'] + margin(d['start'], d['stop']),
-                d['step']) for d in runner_cfg['iterators']]
+        [range(d['start'], d['stop'] + margin(d['start'], d['stop']),
+               d['step']) for d in runner_cfg['iterators']]
     param_names = [d['name'] for d in runner_cfg['iterators']]
 
     iter_type = runner_cfg.get("iter_type", "nested_for_loops")
@@ -82,10 +87,10 @@ def _worker_process(queue, cls, method_name, scenario_cfg,
         loop_iter = itertools.product(*param_iters)
     elif iter_type == 'tuple_loops':
         # Combine each i;th index of respective parameter list
-        loop_iter = itertools.izip(*param_iters)
+        loop_iter = six.moves.zip(*param_iters)
     else:
         LOG.warning("iter_type unrecognized: %s", iter_type)
-        raise
+        raise TypeError("iter_type unrecognized: %s" % iter_type)
 
     # Populate options and run the requested method for each value combination
     for comb_values in loop_iter:
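
The two iterator strategies combine the configured parameter ranges differently: `nested_for_loops` walks the full cross product, while `tuple_loops` pairs the i-th values and stops at the shortest list, as the module docstring above notes:

    import itertools
    import six

    rate = range(10, 31, 10)   # 10, 20, 30
    size = range(64, 129, 64)  # 64, 128

    print(list(itertools.product(rate, size)))
    # [(10, 64), (10, 128), (20, 64), (20, 128), (30, 64), (30, 128)]
    print(list(six.moves.zip(rate, size)))
    # [(10, 64), (20, 128)]  -- stops at the shorter list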
index 8f3f75f..0e02927 100755 (executable)
@@ -16,6 +16,7 @@
 # yardstick comment: this is a modified copy of
 # rally/rally/benchmark/runners/base.py
 
+from __future__ import absolute_import
 import importlib
 import logging
 import multiprocessing
index 1412c0c..89cac7d 100644 (file)
@@ -19,6 +19,7 @@
 '''A runner that runs a specific time before it returns
 '''
 
+from __future__ import absolute_import
 import os
 import multiprocessing
 import logging
index 3a839b6..930f883 100644 (file)
@@ -19,6 +19,7 @@
 '''A runner that runs a configurable number of times before it returns
 '''
 
+from __future__ import absolute_import
 import os
 import multiprocessing
 import logging
index 3b06e2a..e6abeab 100644 (file)
@@ -20,6 +20,7 @@
 The input value in the sequence is specified in a list in the input file.
 '''
 
+from __future__ import absolute_import
 import os
 import multiprocessing
 import logging
index 38f57d4..28c338d 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
 LOG = logging.getLogger(__name__)
index e88fed6..7eb93a8 100644 (file)
@@ -6,11 +6,14 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
-import traceback
 import subprocess
+import traceback
+
 import yardstick.ssh as ssh
-from baseattacker import BaseAttacker
+from yardstick.benchmark.scenarios.availability.attacker.baseattacker import \
+    BaseAttacker
 
 LOG = logging.getLogger(__name__)
 
index 595067a..38a9668 100644 (file)
@@ -6,11 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
-from baseattacker import BaseAttacker
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios.availability import util
+from yardstick.benchmark.scenarios.availability.attacker.baseattacker import \
+    BaseAttacker
 
 LOG = logging.getLogger(__name__)
 
@@ -40,7 +42,7 @@ class GeneralAttacker(BaseAttacker):
             str = util.buildshellparams(actionParameter)
             LOG.debug("inject parameter is: {0}".format(actionParameter))
             LOG.debug("inject parameter values are: {0}"
-                      .format(actionParameter.values()))
+                      .format(list(actionParameter.values())))
             l = list(item for item in actionParameter.values())
             self.action_param = str.format(*l)
 
@@ -49,7 +51,7 @@ class GeneralAttacker(BaseAttacker):
             str = util.buildshellparams(rollbackParameter)
             LOG.debug("recover parameter is: {0}".format(rollbackParameter))
             LOG.debug("recover parameter values are: {0}".
-                      format(rollbackParameter.values()))
+                      format(list(rollbackParameter.values())))
             l = list(item for item in rollbackParameter.values())
             self.rollback_param = str.format(*l)
 
index 1d190a1..521c579 100644 (file)
@@ -6,10 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
-from baseattacker import BaseAttacker
 import yardstick.ssh as ssh
+from yardstick.benchmark.scenarios.availability.attacker.baseattacker import \
+    BaseAttacker
 
 LOG = logging.getLogger(__name__)
 
index f96e577..f5f74f2 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import pkg_resources
 import yaml
 import logging
index 104c683..76fcc0e 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
 from yardstick.benchmark.scenarios.availability.monitor import basemonitor
index 38d1c4e..a11966a 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import pkg_resources
 import logging
 import multiprocessing
@@ -23,6 +24,7 @@ monitor_conf_path = pkg_resources.resource_filename(
 
 class MonitorMgr(object):
     """docstring for MonitorMgr"""
+
     def __init__(self):
         self._monitor_list = []
 
@@ -130,7 +132,7 @@ class BaseMonitor(multiprocessing.Process):
         total_time = end_time - begin_time
 
         self._queue.put({"total_time": total_time,
-                         "outage_time": last_outage-first_outage,
+                         "outage_time": last_outage - first_outage,
                          "total_count": total_count,
                          "outage_count": outage_count})
 
index cd33e61..6ddb73e 100644 (file)
@@ -6,11 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import subprocess
 import traceback
+
 import yardstick.ssh as ssh
-import basemonitor as basemonitor
+from yardstick.benchmark.scenarios.availability.monitor import basemonitor
 
 LOG = logging.getLogger(__name__)
 
index 461a2de..78a6031 100644 (file)
@@ -6,10 +6,11 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import yardstick.ssh as ssh
 
-import basemonitor as basemonitor
+from yardstick.benchmark.scenarios.availability.monitor import basemonitor
 from yardstick.benchmark.scenarios.availability.util import buildshellparams
 
 
index 5f492ad..10b398e 100644 (file)
@@ -6,10 +6,11 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import yardstick.ssh as ssh
 
-import basemonitor as basemonitor
+from yardstick.benchmark.scenarios.availability.monitor import basemonitor
 
 LOG = logging.getLogger(__name__)
 
index 80efd1b..709884b 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import pkg_resources
 import yaml
 import logging
index c82df83..42d70f4 100644 (file)
@@ -6,8 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
-from baseoperation import BaseOperation
+
+from yardstick.benchmark.scenarios.availability.operation.baseoperation \
+    import \
+    BaseOperation
+
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios.availability.util import buildshellparams
 
index a24f26e..70e0040 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import pkg_resources
 import yaml
 import logging
@@ -46,7 +47,7 @@ class ResultCheckerMgr(object):
     def verify(self):
         result = True
         for obj in self._result_checker_list:
-                result &= obj.success
+            result &= obj.success
         return result
 
 
index 275aff0..75c433a 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
-from baseresultchecker import BaseResultChecker
+
+from yardstick.benchmark.scenarios.availability.result_checker \
+    .baseresultchecker import \
+    BaseResultChecker
 from yardstick.benchmark.scenarios.availability import Condition
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios.availability.util import buildshellparams
index b064c67..2d7ce66 100644 (file)
@@ -6,8 +6,8 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
-import traceback
 
 from yardstick.benchmark.scenarios import base
 from yardstick.benchmark.scenarios.availability.director import Director
@@ -34,8 +34,8 @@ class ScenarioGeneral(base.Scenario):
         orderedSteps = sorted(steps, key=lambda x: x['index'])
         for step in orderedSteps:
             LOG.debug(
-                "\033[94m running step: {0} .... \033[0m"
-                .format(orderedSteps.index(step)+1))
+                "\033[94m running step: %s .... \033[0m",
+                orderedSteps.index(step) + 1)
             try:
                 actionPlayer = self.director.createActionPlayer(
                     step['actionType'], step['actionKey'])
@@ -44,9 +44,8 @@ class ScenarioGeneral(base.Scenario):
                     step['actionType'], step['actionKey'])
                 if actionRollbacker:
                     self.director.executionSteps.append(actionRollbacker)
-            except Exception, e:
-                LOG.debug(e.message)
-                traceback.print_exc()
+            except Exception:
+                LOG.exception("Exception")
                 LOG.debug(
                     "\033[91m exception when running step: {0} .... \033[0m"
                     .format(orderedSteps.index(step)))
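`except Exception, e` is Python 2-only syntax (a `SyntaxError` on Python 3), and `e.message` is gone there as well; `LOG.exception()` records the message plus the full traceback in one call, which is why the `traceback` import could be dropped. A minimal sketch:

    import logging

    logging.basicConfig()
    LOG = logging.getLogger(__name__)

    try:
        raise RuntimeError("step failed")
    except Exception:                # "except Exception, e" breaks on Py3
        LOG.exception("Exception")   # message plus full traceback, no traceback module
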
index 46a197c..b981c8c 100755 (executable)
@@ -6,6 +6,8 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import print_function
+from __future__ import absolute_import
 import logging
 from yardstick.benchmark.scenarios import base
 from yardstick.benchmark.scenarios.availability.monitor import basemonitor
@@ -109,15 +111,16 @@ def _test():    # pragma: no cover
     sla = {"outage_time": 5}
     args = {"options": options, "sla": sla}
 
-    print "create instance"
+    print("create instance")
     terstInstance = ServiceHA(args, ctx)
 
     terstInstance.setup()
     result = {}
     terstInstance.run(result)
-    print result
+    print(result)
 
     terstInstance.teardown()
 
+
 if __name__ == '__main__':    # pragma: no cover
     _test()
index 33efbcb..5f5c07d 100644 (file)
@@ -19,6 +19,7 @@
 """ Scenario base class
 """
 
+from __future__ import absolute_import
 import yardstick.common.utils as utils
 
 
index 20786ff..0f60d46 100644 (file)
@@ -9,12 +9,14 @@
 
 """cache hit/miss ratio and usage statistics"""
 
+from __future__ import absolute_import
 import pkg_resources
 import logging
 import re
 import yardstick.ssh as ssh
 
 from yardstick.benchmark.scenarios import base
+from six.moves import zip
 
 LOG = logging.getLogger(__name__)
 
@@ -120,7 +122,7 @@ class CACHEstat(base.Scenario):
                 ite += 1
                 values = line[:]
                 if values and len(values) == len(fields):
-                    cachestat[cache] = dict(zip(fields, values))
+                    cachestat[cache] = dict(list(zip(fields, values)))
 
         for entry in cachestat:
             for item in average:
index 7f0c58d..9d518f7 100644 (file)
@@ -6,9 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -66,4 +69,4 @@ class ComputeCapacity(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
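`oslo_serialization.jsonutils` is a thin wrapper over the stdlib `json` module that OpenStack projects standardize on, so `jsonutils.loads()` is a drop-in replacement here. A quick illustration with invented capacity output:

    from oslo_serialization import jsonutils

    stdout = '{"cpus": 2, "memory": 4096}'   # made-up script output
    result = {}
    result.update(jsonutils.loads(stdout))   # drop-in for json.loads(stdout)
    print(result["cpus"])                    # 2
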
index 9d71038..121d5a7 100644 (file)
@@ -9,13 +9,16 @@
 
 """Processor statistics and system load."""
 
+from __future__ import absolute_import
+
 import logging
-import time
 import re
-import yardstick.ssh as ssh
+import time
 
-from yardstick.benchmark.scenarios import base
+from six.moves import map, zip
 
+import yardstick.ssh as ssh
+from yardstick.benchmark.scenarios import base
 
 LOG = logging.getLogger(__name__)
 
@@ -145,7 +148,7 @@ class CPULoad(base.Scenario):
                     cpu = 'cpu' if line[0] == 'all' else 'cpu' + line[0]
                     values = line[1:]
                     if values and len(values) == len(fields):
-                        temp_dict = dict(zip(fields, values))
+                        temp_dict = dict(list(zip(fields, values)))
                         if cpu not in maximum:
                             maximum[cpu] = temp_dict
                         else:
@@ -177,7 +180,7 @@ class CPULoad(base.Scenario):
                     cpu = 'cpu' if line[0] == 'all' else 'cpu' + line[0]
                     values = line[1:]
                     if values and len(values) == len(fields):
-                        average[cpu] = dict(zip(fields, values))
+                        average[cpu] = dict(list(zip(fields, values)))
                     else:
                         raise RuntimeError("mpstat average: parse error",
                                            fields, line)
@@ -210,9 +213,9 @@ class CPULoad(base.Scenario):
 
             cpu = cur_list[0]
 
-            cur_stats = map(int, cur_list[1:])
+            cur_stats = list(map(int, cur_list[1:]))
             if self.interval > 0:
-                prev_stats = map(int, prev_list[1:])
+                prev_stats = list(map(int, prev_list[1:]))
             else:
                 prev_stats = [0] * len(cur_stats)
 
@@ -236,9 +239,9 @@ class CPULoad(base.Scenario):
                 else:
                     return "%.2f" % (100.0 * (x - y) / samples)
 
-            load = map(_percent, cur_stats, prev_stats)
+            load = list(map(_percent, cur_stats, prev_stats))
 
-            mpstat[cpu] = dict(zip(fields, load))
+            mpstat[cpu] = dict(list(zip(fields, load)))
 
         return {'mpstat': mpstat}
 
@@ -278,7 +281,7 @@ class CPULoad(base.Scenario):
 #     p = CPULoad(args, ctx)
 #     p.run(result)
 #     import json
-#     print json.dumps(result)
+#     print(oslo_serialization.jsonutils.dump_as_bytes(result))
 
 # if __name__ == '__main__':
 #     _test()
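Python 3's `map()` and `zip()` return one-shot iterators, while this code indexes and reuses the stats, hence the `list()` materialization; `from six.moves import map, zip` keeps the lazy variants on Python 2 as well. (Strictly, `dict(zip(...))` would work without the extra `list()`, since `dict()` accepts any iterable of pairs.) A condensed sketch with invented mpstat values:

    from six.moves import map, zip

    fields = ['%usr', '%sys', '%idle']
    cur_stats = list(map(int, ['15', '5', '80']))   # materialized: reused below
    prev_stats = [0] * len(cur_stats)

    load = list(map(lambda x, y: x - y, cur_stats, prev_stats))
    mpstat = {'cpu0': dict(zip(fields, load))}
    print(mpstat['cpu0']['%idle'])                  # 80
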
index 568e6e7..76bafff 100644 (file)
@@ -6,12 +6,16 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+import os
 import re
 import time
-import os
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -183,7 +187,7 @@ class Cyclictest(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if "sla" in self.scenario_cfg:
             sla_error = ""
@@ -236,7 +240,8 @@ def _test():    # pragma: no cover
 
     cyclictest = Cyclictest(args, ctx)
     cyclictest.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':    # pragma: no cover
     _test()
index 518840c..6a17ae8 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -130,9 +134,10 @@ class Lmbench(base.Scenario):
             raise RuntimeError(stderr)
 
         if test_type == 'latency':
-            result.update({"latencies": json.loads(stdout)})
+            result.update(
+                {"latencies": jsonutils.loads(stdout)})
         else:
-            result.update(json.loads(stdout))
+            result.update(jsonutils.loads(stdout))
 
         if "sla" in self.scenario_cfg:
             sla_error = ""
@@ -185,7 +190,8 @@ def _test():
 
     p = Lmbench(args, ctx)
     p.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
index e1ba93d..35528d4 100644 (file)
@@ -9,10 +9,12 @@
 
 """Memory load and statistics."""
 
+from __future__ import absolute_import
 import logging
 import yardstick.ssh as ssh
 
 from yardstick.benchmark.scenarios import base
+from six.moves import zip
 
 LOG = logging.getLogger(__name__)
 
@@ -88,7 +90,7 @@ class MEMLoad(base.Scenario):
                 ite += 1
                 values = line[1:]
                 if values and len(values) == len(fields):
-                    free[memory] = dict(zip(fields, values))
+                    free[memory] = dict(list(zip(fields, values)))
 
         for entry in free:
             for item in average:
index 8f1a4d6..ae49906 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -100,7 +104,7 @@ class Perf(base.Scenario):
         if status:
             raise RuntimeError(stdout)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if "sla" in self.scenario_cfg:
             metric = self.scenario_cfg['sla']['metric']
@@ -140,7 +144,8 @@ def _test():
 
     p = Perf(args, ctx)
     p.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
index e7ec91c..c9d0259 100644 (file)
@@ -6,8 +6,11 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
 import logging
-import json
+
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -53,4 +56,4 @@ class PluginTest(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
index db70af9..4330202 100644 (file)
@@ -6,9 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -131,7 +134,7 @@ class Ramspeed(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if "sla" in self.scenario_cfg:
             sla_error = ""
index b22be29..4a2eb97 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -115,7 +119,7 @@ class Unixbench(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if "sla" in self.scenario_cfg:
             sla_error = ""
@@ -152,7 +156,7 @@ def _test():  # pragma: no cover
 
     p = Unixbench(args, ctx)
     p.run(result)
-    print result
+    print(result)
 
 
 if __name__ == '__main__':
index de6742c..95146e0 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
 from yardstick.benchmark.scenarios import base
index 13fa015..b8ec9ac 100644 (file)
 # iperf3 scenario
 # iperf3 homepage at: http://software.es.net/iperf/
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
 import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -89,7 +93,7 @@ For more info see http://software.es.net/iperf
         self.host.close()
         status, stdout, stderr = self.target.execute("pkill iperf3")
         if status:
-            LOG.warn(stderr)
+            LOG.warning(stderr)
         self.target.close()
 
     def run(self, result):
@@ -138,7 +142,8 @@ For more info see http://software.es.net/iperf
         # Note: convert all ints to floats in order to avoid
         # schema conflicts in influxdb. We probably should add
         # a format func in the future.
-        result.update(json.loads(stdout, parse_int=float))
+        result.update(
+            jsonutils.loads(stdout, parse_int=float))
 
         if "sla" in self.scenario_cfg:
             sla_iperf = self.scenario_cfg["sla"]
@@ -188,7 +193,8 @@ def _test():
 
     p = Iperf(args, ctx)
     p.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
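`parse_int` is a stdlib `json.loads()` hook that `jsonutils.loads()` forwards unchanged: every integer literal in the document is passed through the given callable, so `parse_int=float` guarantees InfluxDB only ever sees float fields. Illustration with invented iperf3 output:

    from oslo_serialization import jsonutils

    stdout = '{"bytes": 1048576, "seconds": 10.0}'   # invented iperf3 JSON
    result = jsonutils.loads(stdout, parse_int=float)
    print(result["bytes"])                           # 1048576.0, a float
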
index 28f5bea..80dbed3 100755 (executable)
@@ -7,9 +7,13 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 # bulk data test and req/rsp test are supported
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -129,7 +133,7 @@ class Netperf(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if result['mean_latency'] == '':
             raise RuntimeError(stdout)
@@ -175,7 +179,7 @@ def _test():
 
     netperf = Netperf(args, ctx)
     netperf.run(result)
-    print result
+    print(result)
 
 
 if __name__ == '__main__':
index a76982b..0cf52b8 100755 (executable)
@@ -7,9 +7,13 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 # bulk data test and req/rsp test are supported
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -152,7 +156,7 @@ class NetperfNode(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if result['mean_latency'] == '':
             raise RuntimeError(stdout)
@@ -200,7 +204,8 @@ def _test():    # pragma: no cover
 
     netperf = NetperfNode(args, ctx)
     netperf.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
index 1ea92cc..1ba6f1e 100644 (file)
@@ -6,11 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import re
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
+from six.moves import zip
 
 LOG = logging.getLogger(__name__)
 
@@ -121,7 +123,7 @@ class NetUtilization(base.Scenario):
                     values = line[1:]
 
                     if values and len(values) == len(fields):
-                        temp_dict = dict(zip(fields, values))
+                        temp_dict = dict(list(zip(fields, values)))
                         if net_interface not in maximum:
                             maximum[net_interface] = temp_dict
                         else:
@@ -158,7 +160,8 @@ class NetUtilization(base.Scenario):
                     net_interface = line[0]
                     values = line[1:]
                     if values and len(values) == len(fields):
-                        average[net_interface] = dict(zip(fields, values))
+                        average[net_interface] = dict(
+                            list(zip(fields, values)))
                     else:
                         raise RuntimeError("network_utilization average: \
                                            parse error", fields, line)
index 250f7ea..e7ce835 100644 (file)
@@ -6,9 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -67,4 +70,4 @@ class NetworkCapacity(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
index 6e49a14..eb173f1 100644 (file)
@@ -9,6 +9,8 @@
 
 # ping scenario
 
+from __future__ import print_function
+from __future__ import absolute_import
 import pkg_resources
 import logging
 
@@ -122,7 +124,8 @@ def _test():    # pragma: no cover
 
     p = Ping(args, ctx)
     p.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':    # pragma: no cover
     _test()
index f4d23ce..dd42722 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import pkg_resources
 import logging
 
index e2df706..69663ec 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -130,7 +134,7 @@ class Pktgen(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         result['packets_received'] = self._iptables_get_result()
 
@@ -170,7 +174,7 @@ def _test():
 
     p = Pktgen(args, ctx)
     p.run(result)
-    print result
+    print(result)
 
 
 if __name__ == '__main__':
index 503ea97..2bdb91a 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import pkg_resources
 import logging
 import time
index 1bd99b9..87fea4f 100644 (file)
@@ -1,9 +1,14 @@
-import pkg_resources
+from __future__ import absolute_import
+
 import logging
 import subprocess
-import sfc_openstack
+
+import pkg_resources
+from six.moves import range
+
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
+from yardstick.benchmark.scenarios.networking import sfc_openstack
 
 LOG = logging.getLogger(__name__)
 
@@ -199,7 +204,7 @@ class Sfc(base.Scenario):  # pragma: no cover
     sfc = Sfc(scenario_cfg, context_cfg)
     sfc.setup()
     sfc.run(result)
-    print result
+    print(result)
     sfc.teardown()
 
 if __name__ == '__main__':  # pragma: no cover
index d1d45d8..caaf100 100644 (file)
@@ -1,3 +1,5 @@
+from __future__ import print_function
+from __future__ import absolute_import
 import os
 from novaclient import client as novaclient
 from neutronclient.v2_0 import client as neutronclient
@@ -40,8 +42,8 @@ def get_credentials(service):  # pragma: no cover
                       "ca_file": cacert})
         creds.update({"insecure": "True", "https_insecure": "True"})
         if not os.path.isfile(cacert):
-            print ("WARNING: The 'OS_CACERT' environment variable is " +
-                   "set to %s but the file does not exist." % cacert)
+            print(("WARNING: The 'OS_CACERT' environment variable is " +
+                   "set to %s but the file does not exist." % cacert))
     return creds
 
 
@@ -49,8 +51,8 @@ def get_instances(nova_client):  # pragma: no cover
     try:
         instances = nova_client.servers.list(search_opts={'all_tenants': 1})
         return instances
-    except Exception, e:
-        print "Error [get_instances(nova_client)]:", e
+    except Exception as e:
+        print("Error [get_instances(nova_client)]:", e)
         return None
 
 
@@ -62,8 +64,8 @@ def get_SFs(nova_client):  # pragma: no cover
             if "sfc_test" not in instance.name:
                 SFs.append(instance)
         return SFs
-    except Exception, e:
-        print "Error [get_SFs(nova_client)]:", e
+    except Exception as e:
+        print("Error [get_SFs(nova_client)]:", e)
         return None
 
 
@@ -83,8 +85,8 @@ def create_floating_ips(neutron_client):  # pragma: no cover
             ip_json = neutron_client.create_floatingip({'floatingip': props})
             fip_addr = ip_json['floatingip']['floating_ip_address']
             ips.append(fip_addr)
-    except Exception, e:
-        print "Error [create_floating_ip(neutron_client)]:", e
+    except Exception as e:
+        print("Error [create_floating_ip(neutron_client)]:", e)
         return None
     return ips
 
@@ -96,9 +98,9 @@ def floatIPtoSFs(SFs, floatips):  # pragma: no cover
             SF.add_floating_ip(floatips[i])
             i = i + 1
         return True
-    except Exception, e:
-        print ("Error [add_floating_ip(nova_client, '%s', '%s')]:" %
-               (SF, floatips[i]), e)
+    except Exception as e:
+        print(("Error [add_floating_ip(nova_client, '%s', '%s')]:" %
+               (SF, floatips[i]), e))
         return False
 
 
@@ -113,5 +115,6 @@ def get_an_IP():  # pragma: no cover
     floatIPtoSFs(SFs, floatips)
     return floatips
 
+
 if __name__ == '__main__':  # pragma: no cover
     get_an_IP()
index 4f4ef21..9d6db7c 100644 (file)
@@ -13,6 +13,7 @@
 # limitations under the License.
 """ Vsperf specific scenario definition """
 
+from __future__ import absolute_import
 import logging
 import os
 import subprocess
@@ -211,7 +212,7 @@ class Vsperf(base.Scenario):
 
         # convert result.csv to JSON format
         reader = csv.DictReader(stdout.split('\r\n'))
-        result.update(reader.next())
+        result.update(next(reader))
 
         # sla check; go through all defined SLAs and check if values measured
         # by VSPERF are higher than those defined by SLAs
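`reader.next()` only exists on Python 2 (Python 3 renamed the method to `__next__()`); the `next()` built-in works on both. For example, with a made-up VSPERF result row:

    import csv

    stdout = "tx_throughput,rx_throughput\r\n9500000,9499840"  # invented result.csv
    reader = csv.DictReader(stdout.split('\r\n'))

    result = {}
    result.update(next(reader))          # portable; reader.next() is Py2-only
    print(result["tx_throughput"])       # 9500000
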
index bec23fc..bf42d9a 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 import os
 
@@ -81,7 +82,7 @@ class VtcInstantiationValidation(base.Scenario):
                 heat_template_parameters,
                 deployment_configuration,
                 openstack_credentials)
-        except Exception as e:
-            LOG.info('Exception: {}'.format(e.message))
-        LOG.info('Got output: {}'.format(res))
+        except Exception:
+            LOG.exception('Exception')
+        LOG.info('Got output: %s', res)
         result.update(res)
index 8d9bf09..fb6e762 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 import os
 
@@ -92,7 +93,7 @@ class VtcInstantiationValidationNoisy(base.Scenario):
                 heat_template_parameters,
                 deployment_configuration,
                 openstack_credentials)
-        except Exception as e:
-            LOG.info('Exception: {}'.format(e.message))
-        LOG.info('Got output: {}'.format(res))
+        except Exception:
+            LOG.exception('Exception')
+        LOG.info('Got output: %s', res)
         result.update(res)
index ff20279..0754d37 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 import os
 
@@ -81,7 +82,7 @@ class VtcThroughput(base.Scenario):
                 heat_template_parameters,
                 deployment_configuration,
                 openstack_credentials)
-        except Exception as e:
-            LOG.info('Exception: {}'.format(e.message))
-        LOG.info('Got output: {}'.format(res))
+        except Exception:
+            LOG.exception("Exception")
+        LOG.info('Got output: %s', res)
         result.update(res)
index f032267..552ef80 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 import os
 
@@ -91,7 +92,7 @@ class VtcThroughputNoisy(base.Scenario):
                 heat_template_parameters,
                 deployment_configuration,
                 openstack_credentials)
-        except Exception as e:
-            LOG.info('Exception: {}'.format(e.message))
-        LOG.info('Got output: {}'.format(res))
+        except Exception:
+            LOG.exception('Exception')
+        LOG.info('Got output: %s', res)
         result.update(res)
index 006258d..6d39733 100644 (file)
@@ -6,6 +6,8 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import print_function
+from __future__ import absolute_import
 import pkg_resources
 import logging
 import subprocess
@@ -58,10 +60,12 @@ class Parser(base.Scenario):
         cmd1 = "%s %s %s" % (self.parser_script, yangfile, toscafile)
         cmd2 = "chmod 777 %s" % (self.parser_script)
         subprocess.call(cmd2, shell=True)
-        output = subprocess.call(cmd1, shell=True, stdout=subprocess.PIPE)
-        print "yangtotosca finished"
+        p = subprocess.Popen(cmd1, shell=True, stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        p.communicate()
+        print("yangtotosca finished")
 
-        result['yangtotosca'] = "success" if output == 0 else "fail"
+        result['yangtotosca'] = "success" if p.returncode == 0 else "fail"
 
     def teardown(self):
         ''' for scenario teardown remove parser and pyang '''
@@ -76,5 +80,6 @@ def _test():
     '''internal test function'''
     pass
 
+
 if __name__ == '__main__':
     _test()
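The old line was genuinely broken: `subprocess.call()` returns the exit status, and the stdlib documents that passing `stdout=PIPE` to it can deadlock once the pipe buffer fills. `Popen` plus `communicate()` drains both pipes and then exposes the real `returncode`. A sketch with a stand-in command instead of the yang-to-tosca script:

    from __future__ import print_function
    import subprocess

    cmd1 = "echo yangtotosca"            # stand-in for the parser invocation

    p = subprocess.Popen(cmd1, shell=True, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    p.communicate()                      # drain both pipes so the child can exit

    print("success" if p.returncode == 0 else "fail")   # success
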
index 4e00423..2a8738e 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -114,7 +118,7 @@ class Fio(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        raw_data = json.loads(stdout)
+        raw_data = jsonutils.loads(stdout)
 
         # The bandwidth unit is KB/s, and latency unit is us
         if rw in ["read", "randread", "rw", "randrw"]:
@@ -175,7 +179,8 @@ def _test():
 
     fio = Fio(args, ctx)
     fio.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
index bf5bc28..c437f22 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
+from six.moves import range
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -131,4 +135,4 @@ class StorageCapacity(base.Scenario):
             if status:
                 raise RuntimeError(stderr)
 
-            result.update(json.loads(stdout))
+            result.update(jsonutils.loads(stdout))
index 72ceff7..6ea0351 100644 (file)
@@ -6,11 +6,14 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
 import logging
-import json
-import requests
 import time
 
+import requests
+from oslo_serialization import jsonutils
+
 from yardstick.benchmark.scenarios import base
 
 LOG = logging.getLogger(__name__)
@@ -73,7 +76,8 @@ class StorPerf(base.Scenario):
         setup_query = requests.get('http://%s:5000/api/v1.0/configurations'
                                    % self.target)
 
-        setup_query_content = json.loads(setup_query.content)
+        setup_query_content = jsonutils.loads(
+            setup_query.content)
         if setup_query_content["stack_created"]:
             self.setup_done = True
             LOG.debug("stack_created: %s",
@@ -96,7 +100,8 @@ class StorPerf(base.Scenario):
         setup_res = requests.post('http://%s:5000/api/v1.0/configurations'
                                   % self.target, json=env_args)
 
-        setup_res_content = json.loads(setup_res.content)
+        setup_res_content = jsonutils.loads(
+            setup_res.content)
 
         if setup_res.status_code != 200:
             raise RuntimeError("Failed to create a stack, error message:",
@@ -114,7 +119,8 @@ class StorPerf(base.Scenario):
         report_res = requests.get('http://{}:5000/api/v1.0/jobs'.format
                                   (self.target), params={'id': job_id})
 
-        report_res_content = json.loads(report_res.content)
+        report_res_content = jsonutils.loads(
+            report_res.content)
 
         if report_res.status_code != 200:
             raise RuntimeError("Failed to fetch report, error message:",
@@ -154,7 +160,7 @@ class StorPerf(base.Scenario):
         job_res = requests.post('http://%s:5000/api/v1.0/jobs' % self.target,
                                 json=job_args)
 
-        job_res_content = json.loads(job_res.content)
+        job_res_content = jsonutils.loads(job_res.content)
 
         if job_res.status_code != 200:
             raise RuntimeError("Failed to start a job, error message:",
@@ -171,7 +177,8 @@ class StorPerf(base.Scenario):
                                             self.target)
 
             if terminate_res.status_code != 200:
-                terminate_res_content = json.loads(terminate_res.content)
+                terminate_res_content = jsonutils.loads(
+                    terminate_res.content)
                 raise RuntimeError("Failed to start a job, error message:",
                                    terminate_res_content["message"])
 
@@ -190,7 +197,8 @@ class StorPerf(base.Scenario):
 
             result_res = requests.get('http://%s:5000/api/v1.0/jobs?id=%s' %
                                       (self.target, job_id))
-            result_res_content = json.loads(result_res.content)
+            result_res_content = jsonutils.loads(
+                result_res.content)
 
             result.update(result_res_content)
 
@@ -200,7 +208,8 @@ class StorPerf(base.Scenario):
                                        configurations' % self.target)
 
         if teardown_res.status_code == 400:
-            teardown_res_content = json.loads(teardown_res.content)
+            teardown_res_content = jsonutils.loads(
+                teardown_res.content)
             raise RuntimeError("Failed to reset environment, error message:",
                                teardown_res_content['message'])
 
index df891e3..6b7d657 100644 (file)
@@ -6,10 +6,11 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import print_function
 
 
 def print_hbar(barlen):
     '''print to stdout a horizontal bar'''
-    print("+"),
-    print("-" * barlen),
+    print(("+"), end=' ')
+    print(("-" * barlen), end=' ')
     print("+")
index beaa187..05bf8f0 100644 (file)
@@ -11,6 +11,7 @@
 Command-line interface to yardstick
 '''
 
+from __future__ import absolute_import
 import logging
 import os
 import sys
index e69de29..5c53567 100644 (file)
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+from yardstick.benchmark.core import Param
+
+
+def change_osloobj_to_paras(args):
+    param = Param({})
+    for k in param.__dict__:
+        if hasattr(args, k):
+            setattr(param, k, getattr(args, k))
+    return param
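The new `change_osloobj_to_paras()` helper decouples the CLI layer from the relocated `yardstick.benchmark.core` classes: it copies onto a fresh `Param` only the attributes a default `Param({})` declares, ignoring everything else on the parsed args object. A self-contained sketch with stand-in classes (the real `Param` declares more fields):

    # Stand-ins: the real Param lives in yardstick.benchmark.core, and the
    # real args object is the parsed oslo/argparse namespace.
    class Param(object):
        def __init__(self, kwargs):
            self.inputfile = kwargs.get('inputfile')
            self.task_args = kwargs.get('task_args')


    class Args(object):
        inputfile = ['ping.yaml']
        unrelated = 'never copied'     # no matching Param attribute, so skipped


    def change_osloobj_to_paras(args):
        param = Param({})
        for k in param.__dict__:       # copy only the attributes Param declares
            if hasattr(args, k):
                setattr(param, k, getattr(args, k))
        return param


    print(change_osloobj_to_paras(Args).inputfile)   # ['ping.yaml']
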
index 098379a..dfcb637 100644 (file)
@@ -6,13 +6,17 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import logging
+from __future__ import absolute_import
+from __future__ import print_function
 
-from yardstick.common.httpClient import HttpClient
-from yardstick.common import constants
+import os
+import sys
+import time
+
+from six.moves import range
 
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
+from yardstick.common import constants as consts
+from yardstick.common.httpClient import HttpClient
 
 
 class EnvCommand(object):
@@ -20,20 +24,65 @@ class EnvCommand(object):
 
         Set of commands to prepare environment
     '''
+
     def do_influxdb(self, args):
-        url = constants.YARDSTICK_ENV_ACTION_API
         data = {'action': 'createInfluxDBContainer'}
-        HttpClient().post(url, data)
-        logger.debug('Now creating and configing influxdb')
+        task_id = self._start_async_task(data)
+
+        start = '* creating influxDB'
+        self._check_status(task_id, start)
 
     def do_grafana(self, args):
-        url = constants.YARDSTICK_ENV_ACTION_API
         data = {'action': 'createGrafanaContainer'}
-        HttpClient().post(url, data)
-        logger.debug('Now creating and configing grafana')
+        task_id = self._start_async_task(data)
+
+        start = '* creating grafana'
+        self._check_status(task_id, start)
 
     def do_prepare(self, args):
-        url = constants.YARDSTICK_ENV_ACTION_API
         data = {'action': 'prepareYardstickEnv'}
-        HttpClient().post(url, data)
-        logger.debug('Now preparing environment')
+        task_id = self._start_async_task(data)
+
+        start = '* preparing yardstick environment'
+        self._check_status(task_id, start)
+
+    def _start_async_task(self, data):
+        url = consts.ENV_ACTION_API
+        return HttpClient().post(url, data)['result']['task_id']
+
+    def _check_status(self, task_id, start):
+        self._print_status(start, '[]\r')
+        url = '{}?task_id={}'.format(consts.ASYNC_TASK_API, task_id)
+
+        CHECK_STATUS_RETRY = 20
+        CHECK_STATUS_DELAY = 5
+
+        for retry in range(CHECK_STATUS_RETRY):
+            response = HttpClient().get(url)
+            status = response['status']
+
+            if status:
+                break
+
+            # wait until the async task finishes
+            time.sleep(CHECK_STATUS_DELAY * (retry + 1))
+
+        switcher = {
+            0: 'Timeout',
+            1: 'Finished',
+            2: 'Error'
+        }
+        self._print_status(start, '[{}]'.format(switcher[status]))
+        if status == 2:
+            print(response['result'])
+            sys.stdout.flush()
+        return status
+
+    def _print_status(self, s, e):
+        try:
+            columns = int(os.popen('stty size', 'r').read().split()[1])
+            word = '{}{}{}'.format(s, ' ' * (columns - len(s) - len(e)), e)
+            sys.stdout.write(word)
+            sys.stdout.flush()
+        except IndexError:
+            pass
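The rewritten `env` subcommands no longer fire-and-forget: they POST the action, receive a `task_id`, and poll the async-task endpoint with a linearly growing delay until the status becomes 1 (finished) or 2 (error), defaulting to 0 (timeout) when the retries run out. The polling shape, reduced to a runnable sketch:

    import time

    CHECK_STATUS_RETRY = 20
    CHECK_STATUS_DELAY = 5


    def wait_for_task(get_status):
        """Poll until get_status() returns non-zero or the retries run out."""
        status = 0
        for retry in range(CHECK_STATUS_RETRY):
            status = get_status()
            if status:
                break
            time.sleep(CHECK_STATUS_DELAY * (retry + 1))   # 5s, 10s, 15s, ...
        return {0: 'Timeout', 1: 'Finished', 2: 'Error'}[status]


    print(wait_for_task(lambda: 1))   # Finished
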
index cf66125..c3e951e 100644 (file)
@@ -9,18 +9,12 @@
 
 """ Handler for yardstick command 'plugin' """
 
-import os
-import sys
-import yaml
-import time
-import logging
-import pkg_resources
-import yardstick.ssh as ssh
+from __future__ import print_function
 
+from __future__ import absolute_import
+from yardstick.benchmark.core.plugin import Plugin
 from yardstick.common.utils import cliargs
-from yardstick.common.task_template import TaskTemplate
-
-LOG = logging.getLogger(__name__)
+from yardstick.cmd.commands import change_osloobj_to_paras
 
 
 class PluginCommands(object):
@@ -33,184 +27,12 @@ class PluginCommands(object):
              nargs=1)
     def do_install(self, args):
         '''Install a plugin.'''
-
-        total_start_time = time.time()
-        parser = PluginParser(args.input_file[0])
-
-        plugins, deployment = parser.parse_plugin()
-        plugin_name = plugins.get("name")
-        print("Installing plugin: %s" % plugin_name)
-
-        LOG.info("Executing _install_setup()")
-        self._install_setup(plugin_name, deployment)
-
-        LOG.info("Executing _run()")
-        self._run(plugin_name)
-
-        total_end_time = time.time()
-        LOG.info("total finished in %d secs",
-                 total_end_time - total_start_time)
-
-        print("Done, exiting")
+        param = change_osloobj_to_paras(args)
+        Plugin().install(param)
 
     @cliargs("input_file", type=str, help="path to plugin configuration file",
              nargs=1)
     def do_remove(self, args):
         '''Remove a plugin.'''
-
-        total_start_time = time.time()
-        parser = PluginParser(args.input_file[0])
-
-        plugins, deployment = parser.parse_plugin()
-        plugin_name = plugins.get("name")
-        print("Removing plugin: %s" % plugin_name)
-
-        LOG.info("Executing _remove_setup()")
-        self._remove_setup(plugin_name, deployment)
-
-        LOG.info("Executing _run()")
-        self._run(plugin_name)
-
-        total_end_time = time.time()
-        LOG.info("total finished in %d secs",
-                 total_end_time - total_start_time)
-
-        print("Done, exiting")
-
-    def _install_setup(self, plugin_name, deployment):
-        '''Deployment environment setup'''
-        target_script = plugin_name + ".bash"
-        self.script = pkg_resources.resource_filename(
-            'yardstick.resources', 'scripts/install/' + target_script)
-
-        deployment_user = deployment.get("user")
-        deployment_ssh_port = deployment.get("ssh_port", ssh.DEFAULT_PORT)
-        deployment_ip = deployment.get("ip", None)
-        deployment_password = deployment.get("password", None)
-        deployment_key_filename = deployment.get("key_filename",
-                                                 "/root/.ssh/id_rsa")
-
-        if deployment_ip == "local":
-            installer_ip = os.environ.get("INSTALLER_IP", None)
-
-            if deployment_password is not None:
-                self._login_via_password(deployment_user, installer_ip,
-                                         deployment_password,
-                                         deployment_ssh_port)
-            else:
-                self._login_via_key(self, deployment_user, installer_ip,
-                                    deployment_key_filename,
-                                    deployment_ssh_port)
-        else:
-            if deployment_password is not None:
-                self._login_via_password(deployment_user, deployment_ip,
-                                         deployment_password,
-                                         deployment_ssh_port)
-            else:
-                self._login_via_key(self, deployment_user, deployment_ip,
-                                    deployment_key_filename,
-                                    deployment_ssh_port)
-        # copy script to host
-        cmd = "cat > ~/%s.sh" % plugin_name
-
-        LOG.info("copying script to host: %s", cmd)
-        self.client.run(cmd, stdin=open(self.script, 'rb'))
-
-    def _remove_setup(self, plugin_name, deployment):
-        '''Deployment environment setup'''
-        target_script = plugin_name + ".bash"
-        self.script = pkg_resources.resource_filename(
-            'yardstick.resources', 'scripts/remove/' + target_script)
-
-        deployment_user = deployment.get("user")
-        deployment_ssh_port = deployment.get("ssh_port", ssh.DEFAULT_PORT)
-        deployment_ip = deployment.get("ip", None)
-        deployment_password = deployment.get("password", None)
-        deployment_key_filename = deployment.get("key_filename",
-                                                 "/root/.ssh/id_rsa")
-
-        if deployment_ip == "local":
-            installer_ip = os.environ.get("INSTALLER_IP", None)
-
-            if deployment_password is not None:
-                self._login_via_password(deployment_user, installer_ip,
-                                         deployment_password,
-                                         deployment_ssh_port)
-            else:
-                self._login_via_key(self, deployment_user, installer_ip,
-                                    deployment_key_filename,
-                                    deployment_ssh_port)
-        else:
-            if deployment_password is not None:
-                self._login_via_password(deployment_user, deployment_ip,
-                                         deployment_password,
-                                         deployment_ssh_port)
-            else:
-                self._login_via_key(self, deployment_user, deployment_ip,
-                                    deployment_key_filename,
-                                    deployment_ssh_port)
-
-        # copy script to host
-        cmd = "cat > ~/%s.sh" % plugin_name
-
-        LOG.info("copying script to host: %s", cmd)
-        self.client.run(cmd, stdin=open(self.script, 'rb'))
-
-    def _login_via_password(self, user, ip, password, ssh_port):
-        LOG.info("Log in via pw, user:%s, host:%s", user, ip)
-        self.client = ssh.SSH(user, ip, password=password, port=ssh_port)
-        self.client.wait(timeout=600)
-
-    def _login_via_key(self, user, ip, key_filename, ssh_port):
-        LOG.info("Log in via key, user:%s, host:%s", user, ip)
-        self.client = ssh.SSH(user, ip, key_filename=key_filename,
-                              port=ssh_port)
-        self.client.wait(timeout=600)
-
-    def _run(self, plugin_name):
-        '''Run installation script '''
-        cmd = "sudo bash %s" % plugin_name + ".sh"
-
-        LOG.info("Executing command: %s", cmd)
-        status, stdout, stderr = self.client.execute(cmd)
-
-
-class PluginParser(object):
-    '''Parser for plugin configration files in yaml format'''
-
-    def __init__(self, path):
-        self.path = path
-
-    def parse_plugin(self):
-        '''parses the plugin file and return a plugins instance
-           and a deployment instance
-        '''
-
-        print "Parsing plugin config:", self.path
-
-        try:
-            kw = {}
-            with open(self.path) as f:
-                try:
-                    input_plugin = f.read()
-                    rendered_plugin = TaskTemplate.render(input_plugin, **kw)
-                except Exception as e:
-                    print(("Failed to render template:\n%(plugin)s\n%(err)s\n")
-                          % {"plugin": input_plugin, "err": e})
-                    raise e
-                print(("Input plugin is:\n%s\n") % rendered_plugin)
-
-                cfg = yaml.load(rendered_plugin)
-        except IOError as ioerror:
-            sys.exit(ioerror)
-
-        self._check_schema(cfg["schema"], "plugin")
-
-        return cfg["plugins"], cfg["deployment"]
-
-    def _check_schema(self, cfg_schema, schema_type):
-        '''Check if configration file is using the correct schema type'''
-
-        if cfg_schema != "yardstick:" + schema_type + ":0.1":
-            sys.exit("error: file %s has unknown schema %s" % (self.path,
-                                                               cfg_schema))
+        param = change_osloobj_to_paras(args)
+        Plugin().remove(param)
index 84bc3c6..02176ab 100644 (file)
@@ -9,9 +9,12 @@
 
 """ Handler for yardstick command 'runner' """
 
-from yardstick.benchmark.runners.base import Runner
+from __future__ import print_function
+
+from __future__ import absolute_import
+from yardstick.benchmark.core.runner import Runners
 from yardstick.common.utils import cliargs
-from yardstick.cmd import print_hbar
+from yardstick.cmd.commands import change_osloobj_to_paras
 
 
 class RunnerCommands(object):
@@ -22,17 +25,11 @@ class RunnerCommands(object):
 
     def do_list(self, args):
         '''List existing runner types'''
-        types = Runner.get_types()
-        print_hbar(78)
-        print("| %-16s | %-60s" % ("Type", "Description"))
-        print_hbar(78)
-        for rtype in types:
-            print "| %-16s | %-60s" % (rtype.__execution_type__,
-                                       rtype.__doc__.split("\n")[0])
-        print_hbar(78)
+        param = change_osloobj_to_paras(args)
+        Runners().list_all(param)
 
     @cliargs("type", type=str, help="runner type", nargs=1)
     def do_show(self, args):
         '''Show details of a specific runner type'''
-        rtype = Runner.get_cls(args.type[0])
-        print rtype.__doc__
+        param = change_osloobj_to_paras(args)
+        Runners().show(param)
index 00d46cf..5a6d04f 100644 (file)
@@ -9,9 +9,11 @@
 
 """ Handler for yardstick command 'scenario' """
 
-from yardstick.benchmark.scenarios.base import Scenario
+from __future__ import print_function
+from __future__ import absolute_import
+from yardstick.benchmark.core.scenario import Scenarios
 from yardstick.common.utils import cliargs
-from yardstick.cmd import print_hbar
+from yardstick.cmd.commands import change_osloobj_to_paras
 
 
 class ScenarioCommands(object):
@@ -22,17 +24,11 @@ class ScenarioCommands(object):
 
     def do_list(self, args):
         '''List existing scenario types'''
-        types = Scenario.get_types()
-        print_hbar(78)
-        print("| %-16s | %-60s" % ("Type", "Description"))
-        print_hbar(78)
-        for stype in types:
-            print("| %-16s | %-60s" % (stype.__scenario_type__,
-                                       stype.__doc__.split("\n")[0]))
-        print_hbar(78)
+        param = change_osloobj_to_paras(args)
+        Scenarios().list_all(param)
 
     @cliargs("type", type=str, help="runner type", nargs=1)
     def do_show(self, args):
         '''Show details of a specific scenario type'''
-        stype = Scenario.get_cls(args.type[0])
-        print stype.__doc__
+        param = change_osloobj_to_paras(args)
+        Scenarios().show(param)
index 9524778..fa82f07 100644 (file)
@@ -8,28 +8,14 @@
 ##############################################################################
 
 """ Handler for yardstick command 'task' """
+from __future__ import print_function
 
-import sys
-import os
-import yaml
-import atexit
-import ipaddress
-import time
-import logging
-import uuid
-import errno
-from itertools import ifilter
-
-from yardstick.benchmark.contexts.base import Context
-from yardstick.benchmark.runners import base as base_runner
-from yardstick.common.task_template import TaskTemplate
+from __future__ import absolute_import
+from yardstick.benchmark.core.task import Task
 from yardstick.common.utils import cliargs
-from yardstick.common.utils import source_env
-from yardstick.common import constants
+from yardstick.cmd.commands import change_osloobj_to_paras
 
 output_file_default = "/tmp/yardstick.out"
-test_cases_dir_default = "tests/opnfv/test_cases/"
-LOG = logging.getLogger(__name__)
 
 
 class TaskCommands(object):
@@ -55,447 +41,5 @@ class TaskCommands(object):
     @cliargs("--suite", help="process test suite file instead of a task file",
              action="store_true")
     def do_start(self, args, **kwargs):
-        '''Start a benchmark scenario.'''
-
-        atexit.register(atexit_handler)
-
-        self.task_id = kwargs.get('task_id', str(uuid.uuid4()))
-
-        check_environment()
-
-        total_start_time = time.time()
-        parser = TaskParser(args.inputfile[0])
-
-        if args.suite:
-            # 1.parse suite, return suite_params info
-            task_files, task_args, task_args_fnames = \
-                parser.parse_suite()
-        else:
-            task_files = [parser.path]
-            task_args = [args.task_args]
-            task_args_fnames = [args.task_args_file]
-
-        LOG.info("\ntask_files:%s, \ntask_args:%s, \ntask_args_fnames:%s",
-                 task_files, task_args, task_args_fnames)
-
-        if args.parse_only:
-            sys.exit(0)
-
-        if os.path.isfile(args.output_file):
-            os.remove(args.output_file)
-        # parse task_files
-        for i in range(0, len(task_files)):
-            one_task_start_time = time.time()
-            parser.path = task_files[i]
-            scenarios, run_in_parallel, meet_precondition = parser.parse_task(
-                 self.task_id, task_args[i], task_args_fnames[i])
-
-            if not meet_precondition:
-                LOG.info("meet_precondition is %s, please check envrionment",
-                         meet_precondition)
-                continue
-
-            self._run(scenarios, run_in_parallel, args.output_file)
-
-            if args.keep_deploy:
-                # keep deployment, forget about stack
-                # (hide it for exit handler)
-                Context.list = []
-            else:
-                for context in Context.list:
-                    context.undeploy()
-                Context.list = []
-            one_task_end_time = time.time()
-            LOG.info("task %s finished in %d secs", task_files[i],
-                     one_task_end_time - one_task_start_time)
-
-        total_end_time = time.time()
-        LOG.info("total finished in %d secs",
-                 total_end_time - total_start_time)
-
-        print "Done, exiting"
-
-    def _run(self, scenarios, run_in_parallel, output_file):
-        '''Deploys context and calls runners'''
-        for context in Context.list:
-            context.deploy()
-
-        background_runners = []
-
-        # Start all background scenarios
-        for scenario in ifilter(_is_background_scenario, scenarios):
-            scenario["runner"] = dict(type="Duration", duration=1000000000)
-            runner = run_one_scenario(scenario, output_file)
-            background_runners.append(runner)
-
-        runners = []
-        if run_in_parallel:
-            for scenario in scenarios:
-                if not _is_background_scenario(scenario):
-                    runner = run_one_scenario(scenario, output_file)
-                    runners.append(runner)
-
-            # Wait for runners to finish
-            for runner in runners:
-                runner_join(runner)
-                print "Runner ended, output in", output_file
-        else:
-            # run serially
-            for scenario in scenarios:
-                if not _is_background_scenario(scenario):
-                    runner = run_one_scenario(scenario, output_file)
-                    runner_join(runner)
-                    print "Runner ended, output in", output_file
-
-        # Abort background runners
-        for runner in background_runners:
-            runner.abort()
-
-        # Wait for background runners to finish
-        for runner in background_runners:
-            if runner.join(timeout=60) is None:
-                # Nuke if it did not stop nicely
-                base_runner.Runner.terminate(runner)
-                runner_join(runner)
-            else:
-                base_runner.Runner.release(runner)
-            print "Background task ended"
-
-
-# TODO: Move stuff below into TaskCommands class !?
-
-
-class TaskParser(object):
-    '''Parser for task config files in yaml format'''
-    def __init__(self, path):
-        self.path = path
-
-    def _meet_constraint(self, task, cur_pod, cur_installer):
-        if "constraint" in task:
-            constraint = task.get('constraint', None)
-            if constraint is not None:
-                tc_fit_pod = constraint.get('pod', None)
-                tc_fit_installer = constraint.get('installer', None)
-                LOG.info("cur_pod:%s, cur_installer:%s,tc_constraints:%s",
-                         cur_pod, cur_installer, constraint)
-                if cur_pod and tc_fit_pod and cur_pod not in tc_fit_pod:
-                    return False
-                if cur_installer and tc_fit_installer and \
-                        cur_installer not in tc_fit_installer:
-                    return False
-        return True
-
-    def _get_task_para(self, task, cur_pod):
-        task_args = task.get('task_args', None)
-        if task_args is not None:
-            task_args = task_args.get(cur_pod, None)
-        task_args_fnames = task.get('task_args_fnames', None)
-        if task_args_fnames is not None:
-            task_args_fnames = task_args_fnames.get(cur_pod, None)
-        return task_args, task_args_fnames
-
-    def parse_suite(self):
-        '''parse the suite file and return a list of task config file paths
-           and lists of optional parameters if present'''
-        LOG.info("\nParsing suite file:%s", self.path)
-
-        try:
-            with open(self.path) as stream:
-                cfg = yaml.load(stream)
-        except IOError as ioerror:
-            sys.exit(ioerror)
-
-        self._check_schema(cfg["schema"], "suite")
-        LOG.info("\nStarting scenario:%s", cfg["name"])
-
-        test_cases_dir = cfg.get("test_cases_dir", test_cases_dir_default)
-        if test_cases_dir[-1] != os.sep:
-            test_cases_dir += os.sep
-
-        cur_pod = os.environ.get('NODE_NAME', None)
-        cur_installer = os.environ.get('INSTALLER_TYPE', None)
-
-        valid_task_files = []
-        valid_task_args = []
-        valid_task_args_fnames = []
-
-        for task in cfg["test_cases"]:
-            # 1.check file_name
-            if "file_name" in task:
-                task_fname = task.get('file_name', None)
-                if task_fname is None:
-                    continue
-            else:
-                continue
-            # 2.check constraint
-            if self._meet_constraint(task, cur_pod, cur_installer):
-                valid_task_files.append(test_cases_dir + task_fname)
-            else:
-                continue
-            # 3.fetch task parameters
-            task_args, task_args_fnames = self._get_task_para(task, cur_pod)
-            valid_task_args.append(task_args)
-            valid_task_args_fnames.append(task_args_fnames)
-
-        return valid_task_files, valid_task_args, valid_task_args_fnames
-
-    def parse_task(self, task_id, task_args=None, task_args_file=None):
-        '''parses the task file and return an context and scenario instances'''
-        print "Parsing task config:", self.path
-
-        try:
-            kw = {}
-            if task_args_file:
-                with open(task_args_file) as f:
-                    kw.update(parse_task_args("task_args_file", f.read()))
-            kw.update(parse_task_args("task_args", task_args))
-        except TypeError:
-            raise TypeError()
-
-        try:
-            with open(self.path) as f:
-                try:
-                    input_task = f.read()
-                    rendered_task = TaskTemplate.render(input_task, **kw)
-                except Exception as e:
-                    print(("Failed to render template:\n%(task)s\n%(err)s\n")
-                          % {"task": input_task, "err": e})
-                    raise e
-                print(("Input task is:\n%s\n") % rendered_task)
-
-                cfg = yaml.load(rendered_task)
-        except IOError as ioerror:
-            sys.exit(ioerror)
-
-        self._check_schema(cfg["schema"], "task")
-        meet_precondition = self._check_precondition(cfg)
-
-        # TODO: support one or many contexts? Many would simpler and precise
-        # TODO: support hybrid context type
-        if "context" in cfg:
-            context_cfgs = [cfg["context"]]
-        elif "contexts" in cfg:
-            context_cfgs = cfg["contexts"]
-        else:
-            context_cfgs = [{"type": "Dummy"}]
-
-        for cfg_attrs in context_cfgs:
-            context_type = cfg_attrs.get("type", "Heat")
-            if "Heat" == context_type and "networks" in cfg_attrs:
-                # bugfix: if there are more than one network,
-                # only add "external_network" on first one.
-                # the name of netwrok should follow this rule:
-                # test, test2, test3 ...
-                # sort network with the length of network's name
-                sorted_networks = sorted(cfg_attrs["networks"].keys())
-                # config external_network based on env var
-                cfg_attrs["networks"][sorted_networks[0]]["external_network"] \
-                    = os.environ.get("EXTERNAL_NETWORK", "net04_ext")
-
-            context = Context.get(context_type)
-            context.init(cfg_attrs)
-
-        run_in_parallel = cfg.get("run_in_parallel", False)
-
-        # add tc and task id for influxdb extended tags
-        for scenario in cfg["scenarios"]:
-            task_name = os.path.splitext(os.path.basename(self.path))[0]
-            scenario["tc"] = task_name
-            scenario["task_id"] = task_id
-
-        # TODO we need something better here, a class that represent the file
-        return cfg["scenarios"], run_in_parallel, meet_precondition
-
-    def _check_schema(self, cfg_schema, schema_type):
-        '''Check if config file is using the correct schema type'''
-
-        if cfg_schema != "yardstick:" + schema_type + ":0.1":
-            sys.exit("error: file %s has unknown schema %s" % (self.path,
-                                                               cfg_schema))
-
-    def _check_precondition(self, cfg):
-        '''Check if the envrionment meet the preconditon'''
-
-        if "precondition" in cfg:
-            precondition = cfg["precondition"]
-            installer_type = precondition.get("installer_type", None)
-            deploy_scenarios = precondition.get("deploy_scenarios", None)
-            tc_fit_pods = precondition.get("pod_name", None)
-            installer_type_env = os.environ.get('INSTALL_TYPE', None)
-            deploy_scenario_env = os.environ.get('DEPLOY_SCENARIO', None)
-            pod_name_env = os.environ.get('NODE_NAME', None)
-
-            LOG.info("installer_type: %s, installer_type_env: %s",
-                     installer_type, installer_type_env)
-            LOG.info("deploy_scenarios: %s, deploy_scenario_env: %s",
-                     deploy_scenarios, deploy_scenario_env)
-            LOG.info("tc_fit_pods: %s, pod_name_env: %s",
-                     tc_fit_pods, pod_name_env)
-            if installer_type and installer_type_env:
-                if installer_type_env not in installer_type:
-                    return False
-            if deploy_scenarios and deploy_scenario_env:
-                deploy_scenarios_list = deploy_scenarios.split(',')
-                for deploy_scenario in deploy_scenarios_list:
-                    if deploy_scenario_env.startswith(deploy_scenario):
-                        return True
-                return False
-            if tc_fit_pods and pod_name_env:
-                if pod_name_env not in tc_fit_pods:
-                    return False
-        return True
-
-
-def atexit_handler():
-    '''handler for process termination'''
-    base_runner.Runner.terminate_all()
-
-    if len(Context.list) > 0:
-        print "Undeploying all contexts"
-        for context in Context.list:
-            context.undeploy()
-
-
-def is_ip_addr(addr):
-    '''check if string addr is an IP address'''
-    try:
-        ipaddress.ip_address(unicode(addr))
-        return True
-    except ValueError:
-        return False
-
-
-def _is_same_heat_context(host_attr, target_attr):
-    '''check if two servers are in the same heat context
-    host_attr: either a name for a server created by yardstick or a dict
-    with attribute name mapping when using external heat templates
-    target_attr: either a name for a server created by yardstick or a dict
-    with attribute name mapping when using external heat templates
-    '''
-    host = None
-    target = None
-    for context in Context.list:
-        if context.__context_type__ != "Heat":
-            continue
-
-        host = context._get_server(host_attr)
-        if host is None:
-            continue
-
-        target = context._get_server(target_attr)
-        if target is None:
-            return False
-
-        # Both host and target is not None, then they are in the
-        # same heat context.
-        return True
-
-    return False
-
-
-def _is_background_scenario(scenario):
-    if "run_in_background" in scenario:
-        return scenario["run_in_background"]
-    else:
-        return False
-
-
-def run_one_scenario(scenario_cfg, output_file):
-    '''run one scenario using context'''
-    runner_cfg = scenario_cfg["runner"]
-    runner_cfg['output_filename'] = output_file
-
-    # TODO support get multi hosts/vms info
-    context_cfg = {}
-    if "host" in scenario_cfg:
-        context_cfg['host'] = Context.get_server(scenario_cfg["host"])
-
-    if "target" in scenario_cfg:
-        if is_ip_addr(scenario_cfg["target"]):
-            context_cfg['target'] = {}
-            context_cfg['target']["ipaddr"] = scenario_cfg["target"]
-        else:
-            context_cfg['target'] = Context.get_server(scenario_cfg["target"])
-            if _is_same_heat_context(scenario_cfg["host"],
-                                     scenario_cfg["target"]):
-                context_cfg["target"]["ipaddr"] = \
-                    context_cfg["target"]["private_ip"]
-            else:
-                context_cfg["target"]["ipaddr"] = \
-                    context_cfg["target"]["ip"]
-
-    if "targets" in scenario_cfg:
-        ip_list = []
-        for target in scenario_cfg["targets"]:
-            if is_ip_addr(target):
-                ip_list.append(target)
-                context_cfg['target'] = {}
-            else:
-                context_cfg['target'] = Context.get_server(target)
-                if _is_same_heat_context(scenario_cfg["host"], target):
-                    ip_list.append(context_cfg["target"]["private_ip"])
-                else:
-                    ip_list.append(context_cfg["target"]["ip"])
-        context_cfg['target']['ipaddr'] = ','.join(ip_list)
-
-    if "nodes" in scenario_cfg:
-        context_cfg["nodes"] = parse_nodes_with_context(scenario_cfg)
-    runner = base_runner.Runner.get(runner_cfg)
-
-    print "Starting runner of type '%s'" % runner_cfg["type"]
-    runner.run(scenario_cfg, context_cfg)
-
-    return runner
-
-
-def parse_nodes_with_context(scenario_cfg):
-    '''paras the 'nodes' fields in scenario '''
-    nodes = scenario_cfg["nodes"]
-
-    nodes_cfg = {}
-    for nodename in nodes:
-        nodes_cfg[nodename] = Context.get_server(nodes[nodename])
-
-    return nodes_cfg
-
-
-def runner_join(runner):
-    '''join (wait for) a runner, exit process at runner failure'''
-    status = runner.join()
-    base_runner.Runner.release(runner)
-    if status != 0:
-        sys.exit("Runner failed")
-
-
-def print_invalid_header(source_name, args):
-    print(("Invalid %(source)s passed:\n\n %(args)s\n")
-          % {"source": source_name, "args": args})
-
-
-def parse_task_args(src_name, args):
-    try:
-        kw = args and yaml.safe_load(args)
-        kw = {} if kw is None else kw
-    except yaml.parser.ParserError as e:
-        print_invalid_header(src_name, args)
-        print(("%(source)s has to be YAML. Details:\n\n%(err)s\n")
-              % {"source": src_name, "err": e})
-        raise TypeError()
-
-    if not isinstance(kw, dict):
-        print_invalid_header(src_name, args)
-        print(("%(src)s had to be dict, actually %(src_type)s\n")
-              % {"src": src_name, "src_type": type(kw)})
-        raise TypeError()
-    return kw
-
-
-def check_environment():
-    auth_url = os.environ.get('OS_AUTH_URL', None)
-    if not auth_url:
-        try:
-            source_env(constants.OPENSTACK_RC_FILE)
-        except IOError as e:
-            if e.errno != errno.EEXIST:
-                raise
-            LOG.debug('OPENRC file not found')
+        param = change_osloobj_to_paras(args)
+        Task().start(param)
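
For reference, the precondition rules that moved into yardstick.benchmark.core.task work off three environment variables. A hypothetical walk-through of the removed _check_precondition logic, with made-up values:

    import os

    # Made-up environment; the variable names come from the removed code.
    os.environ['INSTALL_TYPE'] = 'fuel'
    os.environ['DEPLOY_SCENARIO'] = 'os-nosdn-nofeature-ha'

    cfg = {'precondition': {'installer_type': 'fuel,compass',
                            'deploy_scenarios': 'os-nosdn'}}
    # 'fuel' occurs in the installer_type string and DEPLOY_SCENARIO starts
    # with 'os-nosdn', so the check passes; with DEPLOY_SCENARIO='odl-l2'
    # it would fail and the task would be skipped.
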
index cb76c7a..92831ad 100644 (file)
@@ -8,14 +8,12 @@
 ##############################################################################
 
 """ Handler for yardstick command 'testcase' """
-import os
-import yaml
-import sys
+from __future__ import print_function
 
-from yardstick.cmd import print_hbar
-from yardstick.common.task_template import TaskTemplate
+from __future__ import absolute_import
+from yardstick.benchmark.core.testcase import Testcase
 from yardstick.common.utils import cliargs
-from yardstick.definitions import YARDSTICK_ROOT_PATH
+from yardstick.cmd.commands import change_osloobj_to_paras
 
 
 class TestcaseCommands(object):
@@ -23,92 +21,14 @@ class TestcaseCommands(object):
 
        Set of commands to discover and display test cases.
     '''
-    def __init__(self):
-        self.test_case_path = YARDSTICK_ROOT_PATH + 'tests/opnfv/test_cases/'
-        self.testcase_list = []
 
     def do_list(self, args):
         '''List existing test cases'''
-
-        try:
-            testcase_files = os.listdir(self.test_case_path)
-        except Exception as e:
-            print(("Failed to list dir:\n%(path)s\n%(err)s\n")
-                  % {"path": self.test_case_path, "err": e})
-            raise e
-        testcase_files.sort()
-
-        for testcase_file in testcase_files:
-            record = self._get_record(testcase_file)
-            self.testcase_list.append(record)
-
-        self._format_print(self.testcase_list)
-        return True
+        param = change_osloobj_to_paras(args)
+        Testcase().list_all(param)
 
     @cliargs("casename", type=str, help="test case name", nargs=1)
     def do_show(self, args):
         '''Show details of a specific test case'''
-        testcase_name = args.casename[0]
-        testcase_path = self.test_case_path + testcase_name + ".yaml"
-        try:
-            with open(testcase_path) as f:
-                try:
-                    testcase_info = f.read()
-                    print testcase_info
-
-                except Exception as e:
-                    print(("Failed to load test cases:"
-                           "\n%(testcase_file)s\n%(err)s\n")
-                          % {"testcase_file": testcase_path, "err": e})
-                    raise e
-        except IOError as ioerror:
-            sys.exit(ioerror)
-        return True
-
-    def _get_record(self, testcase_file):
-
-        try:
-            with open(self.test_case_path + testcase_file) as f:
-                try:
-                    testcase_info = f.read()
-                except Exception as e:
-                    print(("Failed to load test cases:"
-                           "\n%(testcase_file)s\n%(err)s\n")
-                          % {"testcase_file": testcase_file, "err": e})
-                    raise e
-                description, installer, deploy_scenarios = \
-                    self._parse_testcase(testcase_info)
-
-                record = {'Name': testcase_file.split(".")[0],
-                          'Description': description,
-                          'installer': installer,
-                          'deploy_scenarios': deploy_scenarios}
-                return record
-        except IOError as ioerror:
-            sys.exit(ioerror)
-
-    def _parse_testcase(self, testcase_info):
-
-        kw = {}
-        rendered_testcase = TaskTemplate.render(testcase_info, **kw)
-        testcase_cfg = yaml.load(rendered_testcase)
-        test_precondition = testcase_cfg.get('precondition', None)
-        installer_type = 'all'
-        deploy_scenarios = 'all'
-        if test_precondition is not None:
-            installer_type = test_precondition.get('installer_type', 'all')
-            deploy_scenarios = test_precondition.get('deploy_scenarios', 'all')
-
-        description = testcase_info.split("\n")[2][1:].strip()
-        return description, installer_type, deploy_scenarios
-
-    def _format_print(self, testcase_list):
-        '''format output'''
-
-        print_hbar(88)
-        print("| %-21s | %-60s" % ("Testcase Name", "Description"))
-        print_hbar(88)
-        for testcase_record in testcase_list:
-            print "| %-16s | %-60s" % (testcase_record['Name'],
-                                       testcase_record['Description'])
-        print_hbar(88)
+        param = change_osloobj_to_paras(args)
+        Testcase().show(param)
index 705e1ad..d99e216 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import os
 
 DOCKER_URL = 'unix://var/run/docker.sock'
@@ -51,4 +52,6 @@ LOAD_IMAGES_SCRIPT = 'tests/ci/load_images.sh'
 
 OPENSTACK_RC_FILE = join(YARDSTICK_CONFIG_DIR, 'openstack.creds')
 
-YARDSTICK_ENV_ACTION_API = 'http://localhost:5000/yardstick/env/action'
+BASE_URL = 'http://localhost:5000'
+ENV_ACTION_API = BASE_URL + '/yardstick/env/action'
+ASYNC_TASK_API = BASE_URL + '/yardstick/asynctask'
index ab2e9a3..11c2d75 100644 (file)
@@ -6,9 +6,11 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import json
+from __future__ import absolute_import
+
 import logging
 
+from oslo_serialization import jsonutils
 import requests
 
 logger = logging.getLogger(__name__)
@@ -17,7 +19,7 @@ logger = logging.getLogger(__name__)
 class HttpClient(object):
 
     def post(self, url, data):
-        data = json.dumps(data)
+        data = jsonutils.dump_as_bytes(data)
         headers = {'Content-Type': 'application/json'}
         try:
             response = requests.post(url, data=data, headers=headers)
@@ -28,3 +30,7 @@ class HttpClient(object):
         except Exception as e:
             logger.debug('Failed: %s', e)
             raise
+
+    def get(self, url):
+        response = requests.get(url)
+        return response.json()
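
Together with the BASE_URL-derived constants added above, the client now offers a symmetric post/get pair. A usage sketch; the module path and the action payload are assumptions, not part of this change:

    from yardstick.common import constants as consts
    from yardstick.common.httpClient import HttpClient  # path assumed

    client = HttpClient()
    client.post(consts.ENV_ACTION_API, {'action': 'prepareYardstickEnv'})
    status = client.get(consts.ASYNC_TASK_API + '?task_id=<uuid>')
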
index d8dc61e..e351d16 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import os
 import logging
 
index 2739323..bda8a1b 100755 (executable)
@@ -7,12 +7,14 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 # yardstick: this file is copied from rally and slightly modified
 ##############################################################################
+from __future__ import absolute_import
 import re
 import jinja2
 import jinja2.meta
 
 
 class TaskTemplate(object):
+
     @classmethod
     def render(cls, task_template, **kwargs):
         """Render jinja2 task template to Yardstick input task.
index 2deaf39..2432c5d 100644 (file)
 
 # yardstick: this file is copied from python-heatclient and slightly modified
 
-import json
+from __future__ import absolute_import
+
 import yaml
+from oslo_serialization import jsonutils
 
 if hasattr(yaml, 'CSafeLoader'):
     yaml_loader = yaml.CSafeLoader
@@ -46,7 +48,7 @@ def parse(tmpl_str):
     JSON or YAML format.
     '''
     if tmpl_str.startswith('{'):
-        tpl = json.loads(tmpl_str)
+        tpl = jsonutils.loads(tmpl_str)
     else:
         try:
             tpl = yaml.load(tmpl_str, Loader=yaml_loader)
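
Since jsonutils.loads and yaml.load both yield plain dicts here, the swap is behavior-preserving for templates in either format. A toy check (quoting the YAML value keeps it a string rather than a parsed date):

    as_json = parse('{"heat_template_version": "2013-05-23"}')
    as_yaml = parse("heat_template_version: '2013-05-23'")
    assert as_json == as_yaml
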
index 3ecb0ae..57ace14 100644 (file)
 
 # yardstick comment: this is a modified copy of rally/rally/common/utils.py
 
-import os
-import sys
-import yaml
+from __future__ import absolute_import
+from __future__ import print_function
+
 import errno
-import subprocess
 import logging
+import os
+import subprocess
+import sys
+from functools import reduce
 
-from oslo_utils import importutils
+import yaml
 from keystoneauth1 import identity
 from keystoneauth1 import session
 from neutronclient.v2_0 import client
+from oslo_utils import importutils
 
 import yardstick
 
@@ -94,12 +98,12 @@ def get_para_from_yaml(file_path, args):
             value = reduce(func, args.split('.'), value)
 
             if value is None:
-                print 'parameter not found'
+                print('parameter not found')
                 return None
 
             return value
     else:
-        print 'file not exist'
+        print('file not exist')
         return None
 
 
index 300a78e..d4afac6 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import os
 
 dirname = os.path.dirname
index b519efc..dfb1307 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 from oslo_config import cfg
 
 import yardstick.common.utils as utils
index ffdddb0..6e863ca 100644 (file)
@@ -16,6 +16,7 @@
 # yardstick comment: this is a modified copy of
 # ceilometer/ceilometer/dispatcher/__init__.py
 
+from __future__ import absolute_import
 import abc
 import six
 
index c2cc265..9c728e9 100644 (file)
 # yardstick comment: this is a modified copy of
 # ceilometer/ceilometer/dispatcher/file.py
 
+from __future__ import absolute_import
+
 import logging
 import logging.handlers
-import json
 
+from oslo_serialization import jsonutils
 from oslo_config import cfg
 
 from yardstick.dispatcher.base import Base as DispatchBase
@@ -70,7 +72,7 @@ class FileDispatcher(DispatchBase):
 
     def record_result_data(self, data):
         if self.log:
-            self.log.info(json.dumps(data))
+            self.log.info(jsonutils.dump_as_bytes(data))
 
     def flush_result_data(self):
         pass
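
jsonutils.dump_as_bytes keeps the logged payload as UTF-8 bytes on both interpreters, whereas json.dumps returns text on Python 3:

    from oslo_serialization import jsonutils

    payload = jsonutils.dump_as_bytes({'tc': 'ping', 'rtt': 0.8})
    assert isinstance(payload, bytes)
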
index 98e772d..7900861 100644 (file)
 # yardstick comment: this is a modified copy of
 # ceilometer/ceilometer/dispatcher/http.py
 
-import os
-import json
+from __future__ import absolute_import
+
 import logging
-import requests
+import os
 
+from oslo_serialization import jsonutils
+import requests
 from oslo_config import cfg
 
 from yardstick.dispatcher.base import Base as DispatchBase
@@ -81,16 +83,18 @@ class HttpDispatcher(DispatchBase):
                 case_name = v["scenario_cfg"]["tc"]
                 break
         if case_name == "":
-            LOG.error('Test result : %s', json.dumps(self.result))
+            LOG.error('Test result : %s',
+                      jsonutils.dump_as_bytes(self.result))
             LOG.error('The case_name cannot be found, no data will be posted.')
             return
 
         self.result["case_name"] = case_name
 
         try:
-            LOG.debug('Test result : %s', json.dumps(self.result))
+            LOG.debug('Test result : %s',
+                      jsonutils.dump_as_bytes(self.result))
             res = requests.post(self.target,
-                                data=json.dumps(self.result),
+                                data=jsonutils.dump_as_bytes(self.result),
                                 headers=self.headers,
                                 timeout=self.timeout)
             LOG.debug('Test result posting finished with status code'
index fc9f3e9..427e669 100644 (file)
@@ -7,16 +7,19 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-import os
-import json
+from __future__ import absolute_import
+
 import logging
-import requests
+import os
 import time
 
+import requests
+import six
 from oslo_config import cfg
+from oslo_serialization import jsonutils
 
-from yardstick.dispatcher.base import Base as DispatchBase
 from third_party.influxdb.influxdb_line_protocol import make_lines
+from yardstick.dispatcher.base import Base as DispatchBase
 
 LOG = logging.getLogger(__name__)
 
@@ -80,9 +83,9 @@ class InfluxdbDispatcher(DispatchBase):
                 if type(v) == dict or type(v) == list]:
             return data
 
-        for k, v in data.iteritems():
+        for k, v in six.iteritems(data):
             if type(v) == dict:
-                for n_k, n_v in v.iteritems():
+                for n_k, n_v in six.iteritems(v):
                     next_data["%s.%s" % (k, n_k)] = n_v
             elif type(v) == list:
                 for index, item in enumerate(v):
@@ -127,7 +130,7 @@ class InfluxdbDispatcher(DispatchBase):
         return make_lines(msg).encode('utf-8')
 
     def record_result_data(self, data):
-        LOG.debug('Test result : %s', json.dumps(data))
+        LOG.debug('Test result : %s', jsonutils.dump_as_bytes(data))
         self.raw_result.append(data)
         if self.target == '':
             # if the target was not set, do not do anything
@@ -148,7 +151,7 @@ class InfluxdbDispatcher(DispatchBase):
             return 0
 
         if self.tc == "":
-            LOG.error('Test result : %s', json.dumps(data))
+            LOG.error('Test result : %s', jsonutils.dump_as_bytes(data))
             LOG.error('The case_name cannot be found, no data will be posted.')
             return -1
 
@@ -171,5 +174,6 @@ class InfluxdbDispatcher(DispatchBase):
         return 0
 
     def flush_result_data(self):
-        LOG.debug('Test result all : %s', json.dumps(self.raw_result))
+        LOG.debug('Test result all : %s',
+                  jsonutils.dump_as_bytes(self.raw_result))
         return 0
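
The six.iteritems swap keeps the key-flattening loop above working unchanged on Python 2 and 3. A toy run of the same logic; the key format for list items is assumed, since that branch is cut off by the hunk boundary:

    import six

    data = {'rtt': {'ares': 1.125}, 'errors': ['timeout']}  # made-up record
    flat = {}
    for k, v in six.iteritems(data):
        if isinstance(v, dict):
            for n_k, n_v in six.iteritems(v):
                flat['%s.%s' % (k, n_k)] = n_v
        elif isinstance(v, list):
            for index, item in enumerate(v):
                flat['%s%d' % (k, index)] = item  # assumed key format
        else:
            flat[k] = v
    assert flat == {'rtt.ares': 1.125, 'errors0': 'timeout'}
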
index 418e3da..5d427be 100755 (executable)
@@ -38,6 +38,7 @@
     NFV TST
 
 """
+from __future__ import absolute_import
 import sys
 
 from yardstick.cmd.cli import YardstickCLI
index 4839455..7e0f360 100644 (file)
@@ -9,25 +9,37 @@
 
 """Heat template and stack management"""
 
-import time
+from __future__ import absolute_import
+from __future__ import print_function
+
+import collections
 import datetime
 import getpass
-import socket
 import logging
-import pkg_resources
-import json
+import socket
+import time
 
 import heatclient
+import pkg_resources
+from oslo_serialization import jsonutils
+from oslo_utils import encodeutils
 
-from yardstick.common import template_format
 import yardstick.common.openstack_utils as op_utils
-
+from yardstick.common import template_format
 
 log = logging.getLogger(__name__)
 
 
+HEAT_KEY_UUID_LENGTH = 8
+
+
+def get_short_key_uuid(uuid):
+    return str(uuid)[:HEAT_KEY_UUID_LENGTH]
+
+
 class HeatObject(object):
     ''' base class for template and stack'''
+
     def __init__(self):
         self._heat_client = None
         self.uuid = None
@@ -111,7 +123,7 @@ class HeatStack(HeatObject):
                 self._delete()
                 break
             except RuntimeError as err:
-                log.warn(err.args)
+                log.warning(err.args)
                 time.sleep(2)
             i += 1
 
@@ -165,7 +177,7 @@ class HeatTemplate(HeatObject):
 
         if template_file:
             with open(template_file) as stream:
-                print "Parsing external template:", template_file
+                print("Parsing external template:", template_file)
                 template_str = stream.read()
                 self._template = template_format.parse(template_str)
             self._parameters = heat_parameters
@@ -297,15 +309,20 @@ class HeatTemplate(HeatObject):
             }
         }
 
-    def add_keypair(self, name):
+    def add_keypair(self, name, key_uuid):
         '''add to the template a Nova KeyPair'''
         log.debug("adding Nova::KeyPair '%s'", name)
         self.resources[name] = {
             'type': 'OS::Nova::KeyPair',
             'properties': {
                 'name': name,
-                'public_key': pkg_resources.resource_string(
-                    'yardstick.resources', 'files/yardstick_key.pub')
+                # resource_string returns bytes, so we must decode to unicode
+                'public_key': encodeutils.safe_decode(
+                    pkg_resources.resource_string(
+                        'yardstick.resources',
+                        'files/yardstick_key-' +
+                        get_short_key_uuid(key_uuid) + '.pub'),
+                    'utf-8')
             }
         }
 
@@ -390,7 +407,7 @@ class HeatTemplate(HeatObject):
                 )
 
         if networks:
-            for i in range(len(networks)):
+            for i, _ in enumerate(networks):
                 server_properties['networks'].append({'network': networks[i]})
 
         if scheduler_hints:
@@ -400,11 +417,11 @@ class HeatTemplate(HeatObject):
             server_properties['user_data'] = user_data
 
         if metadata:
-            assert type(metadata) is dict
+            assert isinstance(metadata, collections.Mapping)
             server_properties['metadata'] = metadata
 
         if additional_properties:
-            assert type(additional_properties) is dict
+            assert isinstance(additional_properties, collections.Mapping)
             for prop in additional_properties:
                 server_properties[prop] = additional_properties[prop]
 
@@ -426,13 +443,15 @@ class HeatTemplate(HeatObject):
         stack = HeatStack(self.name)
 
         heat = self._get_heat_client()
-        json_template = json.dumps(self._template)
+        json_template = jsonutils.dump_as_bytes(
+            self._template)
         start_time = time.time()
         stack.uuid = self.uuid = heat.stacks.create(
             stack_name=self.name, template=json_template,
             parameters=self.heat_parameters)['stack']['id']
 
         status = self.status()
+        outputs = []
 
         if block:
             while status != u'CREATE_COMPLETE':
@@ -446,13 +465,12 @@ class HeatTemplate(HeatObject):
 
             end_time = time.time()
             outputs = getattr(heat.stacks.get(self.uuid), 'outputs')
+            log.info("Created stack '%s' in %d secs",
+                     self.name, end_time - start_time)
 
-        for output in outputs:
-            self.outputs[output["output_key"].encode("ascii")] = \
-                output["output_value"].encode("ascii")
-
-        log.info("Created stack '%s' in %d secs",
-                 self.name, end_time - start_time)
+        # keep outputs as unicode
+        self.outputs = {output["output_key"]: output["output_value"] for output
+                        in outputs}
 
         stack.outputs = self.outputs
         return stack
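
With the new key_uuid parameter, each deployment gets its own public key file whose name embeds a truncated UUID. How the helper composes it (the UUID value is arbitrary):

    import uuid

    key_uuid = uuid.uuid4()
    pub_key = 'files/yardstick_key-' + get_short_key_uuid(key_uuid) + '.pub'
    # Only the first HEAT_KEY_UUID_LENGTH (8) characters are kept,
    # e.g. 'files/yardstick_key-3f4a9b12.pub'.
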
index 4cbbdfe..4e65303 100644 (file)
     $ yardstick-plot -i /tmp/yardstick.out -o /tmp/plots/
 '''
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import argparse
-import json
 import os
 import sys
 import time
-import matplotlib.pyplot as plt
+
 import matplotlib.lines as mlines
+import matplotlib.pyplot as plt
+from oslo_serialization import jsonutils
+from six.moves import range
+from six.moves import zip
 
 
 class Parser(object):
@@ -44,7 +50,7 @@ class Parser(object):
             prog='yardstick-plot',
             description="A tool for visualizing results from yardstick. "
                         "Currently supports plotting graphs for output files "
-                        "from tests: " + str(self.data.keys())
+                        "from tests: " + str(list(self.data.keys()))
         )
         parser.add_argument(
             '-i', '--input',
@@ -65,7 +71,7 @@ class Parser(object):
             self.scenarios[record["runner_id"]] = obj_name
             return
         runner_object = self.scenarios[record["runner_id"]]
-        for test_type in self.data.keys():
+        for test_type in self.data:
             if test_type in runner_object:
                 self.data[test_type].append(record)
 
@@ -80,17 +86,17 @@ class Parser(object):
         if self.args.input:
             input_file = self.args.input
         else:
-            print("No input file specified, reading from %s"
-                  % self.default_input_loc)
+            print(("No input file specified, reading from %s"
+                   % self.default_input_loc))
             input_file = self.default_input_loc
 
         try:
             with open(input_file) as f:
                 for line in f:
-                    record = json.loads(line)
+                    record = jsonutils.loads(line)
                     self._add_record(record)
         except IOError as e:
-            print(os.strerror(e.errno))
+            print((os.strerror(e.errno)))
             sys.exit(1)
 
 
@@ -126,7 +132,7 @@ class Plotter(object):
             os.makedirs(self.output_folder)
         new_file = os.path.join(self.output_folder, file_name)
         plt.savefig(new_file)
-        print("Saved graph to " + new_file)
+        print(("Saved graph to " + new_file))
 
     def _plot_ping(self, records):
         '''ping test result interpretation and visualization on the graph'''
@@ -143,7 +149,7 @@ class Plotter(object):
         if len(rtts) == 1:
             plt.bar(1, rtts[0], 0.35, color=self.colors[0])
         else:
-            plt.plot(seqs, rtts, self.colors[0]+'-')
+            plt.plot(seqs, rtts, self.colors[0] + '-')
 
         self._construct_legend(['rtt'])
         plt.xlabel("sequence number")
@@ -164,13 +170,13 @@ class Plotter(object):
                 received[i] = 0.0
                 plt.axvline(flows[i], color='r')
 
-        ppm = [1000000.0*(i - j)/i for i, j in zip(sent, received)]
+        ppm = [1000000.0 * (i - j) / i for i, j in zip(sent, received)]
 
         # If there is a single data-point then display a bar-chart
         if len(ppm) == 1:
             plt.bar(1, ppm[0], 0.35, color=self.colors[0])
         else:
-            plt.plot(flows, ppm, self.colors[0]+'-')
+            plt.plot(flows, ppm, self.colors[0] + '-')
 
         self._construct_legend(['ppm'])
         plt.xlabel("number of flows")
@@ -191,7 +197,7 @@ class Plotter(object):
         for i, val in enumerate(intervals):
             if val:
                 for j, _ in enumerate(intervals):
-                    kbps.append(val[j]['sum']['bits_per_second']/1000)
+                    kbps.append(val[j]['sum']['bits_per_second'] / 1000)
                     seconds.append(seconds[-1] + val[j]['sum']['seconds'])
             else:
                 kbps.append(0.0)
@@ -202,7 +208,7 @@ class Plotter(object):
                 plt.axvline(seconds[-1], color='r')
 
         self._construct_legend(['bandwidth'])
-        plt.plot(seconds[1:], kbps[1:], self.colors[0]+'-')
+        plt.plot(seconds[1:], kbps[1:], self.colors[0] + '-')
         plt.xlabel("time in seconds")
         plt.ylabel("bandwidth in Kb/s")
 
@@ -312,5 +318,6 @@ def main():
     print("Plotting graph(s)")
     plotter.plot()
 
+
 if __name__ == '__main__':
     main()
index 927ca94..1cad8ee 100644 (file)
@@ -25,7 +25,7 @@ Execute command and get output:
     status, stdout, stderr = ssh.execute("ps ax")
     if status:
         raise Exception("Command failed with non-zero status.")
-    print stdout.splitlines()
+    print(stdout.splitlines())
 
 Execute command with huge output:
 
@@ -62,6 +62,7 @@ Eventlet:
     sshclient = eventlet.import_patched("yardstick.ssh")
 
 """
+from __future__ import absolute_import
 import os
 import select
 import socket
@@ -70,6 +71,7 @@ import re
 
 import logging
 import paramiko
+from oslo_utils import encodeutils
 from scp import SCPClient
 import six
 
@@ -199,7 +201,8 @@ class SSH(object):
         session.exec_command(cmd)
         start_time = time.time()
 
-        data_to_send = ""
+        # encode on transmit, decode on receive
+        data_to_send = encodeutils.safe_encode("")
         stderr_data = None
 
         # If we have data to be sent to stdin then `select' should also
@@ -214,14 +217,15 @@ class SSH(object):
             r, w, e = select.select([session], writes, [session], 1)
 
             if session.recv_ready():
-                data = session.recv(4096)
+                data = encodeutils.safe_decode(session.recv(4096), 'utf-8')
                 self.log.debug("stdout: %r", data)
                 if stdout is not None:
                     stdout.write(data)
                 continue
 
             if session.recv_stderr_ready():
-                stderr_data = session.recv_stderr(4096)
+                stderr_data = encodeutils.safe_decode(
+                    session.recv_stderr(4096), 'utf-8')
                 self.log.debug("stderr: %r", stderr_data)
                 if stderr is not None:
                     stderr.write(stderr_data)
@@ -230,7 +234,8 @@ class SSH(object):
             if session.send_ready():
                 if stdin is not None and not stdin.closed:
                     if not data_to_send:
-                        data_to_send = stdin.read(4096)
+                        data_to_send = encodeutils.safe_encode(
+                            stdin.read(4096), incoming='utf-8')
                         if not data_to_send:
                             # we may need to keep stdin open
                             if not keep_stdin_open:
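
The convention adopted here is encode on transmit, decode on receive: the paramiko channel always sees bytes while callers keep working with text. The oslo_utils round trip:

    from oslo_utils import encodeutils

    wire = encodeutils.safe_encode(u'uname -a\n', incoming='utf-8')  # bytes
    text = encodeutils.safe_decode(wire, 'utf-8')                    # text
    assert isinstance(wire, bytes) and text == u'uname -a\n'
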
index d4ab29e..9c4eef1 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-'''
+"""
 Experimental Framework
-'''
+"""
+from __future__ import absolute_import
+import os
+
+APEX_LAKE_ROOT = os.path.realpath(
+    os.path.join(os.path.dirname(os.path.dirname(__file__))))
index e0209be..24dd1f8 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import experimental_framework.benchmarking_unit as b_unit
 from experimental_framework import heat_template_generation, common
 
index 1963696..d5de308 100644 (file)
@@ -18,6 +18,7 @@ initialization, execution and finalization
 '''
 
 
+from __future__ import absolute_import
 import json
 import time
 import inspect
@@ -27,6 +28,7 @@ from experimental_framework import common
 # from experimental_framework import data_manager as data
 from experimental_framework import heat_template_generation as heat
 from experimental_framework import deployment_unit as deploy
+from six.moves import range
 
 
 class BenchmarkingUnit:
@@ -116,10 +118,10 @@ class BenchmarkingUnit:
         """
         common.LOG.info('Run Benchmarking Unit')
 
-        experiment = dict()
-        result = dict()
-        for iteration in range(0, self.iterations):
-            common.LOG.info('Iteration ' + str(iteration))
+        experiment = {}
+        result = {}
+        for iteration in range(self.iterations):
+            common.LOG.info('Iteration %s', iteration)
             for template_file_name in self.template_files:
                 experiment_name = BenchmarkingUnit.\
                     extract_experiment_name(template_file_name)
@@ -238,7 +240,7 @@ class BenchmarkingUnit:
         :return: (str) Experiment Name
         """
         strings = template_file_name.split('.')
-        return ".".join(strings[:(len(strings)-1)])
+        return ".".join(strings[:(len(strings) - 1)])
 
     @staticmethod
     def get_benchmark_class(complete_module_name):
@@ -253,7 +255,7 @@ class BenchmarkingUnit:
         """
         strings = complete_module_name.split('.')
         class_name = 'experimental_framework.benchmarks.{}'.format(strings[0])
-        pkg = __import__(class_name, globals(), locals(), [], -1)
+        pkg = __import__(class_name, globals(), locals(), [], 0)
         module = getattr(getattr(pkg, 'benchmarks'), strings[0])
         members = inspect.getmembers(module)
         for m in members:
index ac7fad8..96cce22 100644 (file)
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import absolute_import
 import abc
 
 
@@ -30,7 +31,7 @@ class BenchmarkBaseClass(object):
             raise ValueError("Parameters need to be provided in a dict")
 
         for param in self.get_features()['parameters']:
-            if param not in params.keys():
+            if param not in list(params.keys()):
                 params[param] = self.get_features()['default_values'][param]
 
         for param in self.get_features()['parameters']:
index 320beca..db9d449 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import os
-import commands
 # import signal
 import time
+
+import subprocess
 from experimental_framework.benchmarks import benchmark_base_class as base
 from experimental_framework.constants import framework_parameters as fp
 from experimental_framework.constants import conf_file_sections as cfs
 from experimental_framework.packet_generators import dpdk_packet_generator \
     as dpdk
 import experimental_framework.common as common
+from six.moves import range
 
 
 THROUGHPUT = 'throughput'
@@ -36,7 +39,7 @@ class InstantiationValidationBenchmark(base.BenchmarkBaseClass):
 
     def __init__(self, name, params):
         base.BenchmarkBaseClass.__init__(self, name, params)
-        self.base_dir = "{}{}{}".format(
+        self.base_dir = os.path.join(
             common.get_base_dir(), fp.EXPERIMENTAL_FRAMEWORK_DIR,
             fp.DPDK_PKTGEN_DIR)
         self.results_file = self.base_dir + PACKETS_FILE_NAME
@@ -45,10 +48,11 @@ class InstantiationValidationBenchmark(base.BenchmarkBaseClass):
         self.interface_name = ''
 
         # Set the packet checker command
-        self.pkt_checker_command = common.get_base_dir()
-        self.pkt_checker_command += 'experimental_framework/libraries/'
-        self.pkt_checker_command += 'packet_checker/'
-        self.pkt_checker_command += PACKET_CHECKER_PROGRAM_NAME + ' '
+        self.pkt_checker_command = os.path.join(
+            common.get_base_dir(),
+            'experimental_framework/libraries/',
+            'packet_checker/',
+            PACKET_CHECKER_PROGRAM_NAME + ' ')
 
     def init(self):
         """
@@ -69,9 +73,11 @@ class InstantiationValidationBenchmark(base.BenchmarkBaseClass):
         features['description'] = 'Instantiation Validation Benchmark'
         features['parameters'] = [THROUGHPUT, VLAN_SENDER, VLAN_RECEIVER]
         features['allowed_values'] = dict()
-        features['allowed_values'][THROUGHPUT] = map(str, range(0, 100))
-        features['allowed_values'][VLAN_SENDER] = map(str, range(-1, 4096))
-        features['allowed_values'][VLAN_RECEIVER] = map(str, range(-1, 4096))
+        features['allowed_values'][THROUGHPUT] = [str(x) for x in range(100)]
+        features['allowed_values'][VLAN_SENDER] = [str(x) for x in
+                                                   range(-1, 4096)]
+        features['allowed_values'][VLAN_RECEIVER] = [str(x)
+                                                     for x in range(-1, 4096)]
         features['default_values'] = dict()
         features['default_values'][THROUGHPUT] = '1'
         features['default_values'][VLAN_SENDER] = '-1'
@@ -203,7 +209,7 @@ class InstantiationValidationBenchmark(base.BenchmarkBaseClass):
         # Start the packet checker
         current_dir = os.path.dirname(os.path.realpath(__file__))
         dir_list = self.pkt_checker_command.split('/')
-        directory = '/'.join(dir_list[0:len(dir_list)-1])
+        directory = os.pathsep.join(dir_list[0:len(dir_list) - 1])
         os.chdir(directory)
         command = "make"
         common.run_command(command)
@@ -245,10 +251,10 @@ class InstantiationValidationBenchmark(base.BenchmarkBaseClass):
         processes currently running on the host
         :return: type: list of int
         """
-        output = commands.getoutput("ps -ef |pgrep " +
-                                    PACKET_CHECKER_PROGRAM_NAME)
+        output = subprocess.check_output(
+            'pgrep "{}"'.format(PACKET_CHECKER_PROGRAM_NAME))
         if not output:
             pids = []
         else:
-            pids = map(int, output.split('\n'))
+            pids = [int(x) for x in output.splitlines()]
         return pids
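
One caveat on the pgrep call above: subprocess.check_output accepts a single command string only with shell=True, and it raises CalledProcessError when pgrep matches nothing (exit status 1). A defensive sketch using the list form, not the merged code:

    import subprocess

    def get_pids(program_name):
        try:
            output = subprocess.check_output(['pgrep', program_name])
        except subprocess.CalledProcessError:
            return []  # pgrep exits 1 when no process matches
        return [int(x) for x in output.splitlines()]
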
index 1eab70c..5569b6c 100644 (file)
 # limitations under the License.
 
 
-import instantiation_validation_benchmark as base
+from __future__ import absolute_import
 from experimental_framework import common
+from experimental_framework.benchmarks import \
+    instantiation_validation_benchmark as base
+from six.moves import range
 
 
 NUM_OF_NEIGHBORS = 'num_of_neighbours'
@@ -38,7 +41,7 @@ class InstantiationValidationNoisyNeighborsBenchmark(
         self.template_file = common.get_template_dir() + \
             temp_name
         self.stack_name = 'neighbour'
-        self.neighbor_stack_names = list()
+        self.neighbor_stack_names = []
 
     def get_features(self):
         features = super(InstantiationValidationNoisyNeighborsBenchmark,
index f2a87b2..44c9f32 100644 (file)
 # limitations under the License.
 
 
+from __future__ import absolute_import
 from experimental_framework.benchmarks import rfc2544_throughput_benchmark \
     as base
 from experimental_framework import common
+from six.moves import range
 
 
 NETWORK_NAME = 'network'
index 9db62e6..5c7b55e 100644 (file)
@@ -11,6 +11,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import absolute_import
+from six.moves import range
 
 from experimental_framework.benchmarks import benchmark_base_class
 from experimental_framework.packet_generators \
@@ -60,8 +62,10 @@ class RFC2544ThroughputBenchmark(benchmark_base_class.BenchmarkBaseClass):
         features['allowed_values'] = dict()
         features['allowed_values'][PACKET_SIZE] = ['64', '128', '256', '512',
                                                    '1024', '1280', '1514']
-        features['allowed_values'][VLAN_SENDER] = map(str, range(-1, 4096))
-        features['allowed_values'][VLAN_RECEIVER] = map(str, range(-1, 4096))
+        features['allowed_values'][VLAN_SENDER] = [str(x) for x in
+                                                   range(-1, 4096)]
+        features['allowed_values'][VLAN_RECEIVER] = [str(x) for x in
+                                                     range(-1, 4096)]
         features['default_values'] = dict()
         features['default_values'][PACKET_SIZE] = '1280'
         features['default_values'][VLAN_SENDER] = '1007'
@@ -99,7 +103,7 @@ class RFC2544ThroughputBenchmark(benchmark_base_class.BenchmarkBaseClass):
         :return: packet_sizes (list)
         """
         packet_size = '1280'  # default value
-        if PACKET_SIZE in self.params.keys() and \
+        if PACKET_SIZE in list(self.params.keys()) and \
                 isinstance(self.params[PACKET_SIZE], str):
             packet_size = self.params[PACKET_SIZE]
         return packet_size
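
The list comprehensions replacing map(str, range(...)) in this file matter on Python 3, where map returns a lazy iterator; code that indexes or re-iterates allowed_values would otherwise break. A quick equivalence check:

    py2_style = map(str, range(-1, 4096))        # iterator on Python 3
    py3_safe = [str(x) for x in range(-1, 4096)]
    assert list(py2_style) == py3_safe
    assert py3_safe[0] == '-1' and py3_safe[-1] == '4095'
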
index cbb930d..5891832 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import time
 
 from experimental_framework.benchmarks import benchmark_base_class as base
index 4bacd38..feea8bd 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
+from __future__ import absolute_import
 import os
 import re
-import ConfigParser
+import six.moves.configparser
 import logging
 import fileinput
 from experimental_framework.constants import conf_file_sections as cf
@@ -70,7 +72,7 @@ def init(api=False):
     init_conf_file(api)
     init_log()
     init_general_vars(api)
-    if len(CONF_FILE.get_variable_list(cf.CFS_PKTGEN)) > 0:
+    if CONF_FILE.get_variable_list(cf.CFS_PKTGEN):
         init_pktgen()
 
 
@@ -129,7 +131,7 @@ def init_general_vars(api=False):
 
     RESULT_DIR = "/tmp/apexlake/results/"
     if not os.path.isdir(RESULT_DIR):
-        os.mkdir(RESULT_DIR)
+        os.makedirs(RESULT_DIR)
 
     if cf.CFSO_RELEASE in CONF_FILE.get_variable_list(cf.CFS_OPENSTACK):
         RELEASE = CONF_FILE.get_variable(cf.CFS_OPENSTACK, cf.CFSO_RELEASE)
@@ -311,7 +313,7 @@ class ConfigurationFile:
         # config_file = BASE_DIR + config_file
         InputValidation.validate_file_exist(
             config_file, 'The provided configuration file does not exist')
-        self.config = ConfigParser.ConfigParser()
+        self.config = six.moves.configparser.ConfigParser()
         self.config.read(config_file)
         for section in sections:
             setattr(
@@ -457,7 +459,7 @@ def replace_in_file(file, text_to_search, text_to_replace):
     message = "The file does not exist"
     InputValidation.validate_file_exist(file, message)
     for line in fileinput.input(file, inplace=True):
-        print(line.replace(text_to_search, text_to_replace).rstrip())
+        print((line.replace(text_to_search, text_to_replace).rstrip()))
 
 
 # ------------------------------------------------------
@@ -610,7 +612,7 @@ class InputValidation(object):
         missing = [
             credential_key
             for credential_key in credential_keys
-            if credential_key not in credentials.keys()
+            if credential_key not in list(credentials.keys())
         ]
         if len(missing) == 0:
             return True
index 4ee3a8a..6e651bf 100644 (file)
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import absolute_import
 from experimental_framework.constants import conf_file_sections as cfs
 
 # ------------------------------------------------------
index 22fec13..0bb507c 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import os
 import time
 
@@ -50,8 +51,8 @@ class DeploymentUnit:
                 time.sleep(5)
                 status = self.heat_manager.check_stack_status(stack_name)
             return True
-        except Exception as e:
-            common.LOG.debug(e.message)
+        except Exception:
+            common.LOG.debug("check_stack_status", exc_info=True)
             return False
 
     def destroy_all_deployed_stacks(self):
@@ -81,17 +82,16 @@ class DeploymentUnit:
             self.heat_manager.create_stack(template_file, stack_name,
                                            parameters)
             deployed = True
-        except Exception as e:
-            common.LOG.debug(e.message)
+        except Exception:
+            common.LOG.debug("create_stack", exc_info=True)
             deployed = False
 
         if not deployed and 'COMPLETE' in \
                 self.heat_manager.check_stack_status(stack_name):
             try:
                 self.destroy_heat_template(stack_name)
-            except Exception as e:
-                common.LOG.debug(e.message)
-                pass
+            except Exception:
+                common.LOG.debug("destroy_heat_template", exc_info=True)
 
         status = self.heat_manager.check_stack_status(stack_name)
         while status and 'CREATE_IN_PROGRESS' in status:
@@ -102,16 +102,15 @@ class DeploymentUnit:
                 attempt += 1
                 try:
                     self.destroy_heat_template(stack_name)
-                except Exception as e:
-                    common.LOG.debug(e.message)
-                    pass
+                except Exception:
+                    common.LOG.debug("destroy_heat_template", exc_info=True)
                 return self.deploy_heat_template(template_file, stack_name,
                                                  parameters, attempt)
             else:
                 try:
                     self.destroy_heat_template(stack_name)
-                except Exception as e:
-                    common.LOG.debug(e.message)
+                except Exception:
+                    common.LOG.debug("destroy_heat_template", exc_info=True)
                 finally:
                     return False
         if self.heat_manager.check_stack_status(stack_name) and \
index 7400ebd..a323334 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 from keystoneclient.v2_0 import client as keystoneClient
 from heatclient import client as heatClient
 from heatclient.common import template_utils
@@ -97,7 +98,6 @@ class HeatManager:
                 if stack.stack_name == stack_name:
                     self.heat.stacks.delete(stack.id)
                     return True
-        except Exception as e:
-            common.LOG.debug(e.message)
-            pass
+        except Exception:
+            common.LOG.debug("destroy_heat_template", exc_info=True)
         return False
index e0c1a66..0f0af8b 100644 (file)
@@ -17,6 +17,7 @@
 Generation of the heat templates from the base template
 '''
 
+from __future__ import absolute_import
 import json
 import os
 import shutil
index 6dc32b6..9590036 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import os
-import base_packet_generator
-import experimental_framework.common as common
 import time
+
+
+import experimental_framework.common as common
 from experimental_framework.constants import conf_file_sections as conf_file
 from experimental_framework.constants import framework_parameters as fp
+from experimental_framework.packet_generators import base_packet_generator
 
 
 class DpdkPacketGenerator(base_packet_generator.BasePacketGenerator):
@@ -186,8 +189,7 @@ class DpdkPacketGenerator(base_packet_generator.BasePacketGenerator):
                     conf_file.CFSP_DPDK_PROGRAM_NAME,
                     conf_file.CFSP_DPDK_COREMASK,
                     conf_file.CFSP_DPDK_MEMORY_CHANNEL]:
-            if var not in variables.keys() or (var in variables.keys() and
-               variables[var] is ''):
+            if variables.get(var, '') == '':
                 raise ValueError("The variable " + var + " does not exist")
 
     @staticmethod
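Two separate Python 3 fixes land in the packet-generator file above. First, `import base_packet_generator` is an implicit relative import, which `from __future__ import absolute_import` turns into an error, so the module is now imported by its full package path. Second, the old guard compared `variables[var] is ''`, an identity test that only ever held through CPython string interning; `dict.get` with a default turns it into a plain equality check. A small sketch of the validation pattern (the names here are hypothetical):

    def validate_required_vars(variables, required):
        # get() with a '' default folds "key missing" and "value empty"
        # into a single equality test, replacing the fragile `is ''`.
        for var in required:
            if variables.get(var, '') == '':
                raise ValueError("The variable " + var + " does not exist")

    # validate_required_vars({'coremask': '0x3'}, ['coremask', 'memchannel'])
    # -> ValueError: The variable memchannel does not exist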
index 188a7f0..0211a57 100644 (file)
@@ -16,6 +16,7 @@
 Experimental Framework
 """
 
+from __future__ import absolute_import
 from distutils.core import setup
 
 
index 4b70b9b..b6191ed 100644 (file)
 # limitations under the License.
 
 
+from __future__ import absolute_import
 import unittest
 import mock
 import os
 import experimental_framework.common as common
+from experimental_framework import APEX_LAKE_ROOT
 from experimental_framework.api import FrameworkApi
 from experimental_framework.benchmarking_unit import BenchmarkingUnit
 import experimental_framework.benchmarks.\
     instantiation_validation_benchmark as iv
+from six.moves import map
+from six.moves import range
 
 
 class DummyBenchmarkingUnit(BenchmarkingUnit):
@@ -61,6 +65,7 @@ class DummyBenchmarkingUnit2(BenchmarkingUnit):
 
 
 class TestGeneratesTemplate(unittest.TestCase):
+
     def setUp(self):
         pass
 
@@ -92,11 +97,11 @@ class TestGeneratesTemplate(unittest.TestCase):
             iv.VLAN_RECEIVER]
         expected['allowed_values'] = dict()
         expected['allowed_values'][iv.THROUGHPUT] = \
-            map(str, range(0, 100))
+            list(map(str, list(range(0, 100))))
         expected['allowed_values'][iv.VLAN_SENDER] = \
-            map(str, range(-1, 4096))
+            list(map(str, list(range(-1, 4096))))
         expected['allowed_values'][iv.VLAN_RECEIVER] = \
-            map(str, range(-1, 4096))
+            list(map(str, list(range(-1, 4096))))
         expected['default_values'] = dict()
         expected['default_values'][iv.THROUGHPUT] = '1'
         expected['default_values'][iv.VLAN_SENDER] = '-1'
@@ -121,9 +126,8 @@ class TestGeneratesTemplate(unittest.TestCase):
     def test_execute_framework_for_success(self, mock_b_unit, mock_heat,
                                            mock_credentials, mock_log,
                                            mock_common_init):
-        common.TEMPLATE_DIR = "{}/{}/".format(
-            os.getcwd(), 'tests/data/generated_templates'
-        )
+        common.TEMPLATE_DIR = os.path.join(APEX_LAKE_ROOT,
+                                           'tests/data/generated_templates/')
 
         test_cases = dict()
         iterations = 1
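The `list(map(...))` wrappers above are needed because Python 3's `map()` and `range()` return lazy objects, so comparing them directly against the list-valued entries of the expected features dict would always fail. The inner `list()` around `range()` is redundant, since `map()` accepts any iterable, but it is harmless. A quick illustration:

    from six.moves import map, range

    allowed = list(map(str, range(0, 100)))
    assert allowed[:3] == ['0', '1', '2']
    # Without the outer list(), Python 3 compares a map object to a
    # list and the equality check is simply False:
    assert map(str, range(3)) != ['0', '1', '2']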
index b0e27d0..153de17 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 from experimental_framework.packet_generators import base_packet_generator
 
index 405c010..4e5eb9f 100644 (file)
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import absolute_import
 import unittest
 from experimental_framework.benchmarks import benchmark_base_class as base
 
@@ -45,8 +46,8 @@ class TestBenchmarkBaseClass(unittest.TestCase):
         params['C'] = 'c'
         bench_base = DummyBechmarkBaseClass(name, params)
         self.assertEqual(name, bench_base.name)
-        self.assertIn('A', bench_base.params.keys())
-        self.assertIn('B', bench_base.params.keys())
+        self.assertIn('A', list(bench_base.params.keys()))
+        self.assertIn('B', list(bench_base.params.keys()))
         self.assertEqual('a', bench_base.params['A'])
         self.assertEqual('b', bench_base.params['B'])
 
index 652327a..7b33ba6 100644 (file)
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
+from __future__ import absolute_import
+import os
 import unittest
 import mock
+
+from experimental_framework import APEX_LAKE_ROOT
 from experimental_framework.benchmarking_unit import BenchmarkingUnit
 # from experimental_framework.data_manager import DataManager
 from experimental_framework.deployment_unit import DeploymentUnit
@@ -275,7 +278,8 @@ class TestBenchmarkingUnit(unittest.TestCase):
                                         mock_rfc2544, mock_log, mock_influx):
         mock_heat.return_value = list()
         mock_time.return_value = '12345'
-        mock_temp_dir.return_value = 'tests/data/test_templates/'
+        mock_temp_dir.return_value = os.path.join(APEX_LAKE_ROOT,
+                                                  'tests/data/test_templates/')
         common.TEMPLATE_FILE_EXTENSION = '.yaml'
         common.RESULT_DIR = 'tests/data/results/'
         common.INFLUXDB_IP = 'InfluxIP'
@@ -336,7 +340,8 @@ class TestBenchmarkingUnit(unittest.TestCase):
             mock_log):
         mock_heat.return_value = list()
         mock_time.return_value = '12345'
-        mock_temp_dir.return_value = 'tests/data/test_templates/'
+        mock_temp_dir.return_value = os.path.join(APEX_LAKE_ROOT,
+                                                  'tests/data/test_templates/')
         common.TEMPLATE_FILE_EXTENSION = '.yaml'
         common.RESULT_DIR = 'tests/data/results/'
 
index 486ed6d..b8dbfe6 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import mock
 import os
 import logging
-import ConfigParser
+import six.moves.configparser
 import experimental_framework.common as common
 import experimental_framework.constants.conf_file_sections as cf
+from experimental_framework import APEX_LAKE_ROOT
 
 __author__ = 'vmricco'
 
@@ -47,6 +49,7 @@ def reset_common():
 
 
 class DummyConfigurationFile(common.ConfigurationFile):
+
     def __init__(self, sections, conf_file=''):
         pass
 
@@ -58,6 +61,7 @@ class DummyConfigurationFile(common.ConfigurationFile):
 
 
 class DummyConfigurationFile2(common.ConfigurationFile):
+
     def __init__(self, sections):
         self.pktgen_counter = 0
 
@@ -74,7 +78,7 @@ class DummyConfigurationFile2(common.ConfigurationFile):
             self.pktgen_counter += 1
             return 'dpdk_pktgen'
         if variable_name == cf.CFSP_DPDK_PKTGEN_DIRECTORY:
-            return os.getcwd()
+            return APEX_LAKE_ROOT
         if variable_name == cf.CFSP_DPDK_PROGRAM_NAME:
             return 'program'
         if variable_name == cf.CFSP_DPDK_COREMASK:
@@ -86,7 +90,7 @@ class DummyConfigurationFile2(common.ConfigurationFile):
         if variable_name == cf.CFSP_DPDK_BUS_SLOT_NIC_2:
             return 'bus_slot_nic_2'
         if variable_name == cf.CFSP_DPDK_DPDK_DIRECTORY:
-            return os.getcwd()
+            return APEX_LAKE_ROOT
 
     def get_variable_list(self, section):
         if section == cf.CFS_PKTGEN:
@@ -114,8 +118,7 @@ class TestCommonInit(unittest.TestCase):
 
     def setUp(self):
         common.CONF_FILE = DummyConfigurationFile('')
-        self.dir = '{}/{}'.format(os.getcwd(),
-                                  'experimental_framework/')
+        self.dir = os.path.join(APEX_LAKE_ROOT, 'experimental_framework/')
 
     def tearDown(self):
         reset_common()
@@ -131,7 +134,8 @@ class TestCommonInit(unittest.TestCase):
                               init_general_vars, init_conf_file, mock_getcwd):
         mock_getcwd.return_value = self.dir
         common.init(True)
-        init_pkgen.assert_called_once()
+        if common.CONF_FILE.get_variable_list(cf.CFS_PKTGEN):
+            init_pkgen.assert_called_once()
         init_conf_file.assert_called_once()
         init_general_vars.assert_called_once()
         init_log.assert_called_once()
@@ -144,7 +148,7 @@ class TestCommonInit(unittest.TestCase):
     @mock.patch('experimental_framework.common.LOG')
     def test_init_general_vars_for_success(self, mock_log, mock_makedirs,
                                            mock_path_exists, mock_val_file):
-        common.BASE_DIR = "{}/".format(os.getcwd())
+        common.BASE_DIR = APEX_LAKE_ROOT
         mock_path_exists.return_value = False
         mock_val_file.return_value = True
         common.init_general_vars()
@@ -160,15 +164,19 @@ class TestCommonInit2(unittest.TestCase):
 
     def setUp(self):
         common.CONF_FILE = DummyConfigurationFile2('')
-        self.dir = '{}/{}'.format(os.getcwd(), 'experimental_framework/')
+        self.dir = os.path.join(APEX_LAKE_ROOT, 'experimental_framework')
 
     def tearDown(self):
         reset_common()
         common.CONF_FILE = None
 
+    @mock.patch('experimental_framework.common.InputValidation')
+    @mock.patch('os.path.exists')
+    @mock.patch('os.makedirs')
     @mock.patch('experimental_framework.common.LOG')
-    def test_init_general_vars_2_for_success(self, mock_log):
-        common.BASE_DIR = "{}/".format(os.getcwd())
+    def test_init_general_vars_2_for_success(self, mock_log, mock_makedirs,
+                                             mock_path_exists, mock_val_file):
+        common.BASE_DIR = APEX_LAKE_ROOT
         common.init_general_vars()
         self.assertEqual(common.TEMPLATE_FILE_EXTENSION, '.yaml')
         self.assertEqual(common.TEMPLATE_DIR, '/tmp/apexlake/heat_templates/')
@@ -183,14 +191,16 @@ class TestCommonInit2(unittest.TestCase):
     def test_init_pktgen_for_success(self):
         common.init_pktgen()
         self.assertEqual(common.PKTGEN, 'dpdk_pktgen')
-        directory = self.dir.split('experimental_framework/')[0]
+        directory = self.dir.split('experimental_framework')[0]
         self.assertEqual(common.PKTGEN_DIR, directory)
         self.assertEqual(common.PKTGEN_PROGRAM, 'program')
         self.assertEqual(common.PKTGEN_COREMASK, 'coremask')
         self.assertEqual(common.PKTGEN_MEMCHANNEL, 'memchannel')
         self.assertEqual(common.PKTGEN_BUS_SLOT_NIC_1, 'bus_slot_nic_1')
         self.assertEqual(common.PKTGEN_BUS_SLOT_NIC_2, 'bus_slot_nic_2')
-        expected_dir = "{}/".format(os.getcwd())
+        # we always add '/' to end of dirs for some reason
+        # probably because we aren't using os.path.join everywhere
+        expected_dir = APEX_LAKE_ROOT + '/'
         self.assertEqual(common.PKTGEN_DPDK_DIRECTORY, expected_dir)
 
     def test_init_pktgen_for_failure(self):
@@ -260,8 +270,8 @@ class TestConfigFileClass(unittest.TestCase):
             'Deployment-parameters',
             'Testcase-parameters'
         ]
-        c_file = './tests/data/common/conf.cfg'
-        common.BASE_DIR = os.getcwd()
+        c_file = os.path.join(APEX_LAKE_ROOT, 'tests/data/common/conf.cfg')
+        common.BASE_DIR = APEX_LAKE_ROOT
         self.conf_file = common.ConfigurationFile(self.sections, c_file)
 
     def tearDown(self):
@@ -275,7 +285,8 @@ class TestConfigFileClass(unittest.TestCase):
         sections = ['General', 'OpenStack', 'Experiment-VNF', 'PacketGen',
                     'Deployment-parameters', 'Testcase-parameters']
         c = DummyConfigurationFile3(
-            sections, config_file='./tests/data/common/conf.cfg')
+            sections, config_file=os.path.join(APEX_LAKE_ROOT,
+                                               'tests/data/common/conf.cfg'))
         self.assertEqual(
             DummyConfigurationFile3._config_section_map('', '', True),
             6)
@@ -285,8 +296,9 @@ class TestConfigFileClass(unittest.TestCase):
     def test__config_section_map_for_success(self):
         general_section = 'General'
         # openstack_section = 'OpenStack'
-        config_file = 'tests/data/common/conf.cfg'
-        config = ConfigParser.ConfigParser()
+        config_file = os.path.join(APEX_LAKE_ROOT,
+                                   'tests/data/common/conf.cfg')
+        config = six.moves.configparser.ConfigParser()
         config.read(config_file)
 
         expected = {
@@ -361,8 +373,9 @@ class TestCommonMethods(unittest.TestCase):
             'Deployment-parameters',
             'Testcase-parameters'
         ]
-        config_file = './tests/data/common/conf.cfg'
-        common.BASE_DIR = os.getcwd()
+        config_file = os.path.join(APEX_LAKE_ROOT,
+                                   'tests/data/common/conf.cfg')
+        common.BASE_DIR = APEX_LAKE_ROOT
         common.CONF_FILE = DummyConfigurationFile4(self.sections, config_file)
 
     def tearDown(self):
@@ -397,13 +410,14 @@ class TestCommonMethods(unittest.TestCase):
         self.assertEqual(expected, output)
 
     def test_get_file_first_line_for_success(self):
-        file = 'tests/data/common/conf.cfg'
+        file = os.path.join(APEX_LAKE_ROOT, 'tests/data/common/conf.cfg')
         expected = '[General]\n'
         output = common.get_file_first_line(file)
         self.assertEqual(expected, output)
 
     def test_replace_in_file_for_success(self):
-        filename = 'tests/data/common/file_replacement.txt'
+        filename = os.path.join(APEX_LAKE_ROOT,
+                                'tests/data/common/file_replacement.txt')
         text_to_search = 'replacement of'
         text_to_replace = '***'
         common.replace_in_file(filename, text_to_search, text_to_replace)
@@ -542,27 +556,14 @@ class TestinputValidation(unittest.TestCase):
             list(), ''
         )
 
-    def test_validate_file_exist_for_success(self):
-        filename = 'tests/data/common/file_replacement.txt'
-        output = common.InputValidation.validate_file_exist(filename, '')
-        self.assertTrue(output)
-
-    def test_validate_file_exist_for_failure(self):
-        filename = 'tests/data/common/file_replacement'
-        self.assertRaises(
-            ValueError,
-            common.InputValidation.validate_file_exist,
-            filename, ''
-        )
-
     def test_validate_directory_exist_and_format_for_success(self):
-        directory = 'tests/data/common/'
+        directory = os.path.join(APEX_LAKE_ROOT, 'tests/data/common/')
         output = common.InputValidation.\
             validate_directory_exist_and_format(directory, '')
         self.assertTrue(output)
 
     def test_validate_directory_exist_and_format_for_failure(self):
-        directory = 'tests/data/com/'
+        directory = os.path.join(APEX_LAKE_ROOT, 'tests/data/com/')
         self.assertRaises(
             ValueError,
             common.InputValidation.validate_directory_exist_and_format,
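Much of the test_common churn above swaps `ConfigParser` for `six.moves.configparser`, which maps Python 2's `ConfigParser` module and Python 3's renamed `configparser` onto a single import name. A minimal sketch, reusing the conf.cfg path from the tests:

    from __future__ import absolute_import

    import six.moves.configparser

    # One import that resolves to the right stdlib module on both
    # interpreters; read() silently returns [] if the file is absent.
    config = six.moves.configparser.ConfigParser()
    config.read('tests/data/common/conf.cfg')
    print(config.sections())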
index 2b03edb..abf4134 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 from experimental_framework.constants import conf_file_sections as cfs
 
index cec834e..5a9178f 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import logging
 import mock
index bad250e..0b0df6c 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import mock
 from experimental_framework.constants import conf_file_sections as conf_file
index dad3177..cc3e1bf 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import experimental_framework.heat_template_generation as heat_gen
 import mock
 import os
 import experimental_framework.common as common
+from experimental_framework import APEX_LAKE_ROOT
 
 __author__ = 'gpetralx'
 
@@ -45,6 +47,7 @@ def reset_common():
 
 
 class TestGeneratesTemplate(unittest.TestCase):
+
     def setUp(self):
         self.deployment_configuration = {
             'vnic_type': ['normal', 'direct'],
@@ -61,9 +64,11 @@ class TestGeneratesTemplate(unittest.TestCase):
     @mock.patch('experimental_framework.common.get_template_dir')
     def test_generates_template_for_success(self, mock_template_dir,
                                             mock_log):
-        generated_templates_dir = 'tests/data/generated_templates/'
+        generated_templates_dir = os.path.join(
+            APEX_LAKE_ROOT, 'tests/data/generated_templates/')
         mock_template_dir.return_value = generated_templates_dir
-        test_templates = 'tests/data/test_templates/'
+        test_templates = os.path.join(APEX_LAKE_ROOT,
+                                      'tests/data/test_templates/')
         heat_gen.generates_templates(self.template_name,
                                      self.deployment_configuration)
         for dirname, dirnames, filenames in os.walk(test_templates):
@@ -73,8 +78,9 @@ class TestGeneratesTemplate(unittest.TestCase):
                         self.assertListEqual(test.readlines(),
                                              generated.readlines())
 
-        t_name = '/tests/data/generated_templates/VTC_base_single_vm_wait.tmp'
-        self.template_name = "{}{}".format(os.getcwd(), t_name)
+        self.template_name = os.path.join(
+            APEX_LAKE_ROOT,
+            'tests/data/generated_templates/VTC_base_single_vm_wait.tmp')
         heat_gen.generates_templates(self.template_name,
                                      self.deployment_configuration)
         for dirname, dirnames, filenames in os.walk(test_templates):
@@ -86,7 +92,8 @@ class TestGeneratesTemplate(unittest.TestCase):
 
     @mock.patch('experimental_framework.common.get_template_dir')
     def test_get_all_heat_templates_for_success(self, template_dir):
-        generated_templates = 'tests/data/generated_templates/'
+        generated_templates = os.path.join(APEX_LAKE_ROOT,
+                                           'tests/data/generated_templates/')
         template_dir.return_value = generated_templates
         extension = '.yaml'
         expected = ['experiment_1.yaml', 'experiment_2.yaml']
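The recurring `os.getcwd()` to `APEX_LAKE_ROOT` rewrites in these test files pin the fixture paths to the package checkout instead of whatever working directory the test runner happens to start in. The diff does not show how the constant is defined; a plausible sketch, assuming it lives in the package `__init__` and is derived from `__file__`:

    from __future__ import absolute_import
    import os

    # Hypothetical reconstruction: anchor all test-data paths to the
    # repository root rather than to os.getcwd().
    APEX_LAKE_ROOT = os.path.dirname(
        os.path.dirname(os.path.abspath(__file__)))

    templates = os.path.join(APEX_LAKE_ROOT, 'tests/data/test_templates/')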
index 0fe8554..58bd755 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
+
+from __future__ import absolute_import
+import os
 import unittest
 import logging
 import experimental_framework.common as common
 
-from experimental_framework import heat_manager
+from experimental_framework import heat_manager, APEX_LAKE_ROOT
 import mock
 
 __author__ = 'gpetralx'
@@ -27,6 +31,7 @@ def get_mock_heat(version, *args, **kwargs):
 
 
 class MockStacks(object):
+
     def __init__(self, stacks):
         self.stacks = stacks
 
@@ -34,7 +39,7 @@ class MockStacks(object):
         list_name = list()
         for stack in self.stacks:
             list_name.append(stack.stack_name)
-        print list_name
+        print(list_name)
         return self.stacks
 
     def validate(self, template=None):
@@ -47,11 +52,12 @@ class MockStacks(object):
 
     def create(self, stack_name=None, files=None, template=None,
                parameters=None):
-        print stack_name
+        print(stack_name)
         self.stacks.append(MockStack(stack_name))
 
 
 class MockStacks_2(object):
+
     def __init__(self, stacks):
         self.stacks = stacks
 
@@ -60,6 +66,7 @@ class MockStacks_2(object):
 
 
 class MockStack(object):
+
     def __init__(self, stack_name):
         self.name = stack_name
 
@@ -80,6 +87,7 @@ class MockStack(object):
 
 
 class MockHeat(object):
+
     def __init__(self):
         stacks = [MockStack('stack_1'), MockStack('stack_2')]
         self.stacks_list = MockStacks(stacks)
@@ -90,18 +98,21 @@ class MockHeat(object):
 
 
 class MockHeat_2(MockHeat):
+
     def __init__(self):
         stacks = [MockStack('stack_1'), MockStack('stack_2')]
         self.stacks_list = MockStacks_2(stacks)
 
 
 class HeatManagerMock(heat_manager.HeatManager):
+
     def init_heat(self):
         if self.heat is None:
             self.heat = MockHeat()
 
 
 class HeatManagerMock_2(heat_manager.HeatManager):
+
     def init_heat(self):
         if self.heat is None:
             self.heat = MockHeat_2()
@@ -134,8 +145,9 @@ class TestHeatManager(unittest.TestCase):
                          self.heat_manager.check_stack_status('stack_x'))
 
     def test_validate_template_for_success(self):
-        template_file = \
-            'tests/data/test_templates/VTC_base_single_vm_wait_1.yaml'
+        template_file = os.path.join(
+            APEX_LAKE_ROOT,
+            'tests/data/test_templates/VTC_base_single_vm_wait_1.yaml')
         with self.assertRaises(ValueError):
             self.heat_manager.validate_heat_template(template_file)
 
@@ -180,11 +192,13 @@ class TestHeatManager_2(unittest.TestCase):
 
 
 class ServiceCatalog():
+
     def url_for(self, service_type):
         return 'http://heat_url'
 
 
 class KeystoneMock(object):
+
     @property
     def auth_token(self):
         return 'token'
@@ -193,6 +207,7 @@ class KeystoneMock(object):
 
 
 class TestHeatInit(unittest.TestCase):
+
     def setUp(self):
         credentials = dict()
         credentials['ip_controller'] = '1.1.1.1'
@@ -216,5 +231,5 @@ class TestHeatInit(unittest.TestCase):
                                                 tenant_name='project',
                                                 password='password',
                                                 auth_url='auth_uri')
-        heat_client.assert_called_once_with('1',  endpoint='http://heat_url',
+        heat_client.assert_called_once_with('1', endpoint='http://heat_url',
                                             token='token')
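The heat-manager test above also picks up `from __future__ import print_function`, which turns the bare Python 2 `print list_name` statements into calls that parse on both interpreters:

    from __future__ import print_function

    # With the future import, print is a function on Python 2 as well.
    list_name = ['stack_1', 'stack_2']
    print(list_name)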
index 369129a..2bd8b7b 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import mock
 import os
@@ -21,6 +22,8 @@ import experimental_framework.benchmarks.\
     instantiation_validation_benchmark as iv_module
 from experimental_framework.benchmarks.\
     instantiation_validation_benchmark import InstantiationValidationBenchmark
+from six.moves import map
+from six.moves import range
 
 
 kill_counter = [0, 0]
@@ -204,11 +207,11 @@ class InstantiationValidationInitTest(unittest.TestCase):
         ]
         expected['allowed_values'] = dict()
         expected['allowed_values'][iv_module.THROUGHPUT] = \
-            map(str, range(0, 100))
+            list(map(str, list(range(0, 100))))
         expected['allowed_values'][iv_module.VLAN_SENDER] = \
-            map(str, range(-1, 4096))
+            list(map(str, list(range(-1, 4096))))
         expected['allowed_values'][iv_module.VLAN_RECEIVER] = \
-            map(str, range(-1, 4096))
+            list(map(str, list(range(-1, 4096))))
         expected['default_values'] = dict()
         expected['default_values'][iv_module.THROUGHPUT] = '1'
         expected['default_values'][iv_module.VLAN_SENDER] = '-1'
@@ -216,7 +219,7 @@ class InstantiationValidationInitTest(unittest.TestCase):
         output = self.iv.get_features()
         self.assertEqual(expected, output)
 
-    @mock.patch('commands.getoutput')
+    @mock.patch('subprocess.check_output')
     def test__get_pids_for_success(self, mock_getoutput):
         expected = [1234]
         mock_getoutput.return_value = '1234'
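Retargeting the patch from `commands.getoutput` to `subprocess.check_output` is required because the `commands` module exists only on Python 2. The two calls are not drop-in equivalents: `check_output` takes an argv list, returns bytes on Python 3, and raises `CalledProcessError` on a nonzero exit status. A small sketch:

    import subprocess

    # Portable replacement for commands.getoutput('echo 1234').
    output = subprocess.check_output(['echo', '1234'])
    pids = [int(pid) for pid in output.split()]   # [1234]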
index f65600f..f9aa947 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import mock
-import os
+
+from six.moves import range
+
 import experimental_framework.common as common
 import experimental_framework.deployment_unit as deploy
 import experimental_framework.benchmarks.\
     instantiation_validation_noisy_neighbors_benchmark as mut
+from experimental_framework import APEX_LAKE_ROOT
 
 
 class InstantiationValidationInitTest(unittest.TestCase):
@@ -34,7 +38,7 @@ class InstantiationValidationInitTest(unittest.TestCase):
         openstack_credentials['heat_url'] = ''
         openstack_credentials['password'] = ''
         common.DEPLOYMENT_UNIT = deploy.DeploymentUnit(openstack_credentials)
-        common.BASE_DIR = os.getcwd()
+        common.BASE_DIR = APEX_LAKE_ROOT
         common.TEMPLATE_DIR = 'tests/data/generated_templates'
         self.iv = mut.\
             InstantiationValidationNoisyNeighborsBenchmark(name, params)
@@ -72,9 +76,11 @@ class InstantiationValidationInitTest(unittest.TestCase):
         expected['parameters'].append(mut.NUM_OF_NEIGHBORS)
         expected['parameters'].append(mut.AMOUNT_OF_RAM)
         expected['parameters'].append(mut.NUMBER_OF_CORES)
-        expected['allowed_values']['throughput'] = map(str, range(0, 100))
-        expected['allowed_values']['vlan_sender'] = map(str, range(-1, 4096))
-        expected['allowed_values']['vlan_receiver'] = map(str, range(-1, 4096))
+        expected['allowed_values']['throughput'] = [str(x) for x in range(100)]
+        expected['allowed_values']['vlan_sender'] = [str(x) for x in
+                                                     range(-1, 4096)]
+        expected['allowed_values']['vlan_receiver'] = [str(x) for x in
+                                                       range(-1, 4096)]
         expected['allowed_values'][mut.NUM_OF_NEIGHBORS] = \
             ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']
         expected['allowed_values'][mut.NUMBER_OF_CORES] = \
@@ -115,10 +121,10 @@ class InstantiationValidationInitTest(unittest.TestCase):
                           'num_of_neighbours': 1}
         self.iv.template_file = 'template.yaml'
         self.iv.init()
-        mock_replace.assert_called_once_wih('file',
-                                            'local out_file = ""',
-                                            'local out_file = "' +
-                                            'res_file' + '"')
+        mock_replace.assert_called_once_with('file',
+                                             'local out_file = ""',
+                                             'local out_file = "' +
+                                             'res_file' + '"')
         mock_deploy_heat.assert_called_once_with('template.yaml',
                                                  'neighbour0',
                                                  {'cores': 1,
@@ -131,12 +137,14 @@ class InstantiationValidationInitTest(unittest.TestCase):
     @mock.patch('experimental_framework.common.'
                 'DEPLOYMENT_UNIT.destroy_heat_template')
     def test_finalize_for_success(self, mock_heat_destroy, mock_replace):
+        self.iv.lua_file = 'file'
+        self.iv.results_file = 'res_file'
         self.iv.neighbor_stack_names = ['neighbor0']
         stack_name = 'neighbor0'
         self.iv.finalize()
         mock_heat_destroy.assert_called_once_with(stack_name)
-        mock_replace.assert_called_once_wih('file',
-                                            'local out_file = ""',
-                                            'local out_file = "' +
-                                            'res_file' + '"')
+        mock_replace.assert_called_once_with('file',
+                                             'local out_file = "' +
+                                             'res_file' + '"',
+                                             'local out_file = ""')
         self.assertEqual(self.iv.neighbor_stack_names, list())
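The `assert_called_once_wih` fixes above are worth pausing on: with older mock releases a misspelled assert method was just another auto-created Mock attribute, so the call returned a fresh Mock and the test verified nothing. Newer mock versions raise AttributeError for unknown `assert_*` names, which is how typos like this surface. A minimal illustration:

    import mock

    m = mock.Mock()
    m.replace_in_file('file', 'old', 'new')
    # The correctly spelled method actually checks the arguments and
    # the call count; the old *_wih spelling silently passed on mock 1.x.
    m.replace_in_file.assert_called_once_with('file', 'old', 'new')

The same hazard explains the `reset_lua_file_mock.assert_called_once()` call dropped from the RFC 2544 test below: bare `assert_called_once` only exists in newer mock releases.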
index fc5a7fd..39b38d7 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import mock
 import os
 import experimental_framework.common as common
 from experimental_framework.benchmarks \
     import multi_tenancy_throughput_benchmark as bench
+from six.moves import range
 
 __author__ = 'gpetralx'
 
 
 class MockDeploymentUnit(object):
+
     def deploy_heat_template(self, temp_file, stack_name, heat_param):
         pass
 
@@ -35,6 +38,7 @@ def get_deployment_unit():
 
 
 class TestMultiTenancyThroughputBenchmark(unittest.TestCase):
+
     def setUp(self):
         name = 'benchmark'
         params = dict()
@@ -47,9 +51,9 @@ class TestMultiTenancyThroughputBenchmark(unittest.TestCase):
     def test_get_features_for_sanity(self):
         output = self.benchmark.get_features()
         self.assertIsInstance(output, dict)
-        self.assertIn('parameters', output.keys())
-        self.assertIn('allowed_values', output.keys())
-        self.assertIn('default_values', output.keys())
+        self.assertIn('parameters', list(output.keys()))
+        self.assertIn('allowed_values', list(output.keys()))
+        self.assertIn('default_values', list(output.keys()))
         self.assertIsInstance(output['parameters'], list)
         self.assertIsInstance(output['allowed_values'], dict)
         self.assertIsInstance(output['default_values'], dict)
index ef3b0da..487de77 100644 (file)
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import absolute_import
 import unittest
 import mock
 import os
@@ -37,9 +38,9 @@ class RFC2544ThroughputBenchmarkRunTest(unittest.TestCase):
     def test_get_features_for_sanity(self):
         output = self.benchmark.get_features()
         self.assertIsInstance(output, dict)
-        self.assertIn('parameters', output.keys())
-        self.assertIn('allowed_values', output.keys())
-        self.assertIn('default_values', output.keys())
+        self.assertIn('parameters', list(output.keys()))
+        self.assertIn('allowed_values', list(output.keys()))
+        self.assertIn('default_values', list(output.keys()))
         self.assertIsInstance(output['parameters'], list)
         self.assertIsInstance(output['allowed_values'], dict)
         self.assertIsInstance(output['default_values'], dict)
@@ -74,7 +75,6 @@ class RFC2544ThroughputBenchmarkRunTest(unittest.TestCase):
         output = self.benchmark.run()
         self.assertEqual(expected, output)
         conf_lua_file_mock.assert_called_once()
-        reset_lua_file_mock.assert_called_once()
         dpdk_instance = mock_dpdk()
         dpdk_instance.init_dpdk_pktgen.assert_called_once_with(
             dpdk_interfaces=2, pcap_file_0='packet_1.pcap',
index e51343f..fb38b69 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import experimental_framework.heat_template_generation as heat_gen
 
@@ -19,6 +20,7 @@ __author__ = 'gpetralx'
 
 
 class TestTreeNode(unittest.TestCase):
+
     def setUp(self):
         self.tree = heat_gen.TreeNode()