Add support for Python 3 39/25539/18
author Ross Brattain <ross.b.brattain@intel.com>
Mon, 5 Dec 2016 21:11:54 +0000 (16:11 -0500)
committer Ross Brattain <ross.b.brattain@intel.com>
Fri, 13 Jan 2017 02:25:04 +0000 (18:25 -0800)
Porting to Python 3 using the OpenStack guidelines:
https://wiki.openstack.org/wiki/Python3

This passes unittests on Python 3.5 and the opnfv_smoke suite

Updates (the key patterns are sketched below):
   use six for urlparse and urlopen
   fix exception.message attribute removal
   run unittests on python3
   use unittest.mock on Python 3
   fix open mock for vsperf
   fix float division by using delta/epsilon comparison
   use unicode in StringIO
   use plugin/sample_config.yaml relative path from test case
   fixed apexlake unittests
   upgraded to mock 2.0.0 to match python3 unittest.mock features
   fixed flake8 issues
   implement safe JSON encoding with oslo_serialization.jsonutils.dump_as_bytes()
   implement safe unicode encode/decode with oslo_utils.encodeutils
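
For reference, minimal sketches of the main py2/py3 patterns listed above
(illustrative snippets, not lines taken from the diff):

    from __future__ import absolute_import, division

    from oslo_serialization import jsonutils
    from oslo_utils import encodeutils
    from six.moves import configparser               # was: import ConfigParser
    from six.moves.urllib.parse import urlsplit      # was: from urlparse import urlsplit
    from six.moves.urllib.request import urlopen     # was: from urllib2 import urlopen

    parser = configparser.ConfigParser()
    host = urlsplit("http://10.1.1.1:8086").hostname

    # BaseException.message is gone in Python 3; use str(e) or e.args
    try:
        raise RuntimeError("boom")
    except RuntimeError as e:
        message = str(e)                             # was: e.message

    # true division yields floats where py2 truncated; compare against a
    # small delta/epsilon instead of testing exact equality
    EPSILON = 0.000001
    assert abs(10 / 4 - 2.5) < EPSILON

    # byte-safe JSON encoding and unicode-safe decoding on both interpreters
    body = jsonutils.dump_as_bytes({"status": 1})        # always bytes
    data = jsonutils.loads(body)                         # accepts bytes or unicode
    text = encodeutils.safe_decode(b"output", 'utf-8')   # always unicode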

heat: convert pub key file from bytes to unicode
    pkg_resources returns raw bytes; in Python 3
    we have to decode these to UTF-8 unicode
    so JSON can encode them for the heat template
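
A minimal sketch of that fix (the package/resource path here is
illustrative, not the actual one):

    import pkg_resources
    from oslo_utils import encodeutils

    # resource_string() returns bytes under Python 3
    key_bytes = pkg_resources.resource_string(
        'yardstick.resources', 'files/yardstick_key.pub')
    # decode to unicode so the JSON encoder can embed it in the heat template
    key_content = encodeutils.safe_decode(key_bytes, 'utf-8')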

JIRA: YARDSTICK-452

Change-Id: Ib80dd1d0c0eb0592acd832b82f6a7f8f7c20bfda
Signed-off-by: Ross Brattain <ross.b.brattain@intel.com>
204 files changed:
api/base.py
api/conf.py
api/database/__init__.py
api/database/models.py
api/resources/env_action.py
api/resources/release_action.py
api/resources/results.py
api/resources/samples_action.py
api/server.py
api/swagger/models.py
api/urls.py
api/utils/common.py
api/utils/daemonthread.py
api/utils/influx.py
api/views.py
ez_setup.py
requirements.txt
setup.py
tests/functional/test_cli_runner.py
tests/functional/test_cli_scenario.py
tests/functional/utils.py
tests/unit/api/utils/test_common.py
tests/unit/api/utils/test_influx.py
tests/unit/benchmark/contexts/test_dummy.py
tests/unit/benchmark/contexts/test_heat.py
tests/unit/benchmark/contexts/test_model.py
tests/unit/benchmark/contexts/test_node.py
tests/unit/benchmark/core/test_plugin.py
tests/unit/benchmark/core/test_task.py
tests/unit/benchmark/core/test_testcase.py
tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
tests/unit/benchmark/scenarios/availability/test_attacker_general.py
tests/unit/benchmark/scenarios/availability/test_attacker_process.py
tests/unit/benchmark/scenarios/availability/test_basemonitor.py
tests/unit/benchmark/scenarios/availability/test_baseoperation.py
tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py
tests/unit/benchmark/scenarios/availability/test_director.py
tests/unit/benchmark/scenarios/availability/test_monitor_command.py
tests/unit/benchmark/scenarios/availability/test_monitor_general.py
tests/unit/benchmark/scenarios/availability/test_monitor_process.py
tests/unit/benchmark/scenarios/availability/test_operation_general.py
tests/unit/benchmark/scenarios/availability/test_result_checker_general.py
tests/unit/benchmark/scenarios/availability/test_scenario_general.py
tests/unit/benchmark/scenarios/availability/test_serviceha.py
tests/unit/benchmark/scenarios/compute/test_cachestat.py
tests/unit/benchmark/scenarios/compute/test_computecapacity.py
tests/unit/benchmark/scenarios/compute/test_cpuload.py
tests/unit/benchmark/scenarios/compute/test_cyclictest.py
tests/unit/benchmark/scenarios/compute/test_lmbench.py
tests/unit/benchmark/scenarios/compute/test_memload.py
tests/unit/benchmark/scenarios/compute/test_plugintest.py
tests/unit/benchmark/scenarios/compute/test_ramspeed.py
tests/unit/benchmark/scenarios/compute/test_unixbench.py
tests/unit/benchmark/scenarios/dummy/test_dummy.py
tests/unit/benchmark/scenarios/networking/test_iperf3.py
tests/unit/benchmark/scenarios/networking/test_netperf.py
tests/unit/benchmark/scenarios/networking/test_netperf_node.py
tests/unit/benchmark/scenarios/networking/test_netutilization.py
tests/unit/benchmark/scenarios/networking/test_networkcapacity.py
tests/unit/benchmark/scenarios/networking/test_ping.py
tests/unit/benchmark/scenarios/networking/test_ping6.py
tests/unit/benchmark/scenarios/networking/test_pktgen.py
tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
tests/unit/benchmark/scenarios/networking/test_sfc.py
tests/unit/benchmark/scenarios/networking/test_vsperf.py
tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py
tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py
tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py
tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py
tests/unit/benchmark/scenarios/parser/test_parser.py
tests/unit/benchmark/scenarios/storage/test_fio.py
tests/unit/benchmark/scenarios/storage/test_storagecapacity.py
tests/unit/benchmark/scenarios/storage/test_storperf.py
tests/unit/cmd/commands/test_env.py
tests/unit/common/test_httpClient.py
tests/unit/common/test_openstack_utils.py
tests/unit/common/test_template_format.py
tests/unit/common/test_utils.py
tests/unit/dispatcher/test_influxdb.py
tests/unit/dispatcher/test_influxdb_line_protocol.py
tests/unit/test_ssh.py
third_party/influxdb/influxdb_line_protocol.py
yardstick/__init__.py
yardstick/benchmark/__init__.py
yardstick/benchmark/contexts/base.py
yardstick/benchmark/contexts/dummy.py
yardstick/benchmark/contexts/heat.py
yardstick/benchmark/contexts/model.py
yardstick/benchmark/contexts/node.py
yardstick/benchmark/core/plugin.py
yardstick/benchmark/core/runner.py
yardstick/benchmark/core/scenario.py
yardstick/benchmark/core/task.py
yardstick/benchmark/core/testcase.py
yardstick/benchmark/runners/arithmetic.py
yardstick/benchmark/runners/base.py
yardstick/benchmark/runners/duration.py
yardstick/benchmark/runners/iteration.py
yardstick/benchmark/runners/sequence.py
yardstick/benchmark/scenarios/availability/actionrollbackers.py
yardstick/benchmark/scenarios/availability/attacker/attacker_baremetal.py
yardstick/benchmark/scenarios/availability/attacker/attacker_general.py
yardstick/benchmark/scenarios/availability/attacker/attacker_process.py
yardstick/benchmark/scenarios/availability/attacker/baseattacker.py
yardstick/benchmark/scenarios/availability/director.py
yardstick/benchmark/scenarios/availability/monitor/basemonitor.py
yardstick/benchmark/scenarios/availability/monitor/monitor_command.py
yardstick/benchmark/scenarios/availability/monitor/monitor_general.py
yardstick/benchmark/scenarios/availability/monitor/monitor_process.py
yardstick/benchmark/scenarios/availability/operation/baseoperation.py
yardstick/benchmark/scenarios/availability/operation/operation_general.py
yardstick/benchmark/scenarios/availability/result_checker/baseresultchecker.py
yardstick/benchmark/scenarios/availability/result_checker/result_checker_general.py
yardstick/benchmark/scenarios/availability/scenario_general.py
yardstick/benchmark/scenarios/availability/serviceha.py
yardstick/benchmark/scenarios/base.py
yardstick/benchmark/scenarios/compute/cachestat.py
yardstick/benchmark/scenarios/compute/computecapacity.py
yardstick/benchmark/scenarios/compute/cpuload.py
yardstick/benchmark/scenarios/compute/cyclictest.py
yardstick/benchmark/scenarios/compute/lmbench.py
yardstick/benchmark/scenarios/compute/memload.py
yardstick/benchmark/scenarios/compute/perf.py
yardstick/benchmark/scenarios/compute/plugintest.py
yardstick/benchmark/scenarios/compute/ramspeed.py
yardstick/benchmark/scenarios/compute/unixbench.py
yardstick/benchmark/scenarios/dummy/dummy.py
yardstick/benchmark/scenarios/networking/iperf3.py
yardstick/benchmark/scenarios/networking/netperf.py
yardstick/benchmark/scenarios/networking/netperf_node.py
yardstick/benchmark/scenarios/networking/netutilization.py
yardstick/benchmark/scenarios/networking/networkcapacity.py
yardstick/benchmark/scenarios/networking/ping.py
yardstick/benchmark/scenarios/networking/ping6.py
yardstick/benchmark/scenarios/networking/pktgen.py
yardstick/benchmark/scenarios/networking/pktgen_dpdk.py
yardstick/benchmark/scenarios/networking/sfc.py
yardstick/benchmark/scenarios/networking/sfc_openstack.py
yardstick/benchmark/scenarios/networking/vsperf.py
yardstick/benchmark/scenarios/networking/vtc_instantiation_validation.py
yardstick/benchmark/scenarios/networking/vtc_instantiation_validation_noisy.py
yardstick/benchmark/scenarios/networking/vtc_throughput.py
yardstick/benchmark/scenarios/networking/vtc_throughput_noisy.py
yardstick/benchmark/scenarios/parser/parser.py
yardstick/benchmark/scenarios/storage/fio.py
yardstick/benchmark/scenarios/storage/storagecapacity.py
yardstick/benchmark/scenarios/storage/storperf.py
yardstick/cmd/__init__.py
yardstick/cmd/cli.py
yardstick/cmd/commands/__init__.py
yardstick/cmd/commands/env.py
yardstick/cmd/commands/plugin.py
yardstick/cmd/commands/runner.py
yardstick/cmd/commands/scenario.py
yardstick/cmd/commands/task.py
yardstick/cmd/commands/testcase.py
yardstick/common/constants.py
yardstick/common/httpClient.py
yardstick/common/openstack_utils.py
yardstick/common/task_template.py
yardstick/common/template_format.py
yardstick/common/utils.py
yardstick/definitions.py
yardstick/dispatcher/__init__.py
yardstick/dispatcher/base.py
yardstick/dispatcher/file.py
yardstick/dispatcher/http.py
yardstick/dispatcher/influxdb.py
yardstick/main.py
yardstick/orchestrator/heat.py
yardstick/plot/plotter.py
yardstick/ssh.py
yardstick/vTC/apexlake/experimental_framework/__init__.py
yardstick/vTC/apexlake/experimental_framework/api.py
yardstick/vTC/apexlake/experimental_framework/benchmarking_unit.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/benchmark_base_class.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/instantiation_validation_benchmark.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/instantiation_validation_noisy_neighbors_benchmark.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/multi_tenancy_throughput_benchmark.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/rfc2544_throughput_benchmark.py
yardstick/vTC/apexlake/experimental_framework/benchmarks/test_benchmark.py
yardstick/vTC/apexlake/experimental_framework/common.py
yardstick/vTC/apexlake/experimental_framework/constants/framework_parameters.py
yardstick/vTC/apexlake/experimental_framework/deployment_unit.py
yardstick/vTC/apexlake/experimental_framework/heat_manager.py
yardstick/vTC/apexlake/experimental_framework/heat_template_generation.py
yardstick/vTC/apexlake/experimental_framework/packet_generators/base_packet_generator.py
yardstick/vTC/apexlake/experimental_framework/packet_generators/dpdk_packet_generator.py
yardstick/vTC/apexlake/setup.py
yardstick/vTC/apexlake/tests/api_test.py
yardstick/vTC/apexlake/tests/base_packet_generator_test.py
yardstick/vTC/apexlake/tests/benchmark_base_class_test.py
yardstick/vTC/apexlake/tests/benchmarking_unit_test.py
yardstick/vTC/apexlake/tests/common_test.py
yardstick/vTC/apexlake/tests/conf_file_sections_test.py
yardstick/vTC/apexlake/tests/deployment_unit_test.py
yardstick/vTC/apexlake/tests/dpdk_packet_generator_test.py
yardstick/vTC/apexlake/tests/generates_template_test.py
yardstick/vTC/apexlake/tests/heat_manager_test.py
yardstick/vTC/apexlake/tests/instantiation_validation_bench_test.py
yardstick/vTC/apexlake/tests/instantiation_validation_noisy_bench_test.py
yardstick/vTC/apexlake/tests/multi_tenancy_throughput_benchmark_test.py
yardstick/vTC/apexlake/tests/rfc2544_throughput_benchmark_test.py
yardstick/vTC/apexlake/tests/tree_node_test.py

index 7671527..5270085 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import re
 import importlib
 import logging
index 3d9d190..abaf34a 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 from pyroute2 import IPDB
 
 
index 5b0bb05..d7cf4f9 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
 from sqlalchemy import create_engine
index 2fc141c..2270de9 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 from sqlalchemy import Column
 from sqlalchemy import Integer
 from sqlalchemy import String
index 7e24871..8955f3c 100644 (file)
@@ -6,25 +6,27 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
+import errno
+import json
 import logging
-import threading
+import os
 import subprocess
+import threading
 import time
 import uuid
-import json
-import os
-import errno
-import ConfigParser
 
-from docker import Client
+from six.moves import configparser
 
-from yardstick.common import constants as config
-from yardstick.common import utils as yardstick_utils
-from yardstick.common.httpClient import HttpClient
 from api import conf as api_conf
+from api.database.handler import AsyncTaskHandler
 from api.utils import influx
 from api.utils.common import result_handler
-from api.database.handler import AsyncTaskHandler
+from docker import Client
+from yardstick.common import constants as config
+from yardstick.common import utils as yardstick_utils
+from yardstick.common.httpClient import HttpClient
 
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.DEBUG)
@@ -167,7 +169,7 @@ def _config_influxdb():
 def _change_output_to_influxdb():
     yardstick_utils.makedirs(config.YARDSTICK_CONFIG_DIR)
 
-    parser = ConfigParser.ConfigParser()
+    parser = configparser.ConfigParser()
     parser.read(config.YARDSTICK_CONFIG_SAMPLE_FILE)
 
     parser.set('DEFAULT', 'dispatcher', 'influxdb')
index d4dc246..c5aa20a 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import uuid
 import os
 import logging
index fd51895..86fc251 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import uuid
 
index df6db17..490e48b 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import uuid
 import os
 import logging
index 8cce4de..5bac1ba 100644 (file)
@@ -6,17 +6,20 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import logging
-from itertools import ifilter
+from __future__ import absolute_import
+
 import inspect
+import logging
+from functools import reduce
+from six.moves import filter
 
+from flasgger import Swagger
 from flask import Flask
 from flask_restful import Api
-from flasgger import Swagger
 
 from api.database import Base
-from api.database import engine
 from api.database import db_session
+from api.database import engine
 from api.database import models
 from api.urls import urlpatterns
 from yardstick import _init_logging
@@ -44,7 +47,7 @@ def init_db():
             pass
         return False
 
-    subclses = ifilter(func, inspect.getmembers(models, inspect.isclass))
+    subclses = filter(func, inspect.getmembers(models, inspect.isclass))
     logger.debug('Import models: %s', [a[1] for a in subclses])
     Base.metadata.create_all(bind=engine)
 
index 9a0157e..d3c7a9b 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 from flask_restful import fields
 from flask_restful_swagger import swagger
 
index 58df291..04b7485 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 from api import views
 from api.utils.common import Url
 
index 6971c6d..1c800ce 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import collections
 import logging
 
@@ -13,18 +14,19 @@ from flask import jsonify
 
 from api.utils.daemonthread import DaemonThread
 from yardstick.cmd.cli import YardstickCLI
+import six
 
 logger = logging.getLogger(__name__)
 
 
-def translate_to_str(object):
-    if isinstance(object, collections.Mapping):
-        return {str(k): translate_to_str(v) for k, v in object.items()}
-    elif isinstance(object, list):
-        return [translate_to_str(ele) for ele in object]
-    elif isinstance(object, unicode):
-        return str(object)
-    return object
+def translate_to_str(obj):
+    if isinstance(obj, collections.Mapping):
+        return {str(k): translate_to_str(v) for k, v in obj.items()}
+    elif isinstance(obj, list):
+        return [translate_to_str(ele) for ele in obj]
+    elif isinstance(obj, six.text_type):
+        return str(obj)
+    return obj
 
 
 def get_command_list(command_list, opts, args):
index 19182c4..0049834 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import threading
 import os
 import errno
index d4b070f..275c63a 100644 (file)
@@ -6,10 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
 import logging
-import ConfigParser
-from urlparse import urlsplit
 
+import six.moves.configparser as ConfigParser
+from six.moves.urllib.parse import urlsplit
 from influxdb import InfluxDBClient
 
 from api import conf
index eb81145..0c39bfa 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import os
 
@@ -36,6 +37,7 @@ class ReleaseAction(ApiResource):
 
 
 class SamplesAction(ApiResource):
+
     def post(self):
         return self._dispatch_post()
 
@@ -43,7 +45,8 @@ class SamplesAction(ApiResource):
 TestSuiteActionModel = models.TestSuiteActionModel
 TestSuiteActionArgsModel = models.TestSuiteActionArgsModel
 TestSuiteActionArgsOptsModel = models.TestSuiteActionArgsOptsModel
-TestSuiteActionArgsOptsTaskArgModel = models.TestSuiteActionArgsOptsTaskArgModel
+TestSuiteActionArgsOptsTaskArgModel = \
+    models.TestSuiteActionArgsOptsTaskArgModel
 
 
 class TestsuitesAction(ApiResource):
@@ -56,11 +59,13 @@ ResultModel = models.ResultModel
 
 
 class Results(ApiResource):
+
     @swag_from(os.getcwd() + '/swagger/docs/results.yaml')
     def get(self):
         return self._dispatch_get()
 
 
 class EnvAction(ApiResource):
+
     def post(self):
         return self._dispatch_post()
index a693849..6771f36 100644 (file)
@@ -13,6 +13,7 @@ the appropriate options to ``use_setuptools()``.
 
 This file can also be run as a script to install or upgrade setuptools.
 """
+from __future__ import absolute_import
 import os
 import shutil
 import sys
@@ -21,7 +22,6 @@ import zipfile
 import optparse
 import subprocess
 import platform
-import textwrap
 import contextlib
 
 from distutils import log
@@ -29,7 +29,7 @@ from distutils import log
 try:
     from urllib.request import urlopen
 except ImportError:
-    from urllib2 import urlopen
+    from six.moves.urllib.request import urlopen
 
 try:
     from site import USER_SITE
@@ -39,6 +39,7 @@ except ImportError:
 DEFAULT_VERSION = "6.1"
 DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
 
+
 def _python_cmd(*args):
     """
     Return True if the command succeeded.
@@ -130,7 +131,7 @@ def _do_download(version, download_base, to_dir, download_delay):
 
 
 def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-        to_dir=os.curdir, download_delay=15):
+                   to_dir=os.curdir, download_delay=15):
     to_dir = os.path.abspath(to_dir)
     rep_modules = 'pkg_resources', 'setuptools'
     imported = set(sys.modules).intersection(rep_modules)
@@ -145,14 +146,14 @@ def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
         return _do_download(version, download_base, to_dir, download_delay)
     except pkg_resources.VersionConflict as VC_err:
         if imported:
-            msg = textwrap.dedent("""
-                The required version of setuptools (>={version}) is not available,
-                and can't be installed while this script is running. Please
-                install a more recent version first, using
-                'easy_install -U setuptools'.
-
-                (Currently using {VC_err.args[0]!r})
-                """).format(VC_err=VC_err, version=version)
+            msg = """\
+The required version of setuptools (>={version}) is not available,
+and can't be installed while this script is running. Please
+install a more recent version first, using
+'easy_install -U setuptools'.
+
+(Currently using {VC_err.args[0]!r})
+""".format(VC_err=VC_err, version=version)
             sys.stderr.write(msg)
             sys.exit(2)
 
@@ -160,6 +161,7 @@ def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
         del pkg_resources, sys.modules['pkg_resources']
         return _do_download(version, download_base, to_dir, download_delay)
 
+
 def _clean_check(cmd, target):
     """
     Run the command to download target. If the command fails, clean up before
@@ -172,6 +174,7 @@ def _clean_check(cmd, target):
             os.unlink(target)
         raise
 
+
 def download_file_powershell(url, target):
     """
     Download the file at url to target using Powershell (which will validate
@@ -191,6 +194,7 @@ def download_file_powershell(url, target):
     ]
     _clean_check(cmd, target)
 
+
 def has_powershell():
     if platform.system() != 'Windows':
         return False
@@ -202,12 +206,15 @@ def has_powershell():
             return False
     return True
 
+
 download_file_powershell.viable = has_powershell
 
+
 def download_file_curl(url, target):
     cmd = ['curl', url, '--silent', '--output', target]
     _clean_check(cmd, target)
 
+
 def has_curl():
     cmd = ['curl', '--version']
     with open(os.path.devnull, 'wb') as devnull:
@@ -217,12 +224,15 @@ def has_curl():
             return False
     return True
 
+
 download_file_curl.viable = has_curl
 
+
 def download_file_wget(url, target):
     cmd = ['wget', url, '--quiet', '--output-document', target]
     _clean_check(cmd, target)
 
+
 def has_wget():
     cmd = ['wget', '--version']
     with open(os.path.devnull, 'wb') as devnull:
@@ -232,8 +242,10 @@ def has_wget():
             return False
     return True
 
+
 download_file_wget.viable = has_wget
 
+
 def download_file_insecure(url, target):
     """
     Use Python to download the file, even though it cannot authenticate the
@@ -250,8 +262,10 @@ def download_file_insecure(url, target):
     with open(target, "wb") as dst:
         dst.write(data)
 
+
 download_file_insecure.viable = lambda: True
 
+
 def get_best_downloader():
     downloaders = (
         download_file_powershell,
@@ -262,8 +276,10 @@ def get_best_downloader():
     viable_downloaders = (dl for dl in downloaders if dl.viable())
     return next(viable_downloaders, None)
 
+
 def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-        to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader):
+                        to_dir=os.curdir, delay=15,
+                        downloader_factory=get_best_downloader):
     """
     Download setuptools from a specified location and return its filename
 
@@ -287,12 +303,14 @@ def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
         downloader(url, saveto)
     return os.path.realpath(saveto)
 
+
 def _build_install_args(options):
     """
     Build the arguments to 'python setup.py install' on the setuptools package
     """
     return ['--user'] if options.user_install else []
 
+
 def _parse_args():
     """
     Parse the command line for options
@@ -318,6 +336,7 @@ def _parse_args():
     # positional arguments are ignored
     return options
 
+
 def main():
     """Install or upgrade setuptools and EasyInstall"""
     options = _parse_args()
@@ -328,5 +347,6 @@ def main():
     )
     return _install(archive, _build_install_args(options))
 
+
 if __name__ == '__main__':
     sys.exit(main())
index 9c037ed..7041a96 100644 (file)
@@ -19,7 +19,7 @@ extras==0.0.3
 fixtures==1.4.0
 flake8==2.5.4
 funcsigs==0.4
-functools32==3.2.3.post2
+functools32==3.2.3.post2; python_version <= '2.7'
 futures==3.0.5
 iso8601==0.1.11
 Jinja2==2.8
@@ -31,7 +31,8 @@ linecache2==1.0.0
 lxml==3.5.0
 MarkupSafe==0.23
 mccabe==0.4.0
-mock==1.3.0
+# upgrade to version 2.0.0 to match python3 unittest.mock features
+mock==2.0.0
 monotonic==1.0
 msgpack-python==0.4.7
 netaddr==0.7.18
index 0100b46..315ab67 100755 (executable)
--- a/setup.py
+++ b/setup.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from setuptools import setup, find_packages
 
 
index 195b572..620edc3 100755 (executable)
@@ -8,6 +8,7 @@
 ##############################################################################
 
 
+from __future__ import absolute_import
 import unittest
 
 from tests.functional import utils
@@ -46,4 +47,3 @@ class RunnerTestCase(unittest.TestCase):
         res = self.yardstick("runner show Sequence")
         sequence = "sequence - list of values which are executed" in res
         self.assertTrue(sequence)
-
index 8779737..4741e82 100755 (executable)
@@ -8,6 +8,7 @@
 ##############################################################################
 
 
+from __future__ import absolute_import
 import unittest
 
 from tests.functional import utils
@@ -59,4 +60,3 @@ class ScenarioTestCase(unittest.TestCase):
         res = self.yardstick("scenario show Pktgen")
         pktgen = "Execute pktgen between two hosts" in res
         self.assertTrue(pktgen)
-
index aaaaaac..b96d2dd 100755 (executable)
@@ -7,13 +7,13 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
+
 import copy
-import json
 import os
-import shutil
 import subprocess
 
-
+from oslo_serialization import jsonutils
 from oslo_utils import encodeutils
 
 
@@ -40,11 +40,11 @@ class Yardstick(object):
         """Call yardstick in the shell
 
         :param cmd: yardstick command
-        :param getjson: in cases, when yardstick prints JSON, you can catch output
-            deserialized
+        :param getjson: in cases, when yardstick prints JSON, you can catch
+         output deserialized
         TO DO:
-        :param report_path: if present, yardstick command and its output will be
-            written to file with passed file name
+        :param report_path: if present, yardstick command and its output will
+         be written to file with passed file name
         :param raw: don't write command itself to report file. Only output
             will be written
         """
@@ -53,11 +53,11 @@ class Yardstick(object):
             cmd = cmd.split(" ")
         try:
             output = encodeutils.safe_decode(subprocess.check_output(
-                self.args + cmd, stderr=subprocess.STDOUT, env=self.env))
+                self.args + cmd, stderr=subprocess.STDOUT, env=self.env),
+                'utf-8')
 
             if getjson:
-                return json.loads(output)
+                return jsonutils.loads(output)
             return output
         except subprocess.CalledProcessError as e:
             raise e
-
index 5d17740..acf6e41 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import unittest
 
 from api.utils import common
index 0852da2..90aef2c 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import unittest
 import mock
 import uuid
@@ -13,12 +14,17 @@ import datetime
 
 from api.utils import influx
 
+import six.moves.configparser as ConfigParser
+
 
 class GetDataDbClientTestCase(unittest.TestCase):
 
     @mock.patch('api.utils.influx.ConfigParser')
     def test_get_data_db_client_dispatcher_not_influxdb(self, mock_parser):
         mock_parser.ConfigParser().get.return_value = 'file'
+        # reset exception to avoid
+        # TypeError: catching classes that do not inherit from BaseException
+        mock_parser.NoOptionError = ConfigParser.NoOptionError
         try:
             influx.get_data_db_client()
         except Exception as e:
@@ -67,6 +73,9 @@ class QueryTestCase(unittest.TestCase):
     @mock.patch('api.utils.influx.ConfigParser')
     def test_query_dispatcher_not_influxdb(self, mock_parser):
         mock_parser.ConfigParser().get.return_value = 'file'
+        # reset exception to avoid
+        # TypeError: catching classes that do not inherit from BaseException
+        mock_parser.NoOptionError = ConfigParser.NoOptionError
         try:
             sql = 'select * form tasklist'
             influx.query(sql)
index 5214e66..1a54035 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.contexts.dummy
 
+from __future__ import absolute_import
 import unittest
 
 from yardstick.benchmark.contexts import dummy
index dd830a4..f8f3492 100644 (file)
 
 # Unittest for yardstick.benchmark.contexts.heat
 
+from __future__ import absolute_import
+
+import logging
 import os
-import mock
 import unittest
+import uuid
+
+import mock
 
-from yardstick.benchmark.contexts import model
 from yardstick.benchmark.contexts import heat
 
 
+LOG = logging.getLogger(__name__)
+
+
 class HeatContextTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -70,7 +77,8 @@ class HeatContextTestCase(unittest.TestCase):
             'bar', self.test_context, networks['bar'])
         self.assertTrue(len(self.test_context.networks) == 1)
 
-        mock_server.assert_called_with('baz', self.test_context, servers['baz'])
+        mock_server.assert_called_with('baz', self.test_context,
+                                       servers['baz'])
         self.assertTrue(len(self.test_context.servers) == 1)
 
         if os.path.exists(self.test_context.key_filename):
@@ -78,7 +86,8 @@ class HeatContextTestCase(unittest.TestCase):
                 os.remove(self.test_context.key_filename)
                 os.remove(self.test_context.key_filename + ".pub")
             except OSError:
-                LOG.exception("key_filename: %s", e.key_filename)
+                LOG.exception("key_filename: %s",
+                              self.test_context.key_filename)
 
     @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
     def test__add_resources_to_template_no_servers(self, mock_template):
@@ -88,8 +97,9 @@ class HeatContextTestCase(unittest.TestCase):
         self.test_context.key_uuid = "2f2e4997-0a8e-4eb7-9fa4-f3f8fbbc393b"
 
         self.test_context._add_resources_to_template(mock_template)
-        mock_template.add_keypair.assert_called_with("foo-key",
-                                                     "2f2e4997-0a8e-4eb7-9fa4-f3f8fbbc393b")
+        mock_template.add_keypair.assert_called_with(
+            "foo-key",
+            "2f2e4997-0a8e-4eb7-9fa4-f3f8fbbc393b")
         mock_template.add_security_group.assert_called_with("foo-secgroup")
 
     @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
@@ -113,12 +123,13 @@ class HeatContextTestCase(unittest.TestCase):
 
         self.assertTrue(mock_template.delete.called)
 
-
     def test__get_server(self):
 
         self.mock_context.name = 'bar'
         self.mock_context.stack.outputs = {'public_ip': '127.0.0.1',
                                            'private_ip': '10.0.0.1'}
+        self.mock_context.key_uuid = uuid.uuid4()
+
         attr_name = {'name': 'foo.bar',
                      'public_ip_attr': 'public_ip',
                      'private_ip_attr': 'private_ip'}
index a1978e3..537a8c0 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.contexts.model
 
+from __future__ import absolute_import
 import mock
 import unittest
 
@@ -119,7 +120,8 @@ class NetworkTestCase(unittest.TestCase):
 
         attrs = {'external_network': 'ext_net'}
         test_network = model.Network('foo', self.mock_context, attrs)
-        exp_router = model.Router('router', 'foo', self.mock_context, 'ext_net')
+        exp_router = model.Router('router', 'foo', self.mock_context,
+                                  'ext_net')
 
         self.assertEqual(test_network.router.stack_name, exp_router.stack_name)
         self.assertEqual(test_network.router.stack_if_name,
@@ -219,4 +221,3 @@ class ServerTestCase(unittest.TestCase):
             user=self.mock_context.user,
             key_name=self.mock_context.keypair_name,
             scheduler_hints='hints')
-
index 6939b85..de5ba70 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.contexts.node
 
+from __future__ import absolute_import
 import os
 import unittest
 
@@ -21,6 +22,7 @@ class NodeContextTestCase(unittest.TestCase):
 
     NODES_SAMPLE = "nodes_sample.yaml"
     NODES_DUPLICATE_SAMPLE = "nodes_duplicate_sample.yaml"
+
     def setUp(self):
         self.test_context = node.NodeContext()
 
index 441116a..edc1034 100644 (file)
 ##############################################################################
 
 # Unittest for yardstick.benchmark.core.plugin
-
-import mock
+from __future__ import absolute_import
+import os
+from os.path import dirname as dirname
+
+try:
+    from unittest import mock
+except ImportError:
+    import mock
 import unittest
 
 from yardstick.benchmark.core import plugin
 
 
 class Arg(object):
+
     def __init__(self):
-        self.input_file = ('plugin/sample_config.yaml',)
+        # self.input_file = ('plugin/sample_config.yaml',)
+        self.input_file = [
+            os.path.join(os.path.abspath(
+                dirname(dirname(dirname(dirname(dirname(__file__)))))),
+                'plugin/sample_config.yaml')]
 
 
 @mock.patch('yardstick.benchmark.core.plugin.ssh')
index 463c43e..5dd32ea 100644 (file)
 
 # Unittest for yardstick.benchmark.core.task
 
+from __future__ import print_function
+
+from __future__ import absolute_import
 import os
-import mock
 import unittest
 
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
+
 from yardstick.benchmark.core import task
 
 
@@ -28,9 +36,9 @@ class TaskTestCase(unittest.TestCase):
         }
         scenario_cfg = {"nodes": nodes}
         server_info = {
-           "ip": "10.20.0.3",
-           "user": "root",
-           "key_filename": "/root/.ssh/id_rsa"
+            "ip": "10.20.0.3",
+            "user": "root",
+            "key_filename": "/root/.ssh/id_rsa"
         }
         mock_context.get_server.return_value = server_info
         context_cfg = task.parse_nodes_with_context(scenario_cfg)
@@ -48,7 +56,7 @@ class TaskTestCase(unittest.TestCase):
                 'duration': 60,
                 'interval': 1,
                 'type': 'Duration'
-             },
+            },
             'type': 'Ping'
         }
 
@@ -82,8 +90,8 @@ class TaskTestCase(unittest.TestCase):
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
         mock_environ.get.side_effect = ['huawei-pod1', 'compass']
         task_files, task_args, task_args_fnames = t.parse_suite()
-        print ("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-               task_args_fnames))
+        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
+                                                task_args_fnames))
         self.assertEqual(task_files[0],
                          'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
         self.assertEqual(task_files[1],
@@ -99,8 +107,8 @@ class TaskTestCase(unittest.TestCase):
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
         mock_environ.get.side_effect = ['huawei-pod1', 'compass']
         task_files, task_args, task_args_fnames = t.parse_suite()
-        print ("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-               task_args_fnames))
+        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
+                                                task_args_fnames))
         self.assertEqual(task_files[0],
                          'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
         self.assertEqual(task_files[1],
@@ -117,8 +125,8 @@ class TaskTestCase(unittest.TestCase):
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
         mock_environ.get.side_effect = ['huawei-pod1', 'compass']
         task_files, task_args, task_args_fnames = t.parse_suite()
-        print ("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-               task_args_fnames))
+        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
+                                                task_args_fnames))
         self.assertEqual(task_files[0],
                          'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
         self.assertEqual(task_files[1],
@@ -134,8 +142,8 @@ class TaskTestCase(unittest.TestCase):
         t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
         mock_environ.get.side_effect = ['huawei-pod1', 'compass']
         task_files, task_args, task_args_fnames = t.parse_suite()
-        print ("files=%s, args=%s, fnames=%s" % (task_files, task_args,
-               task_args_fnames))
+        print("files=%s, args=%s, fnames=%s" % (task_files, task_args,
+                                                task_args_fnames))
         self.assertEqual(task_files[0],
                          'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml')
         self.assertEqual(task_files[1],
index 6e0473c..c7da2de 100644 (file)
 
 # Unittest for yardstick.cmd.commands.testcase
 
+from __future__ import absolute_import
 import unittest
 
 from yardstick.benchmark.core import testcase
 
 
 class Arg(object):
+
     def __init__(self):
         self.casename = ('opnfv_yardstick_tc001',)
 
index 340f94c..9e2e8b1 100644 (file)
@@ -9,15 +9,20 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal
+# Unittest for
+# yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal
 
+from __future__ import absolute_import
 import mock
 import unittest
 
-from yardstick.benchmark.scenarios.availability.attacker import baseattacker
-from yardstick.benchmark.scenarios.availability.attacker import attacker_baremetal
+from yardstick.benchmark.scenarios.availability.attacker import \
+    attacker_baremetal
 
-@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal.subprocess')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal'
+    '.subprocess')
 class ExecuteShellTestCase(unittest.TestCase):
 
     def test__fun_execute_shell_command_successful(self, mock_subprocess):
@@ -26,34 +31,37 @@ class ExecuteShellTestCase(unittest.TestCase):
         exitcode, output = attacker_baremetal._execute_shell_command(cmd)
         self.assertEqual(exitcode, 0)
 
-    def test__fun_execute_shell_command_fail_cmd_exception(self, mock_subprocess):
+    def test__fun_execute_shell_command_fail_cmd_exception(self,
+                                                           mock_subprocess):
         cmd = "env"
         mock_subprocess.check_output.side_effect = RuntimeError
         exitcode, output = attacker_baremetal._execute_shell_command(cmd)
         self.assertEqual(exitcode, -1)
 
 
-@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal.ssh')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal'
+    '.ssh')
 class AttackerBaremetalTestCase(unittest.TestCase):
 
     def setUp(self):
-        host = { 
-            "ipmi_ip": "10.20.0.5", 
-            "ipmi_user": "root", 
-            "ipmi_pwd": "123456", 
-            "ip": "10.20.0.5", 
-            "user": "root", 
-            "key_filename": "/root/.ssh/id_rsa" 
-        } 
-        self.context = {"node1": host} 
-        self.attacker_cfg = { 
-            'fault_type': 'bear-metal-down', 
-            'host': 'node1', 
-        } 
+        host = {
+            "ipmi_ip": "10.20.0.5",
+            "ipmi_user": "root",
+            "ipmi_pwd": "123456",
+            "ip": "10.20.0.5",
+            "user": "root",
+            "key_filename": "/root/.ssh/id_rsa"
+        }
+        self.context = {"node1": host}
+        self.attacker_cfg = {
+            'fault_type': 'bear-metal-down',
+            'host': 'node1',
+        }
 
     def test__attacker_baremetal_all_successful(self, mock_ssh):
-
-        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg, self.context)
+        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+                                                   self.context)
 
         mock_ssh.SSH().execute.return_value = (0, "running", '')
         ins.setup()
@@ -61,8 +69,8 @@ class AttackerBaremetalTestCase(unittest.TestCase):
         ins.recover()
 
     def test__attacker_baremetal_check_failuer(self, mock_ssh):
-
-        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg, self.context)
+        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+                                                   self.context)
         mock_ssh.SSH().execute.return_value = (0, "error check", '')
         ins.setup()
 
@@ -70,7 +78,8 @@ class AttackerBaremetalTestCase(unittest.TestCase):
 
         self.attacker_cfg["jump_host"] = 'node1'
         self.context["node1"]["pwd"] = "123456"
-        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg, self.context)
+        ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+                                                   self.context)
 
         mock_ssh.SSH().execute.return_value = (0, "running", '')
         ins.setup()
index aa2e0cc..322b583 100644 (file)
 # Unittest for yardstick.benchmark.scenarios.availability.attacker
 # .attacker_general
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.attacker import baseattacker
 
+
 @mock.patch('yardstick.benchmark.scenarios.availability.attacker.'
             'attacker_general.ssh')
 class GeneralAttackerServiceTestCase(unittest.TestCase):
@@ -30,10 +32,10 @@ class GeneralAttackerServiceTestCase(unittest.TestCase):
         self.context = {"node1": host}
         self.attacker_cfg = {
             'fault_type': 'general-attacker',
-            'action_parameter':{'process_name':'nova_api'},
-            'rollback_parameter':{'process_name':'nova_api'},
-            'key':'stop-service',
-            'attack_key':'stop-service',
+            'action_parameter': {'process_name': 'nova_api'},
+            'rollback_parameter': {'process_name': 'nova_api'},
+            'key': 'stop-service',
+            'attack_key': 'stop-service',
             'host': 'node1',
         }
 
index eb0cce7..d7771bd 100644 (file)
@@ -9,14 +9,18 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.attacker.attacker_process
+# Unittest for
+# yardstick.benchmark.scenarios.availability.attacker.attacker_process
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.attacker import baseattacker
 
-@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_process.ssh')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.attacker.attacker_process.ssh')
 class AttackerServiceTestCase(unittest.TestCase):
 
     def setUp(self):
index a20cf81..7030c78 100644 (file)
@@ -9,21 +9,25 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.monitor.monitor_command
+# Unittest for
+# yardstick.benchmark.scenarios.availability.monitor.monitor_command
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.monitor import basemonitor
 
 
-@mock.patch('yardstick.benchmark.scenarios.availability.monitor.basemonitor.BaseMonitor')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.monitor.basemonitor'
+    '.BaseMonitor')
 class MonitorMgrTestCase(unittest.TestCase):
 
     def setUp(self):
         config = {
             'monitor_type': 'openstack-api',
-            'key' : 'service-status'
+            'key': 'service-status'
         }
 
         self.monitor_configs = []
@@ -42,10 +46,12 @@ class MonitorMgrTestCase(unittest.TestCase):
         monitorMgr.init_monitors(self.monitor_configs, None)
         monitorIns = monitorMgr['service-status']
 
+
 class BaseMonitorTestCase(unittest.TestCase):
 
     class MonitorSimple(basemonitor.BaseMonitor):
         __monitor_type__ = "MonitorForTest"
+
         def setup(self):
             self.monitor_result = False
 
@@ -65,14 +71,15 @@ class BaseMonitorTestCase(unittest.TestCase):
         ins.start_monitor()
         ins.wait_monitor()
 
-
     def test__basemonitor_all_successful(self):
         ins = self.MonitorSimple(self.monitor_cfg, None)
         ins.setup()
         ins.run()
         ins.verify_SLA()
 
-    @mock.patch('yardstick.benchmark.scenarios.availability.monitor.basemonitor.multiprocessing')
+    @mock.patch(
+        'yardstick.benchmark.scenarios.availability.monitor.basemonitor'
+        '.multiprocessing')
     def test__basemonitor_func_false(self, mock_multiprocess):
         ins = self.MonitorSimple(self.monitor_cfg, None)
         ins.setup()
@@ -87,4 +94,3 @@ class BaseMonitorTestCase(unittest.TestCase):
         except Exception:
             pass
         self.assertIsNone(cls)
-
index d85f1e1..03ec149 100644 (file)
@@ -9,26 +9,31 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.operation.baseoperation
+# Unittest for
+# yardstick.benchmark.scenarios.availability.operation.baseoperation
 
+from __future__ import absolute_import
 import mock
 import unittest
 
-from yardstick.benchmark.scenarios.availability.operation import  baseoperation
+from yardstick.benchmark.scenarios.availability.operation import baseoperation
 
-@mock.patch('yardstick.benchmark.scenarios.availability.operation.baseoperation.BaseOperation')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.operation.baseoperation'
+    '.BaseOperation')
 class OperationMgrTestCase(unittest.TestCase):
 
     def setUp(self):
         config = {
             'operation_type': 'general-operation',
-            'key' : 'service-status'
+            'key': 'service-status'
         }
 
         self.operation_configs = []
         self.operation_configs.append(config)
 
-    def  test_all_successful(self, mock_operation):
+    def test_all_successful(self, mock_operation):
         mgr_ins = baseoperation.OperationMgr()
         mgr_ins.init_operations(self.operation_configs, None)
         operation_ins = mgr_ins["service-status"]
@@ -59,7 +64,7 @@ class BaseOperationTestCase(unittest.TestCase):
     def setUp(self):
         self.config = {
             'operation_type': 'general-operation',
-            'key' : 'service-status'
+            'key': 'service-status'
         }
 
     def test_all_successful(self):
@@ -70,7 +75,7 @@ class BaseOperationTestCase(unittest.TestCase):
 
     def test_get_script_fullpath(self):
         base_ins = baseoperation.BaseOperation(self.config, None)
-        base_ins.get_script_fullpath("ha_tools/test.bash");
+        base_ins.get_script_fullpath("ha_tools/test.bash")
 
     def test_get_operation_cls_successful(self):
         base_ins = baseoperation.BaseOperation(self.config, None)
index 9972d6b..36ce900 100644 (file)
 # Unittest for yardstick.benchmark.scenarios.availability.result_checker
 # .baseresultchecker
 
+from __future__ import absolute_import
 import mock
 import unittest
 
-from yardstick.benchmark.scenarios.availability.result_checker import baseresultchecker
+from yardstick.benchmark.scenarios.availability.result_checker import \
+    baseresultchecker
 
 
 @mock.patch('yardstick.benchmark.scenarios.availability.result_checker'
-    '.baseresultchecker.BaseResultChecker')
+            '.baseresultchecker.BaseResultChecker')
 class ResultCheckerMgrTestCase(unittest.TestCase):
 
     def setUp(self):
         config = {
             'checker_type': 'general-result-checker',
-            'key' : 'process-checker'
+            'key': 'process-checker'
         }
 
         self.checker_configs = []
@@ -52,6 +54,7 @@ class BaseResultCheckerTestCase(unittest.TestCase):
 
     class ResultCheckeSimple(baseresultchecker.BaseResultChecker):
         __result_checker__type__ = "ResultCheckeForTest"
+
         def setup(self):
             self.success = False
 
@@ -61,7 +64,7 @@ class BaseResultCheckerTestCase(unittest.TestCase):
     def setUp(self):
         self.checker_cfg = {
             'checker_type': 'general-result-checker',
-            'key' : 'process-checker'
+            'key': 'process-checker'
         }
 
     def test_baseresultchecker_setup_verify_successful(self):
@@ -81,8 +84,10 @@ class BaseResultCheckerTestCase(unittest.TestCase):
         path = ins.get_script_fullpath("test.bash")
 
     def test_get_resultchecker_cls_successful(self):
-        baseresultchecker.BaseResultChecker.get_resultchecker_cls("ResultCheckeForTest")
+        baseresultchecker.BaseResultChecker.get_resultchecker_cls(
+            "ResultCheckeForTest")
 
     def test_get_resultchecker_cls_fail(self):
         with self.assertRaises(RuntimeError):
-            baseresultchecker.BaseResultChecker.get_resultchecker_cls("ResultCheckeNotExist")
+            baseresultchecker.BaseResultChecker.get_resultchecker_cls(
+                "ResultCheckeNotExist")
index 0611672..d01a60e 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.availability.director
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.director import Director
-from yardstick.benchmark.scenarios.availability import  actionplayers
 
 
 @mock.patch('yardstick.benchmark.scenarios.availability.director.basemonitor')
 @mock.patch('yardstick.benchmark.scenarios.availability.director.baseattacker')
-@mock.patch('yardstick.benchmark.scenarios.availability.director.baseoperation')
-@mock.patch('yardstick.benchmark.scenarios.availability.director.baseresultchecker')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.director.baseoperation')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.director.baseresultchecker')
 class DirectorTestCase(unittest.TestCase):
 
     def setUp(self):
         self.scenario_cfg = {
             'type': "general_scenario",
             'options': {
-                'attackers':[{
+                'attackers': [{
                     'fault_type': "general-attacker",
                     'key': "kill-process"}],
                 'monitors': [{
@@ -36,11 +38,11 @@ class DirectorTestCase(unittest.TestCase):
                     'key': "service-status"}],
                 'operations': [{
                     'operation_type': 'general-operation',
-                    'key' : 'service-status'}],
+                    'key': 'service-status'}],
                 'resultCheckers': [{
                     'checker_type': 'general-result-checker',
-                    'key' : 'process-checker',}],
-                'steps':[
+                    'key': 'process-checker', }],
+                'steps': [
                     {
                         'actionKey': "service-status",
                         'actionType': "operation",
@@ -57,7 +59,7 @@ class DirectorTestCase(unittest.TestCase):
                         'actionKey': "service-status",
                         'actionType': "monitor",
                         'index': 4},
-                    ]
+                ]
             }
         }
         host = {
@@ -67,15 +69,19 @@ class DirectorTestCase(unittest.TestCase):
         }
         self.ctx = {"nodes": {"node1": host}}
 
-    def test_director_all_successful(self, mock_checer, mock_opertion, mock_attacker, mock_monitor):
+    def test_director_all_successful(self, mock_checer, mock_opertion,
+                                     mock_attacker, mock_monitor):
         ins = Director(self.scenario_cfg, self.ctx)
         opertion_action = ins.createActionPlayer("operation", "service-status")
         attacker_action = ins.createActionPlayer("attacker", "kill-process")
-        checker_action = ins.createActionPlayer("resultchecker", "process-checker")
+        checker_action = ins.createActionPlayer("resultchecker",
+                                                "process-checker")
         monitor_action = ins.createActionPlayer("monitor", "service-status")
 
-        opertion_rollback = ins.createActionRollbacker("operation", "service-status")
-        attacker_rollback = ins.createActionRollbacker("attacker", "kill-process")
+        opertion_rollback = ins.createActionRollbacker("operation",
+                                                       "service-status")
+        attacker_rollback = ins.createActionRollbacker("attacker",
+                                                       "kill-process")
         ins.executionSteps.append(opertion_rollback)
         ins.executionSteps.append(attacker_rollback)
 
@@ -91,13 +97,8 @@ class DirectorTestCase(unittest.TestCase):
         ins.verify()
         ins.knockoff()
 
-    def test_director_get_wrong_item(self, mock_checer, mock_opertion, mock_attacker, mock_monitor):
+    def test_director_get_wrong_item(self, mock_checer, mock_opertion,
+                                     mock_attacker, mock_monitor):
         ins = Director(self.scenario_cfg, self.ctx)
         ins.createActionPlayer("wrong_type", "wrong_key")
         ins.createActionRollbacker("wrong_type", "wrong_key")
-
-
-
-
-
-
index c8cda7d..a84bfd2 100644 (file)
@@ -9,14 +9,19 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.monitor.monitor_command
+# Unittest for
+# yardstick.benchmark.scenarios.availability.monitor.monitor_command
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.monitor import monitor_command
 
-@mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.subprocess')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.monitor.monitor_command'
+    '.subprocess')
 class ExecuteShellTestCase(unittest.TestCase):
 
     def test__fun_execute_shell_command_successful(self, mock_subprocess):
@@ -25,13 +30,17 @@ class ExecuteShellTestCase(unittest.TestCase):
         exitcode, output = monitor_command._execute_shell_command(cmd)
         self.assertEqual(exitcode, 0)
 
-    def test__fun_execute_shell_command_fail_cmd_exception(self, mock_subprocess):
+    def test__fun_execute_shell_command_fail_cmd_exception(self,
+                                                           mock_subprocess):
         cmd = "env"
         mock_subprocess.check_output.side_effect = RuntimeError
         exitcode, output = monitor_command._execute_shell_command(cmd)
         self.assertEqual(exitcode, -1)
 
-@mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.subprocess')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.monitor.monitor_command'
+    '.subprocess')
 class MonitorOpenstackCmdTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -48,7 +57,6 @@ class MonitorOpenstackCmdTestCase(unittest.TestCase):
             'sla': {'max_outage_time': 5}
         }
 
-
     def test__monitor_command_monitor_func_successful(self, mock_subprocess):
 
         instance = monitor_command.MonitorOpenstackCmd(self.config, None)
@@ -69,11 +77,15 @@ class MonitorOpenstackCmdTestCase(unittest.TestCase):
         instance._result = {"outage_time": 10}
         instance.verify_SLA()
 
-    @mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.ssh')
-    def test__monitor_command_ssh_monitor_successful(self, mock_ssh, mock_subprocess):
+    @mock.patch(
+        'yardstick.benchmark.scenarios.availability.monitor.monitor_command'
+        '.ssh')
+    def test__monitor_command_ssh_monitor_successful(self, mock_ssh,
+                                                     mock_subprocess):
 
         self.config["host"] = "node1"
-        instance = monitor_command.MonitorOpenstackCmd(self.config, self.context)
+        instance = monitor_command.MonitorOpenstackCmd(
+            self.config, self.context)
         instance.setup()
         mock_ssh.SSH().execute.return_value = (0, "0", '')
         ret = instance.monitor_func()
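
The wrapped @mock.patch targets above lean on implicit concatenation of adjacent string literals: the fragments are joined at compile time into the same dotted path as the original one-liner, which is how the port meets flake8's 79-column limit without changing behaviour. A small sketch of the idiom:

    # Adjacent string literals merge into a single string, so this is
    # byte-for-byte the original single-line patch target.
    PATCH_TARGET = (
        'yardstick.benchmark.scenarios.availability.monitor.monitor_command'
        '.subprocess')

    assert PATCH_TARGET.endswith('monitor_command.subprocess')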
index de7d26c..369f6f4 100644 (file)
@@ -12,6 +12,7 @@
 # Unittest for yardstick.benchmark.scenarios.availability.monitor
 # .monitor_general
 
+from __future__ import absolute_import
 import mock
 import unittest
 from yardstick.benchmark.scenarios.availability.monitor import monitor_general
@@ -22,6 +23,7 @@ from yardstick.benchmark.scenarios.availability.monitor import monitor_general
 @mock.patch('yardstick.benchmark.scenarios.availability.monitor.'
             'monitor_general.open')
 class GeneralMonitorServiceTestCase(unittest.TestCase):
+
     def setUp(self):
         host = {
             "ip": "10.20.0.5",
@@ -53,23 +55,26 @@ class GeneralMonitorServiceTestCase(unittest.TestCase):
         ins.setup()
         mock_ssh.SSH().execute.return_value = (0, "running", '')
         ins.monitor_func()
-        ins._result = {'outage_time' : 0}
+        ins._result = {'outage_time': 0}
         ins.verify_SLA()
 
-    def test__monitor_general_all_successful_noparam(self, mock_open, mock_ssh):
-        ins = monitor_general.GeneralMonitor(self.monitor_cfg_noparam, self.context)
+    def test__monitor_general_all_successful_noparam(self, mock_open,
+                                                     mock_ssh):
+        ins = monitor_general.GeneralMonitor(
+            self.monitor_cfg_noparam, self.context)
 
         ins.setup()
         mock_ssh.SSH().execute.return_value = (0, "running", '')
         ins.monitor_func()
-        ins._result = {'outage_time' : 0}
+        ins._result = {'outage_time': 0}
         ins.verify_SLA()
 
     def test__monitor_general_failure(self, mock_open, mock_ssh):
-        ins = monitor_general.GeneralMonitor(self.monitor_cfg_noparam, self.context)
+        ins = monitor_general.GeneralMonitor(
+            self.monitor_cfg_noparam, self.context)
 
         ins.setup()
         mock_ssh.SSH().execute.return_value = (1, "error", 'error')
         ins.monitor_func()
-        ins._result = {'outage_time' : 2}
+        ins._result = {'outage_time': 2}
         ins.verify_SLA()
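
Each ported file also gains "from __future__ import absolute_import", which makes Python 2 resolve bare imports the way Python 3 always does: from sys.path only, never implicitly from a sibling module in the same package. A toy illustration, assuming a package that happens to ship its own json.py:

    from __future__ import absolute_import

    # Without the future import, Python 2 code inside a package containing
    # a local json.py would silently pick up that local file here.  With it
    # (and always on Python 3), the standard library wins; the local module
    # would need an explicit "from . import json".
    import json

    print(json.dumps({'outage_time': 0}))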
index dda104b..8270405 100644 (file)
@@ -9,14 +9,18 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.availability.monitor.monitor_process
+# Unittest for
+# yardstick.benchmark.scenarios.availability.monitor.monitor_process
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability.monitor import monitor_process
 
-@mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_process.ssh')
+
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.monitor.monitor_process.ssh')
 class MonitorProcessTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -53,4 +57,3 @@ class MonitorProcessTestCase(unittest.TestCase):
         ins.monitor_func()
         ins._result = {"outage_time": 10}
         ins.verify_SLA()
-
index 26cd3f7..2c6dc16 100644 (file)
 # Unittest for yardstick.benchmark.scenarios.availability.operation
 # .operation_general
 
+from __future__ import absolute_import
 import mock
 import unittest
-from yardstick.benchmark.scenarios.availability.operation import operation_general
+from yardstick.benchmark.scenarios.availability.operation import \
+    operation_general
+
 
 @mock.patch('yardstick.benchmark.scenarios.availability.operation.'
             'operation_general.ssh')
@@ -46,7 +49,7 @@ class GeneralOperaionTestCase(unittest.TestCase):
 
     def test__operation_successful(self, mock_open, mock_ssh):
         ins = operation_general.GeneralOperaion(self.operation_cfg,
-            self.context);
+                                                self.context)
         mock_ssh.SSH().execute.return_value = (0, "success", '')
         ins.setup()
         ins.run()
@@ -54,7 +57,7 @@ class GeneralOperaionTestCase(unittest.TestCase):
 
     def test__operation_successful_noparam(self, mock_open, mock_ssh):
         ins = operation_general.GeneralOperaion(self.operation_cfg_noparam,
-            self.context);
+                                                self.context)
         mock_ssh.SSH().execute.return_value = (0, "success", '')
         ins.setup()
         ins.run()
@@ -62,7 +65,7 @@ class GeneralOperaionTestCase(unittest.TestCase):
 
     def test__operation_fail(self, mock_open, mock_ssh):
         ins = operation_general.GeneralOperaion(self.operation_cfg,
-            self.context);
+                                                self.context)
         mock_ssh.SSH().execute.return_value = (1, "failed", '')
         ins.setup()
         ins.run()
index bbadf0a..c5451fa 100644 (file)
 # Unittest for yardstick.benchmark.scenarios.availability.result_checker
 # .result_checker_general
 
+from __future__ import absolute_import
 import mock
 import unittest
 import copy
 
-from yardstick.benchmark.scenarios.availability.result_checker import  result_checker_general
+from yardstick.benchmark.scenarios.availability.result_checker import \
+    result_checker_general
 
 
 @mock.patch('yardstick.benchmark.scenarios.availability.result_checker.'
@@ -35,16 +37,16 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         self.checker_cfg = {
             'parameter': {'processname': 'process'},
             'checker_type': 'general-result-checker',
-            'condition' : 'eq',
-            'expectedValue' : 1,
-            'key' : 'process-checker',
-            'checker_key' : 'process-checker',
+            'condition': 'eq',
+            'expectedValue': 1,
+            'key': 'process-checker',
+            'checker_key': 'process-checker',
             'host': 'node1'
         }
 
     def test__result_checker_eq(self, mock_open, mock_ssh):
         ins = result_checker_general.GeneralResultChecker(self.checker_cfg,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "1", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -53,7 +55,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config['condition'] = 'gt'
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "2", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -62,7 +64,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config['condition'] = 'gt_eq'
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "1", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -71,7 +73,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config['condition'] = 'lt'
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "0", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -80,7 +82,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config['condition'] = 'lt_eq'
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "1", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -90,7 +92,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config['condition'] = 'in'
         config['expectedValue'] = "value"
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "value return", '')
         ins.setup()
         self.assertTrue(ins.verify())
@@ -99,7 +101,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config['condition'] = 'wrong'
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (0, "1", '')
         ins.setup()
         self.assertFalse(ins.verify())
@@ -108,7 +110,7 @@ class GeneralResultCheckerTestCase(unittest.TestCase):
         config = copy.deepcopy(self.checker_cfg)
         config.pop('parameter')
         ins = result_checker_general.GeneralResultChecker(config,
-            self.context);
+                                                          self.context)
         mock_ssh.SSH().execute.return_value = (1, "fail", '')
         ins.setup()
-        ins.verify()
\ No newline at end of file
+        ins.verify()
index bab9d62..593fc77 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.availability.scenario_general
 
+from __future__ import absolute_import
 import mock
 import unittest
 
-from yardstick.benchmark.scenarios.availability.scenario_general import ScenarioGeneral
+from yardstick.benchmark.scenarios.availability.scenario_general import \
+    ScenarioGeneral
 
 
-@mock.patch('yardstick.benchmark.scenarios.availability.scenario_general.Director')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.scenario_general.Director')
 class ScenarioGeneralTestCase(unittest.TestCase):
 
     def setUp(self):
         self.scenario_cfg = {
             'type': "general_scenario",
             'options': {
-                'attackers':[{
+                'attackers': [{
                     'fault_type': "general-attacker",
                     'key': "kill-process"}],
                 'monitors': [{
                     'monitor_type': "general-monitor",
                     'key': "service-status"}],
-                'steps':[
+                'steps': [
                     {
                         'actionKey': "kill-process",
                         'actionType': "attacker",
index 6e58b6e..4ae5089 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.availability.serviceha
 
+from __future__ import absolute_import
 import mock
 import unittest
 
 from yardstick.benchmark.scenarios.availability import serviceha
 
+
 @mock.patch('yardstick.benchmark.scenarios.availability.serviceha.basemonitor')
-@mock.patch('yardstick.benchmark.scenarios.availability.serviceha.baseattacker')
+@mock.patch(
+    'yardstick.benchmark.scenarios.availability.serviceha.baseattacker')
 class ServicehaTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -48,7 +51,8 @@ class ServicehaTestCase(unittest.TestCase):
         sla = {"outage_time": 5}
         self.args = {"options": options, "sla": sla}
 
-    def test__serviceha_setup_run_successful(self, mock_attacker, mock_monitor):
+    def test__serviceha_setup_run_successful(self, mock_attacker,
+                                             mock_monitor):
         p = serviceha.ServiceHA(self.args, self.ctx)
 
         p.setup()
index f5a6b5f..8a06c75 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.compute.cachestat.CACHEstat
 
+from __future__ import absolute_import
 import mock
 import unittest
 import os
@@ -72,11 +73,19 @@ class CACHEstatTestCase(unittest.TestCase):
         output = self._read_file("cachestat_sample_output.txt")
         mock_ssh.SSH().execute.return_value = (0, output, '')
         result = c._get_cache_usage()
-        expected_result = {"cachestat": {"cache0": {"HITS": "6462",\
- "DIRTIES": "29", "RATIO": "100.0%", "MISSES": "0", "BUFFERS_MB": "1157",\
- "CACHE_MB": "66782"}}, "average": {"HITS": 6462, "DIRTIES": 29, "RATIO": "100.0%",\
- "MISSES": 0, "BUFFERS_MB":1157, "CACHE_MB": 66782}, "max": {"HITS": 6462,\
- "DIRTIES": 29, "RATIO": 100.0, "MISSES": 0, "BUFFERS_MB": 1157, "CACHE_MB": 66782}}
+        expected_result = {"cachestat": {"cache0": {"HITS": "6462",
+                                                    "DIRTIES": "29",
+                                                    "RATIO": "100.0%",
+                                                    "MISSES": "0",
+                                                    "BUFFERS_MB": "1157",
+                                                    "CACHE_MB": "66782"}},
+                           "average": {"HITS": 6462, "DIRTIES": 29,
+                                       "RATIO": "100.0%",
+                                       "MISSES": 0, "BUFFERS_MB": 1157,
+                                       "CACHE_MB": 66782},
+                           "max": {"HITS": 6462,
+                                   "DIRTIES": 29, "RATIO": 100.0, "MISSES": 0,
+                                   "BUFFERS_MB": 1157, "CACHE_MB": 66782}}
 
         self.assertEqual(result, expected_result)
 
index da06b5d..4efa669 100644 (file)
@@ -9,12 +9,15 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.compute.computecapacity.ComputeCapacity
+# Unittest for
+# yardstick.benchmark.scenarios.compute.computecapacity.ComputeCapacity
+
+from __future__ import absolute_import
 
-import mock
 import unittest
-import os
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import computecapacity
 
@@ -53,7 +56,7 @@ class ComputeCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.return_value = (0, SAMPLE_OUTPUT, '')
         c.run(self.result)
-        expected_result = json.loads(SAMPLE_OUTPUT)
+        expected_result = jsonutils.loads(SAMPLE_OUTPUT)
         self.assertEqual(self.result, expected_result)
 
     def test_capacity_unsuccessful_script_error(self, mock_ssh):
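
The json.loads to jsonutils.loads switch, repeated through the compute tests below, buys a single decode path that tolerates both bytes and text input; plain json.loads rejects bytes on Python 3 versions before 3.6. A minimal sketch:

    from oslo_serialization import jsonutils

    # jsonutils.loads safely decodes bytes to text before parsing, so the
    # same call works whether command output arrives as str or raw bytes.
    assert jsonutils.loads(b'{"cores": 2}') == {'cores': 2}
    assert jsonutils.loads(u'{"cores": 2}') == {'cores': 2}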
index 77f2a02..ffa7812 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.compute.lmbench.Lmbench
 
+from __future__ import absolute_import
 import mock
 import unittest
 import os
@@ -208,7 +209,7 @@ class CPULoadTestCase(unittest.TestCase):
                      '%nice': '0.03'}}}
 
         self.assertDictEqual(result, expected_result)
-    
+
     def test_run_proc_stat(self, mock_ssh):
         options = {
             "interval": 1,
index 8074290..04ca2ab 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.compute.cyclictest.Cyclictest
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import cyclictest
 
@@ -85,17 +88,17 @@ class CyclictestTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         c.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(result, expected_result)
 
     def test_cyclictest_successful_sla(self, mock_ssh):
         result = {}
         self.scenario_cfg.update({"sla": {
-                "action": "monitor",
-                "max_min_latency": 100,
-                "max_avg_latency": 500,
-                "max_max_latency": 1000
-            }
+            "action": "monitor",
+            "max_min_latency": 100,
+            "max_avg_latency": 500,
+            "max_max_latency": 1000
+        }
         })
         c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
         mock_ssh.SSH().execute.return_value = (0, '', '')
@@ -106,7 +109,7 @@ class CyclictestTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         c.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(result, expected_result)
 
     def test_cyclictest_unsuccessful_sla_min_latency(self, mock_ssh):
index 6be1163..5b72ef7 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.compute.lmbench.Lmbench
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import lmbench
 
@@ -65,7 +68,8 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = '[{"latency": 4.944, "size": 0.00049}]'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = json.loads('{"latencies": ' + sample_output + "}")
+        expected_result = jsonutils.loads(
+            '{"latencies": ' + sample_output + "}")
         self.assertEqual(self.result, expected_result)
 
     def test_successful_bandwidth_run_no_sla(self, mock_ssh):
@@ -82,7 +86,7 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_successful_latency_run_sla(self, mock_ssh):
@@ -101,7 +105,8 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = '[{"latency": 4.944, "size": 0.00049}]'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = json.loads('{"latencies": ' + sample_output + "}")
+        expected_result = jsonutils.loads(
+            '{"latencies": ' + sample_output + "}")
         self.assertEqual(self.result, expected_result)
 
     def test_successful_bandwidth_run_sla(self, mock_ssh):
@@ -121,7 +126,7 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_unsuccessful_latency_run_sla(self, mock_ssh):
@@ -163,7 +168,7 @@ class LmbenchTestCase(unittest.TestCase):
 
         options = {
             "test_type": "latency_for_cache",
-            "repetition":1,
+            "repetition": 1,
             "warmup": 0
         }
         args = {
@@ -175,7 +180,7 @@ class LmbenchTestCase(unittest.TestCase):
         sample_output = "{\"L1cache\": 1.6}"
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         l.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_unsuccessful_script_error(self, mock_ssh):
index cdf518d..76625ef 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.compute.memload.MEMLoad
 
+from __future__ import absolute_import
 import mock
 import unittest
 import os
@@ -74,15 +75,17 @@ class MEMLoadTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, output, '')
         result = m._get_mem_usage()
         expected_result = {"max": {"used": 76737332, "cached": 67252400,
-                           "free": 187016644, "shared": 2844,
-                           "total": 263753976, "buffers": 853528},
+                                   "free": 187016644, "shared": 2844,
+                                   "total": 263753976, "buffers": 853528},
                            "average": {"used": 76737332, "cached": 67252400,
-                           "free": 187016644, "shared": 2844,
-                           "total": 263753976, "buffers": 853528},
+                                       "free": 187016644, "shared": 2844,
+                                       "total": 263753976, "buffers": 853528},
                            "free": {"memory0": {"used": "76737332",
-                           "cached": "67252400", "free": "187016644",
-                           "shared": "2844", "total": "263753976",
-                           "buffers": "853528"}}}
+                                                "cached": "67252400",
+                                                "free": "187016644",
+                                                "shared": "2844",
+                                                "total": "263753976",
+                                                "buffers": "853528"}}}
         self.assertEqual(result, expected_result)
 
     def _read_file(self, filename):
@@ -91,4 +94,3 @@ class MEMLoadTestCase(unittest.TestCase):
         with open(output) as f:
             sample_output = f.read()
         return sample_output
-
index 94f5273..a5331ca 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.compute.plugintest.PluginTest
 
-import mock
-import json
+from __future__ import absolute_import
+
 import unittest
-import os
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import plugintest
 
@@ -50,7 +52,7 @@ class PluginTestTestCase(unittest.TestCase):
         sample_output = '{"Test Output": "Hello world!"}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         s.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_sample_unsuccessful_script_error(self, mock_ssh):
index 100102d..82cc938 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.compute.ramspeed.Ramspeed
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import ramspeed
 
@@ -69,12 +72,12 @@ class RamspeedTestCase(unittest.TestCase):
  "Bandwidth(MBps)": 14756.45}, {"Test_type": "INTEGER & WRITING",\
  "Block_size(kb)": 4096, "Bandwidth(MBps)": 14604.44}, {"Test_type":\
  "INTEGER & WRITING", "Block_size(kb)": 8192, "Bandwidth(MBps)": 14159.86},\
- {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384, "Bandwidth(MBps)":\
14128.94}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 32768,\
- "Bandwidth(MBps)": 8340.85}]}'
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384,\
"Bandwidth(MBps)": 14128.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_successful_run_sla(self, mock_ssh):
@@ -105,12 +108,12 @@ class RamspeedTestCase(unittest.TestCase):
  "Bandwidth(MBps)": 14756.45}, {"Test_type": "INTEGER & WRITING",\
  "Block_size(kb)": 4096, "Bandwidth(MBps)": 14604.44}, {"Test_type":\
  "INTEGER & WRITING", "Block_size(kb)": 8192, "Bandwidth(MBps)": 14159.86},\
- {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384, "Bandwidth(MBps)":\
14128.94}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 32768,\
- "Bandwidth(MBps)": 8340.85}]}'
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384,\
"Bandwidth(MBps)": 14128.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_unsuccessful_run_sla(self, mock_ssh):
@@ -176,7 +179,7 @@ class RamspeedTestCase(unittest.TestCase):
  "Bandwidth(MBps)": 9401.58}]}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_mem_successful_run_sla(self, mock_ssh):
@@ -197,7 +200,7 @@ class RamspeedTestCase(unittest.TestCase):
  "Bandwidth(MBps)": 9401.58}]}'
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
         r.run(self.result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(self.result, expected_result)
 
     def test_ramspeed_mem_unsuccessful_run_sla(self, mock_ssh):
index 0935bca..747bda1 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.compute.unixbench.Unixbench
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.compute import unixbench
 
@@ -57,7 +60,7 @@ class UnixbenchTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         u.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(result, expected_result)
 
     def test_unixbench_successful_in_quiet_mode(self, mock_ssh):
@@ -65,7 +68,7 @@ class UnixbenchTestCase(unittest.TestCase):
         options = {
             "test_type": 'dhry2reg',
             "run_mode": 'quiet',
-            "copies":1
+            "copies": 1
         }
         args = {
             "options": options,
@@ -79,10 +82,9 @@ class UnixbenchTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         u.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(result, expected_result)
 
-
     def test_unixbench_successful_sla(self, mock_ssh):
 
         options = {
@@ -106,7 +108,7 @@ class UnixbenchTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         u.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         self.assertEqual(result, expected_result)
 
     def test_unixbench_unsuccessful_sla_single_score(self, mock_ssh):
index 1f9b729..560675d 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.dummy.dummy
 
+from __future__ import absolute_import
 import unittest
 
 from yardstick.benchmark.scenarios.dummy import dummy
index 91f800b..ea53cb9 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
 
-import mock
-import unittest
+from __future__ import absolute_import
+
 import os
-import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.networking import iperf3
 
@@ -78,7 +81,7 @@ class IperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output(self.output_name_tcp)
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
@@ -97,7 +100,7 @@ class IperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output(self.output_name_tcp)
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
@@ -119,8 +122,7 @@ class IperfTestCase(unittest.TestCase):
         self.assertRaises(AssertionError, p.run, result)
 
     def test_iperf_successful_sla_jitter(self, mock_ssh):
-
-        options = {"udp":"udp","bandwidth":"20m"}
+        options = {"udp": "udp", "bandwidth": "20m"}
         args = {
             'options': options,
             'sla': {'jitter': 10}
@@ -133,13 +135,12 @@ class IperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output(self.output_name_udp)
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
     def test_iperf_unsuccessful_sla_jitter(self, mock_ssh):
-
-        options = {"udp":"udp","bandwidth":"20m"}
+        options = {"udp": "udp", "bandwidth": "20m"}
         args = {
             'options': options,
             'sla': {'jitter': 0.0001}
@@ -167,7 +168,7 @@ class IperfTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, p.run, result)
 
-    def _read_sample_output(self,filename):
+    def _read_sample_output(self, filename):
         curr_path = os.path.dirname(os.path.abspath(__file__))
         output = os.path.join(curr_path, filename)
         with open(output) as f:
index 3f22473..1b5dd64 100755 (executable)
 
 # Unittest for yardstick.benchmark.scenarios.networking.netperf.Netperf
 
-import mock
-import unittest
+from __future__ import absolute_import
+
 import os
-import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.networking import netperf
 
@@ -59,7 +62,7 @@ class NetperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output()
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
@@ -78,7 +81,7 @@ class NetperfTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output()
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
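
The reshuffled import blocks here and in the neighbouring files follow the grouping convention the commit applies throughout: __future__ imports first, then the standard library, then third-party packages, with a blank line between groups. In miniature:

    from __future__ import absolute_import

    import os
    import unittest

    import mock
    from oslo_serialization import jsonutils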
 
index 1c39b29..29a7edf 100755 (executable)
 # Unittest for
 # yardstick.benchmark.scenarios.networking.netperf_node.NetperfNode
 
-import mock
-import unittest
+from __future__ import absolute_import
+
 import os
-import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.networking import netperf_node
 
@@ -59,7 +62,7 @@ class NetperfNodeTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output()
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
@@ -78,7 +81,7 @@ class NetperfNodeTestCase(unittest.TestCase):
 
         sample_output = self._read_sample_output()
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         p.run(result)
         self.assertEqual(result, expected_result)
 
index eb6626f..7c04f5e 100644 (file)
@@ -9,8 +9,10 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.networking.netutilization.NetUtilization
+# Unittest for
+# yardstick.benchmark.scenarios.networking.netutilization.NetUtilization
 
+from __future__ import absolute_import
 import mock
 import unittest
 import os
index e42832f..3f8d84e 100644 (file)
@@ -9,27 +9,32 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.networking.networkcapacity.NetworkCapacity
+# Unittest for
+# yardstick.benchmark.scenarios.networking.networkcapacity.NetworkCapacity
+
+from __future__ import absolute_import
 
-import mock
 import unittest
-import os
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.networking import networkcapacity
 
-SAMPLE_OUTPUT = '{"Number of connections":"308","Number of frames received": "166503"}'
+SAMPLE_OUTPUT = \
+    '{"Number of connections":"308","Number of frames received": "166503"}'
+
 
 @mock.patch('yardstick.benchmark.scenarios.networking.networkcapacity.ssh')
 class NetworkCapacityTestCase(unittest.TestCase):
 
     def setUp(self):
         self.ctx = {
-                'host': {
-                    'ip': '172.16.0.137',
-                    'user': 'cirros',
-                    'password': "root"
-                },
+            'host': {
+                'ip': '172.16.0.137',
+                'user': 'cirros',
+                'password': "root"
+            },
         }
 
         self.result = {}
@@ -46,7 +51,7 @@ class NetworkCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.return_value = (0, SAMPLE_OUTPUT, '')
         c.run(self.result)
-        expected_result = json.loads(SAMPLE_OUTPUT)
+        expected_result = jsonutils.loads(SAMPLE_OUTPUT)
         self.assertEqual(self.result, expected_result)
 
     def test_capacity_unsuccessful_script_error(self, mock_ssh):
index 8d35b84..5535a79 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.networking.ping.Ping
 
+from __future__ import absolute_import
 import mock
 import unittest
 
@@ -37,7 +38,7 @@ class PingTestCase(unittest.TestCase):
         args = {
             'options': {'packetsize': 200},
             'target': 'ares.demo'
-            }
+        }
         result = {}
 
         p = ping.Ping(args, self.ctx)
@@ -53,7 +54,7 @@ class PingTestCase(unittest.TestCase):
             'options': {'packetsize': 200},
             'sla': {'max_rtt': 150},
             'target': 'ares.demo'
-            }
+        }
         result = {}
 
         p = ping.Ping(args, self.ctx)
index 0b8fba2..e22cacb 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.networking.ping.Ping
 
+from __future__ import absolute_import
 import mock
 import unittest
 
@@ -21,37 +22,37 @@ class PingTestCase(unittest.TestCase):
 
     def setUp(self):
         self.ctx = {
-            'nodes':{
-            'host1': {
-                'ip': '172.16.0.137',
-                'user': 'cirros',
-                'role': "Controller",
-                'key_filename': "mykey.key",
-                'password': "root"
+            'nodes': {
+                'host1': {
+                    'ip': '172.16.0.137',
+                    'user': 'cirros',
+                    'role': "Controller",
+                    'key_filename': "mykey.key",
+                    'password': "root"
                 },
-            'host2': {
-                "ip": "172.16.0.138",
-                "key_filename": "/root/.ssh/id_rsa",
-                "role": "Compute",
-                "name": "node3.IPV6",
-                "user": "root"
+                'host2': {
+                    "ip": "172.16.0.138",
+                    "key_filename": "/root/.ssh/id_rsa",
+                    "role": "Compute",
+                    "name": "node3.IPV6",
+                    "user": "root"
                 },
             }
         }
 
     def test_get_controller_node(self):
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
             'sla': {'max_rtt': 50}
         }
         p = ping6.Ping6(args, self.ctx)
-        controller_node = p._get_controller_node(['host1','host2'])
+        controller_node = p._get_controller_node(['host1', 'host2'])
         self.assertEqual(controller_node, 'host1')
 
     @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
     def test_ping_successful_setup(self, mock_ssh):
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
             'sla': {'max_rtt': 50}
         }
         p = ping6.Ping6(args, self.ctx)
@@ -63,58 +64,57 @@ class PingTestCase(unittest.TestCase):
     @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
     def test_ping_successful_no_sla(self, mock_ssh):
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
 
         }
         result = {}
 
         p = ping6.Ping6(args, self.ctx)
         p.client = mock_ssh.SSH()
-        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(0, 100, '')]
+        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
         p.run(result)
         self.assertEqual(result, {'rtt': 100.0})
 
     @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
     def test_ping_successful_sla(self, mock_ssh):
-
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
             'sla': {'max_rtt': 150}
         }
         result = {}
 
         p = ping6.Ping6(args, self.ctx)
         p.client = mock_ssh.SSH()
-        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(0, 100, '')]
+        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
         p.run(result)
         self.assertEqual(result, {'rtt': 100.0})
 
     @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
     def test_ping_unsuccessful_sla(self, mock_ssh):
-
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
             'sla': {'max_rtt': 50}
         }
         result = {}
 
         p = ping6.Ping6(args, self.ctx)
         p.client = mock_ssh.SSH()
-        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(0, 100, '')]
+        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
         self.assertRaises(AssertionError, p.run, result)
 
     @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
     def test_ping_unsuccessful_script_error(self, mock_ssh):
 
         args = {
-            'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+            'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
             'sla': {'max_rtt': 150}
         }
         result = {}
 
         p = ping6.Ping6(args, self.ctx)
         p.client = mock_ssh.SSH()
-        mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(1, '', 'FOOBAR')]
+        mock_ssh.SSH().execute.side_effect = [
+            (0, 'host1', ''), (1, '', 'FOOBAR')]
         self.assertRaises(RuntimeError, p.run, result)
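
The ping6 tests assign a list to mock_ssh.SSH().execute.side_effect, so consecutive calls return consecutive items: first the controller-node lookup, then the ping result. A standalone sketch of that mock behaviour:

    import mock

    ssh = mock.Mock()
    # A list side_effect yields one element per call, in order; a call
    # beyond the end of the list raises StopIteration.
    ssh.execute.side_effect = [(0, 'host1', ''), (0, 100, '')]

    assert ssh.execute('find controller') == (0, 'host1', '')
    assert ssh.execute('run ping6') == (0, 100, '')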
 
 
index 13a4c1b..f50fa10 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.pktgen.Pktgen
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.networking import pktgen
 
@@ -133,7 +136,7 @@ class PktgenTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         p.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         expected_result["packets_received"] = 149300
         self.assertEqual(result, expected_result)
 
@@ -159,7 +162,7 @@ class PktgenTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         p.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
         expected_result["packets_received"] = 149300
         self.assertEqual(result, expected_result)
 
index afc87ab..7ba4db9 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.pktgen.Pktgen
 
-import mock
+from __future__ import absolute_import
 import unittest
-import json
+
+import mock
 
 from yardstick.benchmark.scenarios.networking import pktgen_dpdk
 
+
 @mock.patch('yardstick.benchmark.scenarios.networking.pktgen_dpdk.ssh')
 class PktgenDPDKLatencyTestCase(unittest.TestCase):
 
@@ -116,7 +118,11 @@ class PktgenDPDKLatencyTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (0, sample_output, '')
 
         p.run(result)
-        self.assertEqual(result, {"avg_latency": 132})
+        # with Python 3 we get a float, likely due to Python 3's true division:
+        # AssertionError: {'avg_latency': 132.33333333333334} != {
+        # 'avg_latency': 132}
+        delta = result['avg_latency'] - 132
+        self.assertLessEqual(delta, 1)
 
     def test_pktgen_dpdk_successful_sla(self, mock_ssh):
 
@@ -169,5 +175,6 @@ class PktgenDPDKLatencyTestCase(unittest.TestCase):
 def main():
     unittest.main()
 
+
 if __name__ == '__main__':
     main()
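
The rewritten latency assertion above absorbs Python 3's true division, where the averaged value comes out as 132.333... instead of Python 2's truncated 132. unittest can express the same tolerance more directly; a sketch of an equivalent check using assertAlmostEqual's delta keyword:

    import unittest


    class DivisionToleranceExample(unittest.TestCase):

        def test_avg_latency(self):
            # Python 2: 397 / 3 == 132 (floor); Python 3: 132.333...
            avg_latency = 397 / 3.0
            # delta= bounds the absolute difference, and unlike the
            # hand-rolled subtraction it also rejects values that are
            # too low, not only ones that are too high.
            self.assertAlmostEqual(avg_latency, 132, delta=1)


    if __name__ == '__main__':
        unittest.main()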
index 618efc3..224a43b 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.networking.sfc
 
+from __future__ import absolute_import
 import mock
 import unittest
 
@@ -27,7 +28,7 @@ class SfcTestCase(unittest.TestCase):
         context_cfg['target'] = dict()
         context_cfg['target']['user'] = 'root'
         context_cfg['target']['password'] = 'opnfv'
-        context_cfg['target']['ip'] = '127.0.0.1' 
+        context_cfg['target']['ip'] = '127.0.0.1'
 
         # Used in Sfc.run()
         context_cfg['host'] = dict()
@@ -58,7 +59,8 @@ class SfcTestCase(unittest.TestCase):
     @mock.patch('yardstick.benchmark.scenarios.networking.sfc.subprocess')
     def test2_run_for_success(self, mock_subprocess, mock_openstack, mock_ssh):
         # Mock a successful SSH in Sfc.setup() and Sfc.run()
-        mock_ssh.SSH().execute.return_value = (0, 'vxlan_tool.py', 'succeeded timed out')
+        mock_ssh.SSH().execute.return_value = (
+            0, 'vxlan_tool.py', 'succeeded timed out')
         mock_openstack.get_an_IP.return_value = "127.0.0.1"
         mock_subprocess.call.return_value = 'mocked!'
 
index 25d5221..76d2afd 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.vsperf.Vsperf
 
-import mock
+from __future__ import absolute_import
+try:
+    from unittest import mock
+except ImportError:
+    import mock
 import unittest
-import os
-import subprocess
 
 from yardstick.benchmark.scenarios.networking import vsperf
 
 
 @mock.patch('yardstick.benchmark.scenarios.networking.vsperf.subprocess')
 @mock.patch('yardstick.benchmark.scenarios.networking.vsperf.ssh')
-@mock.patch("__builtin__.open", return_value=None)
+@mock.patch("yardstick.benchmark.scenarios.networking.vsperf.open",
+            mock.mock_open())
 class VsperfTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -58,7 +61,7 @@ class VsperfTestCase(unittest.TestCase):
             }
         }
 
-    def test_vsperf_setup(self, mock_open, mock_ssh, mock_subprocess):
+    def test_vsperf_setup(self, mock_ssh, mock_subprocess):
         p = vsperf.Vsperf(self.args, self.ctx)
         mock_ssh.SSH().execute.return_value = (0, '', '')
         mock_subprocess.call().execute.return_value = None
@@ -67,7 +70,7 @@ class VsperfTestCase(unittest.TestCase):
         self.assertIsNotNone(p.client)
         self.assertEqual(p.setup_done, True)
 
-    def test_vsperf_teardown(self, mock_open, mock_ssh, mock_subprocess):
+    def test_vsperf_teardown(self, mock_ssh, mock_subprocess):
         p = vsperf.Vsperf(self.args, self.ctx)
 
         # setup() specific mocks
@@ -81,7 +84,7 @@ class VsperfTestCase(unittest.TestCase):
         p.teardown()
         self.assertEqual(p.setup_done, False)
 
-    def test_vsperf_run_ok(self, mock_open, mock_ssh, mock_subprocess):
+    def test_vsperf_run_ok(self, mock_ssh, mock_subprocess):
         p = vsperf.Vsperf(self.args, self.ctx)
 
         # setup() specific mocks
@@ -90,14 +93,16 @@ class VsperfTestCase(unittest.TestCase):
 
         # run() specific mocks
         mock_ssh.SSH().execute.return_value = (0, '', '')
-        mock_ssh.SSH().execute.return_value = (0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+        mock_ssh.SSH().execute.return_value = (
+            0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
 
         result = {}
         p.run(result)
 
         self.assertEqual(result['throughput_rx_fps'], '14797660.000')
 
-    def test_vsperf_run_falied_vsperf_execution(self, mock_open, mock_ssh, mock_subprocess):
+    def test_vsperf_run_falied_vsperf_execution(self, mock_ssh,
+                                                mock_subprocess):
         p = vsperf.Vsperf(self.args, self.ctx)
 
         # setup() specific mocks
@@ -110,7 +115,7 @@ class VsperfTestCase(unittest.TestCase):
         result = {}
         self.assertRaises(RuntimeError, p.run, result)
 
-    def test_vsperf_run_falied_csv_report(self, mock_open, mock_ssh, mock_subprocess):
+    def test_vsperf_run_falied_csv_report(self, mock_ssh, mock_subprocess):
         p = vsperf.Vsperf(self.args, self.ctx)
 
         # setup() specific mocks
@@ -128,5 +133,6 @@ class VsperfTestCase(unittest.TestCase):
 def main():
     unittest.main()
 
+
 if __name__ == '__main__':
     main()
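
Two portability fixes meet in the vsperf changes above: mock now comes from unittest.mock when available, and open is patched at the name vsperf actually uses instead of "__builtin__.open", a module name that no longer exists on Python 3 (it became builtins). A sketch of both pieces; io.open stands in here as a conveniently patchable target:

    import io

    try:
        from unittest import mock   # Python 3 standard library
    except ImportError:
        import mock                 # Python 2 backport

    # mock.mock_open builds a file-like mock with canned contents.
    canned = mock.mock_open(read_data='throughput_rx_fps\r\n14797660.000\r\n')
    with mock.patch('io.open', canned):
        assert io.open('result.csv').read() == \
            'throughput_rx_fps\r\n14797660.000\r\n'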
index 418dd39..07b3da9 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
 
-import mock
+from __future__ import absolute_import
 import unittest
 
-from yardstick.benchmark.scenarios.networking import vtc_instantiation_validation
+from yardstick.benchmark.scenarios.networking import \
+    vtc_instantiation_validation
 
 
 class VtcInstantiationValidationTestCase(unittest.TestCase):
@@ -34,7 +35,8 @@ class VtcInstantiationValidationTestCase(unittest.TestCase):
         scenario['options']['vlan_sender'] = ''
         scenario['options']['vlan_receiver'] = ''
 
-        self.vt = vtc_instantiation_validation.VtcInstantiationValidation(scenario, '')
+        self.vt = vtc_instantiation_validation.VtcInstantiationValidation(
+            scenario, '')
 
     def test_run_for_success(self):
         result = {}
index e0a4624..34f3610 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
 
-import mock
+from __future__ import absolute_import
 import unittest
 
-from yardstick.benchmark.scenarios.networking import vtc_instantiation_validation_noisy
+from yardstick.benchmark.scenarios.networking import \
+    vtc_instantiation_validation_noisy
 
 
 class VtcInstantiationValidationiNoisyTestCase(unittest.TestCase):
@@ -37,7 +38,9 @@ class VtcInstantiationValidationiNoisyTestCase(unittest.TestCase):
         scenario['options']['amount_of_ram'] = '1G'
         scenario['options']['number_of_cores'] = '1'
 
-        self.vt = vtc_instantiation_validation_noisy.VtcInstantiationValidationNoisy(scenario, '')
+        self.vt = \
+            vtc_instantiation_validation_noisy.VtcInstantiationValidationNoisy(
+                scenario, '')
 
     def test_run_for_success(self):
         result = {}
index ecdf555..a73fad5 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
 
+from __future__ import absolute_import
 import mock
 import unittest
 
index 98957b1..e1b162c 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
 
+from __future__ import absolute_import
 import mock
 import unittest
 
index d11a6d5..59b98a0 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.parser.Parser
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.parser import parser
 
+
 @mock.patch('yardstick.benchmark.scenarios.parser.parser.subprocess')
 class ParserTestCase(unittest.TestCase):
 
@@ -32,8 +36,8 @@ class ParserTestCase(unittest.TestCase):
 
     def test_parser_successful(self, mock_subprocess):
         args = {
-            'options': {'yangfile':'/root/yardstick/samples/yang.yaml',
-            'toscafile':'/root/yardstick/samples/tosca.yaml'},
+            'options': {'yangfile': '/root/yardstick/samples/yang.yaml',
+                        'toscafile': '/root/yardstick/samples/tosca.yaml'},
         }
         p = parser.Parser(args, {})
         result = {}
@@ -41,7 +45,7 @@ class ParserTestCase(unittest.TestCase):
         sample_output = '{"yangtotosca": "success"}'
 
         p.run(result)
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
 
     def test_parser_teardown_successful(self, mock_subprocess):
 
index 153d150..603ff38 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.storage.fio.Fio
 
-import mock
-import unittest
-import json
+from __future__ import absolute_import
+
 import os
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.storage import fio
 
@@ -74,7 +77,7 @@ class FioTestCase(unittest.TestCase):
         expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
             '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
             '"write_lat": 233.55}'
-        expected_result = json.loads(expected_result)
+        expected_result = jsonutils.loads(expected_result)
         self.assertEqual(result, expected_result)
 
     def test_fio_successful_read_no_sla(self, mock_ssh):
@@ -98,7 +101,7 @@ class FioTestCase(unittest.TestCase):
 
         expected_result = '{"read_bw": 36113, "read_iops": 9028,' \
             '"read_lat": 108.7}'
-        expected_result = json.loads(expected_result)
+        expected_result = jsonutils.loads(expected_result)
         self.assertEqual(result, expected_result)
 
     def test_fio_successful_write_no_sla(self, mock_ssh):
@@ -122,7 +125,7 @@ class FioTestCase(unittest.TestCase):
 
         expected_result = '{"write_bw": 35107, "write_iops": 8776,'\
             '"write_lat": 111.74}'
-        expected_result = json.loads(expected_result)
+        expected_result = jsonutils.loads(expected_result)
         self.assertEqual(result, expected_result)
 
     def test_fio_successful_lat_sla(self, mock_ssh):
@@ -150,10 +153,9 @@ class FioTestCase(unittest.TestCase):
         expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
             '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
             '"write_lat": 233.55}'
-        expected_result = json.loads(expected_result)
+        expected_result = jsonutils.loads(expected_result)
         self.assertEqual(result, expected_result)
 
-
     def test_fio_unsuccessful_lat_sla(self, mock_ssh):
 
         options = {
@@ -200,7 +202,7 @@ class FioTestCase(unittest.TestCase):
         expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
             '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
             '"write_lat": 233.55}'
-        expected_result = json.loads(expected_result)
+        expected_result = jsonutils.loads(expected_result)
         self.assertEqual(result, expected_result)
 
     def test_fio_unsuccessful_bw_iops_sla(self, mock_ssh):
@@ -248,8 +250,10 @@ class FioTestCase(unittest.TestCase):
             sample_output = f.read()
         return sample_output
 
+
 def main():
     unittest.main()
 
+
 if __name__ == '__main__':
     main()
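
The scattered blank-line additions around main() in this and other files satisfy flake8's E302/E305 rules: two blank lines before and after every top-level definition. In miniature:

    import unittest


    def main():                    # two blank lines above it (E302)
        unittest.main()


    if __name__ == '__main__':     # and two after the last def (E305)
        main()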
index ace0ca3..6fb5f56 100644 (file)
@@ -9,35 +9,41 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-# Unittest for yardstick.benchmark.scenarios.storage.storagecapacity.StorageCapacity
+# Unittest for
+# yardstick.benchmark.scenarios.storage.storagecapacity.StorageCapacity
+
+from __future__ import absolute_import
 
-import mock
 import unittest
-import os
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.storage import storagecapacity
 
-DISK_SIZE_SAMPLE_OUTPUT = '{"Numberf of devides": "2", "Total disk size in bytes": "1024000000"}'
+DISK_SIZE_SAMPLE_OUTPUT = \
+    '{"Numberf of devides": "2", "Total disk size in bytes": "1024000000"}'
 BLOCK_SIZE_SAMPLE_OUTPUT = '{"/dev/sda": 1024, "/dev/sdb": 4096}'
 DISK_UTIL_RAW_OUTPUT = "vda 10.00\nvda 0.00"
-DISK_UTIL_SAMPLE_OUTPUT = '{"vda": {"avg_util": 5.0, "max_util": 10.0, "min_util": 0.0}}'
+DISK_UTIL_SAMPLE_OUTPUT = \
+    '{"vda": {"avg_util": 5.0, "max_util": 10.0, "min_util": 0.0}}'
+
 
 @mock.patch('yardstick.benchmark.scenarios.storage.storagecapacity.ssh')
 class StorageCapacityTestCase(unittest.TestCase):
 
     def setUp(self):
         self.scn = {
-               "options": {
-                   'test_type': 'disk_size'
-               }
+            "options": {
+                'test_type': 'disk_size'
+            }
         }
         self.ctx = {
-                "host": {
-                    'ip': '172.16.0.137',
-                    'user': 'cirros',
-                    'password': "root"
-                }
+            "host": {
+                'ip': '172.16.0.137',
+                'user': 'cirros',
+                'password': "root"
+            }
         }
         self.result = {}
 
@@ -54,7 +60,8 @@ class StorageCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.return_value = (0, DISK_SIZE_SAMPLE_OUTPUT, '')
         c.run(self.result)
-        expected_result = json.loads(DISK_SIZE_SAMPLE_OUTPUT)
+        expected_result = jsonutils.loads(
+            DISK_SIZE_SAMPLE_OUTPUT)
         self.assertEqual(self.result, expected_result)
 
     def test_capacity_block_size_successful(self, mock_ssh):
@@ -67,7 +74,8 @@ class StorageCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.return_value = (0, BLOCK_SIZE_SAMPLE_OUTPUT, '')
         c.run(self.result)
-        expected_result = json.loads(BLOCK_SIZE_SAMPLE_OUTPUT)
+        expected_result = jsonutils.loads(
+            BLOCK_SIZE_SAMPLE_OUTPUT)
         self.assertEqual(self.result, expected_result)
 
     def test_capacity_disk_utilization_successful(self, mock_ssh):
@@ -82,7 +90,8 @@ class StorageCapacityTestCase(unittest.TestCase):
 
         mock_ssh.SSH().execute.return_value = (0, DISK_UTIL_RAW_OUTPUT, '')
         c.run(self.result)
-        expected_result = json.loads(DISK_UTIL_SAMPLE_OUTPUT)
+        expected_result = jsonutils.loads(
+            DISK_UTIL_SAMPLE_OUTPUT)
         self.assertEqual(self.result, expected_result)
 
     def test_capacity_unsuccessful_script_error(self, mock_ssh):
@@ -91,6 +100,7 @@ class StorageCapacityTestCase(unittest.TestCase):
         mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
         self.assertRaises(RuntimeError, c.run, self.result)
 
+
 def main():
     unittest.main()
 
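
These scenario tests lean on the convention that SSH execute() returns a (status, stdout, stderr) triple, so faking a remote command is just a tuple on the mock. A sketch of the pattern (the command string is hypothetical):

    import mock  # on Python 3: from unittest import mock

    mock_ssh = mock.Mock()
    mock_ssh.SSH().execute.return_value = (0, '{"/dev/sda": 1024}', '')
    status, stdout, stderr = mock_ssh.SSH().execute("parted -l")
    assert status == 0 and stderr == ''
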
index 8fc97d2..adc9d47 100644 (file)
 
 # Unittest for yardstick.benchmark.scenarios.storage.storperf.StorPerf
 
-import mock
+from __future__ import absolute_import
+
 import unittest
-import requests
-import json
+
+import mock
+from oslo_serialization import jsonutils
 
 from yardstick.benchmark.scenarios.storage import storperf
 
 
 def mocked_requests_config_post(*args, **kwargs):
     class MockResponseConfigPost:
+
         def __init__(self, json_data, status_code):
             self.content = json_data
             self.status_code = status_code
 
-    return MockResponseConfigPost('{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622","stack_created": "false"}', 200)
+    return MockResponseConfigPost(
+        '{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622",'
+        '"stack_created": "false"}',
+        200)
 
 
 def mocked_requests_config_get(*args, **kwargs):
     class MockResponseConfigGet:
+
         def __init__(self, json_data, status_code):
             self.content = json_data
             self.status_code = status_code
 
-    return MockResponseConfigGet('{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622","stack_created": "true"}', 200)
+    return MockResponseConfigGet(
+        '{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622",'
+        '"stack_created": "true"}',
+        200)
 
 
 def mocked_requests_job_get(*args, **kwargs):
     class MockResponseJobGet:
+
         def __init__(self, json_data, status_code):
             self.content = json_data
             self.status_code = status_code
 
-    return MockResponseJobGet('{"status": "completed", "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}', 200)
+    return MockResponseJobGet(
+        '{"status": "completed",\
+         "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}',
+        200)
 
 
 def mocked_requests_job_post(*args, **kwargs):
     class MockResponseJobPost:
+
         def __init__(self, json_data, status_code):
             self.content = json_data
             self.status_code = status_code
@@ -58,6 +73,7 @@ def mocked_requests_job_post(*args, **kwargs):
 
 def mocked_requests_job_delete(*args, **kwargs):
     class MockResponseJobDelete:
+
         def __init__(self, json_data, status_code):
             self.content = json_data
             self.status_code = status_code
@@ -67,6 +83,7 @@ def mocked_requests_job_delete(*args, **kwargs):
 
 def mocked_requests_delete(*args, **kwargs):
     class MockResponseDelete:
+
         def __init__(self, json_data, status_code):
             self.json_data = json_data
             self.status_code = status_code
@@ -76,6 +93,7 @@ def mocked_requests_delete(*args, **kwargs):
 
 def mocked_requests_delete_failed(*args, **kwargs):
     class MockResponseDeleteFailed:
+
         def __init__(self, json_data, status_code):
             self.json_data = json_data
             self.status_code = status_code
@@ -130,8 +148,9 @@ class StorPerfTestCase(unittest.TestCase):
                 side_effect=mocked_requests_job_post)
     @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.get',
                 side_effect=mocked_requests_job_get)
-    @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.delete',
-                side_effect=mocked_requests_job_delete)
+    @mock.patch(
+        'yardstick.benchmark.scenarios.storage.storperf.requests.delete',
+        side_effect=mocked_requests_job_delete)
     def test_successful_run(self, mock_post, mock_get, mock_delete):
         options = {
             "agent_count": 8,
@@ -152,15 +171,18 @@ class StorPerfTestCase(unittest.TestCase):
         s = storperf.StorPerf(args, self.ctx)
         s.setup_done = True
 
-        sample_output = '{"status": "completed", "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}'
+        sample_output = '{"status": "completed",\
+         "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}'
 
-        expected_result = json.loads(sample_output)
+        expected_result = jsonutils.loads(sample_output)
 
         s.run(self.result)
 
         self.assertEqual(self.result, expected_result)
 
-    @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.delete', side_effect=mocked_requests_delete)
+    @mock.patch(
+        'yardstick.benchmark.scenarios.storage.storperf.requests.delete',
+        side_effect=mocked_requests_delete)
     def test_successful_teardown(self, mock_delete):
         options = {
             "agent_count": 8,
@@ -184,7 +206,9 @@ class StorPerfTestCase(unittest.TestCase):
 
         self.assertFalse(s.setup_done)
 
-    @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.delete', side_effect=mocked_requests_delete_failed)
+    @mock.patch(
+        'yardstick.benchmark.scenarios.storage.storperf.requests.delete',
+        side_effect=mocked_requests_delete_failed)
     def test_failed_teardown(self, mock_delete):
         options = {
             "agent_count": 8,
index e85c367..c6e0e1d 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import unittest
 import mock
 import uuid
index 94ac1c8..eb09d1a 100644 (file)
@@ -6,9 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
 import unittest
+
 import mock
-import json
+from oslo_serialization import jsonutils
 
 from yardstick.common import httpClient
 
@@ -21,8 +24,9 @@ class HttpClientTestCase(unittest.TestCase):
         data = {'hello': 'world'}
         headers = {'Content-Type': 'application/json'}
         httpClient.HttpClient().post(url, data)
-        mock_requests.post.assert_called_with(url, data=json.dumps(data),
-                                              headers=headers)
+        mock_requests.post.assert_called_with(
+            url, data=jsonutils.dump_as_bytes(data),
+            headers=headers)
 
     @mock.patch('yardstick.common.httpClient.requests')
     def test_get(self, mock_requests):
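
dump_as_bytes is asserted here rather than json.dumps because json.dumps returns text (str) on Python 3, while dump_as_bytes yields bytes on both interpreters, keeping the HTTP body type stable. A sketch, assuming oslo.serialization:

    from oslo_serialization import jsonutils

    body = jsonutils.dump_as_bytes({'hello': 'world'})
    assert isinstance(body, bytes)  # json.dumps would give str on py3
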
index ef619aa..d610e18 100644 (file)
@@ -11,6 +11,7 @@
 
 # Unittest for yardstick.common.openstack_utils
 
+from __future__ import absolute_import
 import unittest
 import mock
 
index 0e1a1a5..2a7d80b 100644 (file)
@@ -12,6 +12,7 @@
 
 # yardstick: this file is copied from python-heatclient and slightly modified
 
+from __future__ import absolute_import
 import mock
 import unittest
 import yaml
index a64c1f1..267c713 100644 (file)
@@ -9,6 +9,7 @@
 
 # Unittest for yardstick.common.utils
 
+from __future__ import absolute_import
 import os
 import mock
 import unittest
@@ -17,9 +18,10 @@ from yardstick.common import utils
 
 
 class IterSubclassesTestCase(unittest.TestCase):
-# Disclaimer: this class is a modified copy from
-# rally/tests/unit/common/plugin/test_discover.py
-# Copyright 2015: Mirantis Inc.
+    # Disclaimer: this class is a modified copy from
+    # rally/tests/unit/common/plugin/test_discover.py
+    # Copyright 2015: Mirantis Inc.
+
     def test_itersubclasses(self):
         class A(object):
             pass
index 5553c86..b84389e 100644 (file)
 
 # Unittest for yardstick.dispatcher.influxdb
 
-import mock
+from __future__ import absolute_import
 import unittest
 
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
 from yardstick.dispatcher.influxdb import InfluxdbDispatcher
 
+
 class InfluxdbDispatcherTestCase(unittest.TestCase):
 
     def setUp(self):
@@ -24,7 +30,9 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
             "context_cfg": {
                 "host": {
                     "ip": "10.229.43.154",
-                    "key_filename": "/root/yardstick/yardstick/resources/files/yardstick_key",
+                    "key_filename":
+                        "/root/yardstick/yardstick/resources/files"
+                        "/yardstick_key",
                     "name": "kvm.LF",
                     "user": "root"
                 },
@@ -35,7 +43,8 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
             "scenario_cfg": {
                 "runner": {
                     "interval": 1,
-                    "object": "yardstick.benchmark.scenarios.networking.ping.Ping",
+                    "object": "yardstick.benchmark.scenarios.networking.ping"
+                              ".Ping",
                     "output_filename": "/tmp/yardstick.out",
                     "runner_id": 8921,
                     "duration": 10,
@@ -63,7 +72,7 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
             },
             "runner_id": 8921
         }
-        self.data3 ={
+        self.data3 = {
             "benchmark": {
                 "data": {
                     "mpstat": {
@@ -99,26 +108,35 @@ class InfluxdbDispatcherTestCase(unittest.TestCase):
         self.assertEqual(influxdb.flush_result_data(), 0)
 
     def test__dict_key_flatten(self):
-        line = 'mpstat.loadavg1=0.29,rtt=1.03,mpstat.loadavg0=1.09,mpstat.cpu0.%idle=99.00,mpstat.cpu0.%sys=0.00'
+        line = 'mpstat.loadavg1=0.29,rtt=1.03,mpstat.loadavg0=1.09,' \
+               'mpstat.cpu0.%idle=99.00,mpstat.cpu0.%sys=0.00'
+        # need to sort for assert to work
+        line = ",".join(sorted(line.split(',')))
         influxdb = InfluxdbDispatcher(None)
-        flattened_data = influxdb._dict_key_flatten(self.data3['benchmark']['data'])
-        result = ",".join([k+"="+v for k, v in flattened_data.items()])
+        flattened_data = influxdb._dict_key_flatten(
+            self.data3['benchmark']['data'])
+        result = ",".join(
+            [k + "=" + v for k, v in sorted(flattened_data.items())])
         self.assertEqual(result, line)
 
     def test__get_nano_timestamp(self):
         influxdb = InfluxdbDispatcher(None)
         results = {'benchmark': {'timestamp': '1451461248.925574'}}
-        self.assertEqual(influxdb._get_nano_timestamp(results), '1451461248925574144')
+        self.assertEqual(influxdb._get_nano_timestamp(results),
+                         '1451461248925574144')
 
     @mock.patch('yardstick.dispatcher.influxdb.time')
     def test__get_nano_timestamp_except(self, mock_time):
         results = {}
         influxdb = InfluxdbDispatcher(None)
         mock_time.time.return_value = 1451461248.925574
-        self.assertEqual(influxdb._get_nano_timestamp(results), '1451461248925574144')
+        self.assertEqual(influxdb._get_nano_timestamp(results),
+                         '1451461248925574144')
+
 
 def main():
     unittest.main()
 
+
 if __name__ == '__main__':
     main()
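
The sorting on both sides of test__dict_key_flatten is what makes the assertion deterministic: dict iteration order is interpreter-dependent before Python 3.7. A reduced sketch of the trick:

    flattened = {"rtt": "1.03", "mpstat.loadavg1": "0.29"}
    result = ",".join(k + "=" + v for k, v in sorted(flattened.items()))
    assert result == "mpstat.loadavg1=0.29,rtt=1.03"
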
index 42553c4..debb199 100644 (file)
@@ -3,6 +3,7 @@
 # yardstick comment: this file is a modified copy of
 # influxdb-python/influxdb/tests/test_line_protocol.py
 
+from __future__ import absolute_import
 import unittest
 from third_party.influxdb.influxdb_line_protocol import make_lines
 
index 045ac0f..1c63c00 100644 (file)
 # yardstick comment: this file is a modified copy of
 # rally/tests/unit/common/test_sshutils.py
 
+from __future__ import absolute_import
 import os
 import socket
 import unittest
-from cStringIO import StringIO
+from io import StringIO
 
 import mock
+from oslo_utils import encodeutils
 
 from yardstick import ssh
 
@@ -274,7 +276,9 @@ class SSHRunTestCase(unittest.TestCase):
         fake_stdin.close = mock.Mock(side_effect=close)
         self.test_client.run("cmd", stdin=fake_stdin)
         call = mock.call
-        send_calls = [call("line1"), call("line2"), call("e2")]
+        send_calls = [call(encodeutils.safe_encode("line1", "utf-8")),
+                      call(encodeutils.safe_encode("line2", "utf-8")),
+                      call(encodeutils.safe_encode("e2", "utf-8"))]
         self.assertEqual(send_calls, self.fake_session.send.mock_calls)
 
     @mock.patch("yardstick.ssh.select")
@@ -288,10 +292,10 @@ class SSHRunTestCase(unittest.TestCase):
         self.fake_session.exit_status_ready.side_effect = [0, 0, 0, True]
         self.fake_session.send_ready.return_value = True
         self.fake_session.send.side_effect = len
-        fake_stdin = StringIO("line1\nline2\n")
+        fake_stdin = StringIO(u"line1\nline2\n")
         self.test_client.run("cmd", stdin=fake_stdin, keep_stdin_open=True)
         call = mock.call
-        send_calls = [call("line1\nline2\n")]
+        send_calls = [call(encodeutils.safe_encode("line1\nline2\n", "utf-8"))]
         self.assertEqual(send_calls, self.fake_session.send.mock_calls)
 
     @mock.patch("yardstick.ssh.select")
@@ -393,5 +397,6 @@ class SSHRunTestCase(unittest.TestCase):
 def main():
     unittest.main()
 
+
 if __name__ == '__main__':
     main()
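
Two Python 3 details drive the ssh test changes: io.StringIO accepts only text (hence the u"" literals), and data sent over the channel must be bytes (hence safe_encode). A sketch, assuming oslo.utils is installed:

    from io import StringIO
    from oslo_utils import encodeutils

    stdin = StringIO(u"line1\nline2\n")  # bytes here would raise TypeError
    chunk = encodeutils.safe_encode(stdin.read(), "utf-8")
    assert chunk == b"line1\nline2\n"
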
index eee9821..12b010c 100644 (file)
 # yardstick comment: this file is a modified copy of
 # influxdb-python/influxdb/line_protocol.py
 
+from __future__ import absolute_import
 from __future__ import unicode_literals
+
 from copy import copy
 
+from oslo_utils import encodeutils
 from six import binary_type, text_type, integer_types
 
 
@@ -64,7 +67,7 @@ def _get_unicode(data, force=False):
     Try to return a text aka unicode object from the given data.
     """
     if isinstance(data, binary_type):
-        return data.decode('utf-8')
+        return encodeutils.safe_decode(data, 'utf-8')
     elif data is None:
         return ''
     elif force:
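
safe_decode makes _get_unicode indifferent to its input type: it decodes bytes and passes text through unchanged. A sketch, assuming oslo.utils:

    from oslo_utils import encodeutils

    assert encodeutils.safe_decode(b"cpu0", "utf-8") == u"cpu0"
    assert encodeutils.safe_decode(u"cpu0", "utf-8") == u"cpu0"  # no-op
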
index 5c279c8..3ae915c 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 import os
 import sys
index 8b292ac..898013f 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import yardstick.common.utils as utils
 
 utils.import_modules_from_package("yardstick.benchmark.contexts")
index 76a8288..054ce42 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import abc
 import six
 
index 6901b26..0e76b5a 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 
 from yardstick.benchmark.contexts.base import Context
index 29c47b3..0b2fbdc 100644 (file)
@@ -7,20 +7,28 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
+from __future__ import print_function
+
+import collections
+import logging
 import os
 import sys
 import uuid
-import pkg_resources
+
 import paramiko
+import pkg_resources
 
 from yardstick.benchmark.contexts.base import Context
-from yardstick.benchmark.contexts.model import Server
-from yardstick.benchmark.contexts.model import PlacementGroup
 from yardstick.benchmark.contexts.model import Network
+from yardstick.benchmark.contexts.model import PlacementGroup
+from yardstick.benchmark.contexts.model import Server
 from yardstick.benchmark.contexts.model import update_scheduler_hints
 from yardstick.orchestrator.heat import HeatTemplate, get_short_key_uuid
 from yardstick.definitions import YARDSTICK_ROOT_PATH
 
+LOG = logging.getLogger(__name__)
+
 
 class HeatContext(Context):
     '''Class that represents a context in the logical model'''
@@ -193,7 +201,7 @@ class HeatContext(Context):
 
     def deploy(self):
         '''deploys template into a stack using cloud'''
-        print "Deploying context '%s'" % self.name
+        print("Deploying context '%s'" % self.name)
 
         heat_template = HeatTemplate(self.name, self.template_file,
                                      self.heat_parameters)
@@ -214,29 +222,29 @@ class HeatContext(Context):
         for server in self.servers:
             if len(server.ports) > 0:
                 # TODO(hafe) can only handle one internal network for now
-                port = server.ports.values()[0]
+                port = list(server.ports.values())[0]
                 server.private_ip = self.stack.outputs[port["stack_name"]]
 
             if server.floating_ip:
                 server.public_ip = \
                     self.stack.outputs[server.floating_ip["stack_name"]]
 
-        print "Context '%s' deployed" % self.name
+        print("Context '%s' deployed" % self.name)
 
     def undeploy(self):
         '''undeploys stack from cloud'''
         if self.stack:
-            print "Undeploying context '%s'" % self.name
+            print("Undeploying context '%s'" % self.name)
             self.stack.delete()
             self.stack = None
-            print "Context '%s' undeployed" % self.name
+            print("Context '%s' undeployed" % self.name)
 
         if os.path.exists(self.key_filename):
             try:
                 os.remove(self.key_filename)
                 os.remove(self.key_filename + ".pub")
-            except OSError, e:
-                print ("Error: %s - %s." % (e.key_filename, e.strerror))
+            except OSError:
+                LOG.exception("Key filename %s", self.key_filename)
 
     def _get_server(self, attr_name):
         '''lookup server info by name from context
@@ -247,7 +255,7 @@ class HeatContext(Context):
             'yardstick.resources',
             'files/yardstick_key-' + get_short_key_uuid(self.key_uuid))
 
-        if type(attr_name) is dict:
+        if isinstance(attr_name, collections.Mapping):
             cname = attr_name["name"].split(".")[1]
             if cname != self.name:
                 return None
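
The list() wrapper around server.ports.values() is required because Python 3 dict views do not support indexing. A sketch with a hypothetical ports mapping:

    ports = {"xe0": {"stack_name": "demo-xe0"}}
    first_port = list(ports.values())[0]  # ports.values()[0] fails on py3
    assert first_port["stack_name"] == "demo-xe0"
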
index d31f4af..1d0a5a1 100644 (file)
 """ Logical model
 
 """
+from __future__ import absolute_import
+from six.moves import range
 
 
 class Object(object):
     '''Base class for classes in the logical model
     Contains common attributes and methods
     '''
+
     def __init__(self, name, context):
         # model identities and reference
         self.name = name
@@ -61,6 +64,7 @@ class PlacementGroup(Object):
 
 class Router(Object):
     '''Class that represents a router in the logical model'''
+
     def __init__(self, name, network_name, context, external_gateway_info):
         super(self.__class__, self).__init__(name, context)
 
index 78bce82..6db51cc 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import sys
 import os
 import yaml
@@ -49,11 +50,11 @@ class NodeContext(Context):
 
         self.nodes.extend(cfg["nodes"])
         self.controllers.extend([node for node in cfg["nodes"]
-                                if node["role"] == "Controller"])
+                                 if node["role"] == "Controller"])
         self.computes.extend([node for node in cfg["nodes"]
-                             if node["role"] == "Compute"])
+                              if node["role"] == "Compute"])
         self.baremetals.extend([node for node in cfg["nodes"]
-                               if node["role"] == "Baremetal"])
+                                if node["role"] == "Baremetal"])
         LOG.debug("Nodes: %r", self.nodes)
         LOG.debug("Controllers: %r", self.controllers)
         LOG.debug("Computes: %r", self.computes)
index da12ce4..3080f5d 100644 (file)
@@ -10,6 +10,7 @@
 """ Handler for yardstick command 'plugin' """
 
 from __future__ import print_function
+from __future__ import absolute_import
 import os
 import sys
 import yaml
@@ -182,7 +183,7 @@ class PluginParser(object):
            and a deployment instance
         """
 
-        print ("Parsing plugin config:", self.path)
+        print("Parsing plugin config:", self.path)
 
         try:
             kw = {}
@@ -191,10 +192,10 @@ class PluginParser(object):
                     input_plugin = f.read()
                     rendered_plugin = TaskTemplate.render(input_plugin, **kw)
                 except Exception as e:
-                    print(("Failed to render template:\n%(plugin)s\n%(err)s\n")
+                    print("Failed to render template:\n%(plugin)s\n%(err)s\n"
                           % {"plugin": input_plugin, "err": e})
                     raise e
-                print(("Input plugin is:\n%s\n") % rendered_plugin)
+                print("Input plugin is:\n%s\n" % rendered_plugin)
 
                 cfg = yaml.load(rendered_plugin)
         except IOError as ioerror:
index e8dd21a..5f8132d 100644 (file)
@@ -9,6 +9,8 @@
 
 """ Handler for yardstick command 'runner' """
 
+from __future__ import absolute_import
+from __future__ import print_function
 from yardstick.benchmark.runners.base import Runner
 from yardstick.benchmark.core import print_hbar
 
@@ -26,11 +28,11 @@ class Runners(object):
         print("| %-16s | %-60s" % ("Type", "Description"))
         print_hbar(78)
         for rtype in types:
-            print "| %-16s | %-60s" % (rtype.__execution_type__,
-                                       rtype.__doc__.split("\n")[0])
+            print("| %-16s | %-60s" % (rtype.__execution_type__,
+                                       rtype.__doc__.split("\n")[0]))
         print_hbar(78)
 
     def show(self, args):
         '''Show details of a specific runner type'''
         rtype = Runner.get_cls(args.type[0])
-        print rtype.__doc__
+        print(rtype.__doc__)
index e228054..15335af 100644 (file)
@@ -9,6 +9,8 @@
 
 """ Handler for yardstick command 'scenario' """
 
+from __future__ import absolute_import
+from __future__ import print_function
 from yardstick.benchmark.scenarios.base import Scenario
 from yardstick.benchmark.core import print_hbar
 
@@ -33,4 +35,4 @@ class Scenarios(object):
     def show(self, args):
         '''Show details of a specific scenario type'''
         stype = Scenario.get_cls(args.type[0])
-        print stype.__doc__
+        print(stype.__doc__)
index 8fb1177..d9a8576 100644 (file)
@@ -9,6 +9,8 @@
 
 """ Handler for yardstick command 'task' """
 
+from __future__ import absolute_import
+from __future__ import print_function
 import sys
 import os
 import yaml
@@ -18,7 +20,7 @@ import time
 import logging
 import uuid
 import errno
-from itertools import ifilter
+import six
+from six.moves import filter
 
 from yardstick.benchmark.contexts.base import Context
 from yardstick.benchmark.runners import base as base_runner
@@ -71,7 +73,7 @@ class Task(object):     # pragma: no cover
             one_task_start_time = time.time()
             parser.path = task_files[i]
             scenarios, run_in_parallel, meet_precondition = parser.parse_task(
-                 self.task_id, task_args[i], task_args_fnames[i])
+                self.task_id, task_args[i], task_args_fnames[i])
 
             if not meet_precondition:
                 LOG.info("meet_precondition is %s, please check envrionment",
@@ -96,7 +98,7 @@ class Task(object):     # pragma: no cover
         LOG.info("total finished in %d secs",
                  total_end_time - total_start_time)
 
-        print "Done, exiting"
+        print("Done, exiting")
 
     def _run(self, scenarios, run_in_parallel, output_file):
         '''Deploys context and calls runners'''
@@ -106,7 +108,7 @@ class Task(object):     # pragma: no cover
         background_runners = []
 
         # Start all background scenarios
-        for scenario in ifilter(_is_background_scenario, scenarios):
+        for scenario in filter(_is_background_scenario, scenarios):
             scenario["runner"] = dict(type="Duration", duration=1000000000)
             runner = run_one_scenario(scenario, output_file)
             background_runners.append(runner)
@@ -121,14 +123,14 @@ class Task(object):     # pragma: no cover
             # Wait for runners to finish
             for runner in runners:
                 runner_join(runner)
-                print "Runner ended, output in", output_file
+                print("Runner ended, output in", output_file)
         else:
             # run serially
             for scenario in scenarios:
                 if not _is_background_scenario(scenario):
                     runner = run_one_scenario(scenario, output_file)
                     runner_join(runner)
-                    print "Runner ended, output in", output_file
+                    print("Runner ended, output in", output_file)
 
         # Abort background runners
         for runner in background_runners:
@@ -142,7 +144,7 @@ class Task(object):     # pragma: no cover
                 runner_join(runner)
             else:
                 base_runner.Runner.release(runner)
-            print "Background task ended"
+            print("Background task ended")
 
 
 # TODO: Move stuff below into TaskCommands class !?
@@ -150,6 +152,7 @@ class Task(object):     # pragma: no cover
 
 class TaskParser(object):       # pragma: no cover
     '''Parser for task config files in yaml format'''
+
     def __init__(self, path):
         self.path = path
 
@@ -224,7 +227,7 @@ class TaskParser(object):       # pragma: no cover
 
     def parse_task(self, task_id, task_args=None, task_args_file=None):
         '''parses the task file and return an context and scenario instances'''
-        print "Parsing task config:", self.path
+        print("Parsing task config:", self.path)
 
         try:
             kw = {}
@@ -241,10 +244,10 @@ class TaskParser(object):       # pragma: no cover
                     input_task = f.read()
                     rendered_task = TaskTemplate.render(input_task, **kw)
                 except Exception as e:
-                    print(("Failed to render template:\n%(task)s\n%(err)s\n")
+                    print("Failed to render template:\n%(task)s\n%(err)s\n"
                           % {"task": input_task, "err": e})
                     raise e
-                print(("Input task is:\n%s\n") % rendered_task)
+                print("Input task is:\n%s\n" % rendered_task)
 
                 cfg = yaml.load(rendered_task)
         except IOError as ioerror:
@@ -343,7 +346,7 @@ def atexit_handler():
     base_runner.Runner.terminate_all()
 
     if len(Context.list) > 0:
-        print "Undeploying all contexts"
+        print("Undeploying all contexts")
         for context in Context.list:
             context.undeploy()
 
@@ -351,7 +354,7 @@ def atexit_handler():
 def is_ip_addr(addr):
     '''check if string addr is an IP address'''
     try:
-        ipaddress.ip_address(unicode(addr))
+        ipaddress.ip_address(six.text_type(addr))
         return True
     except ValueError:
         return False
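
ipaddress interprets bytes input as a packed binary address, so encoding the dotted-quad string makes valid addresses fail the length check; the module expects the text form. A sketch of the pitfall:

    import ipaddress
    import six

    ipaddress.ip_address(six.text_type("10.0.0.1"))  # accepted: text form
    try:
        ipaddress.ip_address(b"10.0.0.1")  # 8 raw bytes, not 4 or 16
    except ValueError:
        pass  # bytes must be a packed 4- or 16-byte address
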
@@ -434,7 +437,7 @@ def run_one_scenario(scenario_cfg, output_file):
         context_cfg["nodes"] = parse_nodes_with_context(scenario_cfg)
     runner = base_runner.Runner.get(runner_cfg)
 
-    print "Starting runner of type '%s'" % runner_cfg["type"]
+    print("Starting runner of type '%s'" % runner_cfg["type"])
     runner.run(scenario_cfg, context_cfg)
 
     return runner
@@ -460,7 +463,7 @@ def runner_join(runner):
 
 
 def print_invalid_header(source_name, args):
-    print(("Invalid %(source)s passed:\n\n %(args)s\n")
+    print("Invalid %(source)s passed:\n\n %(args)s\n"
           % {"source": source_name, "args": args})
 
 
@@ -470,13 +473,13 @@ def parse_task_args(src_name, args):
         kw = {} if kw is None else kw
     except yaml.parser.ParserError as e:
         print_invalid_header(src_name, args)
-        print(("%(source)s has to be YAML. Details:\n\n%(err)s\n")
+        print("%(source)s has to be YAML. Details:\n\n%(err)s\n"
               % {"source": src_name, "err": e})
         raise TypeError()
 
     if not isinstance(kw, dict):
         print_invalid_header(src_name, args)
-        print(("%(src)s had to be dict, actually %(src_type)s\n")
+        print("%(src)s had to be dict, actually %(src_type)s\n"
               % {"src": src_name, "src_type": type(kw)})
         raise TypeError()
     return kw
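
six.moves.filter resolves to itertools.ifilter on Python 2 and the built-in filter on Python 3, so the background-scenario loop stays lazy on both. A sketch with hypothetical scenario dicts:

    from six.moves import filter

    scenarios = [{"run_in_background": True}, {"type": "Ping"}]
    background = filter(lambda s: s.get("run_in_background", False),
                        scenarios)
    assert list(background) == [{"run_in_background": True}]
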
index d292ad2..7430485 100644 (file)
@@ -8,6 +8,8 @@
 ##############################################################################
 
 """ Handler for yardstick command 'testcase' """
+from __future__ import absolute_import
+from __future__ import print_function
 import os
 import yaml
 import sys
@@ -22,6 +24,7 @@ class Testcase(object):
 
        Set of commands to discover and display test cases.
     '''
+
     def __init__(self):
         self.test_case_path = YARDSTICK_ROOT_PATH + 'tests/opnfv/test_cases/'
         self.testcase_list = []
@@ -32,7 +35,7 @@ class Testcase(object):
         try:
             testcase_files = os.listdir(self.test_case_path)
         except Exception as e:
-            print(("Failed to list dir:\n%(path)s\n%(err)s\n")
+            print("Failed to list dir:\n%(path)s\n%(err)s\n"
                   % {"path": self.test_case_path, "err": e})
             raise e
         testcase_files.sort()
@@ -52,11 +55,11 @@ class Testcase(object):
             with open(testcase_path) as f:
                 try:
                     testcase_info = f.read()
-                    print testcase_info
+                    print(testcase_info)
 
                 except Exception as e:
-                    print(("Failed to load test cases:"
-                           "\n%(testcase_file)s\n%(err)s\n")
+                    print("Failed to load test cases:"
+                          "\n%(testcase_file)s\n%(err)s\n"
                           % {"testcase_file": testcase_path, "err": e})
                     raise e
         except IOError as ioerror:
@@ -70,8 +73,8 @@ class Testcase(object):
                 try:
                     testcase_info = f.read()
                 except Exception as e:
-                    print(("Failed to load test cases:"
-                           "\n%(testcase_file)s\n%(err)s\n")
+                    print("Failed to load test cases:"
+                          "\n%(testcase_file)s\n%(err)s\n"
                           % {"testcase_file": testcase_file, "err": e})
                     raise e
                 description, installer, deploy_scenarios = \
@@ -107,6 +110,6 @@ class Testcase(object):
         print("| %-21s | %-60s" % ("Testcase Name", "Description"))
         print_hbar(88)
         for testcase_record in testcase_list:
-            print "| %-16s | %-60s" % (testcase_record['Name'],
-                                       testcase_record['Description'])
+            print("| %-16s | %-60s" % (testcase_record['Name'],
+                                       testcase_record['Description']))
         print_hbar(88)
index 69ea915..956c3ff 100755 (executable)
@@ -24,12 +24,17 @@ until the end of the shortest list is reached (optimally all lists should be
 defined with the same number of values when using such iter_type).
 '''
 
-import os
-import multiprocessing
+from __future__ import absolute_import
+
+import itertools
 import logging
-import traceback
+import multiprocessing
+import os
 import time
-import itertools
+import traceback
+
+import six
+from six.moves import range
 
 from yardstick.benchmark.runners import base
 
@@ -71,8 +76,8 @@ def _worker_process(queue, cls, method_name, scenario_cfg,
         return -1 if start > stop else 1
 
     param_iters = \
-        [xrange(d['start'], d['stop'] + margin(d['start'], d['stop']),
-                d['step']) for d in runner_cfg['iterators']]
+        [range(d['start'], d['stop'] + margin(d['start'], d['stop']),
+               d['step']) for d in runner_cfg['iterators']]
     param_names = [d['name'] for d in runner_cfg['iterators']]
 
     iter_type = runner_cfg.get("iter_type", "nested_for_loops")
@@ -82,10 +87,10 @@ def _worker_process(queue, cls, method_name, scenario_cfg,
         loop_iter = itertools.product(*param_iters)
     elif iter_type == 'tuple_loops':
         # Combine each i;th index of respective parameter list
-        loop_iter = itertools.izip(*param_iters)
+        loop_iter = six.moves.zip(*param_iters)
     else:
         LOG.warning("iter_type unrecognized: %s", iter_type)
-        raise
+        raise TypeError("iter_type unrecognized: %s" % iter_type)
 
     # Populate options and run the requested method for each value combination
     for comb_values in loop_iter:
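
A reduced sketch of the two iterator shapes this runner builds, using the lazy range/zip from six.moves (the parameter values are illustrative):

    import itertools
    from six.moves import range, zip

    param_iters = [range(1, 3), range(10, 30, 10)]
    # nested_for_loops: every combination of the parameter lists
    assert list(itertools.product(*param_iters)) == \
        [(1, 10), (1, 20), (2, 10), (2, 20)]
    # tuple_loops: the i-th values of each list combined pairwise
    assert list(zip(*param_iters)) == [(1, 10), (2, 20)]
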
index 8f3f75f..0e02927 100755 (executable)
@@ -16,6 +16,7 @@
 # yardstick comment: this is a modified copy of
 # rally/rally/benchmark/runners/base.py
 
+from __future__ import absolute_import
 import importlib
 import logging
 import multiprocessing
index 1412c0c..89cac7d 100644 (file)
@@ -19,6 +19,7 @@
 '''A runner that runs a specific time before it returns
 '''
 
+from __future__ import absolute_import
 import os
 import multiprocessing
 import logging
index 3a839b6..930f883 100644 (file)
@@ -19,6 +19,7 @@
 '''A runner that runs a configurable number of times before it returns
 '''
 
+from __future__ import absolute_import
 import os
 import multiprocessing
 import logging
index 3b06e2a..e6abeab 100644 (file)
@@ -20,6 +20,7 @@
 The input value in the sequence is specified in a list in the input file.
 '''
 
+from __future__ import absolute_import
 import os
 import multiprocessing
 import logging
index 38f57d4..28c338d 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
 LOG = logging.getLogger(__name__)
index e88fed6..7eb93a8 100644 (file)
@@ -6,11 +6,14 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
-import traceback
 import subprocess
+import traceback
+
 import yardstick.ssh as ssh
-from baseattacker import BaseAttacker
+from yardstick.benchmark.scenarios.availability.attacker.baseattacker import \
+    BaseAttacker
 
 LOG = logging.getLogger(__name__)
 
index 595067a..38a9668 100644 (file)
@@ -6,11 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
-from baseattacker import BaseAttacker
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios.availability import util
+from yardstick.benchmark.scenarios.availability.attacker.baseattacker import \
+    BaseAttacker
 
 LOG = logging.getLogger(__name__)
 
@@ -40,7 +42,7 @@ class GeneralAttacker(BaseAttacker):
             str = util.buildshellparams(actionParameter)
             LOG.debug("inject parameter is: {0}".format(actionParameter))
             LOG.debug("inject parameter values are: {0}"
-                      .format(actionParameter.values()))
+                      .format(list(actionParameter.values())))
             l = list(item for item in actionParameter.values())
             self.action_param = str.format(*l)
 
@@ -49,7 +51,7 @@ class GeneralAttacker(BaseAttacker):
             str = util.buildshellparams(rollbackParameter)
             LOG.debug("recover parameter is: {0}".format(rollbackParameter))
             LOG.debug("recover parameter values are: {0}".
-                      format(rollbackParameter.values()))
+                      format(list(rollbackParameter.values())))
             l = list(item for item in rollbackParameter.values())
             self.rollback_param = str.format(*l)
 
index 1d190a1..521c579 100644 (file)
@@ -6,10 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
-from baseattacker import BaseAttacker
 import yardstick.ssh as ssh
+from yardstick.benchmark.scenarios.availability.attacker.baseattacker import \
+    BaseAttacker
 
 LOG = logging.getLogger(__name__)
 
index f96e577..f5f74f2 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import pkg_resources
 import yaml
 import logging
index 104c683..76fcc0e 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
 from yardstick.benchmark.scenarios.availability.monitor import basemonitor
index 38d1c4e..a11966a 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import pkg_resources
 import logging
 import multiprocessing
@@ -23,6 +24,7 @@ monitor_conf_path = pkg_resources.resource_filename(
 
 class MonitorMgr(object):
     """docstring for MonitorMgr"""
+
     def __init__(self):
         self._monitor_list = []
 
@@ -130,7 +132,7 @@ class BaseMonitor(multiprocessing.Process):
         total_time = end_time - begin_time
 
         self._queue.put({"total_time": total_time,
-                         "outage_time": last_outage-first_outage,
+                         "outage_time": last_outage - first_outage,
                          "total_count": total_count,
                          "outage_count": outage_count})
 
index cd33e61..6ddb73e 100644 (file)
@@ -6,11 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import subprocess
 import traceback
+
 import yardstick.ssh as ssh
-import basemonitor as basemonitor
+from yardstick.benchmark.scenarios.availability.monitor import basemonitor
 
 LOG = logging.getLogger(__name__)
 
index 461a2de..78a6031 100644 (file)
@@ -6,10 +6,11 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import yardstick.ssh as ssh
 
-import basemonitor as basemonitor
+from yardstick.benchmark.scenarios.availability.monitor import basemonitor
 from yardstick.benchmark.scenarios.availability.util import buildshellparams
 
 
index 5f492ad..10b398e 100644 (file)
@@ -6,10 +6,11 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import yardstick.ssh as ssh
 
-import basemonitor as basemonitor
+from yardstick.benchmark.scenarios.availability.monitor import basemonitor
 
 LOG = logging.getLogger(__name__)
 
index 80efd1b..709884b 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import pkg_resources
 import yaml
 import logging
index c82df83..42d70f4 100644 (file)
@@ -6,8 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
-from baseoperation import BaseOperation
+
+from yardstick.benchmark.scenarios.availability.operation.baseoperation \
+    import BaseOperation
+
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios.availability.util import buildshellparams
 
index a24f26e..70e0040 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import pkg_resources
 import yaml
 import logging
@@ -46,7 +47,7 @@ class ResultCheckerMgr(object):
     def verify(self):
         result = True
         for obj in self._result_checker_list:
-                result &= obj.success
+            result &= obj.success
         return result
 
 
index 275aff0..75c433a 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
-from baseresultchecker import BaseResultChecker
+
+from yardstick.benchmark.scenarios.availability.result_checker \
+    .baseresultchecker import BaseResultChecker
 from yardstick.benchmark.scenarios.availability import Condition
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios.availability.util import buildshellparams
index b064c67..2d7ce66 100644 (file)
@@ -6,8 +6,8 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
-import traceback
 
 from yardstick.benchmark.scenarios import base
 from yardstick.benchmark.scenarios.availability.director import Director
@@ -34,8 +34,8 @@ class ScenarioGeneral(base.Scenario):
         orderedSteps = sorted(steps, key=lambda x: x['index'])
         for step in orderedSteps:
             LOG.debug(
-                "\033[94m running step: {0} .... \033[0m"
-                .format(orderedSteps.index(step)+1))
+                "\033[94m running step: %s .... \033[0m",
+                orderedSteps.index(step) + 1)
             try:
                 actionPlayer = self.director.createActionPlayer(
                     step['actionType'], step['actionKey'])
@@ -44,9 +44,8 @@ class ScenarioGeneral(base.Scenario):
                     step['actionType'], step['actionKey'])
                 if actionRollbacker:
                     self.director.executionSteps.append(actionRollbacker)
-            except Exception, e:
-                LOG.debug(e.message)
-                traceback.print_exc()
+            except Exception:
+                LOG.exception("Exception")
                 LOG.debug(
                     "\033[91m exception when running step: {0} .... \033[0m"
                     .format(orderedSteps.index(step)))
index 46a197c..b981c8c 100755 (executable)
@@ -6,6 +6,8 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import print_function
+from __future__ import absolute_import
 import logging
 from yardstick.benchmark.scenarios import base
 from yardstick.benchmark.scenarios.availability.monitor import basemonitor
@@ -109,15 +111,16 @@ def _test():    # pragma: no cover
     sla = {"outage_time": 5}
     args = {"options": options, "sla": sla}
 
-    print "create instance"
+    print("create instance")
     testInstance = ServiceHA(args, ctx)
 
     testInstance.setup()
     result = {}
     testInstance.run(result)
-    print result
+    print(result)
 
     testInstance.teardown()
 
+
 if __name__ == '__main__':    # pragma: no cover
     _test()
index 33efbcb..5f5c07d 100644 (file)
@@ -19,6 +19,7 @@
 """ Scenario base class
 """
 
+from __future__ import absolute_import
 import yardstick.common.utils as utils
 
 
index 20786ff..0f60d46 100644 (file)
@@ -9,12 +9,14 @@
 
 """cache hit/miss ratio and usage statistics"""
 
+from __future__ import absolute_import
 import pkg_resources
 import logging
 import re
 import yardstick.ssh as ssh
 
 from yardstick.benchmark.scenarios import base
+from six.moves import zip
 
 LOG = logging.getLogger(__name__)
 
@@ -120,7 +122,7 @@ class CACHEstat(base.Scenario):
                 ite += 1
                 values = line[:]
                 if values and len(values) == len(fields):
-                    cachestat[cache] = dict(zip(fields, values))
+                    cachestat[cache] = dict(list(zip(fields, values)))
 
         for entry in cachestat:
             for item in average:
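
A note on the dict(list(zip(...))) idiom the automated port introduced here: dict() accepts any iterable of pairs, so the inner list() is harmless but not strictly required on Python 3. A sketch with illustrative values:

    fields = ["HITS", "MISSES"]
    values = ["6851", "0"]
    assert dict(zip(fields, values)) == dict(list(zip(fields, values)))
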
index 7f0c58d..9d518f7 100644 (file)
@@ -6,9 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -66,4 +69,4 @@ class ComputeCapacity(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
index 9d71038..121d5a7 100644 (file)
@@ -9,13 +9,16 @@
 
 """Processor statistics and system load."""
 
+from __future__ import absolute_import
+
 import logging
-import time
 import re
-import yardstick.ssh as ssh
+import time
 
-from yardstick.benchmark.scenarios import base
+from six.moves import map, zip
 
+import yardstick.ssh as ssh
+from yardstick.benchmark.scenarios import base
 
 LOG = logging.getLogger(__name__)
 
@@ -145,7 +148,7 @@ class CPULoad(base.Scenario):
                     cpu = 'cpu' if line[0] == 'all' else 'cpu' + line[0]
                     values = line[1:]
                     if values and len(values) == len(fields):
-                        temp_dict = dict(zip(fields, values))
+                        temp_dict = dict(list(zip(fields, values)))
                         if cpu not in maximum:
                             maximum[cpu] = temp_dict
                         else:
@@ -177,7 +180,7 @@ class CPULoad(base.Scenario):
                     cpu = 'cpu' if line[0] == 'all' else 'cpu' + line[0]
                     values = line[1:]
                     if values and len(values) == len(fields):
-                        average[cpu] = dict(zip(fields, values))
+                        average[cpu] = dict(list(zip(fields, values)))
                     else:
                         raise RuntimeError("mpstat average: parse error",
                                            fields, line)
@@ -210,9 +213,9 @@ class CPULoad(base.Scenario):
 
             cpu = cur_list[0]
 
-            cur_stats = map(int, cur_list[1:])
+            cur_stats = list(map(int, cur_list[1:]))
             if self.interval > 0:
-                prev_stats = map(int, prev_list[1:])
+                prev_stats = list(map(int, prev_list[1:]))
             else:
                 prev_stats = [0] * len(cur_stats)
 
@@ -236,9 +239,9 @@ class CPULoad(base.Scenario):
                 else:
                     return "%.2f" % (100.0 * (x - y) / samples)
 
-            load = map(_percent, cur_stats, prev_stats)
+            load = list(map(_percent, cur_stats, prev_stats))
 
-            mpstat[cpu] = dict(zip(fields, load))
+            mpstat[cpu] = dict(list(zip(fields, load)))
 
         return {'mpstat': mpstat}
 
@@ -278,7 +281,7 @@ class CPULoad(base.Scenario):
 #     p = CPULoad(args, ctx)
 #     p.run(result)
 #     import json
-#     print json.dumps(result)
+#     print(oslo_serialization.jsonutils.dump_as_bytes(result))
 
 # if __name__ == '__main__':
 #     _test()
index 568e6e7..76bafff 100644 (file)
@@ -6,12 +6,16 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+import os
 import re
 import time
-import os
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -183,7 +187,7 @@ class Cyclictest(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if "sla" in self.scenario_cfg:
             sla_error = ""
@@ -236,7 +240,8 @@ def _test():    # pragma: no cover
 
     cyclictest = Cyclictest(args, ctx)
     cyclictest.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':    # pragma: no cover
     _test()
index 518840c..6a17ae8 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -130,9 +134,10 @@ class Lmbench(base.Scenario):
             raise RuntimeError(stderr)
 
         if test_type == 'latency':
-            result.update({"latencies": json.loads(stdout)})
+            result.update(
+                {"latencies": jsonutils.loads(stdout)})
         else:
-            result.update(json.loads(stdout))
+            result.update(jsonutils.loads(stdout))
 
         if "sla" in self.scenario_cfg:
             sla_error = ""
@@ -185,7 +190,8 @@ def _test():
 
     p = Lmbench(args, ctx)
     p.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
index e1ba93d..35528d4 100644 (file)
@@ -9,10 +9,12 @@
 
 """Memory load and statistics."""
 
+from __future__ import absolute_import
 import logging
 import yardstick.ssh as ssh
 
 from yardstick.benchmark.scenarios import base
+from six.moves import zip
 
 LOG = logging.getLogger(__name__)
 
@@ -88,7 +90,7 @@ class MEMLoad(base.Scenario):
                 ite += 1
                 values = line[1:]
                 if values and len(values) == len(fields):
-                    free[memory] = dict(zip(fields, values))
+                    free[memory] = dict(list(zip(fields, values)))
 
         for entry in free:
             for item in average:
index 8f1a4d6..ae49906 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -100,7 +104,7 @@ class Perf(base.Scenario):
         if status:
             raise RuntimeError(stdout)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if "sla" in self.scenario_cfg:
             metric = self.scenario_cfg['sla']['metric']
@@ -140,7 +144,8 @@ def _test():
 
     p = Perf(args, ctx)
     p.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
index e7ec91c..c9d0259 100644 (file)
@@ -6,8 +6,11 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
 import logging
-import json
+
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -53,4 +56,4 @@ class PluginTest(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
index db70af9..4330202 100644 (file)
@@ -6,9 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -131,7 +134,7 @@ class Ramspeed(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if "sla" in self.scenario_cfg:
             sla_error = ""
index b22be29..4a2eb97 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -115,7 +119,7 @@ class Unixbench(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if "sla" in self.scenario_cfg:
             sla_error = ""
@@ -152,7 +156,7 @@ def _test():  # pragma: no cover
 
     p = Unixbench(args, ctx)
     p.run(result)
-    print result
+    print(result)
 
 
 if __name__ == '__main__':
index de6742c..95146e0 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 
 from yardstick.benchmark.scenarios import base
index 13fa015..b8ec9ac 100644 (file)
 # iperf3 scenario
 # iperf3 homepage at: http://software.es.net/iperf/
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
 import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -89,7 +93,7 @@ For more info see http://software.es.net/iperf
         self.host.close()
         status, stdout, stderr = self.target.execute("pkill iperf3")
         if status:
-            LOG.warn(stderr)
+            LOG.warning(stderr)
         self.target.close()
 
     def run(self, result):
@@ -138,7 +142,8 @@ For more info see http://software.es.net/iperf
         # Note: convert all ints to floats in order to avoid
         # schema conflicts in influxdb. We probably should add
         # a format func in the future.
-        result.update(json.loads(stdout, parse_int=float))
+        result.update(
+            jsonutils.loads(stdout, parse_int=float))
 
         if "sla" in self.scenario_cfg:
             sla_iperf = self.scenario_cfg["sla"]
@@ -188,7 +193,8 @@ def _test():
 
     p = Iperf(args, ctx)
     p.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
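
A note on the parse_int=float change above: oslo_serialization's
jsonutils.loads() forwards extra keyword arguments to the underlying
json.loads(), so the int-to-float coercion carries over unchanged. A
minimal standalone sketch (not yardstick code):

    from oslo_serialization import jsonutils

    raw = '{"bytes": 1024, "seconds": 10}'
    # every integer literal is decoded through float(), keeping the
    # InfluxDB field type stable across samples
    data = jsonutils.loads(raw, parse_int=float)
    assert data == {"bytes": 1024.0, "seconds": 10.0}
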
index 28f5bea..80dbed3 100755 (executable)
@@ -7,9 +7,13 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 # bulk data test and req/rsp test are supported
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -129,7 +133,7 @@ class Netperf(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if result['mean_latency'] == '':
             raise RuntimeError(stdout)
@@ -175,7 +179,7 @@ def _test():
 
     netperf = Netperf(args, ctx)
     netperf.run(result)
-    print result
+    print(result)
 
 
 if __name__ == '__main__':
index a76982b..0cf52b8 100755 (executable)
@@ -7,9 +7,13 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 # bulk data test and req/rsp test are supported
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -152,7 +156,7 @@ class NetperfNode(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         if result['mean_latency'] == '':
             raise RuntimeError(stdout)
@@ -200,7 +204,8 @@ def _test():    # pragma: no cover
 
     netperf = NetperfNode(args, ctx)
     netperf.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
index 1ea92cc..1ba6f1e 100644 (file)
@@ -6,11 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import logging
 import re
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
+from six.moves import zip
 
 LOG = logging.getLogger(__name__)
 
@@ -121,7 +123,7 @@ class NetUtilization(base.Scenario):
                     values = line[1:]
 
                     if values and len(values) == len(fields):
-                        temp_dict = dict(zip(fields, values))
+                        temp_dict = dict(list(zip(fields, values)))
                         if net_interface not in maximum:
                             maximum[net_interface] = temp_dict
                         else:
@@ -158,7 +160,8 @@ class NetUtilization(base.Scenario):
                     net_interface = line[0]
                     values = line[1:]
                     if values and len(values) == len(fields):
-                        average[net_interface] = dict(zip(fields, values))
+                        average[net_interface] = dict(
+                            list(zip(fields, values)))
                     else:
                         raise RuntimeError("network_utilization average: \
                                            parse error", fields, line)
index 250f7ea..e7ce835 100644 (file)
@@ -6,9 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -67,4 +70,4 @@ class NetworkCapacity(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
index 6e49a14..eb173f1 100644 (file)
@@ -9,6 +9,8 @@
 
 # ping scenario
 
+from __future__ import print_function
+from __future__ import absolute_import
 import pkg_resources
 import logging
 
@@ -122,7 +124,8 @@ def _test():    # pragma: no cover
 
     p = Ping(args, ctx)
     p.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':    # pragma: no cover
     _test()
index f4d23ce..dd42722 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import pkg_resources
 import logging
 
index e2df706..69663ec 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -130,7 +134,7 @@ class Pktgen(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        result.update(json.loads(stdout))
+        result.update(jsonutils.loads(stdout))
 
         result['packets_received'] = self._iptables_get_result()
 
@@ -170,7 +174,7 @@ def _test():
 
     p = Pktgen(args, ctx)
     p.run(result)
-    print result
+    print(result)
 
 
 if __name__ == '__main__':
index 503ea97..2bdb91a 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import pkg_resources
 import logging
 import time
index 1bd99b9..87fea4f 100644 (file)
@@ -1,9 +1,14 @@
-import pkg_resources
+from __future__ import absolute_import
+
 import logging
 import subprocess
-import sfc_openstack
+
+import pkg_resources
+from six.moves import range
+
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
+from yardstick.benchmark.scenarios.networking import sfc_openstack
 
 LOG = logging.getLogger(__name__)
 
@@ -199,7 +204,7 @@ class Sfc(base.Scenario):  # pragma: no cover
     sfc = Sfc(scenario_cfg, context_cfg)
     sfc.setup()
     sfc.run(result)
-    print result
+    print(result)
     sfc.teardown()
 
 if __name__ == '__main__':  # pragma: no cover
index d1d45d8..caaf100 100644 (file)
@@ -1,3 +1,5 @@
+from __future__ import print_function
+from __future__ import absolute_import
 import os
 from novaclient import client as novaclient
 from neutronclient.v2_0 import client as neutronclient
@@ -40,8 +42,8 @@ def get_credentials(service):  # pragma: no cover
                       "ca_file": cacert})
         creds.update({"insecure": "True", "https_insecure": "True"})
         if not os.path.isfile(cacert):
-            print ("WARNING: The 'OS_CACERT' environment variable is " +
-                   "set to %s but the file does not exist." % cacert)
+            print(("WARNING: The 'OS_CACERT' environment variable is " +
+                   "set to %s but the file does not exist." % cacert))
     return creds
 
 
@@ -49,8 +51,8 @@ def get_instances(nova_client):  # pragma: no cover
     try:
         instances = nova_client.servers.list(search_opts={'all_tenants': 1})
         return instances
-    except Exception, e:
-        print "Error [get_instances(nova_client)]:", e
+    except Exception as e:
+        print("Error [get_instances(nova_client)]:", e)
         return None
 
 
@@ -62,8 +64,8 @@ def get_SFs(nova_client):  # pragma: no cover
             if "sfc_test" not in instance.name:
                 SFs.append(instance)
         return SFs
-    except Exception, e:
-        print "Error [get_SFs(nova_client)]:", e
+    except Exception as e:
+        print("Error [get_SFs(nova_client)]:", e)
         return None
 
 
@@ -83,8 +85,8 @@ def create_floating_ips(neutron_client):  # pragma: no cover
             ip_json = neutron_client.create_floatingip({'floatingip': props})
             fip_addr = ip_json['floatingip']['floating_ip_address']
             ips.append(fip_addr)
-    except Exception, e:
-        print "Error [create_floating_ip(neutron_client)]:", e
+    except Exception as e:
+        print("Error [create_floating_ip(neutron_client)]:", e)
         return None
     return ips
 
@@ -96,9 +98,9 @@ def floatIPtoSFs(SFs, floatips):  # pragma: no cover
             SF.add_floating_ip(floatips[i])
             i = i + 1
         return True
-    except Exception, e:
-        print ("Error [add_floating_ip(nova_client, '%s', '%s')]:" %
-               (SF, floatips[i]), e)
+    except Exception as e:
+        print(("Error [add_floating_ip(nova_client, '%s', '%s')]:" %
+               (SF, floatips[i]), e))
         return False
 
 
@@ -113,5 +115,6 @@ def get_an_IP():  # pragma: no cover
     floatIPtoSFs(SFs, floatips)
     return floatips
 
+
 if __name__ == '__main__':  # pragma: no cover
     get_an_IP()
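
The except-clause rewrites above are mandatory rather than stylistic:
"except Exception, e:" is a SyntaxError on Python 3, while the
"except ... as ..." form works on Python 2.6+ and 3.x alike. A minimal
sketch:

    try:
        1 / 0
    except ZeroDivisionError as e:  # valid on both Python 2.6+ and 3.x
        print("caught:", e)
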
index 4f4ef21..9d6db7c 100644 (file)
@@ -13,6 +13,7 @@
 # limitations under the License.
 """ Vsperf specific scenario definition """
 
+from __future__ import absolute_import
 import logging
 import os
 import subprocess
@@ -211,7 +212,7 @@ class Vsperf(base.Scenario):
 
         # convert result.csv to JSON format
         reader = csv.DictReader(stdout.split('\r\n'))
-        result.update(reader.next())
+        result.update(next(reader))
 
         # sla check; go through all defined SLAs and check if values measured
         # by VSPERF are higher then those defined by SLAs
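
On the reader.next() change above: Python 3 renamed the iterator method
to __next__(), so explicit .next() calls break; the builtin next() works
on both versions. A minimal sketch with a made-up two-column CSV:

    import csv

    reader = csv.DictReader("a,b\r\n1,2".split('\r\n'))
    first = next(reader)            # portable across Python 2 and 3
    assert first['a'] == '1' and first['b'] == '2'
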
index bec23fc..bf42d9a 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 import os
 
@@ -81,7 +82,7 @@ class VtcInstantiationValidation(base.Scenario):
                 heat_template_parameters,
                 deployment_configuration,
                 openstack_credentials)
-        except Exception as e:
-            LOG.info('Exception: {}'.format(e.message))
-        LOG.info('Got output: {}'.format(res))
+        except Exception:
+            LOG.exception('Exception')
+        LOG.info('Got output: %s', res)
         result.update(res)
index 8d9bf09..fb6e762 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 import os
 
@@ -92,7 +93,7 @@ class VtcInstantiationValidationNoisy(base.Scenario):
                 heat_template_parameters,
                 deployment_configuration,
                 openstack_credentials)
-        except Exception as e:
-            LOG.info('Exception: {}'.format(e.message))
-        LOG.info('Got output: {}'.format(res))
+        except Exception:
+            LOG.exception('Exception')
+        LOG.info('Got output: %s', res)
         result.update(res)
index ff20279..0754d37 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 import os
 
@@ -81,7 +82,7 @@ class VtcThroughput(base.Scenario):
                 heat_template_parameters,
                 deployment_configuration,
                 openstack_credentials)
-        except Exception as e:
-            LOG.info('Exception: {}'.format(e.message))
-        LOG.info('Got output: {}'.format(res))
+        except Exception:
+            LOG.exception("Exception")
+        LOG.info('Got output: %s', res)
         result.update(res)
index f032267..552ef80 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import logging
 import os
 
@@ -91,7 +92,7 @@ class VtcThroughputNoisy(base.Scenario):
                 heat_template_parameters,
                 deployment_configuration,
                 openstack_credentials)
-        except Exception as e:
-            LOG.info('Exception: {}'.format(e.message))
-        LOG.info('Got output: {}'.format(res))
+        except Exception:
+            LOG.exception('Exception')
+        LOG.info('Got output: %s', res)
         result.update(res)
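
The recurring except-block rewrite in the vTC scenarios reflects two
Python 3 changes: the ad-hoc Exception.message attribute is gone, and
log calls should use lazy %-formatting rather than str.format(). A
minimal sketch, with do_work() as a hypothetical stand-in for the vTC
API call:

    import logging

    LOG = logging.getLogger(__name__)

    def do_work():                   # hypothetical workload
        return {'throughput': 1.0}

    def run(result):
        res = {}
        try:
            res = do_work()
        except Exception:
            # records the message and the full traceback; no e.message
            LOG.exception('Exception')
        LOG.info('Got output: %s', res)
        result.update(res)
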
index bb16e7c..6d39733 100644 (file)
@@ -6,6 +6,8 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import print_function
+from __future__ import absolute_import
 import pkg_resources
 import logging
 import subprocess
@@ -61,7 +63,7 @@ class Parser(base.Scenario):
         p = subprocess.Popen(cmd1, shell=True, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
         p.communicate()
-        print "yangtotosca finished"
+        print("yangtotosca finished")
 
         result['yangtotosca'] = "success" if p.returncode == 0 else "fail"
 
@@ -78,5 +80,6 @@ def _test():
     '''internal test function'''
     pass
 
+
 if __name__ == '__main__':
     _test()
index 4e00423..2a8738e 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+from __future__ import print_function
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -114,7 +118,7 @@ class Fio(base.Scenario):
         if status:
             raise RuntimeError(stderr)
 
-        raw_data = json.loads(stdout)
+        raw_data = jsonutils.loads(stdout)
 
         # The bandwidth unit is KB/s, and latency unit is us
         if rw in ["read", "randread", "rw", "randrw"]:
@@ -175,7 +179,8 @@ def _test():
 
     fio = Fio(args, ctx)
     fio.run(result)
-    print result
+    print(result)
+
 
 if __name__ == '__main__':
     _test()
index bf5bc28..c437f22 100644 (file)
@@ -6,9 +6,13 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import pkg_resources
+from __future__ import absolute_import
+
 import logging
-import json
+
+import pkg_resources
+from oslo_serialization import jsonutils
+from six.moves import range
 
 import yardstick.ssh as ssh
 from yardstick.benchmark.scenarios import base
@@ -131,4 +135,4 @@ class StorageCapacity(base.Scenario):
             if status:
                 raise RuntimeError(stderr)
 
-            result.update(json.loads(stdout))
+            result.update(jsonutils.loads(stdout))
index 72ceff7..6ea0351 100644 (file)
@@ -6,11 +6,14 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
+
 import logging
-import json
-import requests
 import time
 
+import requests
+from oslo_serialization import jsonutils
+
 from yardstick.benchmark.scenarios import base
 
 LOG = logging.getLogger(__name__)
@@ -73,7 +76,8 @@ class StorPerf(base.Scenario):
         setup_query = requests.get('http://%s:5000/api/v1.0/configurations'
                                    % self.target)
 
-        setup_query_content = json.loads(setup_query.content)
+        setup_query_content = jsonutils.loads(
+            setup_query.content)
         if setup_query_content["stack_created"]:
             self.setup_done = True
             LOG.debug("stack_created: %s",
@@ -96,7 +100,8 @@ class StorPerf(base.Scenario):
         setup_res = requests.post('http://%s:5000/api/v1.0/configurations'
                                   % self.target, json=env_args)
 
-        setup_res_content = json.loads(setup_res.content)
+        setup_res_content = jsonutils.loads(
+            setup_res.content)
 
         if setup_res.status_code != 200:
             raise RuntimeError("Failed to create a stack, error message:",
@@ -114,7 +119,8 @@ class StorPerf(base.Scenario):
         report_res = requests.get('http://{}:5000/api/v1.0/jobs'.format
                                   (self.target), params={'id': job_id})
 
-        report_res_content = json.loads(report_res.content)
+        report_res_content = jsonutils.loads(
+            report_res.content)
 
         if report_res.status_code != 200:
             raise RuntimeError("Failed to fetch report, error message:",
@@ -154,7 +160,7 @@ class StorPerf(base.Scenario):
         job_res = requests.post('http://%s:5000/api/v1.0/jobs' % self.target,
                                 json=job_args)
 
-        job_res_content = json.loads(job_res.content)
+        job_res_content = jsonutils.loads(job_res.content)
 
         if job_res.status_code != 200:
             raise RuntimeError("Failed to start a job, error message:",
@@ -171,7 +177,8 @@ class StorPerf(base.Scenario):
                                             self.target)
 
             if terminate_res.status_code != 200:
-                terminate_res_content = json.loads(terminate_res.content)
+                terminate_res_content = jsonutils.loads(
+                    terminate_res.content)
                 raise RuntimeError("Failed to start a job, error message:",
                                    terminate_res_content["message"])
 
@@ -190,7 +197,8 @@ class StorPerf(base.Scenario):
 
             result_res = requests.get('http://%s:5000/api/v1.0/jobs?id=%s' %
                                       (self.target, job_id))
-            result_res_content = json.loads(result_res.content)
+            result_res_content = jsonutils.loads(
+                result_res.content)
 
             result.update(result_res_content)
 
@@ -200,7 +208,8 @@ class StorPerf(base.Scenario):
                                        configurations' % self.target)
 
         if teardown_res.status_code == 400:
-            teardown_res_content = json.loads(teardown_res.content)
+            teardown_res_content = jsonutils.loads(
+                teardown_res.content)
             raise RuntimeError("Failed to reset environment, error message:",
                                teardown_res_content['message'])
 
index df891e3..6b7d657 100644 (file)
@@ -6,10 +6,11 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import print_function
 
 
 def print_hbar(barlen):
     '''print to stdout a horizontal bar'''
-    print("+"),
-    print("-" * barlen),
+    print(("+"), end=' ')
+    print(("-" * barlen), end=' ')
     print("+")
index beaa187..05bf8f0 100644 (file)
@@ -11,6 +11,7 @@
 Command-line interface to yardstick
 '''
 
+from __future__ import absolute_import
 import logging
 import os
 import sys
index ba229d4..5c53567 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from yardstick.benchmark.core import Param
 
 
index d0fc75d..dfcb637 100644 (file)
@@ -6,13 +6,17 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 from __future__ import print_function
-import time
+
 import os
 import sys
+import time
+
+from six.moves import range
 
-from yardstick.common.httpClient import HttpClient
 from yardstick.common import constants as consts
+from yardstick.common.httpClient import HttpClient
 
 
 class EnvCommand(object):
@@ -20,6 +24,7 @@ class EnvCommand(object):
 
         Set of commands to prepare environment
     '''
+
     def do_influxdb(self, args):
         data = {'action': 'createInfluxDBContainer'}
         task_id = self._start_async_task(data)
@@ -52,7 +57,7 @@ class EnvCommand(object):
         CHECK_STATUS_RETRY = 20
         CHECK_STATUS_DELAY = 5
 
-        for retry in xrange(CHECK_STATUS_RETRY):
+        for retry in range(CHECK_STATUS_RETRY):
             response = HttpClient().get(url)
             status = response['status']
 
index 9409566..c3e951e 100644 (file)
@@ -9,6 +9,9 @@
 
 """ Handler for yardstick command 'plugin' """
 
+from __future__ import print_function
+
+from __future__ import absolute_import
 from yardstick.benchmark.core.plugin import Plugin
 from yardstick.common.utils import cliargs
 from yardstick.cmd.commands import change_osloobj_to_paras
index 62a2082..02176ab 100644 (file)
@@ -9,6 +9,9 @@
 
 """ Handler for yardstick command 'runner' """
 
+from __future__ import print_function
+
+from __future__ import absolute_import
 from yardstick.benchmark.core.runner import Runners
 from yardstick.common.utils import cliargs
 from yardstick.cmd.commands import change_osloobj_to_paras
index 6aa3a45..5a6d04f 100644 (file)
@@ -9,6 +9,8 @@
 
 """ Handler for yardstick command 'scenario' """
 
+from __future__ import print_function
+from __future__ import absolute_import
 from yardstick.benchmark.core.scenario import Scenarios
 from yardstick.common.utils import cliargs
 from yardstick.cmd.commands import change_osloobj_to_paras
index bd018bc..fa82f07 100644 (file)
@@ -8,11 +8,13 @@
 ##############################################################################
 
 """ Handler for yardstick command 'task' """
+from __future__ import print_function
+
+from __future__ import absolute_import
 from yardstick.benchmark.core.task import Task
 from yardstick.common.utils import cliargs
 from yardstick.cmd.commands import change_osloobj_to_paras
 
-
 output_file_default = "/tmp/yardstick.out"
 
 
index 6ff7962..92831ad 100644 (file)
@@ -8,6 +8,9 @@
 ##############################################################################
 
 """ Handler for yardstick command 'testcase' """
+from __future__ import print_function
+
+from __future__ import absolute_import
 from yardstick.benchmark.core.testcase import Testcase
 from yardstick.common.utils import cliargs
 from yardstick.cmd.commands import change_osloobj_to_paras
index 174d39b..d99e216 100644 (file)
@@ -6,6 +6,7 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+from __future__ import absolute_import
 import os
 
 DOCKER_URL = 'unix://var/run/docker.sock'
index 6acd030..11c2d75 100644 (file)
@@ -6,9 +6,11 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-import json
+from __future__ import absolute_import
+
 import logging
 
+from oslo_serialization import jsonutils
 import requests
 
 logger = logging.getLogger(__name__)
@@ -17,7 +19,7 @@ logger = logging.getLogger(__name__)
 class HttpClient(object):
 
     def post(self, url, data):
-        data = json.dumps(data)
+        data = jsonutils.dump_as_bytes(data)
         headers = {'Content-Type': 'application/json'}
         try:
             response = requests.post(url, data=data, headers=headers)
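
jsonutils.dump_as_bytes() above returns UTF-8 encoded bytes on both
Python versions, which is exactly what an HTTP body is, so the payload
needs no further encoding before being handed to requests. A minimal
sketch:

    from oslo_serialization import jsonutils

    payload = jsonutils.dump_as_bytes({'action': 'createInfluxDBContainer'})
    assert isinstance(payload, bytes)   # same type on Python 2 and 3
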
index d8dc61e..e351d16 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 import os
 import logging
 
index 2739323..bda8a1b 100755 (executable)
@@ -7,12 +7,14 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 # yardstick: this file is copied from rally and slightly modified
 ##############################################################################
+from __future__ import absolute_import
 import re
 import jinja2
 import jinja2.meta
 
 
 class TaskTemplate(object):
+
     @classmethod
     def render(cls, task_template, **kwargs):
         """Render jinja2 task template to Yardstick input task.
index 2deaf39..2432c5d 100644 (file)
 
 # yardstick: this file is copied from python-heatclient and slightly modified
 
-import json
+from __future__ import absolute_import
+
 import yaml
+from oslo_serialization import jsonutils
 
 if hasattr(yaml, 'CSafeLoader'):
     yaml_loader = yaml.CSafeLoader
@@ -46,7 +48,7 @@ def parse(tmpl_str):
     JSON or YAML format.
     '''
     if tmpl_str.startswith('{'):
-        tpl = json.loads(tmpl_str)
+        tpl = jsonutils.loads(tmpl_str)
     else:
         try:
             tpl = yaml.load(tmpl_str, Loader=yaml_loader)
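
The parse() dispatch above keeps its shape: a leading '{' marks a JSON
Heat template, anything else falls through to YAML. A minimal sketch of
the same dispatch, using yaml.safe_load in place of the module's tuned
loader:

    from oslo_serialization import jsonutils
    import yaml

    def parse_any(tmpl_str):
        if tmpl_str.startswith('{'):
            return jsonutils.loads(tmpl_str)
        return yaml.safe_load(tmpl_str)

    assert parse_any('{"a": 1}') == parse_any('a: 1') == {'a': 1}
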
index 3ecb0ae..57ace14 100644 (file)
 
 # yardstick comment: this is a modified copy of rally/rally/common/utils.py
 
-import os
-import sys
-import yaml
+from __future__ import absolute_import
+from __future__ import print_function
+
 import errno
-import subprocess
 import logging
+import os
+import subprocess
+import sys
+from functools import reduce
 
-from oslo_utils import importutils
+import yaml
 from keystoneauth1 import identity
 from keystoneauth1 import session
 from neutronclient.v2_0 import client
+from oslo_utils import importutils
 
 import yardstick
 
@@ -94,12 +98,12 @@ def get_para_from_yaml(file_path, args):
             value = reduce(func, args.split('.'), value)
 
             if value is None:
-                print 'parameter not found'
+                print('parameter not found')
                 return None
 
             return value
     else:
-        print 'file not exist'
+        print('file not exist')
         return None
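
The "from functools import reduce" added above is required because the
reduce() builtin was removed in Python 3; importing it from functools
works on both versions. A minimal sketch of the dotted-path walk that
get_para_from_yaml() performs, over a made-up dict and with a
hypothetical stand-in for its lookup function:

    from functools import reduce

    data = {'a': {'b': {'c': 42}}}
    value = reduce(lambda d, k: d.get(k) if d else None,
                   'a.b.c'.split('.'), data)
    assert value == 42
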
 
 
index 300a78e..d4afac6 100644 (file)
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import os
 
 dirname = os.path.dirname
index b519efc..dfb1307 100644 (file)
@@ -7,6 +7,7 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
+from __future__ import absolute_import
 from oslo_config import cfg
 
 import yardstick.common.utils as utils
index ffdddb0..6e863ca 100644 (file)
@@ -16,6 +16,7 @@
 # yardstick comment: this is a modified copy of
 # ceilometer/ceilometer/dispatcher/__init__.py
 
+from __future__ import absolute_import
 import abc
 import six
 
index c2cc265..9c728e9 100644 (file)
 # yardstick comment: this is a modified copy of
 # ceilometer/ceilometer/dispatcher/file.py
 
+from __future__ import absolute_import
+
 import logging
 import logging.handlers
-import json
 
+from oslo_serialization import jsonutils
 from oslo_config import cfg
 
 from yardstick.dispatcher.base import Base as DispatchBase
@@ -70,7 +72,7 @@ class FileDispatcher(DispatchBase):
 
     def record_result_data(self, data):
         if self.log:
-            self.log.info(json.dumps(data))
+            self.log.info(jsonutils.dump_as_bytes(data))
 
     def flush_result_data(self):
         pass
index 98e772d..7900861 100644 (file)
 # yardstick comment: this is a modified copy of
 # ceilometer/ceilometer/dispatcher/http.py
 
-import os
-import json
+from __future__ import absolute_import
+
 import logging
-import requests
+import os
 
+from oslo_serialization import jsonutils
+import requests
 from oslo_config import cfg
 
 from yardstick.dispatcher.base import Base as DispatchBase
@@ -81,16 +83,18 @@ class HttpDispatcher(DispatchBase):
                 case_name = v["scenario_cfg"]["tc"]
                 break
         if case_name == "":
-            LOG.error('Test result : %s', json.dumps(self.result))
+            LOG.error('Test result : %s',
+                      jsonutils.dump_as_bytes(self.result))
             LOG.error('The case_name cannot be found, no data will be posted.')
             return
 
         self.result["case_name"] = case_name
 
         try:
-            LOG.debug('Test result : %s', json.dumps(self.result))
+            LOG.debug('Test result : %s',
+                      jsonutils.dump_as_bytes(self.result))
             res = requests.post(self.target,
-                                data=json.dumps(self.result),
+                                data=jsonutils.dump_as_bytes(self.result),
                                 headers=self.headers,
                                 timeout=self.timeout)
             LOG.debug('Test result posting finished with status code'
index fc9f3e9..427e669 100644 (file)
@@ -7,16 +7,19 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 
-import os
-import json
+from __future__ import absolute_import
+
 import logging
-import requests
+import os
 import time
 
+import requests
+import six
 from oslo_config import cfg
+from oslo_serialization import jsonutils
 
-from yardstick.dispatcher.base import Base as DispatchBase
 from third_party.influxdb.influxdb_line_protocol import make_lines
+from yardstick.dispatcher.base import Base as DispatchBase
 
 LOG = logging.getLogger(__name__)
 
@@ -80,9 +83,9 @@ class InfluxdbDispatcher(DispatchBase):
                 if type(v) == dict or type(v) == list]:
             return data
 
-        for k, v in data.iteritems():
+        for k, v in six.iteritems(data):
             if type(v) == dict:
-                for n_k, n_v in v.iteritems():
+                for n_k, n_v in six.iteritems(v):
                     next_data["%s.%s" % (k, n_k)] = n_v
             elif type(v) == list:
                 for index, item in enumerate(v):
@@ -127,7 +130,7 @@ class InfluxdbDispatcher(DispatchBase):
         return make_lines(msg).encode('utf-8')
 
     def record_result_data(self, data):
-        LOG.debug('Test result : %s', json.dumps(data))
+        LOG.debug('Test result : %s', jsonutils.dump_as_bytes(data))
         self.raw_result.append(data)
         if self.target == '':
             # if the target was not set, do not do anything
@@ -148,7 +151,7 @@ class InfluxdbDispatcher(DispatchBase):
             return 0
 
         if self.tc == "":
-            LOG.error('Test result : %s', json.dumps(data))
+            LOG.error('Test result : %s', jsonutils.dump_as_bytes(data))
             LOG.error('The case_name cannot be found, no data will be posted.')
             return -1
 
@@ -171,5 +174,6 @@ class InfluxdbDispatcher(DispatchBase):
         return 0
 
     def flush_result_data(self):
-        LOG.debug('Test result all : %s', json.dumps(self.raw_result))
+        LOG.debug('Test result all : %s',
+                  jsonutils.dump_as_bytes(self.raw_result))
         return 0
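
On six.iteritems() above: Python 3 dropped dict.iteritems(), and plain
.items() is already a cheap view there, so six.iteritems() exists mainly
for code that must also run unchanged on Python 2. A minimal sketch of
the flattening loop, with an illustrative record:

    import six

    data = {'rtt': {'ares': 0.6}, 'packets': [1, 2]}
    flat = {}
    for k, v in six.iteritems(data):    # iteritems on Py2, items on Py3
        if isinstance(v, dict):
            for n_k, n_v in six.iteritems(v):
                flat["%s.%s" % (k, n_k)] = n_v
    assert flat == {'rtt.ares': 0.6}
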
index 418e3da..5d427be 100755 (executable)
@@ -38,6 +38,7 @@
     NFV TST
 
 """
+from __future__ import absolute_import
 import sys
 
 from yardstick.cmd.cli import YardstickCLI
index e32d360..7e0f360 100644 (file)
@@ -9,20 +9,23 @@
 
 """Heat template and stack management"""
 
-import time
+from __future__ import absolute_import
+from __future__ import print_function
+
+import collections
 import datetime
 import getpass
-import socket
 import logging
-import json
+import socket
+import time
 
-from oslo_utils import encodeutils
 import heatclient
 import pkg_resources
+from oslo_serialization import jsonutils
+from oslo_utils import encodeutils
 
-from yardstick.common import template_format
 import yardstick.common.openstack_utils as op_utils
-
+from yardstick.common import template_format
 
 log = logging.getLogger(__name__)
 
@@ -36,6 +39,7 @@ def get_short_key_uuid(uuid):
 
 class HeatObject(object):
     ''' base class for template and stack'''
+
     def __init__(self):
         self._heat_client = None
         self.uuid = None
@@ -119,7 +123,7 @@ class HeatStack(HeatObject):
                 self._delete()
                 break
             except RuntimeError as err:
-                log.warn(err.args)
+                log.warning(err.args)
                 time.sleep(2)
             i += 1
 
@@ -173,7 +177,7 @@ class HeatTemplate(HeatObject):
 
         if template_file:
             with open(template_file) as stream:
-                print "Parsing external template:", template_file
+                print("Parsing external template:", template_file)
                 template_str = stream.read()
                 self._template = template_format.parse(template_str)
             self._parameters = heat_parameters
@@ -312,6 +316,7 @@ class HeatTemplate(HeatObject):
             'type': 'OS::Nova::KeyPair',
             'properties': {
                 'name': name,
+                # resource_string returns bytes, so we must decode to unicode
                 'public_key': encodeutils.safe_decode(
                     pkg_resources.resource_string(
                         'yardstick.resources',
@@ -402,7 +407,7 @@ class HeatTemplate(HeatObject):
                 )
 
         if networks:
-            for i in range(len(networks)):
+            for i, _ in enumerate(networks):
                 server_properties['networks'].append({'network': networks[i]})
 
         if scheduler_hints:
@@ -412,11 +417,11 @@ class HeatTemplate(HeatObject):
             server_properties['user_data'] = user_data
 
         if metadata:
-            assert type(metadata) is dict
+            assert isinstance(metadata, collections.Mapping)
             server_properties['metadata'] = metadata
 
         if additional_properties:
-            assert type(additional_properties) is dict
+            assert isinstance(additional_properties, collections.Mapping)
             for prop in additional_properties:
                 server_properties[prop] = additional_properties[prop]
 
@@ -438,13 +443,15 @@ class HeatTemplate(HeatObject):
         stack = HeatStack(self.name)
 
         heat = self._get_heat_client()
-        json_template = json.dumps(self._template)
+        json_template = jsonutils.dump_as_bytes(
+            self._template)
         start_time = time.time()
         stack.uuid = self.uuid = heat.stacks.create(
             stack_name=self.name, template=json_template,
             parameters=self.heat_parameters)['stack']['id']
 
         status = self.status()
+        outputs = []
 
         if block:
             while status != u'CREATE_COMPLETE':
@@ -458,13 +465,12 @@ class HeatTemplate(HeatObject):
 
             end_time = time.time()
             outputs = getattr(heat.stacks.get(self.uuid), 'outputs')
+            log.info("Created stack '%s' in %d secs",
+                     self.name, end_time - start_time)
 
-        for output in outputs:
-            self.outputs[output["output_key"].encode("ascii")] = \
-                output["output_value"].encode("ascii")
-
-        log.info("Created stack '%s' in %d secs",
-                 self.name, end_time - start_time)
+        # keep outputs as unicode
+        self.outputs = {output["output_key"]: output["output_value"] for output
+                        in outputs}
 
         stack.outputs = self.outputs
         return stack
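
Two byte/text fixes land in heat.py above: the public key read via
pkg_resources is decoded to unicode before it is embedded in the
template, and stack outputs are no longer force-encoded to ASCII, since
Python 3 str is already unicode and bytes keys would not JSON-serialize.
A minimal sketch of the decode step, with a made-up key string:

    from oslo_utils import encodeutils

    raw = b'ssh-rsa AAAAB3Nza... user@host'  # resource_string gives bytes
    key = encodeutils.safe_decode(raw)       # utf-8 text on both Pythons
    assert isinstance(key, type(u''))
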
index 4cbbdfe..4e65303 100644 (file)
     $ yardstick-plot -i /tmp/yardstick.out -o /tmp/plots/
 '''
 
+from __future__ import absolute_import
+from __future__ import print_function
+
 import argparse
-import json
 import os
 import sys
 import time
-import matplotlib.pyplot as plt
+
 import matplotlib.lines as mlines
+import matplotlib.pyplot as plt
+from oslo_serialization import jsonutils
+from six.moves import range
+from six.moves import zip
 
 
 class Parser(object):
@@ -44,7 +50,7 @@ class Parser(object):
             prog='yardstick-plot',
             description="A tool for visualizing results from yardstick. "
                         "Currently supports plotting graphs for output files "
-                        "from tests: " + str(self.data.keys())
+                        "from tests: " + str(list(self.data.keys()))
         )
         parser.add_argument(
             '-i', '--input',
@@ -65,7 +71,7 @@ class Parser(object):
             self.scenarios[record["runner_id"]] = obj_name
             return
         runner_object = self.scenarios[record["runner_id"]]
-        for test_type in self.data.keys():
+        for test_type in self.data:
             if test_type in runner_object:
                 self.data[test_type].append(record)
 
@@ -80,17 +86,17 @@ class Parser(object):
         if self.args.input:
             input_file = self.args.input
         else:
-            print("No input file specified, reading from %s"
-                  % self.default_input_loc)
+            print(("No input file specified, reading from %s"
+                   % self.default_input_loc))
             input_file = self.default_input_loc
 
         try:
             with open(input_file) as f:
                 for line in f:
-                    record = json.loads(line)
+                    record = jsonutils.loads(line)
                     self._add_record(record)
         except IOError as e:
-            print(os.strerror(e.errno))
+            print(os.strerror(e.errno))
             sys.exit(1)
 
 
@@ -126,7 +132,7 @@ class Plotter(object):
             os.makedirs(self.output_folder)
         new_file = os.path.join(self.output_folder, file_name)
         plt.savefig(new_file)
-        print("Saved graph to " + new_file)
+        print(("Saved graph to " + new_file))
 
     def _plot_ping(self, records):
         '''ping test result interpretation and visualization on the graph'''
@@ -143,7 +149,7 @@ class Plotter(object):
         if len(rtts) == 1:
             plt.bar(1, rtts[0], 0.35, color=self.colors[0])
         else:
-            plt.plot(seqs, rtts, self.colors[0]+'-')
+            plt.plot(seqs, rtts, self.colors[0] + '-')
 
         self._construct_legend(['rtt'])
         plt.xlabel("sequence number")
@@ -164,13 +170,13 @@ class Plotter(object):
                 received[i] = 0.0
                 plt.axvline(flows[i], color='r')
 
-        ppm = [1000000.0*(i - j)/i for i, j in zip(sent, received)]
+        ppm = [1000000.0 * (i - j) / i for i, j in zip(sent, received)]
 
         # If there is a single data-point then display a bar-chart
         if len(ppm) == 1:
             plt.bar(1, ppm[0], 0.35, color=self.colors[0])
         else:
-            plt.plot(flows, ppm, self.colors[0]+'-')
+            plt.plot(flows, ppm, self.colors[0] + '-')
 
         self._construct_legend(['ppm'])
         plt.xlabel("number of flows")
@@ -191,7 +197,7 @@ class Plotter(object):
         for i, val in enumerate(intervals):
             if val:
                 for j, _ in enumerate(intervals):
-                    kbps.append(val[j]['sum']['bits_per_second']/1000)
+                    kbps.append(val[j]['sum']['bits_per_second'] / 1000)
                     seconds.append(seconds[-1] + val[j]['sum']['seconds'])
             else:
                 kbps.append(0.0)
@@ -202,7 +208,7 @@ class Plotter(object):
                 plt.axvline(seconds[-1], color='r')
 
         self._construct_legend(['bandwidth'])
-        plt.plot(seconds[1:], kbps[1:], self.colors[0]+'-')
+        plt.plot(seconds[1:], kbps[1:], self.colors[0] + '-')
         plt.xlabel("time in seconds")
         plt.ylabel("bandwidth in Kb/s")
 
@@ -312,5 +318,6 @@ def main():
     print("Plotting graph(s)")
     plotter.plot()
 
+
 if __name__ == '__main__':
     main()
index 927ca94..1cad8ee 100644 (file)
@@ -25,7 +25,7 @@ Execute command and get output:
     status, stdout, stderr = ssh.execute("ps ax")
     if status:
         raise Exception("Command failed with non-zero status.")
-    print stdout.splitlines()
+    print(stdout.splitlines())
 
 Execute command with huge output:
 
@@ -62,6 +62,7 @@ Eventlet:
     sshclient = eventlet.import_patched("yardstick.ssh")
 
 """
+from __future__ import absolute_import
 import os
 import select
 import socket
@@ -70,6 +71,7 @@ import re
 
 import logging
 import paramiko
+from oslo_utils import encodeutils
 from scp import SCPClient
 import six
 
@@ -199,7 +201,8 @@ class SSH(object):
         session.exec_command(cmd)
         start_time = time.time()
 
-        data_to_send = ""
+        # encode on transmit, decode on receive
+        data_to_send = encodeutils.safe_encode("")
         stderr_data = None
 
         # If we have data to be sent to stdin then `select' should also
@@ -214,14 +217,15 @@ class SSH(object):
             r, w, e = select.select([session], writes, [session], 1)
 
             if session.recv_ready():
-                data = session.recv(4096)
+                data = encodeutils.safe_decode(session.recv(4096), 'utf-8')
                 self.log.debug("stdout: %r", data)
                 if stdout is not None:
                     stdout.write(data)
                 continue
 
             if session.recv_stderr_ready():
-                stderr_data = session.recv_stderr(4096)
+                stderr_data = encodeutils.safe_decode(
+                    session.recv_stderr(4096), 'utf-8')
                 self.log.debug("stderr: %r", stderr_data)
                 if stderr is not None:
                     stderr.write(stderr_data)
@@ -230,7 +234,8 @@ class SSH(object):
             if session.send_ready():
                 if stdin is not None and not stdin.closed:
                     if not data_to_send:
-                        data_to_send = stdin.read(4096)
+                        data_to_send = encodeutils.safe_encode(
+                            stdin.read(4096), incoming='utf-8')
                         if not data_to_send:
                             # we may need to keep stdin open
                             if not keep_stdin_open:
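
The ssh.py changes above follow one rule: encode text to bytes on
transmit, decode bytes to text on receive, since paramiko channels carry
raw bytes while the surrounding buffers hold text. A minimal round-trip
sketch:

    from oslo_utils import encodeutils

    wire = encodeutils.safe_encode(u'uname -a\n', incoming='utf-8')
    assert isinstance(wire, bytes)           # what the channel sends
    text = encodeutils.safe_decode(wire, 'utf-8')
    assert text == u'uname -a\n'             # what the caller reads
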
index d4ab29e..9c4eef1 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-'''
+"""
 Experimental Framework
-'''
+"""
+from __future__ import absolute_import
+import os
+
+APEX_LAKE_ROOT = os.path.realpath(
+    os.path.join(os.path.dirname(os.path.dirname(__file__))))
index e0209be..24dd1f8 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import experimental_framework.benchmarking_unit as b_unit
 from experimental_framework import heat_template_generation, common
 
index 1963696..d5de308 100644 (file)
@@ -18,6 +18,7 @@ initialization, execution and finalization
 '''
 
 
+from __future__ import absolute_import
 import json
 import time
 import inspect
@@ -27,6 +28,7 @@ from experimental_framework import common
 # from experimental_framework import data_manager as data
 from experimental_framework import heat_template_generation as heat
 from experimental_framework import deployment_unit as deploy
+from six.moves import range
 
 
 class BenchmarkingUnit:
@@ -116,10 +118,10 @@ class BenchmarkingUnit:
         """
         common.LOG.info('Run Benchmarking Unit')
 
-        experiment = dict()
-        result = dict()
-        for iteration in range(0, self.iterations):
-            common.LOG.info('Iteration ' + str(iteration))
+        experiment = {}
+        result = {}
+        for iteration in range(self.iterations):
+            common.LOG.info('Iteration %s', iteration)
             for template_file_name in self.template_files:
                 experiment_name = BenchmarkingUnit.\
                     extract_experiment_name(template_file_name)
@@ -238,7 +240,7 @@ class BenchmarkingUnit:
         :return: (str) Experiment Name
         """
         strings = template_file_name.split('.')
-        return ".".join(strings[:(len(strings)-1)])
+        return ".".join(strings[:(len(strings) - 1)])
 
     @staticmethod
     def get_benchmark_class(complete_module_name):
@@ -253,7 +255,7 @@ class BenchmarkingUnit:
         """
         strings = complete_module_name.split('.')
         class_name = 'experimental_framework.benchmarks.{}'.format(strings[0])
-        pkg = __import__(class_name, globals(), locals(), [], -1)
+        pkg = __import__(class_name, globals(), locals(), [], 0)
         module = getattr(getattr(pkg, 'benchmarks'), strings[0])
         members = inspect.getmembers(module)
         for m in members:
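
The __import__(..., 0) change above is forced: Python 3 rejects the old
level of -1, which meant "try a relative import, then an absolute one";
level 0 requests a plain absolute import. importlib.import_module is the
documented, clearer equivalent, sketched here against a stdlib module:

    import importlib

    module = importlib.import_module('json.decoder')   # absolute import
    cls = getattr(module, 'JSONDecoder')
    assert cls is module.JSONDecoder
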
index ac7fad8..96cce22 100644 (file)
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import absolute_import
 import abc
 
 
@@ -30,7 +31,7 @@ class BenchmarkBaseClass(object):
             raise ValueError("Parameters need to be provided in a dict")
 
         for param in self.get_features()['parameters']:
-            if param not in params.keys():
+            if param not in params:
                 params[param] = self.get_features()['default_values'][param]
 
         for param in self.get_features()['parameters']:
index 320beca..db9d449 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import os
-import commands
 # import signal
 import time
+
+import subprocess
 from experimental_framework.benchmarks import benchmark_base_class as base
 from experimental_framework.constants import framework_parameters as fp
 from experimental_framework.constants import conf_file_sections as cfs
 from experimental_framework.packet_generators import dpdk_packet_generator \
     as dpdk
 import experimental_framework.common as common
+from six.moves import range
 
 
 THROUGHPUT = 'throughput'
@@ -36,7 +39,7 @@ class InstantiationValidationBenchmark(base.BenchmarkBaseClass):
 
     def __init__(self, name, params):
         base.BenchmarkBaseClass.__init__(self, name, params)
-        self.base_dir = "{}{}{}".format(
+        self.base_dir = os.path.join(
             common.get_base_dir(), fp.EXPERIMENTAL_FRAMEWORK_DIR,
             fp.DPDK_PKTGEN_DIR)
         self.results_file = self.base_dir + PACKETS_FILE_NAME
@@ -45,10 +48,11 @@ class InstantiationValidationBenchmark(base.BenchmarkBaseClass):
         self.interface_name = ''
 
         # Set the packet checker command
-        self.pkt_checker_command = common.get_base_dir()
-        self.pkt_checker_command += 'experimental_framework/libraries/'
-        self.pkt_checker_command += 'packet_checker/'
-        self.pkt_checker_command += PACKET_CHECKER_PROGRAM_NAME + ' '
+        self.pkt_checker_command = os.path.join(
+            common.get_base_dir(),
+            'experimental_framework/libraries/',
+            'packet_checker/',
+            PACKET_CHECKER_PROGRAM_NAME + ' ')
 
     def init(self):
         """
@@ -69,9 +73,11 @@ class InstantiationValidationBenchmark(base.BenchmarkBaseClass):
         features['description'] = 'Instantiation Validation Benchmark'
         features['parameters'] = [THROUGHPUT, VLAN_SENDER, VLAN_RECEIVER]
         features['allowed_values'] = dict()
-        features['allowed_values'][THROUGHPUT] = map(str, range(0, 100))
-        features['allowed_values'][VLAN_SENDER] = map(str, range(-1, 4096))
-        features['allowed_values'][VLAN_RECEIVER] = map(str, range(-1, 4096))
+        features['allowed_values'][THROUGHPUT] = [str(x) for x in range(100)]
+        features['allowed_values'][VLAN_SENDER] = [str(x) for x in
+                                                   range(-1, 4096)]
+        features['allowed_values'][VLAN_RECEIVER] = [str(x)
+                                                     for x in range(-1, 4096)]
         features['default_values'] = dict()
         features['default_values'][THROUGHPUT] = '1'
         features['default_values'][VLAN_SENDER] = '-1'
@@ -203,7 +209,7 @@ class InstantiationValidationBenchmark(base.BenchmarkBaseClass):
         # Start the packet checker
         current_dir = os.path.dirname(os.path.realpath(__file__))
         dir_list = self.pkt_checker_command.split('/')
-        directory = '/'.join(dir_list[0:len(dir_list)-1])
+        directory = os.path.sep.join(dir_list[0:len(dir_list) - 1])
         os.chdir(directory)
         command = "make"
         common.run_command(command)
@@ -245,10 +251,10 @@ class InstantiationValidationBenchmark(base.BenchmarkBaseClass):
         processes currently running on the host
         :return: type: list of int
         """
-        output = commands.getoutput("ps -ef |pgrep " +
-                                    PACKET_CHECKER_PROGRAM_NAME)
+        output = subprocess.check_output(
+            ['pgrep', PACKET_CHECKER_PROGRAM_NAME])
         if not output:
             pids = []
         else:
-            pids = map(int, output.split('\n'))
+            pids = [int(x) for x in output.splitlines()]
         return pids
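
Two notes on the pgrep rewrite above: check_output() wants a list
argument when no shell is involved (fixed above), and it returns bytes
on Python 3, which int() accepts directly for ASCII digits. It also
raises CalledProcessError when pgrep matches nothing, so a robust caller
guards for that. A minimal sketch:

    import subprocess

    try:
        out = subprocess.check_output(['pgrep', 'init'])
    except subprocess.CalledProcessError:
        out = b''                    # pgrep exits 1 on no match
    pids = [int(line) for line in out.splitlines()]
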
index 1eab70c..5569b6c 100644 (file)
 # limitations under the License.
 
 
-import instantiation_validation_benchmark as base
+from __future__ import absolute_import
 from experimental_framework import common
+from experimental_framework.benchmarks import \
+    instantiation_validation_benchmark as base
+from six.moves import range
 
 
 NUM_OF_NEIGHBORS = 'num_of_neighbours'
@@ -38,7 +41,7 @@ class InstantiationValidationNoisyNeighborsBenchmark(
         self.template_file = common.get_template_dir() + \
             temp_name
         self.stack_name = 'neighbour'
-        self.neighbor_stack_names = list()
+        self.neighbor_stack_names = []
 
     def get_features(self):
         features = super(InstantiationValidationNoisyNeighborsBenchmark,
index f2a87b2..44c9f32 100644 (file)
 # limitations under the License.
 
 
+from __future__ import absolute_import
 from experimental_framework.benchmarks import rfc2544_throughput_benchmark \
     as base
 from experimental_framework import common
+from six.moves import range
 
 
 NETWORK_NAME = 'network'
index 9db62e6..5c7b55e 100644 (file)
@@ -11,6 +11,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import absolute_import
+from six.moves import range
 
 from experimental_framework.benchmarks import benchmark_base_class
 from experimental_framework.packet_generators \
@@ -60,8 +62,10 @@ class RFC2544ThroughputBenchmark(benchmark_base_class.BenchmarkBaseClass):
         features['allowed_values'] = dict()
         features['allowed_values'][PACKET_SIZE] = ['64', '128', '256', '512',
                                                    '1024', '1280', '1514']
-        features['allowed_values'][VLAN_SENDER] = map(str, range(-1, 4096))
-        features['allowed_values'][VLAN_RECEIVER] = map(str, range(-1, 4096))
+        features['allowed_values'][VLAN_SENDER] = [str(x) for x in
+                                                   range(-1, 4096)]
+        features['allowed_values'][VLAN_RECEIVER] = [str(x) for x in
+                                                     range(-1, 4096)]
         features['default_values'] = dict()
         features['default_values'][PACKET_SIZE] = '1280'
         features['default_values'][VLAN_SENDER] = '1007'
@@ -99,7 +103,7 @@ class RFC2544ThroughputBenchmark(benchmark_base_class.BenchmarkBaseClass):
         :return: packet_sizes (list)
         """
         packet_size = '1280'  # default value
-        if PACKET_SIZE in self.params.keys() and \
+        if PACKET_SIZE in self.params and \
                 isinstance(self.params[PACKET_SIZE], str):
             packet_size = self.params[PACKET_SIZE]
         return packet_size
index cbb930d..5891832 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import time
 
 from experimental_framework.benchmarks import benchmark_base_class as base
index 4bacd38..feea8bd 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
+from __future__ import absolute_import
 import os
 import re
-import ConfigParser
+import six.moves.configparser
 import logging
 import fileinput
 from experimental_framework.constants import conf_file_sections as cf
@@ -70,7 +72,7 @@ def init(api=False):
     init_conf_file(api)
     init_log()
     init_general_vars(api)
-    if len(CONF_FILE.get_variable_list(cf.CFS_PKTGEN)) > 0:
+    if CONF_FILE.get_variable_list(cf.CFS_PKTGEN):
         init_pktgen()
 
 
@@ -129,7 +131,7 @@ def init_general_vars(api=False):
 
     RESULT_DIR = "/tmp/apexlake/results/"
     if not os.path.isdir(RESULT_DIR):
-        os.mkdir(RESULT_DIR)
+        os.makedirs(RESULT_DIR)
 
     if cf.CFSO_RELEASE in CONF_FILE.get_variable_list(cf.CFS_OPENSTACK):
         RELEASE = CONF_FILE.get_variable(cf.CFS_OPENSTACK, cf.CFSO_RELEASE)
@@ -311,7 +313,7 @@ class ConfigurationFile:
         # config_file = BASE_DIR + config_file
         InputValidation.validate_file_exist(
             config_file, 'The provided configuration file does not exist')
-        self.config = ConfigParser.ConfigParser()
+        self.config = six.moves.configparser.ConfigParser()
         self.config.read(config_file)
         for section in sections:
             setattr(
@@ -457,7 +459,7 @@ def replace_in_file(file, text_to_search, text_to_replace):
     message = "The file does not exist"
     InputValidation.validate_file_exist(file, message)
     for line in fileinput.input(file, inplace=True):
-        print(line.replace(text_to_search, text_to_replace).rstrip())
+        print(line.replace(text_to_search, text_to_replace).rstrip())
 
 
 # ------------------------------------------------------
@@ -610,7 +612,7 @@ class InputValidation(object):
         missing = [
             credential_key
             for credential_key in credential_keys
-            if credential_key not in credentials.keys()
+            if credential_key not in credentials
         ]
         if len(missing) == 0:
             return True
index 4ee3a8a..6e651bf 100644 (file)
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import absolute_import
 from experimental_framework.constants import conf_file_sections as cfs
 
 # ------------------------------------------------------
index 22fec13..0bb507c 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import os
 import time
 
@@ -50,8 +51,8 @@ class DeploymentUnit:
                 time.sleep(5)
                 status = self.heat_manager.check_stack_status(stack_name)
             return True
-        except Exception as e:
-            common.LOG.debug(e.message)
+        except Exception:
+            common.LOG.debug("check_stack_status", exc_info=True)
             return False
 
     def destroy_all_deployed_stacks(self):
@@ -81,17 +82,16 @@ class DeploymentUnit:
             self.heat_manager.create_stack(template_file, stack_name,
                                            parameters)
             deployed = True
-        except Exception as e:
-            common.LOG.debug(e.message)
+        except Exception:
+            common.LOG.debug("create_stack", exc_info=True)
             deployed = False
 
         if not deployed and 'COMPLETE' in \
                 self.heat_manager.check_stack_status(stack_name):
             try:
                 self.destroy_heat_template(stack_name)
-            except Exception as e:
-                common.LOG.debug(e.message)
-                pass
+            except Exception:
+                common.LOG.debug("destroy_heat_template", exc_info=True)
 
         status = self.heat_manager.check_stack_status(stack_name)
         while status and 'CREATE_IN_PROGRESS' in status:
@@ -102,16 +102,15 @@ class DeploymentUnit:
                 attempt += 1
                 try:
                     self.destroy_heat_template(stack_name)
-                except Exception as e:
-                    common.LOG.debug(e.message)
-                    pass
+                except Exception:
+                    common.LOG.debug("destroy_heat_template", exc_info=True)
                 return self.deploy_heat_template(template_file, stack_name,
                                                  parameters, attempt)
             else:
                 try:
                     self.destroy_heat_template(stack_name)
-                except Exception as e:
-                    common.LOG.debug(e.message)
+                except Exception:
+                    common.LOG.debug("destroy_heat_template", exc_info=True)
                 finally:
                     return False
         if self.heat_manager.check_stack_status(stack_name) and \
index 7400ebd..a323334 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 from keystoneclient.v2_0 import client as keystoneClient
 from heatclient import client as heatClient
 from heatclient.common import template_utils
@@ -97,7 +98,6 @@ class HeatManager:
                 if stack.stack_name == stack_name:
                     self.heat.stacks.delete(stack.id)
                     return True
-        except Exception as e:
-            common.LOG.debug(e.message)
-            pass
+        except Exception:
+            common.LOG.debug("destroy_heat_template", exc_info=True)
         return False
index e0c1a66..0f0af8b 100644 (file)
@@ -17,6 +17,7 @@
 Generation of the heat templates from the base template
 '''
 
+from __future__ import absolute_import
 import json
 import os
 import shutil
index 6dc32b6..9590036 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import os
-import base_packet_generator
-import experimental_framework.common as common
 import time
+
+
+import experimental_framework.common as common
 from experimental_framework.constants import conf_file_sections as conf_file
 from experimental_framework.constants import framework_parameters as fp
+from experimental_framework.packet_generators import base_packet_generator
 
 
 class DpdkPacketGenerator(base_packet_generator.BasePacketGenerator):
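Python 3 dropped implicit relative imports, and `from __future__ import absolute_import` disables them on Python 2 as well; that is why the bare `import base_packet_generator` is rewritten through the full package path:

    # Python 2 only -- implicit relative import, ImportError on Python 3:
    #   import base_packet_generator
    # portable -- absolute import through the package:
    from experimental_framework.packet_generators import base_packet_generator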
@@ -186,8 +189,7 @@ class DpdkPacketGenerator(base_packet_generator.BasePacketGenerator):
                     conf_file.CFSP_DPDK_PROGRAM_NAME,
                     conf_file.CFSP_DPDK_COREMASK,
                     conf_file.CFSP_DPDK_MEMORY_CHANNEL]:
-            if var not in variables.keys() or (var in variables.keys() and
-               variables[var] is ''):
+            if variables.get(var, '') == '':
                 raise ValueError("The variable " + var + " does not exist")
 
     @staticmethod
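Besides being shorter, the rewritten check fixes a latent bug: `variables[var] is ''` compares object identity and only worked through CPython string interning. dict.get() with a default collapses the two conditions into one equality test:

    variables = {'coremask': ''}         # illustrative input
    # old: var not in variables.keys() or variables[var] is ''
    # new: one lookup, one value-equality comparison
    if variables.get('coremask', '') == '':
        raise ValueError("The variable coremask does not exist")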
index 188a7f0..0211a57 100644 (file)
@@ -16,6 +16,7 @@
 Experimental Framework
 """
 
+from __future__ import absolute_import
 from distutils.core import setup
 
 
index 4b70b9b..b6191ed 100644 (file)
 # limitations under the License.
 
 
+from __future__ import absolute_import
 import unittest
 import mock
 import os
 import experimental_framework.common as common
+from experimental_framework import APEX_LAKE_ROOT
 from experimental_framework.api import FrameworkApi
 from experimental_framework.benchmarking_unit import BenchmarkingUnit
 import experimental_framework.benchmarks.\
     instantiation_validation_benchmark as iv
+from six.moves import map
+from six.moves import range
 
 
 class DummyBenchmarkingUnit(BenchmarkingUnit):
@@ -61,6 +65,7 @@ class DummyBenchmarkingUnit2(BenchmarkingUnit):
 
 
 class TestGeneratesTemplate(unittest.TestCase):
+
     def setUp(self):
         pass
 
@@ -92,11 +97,11 @@ class TestGeneratesTemplate(unittest.TestCase):
             iv.VLAN_RECEIVER]
         expected['allowed_values'] = dict()
         expected['allowed_values'][iv.THROUGHPUT] = \
-            map(str, range(0, 100))
+            list(map(str, range(0, 100)))
         expected['allowed_values'][iv.VLAN_SENDER] = \
-            map(str, range(-1, 4096))
+            list(map(str, range(-1, 4096)))
         expected['allowed_values'][iv.VLAN_RECEIVER] = \
-            map(str, range(-1, 4096))
+            list(map(str, range(-1, 4096)))
         expected['default_values'] = dict()
         expected['default_values'][iv.THROUGHPUT] = '1'
         expected['default_values'][iv.VLAN_SENDER] = '-1'
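On Python 3, map() and range() return lazy iterators, so comparing their results against lists in assertions would fail; wrapping the outer map() in list() restores the Python 2 semantics. The inner list() around range() that 2to3 adds is redundant, since map() accepts any iterable (trimmed above):

    list(map(str, range(3))) == ['0', '1', '2']    # True on py2 and py3
    # without the outer wrapper on py3:
    map(str, range(3)) == ['0', '1', '2']          # False: map object vs list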
@@ -121,9 +126,8 @@ class TestGeneratesTemplate(unittest.TestCase):
     def test_execute_framework_for_success(self, mock_b_unit, mock_heat,
                                            mock_credentials, mock_log,
                                            mock_common_init):
-        common.TEMPLATE_DIR = "{}/{}/".format(
-            os.getcwd(), 'tests/data/generated_templates'
-        )
+        common.TEMPLATE_DIR = os.path.join(APEX_LAKE_ROOT,
+                                           'tests/data/generated_templates/')
 
         test_cases = dict()
         iterations = 1
index b0e27d0..153de17 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 from experimental_framework.packet_generators import base_packet_generator
 
index 405c010..4e5eb9f 100644 (file)
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import absolute_import
 import unittest
 from experimental_framework.benchmarks import benchmark_base_class as base
 
@@ -45,8 +46,8 @@ class TestBenchmarkBaseClass(unittest.TestCase):
         params['C'] = 'c'
         bench_base = DummyBechmarkBaseClass(name, params)
         self.assertEqual(name, bench_base.name)
-        self.assertIn('A', bench_base.params.keys())
-        self.assertIn('B', bench_base.params.keys())
+        self.assertIn('A', bench_base.params)
+        self.assertIn('B', bench_base.params)
         self.assertEqual('a', bench_base.params['A'])
         self.assertEqual('b', bench_base.params['B'])
 
index 652327a..7b33ba6 100644 (file)
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
+from __future__ import absolute_import
+import os
 import unittest
 import mock
+
+from experimental_framework import APEX_LAKE_ROOT
 from experimental_framework.benchmarking_unit import BenchmarkingUnit
 # from experimental_framework.data_manager import DataManager
 from experimental_framework.deployment_unit import DeploymentUnit
@@ -275,7 +278,8 @@ class TestBenchmarkingUnit(unittest.TestCase):
                                         mock_rfc2544, mock_log, mock_influx):
         mock_heat.return_value = list()
         mock_time.return_value = '12345'
-        mock_temp_dir.return_value = 'tests/data/test_templates/'
+        mock_temp_dir.return_value = os.path.join(APEX_LAKE_ROOT,
+                                                  'tests/data/test_templates/')
         common.TEMPLATE_FILE_EXTENSION = '.yaml'
         common.RESULT_DIR = 'tests/data/results/'
         common.INFLUXDB_IP = 'InfluxIP'
@@ -336,7 +340,8 @@ class TestBenchmarkingUnit(unittest.TestCase):
             mock_log):
         mock_heat.return_value = list()
         mock_time.return_value = '12345'
-        mock_temp_dir.return_value = 'tests/data/test_templates/'
+        mock_temp_dir.return_value = os.path.join(APEX_LAKE_ROOT,
+                                                  'tests/data/test_templates/')
         common.TEMPLATE_FILE_EXTENSION = '.yaml'
         common.RESULT_DIR = 'tests/data/results/'
 
index 486ed6d..b8dbfe6 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import mock
 import os
 import logging
-import ConfigParser
+import six.moves.configparser
 import experimental_framework.common as common
 import experimental_framework.constants.conf_file_sections as cf
+from experimental_framework import APEX_LAKE_ROOT
 
 __author__ = 'vmricco'
 
@@ -47,6 +49,7 @@ def reset_common():
 
 
 class DummyConfigurationFile(common.ConfigurationFile):
+
     def __init__(self, sections, conf_file=''):
         pass
 
@@ -58,6 +61,7 @@ class DummyConfigurationFile(common.ConfigurationFile):
 
 
 class DummyConfigurationFile2(common.ConfigurationFile):
+
     def __init__(self, sections):
         self.pktgen_counter = 0
 
@@ -74,7 +78,7 @@ class DummyConfigurationFile2(common.ConfigurationFile):
             self.pktgen_counter += 1
             return 'dpdk_pktgen'
         if variable_name == cf.CFSP_DPDK_PKTGEN_DIRECTORY:
-            return os.getcwd()
+            return APEX_LAKE_ROOT
         if variable_name == cf.CFSP_DPDK_PROGRAM_NAME:
             return 'program'
         if variable_name == cf.CFSP_DPDK_COREMASK:
@@ -86,7 +90,7 @@ class DummyConfigurationFile2(common.ConfigurationFile):
         if variable_name == cf.CFSP_DPDK_BUS_SLOT_NIC_2:
             return 'bus_slot_nic_2'
         if variable_name == cf.CFSP_DPDK_DPDK_DIRECTORY:
-            return os.getcwd()
+            return APEX_LAKE_ROOT
 
     def get_variable_list(self, section):
         if section == cf.CFS_PKTGEN:
@@ -114,8 +118,7 @@ class TestCommonInit(unittest.TestCase):
 
     def setUp(self):
         common.CONF_FILE = DummyConfigurationFile('')
-        self.dir = '{}/{}'.format(os.getcwd(),
-                                  'experimental_framework/')
+        self.dir = os.path.join(APEX_LAKE_ROOT, 'experimental_framework/')
 
     def tearDown(self):
         reset_common()
@@ -131,7 +134,8 @@ class TestCommonInit(unittest.TestCase):
                               init_general_vars, init_conf_file, mock_getcwd):
         mock_getcwd.return_value = self.dir
         common.init(True)
-        init_pkgen.assert_called_once()
+        if common.CONF_FILE.get_variable_list(cf.CFS_PKTGEN):
+            init_pkgen.assert_called_once()
         init_conf_file.assert_called_once()
         init_general_vars.assert_called_once()
         init_log.assert_called_once()
@@ -144,7 +148,7 @@ class TestCommonInit(unittest.TestCase):
     @mock.patch('experimental_framework.common.LOG')
     def test_init_general_vars_for_success(self, mock_log, mock_makedirs,
                                            mock_path_exists, mock_val_file):
-        common.BASE_DIR = "{}/".format(os.getcwd())
+        common.BASE_DIR = APEX_LAKE_ROOT
         mock_path_exists.return_value = False
         mock_val_file.return_value = True
         common.init_general_vars()
@@ -160,15 +164,19 @@ class TestCommonInit2(unittest.TestCase):
 
     def setUp(self):
         common.CONF_FILE = DummyConfigurationFile2('')
-        self.dir = '{}/{}'.format(os.getcwd(), 'experimental_framework/')
+        self.dir = os.path.join(APEX_LAKE_ROOT, 'experimental_framework')
 
     def tearDown(self):
         reset_common()
         common.CONF_FILE = None
 
+    @mock.patch('experimental_framework.common.InputValidation')
+    @mock.patch('os.path.exists')
+    @mock.patch('os.makedirs')
     @mock.patch('experimental_framework.common.LOG')
-    def test_init_general_vars_2_for_success(self, mock_log):
-        common.BASE_DIR = "{}/".format(os.getcwd())
+    def test_init_general_vars_2_for_success(self, mock_log, mock_makedirs,
+                                             mock_path_exists, mock_val_file):
+        common.BASE_DIR = APEX_LAKE_ROOT
         common.init_general_vars()
         self.assertEqual(common.TEMPLATE_FILE_EXTENSION, '.yaml')
         self.assertEqual(common.TEMPLATE_DIR, '/tmp/apexlake/heat_templates/')
@@ -183,14 +191,16 @@ class TestCommonInit2(unittest.TestCase):
     def test_init_pktgen_for_success(self):
         common.init_pktgen()
         self.assertEqual(common.PKTGEN, 'dpdk_pktgen')
-        directory = self.dir.split('experimental_framework/')[0]
+        directory = self.dir.split('experimental_framework')[0]
         self.assertEqual(common.PKTGEN_DIR, directory)
         self.assertEqual(common.PKTGEN_PROGRAM, 'program')
         self.assertEqual(common.PKTGEN_COREMASK, 'coremask')
         self.assertEqual(common.PKTGEN_MEMCHANNEL, 'memchannel')
         self.assertEqual(common.PKTGEN_BUS_SLOT_NIC_1, 'bus_slot_nic_1')
         self.assertEqual(common.PKTGEN_BUS_SLOT_NIC_2, 'bus_slot_nic_2')
-        expected_dir = "{}/".format(os.getcwd())
+        # directory paths are expected to end in '/' because the code
+        # concatenates path strings instead of using os.path.join everywhere
+        expected_dir = APEX_LAKE_ROOT + '/'
         self.assertEqual(common.PKTGEN_DPDK_DIRECTORY, expected_dir)
 
     def test_init_pktgen_for_failure(self):
@@ -260,8 +270,8 @@ class TestConfigFileClass(unittest.TestCase):
             'Deployment-parameters',
             'Testcase-parameters'
         ]
-        c_file = './tests/data/common/conf.cfg'
-        common.BASE_DIR = os.getcwd()
+        c_file = os.path.join(APEX_LAKE_ROOT, 'tests/data/common/conf.cfg')
+        common.BASE_DIR = APEX_LAKE_ROOT
         self.conf_file = common.ConfigurationFile(self.sections, c_file)
 
     def tearDown(self):
@@ -275,7 +285,8 @@ class TestConfigFileClass(unittest.TestCase):
         sections = ['General', 'OpenStack', 'Experiment-VNF', 'PacketGen',
                     'Deployment-parameters', 'Testcase-parameters']
         c = DummyConfigurationFile3(
-            sections, config_file='./tests/data/common/conf.cfg')
+            sections, config_file=os.path.join(APEX_LAKE_ROOT,
+                                               'tests/data/common/conf.cfg'))
         self.assertEqual(
             DummyConfigurationFile3._config_section_map('', '', True),
             6)
@@ -285,8 +296,9 @@ class TestConfigFileClass(unittest.TestCase):
     def test__config_section_map_for_success(self):
         general_section = 'General'
         # openstack_section = 'OpenStack'
-        config_file = 'tests/data/common/conf.cfg'
-        config = ConfigParser.ConfigParser()
+        config_file = os.path.join(APEX_LAKE_ROOT,
+                                   'tests/data/common/conf.cfg')
+        config = six.moves.configparser.ConfigParser()
         config.read(config_file)
 
         expected = {
@@ -361,8 +373,9 @@ class TestCommonMethods(unittest.TestCase):
             'Deployment-parameters',
             'Testcase-parameters'
         ]
-        config_file = './tests/data/common/conf.cfg'
-        common.BASE_DIR = os.getcwd()
+        config_file = os.path.join(APEX_LAKE_ROOT,
+                                   'tests/data/common/conf.cfg')
+        common.BASE_DIR = APEX_LAKE_ROOT
         common.CONF_FILE = DummyConfigurationFile4(self.sections, config_file)
 
     def tearDown(self):
@@ -397,13 +410,14 @@ class TestCommonMethods(unittest.TestCase):
         self.assertEqual(expected, output)
 
     def test_get_file_first_line_for_success(self):
-        file = 'tests/data/common/conf.cfg'
+        file = os.path.join(APEX_LAKE_ROOT, 'tests/data/common/conf.cfg')
         expected = '[General]\n'
         output = common.get_file_first_line(file)
         self.assertEqual(expected, output)
 
     def test_replace_in_file_for_success(self):
-        filename = 'tests/data/common/file_replacement.txt'
+        filename = os.path.join(APEX_LAKE_ROOT,
+                                'tests/data/common/file_replacement.txt')
         text_to_search = 'replacement of'
         text_to_replace = '***'
         common.replace_in_file(filename, text_to_search, text_to_replace)
@@ -542,27 +556,14 @@ class TestinputValidation(unittest.TestCase):
             list(), ''
         )
 
-    def test_validate_file_exist_for_success(self):
-        filename = 'tests/data/common/file_replacement.txt'
-        output = common.InputValidation.validate_file_exist(filename, '')
-        self.assertTrue(output)
-
-    def test_validate_file_exist_for_failure(self):
-        filename = 'tests/data/common/file_replacement'
-        self.assertRaises(
-            ValueError,
-            common.InputValidation.validate_file_exist,
-            filename, ''
-        )
-
     def test_validate_directory_exist_and_format_for_success(self):
-        directory = 'tests/data/common/'
+        directory = os.path.join(APEX_LAKE_ROOT, 'tests/data/common/')
         output = common.InputValidation.\
             validate_directory_exist_and_format(directory, '')
         self.assertTrue(output)
 
     def test_validate_directory_exist_and_format_for_failure(self):
-        directory = 'tests/data/com/'
+        directory = os.path.join(APEX_LAKE_ROOT, 'tests/data/com/')
         self.assertRaises(
             ValueError,
             common.InputValidation.validate_directory_exist_and_format,
index 2b03edb..abf4134 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 from experimental_framework.constants import conf_file_sections as cfs
 
index cec834e..5a9178f 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import logging
 import mock
index bad250e..0b0df6c 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import mock
 from experimental_framework.constants import conf_file_sections as conf_file
index dad3177..cc3e1bf 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import experimental_framework.heat_template_generation as heat_gen
 import mock
 import os
 import experimental_framework.common as common
+from experimental_framework import APEX_LAKE_ROOT
 
 __author__ = 'gpetralx'
 
@@ -45,6 +47,7 @@ def reset_common():
 
 
 class TestGeneratesTemplate(unittest.TestCase):
+
     def setUp(self):
         self.deployment_configuration = {
             'vnic_type': ['normal', 'direct'],
@@ -61,9 +64,11 @@ class TestGeneratesTemplate(unittest.TestCase):
     @mock.patch('experimental_framework.common.get_template_dir')
     def test_generates_template_for_success(self, mock_template_dir,
                                             mock_log):
-        generated_templates_dir = 'tests/data/generated_templates/'
+        generated_templates_dir = os.path.join(
+            APEX_LAKE_ROOT, 'tests/data/generated_templates/')
         mock_template_dir.return_value = generated_templates_dir
-        test_templates = 'tests/data/test_templates/'
+        test_templates = os.path.join(APEX_LAKE_ROOT,
+                                      'tests/data/test_templates/')
         heat_gen.generates_templates(self.template_name,
                                      self.deployment_configuration)
         for dirname, dirnames, filenames in os.walk(test_templates):
@@ -73,8 +78,9 @@ class TestGeneratesTemplate(unittest.TestCase):
                         self.assertListEqual(test.readlines(),
                                              generated.readlines())
 
-        t_name = '/tests/data/generated_templates/VTC_base_single_vm_wait.tmp'
-        self.template_name = "{}{}".format(os.getcwd(), t_name)
+        self.template_name = os.path.join(
+            APEX_LAKE_ROOT,
+            'tests/data/generated_templates/VTC_base_single_vm_wait.tmp')
         heat_gen.generates_templates(self.template_name,
                                      self.deployment_configuration)
         for dirname, dirnames, filenames in os.walk(test_templates):
@@ -86,7 +92,8 @@ class TestGeneratesTemplate(unittest.TestCase):
 
     @mock.patch('experimental_framework.common.get_template_dir')
     def test_get_all_heat_templates_for_success(self, template_dir):
-        generated_templates = 'tests/data/generated_templates/'
+        generated_templates = os.path.join(APEX_LAKE_ROOT,
+                                           'tests/data/generated_templates/')
         template_dir.return_value = generated_templates
         extension = '.yaml'
         expected = ['experiment_1.yaml', 'experiment_2.yaml']
index 0fe8554..58bd755 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
+from __future__ import absolute_import
+import os
 import unittest
 import logging
 import experimental_framework.common as common
 
-from experimental_framework import heat_manager
+from experimental_framework import heat_manager, APEX_LAKE_ROOT
 import mock
 
 __author__ = 'gpetralx'
@@ -27,6 +31,7 @@ def get_mock_heat(version, *args, **kwargs):
 
 
 class MockStacks(object):
+
     def __init__(self, stacks):
         self.stacks = stacks
 
@@ -34,7 +39,7 @@ class MockStacks(object):
         list_name = list()
         for stack in self.stacks:
             list_name.append(stack.stack_name)
-        print list_name
+        print(list_name)
         return self.stacks
 
     def validate(self, template=None):
@@ -47,11 +52,12 @@ class MockStacks(object):
 
     def create(self, stack_name=None, files=None, template=None,
                parameters=None):
-        print stack_name
+        print(stack_name)
         self.stacks.append(MockStack(stack_name))
 
 
 class MockStacks_2(object):
+
     def __init__(self, stacks):
         self.stacks = stacks
 
@@ -60,6 +66,7 @@ class MockStacks_2(object):
 
 
 class MockStack(object):
+
     def __init__(self, stack_name):
         self.name = stack_name
 
@@ -80,6 +87,7 @@ class MockStack(object):
 
 
 class MockHeat(object):
+
     def __init__(self):
         stacks = [MockStack('stack_1'), MockStack('stack_2')]
         self.stacks_list = MockStacks(stacks)
@@ -90,18 +98,21 @@ class MockHeat(object):
 
 
 class MockHeat_2(MockHeat):
+
     def __init__(self):
         stacks = [MockStack('stack_1'), MockStack('stack_2')]
         self.stacks_list = MockStacks_2(stacks)
 
 
 class HeatManagerMock(heat_manager.HeatManager):
+
     def init_heat(self):
         if self.heat is None:
             self.heat = MockHeat()
 
 
 class HeatManagerMock_2(heat_manager.HeatManager):
+
     def init_heat(self):
         if self.heat is None:
             self.heat = MockHeat_2()
@@ -134,8 +145,9 @@ class TestHeatManager(unittest.TestCase):
                          self.heat_manager.check_stack_status('stack_x'))
 
     def test_validate_template_for_success(self):
-        template_file = \
-            'tests/data/test_templates/VTC_base_single_vm_wait_1.yaml'
+        template_file = os.path.join(
+            APEX_LAKE_ROOT,
+            'tests/data/test_templates/VTC_base_single_vm_wait_1.yaml')
         with self.assertRaises(ValueError):
             self.heat_manager.validate_heat_template(template_file)
 
@@ -180,11 +192,13 @@ class TestHeatManager_2(unittest.TestCase):
 
 
 class ServiceCatalog():
+
     def url_for(self, service_type):
         return 'http://heat_url'
 
 
 class KeystoneMock(object):
+
     @property
     def auth_token(self):
         return 'token'
@@ -193,6 +207,7 @@ class KeystoneMock(object):
 
 
 class TestHeatInit(unittest.TestCase):
+
     def setUp(self):
         credentials = dict()
         credentials['ip_controller'] = '1.1.1.1'
@@ -216,5 +231,5 @@ class TestHeatInit(unittest.TestCase):
                                                 tenant_name='project',
                                                 password='password',
                                                 auth_url='auth_uri')
-        heat_client.assert_called_once_with('1',  endpoint='http://heat_url',
+        heat_client.assert_called_once_with('1', endpoint='http://heat_url',
                                             token='token')
index 369129a..2bd8b7b 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import mock
 import os
@@ -21,6 +22,8 @@ import experimental_framework.benchmarks.\
     instantiation_validation_benchmark as iv_module
 from experimental_framework.benchmarks.\
     instantiation_validation_benchmark import InstantiationValidationBenchmark
+from six.moves import map
+from six.moves import range
 
 
 kill_counter = [0, 0]
@@ -204,11 +207,11 @@ class InstantiationValidationInitTest(unittest.TestCase):
         ]
         expected['allowed_values'] = dict()
         expected['allowed_values'][iv_module.THROUGHPUT] = \
-            map(str, range(0, 100))
+            list(map(str, range(0, 100)))
         expected['allowed_values'][iv_module.VLAN_SENDER] = \
-            map(str, range(-1, 4096))
+            list(map(str, range(-1, 4096)))
         expected['allowed_values'][iv_module.VLAN_RECEIVER] = \
-            map(str, range(-1, 4096))
+            list(map(str, range(-1, 4096)))
         expected['default_values'] = dict()
         expected['default_values'][iv_module.THROUGHPUT] = '1'
         expected['default_values'][iv_module.VLAN_SENDER] = '-1'
@@ -216,7 +219,7 @@ class InstantiationValidationInitTest(unittest.TestCase):
         output = self.iv.get_features()
         self.assertEqual(expected, output)
 
-    @mock.patch('commands.getoutput')
+    @mock.patch('subprocess.check_output')
     def test__get_pids_for_success(self, mock_getoutput):
         expected = [1234]
         mock_getoutput.return_value = '1234'
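The commands module is gone in Python 3; subprocess.check_output() is the usual substitute. One caveat for non-mocked callers: unlike commands.getoutput(), it returns bytes on Python 3 and raises CalledProcessError on a non-zero exit. A hedged sketch, assuming a pidof-style lookup:

    import subprocess

    # commands.getoutput('pidof nginx') becomes:
    out = subprocess.check_output(['pidof', 'nginx'])
    pids = [int(p) for p in out.decode('utf-8').split()]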
index f65600f..f9aa947 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import mock
-import os
+
+from six.moves import range
+
 import experimental_framework.common as common
 import experimental_framework.deployment_unit as deploy
 import experimental_framework.benchmarks.\
     instantiation_validation_noisy_neighbors_benchmark as mut
+from experimental_framework import APEX_LAKE_ROOT
 
 
 class InstantiationValidationInitTest(unittest.TestCase):
@@ -34,7 +38,7 @@ class InstantiationValidationInitTest(unittest.TestCase):
         openstack_credentials['heat_url'] = ''
         openstack_credentials['password'] = ''
         common.DEPLOYMENT_UNIT = deploy.DeploymentUnit(openstack_credentials)
-        common.BASE_DIR = os.getcwd()
+        common.BASE_DIR = APEX_LAKE_ROOT
         common.TEMPLATE_DIR = 'tests/data/generated_templates'
         self.iv = mut.\
             InstantiationValidationNoisyNeighborsBenchmark(name, params)
@@ -72,9 +76,11 @@ class InstantiationValidationInitTest(unittest.TestCase):
         expected['parameters'].append(mut.NUM_OF_NEIGHBORS)
         expected['parameters'].append(mut.AMOUNT_OF_RAM)
         expected['parameters'].append(mut.NUMBER_OF_CORES)
-        expected['allowed_values']['throughput'] = map(str, range(0, 100))
-        expected['allowed_values']['vlan_sender'] = map(str, range(-1, 4096))
-        expected['allowed_values']['vlan_receiver'] = map(str, range(-1, 4096))
+        expected['allowed_values']['throughput'] = [str(x) for x in range(100)]
+        expected['allowed_values']['vlan_sender'] = [str(x) for x in
+                                                     range(-1, 4096)]
+        expected['allowed_values']['vlan_receiver'] = [str(x) for x in
+                                                       range(-1, 4096)]
         expected['allowed_values'][mut.NUM_OF_NEIGHBORS] = \
             ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10']
         expected['allowed_values'][mut.NUMBER_OF_CORES] = \
@@ -115,10 +121,10 @@ class InstantiationValidationInitTest(unittest.TestCase):
                           'num_of_neighbours': 1}
         self.iv.template_file = 'template.yaml'
         self.iv.init()
-        mock_replace.assert_called_once_wih('file',
-                                            'local out_file = ""',
-                                            'local out_file = "' +
-                                            'res_file' + '"')
+        mock_replace.assert_called_once_with('file',
+                                             'local out_file = ""',
+                                             'local out_file = "' +
+                                             'res_file' + '"')
         mock_deploy_heat.assert_called_once_with('template.yaml',
                                                  'neighbour0',
                                                  {'cores': 1,
@@ -131,12 +137,14 @@ class InstantiationValidationInitTest(unittest.TestCase):
     @mock.patch('experimental_framework.common.'
                 'DEPLOYMENT_UNIT.destroy_heat_template')
     def test_finalize_for_success(self, mock_heat_destroy, mock_replace):
+        self.iv.lua_file = 'file'
+        self.iv.results_file = 'res_file'
         self.iv.neighbor_stack_names = ['neighbor0']
         stack_name = 'neighbor0'
         self.iv.finalize()
         mock_heat_destroy.assert_called_once_with(stack_name)
-        mock_replace.assert_called_once_wih('file',
-                                            'local out_file = ""',
-                                            'local out_file = "' +
-                                            'res_file' + '"')
+        mock_replace.assert_called_once_with('file',
+                                             'local out_file = "' +
+                                             'res_file' + '"',
+                                             'local out_file = ""')
         self.assertEqual(self.iv.neighbor_stack_names, list())
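The assert_called_once_wih -> assert_called_once_with fix is more significant than a typo cleanup: on older mock releases a misspelled assert method was just an auto-created Mock attribute, so calling it passed silently. mock 2.0.0 (matching Python 3.5+ unittest.mock) raises AttributeError for attribute names starting with 'assert', which is likely what exposed these:

    import mock

    m = mock.Mock()
    # m.assert_called_once_wih('x')   # mock 2.0: AttributeError; old mock: silent no-op
    m('x')
    m.assert_called_once_with('x')    # the real assertion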
index fc5a7fd..39b38d7 100644 (file)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import mock
 import os
 import experimental_framework.common as common
 from experimental_framework.benchmarks \
     import multi_tenancy_throughput_benchmark as bench
+from six.moves import range
 
 __author__ = 'gpetralx'
 
 
 class MockDeploymentUnit(object):
+
     def deploy_heat_template(self, temp_file, stack_name, heat_param):
         pass
 
@@ -35,6 +38,7 @@ def get_deployment_unit():
 
 
 class TestMultiTenancyThroughputBenchmark(unittest.TestCase):
+
     def setUp(self):
         name = 'benchmark'
         params = dict()
@@ -47,9 +51,9 @@ class TestMultiTenancyThroughputBenchmark(unittest.TestCase):
     def test_get_features_for_sanity(self):
         output = self.benchmark.get_features()
         self.assertIsInstance(output, dict)
-        self.assertIn('parameters', output.keys())
-        self.assertIn('allowed_values', output.keys())
-        self.assertIn('default_values', output.keys())
+        self.assertIn('parameters', output)
+        self.assertIn('allowed_values', output)
+        self.assertIn('default_values', output)
         self.assertIsInstance(output['parameters'], list)
         self.assertIsInstance(output['allowed_values'], dict)
         self.assertIsInstance(output['default_values'], dict)
index ef3b0da..487de77 100644 (file)
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 
+from __future__ import absolute_import
 import unittest
 import mock
 import os
@@ -37,9 +38,9 @@ class RFC2544ThroughputBenchmarkRunTest(unittest.TestCase):
     def test_get_features_for_sanity(self):
         output = self.benchmark.get_features()
         self.assertIsInstance(output, dict)
-        self.assertIn('parameters', output.keys())
-        self.assertIn('allowed_values', output.keys())
-        self.assertIn('default_values', output.keys())
+        self.assertIn('parameters', output)
+        self.assertIn('allowed_values', output)
+        self.assertIn('default_values', output)
         self.assertIsInstance(output['parameters'], list)
         self.assertIsInstance(output['allowed_values'], dict)
         self.assertIsInstance(output['default_values'], dict)
@@ -74,7 +75,6 @@ class RFC2544ThroughputBenchmarkRunTest(unittest.TestCase):
         output = self.benchmark.run()
         self.assertEqual(expected, output)
         conf_lua_file_mock.assert_called_once()
-        reset_lua_file_mock.assert_called_once()
         dpdk_instance = mock_dpdk()
         dpdk_instance.init_dpdk_pktgen.assert_called_once_with(
             dpdk_interfaces=2, pcap_file_0='packet_1.pcap',
index e51343f..fb38b69 100644 (file)
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
 import unittest
 import experimental_framework.heat_template_generation as heat_gen
 
@@ -19,6 +20,7 @@ __author__ = 'gpetralx'
 
 
 class TestTreeNode(unittest.TestCase):
+
     def setUp(self):
         self.tree = heat_gen.TreeNode()