stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
deployment_uuid = proc.stdout.readline().rstrip()
- return deployment_uuid.decode()
+ return deployment_uuid.decode("utf-8")
@staticmethod
def create_rally_deployment(environ=None):
'--deployment',
str(getattr(config.CONF, 'rally_deployment_name'))]
output = subprocess.check_output(cmd)
- LOGGER.info("%s\n%s", " ".join(cmd), output.decode())
+ LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
except subprocess.CalledProcessError:
pass
cmd = ['rally', 'deployment', 'create', '--fromenv',
'--name', str(getattr(config.CONF, 'rally_deployment_name'))]
output = subprocess.check_output(cmd, env=environ)
- LOGGER.info("%s\n%s", " ".join(cmd), output.decode())
+ LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
cmd = ['rally', 'deployment', 'check']
output = subprocess.check_output(cmd)
- LOGGER.info("%s\n%s", " ".join(cmd), output.decode())
+ LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
return RallyBase.get_verifier_deployment_id()
@staticmethod
taskid_re = re.compile('^Task +(.*): started$')
for line in cmd_raw.splitlines(True):
line = line.strip()
- match = taskid_re.match(line.decode())
+ match = taskid_re.match(line.decode("utf-8"))
if match:
return match.group(1)
return None
cmd = (["rally", "task", "detailed", "--uuid", task_id])
LOGGER.debug('running command: %s', cmd)
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- LOGGER.info("%s\n%s", " ".join(cmd), output.decode())
+ LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
# save report as JSON
report_json_name = '{}.json'.format(test_name)
"--out", report_json_dir])
LOGGER.debug('running command: %s', cmd)
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- LOGGER.info("%s\n%s", " ".join(cmd), output.decode())
+ LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
json_results = open(report_json_dir).read()
self._append_summary(json_results, test_name)
"--to", file_name]
LOGGER.debug('running command: %s', cmd)
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- LOGGER.info("%s\n%s", " ".join(cmd), output.decode())
+ LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
@staticmethod
def verify_report(file_name, uuid, export_type="html"):
"--uuid", uuid, "--to", file_name]
LOGGER.debug('running command: %s', cmd)
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- LOGGER.info("%s\n%s", " ".join(cmd), output.decode())
+ LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
def clean(self):
"""Cleanup of OpenStack resources. Should be called on completion."""
yaml_data2 = ""
for line in output.splitlines():
try:
- grp = re.search(r'^([^\[]*)(\[.*\])\n*$', line.decode())
+ grp = re.search(r'^([^\[]*)(\[.*\])\n*$', line.decode("utf-8"))
yaml_data2 = "{}\n{}: {}".format(
yaml_data2, grp.group(1), grp.group(2))
except Exception: # pylint: disable=broad-except
self.shaker_timeout, self.image.name, self.flavor.name,
self.fip.floating_ip_address, self.ext_net.id,
env.get('NAMESERVER')))
- self.__logger.info("output:\n%s", stdout.read().decode())
- self.__logger.info("error:\n%s", stderr.read().decode())
+ self.__logger.info("output:\n%s", stdout.read().decode("utf-8"))
+ self.__logger.info("error:\n%s", stderr.read().decode("utf-8"))
if not os.path.exists(self.res_dir):
os.makedirs(self.res_dir)
try:
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
for line in proc.stdout:
- LOGGER.info(line.decode().rstrip())
- new_line = line.decode().replace(' ', '').split('|')
+ LOGGER.info(line.decode("utf-8").rstrip())
+ new_line = line.decode("utf-8").replace(' ', '').split('|')
if 'Tests' in new_line:
break
if 'Testscount' in new_line:
'--force']
try:
output = subprocess.check_output(cmd)
- LOGGER.info("%s\n%s", " ".join(cmd), output.decode())
+ LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
except subprocess.CalledProcessError:
pass
'--name', str(getattr(config.CONF, 'tempest_verifier_name')),
'--type', 'tempest', '--system-wide']
output = subprocess.check_output(cmd)
- LOGGER.info("%s\n%s", " ".join(cmd), output.decode())
+ LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
return TempestCommon.get_verifier_id()
@staticmethod
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
verifier_uuid = proc.stdout.readline().rstrip()
- return verifier_uuid.decode()
+ return verifier_uuid.decode("utf-8")
@staticmethod
def get_verifier_repo_dir(verifier_id):
cmd = ['rally', 'verify', 'configure-verifier', '--reconfigure',
'--id', str(getattr(config.CONF, 'tempest_verifier_name'))]
output = subprocess.check_output(cmd)
- LOGGER.info("%s\n%s", " ".join(cmd), output.decode())
+ LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
LOGGER.debug("Looking for tempest.conf file...")
tempest_conf_file = os.path.join(deployment_dir, "tempest.conf")
cmd = "(cd {0}; stestr list '{1}' >{2} 2>/dev/null)".format(
self.verifier_repo_dir, testr_mode, self.list)
output = subprocess.check_output(cmd, shell=True)
- LOGGER.info("%s\n%s", cmd, output.decode())
+ LOGGER.info("%s\n%s", cmd, output.decode("utf-8"))
os.remove('/etc/tempest.conf')
def apply_tempest_blacklist(self):
with proc.stdout:
for line in iter(proc.stdout.readline, b''):
- if re.search(r"\} tempest\.", line.decode()):
+ if re.search(r"\} tempest\.", line.decode("utf-8")):
LOGGER.info(line.rstrip())
- elif re.search(r'(?=\(UUID=(.*)\))', line.decode()):
+ elif re.search(r'(?=\(UUID=(.*)\))', line.decode("utf-8")):
self.verification_id = re.search(
- r'(?=\(UUID=(.*)\))', line.decode()).group(1)
- f_stdout.write(line.decode())
+ r'(?=\(UUID=(.*)\))', line.decode("utf-8")).group(1)
+ f_stdout.write(line.decode("utf-8"))
proc.wait()
f_stdout.close()
if not os.path.exists(self.res_dir):
os.makedirs(self.res_dir)
cmd = ['vmtp', '-sc']
- output = subprocess.check_output(cmd).decode()
+ output = subprocess.check_output(cmd).decode("utf-8")
self.__logger.info("%s\n%s", " ".join(cmd), output)
with open(self.config, "w+") as conf:
vmtp_conf = yaml.full_load(output)
cmd = ['vmtp', '-d', '--json', '{}/vmtp.json'.format(self.res_dir),
'-c', self.config]
output = subprocess.check_output(
- cmd, stderr=subprocess.STDOUT, env=new_env).decode()
+ cmd, stderr=subprocess.STDOUT, env=new_env).decode("utf-8")
self.__logger.info("%s\n%s", " ".join(cmd), output)
cmd = ['vmtp_genchart', '-c', '{}/vmtp.html'.format(self.res_dir),
'{}/vmtp.json'.format(self.res_dir)]
output = subprocess.check_output(
- cmd, stderr=subprocess.STDOUT).decode()
+ cmd, stderr=subprocess.STDOUT).decode("utf-8")
self.__logger.info("%s\n%s", " ".join(cmd), output)
with open('{}/vmtp.json'.format(self.res_dir), 'r') as res_file:
self.details = json.load(res_file)
yfile.write(CLOUD_TEMPLATE.format(**cloud_data))
cmd = ['juju', 'add-cloud', 'abot-epc', '-f', clouds_yaml, '--replace']
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
def _register_credentials(self):
self.__logger.info("Creating Credentials for Abot-epc .....")
cmd = ['juju', 'add-credential', 'abot-epc', '-f', credentials_yaml,
'--replace']
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
def prepare(self):
"""Prepare testcase (Additional pre-configuration steps)."""
'RegionOne'),
'-u', self.public_auth_url]
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
return image
def publish_image_alt(self, name=None):
'RegionOne'),
'-u', self.public_auth_url]
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
return image_alt
def deploy_orchestrator(self): # pylint: disable=too-many-locals
'--config', 'use-default-secgroup=true',
'--debug']
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
except subprocess.CalledProcessError as cpe:
self.__logger.error(
"Exception with Juju Bootstrap: %s\n%s",
- cpe.cmd, cpe.output.decode())
+ cpe.cmd, cpe.output.decode("utf-8"))
return False
except Exception: # pylint: disable=broad-except
self.__logger.exception("Some issue with Juju Bootstrap ...")
cmd = ['juju', 'status', '--format', 'short', name]
for i in range(10):
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
ret = re.search(
- r'(?=workload:({})\))'.format(status), output.decode())
+ r'(?=workload:({})\))'.format(status), output.decode("utf-8"))
if ret:
self.__logger.info("%s workload is %s", name, status)
break
self.__logger.info("Deploying Abot-epc bundle file ...")
cmd = ['juju', 'deploy', '{}'.format(descriptor.get('file_name'))]
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
self.__logger.info("Waiting for instances .....")
try:
cmd = ['timeout', '-t', JujuEpc.juju_timeout, 'juju-wait']
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
self.__logger.info("Deployed Abot-epc on Openstack")
except subprocess.CalledProcessError as cpe:
self.__logger.error(
"Exception with Juju VNF Deployment: %s\n%s",
- cpe.cmd, cpe.output.decode())
+ cpe.cmd, cpe.output.decode("utf-8"))
return False
except Exception: # pylint: disable=broad-except
self.__logger.exception("Some issue with the VNF Deployment ..")
self.__logger.info("Checking status of ABot and EPC units ...")
cmd = ['juju', 'status']
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.debug("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.debug("%s\n%s", " ".join(cmd), output.decode("utf-8"))
for app in ['abot-epc-basic', 'oai-epc', 'oai-hss']:
if not self.check_app(app):
return False
'juju', 'scp', '--', '-r', '-v',
'{}/featureFiles'.format(self.case_dir), 'abot-epc-basic/0:~/']
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
self.__logger.info("Copying the feature files within Abot_node ")
cmd = ['timeout', '-t', JujuEpc.juju_timeout,
'sudo', 'cp', '-vfR', '~/featureFiles/*',
'/etc/rebaca-test-suite/featureFiles']
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
return True
def test_vnf(self):
cmd = ['juju', 'run-action', 'abot-epc-basic/0', 'run',
'tagnames={}'.format(self.details['test_vnf']['tag_name'])]
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
cmd = ['timeout', '-t', JujuEpc.juju_timeout, 'juju-wait']
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
duration = time.time() - start_time
self.__logger.info("Getting results from Abot node....")
'/var/lib/abot-epc-basic/artifacts/TestResults.json',
'{}/.'.format(self.res_dir)]
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
self.__logger.info("Parsing the Test results...")
res = (process_abot_test_result('{}/TestResults.json'.format(
self.res_dir)))
try:
cmd = ['juju', 'debug-log', '--replay', '--no-tail']
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.debug("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.debug(
+ "%s\n%s", " ".join(cmd), output.decode("utf-8"))
self.__logger.info("Destroying Orchestrator...")
cmd = ['timeout', '-t', JujuEpc.juju_timeout,
'juju', 'destroy-controller', '-y', 'abot-controller',
'--destroy-all-models']
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
- self.__logger.info("%s\n%s", " ".join(cmd), output.decode())
+ self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
except subprocess.CalledProcessError as cpe:
self.__logger.error(
"Exception with Juju Cleanup: %s\n%s",
- cpe.cmd, cpe.output.decode())
+ cpe.cmd, cpe.output.decode("utf-8"))
except Exception: # pylint: disable=broad-except
self.__logger.exception("General issue during the undeployment ..")
for fip in self.cloud.list_floating_ips():
cmd)
break
- res_buff += res.decode()
+ res_buff += res.decode("utf-8")
self.logger.debug("Response : '%s'", res_buff)
return res_buff
calls = [
mock.call(['rally', 'deployment', 'destroy', '--deployment',
str(getattr(config.CONF, 'rally_deployment_name'))]),
- mock.call().decode(),
+ mock.call().decode("utf-8"),
mock.call(['rally', 'deployment', 'create', '--fromenv', '--name',
str(getattr(config.CONF, 'rally_deployment_name'))],
env=None),
- mock.call().decode(),
+ mock.call().decode("utf-8"),
mock.call(['rally', 'deployment', 'check']),
- mock.call().decode()]
+ mock.call().decode("utf-8")]
mock_exec.assert_has_calls(calls)
@mock.patch('functest.opnfv_tests.openstack.rally.rally.os.path.exists')
mock.patch('six.moves.builtins.open',
mock.mock_open()) as mopen:
stream = six.BytesIO()
- stream.write(self.cmd_readline().encode())
+ stream.write(self.cmd_readline().encode("utf-8"))
mock_obj2 = mock.Mock()
attrs = {'stdout': stream, 'wait.return_value': 1}
mock_obj2.configure_mock(**attrs)
mock.patch('six.moves.builtins.open',
mock.mock_open()) as mopen:
stream = six.BytesIO()
- stream.write(self.cmd_readline().encode())
+ stream.write(self.cmd_readline().encode("utf-8"))
mock_obj2 = mock.Mock()
attrs = {'stdout': stream, 'wait.return_value': 0}
mock_obj2.configure_mock(**attrs)
with mock.patch('functest.utils.functest_utils.subprocess.Popen') \
as mock_subproc_open:
stream = six.BytesIO()
- stream.write(self.cmd_readline().encode())
+ stream.write(self.cmd_readline().encode("utf-8"))
mock_obj2 = mock.Mock()
attrs = {'stdout': stream, 'wait.return_value': 0}
mock_obj2.configure_mock(**attrs)
with mock.patch('functest.utils.functest_utils.subprocess.Popen') \
as mock_subproc_open:
stream = six.BytesIO()
- stream.write(self.cmd_readline().encode())
+ stream.write(self.cmd_readline().encode("utf-8"))
mock_obj2 = mock.Mock()
attrs = {'stdout': stream, 'wait.return_value': 1}
mock_obj2.configure_mock(**attrs)
ofd = open(output_file, "w")
for line in iter(popen.stdout.readline, b''):
if output_file:
- ofd.write(line.decode())
+ ofd.write(line.decode("utf-8"))
else:
- line = line.decode().replace('\n', '')
+ line = line.decode("utf-8").replace('\n', '')
print(line)
sys.stdout.flush()
if output_file:
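Note (illustrative sketch, not part of the change above): the pattern applied throughout is to pass the encoding explicitly, so byte output from subprocess is decoded the same way under Python 2 (where a bare str.decode() falls back to ASCII) and Python 3 (where bytes.decode() defaults to UTF-8). A minimal example of the idiom, using a hypothetical helper name:

import logging
import subprocess

LOGGER = logging.getLogger(__name__)

def run_and_log(cmd):
    # Hypothetical helper, not taken from the diff: run a command, capture
    # stdout and stderr together, and log the output decoded explicitly as
    # UTF-8 so non-ASCII bytes behave identically on every interpreter
    # and locale.
    output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
    return output.decode("utf-8")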