import mimetypes
import os
import re
+import urllib
import zipfile
import boto3
from boto3.s3.transfer import TransferConfig
import botocore
-import pkg_resources
import requests
-from six.moves import urllib
from xtesting.core import testcase
from xtesting.utils import env
+from xtesting.utils import config
+from xtesting.utils import constants
__author__ = "Cedric Ollivier <cedric.ollivier@orange.com>"
# NOTE(review): this fragment appears to be the body of Campaign.dump_db()
# (the enclosing `def` is outside this hunk -- confirm against full file).
# It fetches all results for the current BUILD_TAG from the test DB and
# archives them as <BUILD_TAG>.json, with any HTTP_DST_URL prefix stripped
# from the artifact links so the archive stays location-independent.
try:
    url = env.get('TEST_DB_URL')
    req = requests.get(
        f"{url}?build_tag={env.get('BUILD_TAG')}",
        headers=testcase.TestCase.headers)
    req.raise_for_status()
    output = req.json()
    Campaign.__logger.debug("data from DB: \n%s", output)
    for i, _ in enumerate(output["results"]):
        for j, _ in enumerate(
                output["results"][i]["details"]["links"]):
            # BUG FIX: the pattern was a plain string, so re.sub searched
            # for the literal text "{os.environ['HTTP_DST_URL']}" and the
            # URL prefix was never stripped. It must be an f-string.
            # NOTE(review): the URL is interpolated unescaped into the
            # regex; fine for plain http(s) URLs, re.escape() would be
            # stricter -- confirm HTTP_DST_URL never holds regex metachars.
            output["results"][i]["details"]["links"][j] = re.sub(
                f"^{os.environ['HTTP_DST_URL']}/*", '',
                output["results"][i]["details"]["links"][j])
    Campaign.__logger.debug("data to archive: \n%s", output)
    with open(f"{env.get('BUILD_TAG')}.json", "w",
              encoding='utf-8') as dfile:
        json.dump(output, dfile)
except Exception:  # pylint: disable=broad-except
    Campaign.__logger.exception(
        "The results cannot be collected from DB")
# Download every artifact stored under <s3path>/<build_tag>/ in the
# destination S3 bucket into a mirrored local directory tree.
dst_s3_url = os.environ["S3_DST_URL"]
# 5 * 1024 ** 5 is 5 PiB -- presumably chosen to effectively disable
# multipart transfers on Google Cloud Storage (which does not support
# S3 multipart); 8 MiB is the normal threshold. TODO confirm intent.
multipart_threshold = 5 * 1024 ** 5 if "google" in os.environ[
    "S3_ENDPOINT_URL"] else 8 * 1024 * 1024
tconfig = TransferConfig(multipart_threshold=multipart_threshold)
bucket_name = urllib.parse.urlparse(dst_s3_url).netloc
s3path = re.search(
    '^/*(.*)/*$', urllib.parse.urlparse(dst_s3_url).path).group(1)
prefix = os.path.join(s3path, build_tag)
# pylint: disable=no-member
for s3_object in b3resource.Bucket(bucket_name).objects.filter(
        Prefix=f"{prefix}/"):
    # Hoist the decoded key: the original recomputed unquote_plus()
    # three times per object.
    key = urllib.parse.unquote_plus(s3_object.key)
    # Local file path = decoded key with the bucket-side prefix removed.
    lfile = re.sub(f'^{s3path}/*', '', key)
    path, _ = os.path.split(key)
    lpath = re.sub(f'^{s3path}/*', '', path)
    if lpath:
        # exist_ok=True replaces the racy exists()-then-makedirs() check.
        os.makedirs(lpath, exist_ok=True)
    Campaign.__logger.info("Downloading %s", lfile)
    # pylint: disable=no-member
    b3resource.Bucket(bucket_name).download_file(
        key, lfile, Config=tconfig)
return Campaign.EX_OK
except Exception: # pylint: disable=broad-except
Campaign.__logger.exception("Cannot publish the artifacts")
build_tag = env.get('BUILD_TAG')
assert Campaign.dump_db() == Campaign.EX_OK
assert Campaign.dump_artifacts() == Campaign.EX_OK
- with zipfile.ZipFile('{}.zip'.format(build_tag), 'w') as zfile:
- zfile.write("{}.json".format(build_tag))
+ with zipfile.ZipFile(f'{build_tag}.zip',
+ 'w', zipfile.ZIP_DEFLATED) as zfile:
+ zfile.write(f"{build_tag}.json")
for root, _, files in os.walk(build_tag):
for filename in files:
zfile.write(os.path.join(root, filename))
dst_s3_url = os.environ["S3_DST_URL"]
multipart_threshold = 5 * 1024 ** 5 if "google" in os.environ[
"S3_ENDPOINT_URL"] else 8 * 1024 * 1024
- config = TransferConfig(multipart_threshold=multipart_threshold)
+ tconfig = TransferConfig(multipart_threshold=multipart_threshold)
bucket_name = urllib.parse.urlparse(dst_s3_url).netloc
- mime_type = mimetypes.guess_type('{}.zip'.format(build_tag))
+ mime_type = mimetypes.guess_type(f'{build_tag}.zip')
path = urllib.parse.urlparse(dst_s3_url).path.strip("/")
# pylint: disable=no-member
b3resource.Bucket(bucket_name).upload_file(
- '{}.zip'.format(build_tag),
- os.path.join(path, '{}.zip'.format(build_tag)),
- Config=config,
+ f'{build_tag}.zip',
+ os.path.join(path, f'{build_tag}.zip'),
+ Config=tconfig,
ExtraArgs={'ContentType': mime_type[
0] or 'application/octet-stream'})
dst_http_url = os.environ["HTTP_DST_URL"]
- link = os.path.join(dst_http_url, '{}.zip'.format(build_tag))
+ link = os.path.join(dst_http_url, f'{build_tag}.zip')
Campaign.__logger.info(
"All data were successfully published:\n\n%s", link)
return Campaign.EX_OK
# Entry-point tail: prepare the results directory, configure logging from
# the packaged ini (debug variant when DEBUG=true), then run the campaign
# archival from inside the results directory.
# exist_ok=True replaces the racy exists()-then-makedirs() pair.
os.makedirs(testcase.TestCase.dir_results, exist_ok=True)
# NOTE(review): assumes env.get('DEBUG') never returns None (xtesting's
# env module supplies defaults) -- confirm, else .lower() would raise.
if env.get('DEBUG').lower() == 'true':
    logging.config.fileConfig(config.get_xtesting_config(
        'logging.debug.ini', constants.DEBUG_INI_PATH_DEFAULT))
else:
    logging.config.fileConfig(config.get_xtesting_config(
        'logging.ini', constants.INI_PATH_DEFAULT))
logging.captureWarnings(True)
# All dumps/zips above use relative paths, so work from the results dir.
os.chdir(testcase.TestCase.dir_results)
Campaign.zip_campaign_files()