# Standard-library imports first, third-party after (PEP 8 grouping).
# NOTE(review): resolved from a diff hunk — the '+' markers on the
# mimetypes/TransferConfig lines were patch artifacts, not code.
from datetime import datetime
import json
import logging
import mimetypes
import os
import re
import sys

import boto3
from boto3.s3.transfer import TransferConfig
import botocore
import prettytable
import requests
dir_results = "/var/lib/xtesting/results"
_job_name_rule = "(dai|week)ly-(.+?)-[0-9]*"
- _headers = {'Content-Type': 'application/json'}
+ headers = {'Content-Type': 'application/json'}
__logger = logging.getLogger(__name__)
def __init__(self, **kwargs):
self.start_time = 0
self.stop_time = 0
self.is_skipped = False
+ self.output_log_name = 'xtesting.log'
+ self.output_debug_log_name = 'xtesting.debug.log'
self.res_dir = "{}/{}".format(self.dir_results, self.case_name)
def __str__(self):
data["version"] = "unknown"
req = requests.post(
url, data=json.dumps(data, sort_keys=True),
- headers=self._headers)
+ headers=self.headers)
req.raise_for_status()
if urllib.parse.urlparse(url).scheme != "file":
- res_url = req.json()["href"]
- if env.get('TEST_DB_EXT_URL'):
- res_url = res_url.replace(
- env.get('TEST_DB_URL'), env.get('TEST_DB_EXT_URL'))
+ # href must be postprocessed as OPNFV testapi is misconfigured
+ # (localhost is returned)
+ uid = re.sub(r'^.*/api/v1/results/*', '', req.json()["href"])
+ netloc = env.get('TEST_DB_EXT_URL') if env.get(
+ 'TEST_DB_EXT_URL') else env.get('TEST_DB_URL')
self.__logger.info(
"The results were successfully pushed to DB: \n\n%s\n",
- res_url)
+ os.path.join(netloc, uid))
except AssertionError:
self.__logger.exception(
"Please run test before publishing the results")
b3resource = boto3.resource(
's3', endpoint_url=os.environ["S3_ENDPOINT_URL"])
dst_s3_url = os.environ["S3_DST_URL"]
+ multipart_threshold = 5 * 1024 ** 5 if "google" in os.environ[
+ "S3_ENDPOINT_URL"] else 8 * 1024 * 1024
+ config = TransferConfig(multipart_threshold=multipart_threshold)
bucket_name = urllib.parse.urlparse(dst_s3_url).netloc
try:
b3resource.meta.client.head_bucket(Bucket=bucket_name)
typ, value, traceback = sys.exc_info()
six.reraise(typ, value, traceback)
path = urllib.parse.urlparse(dst_s3_url).path.strip("/")
+ dst_http_url = os.environ["HTTP_DST_URL"]
output_str = "\n"
- for root, _, files in os.walk(self.dir_results):
+ self.details["links"] = []
+ for log_file in [self.output_log_name, self.output_debug_log_name]:
+ if os.path.exists(os.path.join(self.dir_results, log_file)):
+ abs_file = os.path.join(self.dir_results, log_file)
+ mime_type = mimetypes.guess_type(abs_file)
+ self.__logger.debug(
+ "Publishing %s %s", abs_file, mime_type)
+ # pylint: disable=no-member
+ b3resource.Bucket(bucket_name).upload_file(
+ abs_file, os.path.join(path, log_file), Config=config,
+ ExtraArgs={'ContentType': mime_type[
+ 0] or 'application/octet-stream'})
+ link = os.path.join(dst_http_url, log_file)
+ output_str += "\n{}".format(link)
+ self.details["links"].append(link)
+ for root, _, files in os.walk(self.res_dir):
for pub_file in files:
+ abs_file = os.path.join(root, pub_file)
+ mime_type = mimetypes.guess_type(abs_file)
+ self.__logger.debug(
+ "Publishing %s %s", abs_file, mime_type)
# pylint: disable=no-member
b3resource.Bucket(bucket_name).upload_file(
- os.path.join(root, pub_file),
+ abs_file,
os.path.join(path, os.path.relpath(
os.path.join(root, pub_file),
- start=self.dir_results)))
- dst_http_url = os.environ["HTTP_DST_URL"]
- output_str += "\n{}".format(
- os.path.join(dst_http_url, os.path.relpath(
- os.path.join(root, pub_file),
- start=self.dir_results)))
+ start=self.dir_results)),
+ Config=config,
+ ExtraArgs={'ContentType': mime_type[
+ 0] or 'application/octet-stream'})
+ link = os.path.join(dst_http_url, os.path.relpath(
+ os.path.join(root, pub_file),
+ start=self.dir_results))
+ output_str += "\n{}".format(link)
+ self.details["links"].append(link)
self.__logger.info(
"All artifacts were successfully published: %s\n", output_str)
return TestCase.EX_OK