- add runner_id tag
- add test case name tag
- add task_id tag
JIRA: YARDSTICK-212
Change-Id: I75c27e23942a6e2189019e94bfe8026a5fd67621
Signed-off-by: QiLiang <liangqi1@huawei.com>
Conflicts:
yardstick/dispatcher/influxdb.py
"sla": {
"action": "monitor",
"max_rtt": 10
"sla": {
"action": "monitor",
"max_rtt": 10
+ },
+ "tc": "ping",
+ "task_id": "ea958583-c91e-461a-af14-2a7f9d7f79e7"
import ipaddress
import time
import logging
import ipaddress
import time
import logging
from itertools import ifilter
from itertools import ifilter
from yardstick.benchmark.contexts.base import Context
from yardstick.benchmark.runners import base as base_runner
from yardstick.common.task_template import TaskTemplate
from yardstick.benchmark.contexts.base import Context
from yardstick.benchmark.runners import base as base_runner
from yardstick.common.task_template import TaskTemplate
for i in range(0, len(task_files)):
one_task_start_time = time.time()
parser.path = task_files[i]
for i in range(0, len(task_files)):
one_task_start_time = time.time()
parser.path = task_files[i]
- scenarios, run_in_parallel = parser.parse_task(task_args[i],
+ task_name = os.path.splitext(os.path.basename(task_files[i]))[0]
+ scenarios, run_in_parallel = parser.parse_task(task_name,
+ task_args[i],
task_args_fnames[i])
self._run(scenarios, run_in_parallel, args.output_file)
task_args_fnames[i])
self._run(scenarios, run_in_parallel, args.output_file)
- def parse_task(self, task_args=None, task_args_file=None):
+ def parse_task(self, task_name, task_args=None, task_args_file=None):
'''parses the task file and return an context and scenario instances'''
print "Parsing task config:", self.path
'''parses the task file and return an context and scenario instances'''
print "Parsing task config:", self.path
run_in_parallel = cfg.get("run_in_parallel", False)
run_in_parallel = cfg.get("run_in_parallel", False)
+ # add tc and task id for influxdb extended tags
+ task_id = str(uuid.uuid4())
+ for scenario in cfg["scenarios"]:
+ scenario["tc"] = task_name
+ scenario["task_id"] = task_id
+
# TODO we need something better here, a class that represent the file
return cfg["scenarios"], run_in_parallel
# TODO we need something better here, a class that represent the file
return cfg["scenarios"], run_in_parallel
self.influxdb_url = "%s/write?db=%s" % (self.target, self.db_name)
self.raw_result = []
self.case_name = ""
self.influxdb_url = "%s/write?db=%s" % (self.target, self.db_name)
self.raw_result = []
self.case_name = ""
+ self.tc = ""
+ self.task_id = -1
self.static_tags = {
"pod_name": os.environ.get('POD_NAME', 'unknown'),
"installer": os.environ.get('INSTALLER_TYPE', 'unknown'),
self.static_tags = {
"pod_name": os.environ.get('POD_NAME', 'unknown'),
"installer": os.environ.get('INSTALLER_TYPE', 'unknown'),
return str(int(float(timestamp) * 1000000000))
return str(int(float(timestamp) * 1000000000))
def _get_extended_tags(self, data):
    """Return the per-record InfluxDB tags for one benchmark record.

    Combines the runner id carried in the record itself with the test
    case name (``tc``) and task id cached on the dispatcher when the
    scenario configuration was first seen.

    :param data: benchmark record dict; must contain a "runner_id" key.
    :returns: dict of extended tags (runner_id, tc, task_id).
    """
    return {
        "runner_id": data["runner_id"],
        "tc": self.tc,
        "task_id": self.task_id,
    }
def _data_to_line_protocol(self, data):
msg = {}
point = {}
point["measurement"] = self.case_name
point["fields"] = self._dict_key_flatten(data["benchmark"]["data"])
point["time"] = self._get_nano_timestamp(data)
def _data_to_line_protocol(self, data):
msg = {}
point = {}
point["measurement"] = self.case_name
point["fields"] = self._dict_key_flatten(data["benchmark"]["data"])
point["time"] = self._get_nano_timestamp(data)
+ point["tags"] = self._get_extended_tags(data)
msg["points"] = [point]
msg["tags"] = self.static_tags
msg["points"] = [point]
msg["tags"] = self.static_tags
if isinstance(data, dict) and "scenario_cfg" in data:
self.case_name = data["scenario_cfg"]["type"]
if isinstance(data, dict) and "scenario_cfg" in data:
self.case_name = data["scenario_cfg"]["type"]
+ self.tc = data["scenario_cfg"]["tc"]
+ self.task_id = data["scenario_cfg"]["task_id"]
return 0
if self.case_name == "":
return 0
if self.case_name == "":