Merge the N-days and whole-database processes, and do not republish data that already exists.
[releng.git] / utils/test/scripts/create_kibana_dashboards.py
index abb9471..efa6e17 100644
@@ -1,18 +1,14 @@
 #! /usr/bin/env python
 import json
-import logging
 import urlparse
 
 import argparse
-import yaml
 
+import conf_utils
+import logger_utils
 import shared_utils
 
-logger = logging.getLogger('create_kibana_dashboards')
-logger.setLevel(logging.DEBUG)
-file_handler = logging.FileHandler('./{}.log'.format('create_kibana_dashboards'))
-file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
-logger.addHandler(file_handler)
+logger = logger_utils.KibanaDashboardLogger('elastic2kibana').get
 
 _installers = {'fuel', 'apex', 'compass', 'joid'}
 
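The inline logging setup removed above is replaced by a shared logger_utils module. A minimal sketch of that wrapper, assuming it simply reproduces the removed FileHandler configuration and exposes the configured logger through a .get property (the real logger_utils is not part of this diff):

    # logger_utils.py (hypothetical sketch, not the actual module)
    import logging


    class KibanaDashboardLogger(object):
        def __init__(self, name):
            # same handler/formatter setup that the removed lines configured inline
            self._logger = logging.getLogger(name)
            self._logger.setLevel(logging.DEBUG)
            handler = logging.FileHandler('./{}.log'.format(name))
            handler.setFormatter(
                logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
            self._logger.addHandler(handler)

        @property
        def get(self):
            # used as KibanaDashboardLogger('elastic2kibana').get in the diff above
            return self._logger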
@@ -277,7 +273,7 @@ def _get_pods_and_scenarios(project_name, case_name, installer):
         }
     })
 
-    elastic_data = shared_utils.get_elastic_data(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
+    elastic_data = shared_utils.get_elastic_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
                                                  es_creds, query_json)
 
     pods_and_scenarios = {}
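shared_utils.get_elastic_docs itself is not shown in this diff. A minimal sketch of a helper satisfying the call above, assuming an HTTP _search against Elasticsearch via the requests library and a (user, password) credential tuple; the real transport and credential format in shared_utils may differ:

    # hypothetical sketch of shared_utils.get_elastic_docs
    import json
    import requests


    def get_elastic_docs(elastic_url, es_creds, body):
        # run the query; size raised so all matching documents come back in one page
        response = requests.get('{}/_search?size=10000'.format(elastic_url),
                                data=json.dumps(body),
                                auth=es_creds)
        response.raise_for_status()
        # strip the Elasticsearch envelope and return only the stored documents
        return [hit['_source'] for hit in response.json()['hits']['hits']]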
@@ -307,10 +303,7 @@ def construct_dashboards():
     :return: list of KibanaDashboards
     """
     kibana_dashboards = []
-    with open('./testcases.yaml') as f:
-        testcases_yaml = yaml.safe_load(f)
-
-    for project, case_dicts in testcases_yaml.items():
+    for project, case_dicts in conf_utils.testcases_yaml.items():
         for case in case_dicts:
             case_name = case.get('name')
             visualizations = case.get('visualizations')
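construct_dashboards() now takes the parsed test case list from conf_utils instead of loading testcases.yaml itself. A minimal sketch of the corresponding conf_utils module, assuming it simply hoists the removed yaml.safe_load call to import time so every script shares one parsed copy:

    # conf_utils.py (hypothetical sketch, not the actual module)
    import yaml

    # parsed once at import; scripts reference conf_utils.testcases_yaml directly
    with open('./testcases.yaml') as f:
        testcases_yaml = yaml.safe_load(f)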