Merge the N-days and whole-database processing paths, and never republish data that already exists
[releng.git] / utils / test / scripts / create_kibana_dashboards.py
index 5897a7e..efa6e17 100644 (file)
@@ -1,18 +1,14 @@
 #! /usr/bin/env python
 import json
-import logging
 import urlparse
 
 import argparse
 
 import conf_utils
+import logger_utils
 import shared_utils
 
-logger = logging.getLogger('create_kibana_dashboards')
-logger.setLevel(logging.DEBUG)
-file_handler = logging.FileHandler('./{}.log'.format('create_kibana_dashboards'))
-file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
-logger.addHandler(file_handler)
+logger = logger_utils.KibanaDashboardLogger('elastic2kibana').get
 
 _installers = {'fuel', 'apex', 'compass', 'joid'}
 
@@ -277,7 +273,7 @@ def _get_pods_and_scenarios(project_name, case_name, installer):
         }
     })
 
-    elastic_data = shared_utils.get_elastic_data(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
+    elastic_data = shared_utils.get_elastic_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
                                                  es_creds, query_json)
 
     pods_and_scenarios = {}