- # 1. get the number of results
- headers = urllib3.make_headers(basic_auth=creds)
- elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size=0', headers=headers, body=body).data)
- print elastic_json
- nr_of_hits = _get_nr_of_hits(elastic_json)
-
- # 2. get all results
- elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size={}'.format(nr_of_hits), headers=headers, body=body).data)
-
- elastic_docs = []
- for hit in elastic_json['hits']['hits']:
- elastic_docs.append(hit[field])
- return elastic_docs
-
-
-def get_elastic_docs_by_days(elastic_url, creds, days):
- if days == 0:
- body = '''{
- "query": {
- "match_all": {}
- }
- }'''
- elif days > 0:
- body = '''{{
- "query" : {{
- "range" : {{
- "start_date" : {{
- "gte" : "now-{}d"
- }}
- }}
- }}
- }}'''.format(days)
- else:
- raise Exception('Update days must be non-negative')
- return get_elastic_docs(elastic_url, creds, body)
+
+def publish_docs(url, creds=None, body=None):
+    """POST *body* to *url* as a JSON document.
+
+    Returns a ``(status, data)`` tuple taken from the HTTP response.
+    """
+    payload = json.dumps(body)
+    response = _post(url, creds=creds, body=payload)
+    return response.status, response.data
+
+
+def _get_docs_nr(url, creds=None, body=None):
+    """Return the total number of documents matching *body* at *url*.
+
+    Issues a zero-size search (``size=0``) so only the hit count is
+    transferred, not any documents.
+    """
+    res_data = _get('{}/_search?size=0'.format(url), creds=creds, body=body)
+    # NOTE(review): assumes 'hits.total' is a plain integer (Elasticsearch
+    # <= 6.x); newer versions return an object here -- confirm ES version.
+    return res_data['hits']['total']
+
+
+def get_docs(url, creds=None, body=None, field='_source'):
+    """Fetch every document matching *body* and collect one field per hit.
+
+    First queries the total hit count, then requests exactly that many
+    results in a single search and returns ``hit[field]`` for each hit
+    (by default the full ``_source`` document).
+    """
+    total = _get_docs_nr(url, creds=creds, body=body)
+    res_data = _get('{}/_search?size={}'.format(url, total),
+                    creds=creds, body=body)
+    return [hit[field] for hit in res_data['hits']['hits']]
+
+
+def publish_kibana(url, creds, type, id, body):
+    """Publish *body* into the ``.kibana`` index under ``/<type>/<id>``.
+
+    ``type`` and ``id`` shadow builtins but are kept unchanged for
+    interface compatibility with existing callers.
+
+    Returns the ``(status, data)`` tuple from :func:`publish_docs` so
+    callers can check the outcome (previously the result was discarded).
+    """
+    url = urlparse.urljoin(url, '/.kibana/{}/{}'.format(type, id))
+    return publish_docs(url, creds, body)