##// END OF EJS Templates
logs: use extended bounds for log charts
ergo -
Show More
@@ -1,213 +1,216 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # AppEnlight Enterprise Edition, including its added features, Support
19 19 # services, and proprietary license terms, please see
20 20 # https://rhodecode.com/licenses/
21 21
22 22 import paginate
23 23 import logging
24 24 import sqlalchemy as sa
25 25
26 26 from appenlight.models.log import Log
27 27 from appenlight.models import get_db_session, Datastores
28 28 from appenlight.models.services.base import BaseService
29 29 from appenlight.lib.utils import es_index_name_limiter
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
class LogService(BaseService):
    """Service layer for querying log entries, both from the relational
    database (SQLAlchemy ``Log`` model) and from elasticsearch."""

    @classmethod
    def get_logs(cls, resource_ids=None, filter_settings=None,
                 db_session=None):
        """Build a SQLAlchemy query over ``Log`` rows for *resource_ids*,
        narrowed by the optional *filter_settings* keys ``start_date``,
        ``end_date``, ``log_level``, ``request_id`` and ``namespace``.

        Returns the query ordered newest-first, or an empty list when no
        resource ids are passed.
        """
        # ensure we always have id's passed
        if not resource_ids:
            # raise Exception('No App ID passed')
            return []
        # guard against ``None`` - the other query helpers tolerate a
        # missing filter dict, this one used to crash on ``None.get()``
        if not filter_settings:
            filter_settings = {}
        db_session = get_db_session(db_session)
        q = db_session.query(Log)
        q = q.filter(Log.resource_id.in_(resource_ids))
        if filter_settings.get('start_date'):
            q = q.filter(Log.timestamp >= filter_settings.get('start_date'))
        if filter_settings.get('end_date'):
            q = q.filter(Log.timestamp <= filter_settings.get('end_date'))
        if filter_settings.get('log_level'):
            q = q.filter(
                Log.log_level == filter_settings.get('log_level').upper())
        if filter_settings.get('request_id'):
            request_id = filter_settings.get('request_id', '')
            # request ids are stored without dashes
            q = q.filter(Log.request_id == request_id.replace('-', ''))
        if filter_settings.get('namespace'):
            q = q.filter(Log.namespace == filter_settings.get('namespace'))
        q = q.order_by(sa.desc(Log.timestamp))
        return q

    @classmethod
    def es_query_builder(cls, app_ids, filter_settings):
        """Translate *filter_settings* into an elasticsearch query dict
        scoped to *app_ids*.

        Handled keys: ``tags`` (list of ``{'name': .., 'value': [..]}``),
        ``start_date``/``end_date`` (timestamp range), ``level``,
        ``namespace``, ``request_id`` (term filters) and ``message``
        (full-text ``match`` with AND semantics).
        """
        if not filter_settings:
            filter_settings = {}

        query = {
            "query": {
                "filtered": {
                    "filter": {
                        "and": [{"terms": {"resource_id": list(app_ids)}}]
                    }
                }
            }
        }

        start_date = filter_settings.get('start_date')
        end_date = filter_settings.get('end_date')
        filter_part = query['query']['filtered']['filter']['and']

        for tag in filter_settings.get('tags', []):
            # tag values are indexed lowercased; dots in tag names map to
            # underscores in the index
            tag_values = [v.lower() for v in tag['value']]
            key = "tags.%s.values" % tag['name'].replace('.', '_')
            filter_part.append({"terms": {key: tag_values}})

        date_range = {"range": {"timestamp": {}}}
        if start_date:
            date_range["range"]["timestamp"]["gte"] = start_date
        if end_date:
            date_range["range"]["timestamp"]["lte"] = end_date
        if start_date or end_date:
            filter_part.append(date_range)

        levels = filter_settings.get('level')
        if levels:
            filter_part.append({"terms": {'log_level': levels}})
        namespaces = filter_settings.get('namespace')
        if namespaces:
            filter_part.append({"terms": {'namespace': namespaces}})

        request_ids = filter_settings.get('request_id')
        if request_ids:
            filter_part.append({"terms": {'request_id': request_ids}})

        messages = filter_settings.get('message')
        if messages:
            query['query']['filtered']['query'] = {
                'match': {
                    'message': {
                        'query': ' '.join(messages),
                        'operator': 'and'
                    }
                }
            }
        return query

    @classmethod
    def get_time_series_aggregate(cls, app_ids=None, filter_settings=None):
        """Return a 1-hour date-histogram aggregation of log events for
        *app_ids*.

        Returns ``{}`` when no app ids are passed and ``[]`` when no log
        index matches the requested date range (kept as-is for backward
        compatibility with existing callers).
        """
        if not app_ids:
            return {}
        # guard before the ``.get()`` calls below; es_query_builder only
        # protects its own local copy of the dict
        if not filter_settings:
            filter_settings = {}
        es_query = cls.es_query_builder(app_ids, filter_settings)
        es_query["aggs"] = {
            "events_over_time": {
                "date_histogram": {
                    "field": "timestamp",
                    "interval": "1h",
                    "min_doc_count": 0,
                    # extended_bounds forces empty buckets across the whole
                    # requested window so charts render gaps correctly
                    'extended_bounds': {
                        'max': filter_settings.get('end_date'),
                        'min': filter_settings.get('start_date')}
                }
            }
        }
        log.debug(es_query)
        index_names = es_index_name_limiter(filter_settings.get('start_date'),
                                            filter_settings.get('end_date'),
                                            ixtypes=['logs'])
        if index_names:
            results = Datastores.es.search(
                es_query, index=index_names, doc_type='log', size=0)
        else:
            results = []
        return results

    @classmethod
    def get_search_iterator(cls, app_ids=None, page=1, items_per_page=50,
                            order_by=None, filter_settings=None, limit=None):
        """Run a paged elasticsearch search for logs and return a
        ``(hits, count)`` tuple; *count* is capped at 5000 to keep deep
        pagination bounded.
        """
        if not app_ids:
            return {}, 0
        # same ``None`` guard as get_time_series_aggregate - the
        # ``.get('start_date')`` calls below need a dict
        if not filter_settings:
            filter_settings = {}

        es_query = cls.es_query_builder(app_ids, filter_settings)
        sort_query = {
            "sort": [
                {"timestamp": {"order": "desc"}}
            ]
        }
        es_query.update(sort_query)
        log.debug(es_query)
        es_from = (page - 1) * items_per_page
        index_names = es_index_name_limiter(filter_settings.get('start_date'),
                                            filter_settings.get('end_date'),
                                            ixtypes=['logs'])
        if not index_names:
            return {}, 0

        results = Datastores.es.search(es_query, index=index_names,
                                       doc_type='log', size=items_per_page,
                                       es_from=es_from)
        # cap the reported total - paginating very deep into ES results is
        # expensive and not useful in the UI
        count = min(results['hits']['total'], 5000)
        return results['hits'], count

    @classmethod
    def get_paginator_by_app_ids(cls, app_ids=None, page=1, item_count=None,
                                 items_per_page=50, order_by=None,
                                 filter_settings=None,
                                 exclude_columns=None, db_session=None):
        """Return a ``paginate.Page`` whose ``sa_items`` are ``Log`` rows
        fetched from the database in elasticsearch result order."""
        if not filter_settings:
            filter_settings = {}
        results, item_count = cls.get_search_iterator(app_ids, page,
                                                      items_per_page, order_by,
                                                      filter_settings)
        paginator = paginate.Page([],
                                  item_count=item_count,
                                  items_per_page=items_per_page,
                                  **filter_settings)
        # ES documents carry the postgres primary key in ``pg_id``
        ordered_ids = tuple(item['_source']['pg_id']
                            for item in results.get('hits', []))

        sorted_instance_list = []
        if ordered_ids:
            db_session = get_db_session(db_session)
            query = db_session.query(Log)
            query = query.filter(Log.log_id.in_(ordered_ids))
            query = query.order_by(sa.desc('timestamp'))
            sa_items = query.all()
            # resort by ES score order - use an id lookup table instead of
            # the previous O(n*m) nested scan (log_id is the primary key,
            # so at most one row matches each id)
            items_by_id = {str(item.log_id): item for item in sa_items}
            for i_id in ordered_ids:
                item = items_by_id.get(str(i_id))
                if item is not None:
                    sorted_instance_list.append(item)
        paginator.sa_items = sorted_instance_list
        return paginator

    @classmethod
    def query_by_primary_key_and_namespace(cls, list_of_pairs,
                                           db_session=None):
        """Build a query for ``Log`` rows matching any of the given
        ``{'pk': .., 'ns': ..}`` pairs, ordered by timestamp then log id."""
        db_session = get_db_session(db_session)
        list_of_conditions = []
        query = db_session.query(Log)
        for pair in list_of_pairs:
            list_of_conditions.append(sa.and_(
                Log.primary_key == pair['pk'], Log.namespace == pair['ns']))
        query = query.filter(sa.or_(*list_of_conditions))
        query = query.order_by(sa.asc(Log.timestamp), sa.asc(Log.log_id))
        return query
General Comments 0
You need to be logged in to leave comments. Login now