logs: use extended bounds for log charts
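For context on the title: an Elasticsearch date_histogram aggregation only returns buckets for intervals that actually contain documents, so sparse log data yields charts that start at the first hit and skip empty periods. Setting min_doc_count to 0 together with extended_bounds forces empty buckets across the whole requested window. The query itself is built in LogService.get_time_series_aggregate (called from logs_fetch_series in the diff below), not in this file, so the sketch that follows is a hypothetical illustration: only the timestamp field and the events_over_time aggregation name are taken from the diff; the interval and everything else are assumptions.

from datetime import datetime, timedelta

# Hypothetical sketch of a date_histogram with extended bounds.
start_date = datetime.utcnow() - timedelta(days=7)
end_date = datetime.utcnow()

es_query = {
    'aggs': {
        'events_over_time': {  # aggregation name read back in logs_fetch_series
            'date_histogram': {
                'field': 'timestamp',
                'interval': '1h',        # assumed interval
                'min_doc_count': 0,      # emit buckets even when empty
                # extended_bounds pads the histogram out to the full
                # requested window instead of stopping at the first and
                # last matching document
                'extended_bounds': {
                    'min': start_date.isoformat(),
                    'max': end_date.isoformat(),
                },
            }
        }
    }
}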
@@ -1,230 +1,229 @@
 # -*- coding: utf-8 -*-
 
 # Copyright (C) 2010-2016 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # AppEnlight Enterprise Edition, including its added features, Support
 # services, and proprietary license terms, please see
 # https://rhodecode.com/licenses/
 
 import logging
 from datetime import datetime, timedelta
 
 from pyramid.view import view_config
 from pyramid.httpexceptions import HTTPUnprocessableEntity
 from appenlight.models import Datastores, Log
 from appenlight.models.services.log import LogService
 from appenlight.lib.utils import (build_filter_settings_from_query_dict,
                                   es_index_name_limiter)
 from appenlight.lib.helpers import gen_pagination_headers
 from appenlight.celery.tasks import logs_cleanup
 
 log = logging.getLogger(__name__)
 
 section_filters_key = 'appenlight:logs:filter:%s'
 
 
 @view_config(route_name='logs_no_id', renderer='json',
              permission='authenticated')
 def fetch_logs(request):
     """
     Returns list of log entries from Elasticsearch
     """
 
     filter_settings = build_filter_settings_from_query_dict(request,
                                                             request.GET.mixed())
     logs_paginator = LogService.get_paginator_by_app_ids(
         app_ids=filter_settings['resource'],
         page=filter_settings['page'],
         filter_settings=filter_settings
     )
     headers = gen_pagination_headers(request, logs_paginator)
     request.response.headers.update(headers)
 
     return [l.get_dict() for l in logs_paginator.sa_items]
 
 
 @view_config(route_name='section_view',
              match_param=['section=logs_section', 'view=fetch_series'],
              renderer='json', permission='authenticated')
 def logs_fetch_series(request):
     """
     Handles metric dashboard graphs
     Returns information for time/tier breakdown
     """
     filter_settings = build_filter_settings_from_query_dict(request,
                                                             request.GET.mixed())
     paginator = LogService.get_paginator_by_app_ids(
         app_ids=filter_settings['resource'],
         page=1, filter_settings=filter_settings, items_per_page=1)
     now = datetime.utcnow().replace(microsecond=0, second=0)
     delta = timedelta(days=7)
     if paginator.sa_items:
         start_date = paginator.sa_items[-1].timestamp.replace(microsecond=0,
                                                               second=0)
         filter_settings['start_date'] = start_date - delta
     else:
         filter_settings['start_date'] = now - delta
     filter_settings['end_date'] = filter_settings['start_date'] \
                                   + timedelta(days=7)
-    since_when = filter_settings['start_date']
 
     @request.registry.cache_regions.redis_sec_30.cache_on_arguments(
         'logs_graphs')
     def cached(apps, search_params, delta, now):
         data = LogService.get_time_series_aggregate(
             filter_settings['resource'], filter_settings)
         if not data:
             return []
         buckets = data['aggregations']['events_over_time']['buckets']
         return [{"x": datetime.utcfromtimestamp(item["key"] / 1000),
                  "logs": item["doc_count"]} for item in buckets]
 
     return cached(filter_settings, request.GET.mixed(), delta, now)
 
 
 @view_config(route_name='logs_no_id', renderer='json', request_method="DELETE",
              permission='authenticated')
 def logs_mass_delete(request):
     params = request.GET.mixed()
     if 'resource' not in params:
         raise HTTPUnprocessableEntity()
     # this might be '' and then colander will not validate the schema
     if not params.get('namespace'):
         params.pop('namespace', None)
     filter_settings = build_filter_settings_from_query_dict(
         request, params, resource_permissions=['update_reports'])
 
     resource_id = list(filter_settings['resource'])[0]
     # filter settings returns list of all of users applications
     # if app is not matching - normally we would not care as its used for search
     # but here user playing with params would possibly wipe out their whole data
     if int(resource_id) != int(params['resource']):
         raise HTTPUnprocessableEntity()
 
     logs_cleanup.delay(resource_id, filter_settings)
     msg = 'Log cleanup process started - it may take a while for ' \
           'everything to get removed'
     request.session.flash(msg)
     return {}
 
 
 @view_config(route_name='section_view',
              match_param=("view=common_tags", "section=logs_section"),
              renderer='json', permission='authenticated')
 def common_tags(request):
     config = request.GET.mixed()
     filter_settings = build_filter_settings_from_query_dict(request,
                                                             config)
 
     resources = list(filter_settings["resource"])
     query = {
         "query": {
             "filtered": {
                 "filter": {
                     "and": [{"terms": {"resource_id": list(resources)}}]
                 }
             }
         }
     }
     start_date = filter_settings.get('start_date')
     end_date = filter_settings.get('end_date')
     filter_part = query['query']['filtered']['filter']['and']
 
     date_range = {"range": {"timestamp": {}}}
     if start_date:
         date_range["range"]["timestamp"]["gte"] = start_date
     if end_date:
         date_range["range"]["timestamp"]["lte"] = end_date
     if start_date or end_date:
         filter_part.append(date_range)
 
     levels = filter_settings.get('level')
     if levels:
         filter_part.append({"terms": {'log_level': levels}})
     namespaces = filter_settings.get('namespace')
     if namespaces:
         filter_part.append({"terms": {'namespace': namespaces}})
 
     query["aggs"] = {
         "sub_agg": {
             "terms": {
                 "field": "tag_list",
                 "size": 50
             }
         }
     }
     # tags
     index_names = es_index_name_limiter(
         ixtypes=[config.get('datasource', 'logs')])
     result = Datastores.es.search(query, index=index_names, doc_type='log',
                                   size=0)
     tag_buckets = result['aggregations']['sub_agg'].get('buckets', [])
     # namespaces
     query["aggs"] = {
         "sub_agg": {
             "terms": {
                 "field": "namespace",
                 "size": 50
             }
         }
     }
     result = Datastores.es.search(query, index=index_names, doc_type='log',
                                   size=0)
     namespaces_buckets = result['aggregations']['sub_agg'].get('buckets', [])
     return {
         "tags": [item['key'] for item in tag_buckets],
         "namespaces": [item['key'] for item in namespaces_buckets]
     }
 
 
 @view_config(route_name='section_view',
              match_param=("view=common_values", "section=logs_section"),
              renderer='json', permission='authenticated')
 def common_values(request):
     config = request.GET.mixed()
     datasource = config.pop('datasource', 'logs')
     filter_settings = build_filter_settings_from_query_dict(request,
                                                             config)
     resources = list(filter_settings["resource"])
     tag_name = filter_settings['tags'][0]['value'][0]
     query = {
         'query': {
             'filtered': {
                 'filter': {
                     'and': [
                         {'terms': {'resource_id': list(resources)}},
                         {'terms': {
                             'namespace': filter_settings['namespace']}}
                     ]
                 }
             }
         }
     }
     query['aggs'] = {
         'sub_agg': {
             'terms': {
                 'field': 'tags.{}.values'.format(tag_name),
                 'size': 50
             }
         }
     }
     index_names = es_index_name_limiter(ixtypes=[datasource])
     result = Datastores.es.search(query, index=index_names, doc_type='log',
                                   size=0)
     values_buckets = result['aggregations']['sub_agg'].get('buckets', [])
     return {
         "values": [item['key'] for item in values_buckets]
     }
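One pattern in logs_fetch_series worth spelling out: cache_on_arguments (a dogpile.cache-style decorator) builds its cache key from the decorated function's arguments, so the inner cached(apps, search_params, delta, now) accepts those values purely to key the cache while reading filter_settings from the enclosing scope. A minimal self-contained sketch of the same pattern, using a hypothetical in-memory region in place of request.registry.cache_regions.redis_sec_30:

from dogpile.cache import make_region

# Hypothetical stand-in for the Redis-backed 30-second region.
region = make_region().configure('dogpile.cache.memory', expiration_time=30)


def run_expensive_aggregation(filter_settings):
    # Stub for the Elasticsearch round trip done by the real view.
    return [{'x': None, 'logs': 0}]


def fetch_series(filter_settings, search_params, delta, now):
    @region.cache_on_arguments('logs_graphs')
    def cached(apps, search_params, delta, now):
        # The parameters above only contribute to the cache key; the
        # actual inputs come from the closure, as in the view code.
        return run_expensive_aggregation(filter_settings)

    return cached(filter_settings, search_params, delta, now)

Because now is truncated to the minute before it enters the key, identical queries share one cached result for up to 30 seconds instead of hitting Elasticsearch on every chart refresh.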