logs: use extended bounds for log charts
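The graph endpoint in this file reads data['aggregations']['events_over_time']['buckets'] from the Elasticsearch response, so the change presumably switches the underlying date_histogram aggregation (built in LogService.get_time_series_aggregate, outside this file) to use Elasticsearch's extended_bounds option together with min_doc_count: 0, which makes Elasticsearch return empty buckets for the whole requested range instead of only between the first and last matching document, so the chart covers the full time window. A minimal sketch of such an aggregation body, with assumed field name and interval:

    es_query = {
        "aggs": {
            "events_over_time": {
                "date_histogram": {
                    "field": "timestamp",   # assumed field name
                    "interval": "1h",       # assumed interval
                    "min_doc_count": 0,     # emit empty buckets as well
                    "extended_bounds": {    # pad the histogram to the full range
                        "min": "2016-01-01T00:00:00",
                        "max": "2016-01-08T00:00:00",
                    },
                }
            }
        }
    }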
@@ -1,230 +1,229 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# AppEnlight Enterprise Edition, including its added features, Support
# services, and proprietary license terms, please see
# https://rhodecode.com/licenses/

import logging
from datetime import datetime, timedelta

from pyramid.view import view_config
from pyramid.httpexceptions import HTTPUnprocessableEntity
from appenlight.models import Datastores, Log
from appenlight.models.services.log import LogService
from appenlight.lib.utils import (build_filter_settings_from_query_dict,
                                  es_index_name_limiter)
from appenlight.lib.helpers import gen_pagination_headers
from appenlight.celery.tasks import logs_cleanup

log = logging.getLogger(__name__)

section_filters_key = 'appenlight:logs:filter:%s'

@view_config(route_name='logs_no_id', renderer='json',
             permission='authenticated')
def fetch_logs(request):
    """
    Returns a list of log entries from Elasticsearch
    """

    filter_settings = build_filter_settings_from_query_dict(
        request, request.GET.mixed())
    logs_paginator = LogService.get_paginator_by_app_ids(
        app_ids=filter_settings['resource'],
        page=filter_settings['page'],
        filter_settings=filter_settings
    )
    headers = gen_pagination_headers(request, logs_paginator)
    request.response.headers.update(headers)

    return [l.get_dict() for l in logs_paginator.sa_items]


@view_config(route_name='section_view',
             match_param=['section=logs_section', 'view=fetch_series'],
             renderer='json', permission='authenticated')
def logs_fetch_series(request):
    """
    Handles the metric dashboard graphs
    Returns information for time/tier breakdown
    """
    filter_settings = build_filter_settings_from_query_dict(
        request, request.GET.mixed())
    paginator = LogService.get_paginator_by_app_ids(
        app_ids=filter_settings['resource'],
        page=1, filter_settings=filter_settings, items_per_page=1)
    now = datetime.utcnow().replace(microsecond=0, second=0)
    delta = timedelta(days=7)
    if paginator.sa_items:
        start_date = paginator.sa_items[-1].timestamp.replace(microsecond=0,
                                                              second=0)
        filter_settings['start_date'] = start_date - delta
    else:
        filter_settings['start_date'] = now - delta
    filter_settings['end_date'] = filter_settings['start_date'] \
        + timedelta(days=7)
-    since_when = filter_settings['start_date']

    # the arguments of cached() only serve as the cache key; the function
    # body reads filter_settings from the enclosing scope
    @request.registry.cache_regions.redis_sec_30.cache_on_arguments(
        'logs_graphs')
    def cached(apps, search_params, delta, now):
        data = LogService.get_time_series_aggregate(
            filter_settings['resource'], filter_settings)
        if not data:
            return []
        buckets = data['aggregations']['events_over_time']['buckets']
        return [{"x": datetime.utcfromtimestamp(item["key"] / 1000),
                 "logs": item["doc_count"]} for item in buckets]

    return cached(filter_settings, request.GET.mixed(), delta, now)

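# For illustration: cached() above maps Elasticsearch date_histogram buckets
# (epoch-millisecond keys) to chart points, e.g. with made-up data:
#   [{"key": 1451606400000, "doc_count": 12}]
#   -> [{"x": datetime(2016, 1, 1, 0, 0), "logs": 12}]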

@view_config(route_name='logs_no_id', renderer='json', request_method="DELETE",
             permission='authenticated')
def logs_mass_delete(request):
    params = request.GET.mixed()
    if 'resource' not in params:
        raise HTTPUnprocessableEntity()
    # this might be '' and then colander will not validate the schema
    if not params.get('namespace'):
        params.pop('namespace', None)
    filter_settings = build_filter_settings_from_query_dict(
        request, params, resource_permissions=['update_reports'])

    resource_id = list(filter_settings['resource'])[0]
    # filter_settings contains a list of all of the user's applications;
    # normally a mismatched app would not matter, since the list is only
    # used for searching, but here a user playing with the params could
    # wipe out their whole dataset
    if int(resource_id) != int(params['resource']):
        raise HTTPUnprocessableEntity()

    logs_cleanup.delay(resource_id, filter_settings)
    msg = 'Log cleanup process started - it may take a while for ' \
          'everything to get removed'
    request.session.flash(msg)
    return {}


@view_config(route_name='section_view',
             match_param=("view=common_tags", "section=logs_section"),
             renderer='json', permission='authenticated')
def common_tags(request):
    config = request.GET.mixed()
    filter_settings = build_filter_settings_from_query_dict(request, config)

    resources = list(filter_settings["resource"])
    query = {
        "query": {
            "filtered": {
                "filter": {
                    "and": [{"terms": {"resource_id": list(resources)}}]
                }
            }
        }
    }
    start_date = filter_settings.get('start_date')
    end_date = filter_settings.get('end_date')
    filter_part = query['query']['filtered']['filter']['and']

    date_range = {"range": {"timestamp": {}}}
    if start_date:
        date_range["range"]["timestamp"]["gte"] = start_date
    if end_date:
        date_range["range"]["timestamp"]["lte"] = end_date
    if start_date or end_date:
        filter_part.append(date_range)

    levels = filter_settings.get('level')
    if levels:
        filter_part.append({"terms": {'log_level': levels}})
    namespaces = filter_settings.get('namespace')
    if namespaces:
        filter_part.append({"terms": {'namespace': namespaces}})

    # tags
    query["aggs"] = {
        "sub_agg": {
            "terms": {
                "field": "tag_list",
                "size": 50
            }
        }
    }
    index_names = es_index_name_limiter(
        ixtypes=[config.get('datasource', 'logs')])
    result = Datastores.es.search(query, index=index_names, doc_type='log',
                                  size=0)
    tag_buckets = result['aggregations']['sub_agg'].get('buckets', [])
    # namespaces
    query["aggs"] = {
        "sub_agg": {
            "terms": {
                "field": "namespace",
                "size": 50
            }
        }
    }
    result = Datastores.es.search(query, index=index_names, doc_type='log',
                                  size=0)
    namespaces_buckets = result['aggregations']['sub_agg'].get('buckets', [])
    return {
        "tags": [item['key'] for item in tag_buckets],
        "namespaces": [item['key'] for item in namespaces_buckets]
    }

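# For illustration, with hypothetical inputs that do not come from this file
# (resource id 5, a start_date of datetime(2016, 1, 1) and level=['ERROR'];
# no end_date or namespace), the "and" filter list assembled in common_tags
# above would end up as:
#   [{"terms": {"resource_id": [5]}},
#    {"range": {"timestamp": {"gte": datetime(2016, 1, 1)}}},
#    {"terms": {"log_level": ["ERROR"]}}]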

@view_config(route_name='section_view',
             match_param=("view=common_values", "section=logs_section"),
             renderer='json', permission='authenticated')
def common_values(request):
    config = request.GET.mixed()
    datasource = config.pop('datasource', 'logs')
    filter_settings = build_filter_settings_from_query_dict(request, config)
    resources = list(filter_settings["resource"])
    tag_name = filter_settings['tags'][0]['value'][0]
    query = {
        'query': {
            'filtered': {
                'filter': {
                    'and': [
                        {'terms': {'resource_id': list(resources)}},
                        {'terms': {
                            'namespace': filter_settings['namespace']}}
                    ]
                }
            }
        }
    }
    query['aggs'] = {
        'sub_agg': {
            'terms': {
                'field': 'tags.{}.values'.format(tag_name),
                'size': 50
            }
        }
    }
    index_names = es_index_name_limiter(ixtypes=[datasource])
    result = Datastores.es.search(query, index=index_names, doc_type='log',
                                  size=0)
    values_buckets = result['aggregations']['sub_agg'].get('buckets', [])
    return {
        "values": [item['key'] for item in values_buckets]
    }
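For illustration, with hypothetical inputs that do not come from the file above (a single resource id 5, namespace ['app.views'] and tag name 'browser'), common_values ends up sending this body to Elasticsearch; the keys of aggregations.sub_agg.buckets in the response become the returned "values" list:

    query = {
        'query': {
            'filtered': {
                'filter': {
                    'and': [
                        {'terms': {'resource_id': [5]}},
                        {'terms': {'namespace': ['app.views']}},
                    ]
                }
            }
        },
        'aggs': {
            'sub_agg': {
                'terms': {'field': 'tags.browser.values', 'size': 50}
            }
        }
    }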