##// END OF EJS Templates
validation: remove 0.4 report validators
ergo -
Show More
@@ -1,663 +1,634 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # AppEnlight Enterprise Edition, including its added features, Support
18 # AppEnlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
20 # https://rhodecode.com/licenses/
21
21
22 import bisect
22 import bisect
23 import collections
23 import collections
24 import math
24 import math
25 from datetime import datetime, timedelta
25 from datetime import datetime, timedelta
26
26
27 import sqlalchemy as sa
27 import sqlalchemy as sa
28 import pyelasticsearch
28 import pyelasticsearch
29
29
30 from celery.utils.log import get_task_logger
30 from celery.utils.log import get_task_logger
31 from zope.sqlalchemy import mark_changed
31 from zope.sqlalchemy import mark_changed
32 from pyramid.threadlocal import get_current_request, get_current_registry
32 from pyramid.threadlocal import get_current_request, get_current_registry
33 from appenlight.celery import celery
33 from appenlight.celery import celery
34 from appenlight.models.report_group import ReportGroup
34 from appenlight.models.report_group import ReportGroup
35 from appenlight.models import DBSession, Datastores
35 from appenlight.models import DBSession, Datastores
36 from appenlight.models.report import Report
36 from appenlight.models.report import Report
37 from appenlight.models.log import Log
37 from appenlight.models.log import Log
38 from appenlight.models.request_metric import Metric
38 from appenlight.models.request_metric import Metric
39 from appenlight.models.event import Event
39 from appenlight.models.event import Event
40
40
41 from appenlight.models.services.application import ApplicationService
41 from appenlight.models.services.application import ApplicationService
42 from appenlight.models.services.event import EventService
42 from appenlight.models.services.event import EventService
43 from appenlight.models.services.log import LogService
43 from appenlight.models.services.log import LogService
44 from appenlight.models.services.report import ReportService
44 from appenlight.models.services.report import ReportService
45 from appenlight.models.services.report_group import ReportGroupService
45 from appenlight.models.services.report_group import ReportGroupService
46 from appenlight.models.services.user import UserService
46 from appenlight.models.services.user import UserService
47 from appenlight.models.tag import Tag
47 from appenlight.models.tag import Tag
48 from appenlight.lib import print_traceback
48 from appenlight.lib import print_traceback
49 from appenlight.lib.utils import parse_proto, in_batches
49 from appenlight.lib.utils import parse_proto, in_batches
50 from appenlight.lib.ext_json import json
50 from appenlight.lib.ext_json import json
51 from appenlight.lib.redis_keys import REDIS_KEYS
51 from appenlight.lib.redis_keys import REDIS_KEYS
52 from appenlight.lib.enums import ReportType
52 from appenlight.lib.enums import ReportType
53
53
# Module-level logger for all celery tasks below; get_task_logger namespaces
# it under the celery worker so task output is distinguishable from
# web-process logging.
log = get_task_logger(__name__)
# Occurence thresholds at which report sampling becomes progressively
# sparser: every 100 up to 1k, every 1k up to 10k, every 5k up to 100k.
sample_boundries = list(range(100, 1000, 100)) + \
                   list(range(1000, 10000, 1000)) + \
                   list(range(10000, 100000, 5000))


def pick_sample(total_occurences, report_type=None):
    """Decide whether this occurence of a report should be stored.

    Below the first boundary (100) every occurence is kept.  Past that,
    only every N-th occurence is sampled, where N grows with the
    preceding boundary; 404-type reports are sampled ten times more
    densely than other report types.

    :param total_occurences: occurences seen so far for the report group
    :param report_type: optional ReportType enum value
    :returns: True when this occurence should be persisted as a sample
    """
    sampling_interval = 1.0
    slot = bisect.bisect_left(sample_boundries, total_occurences)
    if slot > 0:
        divisor = 10.0 if report_type == ReportType.not_found else 100.0
        sampling_interval = sample_boundries[slot - 1] / divisor
    return total_occurences % sampling_interval == 0
71
71
72
72
@celery.task(queue="default", default_retry_delay=1, max_retries=2)
def test_exception_task():
    """Diagnostic task: emit error/warning log entries, then raise.

    Used to verify that celery-side logging and exception capture are
    wired up correctly; always raises.
    """
    extra = {'location': 'celery'}
    log.error('test celery log', extra=extra)
    log.warning('test celery log', extra=extra)
    raise Exception('Celery exception test')
78
78
79
79
@celery.task(queue="default", default_retry_delay=1, max_retries=2)
def test_retry_exception_task():
    """Diagnostic task: log, raise, and exercise celery's retry path.

    Sleeps briefly so the task has measurable duration, then raises and
    immediately reschedules itself via ``retry`` (up to ``max_retries``).
    """
    try:
        import time

        time.sleep(1.3)
        extra = {'location': 'celery'}
        log.error('test retry celery log', extra=extra)
        log.warning('test retry celery log', extra=extra)
        raise Exception('Celery exception test')
    except Exception as exc:
        test_retry_exception_task.retry(exc=exc)
91
91
92
92
@celery.task(queue="reports", default_retry_delay=600, max_retries=999)
def add_reports(resource_id, params, dataset, environ=None, **kwargs):
    """Persist a batch of incoming reports for a single application.

    For each report dict in ``dataset`` this locates (or creates) the
    matching ReportGroup by grouping hash, optionally stores the report
    itself as a sample (see ``pick_sample``), accumulates per-partition
    Elasticsearch documents (reports, report groups, slow calls, stat
    rows), updates redis per-minute counters, and finally hands the
    collected docs to the ``add_reports*_es`` indexing tasks.

    :param resource_id: id of the application the reports belong to
    :param params: request params; only ``protocol_version`` is read here
    :param dataset: list of report dicts, used as-is (older-protocol
        normalization was removed; presumably clients send >= 0.5 shape —
        verify against the API validators)
    :param environ: unused in this body; kept for signature compatibility
    :returns: True on success; on any exception the task is retried
        (delay 600s, up to 999 times)
    """
    proto_version = parse_proto(params.get('protocol_version', ''))
    current_time = datetime.utcnow().replace(second=0, microsecond=0)
    try:
        # per-partition document buffers so ES gets one bulk insert per
        # partition instead of a call per document
        es_report_docs = {}
        es_report_group_docs = {}
        resource = ApplicationService.by_id(resource_id)

        tags = []
        es_slow_calls_docs = {}
        es_reports_stats_rows = {}
        for report_data in dataset:
            # build report details for later
            added_details = 0
            report = Report()
            report.set_data(report_data, resource, proto_version)
            report._skip_ft_index = True

            report_group = ReportGroupService.by_hash_and_resource(
                report.resource_id,
                report.grouping_hash
            )
            # clients may pre-aggregate identical reports; default to one
            occurences = report_data.get('occurences', 1)
            if not report_group:
                # total reports will be +1 moment later
                report_group = ReportGroup(grouping_hash=report.grouping_hash,
                                           occurences=0, total_reports=0,
                                           last_report=0,
                                           priority=report.priority,
                                           error=report.error,
                                           first_timestamp=report.start_time)
                report_group._skip_ft_index = True
                report_group.report_type = report.report_type
            report.report_group_time = report_group.first_timestamp
            add_sample = pick_sample(report_group.occurences,
                                     report_type=report_group.report_type)
            if add_sample:
                # persist this occurence as a sample row + ES doc
                resource.report_groups.append(report_group)
                report_group.reports.append(report)
                added_details += 1
                DBSession.flush()
                if report.partition_id not in es_report_docs:
                    es_report_docs[report.partition_id] = []
                es_report_docs[report.partition_id].append(report.es_doc())
                tags.extend(list(report.tags.items()))
                slow_calls = report.add_slow_calls(report_data, report_group)
                DBSession.flush()
                for s_call in slow_calls:
                    if s_call.partition_id not in es_slow_calls_docs:
                        es_slow_calls_docs[s_call.partition_id] = []
                    es_slow_calls_docs[s_call.partition_id].append(
                        s_call.es_doc())
                # try generating new stat rows if needed
            else:
                # required for postprocessing to not fail later
                report.report_group = report_group

            stat_row = ReportService.generate_stat_rows(
                report, resource, report_group)
            if stat_row.partition_id not in es_reports_stats_rows:
                es_reports_stats_rows[stat_row.partition_id] = []
            es_reports_stats_rows[stat_row.partition_id].append(
                stat_row.es_doc())

            # see if we should mark 10th/100th occurence of report: compare
            # the decade/century bucket before and after this batch
            last_occurences_10 = int(math.floor(report_group.occurences / 10))
            curr_occurences_10 = int(math.floor(
                (report_group.occurences + report.occurences) / 10))
            last_occurences_100 = int(
                math.floor(report_group.occurences / 100))
            curr_occurences_100 = int(math.floor(
                (report_group.occurences + report.occurences) / 100))
            notify_occurences_10 = last_occurences_10 != curr_occurences_10
            notify_occurences_100 = last_occurences_100 != curr_occurences_100
            # NOTE(review): assigning column expressions (ReportGroup.xxx + n)
            # presumably pushes the increment into the SQL UPDATE so
            # concurrent workers don't lose updates — confirm intent
            report_group.occurences = ReportGroup.occurences + occurences
            report_group.last_timestamp = report.start_time
            report_group.summed_duration = ReportGroup.summed_duration + report.duration
            summed_duration = ReportGroup.summed_duration + report.duration
            summed_occurences = ReportGroup.occurences + occurences
            report_group.average_duration = summed_duration / summed_occurences
            report_group.run_postprocessing(report)
            if added_details:
                report_group.total_reports = ReportGroup.total_reports + 1
                report_group.last_report = report.id
            report_group.set_notification_info(notify_10=notify_occurences_10,
                                               notify_100=notify_occurences_100)
            DBSession.flush()
            report_group.get_report().notify_channel(report_group)
            if report_group.partition_id not in es_report_group_docs:
                es_report_group_docs[report_group.partition_id] = []
            es_report_group_docs[report_group.partition_id].append(
                report_group.es_doc())

            action = 'REPORT'
            log_msg = '%s: %s %s, client: %s, proto: %s' % (
                action,
                report_data.get('http_status', 'unknown'),
                str(resource),
                report_data.get('client'),
                proto_version)
            log.info(log_msg)
        # bump per-minute throughput counters (global and per-app),
        # expiring after a day
        total_reports = len(dataset)
        key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time)
        Datastores.redis.incr(key, total_reports)
        Datastores.redis.expire(key, 3600 * 24)
        key = REDIS_KEYS['counters']['reports_per_minute_per_app'].format(
            resource_id, current_time)
        Datastores.redis.incr(key, total_reports)
        Datastores.redis.expire(key, 3600 * 24)

        add_reports_es(es_report_group_docs, es_report_docs)
        add_reports_slow_calls_es(es_slow_calls_docs)
        add_reports_stats_rows_es(es_reports_stats_rows)
        return True
    except Exception as exc:
        print_traceback(log)
        add_reports.retry(exc=exc)
241
212
242
213
@celery.task(queue="es", default_retry_delay=600, max_retries=999)
def add_reports_es(report_group_docs, report_docs):
    """Bulk-index report group and report documents into Elasticsearch.

    Both arguments map partition name -> list of ES documents; one bulk
    request is issued per partition.  Report docs carry a parent field so
    they attach to their report group.
    """
    for partition, docs in report_group_docs.items():
        Datastores.es.bulk_index(partition, 'report_group', docs,
                                 id_field="_id")
    for partition, docs in report_docs.items():
        Datastores.es.bulk_index(partition, 'report', docs, id_field="_id",
                                 parent_field='_parent')
250
221
251
222
@celery.task(queue="es", default_retry_delay=600, max_retries=999)
def add_reports_slow_calls_es(es_docs):
    """Bulk-index slow-call documents, one request per partition.

    :param es_docs: mapping of partition name -> list of ES documents
    """
    for partition, docs in es_docs.items():
        Datastores.es.bulk_index(partition, 'log', docs)
256
227
257
228
@celery.task(queue="es", default_retry_delay=600, max_retries=999)
def add_reports_stats_rows_es(es_docs):
    """Bulk-index report stat-row documents, one request per partition.

    :param es_docs: mapping of partition name -> list of ES documents
    """
    for partition, docs in es_docs.items():
        Datastores.es.bulk_index(partition, 'log', docs)
262
233
263
234
@celery.task(queue="logs", default_retry_delay=600, max_retries=999)
def add_logs(resource_id, request, dataset, environ=None, **kwargs):
    """Persist a batch of log entries for a single application.

    Entries without a primary key are inserted and indexed directly.
    Entries that carry a primary key are deduplicated in a second pass:
    for every (primary_key, namespace) pair only the newest row is kept
    in postgres and indexed in ES; all older rows are deleted from both
    stores.  Per-minute redis counters are updated at the end.

    :param resource_id: id of the application the logs belong to
    :param request: request params dict; only ``protocol_version`` is read
    :param dataset: list of log-entry dicts
    :param environ: unused in this body; kept for signature compatibility
    :returns: True on success; on exception the task is retried
    """
    proto_version = request.get('protocol_version')
    current_time = datetime.utcnow().replace(second=0, microsecond=0)

    try:
        es_docs = collections.defaultdict(list)
        application = ApplicationService.by_id(resource_id)
        ns_pairs = []
        for entry in dataset:
            # gather pk and ns so we can remove older versions of row later
            if entry['primary_key'] is not None:
                ns_pairs.append({"pk": entry['primary_key'],
                                 "ns": entry['namespace']})
            log_entry = Log()
            log_entry.set_data(entry, resource=application)
            log_entry._skip_ft_index = True
            application.logs.append(log_entry)
            DBSession.flush()
            # insert non pk rows first
            if entry['primary_key'] is None:
                es_docs[log_entry.partition_id].append(log_entry.es_doc())

        # 2nd pass to delete all log entries from db for same pk/ns pair
        if ns_pairs:
            ids_to_delete = []
            # reset the buffer: for pk-bearing batches only the surviving
            # newest row per pair gets indexed
            es_docs = collections.defaultdict(list)
            es_docs_to_delete = collections.defaultdict(list)
            found_pkey_logs = LogService.query_by_primary_key_and_namespace(
                list_of_pairs=ns_pairs)
            # group all matching rows by (primary_key, namespace)
            log_dict = {}
            for log_entry in found_pkey_logs:
                log_key = (log_entry.primary_key, log_entry.namespace)
                if log_key not in log_dict:
                    log_dict[log_key] = []
                log_dict[log_key].append(log_entry)

            for ns, entry_list in log_dict.items():
                entry_list = sorted(entry_list, key=lambda x: x.timestamp)
                # newest row needs to be indexed in es
                log_entry = entry_list[-1]
                # delete everything from pg and ES, leave the last row in pg
                for e in entry_list[:-1]:
                    ids_to_delete.append(e.log_id)
                    es_docs_to_delete[e.partition_id].append(e.delete_hash)

                # the survivor's old ES doc is removed too before reindexing
                es_docs_to_delete[log_entry.partition_id].append(
                    log_entry.delete_hash)

                es_docs[log_entry.partition_id].append(log_entry.es_doc())

            if ids_to_delete:
                query = DBSession.query(Log).filter(
                    Log.log_id.in_(ids_to_delete))
                query.delete(synchronize_session=False)
            if es_docs_to_delete:
                # batch this to avoid problems with default ES bulk limits
                for es_index in es_docs_to_delete.keys():
                    for batch in in_batches(es_docs_to_delete[es_index], 20):
                        query = {'terms': {'delete_hash': batch}}

                        try:
                            Datastores.es.delete_by_query(
                                es_index, 'log', query)
                        except pyelasticsearch.ElasticHttpNotFoundError as exc:
                            # index may not exist yet/anymore; best-effort
                            log.error(exc)

        total_logs = len(dataset)

        log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % (
            str(application),
            total_logs,
            proto_version)
        log.info(log_msg)
        # mark_changed(session)
        # bump per-minute throughput counters (global and per-app)
        key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time)
        Datastores.redis.incr(key, total_logs)
        Datastores.redis.expire(key, 3600 * 24)
        key = REDIS_KEYS['counters']['logs_per_minute_per_app'].format(
            resource_id, current_time)
        Datastores.redis.incr(key, total_logs)
        Datastores.redis.expire(key, 3600 * 24)
        add_logs_es(es_docs)
        return True
    except Exception as exc:
        print_traceback(log)
        add_logs.retry(exc=exc)
351
322
352
323
@celery.task(queue="es", default_retry_delay=600, max_retries=999)
def add_logs_es(es_docs):
    """Bulk-index log documents, one request per partition.

    :param es_docs: mapping of partition name -> list of ES documents
    """
    for partition, docs in es_docs.items():
        Datastores.es.bulk_index(partition, 'log', docs)
357
328
358
329
@celery.task(queue="metrics", default_retry_delay=600, max_retries=999)
def add_metrics(resource_id, request, dataset, proto_version):
    """Persist a batch of request metrics for a single application.

    Builds Metric rows from the incoming dicts, bulk-saves them in one
    session flush, updates redis per-minute counters and forwards the ES
    documents to ``add_metrics_es``.

    :param resource_id: id of the application the metrics belong to
    :param request: request params dict (not read in this body)
    :param dataset: list of metric dicts with 'timestamp', 'namespace',
        'tags' and 'server_name' keys
    :param proto_version: client protocol version, used for logging only
    :returns: True on success; on exception the task is retried
    """
    current_time = datetime.utcnow().replace(second=0, microsecond=0)
    try:
        # cached application lookup, then merge into the current session
        application = ApplicationService.by_id_cached()(resource_id)
        application = DBSession.merge(application, load=False)
        es_docs = []
        rows = []
        for metric in dataset:
            tags = dict(metric['tags'])
            # tag-level server_name wins over the top-level one; normalize
            # to lowercase and fall back to 'unknown' when empty
            server_n = tags.get('server_name', metric['server_name']).lower()
            tags['server_name'] = server_n or 'unknown'
            new_metric = Metric(
                timestamp=metric['timestamp'],
                resource_id=application.resource_id,
                namespace=metric['namespace'],
                tags=tags)
            rows.append(new_metric)
            es_docs.append(new_metric.es_doc())
        session = DBSession()
        session.bulk_save_objects(rows)
        session.flush()

        action = 'METRICS'
        metrics_msg = '%s: %s, metrics: %s, proto:%s' % (
            action,
            str(application),
            len(dataset),
            proto_version
        )
        log.info(metrics_msg)

        # bulk_save_objects bypasses unit-of-work tracking, so tell
        # zope.sqlalchemy the session is dirty and must be committed
        mark_changed(session)
        # bump per-minute throughput counters (global and per-app)
        key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time)
        Datastores.redis.incr(key, len(rows))
        Datastores.redis.expire(key, 3600 * 24)
        key = REDIS_KEYS['counters']['metrics_per_minute_per_app'].format(
            resource_id, current_time)
        Datastores.redis.incr(key, len(rows))
        Datastores.redis.expire(key, 3600 * 24)
        add_metrics_es(es_docs)
        return True
    except Exception as exc:
        print_traceback(log)
        add_metrics.retry(exc=exc)
404
375
405
376
@celery.task(queue="es", default_retry_delay=600, max_retries=999)
def add_metrics_es(es_docs):
    """Index metric documents into their daily 'rcae_m_*' ES partitions.

    Documents are indexed one by one; the partition is derived from each
    document's own timestamp.
    """
    for doc in es_docs:
        day_suffix = doc['timestamp'].strftime('%Y_%m_%d')
        Datastores.es.index('rcae_m_%s' % day_suffix, 'log', doc)
411
382
412
383
@celery.task(queue="default", default_retry_delay=5, max_retries=2)
def check_user_report_notifications(resource_id):
    """Send pending report notifications for one application.

    Pops the sets of error/slow report-group ids accumulated in redis for
    this application, reads (and clears) each group's occurence counter,
    fires application-level alerts, then notifies every user holding the
    'view' permission.  Processed groups are flagged as notified.

    :param resource_id: id of the application to process
    :raises: re-raises any exception after logging, so celery retries
    """
    try:
        request = get_current_request()
        application = ApplicationService.by_id(resource_id)
        if not application:
            return
        error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
            ReportType.error, resource_id)
        slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
            ReportType.slow, resource_id)
        error_group_ids = Datastores.redis.smembers(error_key)
        slow_group_ids = Datastores.redis.smembers(slow_key)
        Datastores.redis.delete(error_key)
        Datastores.redis.delete(slow_key)
        err_gids = [int(g_id) for g_id in error_group_ids]
        slow_gids = [int(g_id) for g_id in slow_group_ids]
        group_ids = err_gids + slow_gids
        occurence_dict = {}
        for g_id in group_ids:
            key = REDIS_KEYS['counters']['report_group_occurences'].format(
                g_id)
            val = Datastores.redis.get(key)
            Datastores.redis.delete(key)
            # counter may be missing/expired; assume a single occurence
            occurence_dict[g_id] = int(val) if val else 1
        # BUGFIX: Query.options() is generative and returns a new query;
        # the original discarded its result, so the joinedload of
        # last_report_ref never took effect.
        report_groups = ReportGroupService.by_ids(group_ids).options(
            sa.orm.joinedload(ReportGroup.last_report_ref))

        ApplicationService.check_for_groups_alert(
            application, 'alert', report_groups=report_groups,
            occurence_dict=occurence_dict)
        users = {p.user for p in application.users_for_perm('view')}
        report_groups = report_groups.all()
        for user in users:
            UserService.report_notify(user, request, application,
                                      report_groups=report_groups,
                                      occurence_dict=occurence_dict)
        for group in report_groups:
            # marks report_groups as notified
            if not group.notified:
                group.notified = True
    except Exception:
        print_traceback(log)
        raise
461
432
462
433
463 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
434 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
464 def check_alerts(resource_id):
435 def check_alerts(resource_id):
465 since_when = datetime.utcnow()
436 since_when = datetime.utcnow()
466 try:
437 try:
467 request = get_current_request()
438 request = get_current_request()
468 application = ApplicationService.by_id(resource_id)
439 application = ApplicationService.by_id(resource_id)
469 if not application:
440 if not application:
470 return
441 return
471 error_key = REDIS_KEYS[
442 error_key = REDIS_KEYS[
472 'reports_to_notify_per_type_per_app_alerting'].format(
443 'reports_to_notify_per_type_per_app_alerting'].format(
473 ReportType.error, resource_id)
444 ReportType.error, resource_id)
474 slow_key = REDIS_KEYS[
445 slow_key = REDIS_KEYS[
475 'reports_to_notify_per_type_per_app_alerting'].format(
446 'reports_to_notify_per_type_per_app_alerting'].format(
476 ReportType.slow, resource_id)
447 ReportType.slow, resource_id)
477 error_group_ids = Datastores.redis.smembers(error_key)
448 error_group_ids = Datastores.redis.smembers(error_key)
478 slow_group_ids = Datastores.redis.smembers(slow_key)
449 slow_group_ids = Datastores.redis.smembers(slow_key)
479 Datastores.redis.delete(error_key)
450 Datastores.redis.delete(error_key)
480 Datastores.redis.delete(slow_key)
451 Datastores.redis.delete(slow_key)
481 err_gids = [int(g_id) for g_id in error_group_ids]
452 err_gids = [int(g_id) for g_id in error_group_ids]
482 slow_gids = [int(g_id) for g_id in list(slow_group_ids)]
453 slow_gids = [int(g_id) for g_id in list(slow_group_ids)]
483 group_ids = err_gids + slow_gids
454 group_ids = err_gids + slow_gids
484 occurence_dict = {}
455 occurence_dict = {}
485 for g_id in group_ids:
456 for g_id in group_ids:
486 key = REDIS_KEYS['counters'][
457 key = REDIS_KEYS['counters'][
487 'report_group_occurences_alerting'].format(
458 'report_group_occurences_alerting'].format(
488 g_id)
459 g_id)
489 val = Datastores.redis.get(key)
460 val = Datastores.redis.get(key)
490 Datastores.redis.delete(key)
461 Datastores.redis.delete(key)
491 if val:
462 if val:
492 occurence_dict[g_id] = int(val)
463 occurence_dict[g_id] = int(val)
493 else:
464 else:
494 occurence_dict[g_id] = 1
465 occurence_dict[g_id] = 1
495 report_groups = ReportGroupService.by_ids(group_ids)
466 report_groups = ReportGroupService.by_ids(group_ids)
496 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
467 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
497
468
498 ApplicationService.check_for_groups_alert(
469 ApplicationService.check_for_groups_alert(
499 application, 'alert', report_groups=report_groups,
470 application, 'alert', report_groups=report_groups,
500 occurence_dict=occurence_dict, since_when=since_when)
471 occurence_dict=occurence_dict, since_when=since_when)
501 except Exception as exc:
472 except Exception as exc:
502 print_traceback(log)
473 print_traceback(log)
503 raise
474 raise
504
475
505
476
506 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
477 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
507 def close_alerts():
478 def close_alerts():
508 log.warning('Checking alerts')
479 log.warning('Checking alerts')
509 since_when = datetime.utcnow()
480 since_when = datetime.utcnow()
510 try:
481 try:
511 event_types = [Event.types['error_report_alert'],
482 event_types = [Event.types['error_report_alert'],
512 Event.types['slow_report_alert'], ]
483 Event.types['slow_report_alert'], ]
513 statuses = [Event.statuses['active']]
484 statuses = [Event.statuses['active']]
514 # get events older than 5 min
485 # get events older than 5 min
515 events = EventService.by_type_and_status(
486 events = EventService.by_type_and_status(
516 event_types,
487 event_types,
517 statuses,
488 statuses,
518 older_than=(since_when - timedelta(minutes=5)))
489 older_than=(since_when - timedelta(minutes=5)))
519 for event in events:
490 for event in events:
520 # see if we can close them
491 # see if we can close them
521 event.validate_or_close(
492 event.validate_or_close(
522 since_when=(since_when - timedelta(minutes=1)))
493 since_when=(since_when - timedelta(minutes=1)))
523 except Exception as exc:
494 except Exception as exc:
524 print_traceback(log)
495 print_traceback(log)
525 raise
496 raise
526
497
527
498
528 @celery.task(queue="default", default_retry_delay=600, max_retries=999)
499 @celery.task(queue="default", default_retry_delay=600, max_retries=999)
529 def update_tag_counter(tag_name, tag_value, count):
500 def update_tag_counter(tag_name, tag_value, count):
530 try:
501 try:
531 query = DBSession.query(Tag).filter(Tag.name == tag_name).filter(
502 query = DBSession.query(Tag).filter(Tag.name == tag_name).filter(
532 sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value),
503 sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value),
533 sa.types.TEXT))
504 sa.types.TEXT))
534 query.update({'times_seen': Tag.times_seen + count,
505 query.update({'times_seen': Tag.times_seen + count,
535 'last_timestamp': datetime.utcnow()},
506 'last_timestamp': datetime.utcnow()},
536 synchronize_session=False)
507 synchronize_session=False)
537 session = DBSession()
508 session = DBSession()
538 mark_changed(session)
509 mark_changed(session)
539 return True
510 return True
540 except Exception as exc:
511 except Exception as exc:
541 print_traceback(log)
512 print_traceback(log)
542 update_tag_counter.retry(exc=exc)
513 update_tag_counter.retry(exc=exc)
543
514
544
515
545 @celery.task(queue="default")
516 @celery.task(queue="default")
546 def update_tag_counters():
517 def update_tag_counters():
547 """
518 """
548 Sets task to update counters for application tags
519 Sets task to update counters for application tags
549 """
520 """
550 tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1)
521 tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1)
551 Datastores.redis.delete(REDIS_KEYS['seen_tag_list'])
522 Datastores.redis.delete(REDIS_KEYS['seen_tag_list'])
552 c = collections.Counter(tags)
523 c = collections.Counter(tags)
553 for t_json, count in c.items():
524 for t_json, count in c.items():
554 tag_info = json.loads(t_json)
525 tag_info = json.loads(t_json)
555 update_tag_counter.delay(tag_info[0], tag_info[1], count)
526 update_tag_counter.delay(tag_info[0], tag_info[1], count)
556
527
557
528
558 @celery.task(queue="default")
529 @celery.task(queue="default")
559 def daily_digest():
530 def daily_digest():
560 """
531 """
561 Sends daily digest with top 50 error reports
532 Sends daily digest with top 50 error reports
562 """
533 """
563 request = get_current_request()
534 request = get_current_request()
564 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
535 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
565 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
536 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
566 since_when = datetime.utcnow() - timedelta(hours=8)
537 since_when = datetime.utcnow() - timedelta(hours=8)
567 log.warning('Generating daily digests')
538 log.warning('Generating daily digests')
568 for resource_id in apps:
539 for resource_id in apps:
569 resource_id = resource_id.decode('utf8')
540 resource_id = resource_id.decode('utf8')
570 end_date = datetime.utcnow().replace(microsecond=0, second=0)
541 end_date = datetime.utcnow().replace(microsecond=0, second=0)
571 filter_settings = {'resource': [resource_id],
542 filter_settings = {'resource': [resource_id],
572 'tags': [{'name': 'type',
543 'tags': [{'name': 'type',
573 'value': ['error'], 'op': None}],
544 'value': ['error'], 'op': None}],
574 'type': 'error', 'start_date': since_when,
545 'type': 'error', 'start_date': since_when,
575 'end_date': end_date}
546 'end_date': end_date}
576
547
577 reports = ReportGroupService.get_trending(
548 reports = ReportGroupService.get_trending(
578 request, filter_settings=filter_settings, limit=50)
549 request, filter_settings=filter_settings, limit=50)
579
550
580 application = ApplicationService.by_id(resource_id)
551 application = ApplicationService.by_id(resource_id)
581 if application:
552 if application:
582 users = set([p.user for p in application.users_for_perm('view')])
553 users = set([p.user for p in application.users_for_perm('view')])
583 for user in users:
554 for user in users:
584 user.send_digest(request, application, reports=reports,
555 user.send_digest(request, application, reports=reports,
585 since_when=since_when)
556 since_when=since_when)
586
557
587
558
588 @celery.task(queue="default")
559 @celery.task(queue="default")
589 def notifications_reports():
560 def notifications_reports():
590 """
561 """
591 Loop that checks redis for info and then issues new tasks to celery to
562 Loop that checks redis for info and then issues new tasks to celery to
592 issue notifications
563 issue notifications
593 """
564 """
594 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
565 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
595 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
566 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
596 for app in apps:
567 for app in apps:
597 log.warning('Notify for app: %s' % app)
568 log.warning('Notify for app: %s' % app)
598 check_user_report_notifications.delay(app.decode('utf8'))
569 check_user_report_notifications.delay(app.decode('utf8'))
599
570
600 @celery.task(queue="default")
571 @celery.task(queue="default")
601 def alerting_reports():
572 def alerting_reports():
602 """
573 """
603 Loop that checks redis for info and then issues new tasks to celery to
574 Loop that checks redis for info and then issues new tasks to celery to
604 perform the following:
575 perform the following:
605 - which applications should have new alerts opened
576 - which applications should have new alerts opened
606 """
577 """
607
578
608 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting'])
579 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting'])
609 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting'])
580 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting'])
610 for app in apps:
581 for app in apps:
611 log.warning('Notify for app: %s' % app)
582 log.warning('Notify for app: %s' % app)
612 check_alerts.delay(app.decode('utf8'))
583 check_alerts.delay(app.decode('utf8'))
613
584
614
585
615 @celery.task(queue="default", soft_time_limit=3600 * 4,
586 @celery.task(queue="default", soft_time_limit=3600 * 4,
616 hard_time_limit=3600 * 4, max_retries=999)
587 hard_time_limit=3600 * 4, max_retries=999)
617 def logs_cleanup(resource_id, filter_settings):
588 def logs_cleanup(resource_id, filter_settings):
618 request = get_current_request()
589 request = get_current_request()
619 request.tm.begin()
590 request.tm.begin()
620 es_query = {
591 es_query = {
621 "_source": False,
592 "_source": False,
622 "size": 5000,
593 "size": 5000,
623 "query": {
594 "query": {
624 "filtered": {
595 "filtered": {
625 "filter": {
596 "filter": {
626 "and": [{"term": {"resource_id": resource_id}}]
597 "and": [{"term": {"resource_id": resource_id}}]
627 }
598 }
628 }
599 }
629 }
600 }
630 }
601 }
631
602
632 query = DBSession.query(Log).filter(Log.resource_id == resource_id)
603 query = DBSession.query(Log).filter(Log.resource_id == resource_id)
633 if filter_settings['namespace']:
604 if filter_settings['namespace']:
634 query = query.filter(Log.namespace == filter_settings['namespace'][0])
605 query = query.filter(Log.namespace == filter_settings['namespace'][0])
635 es_query['query']['filtered']['filter']['and'].append(
606 es_query['query']['filtered']['filter']['and'].append(
636 {"term": {"namespace": filter_settings['namespace'][0]}}
607 {"term": {"namespace": filter_settings['namespace'][0]}}
637 )
608 )
638 query.delete(synchronize_session=False)
609 query.delete(synchronize_session=False)
639 request.tm.commit()
610 request.tm.commit()
640 result = request.es_conn.search(es_query, index='rcae_l_*',
611 result = request.es_conn.search(es_query, index='rcae_l_*',
641 doc_type='log', es_scroll='1m',
612 doc_type='log', es_scroll='1m',
642 es_search_type='scan')
613 es_search_type='scan')
643 scroll_id = result['_scroll_id']
614 scroll_id = result['_scroll_id']
644 while True:
615 while True:
645 log.warning('log_cleanup, app:{} ns:{} batch'.format(
616 log.warning('log_cleanup, app:{} ns:{} batch'.format(
646 resource_id,
617 resource_id,
647 filter_settings['namespace']
618 filter_settings['namespace']
648 ))
619 ))
649 es_docs_to_delete = []
620 es_docs_to_delete = []
650 result = request.es_conn.send_request(
621 result = request.es_conn.send_request(
651 'POST', ['_search', 'scroll'],
622 'POST', ['_search', 'scroll'],
652 body=scroll_id, query_params={"scroll": '1m'})
623 body=scroll_id, query_params={"scroll": '1m'})
653 scroll_id = result['_scroll_id']
624 scroll_id = result['_scroll_id']
654 if not result['hits']['hits']:
625 if not result['hits']['hits']:
655 break
626 break
656 for doc in result['hits']['hits']:
627 for doc in result['hits']['hits']:
657 es_docs_to_delete.append({"id": doc['_id'],
628 es_docs_to_delete.append({"id": doc['_id'],
658 "index": doc['_index']})
629 "index": doc['_index']})
659
630
660 for batch in in_batches(es_docs_to_delete, 10):
631 for batch in in_batches(es_docs_to_delete, 10):
661 Datastores.es.bulk([Datastores.es.delete_op(doc_type='log',
632 Datastores.es.bulk([Datastores.es.delete_op(doc_type='log',
662 **to_del)
633 **to_del)
663 for to_del in batch])
634 for to_del in batch])
@@ -1,758 +1,743 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # AppEnlight Enterprise Edition, including its added features, Support
18 # AppEnlight Enterprise Edition, including its added features, Support
19 # services, and proprietary license terms, please see
19 # services, and proprietary license terms, please see
20 # https://rhodecode.com/licenses/
20 # https://rhodecode.com/licenses/
21
21
22 import datetime
22 import datetime
23
23
24 import colander
24 import colander
25 from colander import null
25 from colander import null
26
26
27 # those keywords are here so we can distingush between searching for tags and
27 # those keywords are here so we can distingush between searching for tags and
28 # normal properties of reports/logs
28 # normal properties of reports/logs
29 accepted_search_params = ['resource',
29 accepted_search_params = ['resource',
30 'request_id',
30 'request_id',
31 'start_date',
31 'start_date',
32 'end_date',
32 'end_date',
33 'page',
33 'page',
34 'min_occurences',
34 'min_occurences',
35 'http_status',
35 'http_status',
36 'priority',
36 'priority',
37 'error',
37 'error',
38 'url_path',
38 'url_path',
39 'url_domain',
39 'url_domain',
40 'report_status',
40 'report_status',
41 'min_duration',
41 'min_duration',
42 'max_duration',
42 'max_duration',
43 'message',
43 'message',
44 'level',
44 'level',
45 'namespace']
45 'namespace']
46
46
47
47
48 @colander.deferred
48 @colander.deferred
49 def deferred_utcnow(node, kw):
49 def deferred_utcnow(node, kw):
50 return kw['utcnow']
50 return kw['utcnow']
51
51
52
52
53 def lowercase_preparer(input_data):
53 def lowercase_preparer(input_data):
54 """
54 """
55 Transforms a list of string entries to lowercase
55 Transforms a list of string entries to lowercase
56 Used in search query validation
56 Used in search query validation
57 """
57 """
58 if not input_data:
58 if not input_data:
59 return input_data
59 return input_data
60 return [x.lower() for x in input_data]
60 return [x.lower() for x in input_data]
61
61
62
62
63 def shortener_factory(cutoff_size=32):
63 def shortener_factory(cutoff_size=32):
64 """
64 """
65 Limits the input data to specific character count
65 Limits the input data to specific character count
66 :arg cutoff_cutoff_size How much characters to store
66 :arg cutoff_cutoff_size How much characters to store
67
67
68 """
68 """
69
69
70 def shortener(input_data):
70 def shortener(input_data):
71 if not input_data:
71 if not input_data:
72 return input_data
72 return input_data
73 else:
73 else:
74 if isinstance(input_data, str):
74 if isinstance(input_data, str):
75 return input_data[:cutoff_size]
75 return input_data[:cutoff_size]
76 else:
76 else:
77 return input_data
77 return input_data
78
78
79 return shortener
79 return shortener
80
80
81
81
82 def cast_to_unicode_or_null(value):
82 def cast_to_unicode_or_null(value):
83 if value is not colander.null:
83 if value is not colander.null:
84 return str(value)
84 return str(value)
85 return None
85 return None
86
86
87
87
88 class NonTZDate(colander.DateTime):
88 class NonTZDate(colander.DateTime):
89 """ Returns null for incorrect date format - also removes tz info"""
89 """ Returns null for incorrect date format - also removes tz info"""
90
90
91 def deserialize(self, node, cstruct):
91 def deserialize(self, node, cstruct):
92 # disabled for now
92 # disabled for now
93 # if cstruct and isinstance(cstruct, str):
93 # if cstruct and isinstance(cstruct, str):
94 # if ':' not in cstruct:
94 # if ':' not in cstruct:
95 # cstruct += ':0.0'
95 # cstruct += ':0.0'
96 # if '.' not in cstruct:
96 # if '.' not in cstruct:
97 # cstruct += '.0'
97 # cstruct += '.0'
98 value = super(NonTZDate, self).deserialize(node, cstruct)
98 value = super(NonTZDate, self).deserialize(node, cstruct)
99 if value:
99 if value:
100 return value.replace(tzinfo=None)
100 return value.replace(tzinfo=None)
101 return value
101 return value
102
102
103
103
104 class UnknownType(object):
104 class UnknownType(object):
105 """
105 """
106 Universal type that will accept a deserialized JSON object and store it unaltered
106 Universal type that will accept a deserialized JSON object and store it unaltered
107 """
107 """
108
108
109 def serialize(self, node, appstruct):
109 def serialize(self, node, appstruct):
110 if appstruct is null:
110 if appstruct is null:
111 return null
111 return null
112 return appstruct
112 return appstruct
113
113
114 def deserialize(self, node, cstruct):
114 def deserialize(self, node, cstruct):
115 if cstruct is null:
115 if cstruct is null:
116 return null
116 return null
117 return cstruct
117 return cstruct
118
118
119 def cstruct_children(self):
119 def cstruct_children(self):
120 return []
120 return []
121
121
122
122
123 # SLOW REPORT SCHEMA
123 # SLOW REPORT SCHEMA
124
124
125 def rewrite_type(input_data):
125 def rewrite_type(input_data):
126 """
126 """
127 Fix for legacy appenlight clients
127 Fix for legacy appenlight clients
128 """
128 """
129 if input_data == 'remote_call':
129 if input_data == 'remote_call':
130 return 'remote'
130 return 'remote'
131 return input_data
131 return input_data
132
132
133
133
134 class ExtraTupleSchema(colander.TupleSchema):
134 class ExtraTupleSchema(colander.TupleSchema):
135 name = colander.SchemaNode(colander.String(),
135 name = colander.SchemaNode(colander.String(),
136 validator=colander.Length(1, 64))
136 validator=colander.Length(1, 64))
137 value = colander.SchemaNode(UnknownType(),
137 value = colander.SchemaNode(UnknownType(),
138 preparer=shortener_factory(512),
138 preparer=shortener_factory(512),
139 missing=None)
139 missing=None)
140
140
141
141
142 class ExtraSchemaList(colander.SequenceSchema):
142 class ExtraSchemaList(colander.SequenceSchema):
143 tag = ExtraTupleSchema()
143 tag = ExtraTupleSchema()
144 missing = None
144 missing = None
145
145
146
146
147 class TagsTupleSchema(colander.TupleSchema):
147 class TagsTupleSchema(colander.TupleSchema):
148 name = colander.SchemaNode(colander.String(),
148 name = colander.SchemaNode(colander.String(),
149 validator=colander.Length(1, 128))
149 validator=colander.Length(1, 128))
150 value = colander.SchemaNode(UnknownType(),
150 value = colander.SchemaNode(UnknownType(),
151 preparer=shortener_factory(128),
151 preparer=shortener_factory(128),
152 missing=None)
152 missing=None)
153
153
154
154
155 class TagSchemaList(colander.SequenceSchema):
155 class TagSchemaList(colander.SequenceSchema):
156 tag = TagsTupleSchema()
156 tag = TagsTupleSchema()
157 missing = None
157 missing = None
158
158
159
159
160 class NumericTagsTupleSchema(colander.TupleSchema):
160 class NumericTagsTupleSchema(colander.TupleSchema):
161 name = colander.SchemaNode(colander.String(),
161 name = colander.SchemaNode(colander.String(),
162 validator=colander.Length(1, 128))
162 validator=colander.Length(1, 128))
163 value = colander.SchemaNode(colander.Float(), missing=0)
163 value = colander.SchemaNode(colander.Float(), missing=0)
164
164
165
165
166 class NumericTagSchemaList(colander.SequenceSchema):
166 class NumericTagSchemaList(colander.SequenceSchema):
167 tag = NumericTagsTupleSchema()
167 tag = NumericTagsTupleSchema()
168 missing = None
168 missing = None
169
169
170
170
171 class SlowCallSchema(colander.MappingSchema):
171 class SlowCallSchema(colander.MappingSchema):
172 """
172 """
173 Validates slow call format in slow call list
173 Validates slow call format in slow call list
174 """
174 """
175 start = colander.SchemaNode(NonTZDate())
175 start = colander.SchemaNode(NonTZDate())
176 end = colander.SchemaNode(NonTZDate())
176 end = colander.SchemaNode(NonTZDate())
177 statement = colander.SchemaNode(colander.String(), missing='')
177 statement = colander.SchemaNode(colander.String(), missing='')
178 parameters = colander.SchemaNode(UnknownType(), missing=None)
178 parameters = colander.SchemaNode(UnknownType(), missing=None)
179 type = colander.SchemaNode(
179 type = colander.SchemaNode(
180 colander.String(),
180 colander.String(),
181 preparer=rewrite_type,
181 preparer=rewrite_type,
182 validator=colander.OneOf(
182 validator=colander.OneOf(
183 ['tmpl', 'sql', 'nosql', 'remote', 'unknown', 'custom']),
183 ['tmpl', 'sql', 'nosql', 'remote', 'unknown', 'custom']),
184 missing='unknown')
184 missing='unknown')
185 subtype = colander.SchemaNode(colander.String(),
185 subtype = colander.SchemaNode(colander.String(),
186 validator=colander.Length(1, 16),
186 validator=colander.Length(1, 16),
187 missing='unknown')
187 missing='unknown')
188 location = colander.SchemaNode(colander.String(),
188 location = colander.SchemaNode(colander.String(),
189 validator=colander.Length(1, 255),
189 validator=colander.Length(1, 255),
190 missing='')
190 missing='')
191
191
192
192
193 def limited_date(node, value):
193 def limited_date(node, value):
194 """ checks to make sure that the value is not older/newer than 2h """
194 """ checks to make sure that the value is not older/newer than 2h """
195 hours = 2
195 hours = 2
196 min_time = datetime.datetime.utcnow() - datetime.timedelta(hours=72)
196 min_time = datetime.datetime.utcnow() - datetime.timedelta(hours=72)
197 max_time = datetime.datetime.utcnow() + datetime.timedelta(hours=2)
197 max_time = datetime.datetime.utcnow() + datetime.timedelta(hours=2)
198 if min_time > value:
198 if min_time > value:
199 msg = '%r is older from current UTC time by ' + str(hours) + ' hours.'
199 msg = '%r is older from current UTC time by ' + str(hours) + ' hours.'
200 msg += ' Ask administrator to enable permanent logging for ' \
200 msg += ' Ask administrator to enable permanent logging for ' \
201 'your application to store logs with dates in past.'
201 'your application to store logs with dates in past.'
202 raise colander.Invalid(node, msg % value)
202 raise colander.Invalid(node, msg % value)
203 if max_time < value:
203 if max_time < value:
204 msg = '%r is newer from current UTC time by ' + str(hours) + ' hours'
204 msg = '%r is newer from current UTC time by ' + str(hours) + ' hours'
205 msg += ' Ask administrator to enable permanent logging for ' \
205 msg += ' Ask administrator to enable permanent logging for ' \
206 'your application to store logs with dates in future.'
206 'your application to store logs with dates in future.'
207 raise colander.Invalid(node, msg % value)
207 raise colander.Invalid(node, msg % value)
208
208
209
209
210 class SlowCallListSchema(colander.SequenceSchema):
210 class SlowCallListSchema(colander.SequenceSchema):
211 """
211 """
212 Validates list of individual slow calls
212 Validates list of individual slow calls
213 """
213 """
214 slow_call = SlowCallSchema()
214 slow_call = SlowCallSchema()
215
215
216
216
217 class RequestStatsSchema(colander.MappingSchema):
217 class RequestStatsSchema(colander.MappingSchema):
218 """
218 """
219 Validates format of requests statistics dictionary
219 Validates format of requests statistics dictionary
220 """
220 """
221 main = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
221 main = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
222 missing=0)
222 missing=0)
223 sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
223 sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
224 missing=0)
224 missing=0)
225 nosql = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
225 nosql = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
226 missing=0)
226 missing=0)
227 remote = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
227 remote = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
228 missing=0)
228 missing=0)
229 tmpl = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
229 tmpl = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
230 missing=0)
230 missing=0)
231 custom = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
231 custom = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
232 missing=0)
232 missing=0)
233 sql_calls = colander.SchemaNode(colander.Float(),
233 sql_calls = colander.SchemaNode(colander.Float(),
234 validator=colander.Range(0),
234 validator=colander.Range(0),
235 missing=0)
235 missing=0)
236 nosql_calls = colander.SchemaNode(colander.Float(),
236 nosql_calls = colander.SchemaNode(colander.Float(),
237 validator=colander.Range(0),
237 validator=colander.Range(0),
238 missing=0)
238 missing=0)
239 remote_calls = colander.SchemaNode(colander.Float(),
239 remote_calls = colander.SchemaNode(colander.Float(),
240 validator=colander.Range(0),
240 validator=colander.Range(0),
241 missing=0)
241 missing=0)
242 tmpl_calls = colander.SchemaNode(colander.Float(),
242 tmpl_calls = colander.SchemaNode(colander.Float(),
243 validator=colander.Range(0),
243 validator=colander.Range(0),
244 missing=0)
244 missing=0)
245 custom_calls = colander.SchemaNode(colander.Float(),
245 custom_calls = colander.SchemaNode(colander.Float(),
246 validator=colander.Range(0),
246 validator=colander.Range(0),
247 missing=0)
247 missing=0)
248
248
249
249
250 class FrameInfoVarSchema(colander.SequenceSchema):
250 class FrameInfoVarSchema(colander.SequenceSchema):
251 """
251 """
252 Validates format of frame variables of a traceback
252 Validates format of frame variables of a traceback
253 """
253 """
254 vars = colander.SchemaNode(UnknownType(),
254 vars = colander.SchemaNode(UnknownType(),
255 validator=colander.Length(2, 2))
255 validator=colander.Length(2, 2))
256
256
257
257
258 class FrameInfoSchema(colander.MappingSchema):
258 class FrameInfoSchema(colander.MappingSchema):
259 """
259 """
260 Validates format of a traceback line
260 Validates format of a traceback line
261 """
261 """
262 cline = colander.SchemaNode(colander.String(), missing='')
262 cline = colander.SchemaNode(colander.String(), missing='')
263 module = colander.SchemaNode(colander.String(), missing='')
263 module = colander.SchemaNode(colander.String(), missing='')
264 line = colander.SchemaNode(colander.String(), missing='')
264 line = colander.SchemaNode(colander.String(), missing='')
265 file = colander.SchemaNode(colander.String(), missing='')
265 file = colander.SchemaNode(colander.String(), missing='')
266 fn = colander.SchemaNode(colander.String(), missing='')
266 fn = colander.SchemaNode(colander.String(), missing='')
267 vars = FrameInfoVarSchema()
267 vars = FrameInfoVarSchema()
268
268
269
269
270 class FrameInfoListSchema(colander.SequenceSchema):
270 class FrameInfoListSchema(colander.SequenceSchema):
271 """
271 """
272 Validates format of list of traceback lines
272 Validates format of list of traceback lines
273 """
273 """
274 frame = colander.SchemaNode(UnknownType())
274 frame = colander.SchemaNode(UnknownType())
275
275
276
276
class ReportDetailBaseSchema(colander.MappingSchema):
    """
    Validates format of report - ie. request parameters and stats for a request in report group
    """
    # shortener_factory(n) truncates over-long client-supplied strings to
    # the backing column length; the ``lambda x: x or ''`` preparers coerce
    # None/falsy values to an empty string
    username = colander.SchemaNode(colander.String(),
                                   preparer=[shortener_factory(255),
                                             lambda x: x or ''],
                                   missing='')
    request_id = colander.SchemaNode(colander.String(),
                                     preparer=shortener_factory(40),
                                     missing='')
    url = colander.SchemaNode(colander.String(),
                              preparer=shortener_factory(1024), missing='')
    # 39 chars fits a full-length IPv6 address
    ip = colander.SchemaNode(colander.String(), preparer=shortener_factory(39),
                             missing=None)
    # limited_date rejects dates outside the accepted window;
    # start_time defaults to "now" (UTC) when omitted
    start_time = colander.SchemaNode(NonTZDate(), validator=limited_date,
                                     missing=deferred_utcnow)
    end_time = colander.SchemaNode(NonTZDate(), validator=limited_date,
                                   missing=None)
    user_agent = colander.SchemaNode(colander.String(),
                                     preparer=[shortener_factory(512),
                                               lambda x: x or ''],
                                     missing='')
    message = colander.SchemaNode(colander.String(),
                                  preparer=shortener_factory(2048),
                                  missing='')
    # optional explicit grouping key overriding automatic report grouping
    group_string = colander.SchemaNode(colander.String(),
                                       validator=colander.Length(1, 512),
                                       missing=None)
    request_stats = RequestStatsSchema(missing=None)
    # raw request data is preserved as-is (unknown keys kept)
    request = colander.SchemaNode(colander.Mapping(unknown='preserve'),
                                  missing={})
    traceback = FrameInfoListSchema(missing=None)
    slow_calls = SlowCallListSchema(missing=[])
    extra = ExtraSchemaList()
312
312
313
313
class ReportDetailSchema_0_4(ReportDetailBaseSchema):
    # legacy 0.4 protocol: traceback frames arrived under ``frameinfo``
    # NOTE(review): still referenced by ReportDetailListSchema even though
    # other 0.4 validators were removed — confirm this is intentional
    frameinfo = FrameInfoListSchema(missing=None)
316
316
317
317
class ReportDetailSchema_0_5(ReportDetailBaseSchema):
    # 0.5 protocol report detail — identical to the base schema
    pass
320
320
321
321
class ReportDetailListSchema(colander.SequenceSchema):
    """
    Validates format of list of reports
    """
    # uses the legacy 0.4 detail schema (accepts ``frameinfo`` key)
    report_detail = ReportDetailSchema_0_4()
    # at least one report detail is required
    validator = colander.Length(1)
328
328
329
329
class ReportSchemaBase(colander.MappingSchema):
    """
    Validates format of report group
    """
    # client library identifier; falsy values become 'unknown'
    client = colander.SchemaNode(colander.String(),
                                 preparer=lambda x: x or 'unknown')
    # server hostname, lowercased and truncated to 128 chars
    server = colander.SchemaNode(
        colander.String(),
        preparer=[
            lambda x: x.lower() if x else 'unknown', shortener_factory(128)],
        missing='unknown')
    # report priority 1-10, default 5 (also coerced from falsy values)
    priority = colander.SchemaNode(colander.Int(),
                                   preparer=[lambda x: x or 5],
                                   validator=colander.Range(1, 10),
                                   missing=5)
    language = colander.SchemaNode(colander.String(), missing='unknown')
    error = colander.SchemaNode(colander.String(),
                                preparer=shortener_factory(512),
                                missing='')
    view_name = colander.SchemaNode(colander.String(),
                                    preparer=[shortener_factory(128),
                                              lambda x: x or ''],
                                    missing='')
    # falsy status codes are coerced to 200
    http_status = colander.SchemaNode(colander.Int(),
                                      preparer=[lambda x: x or 200],
                                      validator=colander.Range(1))

    # NOTE: ``occurences`` is a misspelling of "occurrences" but is part of
    # the client wire format and cannot be renamed here
    occurences = colander.SchemaNode(colander.Int(),
                                     validator=colander.Range(1, 99999999999),
                                     missing=1)
    tags = TagSchemaList()
361
361
362
362
363 class ReportSchema_0_4(ReportSchemaBase):
364 error_type = colander.SchemaNode(colander.String(),
365 preparer=[shortener_factory(512)],
366 missing='')
367 report_details = ReportDetailListSchema()
368
369
class ReportSchema_0_5(ReportSchemaBase, ReportDetailSchema_0_5):
    # 0.5 protocol: report group and report detail fields merged into one
    # flat mapping (multiple inheritance combines both field sets)
    pass
372
365
373
366
374 class ReportListSchema_0_4(colander.SequenceSchema):
375 """
376 Validates format of list of report groups
377 """
378 report = ReportSchema_0_4()
379 validator = colander.Length(1)
380
381
class ReportListSchema_0_5(colander.SequenceSchema):
    """
    Validates format of list of report groups
    """
    report = ReportSchema_0_5()
    # at least one report is required per submission
    validator = colander.Length(1)
388
373
389
374
class LogSchema(colander.MappingSchema):
    """
    Validates format of individual log entry
    """
    # optional client-supplied primary key; coerced to unicode then truncated
    primary_key = colander.SchemaNode(UnknownType(),
                                      preparer=[cast_to_unicode_or_null,
                                                shortener_factory(128)],
                                      missing=None)
    log_level = colander.SchemaNode(colander.String(),
                                    preparer=shortener_factory(10),
                                    missing='UNKNOWN')
    message = colander.SchemaNode(colander.String(),
                                  preparer=shortener_factory(4096),
                                  missing='')
    namespace = colander.SchemaNode(colander.String(),
                                    preparer=shortener_factory(128),
                                    missing='')
    request_id = colander.SchemaNode(colander.String(),
                                     preparer=shortener_factory(40),
                                     missing='')
    server = colander.SchemaNode(colander.String(),
                                 preparer=shortener_factory(128),
                                 missing='unknown')
    # timestamp defaults to "now" (UTC); limited_date bounds accepted range
    date = colander.SchemaNode(NonTZDate(),
                               validator=limited_date,
                               missing=deferred_utcnow)
    tags = TagSchemaList()
417
402
418
403
class LogSchemaPermanent(LogSchema):
    # permanent logs: date override drops the limited_date validator so
    # older timestamps are accepted
    date = colander.SchemaNode(NonTZDate(),
                               missing=deferred_utcnow)
    # marks the entry as exempt from regular log retention/rotation
    permanent = colander.SchemaNode(colander.Boolean(), missing=False)
423
408
424
409
class LogListSchema(colander.SequenceSchema):
    """
    Validates format of list of log entries
    """
    log = LogSchema()
    # at least one log entry is required
    validator = colander.Length(1)
431
416
432
417
class LogListPermanentSchema(colander.SequenceSchema):
    """
    Validates format of list of log entries
    """
    log = LogSchemaPermanent()
    # at least one log entry is required
    validator = colander.Length(1)
439
424
440
425
class ViewRequestStatsSchema(RequestStatsSchema):
    # extends base request stats with a non-negative request counter
    requests = colander.SchemaNode(colander.Integer(),
                                   validator=colander.Range(0),
                                   missing=0)
445
430
446
431
class ViewMetricTupleSchema(colander.TupleSchema):
    """
    Validates list of views and their corresponding request stats object ie:
    ["dir/module:func",{"custom": 0.0..}]
    """
    # first tuple element: view name, truncated and defaulted to 'unknown'
    view_name = colander.SchemaNode(colander.String(),
                                    preparer=[shortener_factory(128),
                                              lambda x: x or 'unknown'],
                                    missing='unknown')
    # second tuple element: request stats for that view
    metrics = ViewRequestStatsSchema()
457
442
458
443
class ViewMetricListSchema(colander.SequenceSchema):
    """
    Validates view breakdown stats objects list
    {metrics key of server/time object}
    """
    view_tuple = ViewMetricTupleSchema()
    # at least one view entry is required
    validator = colander.Length(1)
466
451
467
452
class ViewMetricSchema(colander.MappingSchema):
    """
    Validates server/timeinterval object, ie:
    {server/time object}
    """
    timestamp = colander.SchemaNode(NonTZDate(),
                                    validator=limited_date,
                                    missing=None)
    # reporting server name; truncated and defaulted to 'unknown'
    server = colander.SchemaNode(colander.String(),
                                 preparer=[shortener_factory(128),
                                           lambda x: x or 'unknown'],
                                 missing='unknown')
    metrics = ViewMetricListSchema()
482
467
483
468
class GeneralMetricSchema(colander.MappingSchema):
    """
    Validates universal metric schema
    """
    namespace = colander.SchemaNode(colander.String(), missing='',
                                    preparer=shortener_factory(128))

    server_name = colander.SchemaNode(colander.String(),
                                      preparer=[shortener_factory(128),
                                                lambda x: x or 'unknown'],
                                      missing='unknown')
    # timestamp defaults to "now" (UTC); limited_date bounds accepted range
    timestamp = colander.SchemaNode(NonTZDate(), validator=limited_date,
                                    missing=deferred_utcnow)
    tags = TagSchemaList()
499
484
500
485
class GeneralMetricsListSchema(colander.SequenceSchema):
    # list of universal metrics; at least one entry required
    metric = GeneralMetricSchema()
    validator = colander.Length(1)
504
489
505
490
class MetricsListSchema(colander.SequenceSchema):
    """
    Validates list of metrics objects ie:
    [{server/time object}, ] part
    """
    metric = ViewMetricSchema()
    # at least one metrics object is required
    validator = colander.Length(1)
515
500
516
501
class StringToAppList(object):
    """
    Returns validated list of application ids from user query and
    set of applications user is allowed to look at
    transform string to list containing single integer
    """

    def serialize(self, node, appstruct):
        # pass-through on the way out; nothing to render
        if appstruct is null:
            return null
        return appstruct

    def deserialize(self, node, cstruct):
        if cstruct is null:
            return null

        # ids of applications the current user may access, injected via
        # schema binding (see possible_applications* deferreds below)
        apps = set([int(a) for a in node.bindings['resources']])

        # a single scalar id is accepted and normalized into a list
        if isinstance(cstruct, str):
            cstruct = [cstruct]

        cstruct = [int(a) for a in cstruct]

        # keep only requested ids the user is actually allowed to see;
        # an empty intersection behaves like "nothing supplied"
        valid_apps = list(apps.intersection(set(cstruct)))
        if valid_apps:
            return valid_apps
        return null

    def cstruct_children(self):
        # scalar-like type: no child cstructs to report
        return []
547
532
548
533
@colander.deferred
def possible_applications_validator(node, kw):
    """Deferred validator: the submitted app ids must be a non-empty subset
    of the applications the user may access (bound via ``kw['resources']``).
    """
    possible_apps = [int(a) for a in kw['resources']]
    return colander.All(colander.ContainsOnly(possible_apps),
                        colander.Length(1))
554
539
555
540
@colander.deferred
def possible_applications(node, kw):
    """Deferred default: all application ids the user may access."""
    return [int(a) for a in kw['resources']]
559
544
560
545
@colander.deferred
def today_start(node, kw):
    """Deferred default: midnight (00:00:00.000000) of the current UTC day."""
    now = datetime.datetime.utcnow()
    return now.replace(hour=0, minute=0, second=0, microsecond=0)
566
551
567
552
@colander.deferred
def today_end(node, kw):
    """Deferred default: 23:59:00.000000 of the current UTC day."""
    now = datetime.datetime.utcnow()
    return now.replace(hour=23, minute=59, second=0, microsecond=0)
572
557
573
558
@colander.deferred
def old_start(node, kw):
    """Deferred default: UTC midnight exactly 90 days in the past."""
    midnight_today = datetime.datetime.utcnow().replace(
        hour=0, minute=0, second=0, microsecond=0)
    return midnight_today - datetime.timedelta(days=90)
580
565
581
566
# NOTE(review): exact duplicate of the ``today_end`` deferred defined above;
# this second definition simply rebinds the same name and can likely be
# removed — kept here to avoid changing module contents in a doc-only pass
@colander.deferred
def today_end(node, kw):
    return datetime.datetime.utcnow().replace(second=0, microsecond=0,
                                              minute=59, hour=23)
586
571
587
572
class PermissiveDate(colander.DateTime):
    """ Returns null for incorrect date format - also removes tz info"""

    def deserialize(self, node, cstruct):
        # empty / missing input simply deserializes to null instead of
        # raising Invalid like the parent colander.DateTime would
        if not cstruct:
            return null

        try:
            result = colander.iso8601.parse_date(
                cstruct, default_timezone=self.default_tzinfo)
        except colander.iso8601.ParseError:
            # unparsable dates are silently dropped (permissive behavior)
            return null
        # strip tzinfo so naive datetimes are stored consistently
        return result.replace(tzinfo=None)
601
586
602
587
class LogSearchSchema(colander.MappingSchema):
    """Validates the user-supplied log search query parameters."""

    def schema_type(self, **kw):
        # preserve unknown keys so extra filter params survive validation
        return colander.Mapping(unknown='preserve')

    # app ids restricted to what the user may access; defaults to all of them
    resource = colander.SchemaNode(StringToAppList(),
                                   validator=possible_applications_validator,
                                   missing=possible_applications)

    # all filters below accept either a scalar or a list of values
    message = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                  colander.SchemaNode(colander.String()),
                                  missing=None)
    level = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                colander.SchemaNode(colander.String()),
                                preparer=lowercase_preparer,
                                missing=None)
    namespace = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                    colander.SchemaNode(colander.String()),
                                    preparer=lowercase_preparer,
                                    missing=None)
    request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                     colander.SchemaNode(colander.String()),
                                     preparer=lowercase_preparer,
                                     missing=None)
    # PermissiveDate: malformed dates become None instead of erroring
    start_date = colander.SchemaNode(PermissiveDate(),
                                     missing=None)
    end_date = colander.SchemaNode(PermissiveDate(),
                                   missing=None)
    page = colander.SchemaNode(colander.Integer(),
                               validator=colander.Range(min=1),
                               missing=1)
633
618
634
619
class ReportSearchSchema(colander.MappingSchema):
    """Validates the user-supplied report search query parameters."""

    def schema_type(self, **kw):
        # preserve unknown keys so extra filter params survive validation
        return colander.Mapping(unknown='preserve')

    # app ids restricted to what the user may access; defaults to all of them
    resource = colander.SchemaNode(StringToAppList(),
                                   validator=possible_applications_validator,
                                   missing=possible_applications)
    # all filters below accept either a scalar or a list of values
    request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                     colander.SchemaNode(colander.String()),
                                     missing=None)
    # PermissiveDate: malformed dates become None instead of erroring
    start_date = colander.SchemaNode(PermissiveDate(),
                                     missing=None)
    end_date = colander.SchemaNode(PermissiveDate(),
                                   missing=None)
    page = colander.SchemaNode(colander.Integer(),
                               validator=colander.Range(min=1),
                               missing=1)

    min_occurences = colander.SchemaNode(
        colander.Sequence(accept_scalar=True),
        colander.SchemaNode(colander.Integer()),
        missing=None)

    http_status = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                      colander.SchemaNode(colander.Integer()),
                                      missing=None)
    priority = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                   colander.SchemaNode(colander.Integer()),
                                   missing=None)
    error = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                colander.SchemaNode(colander.String()),
                                missing=None)
    url_path = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                   colander.SchemaNode(colander.String()),
                                   missing=None)
    url_domain = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                     colander.SchemaNode(colander.String()),
                                     missing=None)
    report_status = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                        colander.SchemaNode(colander.String()),
                                        missing=None)
    # durations are expressed in seconds as floats
    min_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                       colander.SchemaNode(colander.Float()),
                                       missing=None)
    max_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                       colander.SchemaNode(colander.Float()),
                                       missing=None)
682
667
683
668
class TagSchema(colander.MappingSchema):
    """
    Used in log search
    """
    name = colander.SchemaNode(colander.String(),
                               validator=colander.Length(1, 32))
    # tag value accepts a scalar or a list of strings
    value = colander.SchemaNode(colander.Sequence(accept_scalar=True),
                                colander.SchemaNode(colander.String(),
                                                    validator=colander.Length(
                                                        1, 128)),
                                missing=None)
    # comparison operator for the tag filter (e.g. eq/gt), optional
    op = colander.SchemaNode(colander.String(),
                             validator=colander.Length(1, 128),
                             missing=None)
698
683
699
684
class TagListSchema(colander.SequenceSchema):
    # list of tag filters used by the search schemas
    tag = TagSchema()
702
687
703
688
class RuleFieldType(object):
    """ Validator which succeeds if the value passed to it can be cast
    to the configured primitive type ('int', 'float' or 'unicode') """

    def __init__(self, cast_to):
        # name of the primitive type the validated value must cast to
        self.cast_to = cast_to

    def __call__(self, node, value):
        """Raise colander.Invalid when *value* cannot be cast to
        ``self.cast_to``; returns None (success) otherwise."""
        try:
            if self.cast_to == 'int':
                int(value)
            elif self.cast_to == 'float':
                float(value)
            elif self.cast_to == 'unicode':
                str(value)
        # only conversion failures should become validation errors;
        # the previous bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt
        except (TypeError, ValueError):
            raise colander.Invalid(node,
                                   "Can't cast {} to {}".format(
                                       value, self.cast_to))
723
708
724
709
def build_rule_schema(ruleset, check_matrix):
    """
    Accepts ruleset and a map of fields/possible operations and builds
    validation class
    """

    schema = colander.SchemaNode(colander.Mapping())
    schema.add(colander.SchemaNode(colander.String(), name='field'))

    # compound rules: recurse into each sub-rule and collect them
    # under a 'rules' tuple node
    if ruleset['field'] in ['__AND__', '__OR__']:
        subrules = colander.SchemaNode(colander.Tuple(), name='rules')
        for rule in ruleset['rules']:
            subrules.add(build_rule_schema(rule, check_matrix))
        schema.add(subrules)
    else:
        # leaf rule: allowed operators and value type come from the
        # check_matrix entry for this field
        op_choices = check_matrix[ruleset['field']]['ops']
        cast_to = check_matrix[ruleset['field']]['type']
        schema.add(colander.SchemaNode(colander.String(),
                                       validator=colander.OneOf(op_choices),
                                       name='op'))

        schema.add(colander.SchemaNode(colander.String(),
                                       name='value',
                                       validator=RuleFieldType(cast_to)))
    return schema
734 return schema
750
735
751
736
class ConfigTypeSchema(colander.MappingSchema):
    # free-form configuration entry: a type label plus an arbitrary payload
    type = colander.SchemaNode(colander.String(), missing=None)
    config = colander.SchemaNode(UnknownType(), missing=None)
755
740
756
741
class MappingListSchema(colander.SequenceSchema):
    # sequence of arbitrary configuration mappings (not validated further)
    config = colander.SchemaNode(UnknownType())
General Comments 0
You need to be logged in to leave comments. Login now