validation: remove 0.4 report validators
ergo
@@ -1,663 +1,634 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # AppEnlight Enterprise Edition, including its added features, Support
19 19 # services, and proprietary license terms, please see
20 20 # https://rhodecode.com/licenses/
21 21
22 22 import bisect
23 23 import collections
24 24 import math
25 25 from datetime import datetime, timedelta
26 26
27 27 import sqlalchemy as sa
28 28 import pyelasticsearch
29 29
30 30 from celery.utils.log import get_task_logger
31 31 from zope.sqlalchemy import mark_changed
32 32 from pyramid.threadlocal import get_current_request, get_current_registry
33 33 from appenlight.celery import celery
34 34 from appenlight.models.report_group import ReportGroup
35 35 from appenlight.models import DBSession, Datastores
36 36 from appenlight.models.report import Report
37 37 from appenlight.models.log import Log
38 38 from appenlight.models.request_metric import Metric
39 39 from appenlight.models.event import Event
40 40
41 41 from appenlight.models.services.application import ApplicationService
42 42 from appenlight.models.services.event import EventService
43 43 from appenlight.models.services.log import LogService
44 44 from appenlight.models.services.report import ReportService
45 45 from appenlight.models.services.report_group import ReportGroupService
46 46 from appenlight.models.services.user import UserService
47 47 from appenlight.models.tag import Tag
48 48 from appenlight.lib import print_traceback
49 49 from appenlight.lib.utils import parse_proto, in_batches
50 50 from appenlight.lib.ext_json import json
51 51 from appenlight.lib.redis_keys import REDIS_KEYS
52 52 from appenlight.lib.enums import ReportType
53 53
54 54 log = get_task_logger(__name__)
55 55
56 56 sample_boundries = list(range(100, 1000, 100)) + \
57 57 list(range(1000, 10000, 1000)) + \
58 58 list(range(10000, 100000, 5000))
59 59
60 60
61 61 def pick_sample(total_occurences, report_type=None):
62 62 every = 1.0
63 63 position = bisect.bisect_left(sample_boundries, total_occurences)
64 64 if position > 0:
65 65 if report_type == ReportType.not_found:
66 66 divide = 10.0
67 67 else:
68 68 divide = 100.0
69 69 every = sample_boundries[position - 1] / divide
70 70 return total_occurences % every == 0
71 71
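# Illustrative sketch of how pick_sample() thins out stored reports, assuming
# the sample_boundries defined above (occurrence counts below are worked
# examples, not values from this changeset):
#
#   pick_sample(50)     # below the first boundary, every = 1.0 -> always True
#   pick_sample(2000)   # every = 1000 / 100.0 = 10.0; 2000 % 10 == 0 -> True
#   pick_sample(2001)   # 2001 % 10.0 != 0 -> False, the report is sampled out
#   pick_sample(2000, report_type=ReportType.not_found)
#                       # 404s use divide = 10.0 -> every = 100.0 -> True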
72 72
73 73 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
74 74 def test_exception_task():
75 75 log.error('test celery log', extra={'location': 'celery'})
76 76 log.warning('test celery log', extra={'location': 'celery'})
77 77 raise Exception('Celery exception test')
78 78
79 79
80 80 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
81 81 def test_retry_exception_task():
82 82 try:
83 83 import time
84 84
85 85 time.sleep(1.3)
86 86 log.error('test retry celery log', extra={'location': 'celery'})
87 87 log.warning('test retry celery log', extra={'location': 'celery'})
88 88 raise Exception('Celery exception test')
89 89 except Exception as exc:
90 90 test_retry_exception_task.retry(exc=exc)
91 91
92 92
93 93 @celery.task(queue="reports", default_retry_delay=600, max_retries=999)
94 94 def add_reports(resource_id, params, dataset, environ=None, **kwargs):
95 95 proto_version = parse_proto(params.get('protocol_version', ''))
96 96 current_time = datetime.utcnow().replace(second=0, microsecond=0)
97 97 try:
98 98 # we will store ES docs here for a single bulk insert
99 99 es_report_docs = {}
100 100 es_report_group_docs = {}
101 101 resource = ApplicationService.by_id(resource_id)
102 reports = []
103
104 if proto_version.major < 1 and proto_version.minor < 5:
105 for report_data in dataset:
106 report_details = report_data.get('report_details', [])
107 for i, detail_data in enumerate(report_details):
108 report_data.update(detail_data)
109 report_data.pop('report_details')
110 traceback = report_data.get('traceback')
111 if traceback is None:
112 report_data['traceback'] = report_data.get('frameinfo')
113 # for 0.3 api
114 error = report_data.pop('error_type', '')
115 if error:
116 report_data['error'] = error
117 if proto_version.minor < 4:
118 # convert "Unknown" slow reports to
119 # '' (from older clients)
120 if (report_data['error'] and
121 report_data['http_status'] < 500):
122 report_data['error'] = ''
123 message = report_data.get('message')
124 if 'extra' not in report_data:
125 report_data['extra'] = []
126 if message:
127 report_data['extra'] = [('message', message), ]
128 reports.append(report_data)
129 else:
130 reports = dataset
131 102
132 103 tags = []
133 104 es_slow_calls_docs = {}
134 105 es_reports_stats_rows = {}
135 for report_data in reports:
106 for report_data in dataset:
136 107 # build report details for later
137 108 added_details = 0
138 109 report = Report()
139 110 report.set_data(report_data, resource, proto_version)
140 111 report._skip_ft_index = True
141 112
142 113 report_group = ReportGroupService.by_hash_and_resource(
143 114 report.resource_id,
144 115 report.grouping_hash
145 116 )
146 117 occurences = report_data.get('occurences', 1)
147 118 if not report_group:
148 119 # total_reports will be incremented a moment later
149 120 report_group = ReportGroup(grouping_hash=report.grouping_hash,
150 121 occurences=0, total_reports=0,
151 122 last_report=0,
152 123 priority=report.priority,
153 124 error=report.error,
154 125 first_timestamp=report.start_time)
155 126 report_group._skip_ft_index = True
156 127 report_group.report_type = report.report_type
157 128 report.report_group_time = report_group.first_timestamp
158 129 add_sample = pick_sample(report_group.occurences,
159 130 report_type=report_group.report_type)
160 131 if add_sample:
161 132 resource.report_groups.append(report_group)
162 133 report_group.reports.append(report)
163 134 added_details += 1
164 135 DBSession.flush()
165 136 if report.partition_id not in es_report_docs:
166 137 es_report_docs[report.partition_id] = []
167 138 es_report_docs[report.partition_id].append(report.es_doc())
168 139 tags.extend(list(report.tags.items()))
169 140 slow_calls = report.add_slow_calls(report_data, report_group)
170 141 DBSession.flush()
171 142 for s_call in slow_calls:
172 143 if s_call.partition_id not in es_slow_calls_docs:
173 144 es_slow_calls_docs[s_call.partition_id] = []
174 145 es_slow_calls_docs[s_call.partition_id].append(
175 146 s_call.es_doc())
176 147 # try generating new stat rows if needed
177 148 else:
178 149 # required for postprocessing to not fail later
179 150 report.report_group = report_group
180 151
181 152 stat_row = ReportService.generate_stat_rows(
182 153 report, resource, report_group)
183 154 if stat_row.partition_id not in es_reports_stats_rows:
184 155 es_reports_stats_rows[stat_row.partition_id] = []
185 156 es_reports_stats_rows[stat_row.partition_id].append(
186 157 stat_row.es_doc())
187 158
188 159 # see if we should mark the 10th/100th occurrence of the report
189 160 last_occurences_10 = int(math.floor(report_group.occurences / 10))
190 161 curr_occurences_10 = int(math.floor(
191 162 (report_group.occurences + report.occurences) / 10))
192 163 last_occurences_100 = int(
193 164 math.floor(report_group.occurences / 100))
194 165 curr_occurences_100 = int(math.floor(
195 166 (report_group.occurences + report.occurences) / 100))
196 167 notify_occurences_10 = last_occurences_10 != curr_occurences_10
197 168 notify_occurences_100 = last_occurences_100 != curr_occurences_100
198 169 report_group.occurences = ReportGroup.occurences + occurences
199 170 report_group.last_timestamp = report.start_time
200 171 report_group.summed_duration = ReportGroup.summed_duration + report.duration
201 172 summed_duration = ReportGroup.summed_duration + report.duration
202 173 summed_occurences = ReportGroup.occurences + occurences
203 174 report_group.average_duration = summed_duration / summed_occurences
204 175 report_group.run_postprocessing(report)
205 176 if added_details:
206 177 report_group.total_reports = ReportGroup.total_reports + 1
207 178 report_group.last_report = report.id
208 179 report_group.set_notification_info(notify_10=notify_occurences_10,
209 180 notify_100=notify_occurences_100)
210 181 DBSession.flush()
211 182 report_group.get_report().notify_channel(report_group)
212 183 if report_group.partition_id not in es_report_group_docs:
213 184 es_report_group_docs[report_group.partition_id] = []
214 185 es_report_group_docs[report_group.partition_id].append(
215 186 report_group.es_doc())
216 187
217 188 action = 'REPORT'
218 189 log_msg = '%s: %s %s, client: %s, proto: %s' % (
219 190 action,
220 191 report_data.get('http_status', 'unknown'),
221 192 str(resource),
222 193 report_data.get('client'),
223 194 proto_version)
224 195 log.info(log_msg)
225 196 total_reports = len(dataset)
226 197 key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time)
227 198 Datastores.redis.incr(key, total_reports)
228 199 Datastores.redis.expire(key, 3600 * 24)
229 200 key = REDIS_KEYS['counters']['reports_per_minute_per_app'].format(
230 201 resource_id, current_time)
231 202 Datastores.redis.incr(key, total_reports)
232 203 Datastores.redis.expire(key, 3600 * 24)
233 204
234 205 add_reports_es(es_report_group_docs, es_report_docs)
235 206 add_reports_slow_calls_es(es_slow_calls_docs)
236 207 add_reports_stats_rows_es(es_reports_stats_rows)
237 208 return True
238 209 except Exception as exc:
239 210 print_traceback(log)
240 211 add_reports.retry(exc=exc)
241 212
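# Worked example of the notification thresholds computed above (numbers are
# hypothetical): with report_group.occurences == 18 and an incoming report
# carrying occurences == 3,
#   last_occurences_10 = floor(18 / 10) == 1
#   curr_occurences_10 = floor((18 + 3) / 10) == 2
# so notify_occurences_10 is True - the group just crossed a multiple of 10.
# The same decade-crossing check drives notify_occurences_100.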
242 213
243 214 @celery.task(queue="es", default_retry_delay=600, max_retries=999)
244 215 def add_reports_es(report_group_docs, report_docs):
245 216 for k, v in report_group_docs.items():
246 217 Datastores.es.bulk_index(k, 'report_group', v, id_field="_id")
247 218 for k, v in report_docs.items():
248 219 Datastores.es.bulk_index(k, 'report', v, id_field="_id",
249 220 parent_field='_parent')
250 221
251 222
252 223 @celery.task(queue="es", default_retry_delay=600, max_retries=999)
253 224 def add_reports_slow_calls_es(es_docs):
254 225 for k, v in es_docs.items():
255 226 Datastores.es.bulk_index(k, 'log', v)
256 227
257 228
258 229 @celery.task(queue="es", default_retry_delay=600, max_retries=999)
259 230 def add_reports_stats_rows_es(es_docs):
260 231 for k, v in es_docs.items():
261 232 Datastores.es.bulk_index(k, 'log', v)
262 233
263 234
264 235 @celery.task(queue="logs", default_retry_delay=600, max_retries=999)
265 236 def add_logs(resource_id, request, dataset, environ=None, **kwargs):
266 237 proto_version = request.get('protocol_version')
267 238 current_time = datetime.utcnow().replace(second=0, microsecond=0)
268 239
269 240 try:
270 241 es_docs = collections.defaultdict(list)
271 242 application = ApplicationService.by_id(resource_id)
272 243 ns_pairs = []
273 244 for entry in dataset:
274 245 # gather pk and ns so we can remove older versions of row later
275 246 if entry['primary_key'] is not None:
276 247 ns_pairs.append({"pk": entry['primary_key'],
277 248 "ns": entry['namespace']})
278 249 log_entry = Log()
279 250 log_entry.set_data(entry, resource=application)
280 251 log_entry._skip_ft_index = True
281 252 application.logs.append(log_entry)
282 253 DBSession.flush()
283 254 # insert non pk rows first
284 255 if entry['primary_key'] is None:
285 256 es_docs[log_entry.partition_id].append(log_entry.es_doc())
286 257
287 258 # 2nd pass to delete all log entries from db for the same pk/ns pair
288 259 if ns_pairs:
289 260 ids_to_delete = []
290 261 es_docs = collections.defaultdict(list)
291 262 es_docs_to_delete = collections.defaultdict(list)
292 263 found_pkey_logs = LogService.query_by_primary_key_and_namespace(
293 264 list_of_pairs=ns_pairs)
294 265 log_dict = {}
295 266 for log_entry in found_pkey_logs:
296 267 log_key = (log_entry.primary_key, log_entry.namespace)
297 268 if log_key not in log_dict:
298 269 log_dict[log_key] = []
299 270 log_dict[log_key].append(log_entry)
300 271
301 272 for ns, entry_list in log_dict.items():
302 273 entry_list = sorted(entry_list, key=lambda x: x.timestamp)
303 274 # newest row needs to be indexed in es
304 275 log_entry = entry_list[-1]
305 276 # delete everything from pg and ES, leave the last row in pg
306 277 for e in entry_list[:-1]:
307 278 ids_to_delete.append(e.log_id)
308 279 es_docs_to_delete[e.partition_id].append(e.delete_hash)
309 280
310 281 es_docs_to_delete[log_entry.partition_id].append(
311 282 log_entry.delete_hash)
312 283
313 284 es_docs[log_entry.partition_id].append(log_entry.es_doc())
314 285
315 286 if ids_to_delete:
316 287 query = DBSession.query(Log).filter(
317 288 Log.log_id.in_(ids_to_delete))
318 289 query.delete(synchronize_session=False)
319 290 if es_docs_to_delete:
320 291 # batch this to avoid problems with default ES bulk limits
321 292 for es_index in es_docs_to_delete.keys():
322 293 for batch in in_batches(es_docs_to_delete[es_index], 20):
323 294 query = {'terms': {'delete_hash': batch}}
324 295
325 296 try:
326 297 Datastores.es.delete_by_query(
327 298 es_index, 'log', query)
328 299 except pyelasticsearch.ElasticHttpNotFoundError as exc:
329 300 log.error(exc)
330 301
331 302 total_logs = len(dataset)
332 303
333 304 log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % (
334 305 str(application),
335 306 total_logs,
336 307 proto_version)
337 308 log.info(log_msg)
338 309 # mark_changed(session)
339 310 key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time)
340 311 Datastores.redis.incr(key, total_logs)
341 312 Datastores.redis.expire(key, 3600 * 24)
342 313 key = REDIS_KEYS['counters']['logs_per_minute_per_app'].format(
343 314 resource_id, current_time)
344 315 Datastores.redis.incr(key, total_logs)
345 316 Datastores.redis.expire(key, 3600 * 24)
346 317 add_logs_es(es_docs)
347 318 return True
348 319 except Exception as exc:
349 320 print_traceback(log)
350 321 add_logs.retry(exc=exc)
351 322
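# in_batches() is imported from appenlight.lib.utils and its implementation is
# not part of this diff; the callers above only rely on it chunking a
# sequence. A minimal sketch under that assumption:
#
#   def in_batches(seq, size):
#       """Yield successive `size`-sized chunks of `seq`."""
#       for i in range(0, len(seq), size):
#           yield seq[i:i + size]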
352 323
353 324 @celery.task(queue="es", default_retry_delay=600, max_retries=999)
354 325 def add_logs_es(es_docs):
355 326 for k, v in es_docs.items():
356 327 Datastores.es.bulk_index(k, 'log', v)
357 328
358 329
359 330 @celery.task(queue="metrics", default_retry_delay=600, max_retries=999)
360 331 def add_metrics(resource_id, request, dataset, proto_version):
361 332 current_time = datetime.utcnow().replace(second=0, microsecond=0)
362 333 try:
363 334 application = ApplicationService.by_id_cached()(resource_id)
364 335 application = DBSession.merge(application, load=False)
365 336 es_docs = []
366 337 rows = []
367 338 for metric in dataset:
368 339 tags = dict(metric['tags'])
369 340 server_n = tags.get('server_name', metric['server_name']).lower()
370 341 tags['server_name'] = server_n or 'unknown'
371 342 new_metric = Metric(
372 343 timestamp=metric['timestamp'],
373 344 resource_id=application.resource_id,
374 345 namespace=metric['namespace'],
375 346 tags=tags)
376 347 rows.append(new_metric)
377 348 es_docs.append(new_metric.es_doc())
378 349 session = DBSession()
379 350 session.bulk_save_objects(rows)
380 351 session.flush()
381 352
382 353 action = 'METRICS'
383 354 metrics_msg = '%s: %s, metrics: %s, proto:%s' % (
384 355 action,
385 356 str(application),
386 357 len(dataset),
387 358 proto_version
388 359 )
389 360 log.info(metrics_msg)
390 361
391 362 mark_changed(session)
392 363 key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time)
393 364 Datastores.redis.incr(key, len(rows))
394 365 Datastores.redis.expire(key, 3600 * 24)
395 366 key = REDIS_KEYS['counters']['metrics_per_minute_per_app'].format(
396 367 resource_id, current_time)
397 368 Datastores.redis.incr(key, len(rows))
398 369 Datastores.redis.expire(key, 3600 * 24)
399 370 add_metrics_es(es_docs)
400 371 return True
401 372 except Exception as exc:
402 373 print_traceback(log)
403 374 add_metrics.retry(exc=exc)
404 375
405 376
406 377 @celery.task(queue="es", default_retry_delay=600, max_retries=999)
407 378 def add_metrics_es(es_docs):
408 379 for doc in es_docs:
409 380 partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d')
410 381 Datastores.es.index(partition, 'log', doc)
411 382
412 383
413 384 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
414 385 def check_user_report_notifications(resource_id):
415 386 since_when = datetime.utcnow()
416 387 try:
417 388 request = get_current_request()
418 389 application = ApplicationService.by_id(resource_id)
419 390 if not application:
420 391 return
421 392 error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
422 393 ReportType.error, resource_id)
423 394 slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format(
424 395 ReportType.slow, resource_id)
425 396 error_group_ids = Datastores.redis.smembers(error_key)
426 397 slow_group_ids = Datastores.redis.smembers(slow_key)
427 398 Datastores.redis.delete(error_key)
428 399 Datastores.redis.delete(slow_key)
429 400 err_gids = [int(g_id) for g_id in error_group_ids]
430 401 slow_gids = [int(g_id) for g_id in list(slow_group_ids)]
431 402 group_ids = err_gids + slow_gids
432 403 occurence_dict = {}
433 404 for g_id in group_ids:
434 405 key = REDIS_KEYS['counters']['report_group_occurences'].format(
435 406 g_id)
436 407 val = Datastores.redis.get(key)
437 408 Datastores.redis.delete(key)
438 409 if val:
439 410 occurence_dict[g_id] = int(val)
440 411 else:
441 412 occurence_dict[g_id] = 1
442 413 report_groups = ReportGroupService.by_ids(group_ids)
443 414 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
444 415
445 416 ApplicationService.check_for_groups_alert(
446 417 application, 'alert', report_groups=report_groups,
447 418 occurence_dict=occurence_dict)
448 419 users = set([p.user for p in application.users_for_perm('view')])
449 420 report_groups = report_groups.all()
450 421 for user in users:
451 422 UserService.report_notify(user, request, application,
452 423 report_groups=report_groups,
453 424 occurence_dict=occurence_dict)
454 425 for group in report_groups:
455 426 # marks report_groups as notified
456 427 if not group.notified:
457 428 group.notified = True
458 429 except Exception as exc:
459 430 print_traceback(log)
460 431 raise
461 432
462 433
463 434 @celery.task(queue="default", default_retry_delay=5, max_retries=2)
464 435 def check_alerts(resource_id):
465 436 since_when = datetime.utcnow()
466 437 try:
467 438 request = get_current_request()
468 439 application = ApplicationService.by_id(resource_id)
469 440 if not application:
470 441 return
471 442 error_key = REDIS_KEYS[
472 443 'reports_to_notify_per_type_per_app_alerting'].format(
473 444 ReportType.error, resource_id)
474 445 slow_key = REDIS_KEYS[
475 446 'reports_to_notify_per_type_per_app_alerting'].format(
476 447 ReportType.slow, resource_id)
477 448 error_group_ids = Datastores.redis.smembers(error_key)
478 449 slow_group_ids = Datastores.redis.smembers(slow_key)
479 450 Datastores.redis.delete(error_key)
480 451 Datastores.redis.delete(slow_key)
481 452 err_gids = [int(g_id) for g_id in error_group_ids]
482 453 slow_gids = [int(g_id) for g_id in list(slow_group_ids)]
483 454 group_ids = err_gids + slow_gids
484 455 occurence_dict = {}
485 456 for g_id in group_ids:
486 457 key = REDIS_KEYS['counters'][
487 458 'report_group_occurences_alerting'].format(
488 459 g_id)
489 460 val = Datastores.redis.get(key)
490 461 Datastores.redis.delete(key)
491 462 if val:
492 463 occurence_dict[g_id] = int(val)
493 464 else:
494 465 occurence_dict[g_id] = 1
495 466 report_groups = ReportGroupService.by_ids(group_ids)
496 467 report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref))
497 468
498 469 ApplicationService.check_for_groups_alert(
499 470 application, 'alert', report_groups=report_groups,
500 471 occurence_dict=occurence_dict, since_when=since_when)
501 472 except Exception as exc:
502 473 print_traceback(log)
503 474 raise
504 475
505 476
506 477 @celery.task(queue="default", default_retry_delay=1, max_retries=2)
507 478 def close_alerts():
508 479 log.warning('Checking alerts')
509 480 since_when = datetime.utcnow()
510 481 try:
511 482 event_types = [Event.types['error_report_alert'],
512 483 Event.types['slow_report_alert'], ]
513 484 statuses = [Event.statuses['active']]
514 485 # get events older than 5 min
515 486 events = EventService.by_type_and_status(
516 487 event_types,
517 488 statuses,
518 489 older_than=(since_when - timedelta(minutes=5)))
519 490 for event in events:
520 491 # see if we can close them
521 492 event.validate_or_close(
522 493 since_when=(since_when - timedelta(minutes=1)))
523 494 except Exception as exc:
524 495 print_traceback(log)
525 496 raise
526 497
527 498
528 499 @celery.task(queue="default", default_retry_delay=600, max_retries=999)
529 500 def update_tag_counter(tag_name, tag_value, count):
530 501 try:
531 502 query = DBSession.query(Tag).filter(Tag.name == tag_name).filter(
532 503 sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value),
533 504 sa.types.TEXT))
534 505 query.update({'times_seen': Tag.times_seen + count,
535 506 'last_timestamp': datetime.utcnow()},
536 507 synchronize_session=False)
537 508 session = DBSession()
538 509 mark_changed(session)
539 510 return True
540 511 except Exception as exc:
541 512 print_traceback(log)
542 513 update_tag_counter.retry(exc=exc)
543 514
544 515
545 516 @celery.task(queue="default")
546 517 def update_tag_counters():
547 518 """
548 519 Schedules tasks that update the counters for application tags
549 520 """
550 521 tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1)
551 522 Datastores.redis.delete(REDIS_KEYS['seen_tag_list'])
552 523 c = collections.Counter(tags)
553 524 for t_json, count in c.items():
554 525 tag_info = json.loads(t_json)
555 526 update_tag_counter.delay(tag_info[0], tag_info[1], count)
556 527
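# Inferred from the loop above: every entry on the `seen_tag_list` redis list
# is a JSON-encoded [name, value] pair. A hypothetical producer would do:
#
#   Datastores.redis.lpush(REDIS_KEYS['seen_tag_list'],
#                          json.dumps(['browser', 'Firefox']),
#                          json.dumps(['browser', 'Firefox']))
#
# after which update_tag_counters() would collapse the duplicates and issue
# update_tag_counter.delay('browser', 'Firefox', 2).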
557 528
558 529 @celery.task(queue="default")
559 530 def daily_digest():
560 531 """
561 532 Sends daily digest with top 50 error reports
562 533 """
563 534 request = get_current_request()
564 535 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
565 536 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
566 537 since_when = datetime.utcnow() - timedelta(hours=8)
567 538 log.warning('Generating daily digests')
568 539 for resource_id in apps:
569 540 resource_id = resource_id.decode('utf8')
570 541 end_date = datetime.utcnow().replace(microsecond=0, second=0)
571 542 filter_settings = {'resource': [resource_id],
572 543 'tags': [{'name': 'type',
573 544 'value': ['error'], 'op': None}],
574 545 'type': 'error', 'start_date': since_when,
575 546 'end_date': end_date}
576 547
577 548 reports = ReportGroupService.get_trending(
578 549 request, filter_settings=filter_settings, limit=50)
579 550
580 551 application = ApplicationService.by_id(resource_id)
581 552 if application:
582 553 users = set([p.user for p in application.users_for_perm('view')])
583 554 for user in users:
584 555 user.send_digest(request, application, reports=reports,
585 556 since_when=since_when)
586 557
587 558
588 559 @celery.task(queue="default")
589 560 def notifications_reports():
590 561 """
591 562 Loop that checks redis for info and then spawns celery tasks that
592 563 send out notifications
593 564 """
594 565 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports'])
595 566 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports'])
596 567 for app in apps:
597 568 log.warning('Notify for app: %s' % app)
598 569 check_user_report_notifications.delay(app.decode('utf8'))
599 570
600 571 @celery.task(queue="default")
601 572 def alerting_reports():
602 573 """
603 574 Loop that checks redis for info and then issues new tasks to celery to
604 575 perform the following:
605 576 - which applications should have new alerts opened
606 577 """
607 578
608 579 apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting'])
609 580 Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting'])
610 581 for app in apps:
611 582 log.warning('Notify for app: %s' % app)
612 583 check_alerts.delay(app.decode('utf8'))
613 584
614 585
615 586 @celery.task(queue="default", soft_time_limit=3600 * 4,
616 587 hard_time_limit=3600 * 4, max_retries=999)
617 588 def logs_cleanup(resource_id, filter_settings):
618 589 request = get_current_request()
619 590 request.tm.begin()
620 591 es_query = {
621 592 "_source": False,
622 593 "size": 5000,
623 594 "query": {
624 595 "filtered": {
625 596 "filter": {
626 597 "and": [{"term": {"resource_id": resource_id}}]
627 598 }
628 599 }
629 600 }
630 601 }
631 602
632 603 query = DBSession.query(Log).filter(Log.resource_id == resource_id)
633 604 if filter_settings['namespace']:
634 605 query = query.filter(Log.namespace == filter_settings['namespace'][0])
635 606 es_query['query']['filtered']['filter']['and'].append(
636 607 {"term": {"namespace": filter_settings['namespace'][0]}}
637 608 )
638 609 query.delete(synchronize_session=False)
639 610 request.tm.commit()
640 611 result = request.es_conn.search(es_query, index='rcae_l_*',
641 612 doc_type='log', es_scroll='1m',
642 613 es_search_type='scan')
643 614 scroll_id = result['_scroll_id']
644 615 while True:
645 616 log.warning('log_cleanup, app:{} ns:{} batch'.format(
646 617 resource_id,
647 618 filter_settings['namespace']
648 619 ))
649 620 es_docs_to_delete = []
650 621 result = request.es_conn.send_request(
651 622 'POST', ['_search', 'scroll'],
652 623 body=scroll_id, query_params={"scroll": '1m'})
653 624 scroll_id = result['_scroll_id']
654 625 if not result['hits']['hits']:
655 626 break
656 627 for doc in result['hits']['hits']:
657 628 es_docs_to_delete.append({"id": doc['_id'],
658 629 "index": doc['_index']})
659 630
660 631 for batch in in_batches(es_docs_to_delete, 10):
661 632 Datastores.es.bulk([Datastores.es.delete_op(doc_type='log',
662 633 **to_del)
663 634 for to_del in batch])
@@ -1,758 +1,743 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # AppEnlight Enterprise Edition, including its added features, Support
19 19 # services, and proprietary license terms, please see
20 20 # https://rhodecode.com/licenses/
21 21
22 22 import datetime
23 23
24 24 import colander
25 25 from colander import null
26 26
27 27 # those keywords are here so we can distinguish between searching for tags and
28 28 # normal properties of reports/logs
29 29 accepted_search_params = ['resource',
30 30 'request_id',
31 31 'start_date',
32 32 'end_date',
33 33 'page',
34 34 'min_occurences',
35 35 'http_status',
36 36 'priority',
37 37 'error',
38 38 'url_path',
39 39 'url_domain',
40 40 'report_status',
41 41 'min_duration',
42 42 'max_duration',
43 43 'message',
44 44 'level',
45 45 'namespace']
46 46
47 47
48 48 @colander.deferred
49 49 def deferred_utcnow(node, kw):
50 50 return kw['utcnow']
51 51
52 52
53 53 def lowercase_preparer(input_data):
54 54 """
55 55 Transforms a list of string entries to lowercase
56 56 Used in search query validation
57 57 """
58 58 if not input_data:
59 59 return input_data
60 60 return [x.lower() for x in input_data]
61 61
62 62
63 63 def shortener_factory(cutoff_size=32):
64 64 """
65 65 Limits the input data to a specific character count
66 66 :arg cutoff_size: how many characters to keep
67 67
68 68 """
69 69
70 70 def shortener(input_data):
71 71 if not input_data:
72 72 return input_data
73 73 else:
74 74 if isinstance(input_data, str):
75 75 return input_data[:cutoff_size]
76 76 else:
77 77 return input_data
78 78
79 79 return shortener
80 80
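# Usage sketch for the factory above (values are hypothetical):
#
#   shorten = shortener_factory(8)
#   shorten('abcdefghij')   # -> 'abcdefgh' (strings get truncated)
#   shorten(12345)          # -> 12345     (non-strings pass through unchanged)
#   shorten('')             # -> ''        (falsy input is returned as-is)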
81 81
82 82 def cast_to_unicode_or_null(value):
83 83 if value is not colander.null:
84 84 return str(value)
85 85 return None
86 86
87 87
88 88 class NonTZDate(colander.DateTime):
89 89 """ Removes tz info from deserialized dates """
90 90
91 91 def deserialize(self, node, cstruct):
92 92 # disabled for now
93 93 # if cstruct and isinstance(cstruct, str):
94 94 # if ':' not in cstruct:
95 95 # cstruct += ':0.0'
96 96 # if '.' not in cstruct:
97 97 # cstruct += '.0'
98 98 value = super(NonTZDate, self).deserialize(node, cstruct)
99 99 if value:
100 100 return value.replace(tzinfo=None)
101 101 return value
102 102
103 103
104 104 class UnknownType(object):
105 105 """
106 106 Universal type that will accept a deserialized JSON object and store it unaltered
107 107 """
108 108
109 109 def serialize(self, node, appstruct):
110 110 if appstruct is null:
111 111 return null
112 112 return appstruct
113 113
114 114 def deserialize(self, node, cstruct):
115 115 if cstruct is null:
116 116 return null
117 117 return cstruct
118 118
119 119 def cstruct_children(self):
120 120 return []
121 121
122 122
123 123 # SLOW REPORT SCHEMA
124 124
125 125 def rewrite_type(input_data):
126 126 """
127 127 Fix for legacy appenlight clients
128 128 """
129 129 if input_data == 'remote_call':
130 130 return 'remote'
131 131 return input_data
132 132
133 133
134 134 class ExtraTupleSchema(colander.TupleSchema):
135 135 name = colander.SchemaNode(colander.String(),
136 136 validator=colander.Length(1, 64))
137 137 value = colander.SchemaNode(UnknownType(),
138 138 preparer=shortener_factory(512),
139 139 missing=None)
140 140
141 141
142 142 class ExtraSchemaList(colander.SequenceSchema):
143 143 tag = ExtraTupleSchema()
144 144 missing = None
145 145
146 146
147 147 class TagsTupleSchema(colander.TupleSchema):
148 148 name = colander.SchemaNode(colander.String(),
149 149 validator=colander.Length(1, 128))
150 150 value = colander.SchemaNode(UnknownType(),
151 151 preparer=shortener_factory(128),
152 152 missing=None)
153 153
154 154
155 155 class TagSchemaList(colander.SequenceSchema):
156 156 tag = TagsTupleSchema()
157 157 missing = None
158 158
159 159
160 160 class NumericTagsTupleSchema(colander.TupleSchema):
161 161 name = colander.SchemaNode(colander.String(),
162 162 validator=colander.Length(1, 128))
163 163 value = colander.SchemaNode(colander.Float(), missing=0)
164 164
165 165
166 166 class NumericTagSchemaList(colander.SequenceSchema):
167 167 tag = NumericTagsTupleSchema()
168 168 missing = None
169 169
170 170
171 171 class SlowCallSchema(colander.MappingSchema):
172 172 """
173 173 Validates slow call format in slow call list
174 174 """
175 175 start = colander.SchemaNode(NonTZDate())
176 176 end = colander.SchemaNode(NonTZDate())
177 177 statement = colander.SchemaNode(colander.String(), missing='')
178 178 parameters = colander.SchemaNode(UnknownType(), missing=None)
179 179 type = colander.SchemaNode(
180 180 colander.String(),
181 181 preparer=rewrite_type,
182 182 validator=colander.OneOf(
183 183 ['tmpl', 'sql', 'nosql', 'remote', 'unknown', 'custom']),
184 184 missing='unknown')
185 185 subtype = colander.SchemaNode(colander.String(),
186 186 validator=colander.Length(1, 16),
187 187 missing='unknown')
188 188 location = colander.SchemaNode(colander.String(),
189 189 validator=colander.Length(1, 255),
190 190 missing='')
191 191
192 192
193 193 def limited_date(node, value):
194 194 """ checks that the value is at most 72h in the past / 2h in the future """
195 195 past_hours, future_hours = 72, 2
196 196 min_time = datetime.datetime.utcnow() - datetime.timedelta(hours=past_hours)
197 197 max_time = datetime.datetime.utcnow() + datetime.timedelta(hours=future_hours)
198 198 if min_time > value:
199 199 msg = '%r is older than current UTC time by more than ' + str(past_hours) + ' hours.'
200 200 msg += ' Ask administrator to enable permanent logging for ' \
201 201 'your application to store logs with dates in the past.'
202 202 raise colander.Invalid(node, msg % value)
203 203 if max_time < value:
204 204 msg = '%r is newer than current UTC time by more than ' + str(future_hours) + ' hours.'
205 205 msg += ' Ask administrator to enable permanent logging for ' \
206 206 'your application to store logs with dates in the future.'
207 207 raise colander.Invalid(node, msg % value)
208 208
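# Usage sketch: limited_date is a colander validator, so it gets attached to a
# SchemaNode and raises colander.Invalid for out-of-range dates (the dates
# below are hypothetical):
#
#   node = colander.SchemaNode(NonTZDate(), validator=limited_date)
#   limited_date(node, datetime.datetime.utcnow())     # passes silently
#   limited_date(node, datetime.datetime(2000, 1, 1))  # raises colander.Invalid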
209 209
210 210 class SlowCallListSchema(colander.SequenceSchema):
211 211 """
212 212 Validates list of individual slow calls
213 213 """
214 214 slow_call = SlowCallSchema()
215 215
216 216
217 217 class RequestStatsSchema(colander.MappingSchema):
218 218 """
219 219 Validates format of request statistics dictionary
220 220 """
221 221 main = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
222 222 missing=0)
223 223 sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
224 224 missing=0)
225 225 nosql = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
226 226 missing=0)
227 227 remote = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
228 228 missing=0)
229 229 tmpl = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
230 230 missing=0)
231 231 custom = colander.SchemaNode(colander.Float(), validator=colander.Range(0),
232 232 missing=0)
233 233 sql_calls = colander.SchemaNode(colander.Float(),
234 234 validator=colander.Range(0),
235 235 missing=0)
236 236 nosql_calls = colander.SchemaNode(colander.Float(),
237 237 validator=colander.Range(0),
238 238 missing=0)
239 239 remote_calls = colander.SchemaNode(colander.Float(),
240 240 validator=colander.Range(0),
241 241 missing=0)
242 242 tmpl_calls = colander.SchemaNode(colander.Float(),
243 243 validator=colander.Range(0),
244 244 missing=0)
245 245 custom_calls = colander.SchemaNode(colander.Float(),
246 246 validator=colander.Range(0),
247 247 missing=0)
248 248
249 249
250 250 class FrameInfoVarSchema(colander.SequenceSchema):
251 251 """
252 252 Validates format of frame variables of a traceback
253 253 """
254 254 vars = colander.SchemaNode(UnknownType(),
255 255 validator=colander.Length(2, 2))
256 256
257 257
258 258 class FrameInfoSchema(colander.MappingSchema):
259 259 """
260 260 Validates format of a traceback line
261 261 """
262 262 cline = colander.SchemaNode(colander.String(), missing='')
263 263 module = colander.SchemaNode(colander.String(), missing='')
264 264 line = colander.SchemaNode(colander.String(), missing='')
265 265 file = colander.SchemaNode(colander.String(), missing='')
266 266 fn = colander.SchemaNode(colander.String(), missing='')
267 267 vars = FrameInfoVarSchema()
268 268
269 269
270 270 class FrameInfoListSchema(colander.SequenceSchema):
271 271 """
272 272 Validates format of list of traceback lines
273 273 """
274 274 frame = colander.SchemaNode(UnknownType())
275 275
276 276
277 277 class ReportDetailBaseSchema(colander.MappingSchema):
278 278 """
279 279 Validates format of report - ie. request parameters and stats for a request in report group
280 280 """
281 281 username = colander.SchemaNode(colander.String(),
282 282 preparer=[shortener_factory(255),
283 283 lambda x: x or ''],
284 284 missing='')
285 285 request_id = colander.SchemaNode(colander.String(),
286 286 preparer=shortener_factory(40),
287 287 missing='')
288 288 url = colander.SchemaNode(colander.String(),
289 289 preparer=shortener_factory(1024), missing='')
290 290 ip = colander.SchemaNode(colander.String(), preparer=shortener_factory(39),
291 291 missing=None)
292 292 start_time = colander.SchemaNode(NonTZDate(), validator=limited_date,
293 293 missing=deferred_utcnow)
294 294 end_time = colander.SchemaNode(NonTZDate(), validator=limited_date,
295 295 missing=None)
296 296 user_agent = colander.SchemaNode(colander.String(),
297 297 preparer=[shortener_factory(512),
298 298 lambda x: x or ''],
299 299 missing='')
300 300 message = colander.SchemaNode(colander.String(),
301 301 preparer=shortener_factory(2048),
302 302 missing='')
303 303 group_string = colander.SchemaNode(colander.String(),
304 304 validator=colander.Length(1, 512),
305 305 missing=None)
306 306 request_stats = RequestStatsSchema(missing=None)
307 307 request = colander.SchemaNode(colander.Mapping(unknown='preserve'),
308 308 missing={})
309 309 traceback = FrameInfoListSchema(missing=None)
310 310 slow_calls = SlowCallListSchema(missing=[])
311 311 extra = ExtraSchemaList()
312 312
313 313
314 314 class ReportDetailSchema_0_4(ReportDetailBaseSchema):
315 315 frameinfo = FrameInfoListSchema(missing=None)
316 316
317 317
318 318 class ReportDetailSchema_0_5(ReportDetailBaseSchema):
319 319 pass
320 320
321 321
322 322 class ReportDetailListSchema(colander.SequenceSchema):
323 323 """
324 324 Validates format of list of reports
325 325 """
326 326 report_detail = ReportDetailSchema_0_4()
327 327 validator = colander.Length(1)
328 328
329 329
330 330 class ReportSchemaBase(colander.MappingSchema):
331 331 """
332 332 Validates format of report group
333 333 """
334 334 client = colander.SchemaNode(colander.String(),
335 335 preparer=lambda x: x or 'unknown')
336 336 server = colander.SchemaNode(
337 337 colander.String(),
338 338 preparer=[
339 339 lambda x: x.lower() if x else 'unknown', shortener_factory(128)],
340 340 missing='unknown')
341 341 priority = colander.SchemaNode(colander.Int(),
342 342 preparer=[lambda x: x or 5],
343 343 validator=colander.Range(1, 10),
344 344 missing=5)
345 345 language = colander.SchemaNode(colander.String(), missing='unknown')
346 346 error = colander.SchemaNode(colander.String(),
347 347 preparer=shortener_factory(512),
348 348 missing='')
349 349 view_name = colander.SchemaNode(colander.String(),
350 350 preparer=[shortener_factory(128),
351 351 lambda x: x or ''],
352 352 missing='')
353 353 http_status = colander.SchemaNode(colander.Int(),
354 354 preparer=[lambda x: x or 200],
355 355 validator=colander.Range(1))
356 356
357 357 occurences = colander.SchemaNode(colander.Int(),
358 358 validator=colander.Range(1, 99999999999),
359 359 missing=1)
360 360 tags = TagSchemaList()
361 361
362 362
363 class ReportSchema_0_4(ReportSchemaBase):
364 error_type = colander.SchemaNode(colander.String(),
365 preparer=[shortener_factory(512)],
366 missing='')
367 report_details = ReportDetailListSchema()
368
369
370 363 class ReportSchema_0_5(ReportSchemaBase, ReportDetailSchema_0_5):
371 364 pass
372 365
373 366
374 class ReportListSchema_0_4(colander.SequenceSchema):
375 """
376 Validates format of list of report groups
377 """
378 report = ReportSchema_0_4()
379 validator = colander.Length(1)
380
381
382 367 class ReportListSchema_0_5(colander.SequenceSchema):
383 368 """
384 369 Validates format of list of report groups
385 370 """
386 371 report = ReportSchema_0_5()
387 372 validator = colander.Length(1)
388 373
389 374
390 375 class LogSchema(colander.MappingSchema):
391 376 """
392 377 Validates format of an individual log entry
393 378 """
394 379 primary_key = colander.SchemaNode(UnknownType(),
395 380 preparer=[cast_to_unicode_or_null,
396 381 shortener_factory(128)],
397 382 missing=None)
398 383 log_level = colander.SchemaNode(colander.String(),
399 384 preparer=shortener_factory(10),
400 385 missing='UNKNOWN')
401 386 message = colander.SchemaNode(colander.String(),
402 387 preparer=shortener_factory(4096),
403 388 missing='')
404 389 namespace = colander.SchemaNode(colander.String(),
405 390 preparer=shortener_factory(128),
406 391 missing='')
407 392 request_id = colander.SchemaNode(colander.String(),
408 393 preparer=shortener_factory(40),
409 394 missing='')
410 395 server = colander.SchemaNode(colander.String(),
411 396 preparer=shortener_factory(128),
412 397 missing='unknown')
413 398 date = colander.SchemaNode(NonTZDate(),
414 399 validator=limited_date,
415 400 missing=deferred_utcnow)
416 401 tags = TagSchemaList()
417 402
418 403
419 404 class LogSchemaPermanent(LogSchema):
420 405 date = colander.SchemaNode(NonTZDate(),
421 406 missing=deferred_utcnow)
422 407 permanent = colander.SchemaNode(colander.Boolean(), missing=False)
423 408
424 409
425 410 class LogListSchema(colander.SequenceSchema):
426 411 """
427 412 Validates format of list of log entries
428 413 """
429 414 log = LogSchema()
430 415 validator = colander.Length(1)
431 416
432 417
433 418 class LogListPermanentSchema(colander.SequenceSchema):
434 419 """
435 420 Validates format of list of log entries
436 421 """
437 422 log = LogSchemaPermanent()
438 423 validator = colander.Length(1)
439 424
440 425
441 426 class ViewRequestStatsSchema(RequestStatsSchema):
442 427 requests = colander.SchemaNode(colander.Integer(),
443 428 validator=colander.Range(0),
444 429 missing=0)
445 430
446 431
447 432 class ViewMetricTupleSchema(colander.TupleSchema):
448 433 """
449 434 Validates list of views and their corresponding request stats object ie:
450 435 ["dir/module:func",{"custom": 0.0..}]
451 436 """
452 437 view_name = colander.SchemaNode(colander.String(),
453 438 preparer=[shortener_factory(128),
454 439 lambda x: x or 'unknown'],
455 440 missing='unknown')
456 441 metrics = ViewRequestStatsSchema()
457 442
458 443
459 444 class ViewMetricListSchema(colander.SequenceSchema):
460 445 """
461 446 Validates view breakdown stats objects list
462 447 {metrics key of server/time object}
463 448 """
464 449 view_tuple = ViewMetricTupleSchema()
465 450 validator = colander.Length(1)
466 451
467 452
468 453 class ViewMetricSchema(colander.MappingSchema):
469 454 """
470 455 Validates server/timeinterval object, ie:
471 456 {server/time object}
472 457
473 458 """
474 459 timestamp = colander.SchemaNode(NonTZDate(),
475 460 validator=limited_date,
476 461 missing=None)
477 462 server = colander.SchemaNode(colander.String(),
478 463 preparer=[shortener_factory(128),
479 464 lambda x: x or 'unknown'],
480 465 missing='unknown')
481 466 metrics = ViewMetricListSchema()
482 467
483 468
484 469 class GeneralMetricSchema(colander.MappingSchema):
485 470 """
486 471 Validates universal metric schema
487 472
488 473 """
489 474 namespace = colander.SchemaNode(colander.String(), missing='',
490 475 preparer=shortener_factory(128))
491 476
492 477 server_name = colander.SchemaNode(colander.String(),
493 478 preparer=[shortener_factory(128),
494 479 lambda x: x or 'unknown'],
495 480 missing='unknown')
496 481 timestamp = colander.SchemaNode(NonTZDate(), validator=limited_date,
497 482 missing=deferred_utcnow)
498 483 tags = TagSchemaList()
499 484
500 485
501 486 class GeneralMetricsListSchema(colander.SequenceSchema):
502 487 metric = GeneralMetricSchema()
503 488 validator = colander.Length(1)
504 489
505 490
506 491 class MetricsListSchema(colander.SequenceSchema):
507 492 """
508 493 Validates list of metrics objects ie:
509 494 [{server/time object}, ] part
510 495
511 496
512 497 """
513 498 metric = ViewMetricSchema()
514 499 validator = colander.Length(1)
515 500
516 501
517 502 class StringToAppList(object):
518 503 """
519 504 Returns a validated list of application ids based on the user query
520 505 and the set of applications the user is allowed to look at;
521 506 transforms a bare string into a list containing a single integer
522 507 """
523 508
524 509 def serialize(self, node, appstruct):
525 510 if appstruct is null:
526 511 return null
527 512 return appstruct
528 513
529 514 def deserialize(self, node, cstruct):
530 515 if cstruct is null:
531 516 return null
532 517
533 518 apps = set([int(a) for a in node.bindings['resources']])
534 519
535 520 if isinstance(cstruct, str):
536 521 cstruct = [cstruct]
537 522
538 523 cstruct = [int(a) for a in cstruct]
539 524
540 525 valid_apps = list(apps.intersection(set(cstruct)))
541 526 if valid_apps:
542 527 return valid_apps
543 528 return null
544 529
545 530 def cstruct_children(self):
546 531 return []
547 532
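# Usage sketch (app ids are hypothetical): the node has to be bound with the
# set of resources the user may access before deserialization:
#
#   typ = StringToAppList()
#   node = colander.SchemaNode(typ).bind(resources=[1, 2, 3])
#   typ.deserialize(node, '2')          # -> [2]  (scalar coerced to a list)
#   typ.deserialize(node, ['2', '9'])   # -> [2]  (app 9 is filtered out)
#   typ.deserialize(node, ['9'])        # -> colander.null (nothing allowed)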
548 533
549 534 @colander.deferred
550 535 def possible_applications_validator(node, kw):
551 536 possible_apps = [int(a) for a in kw['resources']]
552 537 return colander.All(colander.ContainsOnly(possible_apps),
553 538 colander.Length(1))
554 539
555 540
556 541 @colander.deferred
557 542 def possible_applications(node, kw):
558 543 return [int(a) for a in kw['resources']]
559 544
560 545
561 546 @colander.deferred
562 547 def today_start(node, kw):
563 548 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
564 549 minute=0,
565 550 hour=0)
566 551
567 552
568 553 @colander.deferred
569 554 def today_end(node, kw):
570 555 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
571 556 minute=59, hour=23)
572 557
573 558
574 559 @colander.deferred
575 560 def old_start(node, kw):
576 561 t_delta = datetime.timedelta(days=90)
577 562 return datetime.datetime.utcnow().replace(second=0, microsecond=0,
578 563 minute=0,
579 564 hour=0) - t_delta
580 565
581 566
588 573 class PermissiveDate(colander.DateTime):
589 574 """ Returns null for incorrect date format - also removes tz info"""
590 575
591 576 def deserialize(self, node, cstruct):
592 577 if not cstruct:
593 578 return null
594 579
595 580 try:
596 581 result = colander.iso8601.parse_date(
597 582 cstruct, default_timezone=self.default_tzinfo)
598 583 except colander.iso8601.ParseError:
599 584 return null
600 585 return result.replace(tzinfo=None)
601 586
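# Usage sketch (dates are hypothetical): the type swallows parse errors
# instead of raising and strips any timezone info:
#
#   typ = PermissiveDate()
#   node = colander.SchemaNode(typ)
#   typ.deserialize(node, '2016-01-01T10:00:00+02:00')
#       # -> datetime.datetime(2016, 1, 1, 10, 0), tzinfo removed
#   typ.deserialize(node, 'not-a-date')   # -> colander.null, no exception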
602 587
603 588 class LogSearchSchema(colander.MappingSchema):
604 589 def schema_type(self, **kw):
605 590 return colander.Mapping(unknown='preserve')
606 591
607 592 resource = colander.SchemaNode(StringToAppList(),
608 593 validator=possible_applications_validator,
609 594 missing=possible_applications)
610 595
611 596 message = colander.SchemaNode(colander.Sequence(accept_scalar=True),
612 597 colander.SchemaNode(colander.String()),
613 598 missing=None)
614 599 level = colander.SchemaNode(colander.Sequence(accept_scalar=True),
615 600 colander.SchemaNode(colander.String()),
616 601 preparer=lowercase_preparer,
617 602 missing=None)
618 603 namespace = colander.SchemaNode(colander.Sequence(accept_scalar=True),
619 604 colander.SchemaNode(colander.String()),
620 605 preparer=lowercase_preparer,
621 606 missing=None)
622 607 request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True),
623 608 colander.SchemaNode(colander.String()),
624 609 preparer=lowercase_preparer,
625 610 missing=None)
626 611 start_date = colander.SchemaNode(PermissiveDate(),
627 612 missing=None)
628 613 end_date = colander.SchemaNode(PermissiveDate(),
629 614 missing=None)
630 615 page = colander.SchemaNode(colander.Integer(),
631 616 validator=colander.Range(min=1),
632 617 missing=1)
633 618
634 619
635 620 class ReportSearchSchema(colander.MappingSchema):
636 621 def schema_type(self, **kw):
637 622 return colander.Mapping(unknown='preserve')
638 623
639 624 resource = colander.SchemaNode(StringToAppList(),
640 625 validator=possible_applications_validator,
641 626 missing=possible_applications)
642 627 request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True),
643 628 colander.SchemaNode(colander.String()),
644 629 missing=None)
645 630 start_date = colander.SchemaNode(PermissiveDate(),
646 631 missing=None)
647 632 end_date = colander.SchemaNode(PermissiveDate(),
648 633 missing=None)
649 634 page = colander.SchemaNode(colander.Integer(),
650 635 validator=colander.Range(min=1),
651 636 missing=1)
652 637
653 638 min_occurences = colander.SchemaNode(
654 639 colander.Sequence(accept_scalar=True),
655 640 colander.SchemaNode(colander.Integer()),
656 641 missing=None)
657 642
658 643 http_status = colander.SchemaNode(colander.Sequence(accept_scalar=True),
659 644 colander.SchemaNode(colander.Integer()),
660 645 missing=None)
661 646 priority = colander.SchemaNode(colander.Sequence(accept_scalar=True),
662 647 colander.SchemaNode(colander.Integer()),
663 648 missing=None)
664 649 error = colander.SchemaNode(colander.Sequence(accept_scalar=True),
665 650 colander.SchemaNode(colander.String()),
666 651 missing=None)
667 652 url_path = colander.SchemaNode(colander.Sequence(accept_scalar=True),
668 653 colander.SchemaNode(colander.String()),
669 654 missing=None)
670 655 url_domain = colander.SchemaNode(colander.Sequence(accept_scalar=True),
671 656 colander.SchemaNode(colander.String()),
672 657 missing=None)
673 658 report_status = colander.SchemaNode(colander.Sequence(accept_scalar=True),
674 659 colander.SchemaNode(colander.String()),
675 660 missing=None)
676 661 min_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True),
677 662 colander.SchemaNode(colander.Float()),
678 663 missing=None)
679 664 max_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True),
680 665 colander.SchemaNode(colander.Float()),
681 666 missing=None)
682 667
683 668
684 669 class TagSchema(colander.MappingSchema):
685 670 """
686 671 Used in log search
687 672 """
688 673 name = colander.SchemaNode(colander.String(),
689 674 validator=colander.Length(1, 32))
690 675 value = colander.SchemaNode(colander.Sequence(accept_scalar=True),
691 676 colander.SchemaNode(colander.String(),
692 677 validator=colander.Length(
693 678 1, 128)),
694 679 missing=None)
695 680 op = colander.SchemaNode(colander.String(),
696 681 validator=colander.Length(1, 128),
697 682 missing=None)
698 683
699 684
700 685 class TagListSchema(colander.SequenceSchema):
701 686 tag = TagSchema()
702 687
703 688
704 689 class RuleFieldType(object):
705 690 """ Validator which succeeds if the value passed to it can be
706 691 cast to the given type """
707 692
708 693 def __init__(self, cast_to):
709 694 self.cast_to = cast_to
710 695
711 696 def __call__(self, node, value):
712 697 try:
713 698 if self.cast_to == 'int':
714 699 int(value)
715 700 elif self.cast_to == 'float':
716 701 float(value)
717 702 elif self.cast_to == 'unicode':
718 703 str(value)
719 704 except (ValueError, TypeError):
720 705 raise colander.Invalid(node,
721 706 "Can't cast {} to {}".format(
722 707 value, self.cast_to))
723 708
724 709
725 710 def build_rule_schema(ruleset, check_matrix):
726 711 """
727 712 Accepts ruleset and a map of fields/possible operations and builds
728 713 validation class
729 714 """
730 715
731 716 schema = colander.SchemaNode(colander.Mapping())
732 717 schema.add(colander.SchemaNode(colander.String(), name='field'))
733 718
734 719 if ruleset['field'] in ['__AND__', '__OR__']:
735 720 subrules = colander.SchemaNode(colander.Tuple(), name='rules')
736 721 for rule in ruleset['rules']:
737 722 subrules.add(build_rule_schema(rule, check_matrix))
738 723 schema.add(subrules)
739 724 else:
740 725 op_choices = check_matrix[ruleset['field']]['ops']
741 726 cast_to = check_matrix[ruleset['field']]['type']
742 727 schema.add(colander.SchemaNode(colander.String(),
743 728 validator=colander.OneOf(op_choices),
744 729 name='op'))
745 730
746 731 schema.add(colander.SchemaNode(colander.String(),
747 732 name='value',
748 733 validator=RuleFieldType(cast_to)))
749 734 return schema
750 735
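# Usage sketch with a hypothetical one-field check_matrix and ruleset:
#
#   check_matrix = {'http_status': {'ops': ('eq', 'ge', 'le'), 'type': 'int'}}
#   ruleset = {'field': 'http_status', 'op': 'ge', 'value': '500'}
#   schema = build_rule_schema(ruleset, check_matrix)
#   schema.deserialize(ruleset)   # passes; an op outside 'ops' or a 'value'
#                                 # that won't cast to int raises colander.Invalid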
751 736
752 737 class ConfigTypeSchema(colander.MappingSchema):
753 738 type = colander.SchemaNode(colander.String(), missing=None)
754 739 config = colander.SchemaNode(UnknownType(), missing=None)
755 740
756 741
757 742 class MappingListSchema(colander.SequenceSchema):
758 743 config = colander.SchemaNode(UnknownType())