@@ -1,47 +1,47 b''
|
1 | 1 | # appenlight README |
|
2 | 2 | |
|
3 | 3 | |
|
4 | 4 | To run the app you need to meet the following prerequisites: |
|
5 | 5 | |
|
6 | 6 | - running elasticsearch (2.3+ tested) |
|
7 | 7 | - running postgresql (9.5+ required) |
|
8 | 8 | - running redis |
|
9 | 9 | |
|
10 | 10 | # Setup basics |
|
11 | 11 | |
|
12 | 12 | Set up the basic application database schema: |
|
13 | 13 | |
|
14 | 14 | appenlight_initialize_db config.ini |
|
15 | 15 | |
|
16 | 16 | Set up basic elasticsearch schema: |
|
17 | 17 | |
|
18 | 18 | appenlight-reindex-elasticsearch -c config.ini -t all |
|
19 | 19 | |
|
20 | 20 | Install the appenlight uptime plugin |
|
21 | 21 | |
|
22 | 22 | # Running |
|
23 | 23 | |
|
24 | 24 | To run the application itself: |
|
25 | 25 | |
|
26 | 26 | pserve --reload development.ini |
|
27 | 27 | |
|
28 | 28 | To run celery queue processing: |
|
29 | 29 | |
|
30 | 30 | celery worker -A appenlight.celery -Q "reports,logs,metrics,default" --ini=development.ini |
|
31 | 31 | |
|
32 | 32 | To run celery beat scheduling: |
|
33 | 33 | |
|
34 | 34 | celery beat -A appenlight.celery --ini=development.ini |
|
35 | 35 | |
|
36 | You should also run the channelstream websocket server for real-time notifications | |
|
36 | You should also run the `channelstream` websocket server for real-time notifications | |
|
37 | 37 | |
|
38 | 38 | channelstream -i filename.ini |
|
39 | 39 | |
|
40 | 40 | # Testing |
|
41 | 41 | |
|
42 | 42 | To run the test suite: |
|
43 | 43 | |
|
44 | 44 | py.test appenlight/tests/tests.py --cov appenlight (this looks for testing.ini in repo root) |
|
45 | 45 | |
|
46 | 46 | WARNING!!! |
|
47 | 47 | Some tests will insert data into elasticsearch or redis based on testing.ini |
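For example, to run a single test class from that module while still collecting coverage (`-k` is pytest's standard expression-based selection; `TestMigration` is one of the classes defined in `tests.py`):

```
py.test appenlight/tests/tests.py -k TestMigration --cov appenlight
```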
@@ -1,634 +1,634 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
19 | 19 | # services, and proprietary license terms, please see |
|
20 | 20 | # https://rhodecode.com/licenses/ |
|
21 | 21 | |
|
22 | 22 | import bisect |
|
23 | 23 | import collections |
|
24 | 24 | import math |
|
25 | 25 | from datetime import datetime, timedelta |
|
26 | 26 | |
|
27 | 27 | import sqlalchemy as sa |
|
28 | 28 | import pyelasticsearch |
|
29 | 29 | |
|
30 | 30 | from celery.utils.log import get_task_logger |
|
31 | 31 | from zope.sqlalchemy import mark_changed |
|
32 | 32 | from pyramid.threadlocal import get_current_request, get_current_registry |
|
33 | 33 | from appenlight.celery import celery |
|
34 | 34 | from appenlight.models.report_group import ReportGroup |
|
35 | 35 | from appenlight.models import DBSession, Datastores |
|
36 | 36 | from appenlight.models.report import Report |
|
37 | 37 | from appenlight.models.log import Log |
|
38 | from appenlight.models.request_metric import Metric | |
|
38 | from appenlight.models.metric import Metric | |
|
39 | 39 | from appenlight.models.event import Event |
|
40 | 40 | |
|
41 | 41 | from appenlight.models.services.application import ApplicationService |
|
42 | 42 | from appenlight.models.services.event import EventService |
|
43 | 43 | from appenlight.models.services.log import LogService |
|
44 | 44 | from appenlight.models.services.report import ReportService |
|
45 | 45 | from appenlight.models.services.report_group import ReportGroupService |
|
46 | 46 | from appenlight.models.services.user import UserService |
|
47 | 47 | from appenlight.models.tag import Tag |
|
48 | 48 | from appenlight.lib import print_traceback |
|
49 | 49 | from appenlight.lib.utils import parse_proto, in_batches |
|
50 | 50 | from appenlight.lib.ext_json import json |
|
51 | 51 | from appenlight.lib.redis_keys import REDIS_KEYS |
|
52 | 52 | from appenlight.lib.enums import ReportType |
|
53 | 53 | |
|
54 | 54 | log = get_task_logger(__name__) |
|
55 | 55 | |
|
56 | 56 | sample_boundries = list(range(100, 1000, 100)) + \ |
|
57 | 57 | list(range(1000, 10000, 1000)) + \ |
|
58 | 58 | list(range(10000, 100000, 5000)) |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | def pick_sample(total_occurences, report_type=None): |
|
62 | 62 | every = 1.0 |
|
63 | 63 | position = bisect.bisect_left(sample_boundries, total_occurences) |
|
64 | 64 | if position > 0: |
|
65 | 65 | if report_type == ReportType.not_found: |
|
66 | 66 | divide = 10.0 |
|
67 | 67 | else: |
|
68 | 68 | divide = 100.0 |
|
69 | 69 | every = sample_boundries[position - 1] / divide |
|
70 | 70 | return total_occurences % every == 0 |
|
71 | 71 | |
|
72 | 72 | |
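To make the sampling cadence above concrete, here is a small illustrative check (not part of the source) of how `pick_sample` thins out full report storage as a group's occurrence count grows:

```python
from appenlight.lib.enums import ReportType

# Below the first boundary (100), every == 1.0, so every report is kept.
assert pick_sample(50) is True
# At 500 occurrences the previous boundary is 400, so only every 4th
# report (400 / 100.0) gets a full sample row stored.
assert pick_sample(500) is True
assert pick_sample(503) is False
# not_found (404) groups divide by 10.0 instead, so past the first
# boundary they are sampled ten times less often (every == 40.0 here).
assert pick_sample(500, report_type=ReportType.not_found) is False
```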
|
73 | 73 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
74 | 74 | def test_exception_task(): |
|
75 | 75 | log.error('test celery log', extra={'location': 'celery'}) |
|
76 | 76 | log.warning('test celery log', extra={'location': 'celery'}) |
|
77 | 77 | raise Exception('Celery exception test') |
|
78 | 78 | |
|
79 | 79 | |
|
80 | 80 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
81 | 81 | def test_retry_exception_task(): |
|
82 | 82 | try: |
|
83 | 83 | import time |
|
84 | 84 | |
|
85 | 85 | time.sleep(1.3) |
|
86 | 86 | log.error('test retry celery log', extra={'location': 'celery'}) |
|
87 | 87 | log.warning('test retry celery log', extra={'location': 'celery'}) |
|
88 | 88 | raise Exception('Celery exception test') |
|
89 | 89 | except Exception as exc: |
|
90 | 90 | test_retry_exception_task.retry(exc=exc) |
|
91 | 91 | |
|
92 | 92 | |
|
93 | 93 | @celery.task(queue="reports", default_retry_delay=600, max_retries=144) |
|
94 | 94 | def add_reports(resource_id, params, dataset, environ=None, **kwargs): |
|
95 | 95 | proto_version = parse_proto(params.get('protocol_version', '')) |
|
96 | 96 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
97 | 97 | try: |
|
98 | 98 | # we will store ES docs here for single insert |
|
99 | 99 | es_report_docs = {} |
|
100 | 100 | es_report_group_docs = {} |
|
101 | 101 | resource = ApplicationService.by_id(resource_id) |
|
102 | 102 | |
|
103 | 103 | tags = [] |
|
104 | 104 | es_slow_calls_docs = {} |
|
105 | 105 | es_reports_stats_rows = {} |
|
106 | 106 | for report_data in dataset: |
|
107 | 107 | # build report details for later |
|
108 | 108 | added_details = 0 |
|
109 | 109 | report = Report() |
|
110 | 110 | report.set_data(report_data, resource, proto_version) |
|
111 | 111 | report._skip_ft_index = True |
|
112 | 112 | |
|
113 | 113 | report_group = ReportGroupService.by_hash_and_resource( |
|
114 | 114 | report.resource_id, |
|
115 | 115 | report.grouping_hash |
|
116 | 116 | ) |
|
117 | 117 | occurences = report_data.get('occurences', 1) |
|
118 | 118 | if not report_group: |
|
119 | 119 | # total_reports will be incremented a moment later |
|
120 | 120 | report_group = ReportGroup(grouping_hash=report.grouping_hash, |
|
121 | 121 | occurences=0, total_reports=0, |
|
122 | 122 | last_report=0, |
|
123 | 123 | priority=report.priority, |
|
124 | 124 | error=report.error, |
|
125 | 125 | first_timestamp=report.start_time) |
|
126 | 126 | report_group._skip_ft_index = True |
|
127 | 127 | report_group.report_type = report.report_type |
|
128 | 128 | report.report_group_time = report_group.first_timestamp |
|
129 | 129 | add_sample = pick_sample(report_group.occurences, |
|
130 | 130 | report_type=report_group.report_type) |
|
131 | 131 | if add_sample: |
|
132 | 132 | resource.report_groups.append(report_group) |
|
133 | 133 | report_group.reports.append(report) |
|
134 | 134 | added_details += 1 |
|
135 | 135 | DBSession.flush() |
|
136 | 136 | if report.partition_id not in es_report_docs: |
|
137 | 137 | es_report_docs[report.partition_id] = [] |
|
138 | 138 | es_report_docs[report.partition_id].append(report.es_doc()) |
|
139 | 139 | tags.extend(list(report.tags.items())) |
|
140 | 140 | slow_calls = report.add_slow_calls(report_data, report_group) |
|
141 | 141 | DBSession.flush() |
|
142 | 142 | for s_call in slow_calls: |
|
143 | 143 | if s_call.partition_id not in es_slow_calls_docs: |
|
144 | 144 | es_slow_calls_docs[s_call.partition_id] = [] |
|
145 | 145 | es_slow_calls_docs[s_call.partition_id].append( |
|
146 | 146 | s_call.es_doc()) |
|
147 | 147 | # try generating new stat rows if needed |
|
148 | 148 | else: |
|
149 | 149 | # required for postprocessing to not fail later |
|
150 | 150 | report.report_group = report_group |
|
151 | 151 | |
|
152 | 152 | stat_row = ReportService.generate_stat_rows( |
|
153 | 153 | report, resource, report_group) |
|
154 | 154 | if stat_row.partition_id not in es_reports_stats_rows: |
|
155 | 155 | es_reports_stats_rows[stat_row.partition_id] = [] |
|
156 | 156 | es_reports_stats_rows[stat_row.partition_id].append( |
|
157 | 157 | stat_row.es_doc()) |
|
158 | 158 | |
|
159 | 159 | # see if we should mark the 10th occurrence of the report |
|
160 | 160 | last_occurences_10 = int(math.floor(report_group.occurences / 10)) |
|
161 | 161 | curr_occurences_10 = int(math.floor( |
|
162 | 162 | (report_group.occurences + report.occurences) / 10)) |
|
163 | 163 | last_occurences_100 = int( |
|
164 | 164 | math.floor(report_group.occurences / 100)) |
|
165 | 165 | curr_occurences_100 = int(math.floor( |
|
166 | 166 | (report_group.occurences + report.occurences) / 100)) |
|
167 | 167 | notify_occurences_10 = last_occurences_10 != curr_occurences_10 |
|
168 | 168 | notify_occurences_100 = last_occurences_100 != curr_occurences_100 |
|
169 | 169 | report_group.occurences = ReportGroup.occurences + occurences |
|
170 | 170 | report_group.last_timestamp = report.start_time |
|
171 | 171 | report_group.summed_duration = ReportGroup.summed_duration + report.duration |
|
172 | 172 | summed_duration = ReportGroup.summed_duration + report.duration |
|
173 | 173 | summed_occurences = ReportGroup.occurences + occurences |
|
174 | 174 | report_group.average_duration = summed_duration / summed_occurences |
|
175 | 175 | report_group.run_postprocessing(report) |
|
176 | 176 | if added_details: |
|
177 | 177 | report_group.total_reports = ReportGroup.total_reports + 1 |
|
178 | 178 | report_group.last_report = report.id |
|
179 | 179 | report_group.set_notification_info(notify_10=notify_occurences_10, |
|
180 | 180 | notify_100=notify_occurences_100) |
|
181 | 181 | DBSession.flush() |
|
182 | 182 | report_group.get_report().notify_channel(report_group) |
|
183 | 183 | if report_group.partition_id not in es_report_group_docs: |
|
184 | 184 | es_report_group_docs[report_group.partition_id] = [] |
|
185 | 185 | es_report_group_docs[report_group.partition_id].append( |
|
186 | 186 | report_group.es_doc()) |
|
187 | 187 | |
|
188 | 188 | action = 'REPORT' |
|
189 | 189 | log_msg = '%s: %s %s, client: %s, proto: %s' % ( |
|
190 | 190 | action, |
|
191 | 191 | report_data.get('http_status', 'unknown'), |
|
192 | 192 | str(resource), |
|
193 | 193 | report_data.get('client'), |
|
194 | 194 | proto_version) |
|
195 | 195 | log.info(log_msg) |
|
196 | 196 | total_reports = len(dataset) |
|
197 | 197 | key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time) |
|
198 | 198 | Datastores.redis.incr(key, total_reports) |
|
199 | 199 | Datastores.redis.expire(key, 3600 * 24) |
|
200 | 200 | key = REDIS_KEYS['counters']['reports_per_minute_per_app'].format( |
|
201 | 201 | resource_id, current_time) |
|
202 | 202 | Datastores.redis.incr(key, total_reports) |
|
203 | 203 | Datastores.redis.expire(key, 3600 * 24) |
|
204 | 204 | |
|
205 | 205 | add_reports_es(es_report_group_docs, es_report_docs) |
|
206 | 206 | add_reports_slow_calls_es(es_slow_calls_docs) |
|
207 | 207 | add_reports_stats_rows_es(es_reports_stats_rows) |
|
208 | 208 | return True |
|
209 | 209 | except Exception as exc: |
|
210 | 210 | print_traceback(log) |
|
211 | 211 | add_reports.retry(exc=exc) |
|
212 | 212 | |
|
213 | 213 | |
|
214 | 214 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
215 | 215 | def add_reports_es(report_group_docs, report_docs): |
|
216 | 216 | for k, v in report_group_docs.items(): |
|
217 | 217 | Datastores.es.bulk_index(k, 'report_group', v, id_field="_id") |
|
218 | 218 | for k, v in report_docs.items(): |
|
219 | 219 | Datastores.es.bulk_index(k, 'report', v, id_field="_id", |
|
220 | 220 | parent_field='_parent') |
|
221 | 221 | |
|
222 | 222 | |
|
223 | 223 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
224 | 224 | def add_reports_slow_calls_es(es_docs): |
|
225 | 225 | for k, v in es_docs.items(): |
|
226 | 226 | Datastores.es.bulk_index(k, 'log', v) |
|
227 | 227 | |
|
228 | 228 | |
|
229 | 229 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
230 | 230 | def add_reports_stats_rows_es(es_docs): |
|
231 | 231 | for k, v in es_docs.items(): |
|
232 | 232 | Datastores.es.bulk_index(k, 'log', v) |
|
233 | 233 | |
|
234 | 234 | |
|
235 | 235 | @celery.task(queue="logs", default_retry_delay=600, max_retries=144) |
|
236 | 236 | def add_logs(resource_id, request, dataset, environ=None, **kwargs): |
|
237 | 237 | proto_version = request.get('protocol_version') |
|
238 | 238 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
239 | 239 | |
|
240 | 240 | try: |
|
241 | 241 | es_docs = collections.defaultdict(list) |
|
242 | 242 | application = ApplicationService.by_id(resource_id) |
|
243 | 243 | ns_pairs = [] |
|
244 | 244 | for entry in dataset: |
|
245 | 245 | # gather pk and ns so we can remove older versions of row later |
|
246 | 246 | if entry['primary_key'] is not None: |
|
247 | 247 | ns_pairs.append({"pk": entry['primary_key'], |
|
248 | 248 | "ns": entry['namespace']}) |
|
249 | 249 | log_entry = Log() |
|
250 | 250 | log_entry.set_data(entry, resource=application) |
|
251 | 251 | log_entry._skip_ft_index = True |
|
252 | 252 | application.logs.append(log_entry) |
|
253 | 253 | DBSession.flush() |
|
254 | 254 | # insert non pk rows first |
|
255 | 255 | if entry['primary_key'] is None: |
|
256 | 256 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) |
|
257 | 257 | |
|
258 | 258 | # 2nd pass to delete all log entries from db for same pk/ns pair |
|
259 | 259 | if ns_pairs: |
|
260 | 260 | ids_to_delete = [] |
|
261 | 261 | es_docs = collections.defaultdict(list) |
|
262 | 262 | es_docs_to_delete = collections.defaultdict(list) |
|
263 | 263 | found_pkey_logs = LogService.query_by_primary_key_and_namespace( |
|
264 | 264 | list_of_pairs=ns_pairs) |
|
265 | 265 | log_dict = {} |
|
266 | 266 | for log_entry in found_pkey_logs: |
|
267 | 267 | log_key = (log_entry.primary_key, log_entry.namespace) |
|
268 | 268 | if log_key not in log_dict: |
|
269 | 269 | log_dict[log_key] = [] |
|
270 | 270 | log_dict[log_key].append(log_entry) |
|
271 | 271 | |
|
272 | 272 | for ns, entry_list in log_dict.items(): |
|
273 | 273 | entry_list = sorted(entry_list, key=lambda x: x.timestamp) |
|
274 | 274 | # newest row needs to be indexed in es |
|
275 | 275 | log_entry = entry_list[-1] |
|
276 | 276 | # delete everything from pg and ES, leave the last row in pg |
|
277 | 277 | for e in entry_list[:-1]: |
|
278 | 278 | ids_to_delete.append(e.log_id) |
|
279 | 279 | es_docs_to_delete[e.partition_id].append(e.delete_hash) |
|
280 | 280 | |
|
281 | 281 | es_docs_to_delete[log_entry.partition_id].append( |
|
282 | 282 | log_entry.delete_hash) |
|
283 | 283 | |
|
284 | 284 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) |
|
285 | 285 | |
|
286 | 286 | if ids_to_delete: |
|
287 | 287 | query = DBSession.query(Log).filter( |
|
288 | 288 | Log.log_id.in_(ids_to_delete)) |
|
289 | 289 | query.delete(synchronize_session=False) |
|
290 | 290 | if es_docs_to_delete: |
|
291 | 291 | # batch this to avoid problems with default ES bulk limits |
|
292 | 292 | for es_index in es_docs_to_delete.keys(): |
|
293 | 293 | for batch in in_batches(es_docs_to_delete[es_index], 20): |
|
294 | 294 | query = {'terms': {'delete_hash': batch}} |
|
295 | 295 | |
|
296 | 296 | try: |
|
297 | 297 | Datastores.es.delete_by_query( |
|
298 | 298 | es_index, 'log', query) |
|
299 | 299 | except pyelasticsearch.ElasticHttpNotFoundError as exc: |
|
300 | 300 | log.error(exc) |
|
301 | 301 | |
|
302 | 302 | total_logs = len(dataset) |
|
303 | 303 | |
|
304 | 304 | log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % ( |
|
305 | 305 | str(application), |
|
306 | 306 | total_logs, |
|
307 | 307 | proto_version) |
|
308 | 308 | log.info(log_msg) |
|
309 | 309 | # mark_changed(session) |
|
310 | 310 | key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time) |
|
311 | 311 | Datastores.redis.incr(key, total_logs) |
|
312 | 312 | Datastores.redis.expire(key, 3600 * 24) |
|
313 | 313 | key = REDIS_KEYS['counters']['logs_per_minute_per_app'].format( |
|
314 | 314 | resource_id, current_time) |
|
315 | 315 | Datastores.redis.incr(key, total_logs) |
|
316 | 316 | Datastores.redis.expire(key, 3600 * 24) |
|
317 | 317 | add_logs_es(es_docs) |
|
318 | 318 | return True |
|
319 | 319 | except Exception as exc: |
|
320 | 320 | print_traceback(log) |
|
321 | 321 | add_logs.retry(exc=exc) |
|
322 | 322 | |
|
323 | 323 | |
|
324 | 324 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
325 | 325 | def add_logs_es(es_docs): |
|
326 | 326 | for k, v in es_docs.items(): |
|
327 | 327 | Datastores.es.bulk_index(k, 'log', v) |
|
328 | 328 | |
|
329 | 329 | |
|
330 | 330 | @celery.task(queue="metrics", default_retry_delay=600, max_retries=144) |
|
331 | 331 | def add_metrics(resource_id, request, dataset, proto_version): |
|
332 | 332 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
333 | 333 | try: |
|
334 | 334 | application = ApplicationService.by_id_cached()(resource_id) |
|
335 | 335 | application = DBSession.merge(application, load=False) |
|
336 | 336 | es_docs = [] |
|
337 | 337 | rows = [] |
|
338 | 338 | for metric in dataset: |
|
339 | 339 | tags = dict(metric['tags']) |
|
340 | 340 | server_n = tags.get('server_name', metric['server_name']).lower() |
|
341 | 341 | tags['server_name'] = server_n or 'unknown' |
|
342 | 342 | new_metric = Metric( |
|
343 | 343 | timestamp=metric['timestamp'], |
|
344 | 344 | resource_id=application.resource_id, |
|
345 | 345 | namespace=metric['namespace'], |
|
346 | 346 | tags=tags) |
|
347 | 347 | rows.append(new_metric) |
|
348 | 348 | es_docs.append(new_metric.es_doc()) |
|
349 | 349 | session = DBSession() |
|
350 | 350 | session.bulk_save_objects(rows) |
|
351 | 351 | session.flush() |
|
352 | 352 | |
|
353 | 353 | action = 'METRICS' |
|
354 | 354 | metrics_msg = '%s: %s, metrics: %s, proto:%s' % ( |
|
355 | 355 | action, |
|
356 | 356 | str(application), |
|
357 | 357 | len(dataset), |
|
358 | 358 | proto_version |
|
359 | 359 | ) |
|
360 | 360 | log.info(metrics_msg) |
|
361 | 361 | |
|
362 | 362 | mark_changed(session) |
|
363 | 363 | key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time) |
|
364 | 364 | Datastores.redis.incr(key, len(rows)) |
|
365 | 365 | Datastores.redis.expire(key, 3600 * 24) |
|
366 | 366 | key = REDIS_KEYS['counters']['metrics_per_minute_per_app'].format( |
|
367 | 367 | resource_id, current_time) |
|
368 | 368 | Datastores.redis.incr(key, len(rows)) |
|
369 | 369 | Datastores.redis.expire(key, 3600 * 24) |
|
370 | 370 | add_metrics_es(es_docs) |
|
371 | 371 | return True |
|
372 | 372 | except Exception as exc: |
|
373 | 373 | print_traceback(log) |
|
374 | 374 | add_metrics.retry(exc=exc) |
|
375 | 375 | |
|
376 | 376 | |
|
377 | 377 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
378 | 378 | def add_metrics_es(es_docs): |
|
379 | 379 | for doc in es_docs: |
|
380 | 380 | partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d') |
|
381 | 381 | Datastores.es.index(partition, 'log', doc) |
|
382 | 382 | |
|
383 | 383 | |
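As a concrete illustration of the daily partition naming used by `add_metrics_es` (illustrative only, not part of the source):

```python
from datetime import datetime

# A metric timestamped anywhere on 2016-03-07 lands in the daily
# 'rcae_m_2016_03_07' Elasticsearch index.
doc = {'timestamp': datetime(2016, 3, 7, 12, 30)}
assert 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d') == 'rcae_m_2016_03_07'
```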
|
384 | 384 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
385 | 385 | def check_user_report_notifications(resource_id): |
|
386 | 386 | since_when = datetime.utcnow() |
|
387 | 387 | try: |
|
388 | 388 | request = get_current_request() |
|
389 | 389 | application = ApplicationService.by_id(resource_id) |
|
390 | 390 | if not application: |
|
391 | 391 | return |
|
392 | 392 | error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
393 | 393 | ReportType.error, resource_id) |
|
394 | 394 | slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
395 | 395 | ReportType.slow, resource_id) |
|
396 | 396 | error_group_ids = Datastores.redis.smembers(error_key) |
|
397 | 397 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
398 | 398 | Datastores.redis.delete(error_key) |
|
399 | 399 | Datastores.redis.delete(slow_key) |
|
400 | 400 | err_gids = [int(g_id) for g_id in error_group_ids] |
|
401 | 401 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] |
|
402 | 402 | group_ids = err_gids + slow_gids |
|
403 | 403 | occurence_dict = {} |
|
404 | 404 | for g_id in group_ids: |
|
405 | 405 | key = REDIS_KEYS['counters']['report_group_occurences'].format( |
|
406 | 406 | g_id) |
|
407 | 407 | val = Datastores.redis.get(key) |
|
408 | 408 | Datastores.redis.delete(key) |
|
409 | 409 | if val: |
|
410 | 410 | occurence_dict[g_id] = int(val) |
|
411 | 411 | else: |
|
412 | 412 | occurence_dict[g_id] = 1 |
|
413 | 413 | report_groups = ReportGroupService.by_ids(group_ids) |
|
414 | 414 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
415 | 415 | |
|
416 | 416 | ApplicationService.check_for_groups_alert( |
|
417 | 417 | application, 'alert', report_groups=report_groups, |
|
418 | 418 | occurence_dict=occurence_dict) |
|
419 | 419 | users = set([p.user for p in application.users_for_perm('view')]) |
|
420 | 420 | report_groups = report_groups.all() |
|
421 | 421 | for user in users: |
|
422 | 422 | UserService.report_notify(user, request, application, |
|
423 | 423 | report_groups=report_groups, |
|
424 | 424 | occurence_dict=occurence_dict) |
|
425 | 425 | for group in report_groups: |
|
426 | 426 | # marks report_groups as notified |
|
427 | 427 | if not group.notified: |
|
428 | 428 | group.notified = True |
|
429 | 429 | except Exception as exc: |
|
430 | 430 | print_traceback(log) |
|
431 | 431 | raise |
|
432 | 432 | |
|
433 | 433 | |
|
434 | 434 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
435 | 435 | def check_alerts(resource_id): |
|
436 | 436 | since_when = datetime.utcnow() |
|
437 | 437 | try: |
|
438 | 438 | request = get_current_request() |
|
439 | 439 | application = ApplicationService.by_id(resource_id) |
|
440 | 440 | if not application: |
|
441 | 441 | return |
|
442 | 442 | error_key = REDIS_KEYS[ |
|
443 | 443 | 'reports_to_notify_per_type_per_app_alerting'].format( |
|
444 | 444 | ReportType.error, resource_id) |
|
445 | 445 | slow_key = REDIS_KEYS[ |
|
446 | 446 | 'reports_to_notify_per_type_per_app_alerting'].format( |
|
447 | 447 | ReportType.slow, resource_id) |
|
448 | 448 | error_group_ids = Datastores.redis.smembers(error_key) |
|
449 | 449 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
450 | 450 | Datastores.redis.delete(error_key) |
|
451 | 451 | Datastores.redis.delete(slow_key) |
|
452 | 452 | err_gids = [int(g_id) for g_id in error_group_ids] |
|
453 | 453 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] |
|
454 | 454 | group_ids = err_gids + slow_gids |
|
455 | 455 | occurence_dict = {} |
|
456 | 456 | for g_id in group_ids: |
|
457 | 457 | key = REDIS_KEYS['counters'][ |
|
458 | 458 | 'report_group_occurences_alerting'].format( |
|
459 | 459 | g_id) |
|
460 | 460 | val = Datastores.redis.get(key) |
|
461 | 461 | Datastores.redis.delete(key) |
|
462 | 462 | if val: |
|
463 | 463 | occurence_dict[g_id] = int(val) |
|
464 | 464 | else: |
|
465 | 465 | occurence_dict[g_id] = 1 |
|
466 | 466 | report_groups = ReportGroupService.by_ids(group_ids) |
|
467 | 467 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
468 | 468 | |
|
469 | 469 | ApplicationService.check_for_groups_alert( |
|
470 | 470 | application, 'alert', report_groups=report_groups, |
|
471 | 471 | occurence_dict=occurence_dict, since_when=since_when) |
|
472 | 472 | except Exception as exc: |
|
473 | 473 | print_traceback(log) |
|
474 | 474 | raise |
|
475 | 475 | |
|
476 | 476 | |
|
477 | 477 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
478 | 478 | def close_alerts(): |
|
479 | 479 | log.warning('Checking alerts') |
|
480 | 480 | since_when = datetime.utcnow() |
|
481 | 481 | try: |
|
482 | 482 | event_types = [Event.types['error_report_alert'], |
|
483 | 483 | Event.types['slow_report_alert'], ] |
|
484 | 484 | statuses = [Event.statuses['active']] |
|
485 | 485 | # get events older than 5 min |
|
486 | 486 | events = EventService.by_type_and_status( |
|
487 | 487 | event_types, |
|
488 | 488 | statuses, |
|
489 | 489 | older_than=(since_when - timedelta(minutes=5))) |
|
490 | 490 | for event in events: |
|
491 | 491 | # see if we can close them |
|
492 | 492 | event.validate_or_close( |
|
493 | 493 | since_when=(since_when - timedelta(minutes=1))) |
|
494 | 494 | except Exception as exc: |
|
495 | 495 | print_traceback(log) |
|
496 | 496 | raise |
|
497 | 497 | |
|
498 | 498 | |
|
499 | 499 | @celery.task(queue="default", default_retry_delay=600, max_retries=144) |
|
500 | 500 | def update_tag_counter(tag_name, tag_value, count): |
|
501 | 501 | try: |
|
502 | 502 | query = DBSession.query(Tag).filter(Tag.name == tag_name).filter( |
|
503 | 503 | sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value), |
|
504 | 504 | sa.types.TEXT)) |
|
505 | 505 | query.update({'times_seen': Tag.times_seen + count, |
|
506 | 506 | 'last_timestamp': datetime.utcnow()}, |
|
507 | 507 | synchronize_session=False) |
|
508 | 508 | session = DBSession() |
|
509 | 509 | mark_changed(session) |
|
510 | 510 | return True |
|
511 | 511 | except Exception as exc: |
|
512 | 512 | print_traceback(log) |
|
513 | 513 | update_tag_counter.retry(exc=exc) |
|
514 | 514 | |
|
515 | 515 | |
|
516 | 516 | @celery.task(queue="default") |
|
517 | 517 | def update_tag_counters(): |
|
518 | 518 | """ |
|
519 | 519 | Sets task to update counters for application tags |
|
520 | 520 | """ |
|
521 | 521 | tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1) |
|
522 | 522 | Datastores.redis.delete(REDIS_KEYS['seen_tag_list']) |
|
523 | 523 | c = collections.Counter(tags) |
|
524 | 524 | for t_json, count in c.items(): |
|
525 | 525 | tag_info = json.loads(t_json) |
|
526 | 526 | update_tag_counter.delay(tag_info[0], tag_info[1], count) |
|
527 | 527 | |
|
528 | 528 | |
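The shape of the redis entries consumed above is implied by the `json.loads` call and the `tag_info[0]`/`tag_info[1]` indexing: JSON-encoded `[tag_name, tag_value]` pairs. A hypothetical producer side, for illustration only:

```python
# Whatever records a tag pushes a JSON-encoded [name, value] pair onto
# the shared list; update_tag_counters() later folds duplicates with
# collections.Counter before scheduling per-tag counter updates.
Datastores.redis.lpush(REDIS_KEYS['seen_tag_list'],
                       json.dumps(['browser', 'firefox']),
                       json.dumps(['browser', 'firefox']))
update_tag_counters()  # -> update_tag_counter.delay('browser', 'firefox', 2)
```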
|
529 | 529 | @celery.task(queue="default") |
|
530 | 530 | def daily_digest(): |
|
531 | 531 | """ |
|
532 | 532 | Sends daily digest with top 50 error reports |
|
533 | 533 | """ |
|
534 | 534 | request = get_current_request() |
|
535 | 535 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) |
|
536 | 536 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) |
|
537 | 537 | since_when = datetime.utcnow() - timedelta(hours=8) |
|
538 | 538 | log.warning('Generating daily digests') |
|
539 | 539 | for resource_id in apps: |
|
540 | 540 | resource_id = resource_id.decode('utf8') |
|
541 | 541 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
542 | 542 | filter_settings = {'resource': [resource_id], |
|
543 | 543 | 'tags': [{'name': 'type', |
|
544 | 544 | 'value': ['error'], 'op': None}], |
|
545 | 545 | 'type': 'error', 'start_date': since_when, |
|
546 | 546 | 'end_date': end_date} |
|
547 | 547 | |
|
548 | 548 | reports = ReportGroupService.get_trending( |
|
549 | 549 | request, filter_settings=filter_settings, limit=50) |
|
550 | 550 | |
|
551 | 551 | application = ApplicationService.by_id(resource_id) |
|
552 | 552 | if application: |
|
553 | 553 | users = set([p.user for p in application.users_for_perm('view')]) |
|
554 | 554 | for user in users: |
|
555 | 555 | user.send_digest(request, application, reports=reports, |
|
556 | 556 | since_when=since_when) |
|
557 | 557 | |
|
558 | 558 | |
|
559 | 559 | @celery.task(queue="default") |
|
560 | 560 | def notifications_reports(): |
|
561 | 561 | """ |
|
562 | 562 | Loop that checks redis for info and then issues new celery tasks to |
|
563 | 563 | send notifications |
|
564 | 564 | """ |
|
565 | 565 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) |
|
566 | 566 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) |
|
567 | 567 | for app in apps: |
|
568 | 568 | log.warning('Notify for app: %s' % app) |
|
569 | 569 | check_user_report_notifications.delay(app.decode('utf8')) |
|
570 | 570 | |
|
571 | 571 | @celery.task(queue="default") |
|
572 | 572 | def alerting_reports(): |
|
573 | 573 | """ |
|
574 | 574 | Loop that checks redis for info and then issues new tasks to celery to |
|
575 | 575 | perform the following: |
|
576 | 576 | - which applications should have new alerts opened |
|
577 | 577 | """ |
|
578 | 578 | |
|
579 | 579 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting']) |
|
580 | 580 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting']) |
|
581 | 581 | for app in apps: |
|
582 | 582 | log.warning('Notify for app: %s' % app) |
|
583 | 583 | check_alerts.delay(app.decode('utf8')) |
|
584 | 584 | |
|
585 | 585 | |
|
586 | 586 | @celery.task(queue="default", soft_time_limit=3600 * 4, |
|
587 | 587 | hard_time_limit=3600 * 4, max_retries=144) |
|
588 | 588 | def logs_cleanup(resource_id, filter_settings): |
|
589 | 589 | request = get_current_request() |
|
590 | 590 | request.tm.begin() |
|
591 | 591 | es_query = { |
|
592 | 592 | "_source": False, |
|
593 | 593 | "size": 5000, |
|
594 | 594 | "query": { |
|
595 | 595 | "filtered": { |
|
596 | 596 | "filter": { |
|
597 | 597 | "and": [{"term": {"resource_id": resource_id}}] |
|
598 | 598 | } |
|
599 | 599 | } |
|
600 | 600 | } |
|
601 | 601 | } |
|
602 | 602 | |
|
603 | 603 | query = DBSession.query(Log).filter(Log.resource_id == resource_id) |
|
604 | 604 | if filter_settings['namespace']: |
|
605 | 605 | query = query.filter(Log.namespace == filter_settings['namespace'][0]) |
|
606 | 606 | es_query['query']['filtered']['filter']['and'].append( |
|
607 | 607 | {"term": {"namespace": filter_settings['namespace'][0]}} |
|
608 | 608 | ) |
|
609 | 609 | query.delete(synchronize_session=False) |
|
610 | 610 | request.tm.commit() |
|
611 | 611 | result = request.es_conn.search(es_query, index='rcae_l_*', |
|
612 | 612 | doc_type='log', es_scroll='1m', |
|
613 | 613 | es_search_type='scan') |
|
614 | 614 | scroll_id = result['_scroll_id'] |
|
615 | 615 | while True: |
|
616 | 616 | log.warning('log_cleanup, app:{} ns:{} batch'.format( |
|
617 | 617 | resource_id, |
|
618 | 618 | filter_settings['namespace'] |
|
619 | 619 | )) |
|
620 | 620 | es_docs_to_delete = [] |
|
621 | 621 | result = request.es_conn.send_request( |
|
622 | 622 | 'POST', ['_search', 'scroll'], |
|
623 | 623 | body=scroll_id, query_params={"scroll": '1m'}) |
|
624 | 624 | scroll_id = result['_scroll_id'] |
|
625 | 625 | if not result['hits']['hits']: |
|
626 | 626 | break |
|
627 | 627 | for doc in result['hits']['hits']: |
|
628 | 628 | es_docs_to_delete.append({"id": doc['_id'], |
|
629 | 629 | "index": doc['_index']}) |
|
630 | 630 | |
|
631 | 631 | for batch in in_batches(es_docs_to_delete, 10): |
|
632 | 632 | Datastores.es.bulk([Datastores.es.delete_op(doc_type='log', |
|
633 | 633 | **to_del) |
|
634 | 634 | for to_del in batch]) |
@@ -1,135 +1,135 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
19 | 19 | # services, and proprietary license terms, please see |
|
20 | 20 | # https://rhodecode.com/licenses/ |
|
21 | 21 | |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | from sqlalchemy.ext.declarative import declarative_base |
|
25 | 25 | from sqlalchemy import MetaData |
|
26 | 26 | from sqlalchemy.orm import scoped_session |
|
27 | 27 | from sqlalchemy.orm import sessionmaker |
|
28 | 28 | from zope.sqlalchemy import ZopeTransactionExtension |
|
29 | 29 | import ziggurat_foundations |
|
30 | 30 | from ziggurat_foundations.models.base import get_db_session |
|
31 | 31 | |
|
32 | 32 | log = logging.getLogger(__name__) |
|
33 | 33 | |
|
34 | 34 | DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) |
|
35 | 35 | |
|
36 | 36 | NAMING_CONVENTION = { |
|
37 | 37 | "ix": 'ix_%(column_0_label)s', |
|
38 | 38 | "uq": "uq_%(table_name)s_%(column_0_name)s", |
|
39 | 39 | "ck": "ck_%(table_name)s_%(constraint_name)s", |
|
40 | 40 | "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", |
|
41 | 41 | "pk": "pk_%(table_name)s" |
|
42 | 42 | } |
|
43 | 43 | |
|
44 | 44 | metadata = MetaData(naming_convention=NAMING_CONVENTION) |
|
45 | 45 | Base = declarative_base(metadata=metadata) |
|
46 | 46 | |
|
47 | 47 | # optional for request.db approach |
|
48 | 48 | ziggurat_foundations.models.DBSession = DBSession |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | class Datastores(object): |
|
52 | 52 | redis = None |
|
53 | 53 | es = None |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | def register_datastores(es_conn, redis_conn, redis_lockmgr): |
|
57 | 57 | Datastores.es = es_conn |
|
58 | 58 | Datastores.redis = redis_conn |
|
59 | 59 | Datastores.lockmgr = redis_lockmgr |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | class SliceableESQuery(object): |
|
63 | 63 | def __init__(self, query, sort_query=None, aggregations=False, **kwconfig): |
|
64 | 64 | self.query = query |
|
65 | 65 | self.sort_query = sort_query |
|
66 | 66 | self.aggregations = aggregations |
|
67 | 67 | self.items_per_page = kwconfig.pop('items_per_page', 10) |
|
68 | 68 | self.page = kwconfig.pop('page', 1) |
|
69 | 69 | self.kwconfig = kwconfig |
|
70 | 70 | self.result = None |
|
71 | 71 | |
|
72 | 72 | def __getitem__(self, index): |
|
73 | 73 | config = self.kwconfig.copy() |
|
74 | 74 | config['es_from'] = index.start |
|
75 | 75 | query = self.query.copy() |
|
76 | 76 | if self.sort_query: |
|
77 | 77 | query.update(self.sort_query) |
|
78 | 78 | self.result = Datastores.es.search(query, size=self.items_per_page, |
|
79 | 79 | **config) |
|
80 | 80 | if self.aggregations: |
|
81 | 81 | self.items = self.result.get('aggregations') |
|
82 | 82 | else: |
|
83 | 83 | self.items = self.result['hits']['hits'] |
|
84 | 84 | |
|
85 | 85 | return self.items |
|
86 | 86 | |
|
87 | 87 | def __iter__(self): |
|
88 | 88 | return self.result |
|
89 | 89 | |
|
90 | 90 | def __len__(self): |
|
91 | 91 | config = self.kwconfig.copy() |
|
92 | 92 | query = self.query.copy() |
|
93 | 93 | self.result = Datastores.es.search(query, size=self.items_per_page, |
|
94 | 94 | **config) |
|
95 | 95 | if self.aggregations: |
|
96 | 96 | self.items = self.result.get('aggregations') |
|
97 | 97 | else: |
|
98 | 98 | self.items = self.result['hits']['hits'] |
|
99 | 99 | |
|
100 | 100 | count = int(self.result['hits']['total']) |
|
101 | 101 | return count if count < 5000 else 5000 |
|
102 | 102 | |
|
103 | 103 | |
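`SliceableESQuery` lets pagination code treat an Elasticsearch query like a lazy sequence: slicing issues a sized search at the matching offset and `len()` reports the hit total, capped at 5000. A usage sketch; the `index`/`doc_type` keyword arguments shown here are assumptions that simply pass through `kwconfig` to `Datastores.es.search`:

```python
query = {'query': {'match': {'error': 'timeout'}}}
sliceable = SliceableESQuery(query, index='rcae_r_*', doc_type='report',
                             items_per_page=10)

first_page = sliceable[0:10]  # __getitem__ sets es_from=0 and runs the search
total_hits = len(sliceable)   # __len__ runs the query and caps the count
```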
|
104 | 104 | from appenlight.models.resource import Resource |
|
105 | 105 | from appenlight.models.application import Application |
|
106 | 106 | from appenlight.models.user import User |
|
107 | 107 | from appenlight.models.alert_channel import AlertChannel |
|
108 | 108 | from appenlight.models.alert_channel_action import AlertChannelAction |
|
109 | from appenlight.models.request_metric import Metric | |
|
109 | from appenlight.models.metric import Metric | |
|
110 | 110 | from appenlight.models.application_postprocess_conf import \ |
|
111 | 111 | ApplicationPostprocessConf |
|
112 | 112 | from appenlight.models.auth_token import AuthToken |
|
113 | 113 | from appenlight.models.event import Event |
|
114 | 114 | from appenlight.models.external_identity import ExternalIdentity |
|
115 | 115 | from appenlight.models.group import Group |
|
116 | 116 | from appenlight.models.group_permission import GroupPermission |
|
117 | 117 | from appenlight.models.group_resource_permission import GroupResourcePermission |
|
118 | 118 | from appenlight.models.log import Log |
|
119 | 119 | from appenlight.models.plugin_config import PluginConfig |
|
120 | 120 | from appenlight.models.report import Report |
|
121 | 121 | from appenlight.models.report_group import ReportGroup |
|
122 | 122 | from appenlight.models.report_comment import ReportComment |
|
123 | 123 | from appenlight.models.report_assignment import ReportAssignment |
|
124 | 124 | from appenlight.models.report_stat import ReportStat |
|
125 | 125 | from appenlight.models.slow_call import SlowCall |
|
126 | 126 | from appenlight.models.tag import Tag |
|
127 | 127 | from appenlight.models.user_group import UserGroup |
|
128 | 128 | from appenlight.models.user_permission import UserPermission |
|
129 | 129 | from appenlight.models.user_resource_permission import UserResourcePermission |
|
130 | 130 | from ziggurat_foundations import ziggurat_model_init |
|
131 | 131 | |
|
132 | 132 | ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission, |
|
133 | 133 | UserResourcePermission, GroupResourcePermission, |
|
134 | 134 | Resource, |
|
135 | 135 | ExternalIdentity, passwordmanager=None) |
|
1 | NO CONTENT: file renamed from backend/src/appenlight/models/request_metric.py to backend/src/appenlight/models/metric.py |
@@ -1,435 +1,435 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
19 | 19 | # services, and proprietary license terms, please see |
|
20 | 20 | # https://rhodecode.com/licenses/ |
|
21 | 21 | |
|
22 | 22 | import argparse |
|
23 | 23 | import datetime |
|
24 | 24 | import logging |
|
25 | 25 | |
|
26 | 26 | import sqlalchemy as sa |
|
27 | 27 | from collections import defaultdict |
|
28 | 28 | from pyramid.paster import setup_logging |
|
29 | 29 | from pyramid.paster import bootstrap |
|
30 | 30 | from appenlight.models import ( |
|
31 | 31 | DBSession, |
|
32 | 32 | Datastores, |
|
33 | 33 | metadata |
|
34 | 34 | ) |
|
35 | 35 | from appenlight.lib import get_callable |
|
36 | 36 | from appenlight.models.report_group import ReportGroup |
|
37 | 37 | from appenlight.models.report import Report |
|
38 | 38 | from appenlight.models.report_stat import ReportStat |
|
39 | 39 | from appenlight.models.log import Log |
|
40 | 40 | from appenlight.models.slow_call import SlowCall |
|
41 | from appenlight.models.request_metric import Metric | |
|
41 | from appenlight.models.metric import Metric | |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | log = logging.getLogger(__name__) |
|
45 | 45 | |
|
46 | 46 | tables = { |
|
47 | 47 | 'slow_calls_p_': [], |
|
48 | 48 | 'reports_stats_p_': [], |
|
49 | 49 | 'reports_p_': [], |
|
50 | 50 | 'reports_groups_p_': [], |
|
51 | 51 | 'logs_p_': [], |
|
52 | 52 | 'metrics_p_': [], |
|
53 | 53 | } |
|
54 | 54 | |
|
55 | 55 | def detect_tables(table_prefix): |
|
56 | 56 | found_tables = [] |
|
57 | 57 | db_tables_query = ''' |
|
58 | 58 | SELECT tablename FROM pg_tables WHERE tablename NOT LIKE 'pg_%' AND |
|
59 | 59 | tablename NOT LIKE 'sql_%' ORDER BY tablename ASC;''' |
|
60 | 60 | |
|
61 | 61 | for table in DBSession.execute(db_tables_query).fetchall(): |
|
62 | 62 | tablename = table.tablename |
|
63 | 63 | if tablename.startswith(table_prefix): |
|
64 | 64 | t = sa.Table(tablename, metadata, autoload=True, |
|
65 | 65 | autoload_with=DBSession.bind.engine) |
|
66 | 66 | found_tables.append(t) |
|
67 | 67 | return found_tables |
|
68 | 68 | |
|
69 | 69 | |
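A quick sketch of calling `detect_tables` (the `logs_p_` prefix matches the partitioned log tables listed in `tables` above; output shape is illustrative):

```python
# Reflects every PostgreSQL partition table whose name starts with the
# given prefix into a SQLAlchemy Table object, ready for select()s.
for table in detect_tables('logs_p_'):
    print(table.name, len(table.columns))
```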
|
70 | 70 | def main(): |
|
71 | 71 | """ |
|
72 | 72 | Recreates Elasticsearch indexes |
|
73 | 73 | Performs reindex of whole db to Elasticsearch |
|
74 | 74 | |
|
75 | 75 | """ |
|
76 | 76 | |
|
77 | 77 | # need parser twice because we first need to load ini file |
|
78 | 78 | # bootstrap pyramid and then load plugins |
|
79 | 79 | pre_parser = argparse.ArgumentParser( |
|
80 | 80 | description='Reindex AppEnlight data', |
|
81 | 81 | add_help=False) |
|
82 | 82 | pre_parser.add_argument('-c', '--config', required=True, |
|
83 | 83 | help='Configuration ini file of application') |
|
84 | 84 | pre_parser.add_argument('-h', '--help', help='Show help', nargs='?') |
|
85 | 85 | pre_parser.add_argument('-t', '--types', nargs='+', |
|
86 | 86 | help='Which parts of database should get reindexed') |
|
87 | 87 | args = pre_parser.parse_args() |
|
88 | 88 | |
|
89 | 89 | config_uri = args.config |
|
90 | 90 | setup_logging(config_uri) |
|
91 | 91 | log.setLevel(logging.INFO) |
|
92 | 92 | env = bootstrap(config_uri) |
|
93 | 93 | parser = argparse.ArgumentParser(description='Reindex AppEnlight data') |
|
94 | 94 | choices = { |
|
95 | 95 | 'reports': 'appenlight.scripts.reindex_elasticsearch:reindex_reports', |
|
96 | 96 | 'logs': 'appenlight.scripts.reindex_elasticsearch:reindex_logs', |
|
97 | 97 | 'metrics': 'appenlight.scripts.reindex_elasticsearch:reindex_metrics', |
|
98 | 98 | 'slow_calls': 'appenlight.scripts.reindex_elasticsearch:reindex_slow_calls', |
|
99 | 99 | 'template': 'appenlight.scripts.reindex_elasticsearch:update_template' |
|
100 | 100 | } |
|
101 | 101 | for k, v in env['registry'].appenlight_plugins.items(): |
|
102 | 102 | if v.get('fulltext_indexer'): |
|
103 | 103 | choices[k] = v['fulltext_indexer'] |
|
104 | 104 | parser.add_argument('-t', '--types', nargs='*', |
|
105 | 105 | choices=['all'] + list(choices.keys()), default=['all'], |
|
106 | 106 | help='Which parts of database should get reindexed') |
|
107 | 107 | parser.add_argument('-c', '--config', required=True, |
|
108 | 108 | help='Configuration ini file of application') |
|
109 | 109 | args = parser.parse_args() |
|
110 | 110 | |
|
111 | 111 | |
|
112 | 112 | if 'all' in args.types: |
|
113 | 113 | args.types = list(choices.keys()) |
|
114 | 114 | |
|
115 | 115 | log.info('settings {}'.format(args.types)) |
|
116 | 116 | |
|
117 | 117 | if 'template' in args.types: |
|
118 | 118 | get_callable(choices['template'])() |
|
119 | 119 | args.types.remove('template') |
|
120 | 120 | for selected in args.types: |
|
121 | 121 | get_callable(choices[selected])() |
|
122 | 122 | |
|
123 | 123 | |
|
124 | 124 | def update_template(): |
|
125 | 125 | try: |
|
126 | 126 | Datastores.es.send_request("delete", ['_template', 'rcae'], |
|
127 | 127 | query_params={}) |
|
128 | 128 | except Exception as e: |
|
129 | 129 | print(e) |
|
130 | 130 | log.info('updating elasticsearch template') |
|
131 | 131 | tag_templates = [ |
|
132 | 132 | {"values": { |
|
133 | 133 | "path_match": "tags.*", |
|
134 | 134 | "mapping": { |
|
135 | 135 | "type": "object", |
|
136 | 136 | "properties": { |
|
137 | 137 | "values": {"type": "string", "analyzer": "tag_value"}, |
|
138 | 138 | "numeric_values": {"type": "float"} |
|
139 | 139 | } |
|
140 | 140 | } |
|
141 | 141 | }} |
|
142 | 142 | ] |
|
143 | 143 | |
|
144 | 144 | template_schema = { |
|
145 | 145 | "template": "rcae_*", |
|
146 | 146 | "settings": { |
|
147 | 147 | "index": { |
|
148 | 148 | "refresh_interval": "5s", |
|
149 | 149 | "translog": {"interval": "5s", |
|
150 | 150 | "durability": "async"} |
|
151 | 151 | }, |
|
152 | 152 | "number_of_shards": 5, |
|
153 | 153 | "analysis": { |
|
154 | 154 | "analyzer": { |
|
155 | 155 | "url_path": { |
|
156 | 156 | "type": "custom", |
|
157 | 157 | "char_filter": [], |
|
158 | 158 | "tokenizer": "path_hierarchy", |
|
159 | 159 | "filter": [] |
|
160 | 160 | }, |
|
161 | 161 | "tag_value": { |
|
162 | 162 | "type": "custom", |
|
163 | 163 | "char_filter": [], |
|
164 | 164 | "tokenizer": "keyword", |
|
165 | 165 | "filter": ["lowercase"] |
|
166 | 166 | }, |
|
167 | 167 | } |
|
168 | 168 | }, |
|
169 | 169 | }, |
|
170 | 170 | "mappings": { |
|
171 | 171 | "report_group": { |
|
172 | 172 | "_all": {"enabled": False}, |
|
173 | 173 | "dynamic_templates": tag_templates, |
|
174 | 174 | "properties": { |
|
175 | 175 | "pg_id": {"type": "string", "index": "not_analyzed"}, |
|
176 | 176 | "resource_id": {"type": "integer"}, |
|
177 | 177 | "priority": {"type": "integer"}, |
|
178 | 178 | "error": {"type": "string", "analyzer": "simple"}, |
|
179 | 179 | "read": {"type": "boolean"}, |
|
180 | 180 | "occurences": {"type": "integer"}, |
|
181 | 181 | "fixed": {"type": "boolean"}, |
|
182 | 182 | "first_timestamp": {"type": "date"}, |
|
183 | 183 | "last_timestamp": {"type": "date"}, |
|
184 | 184 | "average_duration": {"type": "float"}, |
|
185 | 185 | "summed_duration": {"type": "float"}, |
|
186 | 186 | "public": {"type": "boolean"} |
|
187 | 187 | } |
|
188 | 188 | }, |
|
189 | 189 | "report": { |
|
190 | 190 | "_all": {"enabled": False}, |
|
191 | 191 | "dynamic_templates": tag_templates, |
|
192 | 192 | "properties": { |
|
193 | 193 | "pg_id": {"type": "string", "index": "not_analyzed"}, |
|
194 | 194 | "resource_id": {"type": "integer"}, |
|
195 | 195 | "group_id": {"type": "string"}, |
|
196 | 196 | "http_status": {"type": "integer"}, |
|
197 | 197 | "ip": {"type": "string", "index": "not_analyzed"}, |
|
198 | 198 | "url_domain": {"type": "string", "analyzer": "simple"}, |
|
199 | 199 | "url_path": {"type": "string", "analyzer": "url_path"}, |
|
200 | 200 | "error": {"type": "string", "analyzer": "simple"}, |
|
201 | 201 | "report_type": {"type": "integer"}, |
|
202 | 202 | "start_time": {"type": "date"}, |
|
203 | 203 | "request_id": {"type": "string", "index": "not_analyzed"}, |
|
204 | 204 | "end_time": {"type": "date"}, |
|
205 | 205 | "duration": {"type": "float"}, |
|
206 | 206 | "tags": { |
|
207 | 207 | "type": "object" |
|
208 | 208 | }, |
|
209 | 209 | "tag_list": {"type": "string", "analyzer": "tag_value"}, |
|
210 | 210 | "extra": { |
|
211 | 211 | "type": "object" |
|
212 | 212 | }, |
|
213 | 213 | }, |
|
214 | 214 | "_parent": {"type": "report_group"} |
|
215 | 215 | }, |
|
216 | 216 | "log": { |
|
217 | 217 | "_all": {"enabled": False}, |
|
218 | 218 | "dynamic_templates": tag_templates, |
|
219 | 219 | "properties": { |
|
220 | 220 | "pg_id": {"type": "string", "index": "not_analyzed"}, |
|
221 | 221 | "delete_hash": {"type": "string", "index": "not_analyzed"}, |
|
222 | 222 | "resource_id": {"type": "integer"}, |
|
223 | 223 | "timestamp": {"type": "date"}, |
|
224 | 224 | "permanent": {"type": "boolean"}, |
|
225 | 225 | "request_id": {"type": "string", "index": "not_analyzed"}, |
|
226 | 226 | "log_level": {"type": "string", "analyzer": "simple"}, |
|
227 | 227 | "message": {"type": "string", "analyzer": "simple"}, |
|
228 | 228 | "namespace": {"type": "string", "index": "not_analyzed"}, |
|
229 | 229 | "tags": { |
|
230 | 230 | "type": "object" |
|
231 | 231 | }, |
|
232 | 232 | "tag_list": {"type": "string", "analyzer": "tag_value"} |
|
233 | 233 | } |
|
234 | 234 | } |
|
235 | 235 | } |
|
236 | 236 | } |
|
237 | 237 | |
|
238 | 238 | Datastores.es.send_request('PUT', ['_template', 'rcae'], |
|
239 | 239 | body=template_schema, query_params={}) |
|
240 | 240 | |
|
241 | 241 | |
|
242 | 242 | def reindex_reports(): |
|
243 | 243 | reports_groups_tables = detect_tables('reports_groups_p_') |
|
244 | 244 | try: |
|
245 | 245 | Datastores.es.delete_index('rcae_r*') |
|
246 | 246 | except Exception as e: |
|
247 | 247 | log.error(e) |
|
248 | 248 | |
|
249 | 249 | log.info('reindexing report groups') |
|
250 | 250 | i = 0 |
|
251 | 251 | task_start = datetime.datetime.now() |
|
252 | 252 | for partition_table in reports_groups_tables: |
|
253 | 253 | conn = DBSession.connection().execution_options(stream_results=True) |
|
254 | 254 | result = conn.execute(partition_table.select()) |
|
255 | 255 | while True: |
|
256 | 256 | chunk = result.fetchmany(2000) |
|
257 | 257 | if not chunk: |
|
258 | 258 | break |
|
259 | 259 | es_docs = defaultdict(list) |
|
260 | 260 | for row in chunk: |
|
261 | 261 | i += 1 |
|
262 | 262 | item = ReportGroup(**dict(list(row.items()))) |
|
263 | 263 | d_range = item.partition_id |
|
264 | 264 | es_docs[d_range].append(item.es_doc()) |
|
265 | 265 | if es_docs: |
|
266 | 266 | name = partition_table.name |
|
267 | 267 | log.info('round {}, {}'.format(i, name)) |
|
268 | 268 | for k, v in es_docs.items(): |
|
269 | 269 | Datastores.es.bulk_index(k, 'report_group', v, |
|
270 | 270 | id_field="_id") |
|
271 | 271 | |
|
272 | 272 | log.info( |
|
273 | 273 | 'total docs {} {}'.format(i, datetime.datetime.now() - task_start)) |
|
274 | 274 | |
|
275 | 275 | i = 0 |
|
276 | 276 | log.info('reindexing reports') |
|
277 | 277 | task_start = datetime.datetime.now() |
|
278 | 278 | reports_tables = detect_tables('reports_p_') |
|
279 | 279 | for partition_table in reports_tables: |
|
280 | 280 | conn = DBSession.connection().execution_options(stream_results=True) |
|
281 | 281 | result = conn.execute(partition_table.select()) |
|
282 | 282 | while True: |
|
283 | 283 | chunk = result.fetchmany(2000) |
|
284 | 284 | if not chunk: |
|
285 | 285 | break |
|
286 | 286 | es_docs = defaultdict(list) |
|
287 | 287 | for row in chunk: |
|
288 | 288 | i += 1 |
|
289 | 289 | item = Report(**dict(list(row.items()))) |
|
290 | 290 | d_range = item.partition_id |
|
291 | 291 | es_docs[d_range].append(item.es_doc()) |
|
292 | 292 | if es_docs: |
|
293 | 293 | name = partition_table.name |
|
294 | 294 | log.info('round {}, {}'.format(i, name)) |
|
295 | 295 | for k, v in es_docs.items(): |
|
296 | 296 | Datastores.es.bulk_index(k, 'report', v, id_field="_id", |
|
297 | 297 | parent_field='_parent') |
|
298 | 298 | |
|
299 | 299 | log.info( |
|
300 | 300 | 'total docs {} {}'.format(i, datetime.datetime.now() - task_start)) |
|
301 | 301 | |
|
302 | 302 | log.info('reindexing reports stats') |
|
303 | 303 | i = 0 |
|
304 | 304 | task_start = datetime.datetime.now() |
|
305 | 305 | reports_stats_tables = detect_tables('reports_stats_p_') |
|
306 | 306 | for partition_table in reports_stats_tables: |
|
307 | 307 | conn = DBSession.connection().execution_options(stream_results=True) |
|
308 | 308 | result = conn.execute(partition_table.select()) |
|
309 | 309 | while True: |
|
310 | 310 | chunk = result.fetchmany(2000) |
|
311 | 311 | if not chunk: |
|
312 | 312 | break |
|
313 | 313 | es_docs = defaultdict(list) |
|
314 | 314 | for row in chunk: |
|
315 | 315 | rd = dict(list(row.items())) |
|
316 | 316 | # remove legacy columns |
|
317 | 317 | # TODO: remove the column later |
|
318 | 318 | rd.pop('size', None) |
|
319 | 319 | item = ReportStat(**rd) |
|
320 | 320 | i += 1 |
|
321 | 321 | d_range = item.partition_id |
|
322 | 322 | es_docs[d_range].append(item.es_doc()) |
|
323 | 323 | if es_docs: |
|
324 | 324 | name = partition_table.name |
|
325 | 325 | log.info('round {}, {}'.format(i, name)) |
|
326 | 326 | for k, v in es_docs.items(): |
|
327 | 327 | Datastores.es.bulk_index(k, 'log', v) |
|
328 | 328 | |
|
329 | 329 | log.info( |
|
330 | 330 | 'total docs {} {}'.format(i, datetime.datetime.now() - task_start)) |
|
331 | 331 | |
|
332 | 332 | |
|
333 | 333 | def reindex_logs(): |
|
334 | 334 | try: |
|
335 | 335 | Datastores.es.delete_index('rcae_l*') |
|
336 | 336 | except Exception as e: |
|
337 | 337 | log.error(e) |
|
338 | 338 | |
|
339 | 339 | # logs |
|
340 | 340 | log.info('reindexing logs') |
|
341 | 341 | i = 0 |
|
342 | 342 | task_start = datetime.datetime.now() |
|
343 | 343 | log_tables = detect_tables('logs_p_') |
|
344 | 344 | for partition_table in log_tables: |
|
345 | 345 | conn = DBSession.connection().execution_options(stream_results=True) |
|
346 | 346 | result = conn.execute(partition_table.select()) |
|
347 | 347 | while True: |
|
348 | 348 | chunk = result.fetchmany(2000) |
|
349 | 349 | if not chunk: |
|
350 | 350 | break |
|
351 | 351 | es_docs = defaultdict(list) |
|
352 | 352 | |
|
353 | 353 | for row in chunk: |
|
354 | 354 | i += 1 |
|
355 | 355 | item = Log(**dict(list(row.items()))) |
|
356 | 356 | d_range = item.partition_id |
|
357 | 357 | es_docs[d_range].append(item.es_doc()) |
|
358 | 358 | if es_docs: |
|
359 | 359 | name = partition_table.name |
|
360 | 360 | log.info('round {}, {}'.format(i, name)) |
|
361 | 361 | for k, v in es_docs.items(): |
|
362 | 362 | Datastores.es.bulk_index(k, 'log', v) |
|
363 | 363 | |
|
364 | 364 | log.info( |
|
365 | 365 | 'total docs {} {}'.format(i, datetime.datetime.now() - task_start)) |
|
366 | 366 | |
|
367 | 367 | |
|
368 | 368 | def reindex_metrics(): |
|
369 | 369 | try: |
|
370 | 370 | Datastores.es.delete_index('rcae_m*') |
|
371 | 371 | except Exception as e: |
|
372 | 372 | print(e) |
|
373 | 373 | |
|
374 | 374 | log.info('reindexing applications metrics') |
|
375 | 375 | i = 0 |
|
376 | 376 | task_start = datetime.datetime.now() |
|
377 | 377 | metric_tables = detect_tables('metrics_p_') |
|
378 | 378 | for partition_table in metric_tables: |
|
379 | 379 | conn = DBSession.connection().execution_options(stream_results=True) |
|
380 | 380 | result = conn.execute(partition_table.select()) |
|
381 | 381 | while True: |
|
382 | 382 | chunk = result.fetchmany(2000) |
|
383 | 383 | if not chunk: |
|
384 | 384 | break |
|
385 | 385 | es_docs = defaultdict(list) |
|
386 | 386 | for row in chunk: |
|
387 | 387 | i += 1 |
|
388 | 388 | item = Metric(**dict(list(row.items()))) |
|
389 | 389 | d_range = item.partition_id |
|
390 | 390 | es_docs[d_range].append(item.es_doc()) |
|
391 | 391 | if es_docs: |
|
392 | 392 | name = partition_table.name |
|
393 | 393 | log.info('round {}, {}'.format(i, name)) |
|
394 | 394 | for k, v in es_docs.items(): |
|
395 | 395 | Datastores.es.bulk_index(k, 'log', v) |
|
396 | 396 | |
|
397 | 397 | log.info( |
|
398 | 398 | 'total docs {} {}'.format(i, datetime.datetime.now() - task_start)) |
|
399 | 399 | |
|
400 | 400 | |
|
401 | 401 | def reindex_slow_calls(): |
|
402 | 402 | try: |
|
403 | 403 | Datastores.es.delete_index('rcae_sc*') |
|
404 | 404 | except Exception as e: |
|
405 | 405 | print(e) |
|
406 | 406 | |
|
407 | 407 | log.info('reindexing slow calls') |
|
408 | 408 | i = 0 |
|
409 | 409 | task_start = datetime.datetime.now() |
|
410 | 410 | slow_calls_tables = detect_tables('slow_calls_p_') |
|
411 | 411 | for partition_table in slow_calls_tables: |
|
412 | 412 | conn = DBSession.connection().execution_options(stream_results=True) |
|
413 | 413 | result = conn.execute(partition_table.select()) |
|
414 | 414 | while True: |
|
415 | 415 | chunk = result.fetchmany(2000) |
|
416 | 416 | if not chunk: |
|
417 | 417 | break |
|
418 | 418 | es_docs = defaultdict(list) |
|
419 | 419 | for row in chunk: |
|
420 | 420 | i += 1 |
|
421 | 421 | item = SlowCall(**dict(list(row.items()))) |
|
422 | 422 | d_range = item.partition_id |
|
423 | 423 | es_docs[d_range].append(item.es_doc()) |
|
424 | 424 | if es_docs: |
|
425 | 425 | name = partition_table.name |
|
426 | 426 | log.info('round {}, {}'.format(i, name)) |
|
427 | 427 | for k, v in es_docs.items(): |
|
428 | 428 | Datastores.es.bulk_index(k, 'log', v) |
|
429 | 429 | |
|
430 | 430 | log.info( |
|
431 | 431 | 'total docs {} {}'.format(i, datetime.datetime.now() - task_start)) |
|
432 | 432 | |
|
433 | 433 | |
|
434 | 434 | if __name__ == '__main__': |
|
435 | 435 | main() |
@@ -1,1703 +1,1703 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
19 | 19 | # services, and proprietary license terms, please see |
|
20 | 20 | # https://rhodecode.com/licenses/ |
|
21 | 21 | |
|
22 | 22 | import copy |
|
23 | 23 | import logging |
|
24 | 24 | import mock |
|
25 | 25 | import pyramid |
|
26 | 26 | import pytest |
|
27 | 27 | import sqlalchemy as sa |
|
28 | 28 | import webob |
|
29 | 29 | |
|
30 | 30 | from datetime import datetime |
|
31 | 31 | from pyramid import testing |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | from appenlight.models import DBSession |
|
35 | 35 | from appenlight.lib.ext_json import json |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | log = logging.getLogger(__name__) |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | class DummyContext(object): |
|
42 | 42 | pass |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | @pytest.mark.usefixtures('base_app') |
|
46 | 46 | class BasicTest(object): |
|
47 | 47 | pass |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | @pytest.mark.usefixtures('base_app') |
|
51 | 51 | class TestMigration(object): |
|
52 | 52 | def test_migration(self): |
|
53 | 53 | assert 1 == 1 |
|
54 | 54 | |
|
55 | 55 | |
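# Schema validation tests: each TestAPIReports_* class binds the versioned
# colander schema to a fixed utcnow, then checks both the rejection of
# malformed payloads and field-by-field deserialization of example payloads.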
|
56 | 56 | class TestAPIReports_0_4_Validation(object): |
|
57 | 57 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
58 | 58 | def test_no_payload(self, dummy_json): |
|
59 | 59 | import colander |
|
60 | 60 | from appenlight.validators import ReportListSchema_0_4 |
|
61 | 61 | utcnow = datetime.utcnow() |
|
62 | 62 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) |
|
63 | 63 | with pytest.raises(colander.Invalid): |
|
64 | 64 | schema.deserialize(dummy_json) |
|
65 | 65 | |
|
66 | 66 | def test_empty_report_payload(self, report_04_schema):
|
67 | 67 | dummy_json = [{}] |
|
68 | 68 | import colander |
|
69 | 69 | from appenlight.validators import ReportListSchema_0_4 |
|
70 | 70 | utcnow = datetime.utcnow() |
|
71 | 71 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) |
|
72 | 72 | with pytest.raises(colander.Invalid): |
|
73 | 73 | schema.deserialize(dummy_json) |
|
74 | 74 | |
|
75 | 75 | def test_minimal_payload(self): |
|
76 | 76 | from appenlight.validators import ReportListSchema_0_4 |
|
77 | 77 | dummy_json = [{'report_details': [{}]}] |
|
78 | 78 | utcnow = datetime.utcnow() |
|
79 | 79 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) |
|
80 | 80 | deserialized = schema.deserialize(dummy_json) |
|
81 | 81 | |
|
82 | 82 | expected_deserialization = [ |
|
83 | 83 | {'error_type': '', |
|
84 | 84 | 'language': 'unknown', |
|
85 | 85 | 'report_details': [ |
|
86 | 86 | {'username': '', |
|
87 | 87 | 'traceback': None, |
|
88 | 88 | 'extra': None, |
|
89 | 89 | 'frameinfo': None, |
|
90 | 90 | 'url': '', |
|
91 | 91 | 'ip': None, |
|
92 | 92 | 'start_time': utcnow, |
|
93 | 93 | 'group_string': None, |
|
94 | 94 | 'request': {}, |
|
95 | 95 | 'request_stats': None, |
|
96 | 96 | 'end_time': None, |
|
97 | 97 | 'request_id': '', |
|
98 | 98 | 'message': '', |
|
99 | 99 | 'slow_calls': [], |
|
100 | 100 | 'user_agent': ''}], |
|
101 | 101 | 'server': 'unknown', |
|
102 | 102 | 'occurences': 1, |
|
103 | 103 | 'priority': 5, |
|
104 | 104 | 'view_name': '', |
|
105 | 105 | 'client': 'unknown', |
|
106 | 106 | 'http_status': 200, |
|
107 | 107 | 'error': '', |
|
108 | 108 | 'tags': None} |
|
109 | 109 | ] |
|
110 | 110 | assert deserialized == expected_deserialization |
|
111 | 111 | |
|
112 | 112 | def test_full_payload(self): |
|
113 | 113 | import appenlight.tests.payload_examples as payload_examples |
|
114 | 114 | from appenlight.validators import ReportListSchema_0_4 |
|
115 | 115 | utcnow = datetime.utcnow() |
|
116 | 116 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) |
|
117 | 117 | PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_4) |
|
118 | 118 | utcnow = datetime.utcnow() |
|
119 | 119 | PYTHON_PAYLOAD["tags"] = [("foo", 1), ("action", "test"), ("baz", 1.1), |
|
120 | 120 | ("date", |
|
121 | 121 | utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))] |
|
122 | 122 | dummy_json = [PYTHON_PAYLOAD] |
|
123 | 123 | |
|
124 | 124 | deserialized = schema.deserialize(dummy_json) |
|
125 | 125 | assert deserialized[0]['error'] == PYTHON_PAYLOAD['error'] |
|
126 | 126 | assert deserialized[0]['language'] == PYTHON_PAYLOAD['language'] |
|
127 | 127 | assert deserialized[0]['server'] == PYTHON_PAYLOAD['server'] |
|
128 | 128 | assert deserialized[0]['priority'] == PYTHON_PAYLOAD['priority'] |
|
129 | 129 | assert deserialized[0]['view_name'] == PYTHON_PAYLOAD['view_name'] |
|
130 | 130 | assert deserialized[0]['client'] == PYTHON_PAYLOAD['client'] |
|
131 | 131 | assert deserialized[0]['http_status'] == PYTHON_PAYLOAD['http_status'] |
|
132 | 132 | assert deserialized[0]['error'] == PYTHON_PAYLOAD['error'] |
|
133 | 133 | assert deserialized[0]['occurences'] == PYTHON_PAYLOAD['occurences'] |
|
134 | 134 | first_detail = deserialized[0]['report_details'][0] |
|
135 | 135 | payload_detail = PYTHON_PAYLOAD['report_details'][0] |
|
136 | 136 | assert first_detail['username'] == payload_detail['username'] |
|
137 | 137 | assert first_detail['traceback'] == payload_detail['traceback'] |
|
138 | 138 | assert first_detail['url'] == payload_detail['url'] |
|
139 | 139 | assert first_detail['ip'] == payload_detail['ip'] |
|
140 | 140 | assert first_detail['start_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \ |
|
141 | 141 | payload_detail['start_time'] |
|
142 | 142 | assert first_detail['ip'] == payload_detail['ip'] |
|
143 | 143 | assert first_detail['group_string'] is None |
|
144 | 144 | assert first_detail['request_stats'] == payload_detail['request_stats'] |
|
145 | 145 | assert first_detail['end_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \ |
|
146 | 146 | payload_detail['end_time'] |
|
147 | 147 | assert first_detail['request_id'] == payload_detail['request_id'] |
|
148 | 148 | assert first_detail['message'] == payload_detail['message'] |
|
149 | 149 | assert first_detail['user_agent'] == payload_detail['user_agent'] |
|
150 | 150 | slow_call = first_detail['slow_calls'][0] |
|
151 | 151 | expected_slow_call = payload_detail['slow_calls'][0] |
|
152 | 152 | assert slow_call['start'].strftime('%Y-%m-%dT%H:%M:%S.0') == \ |
|
153 | 153 | expected_slow_call['start'] |
|
154 | 154 | assert slow_call['end'].strftime('%Y-%m-%dT%H:%M:%S.0') == \ |
|
155 | 155 | expected_slow_call['end'] |
|
156 | 156 | assert slow_call['statement'] == expected_slow_call['statement'] |
|
157 | 157 | assert slow_call['parameters'] == expected_slow_call['parameters'] |
|
158 | 158 | assert slow_call['type'] == expected_slow_call['type'] |
|
159 | 159 | assert slow_call['subtype'] == expected_slow_call['subtype'] |
|
160 | 160 | assert slow_call['location'] == '' |
|
161 | 161 | assert deserialized[0]['tags'] == [ |
|
162 | 162 | ('foo', 1), ('action', 'test'), |
|
163 | 163 | ('baz', 1.1), ('date', utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))] |
|
164 | 164 | |
|
165 | 165 | |
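# Sentry protocol v7: parse_sentry_event() classifies a payload as either a
# LOG or an ERROR_REPORT event and normalizes its tags, extra data, request
# and traceback into AppEnlight's internal report fields.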
|
166 | 166 | class TestSentryProto_7(object): |
|
167 | 167 | def test_log_payload(self): |
|
168 | 168 | import appenlight.tests.payload_examples as payload_examples |
|
169 | 169 | from appenlight.lib.enums import ParsedSentryEventType |
|
170 | 170 | from appenlight.lib.utils.sentry import parse_sentry_event |
|
171 | 171 | event_dict, event_type = parse_sentry_event( |
|
172 | 172 | payload_examples.SENTRY_LOG_PAYLOAD_7) |
|
173 | 173 | assert ParsedSentryEventType.LOG == event_type |
|
174 | 174 | assert event_dict['log_level'] == 'CRITICAL' |
|
175 | 175 | assert event_dict['message'] == 'TEST from django logging' |
|
176 | 176 | assert event_dict['namespace'] == 'testlogger' |
|
177 | 177 | assert event_dict['request_id'] == '9a6172f2e6d2444582f83a6c333d9cfb' |
|
178 | 178 | assert event_dict['server'] == 'ergo-virtual-machine' |
|
179 | 179 | assert event_dict['date'] == datetime.utcnow().date().strftime( |
|
180 | 180 | '%Y-%m-%dT%H:%M:%SZ') |
|
181 | 181 | tags = [('site', 'example.com'), |
|
182 | 182 | ('sys.argv', ["'manage.py'", "'runserver'"]), |
|
183 | 183 | ('price', 6), |
|
184 | 184 | ('tag', "'extra'"), |
|
185 | 185 | ('dupa', True), |
|
186 | 186 | ('project', 'sentry'), |
|
187 | 187 | ('sentry_culprit', 'testlogger in index'), |
|
188 | 188 | ('sentry_language', 'python'), |
|
189 | 189 | ('sentry_release', 'test')] |
|
190 | 190 | assert sorted(event_dict['tags']) == sorted(tags) |
|
191 | 191 | |
|
192 | 192 | def test_report_payload(self): |
|
193 | 193 | import appenlight.tests.payload_examples as payload_examples |
|
194 | 194 | from appenlight.lib.enums import ParsedSentryEventType |
|
195 | 195 | from appenlight.lib.utils.sentry import parse_sentry_event |
|
196 | 196 | utcnow = datetime.utcnow().date().strftime('%Y-%m-%dT%H:%M:%SZ') |
|
197 | 197 | event_dict, event_type = parse_sentry_event( |
|
198 | 198 | payload_examples.SENTRY_PYTHON_PAYLOAD_7) |
|
199 | 199 | assert ParsedSentryEventType.ERROR_REPORT == event_type |
|
200 | 200 | assert event_dict['client'] == 'sentry' |
|
201 | 201 | assert event_dict[ |
|
202 | 202 | 'error'] == 'Exception: test 500 ' \ |
|
203 | 203 | '\u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105' |
|
204 | 204 | assert event_dict['language'] == 'python' |
|
205 | 205 | assert event_dict['ip'] == '127.0.0.1' |
|
206 | 206 | assert event_dict['request_id'] == '9fae652c8c1c4d6a8eee09260f613a98' |
|
207 | 207 | assert event_dict['server'] == 'ergo-virtual-machine' |
|
208 | 208 | assert event_dict['start_time'] == utcnow |
|
209 | 209 | assert event_dict['url'] == 'http://127.0.0.1:8000/error' |
|
210 | 210 | assert event_dict['user_agent'] == 'Mozilla/5.0 (X11; Linux x86_64) ' \ |
|
211 | 211 | 'AppleWebKit/537.36 (KHTML, ' \ |
|
212 | 212 | 'like Gecko) Chrome/47.0.2526.106 ' \ |
|
213 | 213 | 'Safari/537.36' |
|
214 | 214 | assert event_dict['view_name'] == 'djangoapp.views in error' |
|
215 | 215 | tags = [('site', 'example.com'), ('sentry_release', 'test')] |
|
216 | 216 | assert sorted(event_dict['tags']) == sorted(tags) |
|
217 | 217 | extra = [('sys.argv', ["'manage.py'", "'runserver'"]), |
|
218 | 218 | ('project', 'sentry')] |
|
219 | 219 | assert sorted(event_dict['extra']) == sorted(extra) |
|
220 | 220 | request = event_dict['request'] |
|
221 | 221 | assert request['url'] == 'http://127.0.0.1:8000/error' |
|
222 | 222 | assert request['cookies'] == {'appenlight': 'X'} |
|
223 | 223 | assert request['data'] is None |
|
224 | 224 | assert request['method'] == 'GET' |
|
225 | 225 | assert request['query_string'] == '' |
|
226 | 226 | assert request['env'] == {'REMOTE_ADDR': '127.0.0.1', |
|
227 | 227 | 'SERVER_NAME': 'localhost', |
|
228 | 228 | 'SERVER_PORT': '8000'} |
|
229 | 229 | assert request['headers'] == { |
|
230 | 230 | 'Accept': 'text/html,application/xhtml+xml,' |
|
231 | 231 | 'application/xml;q=0.9,image/webp,*/*;q=0.8', |
|
232 | 232 | 'Accept-Encoding': 'gzip, deflate, sdch', |
|
233 | 233 | 'Accept-Language': 'en-US,en;q=0.8,pl;q=0.6', |
|
234 | 234 | 'Connection': 'keep-alive', |
|
235 | 235 | 'Content-Length': '', |
|
236 | 236 | 'Content-Type': 'text/plain', |
|
237 | 237 | 'Cookie': 'appenlight=X', |
|
238 | 238 | 'Dnt': '1', |
|
239 | 239 | 'Host': '127.0.0.1:8000', |
|
240 | 240 | 'Upgrade-Insecure-Requests': '1', |
|
241 | 241 | 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) ' |
|
242 | 242 | 'AppleWebKit/537.36 (KHTML, like Gecko) ' |
|
243 | 243 | 'Chrome/47.0.2526.106 Safari/537.36'} |
|
244 | 244 | traceback = event_dict['traceback'] |
|
245 | 245 | assert traceback[0]['cline'] == 'response = wrapped_callback(request, ' \ |
|
246 | 246 | '*callback_args, **callback_kwargs)' |
|
247 | 247 | assert traceback[0]['file'] == 'django/core/handlers/base.py' |
|
248 | 248 | assert traceback[0]['fn'] == 'get_response' |
|
249 | 249 | assert traceback[0]['line'] == 111 |
|
250 | 250 | assert traceback[0]['module'] == 'django.core.handlers.base' |
|
251 | 251 | |
|
252 | 252 | assert traceback[1]['cline'] == "raise Exception(u'test 500 " \ |
|
253 | 253 | "\u0142\xf3\u201c\u0107\u201c\u0107" \ |
|
254 | 254 | "\u017c\u0105')" |
|
255 | 255 | assert traceback[1]['file'] == 'djangoapp/views.py' |
|
256 | 256 | assert traceback[1]['fn'] == 'error' |
|
257 | 257 | assert traceback[1]['line'] == 84 |
|
258 | 258 | assert traceback[1]['module'] == 'djangoapp.views' |
|
259 | 259 | assert sorted(traceback[1]['vars']) == sorted([ |
|
260 | 260 | ('c', |
|
261 | 261 | '<sqlite3.Cursor object at 0x7fe7c82af8f0>'), |
|
262 | 262 | ('request', |
|
263 | 263 | '<WSGIRequest at 0x140633490316304>'), |
|
264 | 264 | ('conn', |
|
265 | 265 | '<sqlite3.Connection object at 0x7fe7c8b23bf8>')]) |
|
266 | 266 | |
|
267 | 267 | |
|
268 | 268 | class TestAPIReports_0_5_Validation(object): |
|
269 | 269 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
270 | 270 | def test_no_payload(self, dummy_json): |
|
271 | 271 | import colander |
|
272 | 272 | from appenlight.validators import ReportListSchema_0_5 |
|
273 | 273 | utcnow = datetime.utcnow() |
|
274 | 274 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
275 | 275 | with pytest.raises(colander.Invalid): |
|
276 | 276 | schema.deserialize(dummy_json) |
|
277 | 277 | |
|
278 | 278 | def test_empty_report_payload(self):
|
279 | 279 | dummy_json = [{}] |
|
280 | 280 | import colander |
|
281 | 281 | from appenlight.validators import ReportListSchema_0_5 |
|
282 | 282 | utcnow = datetime.utcnow() |
|
283 | 283 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
284 | 284 | with pytest.raises(colander.Invalid): |
|
285 | 285 | schema.deserialize(dummy_json) |
|
286 | 286 | |
|
287 | 287 | def test_minimal_payload(self): |
|
288 | 288 | dummy_json = [{'report_details': [{}]}] |
|
289 | 289 | from appenlight.validators import ReportListSchema_0_5 |
|
290 | 290 | utcnow = datetime.utcnow() |
|
291 | 291 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
292 | 292 | |
|
293 | 293 | deserialized = schema.deserialize(dummy_json) |
|
294 | 294 | |
|
295 | 295 | expected_deserialization = [ |
|
296 | 296 | {'language': 'unknown', |
|
297 | 297 | 'server': 'unknown', |
|
298 | 298 | 'occurences': 1, |
|
299 | 299 | 'priority': 5, |
|
300 | 300 | 'view_name': '', |
|
301 | 301 | 'client': 'unknown', |
|
302 | 302 | 'http_status': 200, |
|
303 | 303 | 'error': '', |
|
304 | 304 | 'tags': None, |
|
305 | 305 | 'username': '', |
|
306 | 306 | 'traceback': None, |
|
307 | 307 | 'extra': None, |
|
308 | 308 | 'url': '', |
|
309 | 309 | 'ip': None, |
|
310 | 310 | 'start_time': utcnow, |
|
311 | 311 | 'group_string': None, |
|
312 | 312 | 'request': {}, |
|
313 | 313 | 'request_stats': None, |
|
314 | 314 | 'end_time': None, |
|
315 | 315 | 'request_id': '', |
|
316 | 316 | 'message': '', |
|
317 | 317 | 'slow_calls': [], |
|
318 | 318 | 'user_agent': '' |
|
319 | 319 | } |
|
320 | 320 | ] |
|
321 | 321 | assert deserialized == expected_deserialization |
|
322 | 322 | |
|
323 | 323 | def test_full_payload(self): |
|
324 | 324 | import appenlight.tests.payload_examples as payload_examples |
|
325 | 325 | from appenlight.validators import ReportListSchema_0_5 |
|
326 | 326 | PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_5) |
|
327 | 327 | utcnow = datetime.utcnow() |
|
328 | 328 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
329 | 329 | PYTHON_PAYLOAD["tags"] = [("foo", 1), ("action", "test"), ("baz", 1.1), |
|
330 | 330 | ("date", |
|
331 | 331 | utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))] |
|
332 | 332 | dummy_json = [PYTHON_PAYLOAD] |
|
333 | 333 | deserialized = schema.deserialize(dummy_json)[0] |
|
334 | 334 | assert deserialized['error'] == PYTHON_PAYLOAD['error'] |
|
335 | 335 | assert deserialized['language'] == PYTHON_PAYLOAD['language'] |
|
336 | 336 | assert deserialized['server'] == PYTHON_PAYLOAD['server'] |
|
337 | 337 | assert deserialized['priority'] == PYTHON_PAYLOAD['priority'] |
|
338 | 338 | assert deserialized['view_name'] == PYTHON_PAYLOAD['view_name'] |
|
339 | 339 | assert deserialized['client'] == PYTHON_PAYLOAD['client'] |
|
340 | 340 | assert deserialized['http_status'] == PYTHON_PAYLOAD['http_status'] |
|
341 | 341 | assert deserialized['error'] == PYTHON_PAYLOAD['error'] |
|
342 | 342 | assert deserialized['occurences'] == PYTHON_PAYLOAD['occurences'] |
|
343 | 343 | assert deserialized['username'] == PYTHON_PAYLOAD['username'] |
|
344 | 344 | assert deserialized['traceback'] == PYTHON_PAYLOAD['traceback'] |
|
345 | 345 | assert deserialized['url'] == PYTHON_PAYLOAD['url'] |
|
346 | 346 | assert deserialized['ip'] == PYTHON_PAYLOAD['ip'] |
|
347 | 347 | assert deserialized['start_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \ |
|
348 | 348 | PYTHON_PAYLOAD['start_time'] |
|
349 | 349 | assert deserialized['ip'] == PYTHON_PAYLOAD['ip'] |
|
350 | 350 | assert deserialized['group_string'] is None |
|
351 | 351 | assert deserialized['request_stats'] == PYTHON_PAYLOAD['request_stats'] |
|
352 | 352 | assert deserialized['end_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \ |
|
353 | 353 | PYTHON_PAYLOAD['end_time'] |
|
354 | 354 | assert deserialized['request_id'] == PYTHON_PAYLOAD['request_id'] |
|
355 | 355 | assert deserialized['message'] == PYTHON_PAYLOAD['message'] |
|
356 | 356 | assert deserialized['user_agent'] == PYTHON_PAYLOAD['user_agent'] |
|
357 | 357 | assert deserialized['slow_calls'][0]['start'].strftime( |
|
358 | 358 | '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][ |
|
359 | 359 | 'start'] |
|
360 | 360 | assert deserialized['slow_calls'][0]['end'].strftime( |
|
361 | 361 | '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][ |
|
362 | 362 | 'end'] |
|
363 | 363 | assert deserialized['slow_calls'][0]['statement'] == \ |
|
364 | 364 | PYTHON_PAYLOAD['slow_calls'][0]['statement'] |
|
365 | 365 | assert deserialized['slow_calls'][0]['parameters'] == \ |
|
366 | 366 | PYTHON_PAYLOAD['slow_calls'][0]['parameters'] |
|
367 | 367 | assert deserialized['slow_calls'][0]['type'] == \ |
|
368 | 368 | PYTHON_PAYLOAD['slow_calls'][0]['type'] |
|
369 | 369 | assert deserialized['slow_calls'][0]['subtype'] == \ |
|
370 | 370 | PYTHON_PAYLOAD['slow_calls'][0]['subtype'] |
|
371 | 371 | assert deserialized['slow_calls'][0]['location'] == '' |
|
372 | 372 | assert deserialized['tags'] == [ |
|
373 | 373 | ('foo', 1), ('action', 'test'), |
|
374 | 374 | ('baz', 1.1), ('date', utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))] |
|
375 | 375 | |
|
376 | 376 | |
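# Log payload validation: a missing or empty date falls back to the current
# time, and ISO-8601 dates without seconds or microseconds still parse.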
|
377 | 377 | @pytest.mark.usefixtures('log_schema') |
|
378 | 378 | class TestAPILogsValidation(object): |
|
379 | 379 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
380 | 380 | def test_no_payload(self, dummy_json, log_schema): |
|
381 | 381 | import colander |
|
382 | 382 | |
|
383 | 383 | with pytest.raises(colander.Invalid): |
|
384 | 384 | log_schema.deserialize(dummy_json) |
|
385 | 385 | |
|
386 | 386 | def test_minimal_payload(self, log_schema): |
|
387 | 387 | dummy_json = [{}] |
|
388 | 388 | deserialized = log_schema.deserialize(dummy_json)[0] |
|
389 | 389 | expected = {'log_level': 'UNKNOWN', |
|
390 | 390 | 'namespace': '', |
|
391 | 391 | 'server': 'unknown', |
|
392 | 392 | 'request_id': '', |
|
393 | 393 | 'primary_key': None, |
|
394 | 394 | 'date': datetime.utcnow(), |
|
395 | 395 | 'message': '', |
|
396 | 396 | 'tags': None} |
|
397 | 397 | assert deserialized['log_level'] == expected['log_level'] |
|
398 | 398 | assert deserialized['message'] == expected['message'] |
|
399 | 399 | assert deserialized['namespace'] == expected['namespace'] |
|
400 | 400 | assert deserialized['request_id'] == expected['request_id'] |
|
401 | 401 | assert deserialized['server'] == expected['server'] |
|
402 | 402 | assert deserialized['tags'] == expected['tags'] |
|
403 | 403 | assert deserialized['primary_key'] == expected['primary_key'] |
|
404 | 404 | |
|
405 | 405 | def test_normal_payload(self, log_schema): |
|
406 | 406 | import appenlight.tests.payload_examples as payload_examples |
|
407 | 407 | deserialized = log_schema.deserialize(payload_examples.LOG_EXAMPLES)[0] |
|
408 | 408 | expected = payload_examples.LOG_EXAMPLES[0] |
|
409 | 409 | assert deserialized['log_level'] == expected['log_level'] |
|
410 | 410 | assert deserialized['message'] == expected['message'] |
|
411 | 411 | assert deserialized['namespace'] == expected['namespace'] |
|
412 | 412 | assert deserialized['request_id'] == expected['request_id'] |
|
413 | 413 | assert deserialized['server'] == expected['server'] |
|
414 | 414 | assert deserialized['date'].strftime('%Y-%m-%dT%H:%M:%S.%f') == \ |
|
415 | 415 | expected['date'] |
|
416 | 416 | assert deserialized['tags'][0][0] == "tag_name" |
|
417 | 417 | assert deserialized['tags'][0][1] == "tag_value" |
|
418 | 418 | assert deserialized['tags'][1][0] == "tag_name2" |
|
419 | 419 | assert deserialized['tags'][1][1] == 2 |
|
420 | 420 | |
|
421 | 421 | def test_normal_payload_date_without_microseconds(self, log_schema): |
|
422 | 422 | import appenlight.tests.payload_examples as payload_examples |
|
423 | 423 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
424 | 424 | LOG_EXAMPLE[0]['date'] = datetime.utcnow().strftime( |
|
425 | 425 | '%Y-%m-%dT%H:%M:%S') |
|
426 | 426 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
427 | 427 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M:%S') == \ |
|
428 | 428 | LOG_EXAMPLE[0]['date'] |
|
429 | 429 | |
|
430 | 430 | def test_normal_payload_date_without_seconds(self, log_schema): |
|
431 | 431 | import appenlight.tests.payload_examples as payload_examples |
|
432 | 432 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
433 | 433 | LOG_EXAMPLE[0]['date'] = datetime.utcnow().date().strftime( |
|
434 | 434 | '%Y-%m-%dT%H:%M') |
|
435 | 435 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
436 | 436 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') == \ |
|
437 | 437 | LOG_EXAMPLE[0]['date'] |
|
438 | 438 | |
|
439 | 439 | def test_payload_empty_date(self, log_schema): |
|
440 | 440 | import appenlight.tests.payload_examples as payload_examples |
|
441 | 441 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
442 | 442 | LOG_EXAMPLE[0]['date'] = None |
|
443 | 443 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
444 | 444 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None |
|
445 | 445 | |
|
446 | 446 | def test_payload_no_date(self, log_schema): |
|
447 | 447 | import appenlight.tests.payload_examples as payload_examples |
|
448 | 448 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
449 | 449 | LOG_EXAMPLE[0].pop('date', None) |
|
450 | 450 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
451 | 451 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None |
|
452 | 452 | |
|
453 | 453 | |
|
454 | 454 | @pytest.mark.usefixtures('general_metrics_schema') |
|
455 | 455 | class TestAPIGeneralMetricsValidation(object): |
|
456 | 456 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
457 | 457 | def test_no_payload(self, dummy_json, general_metrics_schema): |
|
458 | 458 | import colander |
|
459 | 459 | |
|
460 | 460 | with pytest.raises(colander.Invalid): |
|
461 | 461 | general_metrics_schema.deserialize(dummy_json) |
|
462 | 462 | |
|
463 | 463 | def test_minimal_payload(self, general_metrics_schema): |
|
464 | 464 | dummy_json = [{}] |
|
465 | 465 | deserialized = general_metrics_schema.deserialize(dummy_json)[0] |
|
466 | 466 | expected = {'namespace': '', |
|
467 | 467 | 'server_name': 'unknown', |
|
468 | 468 | 'tags': None, |
|
469 | 469 | 'timestamp': datetime.utcnow()} |
|
470 | 470 | assert deserialized['namespace'] == expected['namespace'] |
|
471 | 471 | assert deserialized['server_name'] == expected['server_name'] |
|
472 | 472 | assert deserialized['tags'] == expected['tags'] |
|
473 | 473 | |
|
474 | 474 | def test_normal_payload(self, general_metrics_schema): |
|
475 | 475 | import appenlight.tests.payload_examples as payload_examples |
|
476 | 476 | dummy_json = [payload_examples.METRICS_PAYLOAD] |
|
477 | 477 | deserialized = general_metrics_schema.deserialize(dummy_json)[0] |
|
478 | 478 | expected = {'namespace': 'some.monitor', |
|
479 | 479 | 'server_name': 'server.name', |
|
480 | 480 | 'tags': [('usage_foo', 15.5), ('usage_bar', 63)], |
|
481 | 481 | 'timestamp': datetime.utcnow()} |
|
482 | 482 | assert deserialized['namespace'] == expected['namespace'] |
|
483 | 483 | assert deserialized['server_name'] == expected['server_name'] |
|
484 | 484 | assert deserialized['tags'] == expected['tags'] |
|
485 | 485 | |
|
486 | 486 | |
|
487 | 487 | @pytest.mark.usefixtures('request_metrics_schema') |
|
488 | 488 | class TestAPIRequestMetricsValidation(object): |
|
489 | 489 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
490 | 490 | def test_no_payload(self, dummy_json, request_metrics_schema): |
|
491 | 491 | import colander |
|
492 | 492 | |
|
493 | 493 | with pytest.raises(colander.Invalid): |
|
494 | 494 | print(request_metrics_schema.deserialize(dummy_json)) |
|
495 | 495 | |
|
496 | 496 | def test_normal_payload(self, request_metrics_schema): |
|
497 | 497 | import appenlight.tests.payload_examples as payload_examples |
|
498 | 498 | dummy_json = payload_examples.REQUEST_METRICS_EXAMPLES |
|
499 | 499 | deserialized = request_metrics_schema.deserialize(dummy_json)[0] |
|
500 | 500 | expected = {'metrics': [('dir/module:func', |
|
501 | 501 | {'custom': 0.0, |
|
502 | 502 | 'custom_calls': 0.0, |
|
503 | 503 | 'main': 0.01664, |
|
504 | 504 | 'nosql': 0.00061, |
|
505 | 505 | 'nosql_calls': 23.0, |
|
506 | 506 | 'remote': 0.0, |
|
507 | 507 | 'remote_calls': 0.0, |
|
508 | 508 | 'requests': 1, |
|
509 | 509 | 'sql': 0.00105, |
|
510 | 510 | 'sql_calls': 2.0, |
|
511 | 511 | 'tmpl': 0.0, |
|
512 | 512 | 'tmpl_calls': 0.0}), |
|
513 | 513 | ('SomeView.function', |
|
514 | 514 | {'custom': 0.0, |
|
515 | 515 | 'custom_calls': 0.0, |
|
516 | 516 | 'main': 0.647261, |
|
517 | 517 | 'nosql': 0.306554, |
|
518 | 518 | 'nosql_calls': 140.0, |
|
519 | 519 | 'remote': 0.0, |
|
520 | 520 | 'remote_calls': 0.0, |
|
521 | 521 | 'requests': 28, |
|
522 | 522 | 'sql': 0.0, |
|
523 | 523 | 'sql_calls': 0.0, |
|
524 | 524 | 'tmpl': 0.0, |
|
525 | 525 | 'tmpl_calls': 0.0})], |
|
526 | 526 | 'server': 'some.server.hostname', |
|
527 | 527 | 'timestamp': datetime.utcnow()} |
|
528 | 528 | assert deserialized['server'] == expected['server'] |
|
529 | 529 | metric = deserialized['metrics'][0] |
|
530 | 530 | expected_metric = expected['metrics'][0] |
|
531 | 531 | assert metric[0] == expected_metric[0] |
|
532 | 532 | assert sorted(metric[1].items()) == sorted(expected_metric[1].items()) |
|
533 | 533 | |
|
534 | 534 | |
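# View-level tests: the remaining classes call the Pyramid view callables
# directly with a mocked matched_route and a DummyContext resource, then
# assert on the resulting database state.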
|
535 | 535 | @pytest.mark.usefixtures('default_application') |
|
536 | 536 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
537 | 537 | class TestAPIReportsView(object): |
|
538 | 538 | def test_no_json_payload(self, default_application): |
|
539 | 539 | import colander |
|
540 | 540 | from appenlight.models.services.application import ApplicationService |
|
541 | 541 | from appenlight.views.api import reports_create |
|
542 | 542 | |
|
543 | 543 | context = DummyContext() |
|
544 | 544 | context.resource = ApplicationService.by_id(1) |
|
545 | 545 | request = testing.DummyRequest( |
|
546 | 546 | headers={'Content-Type': 'application/json'}) |
|
547 | 547 | request.unsafe_json_body = '' |
|
548 | 548 | request.context = context |
|
549 | 549 | route = mock.Mock() |
|
550 | 550 | route.name = 'api_reports' |
|
551 | 551 | request.matched_route = route |
|
552 | 552 | with pytest.raises(colander.Invalid): |
|
553 | 553 | response = reports_create(request) |
|
554 | 554 | |
|
555 | 555 | def test_single_proper_json_0_5_payload(self): |
|
556 | 556 | import appenlight.tests.payload_examples as payload_examples |
|
557 | 557 | from appenlight.views.api import reports_create |
|
558 | 558 | from appenlight.models.services.application import ApplicationService |
|
559 | 559 | from appenlight.models.report_group import ReportGroup |
|
560 | 560 | route = mock.Mock() |
|
561 | 561 | route.name = 'api_reports' |
|
562 | 562 | request = pyramid.threadlocal.get_current_request() |
|
563 | 563 | context = DummyContext() |
|
564 | 564 | context.resource = ApplicationService.by_id(1) |
|
565 | 565 | request.context = context |
|
566 | 566 | request.matched_route = route |
|
567 | 567 | PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5 |
|
568 | 568 | request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD)] |
|
569 | 569 | reports_create(request) |
|
570 | 570 | query = DBSession.query(ReportGroup) |
|
571 | 571 | report = query.first() |
|
572 | 572 | assert query.count() == 1 |
|
573 | 573 | assert report.total_reports == 1 |
|
574 | 574 | |
|
575 | 575 | def test_grouping_0_5(self): |
|
576 | 576 | import appenlight.tests.payload_examples as payload_examples |
|
577 | 577 | from appenlight.views.api import reports_create |
|
578 | 578 | from appenlight.models.services.application import ApplicationService |
|
579 | 579 | from appenlight.models.report_group import ReportGroup |
|
580 | 580 | route = mock.Mock() |
|
581 | 581 | route.name = 'api_reports' |
|
582 | 582 | request = pyramid.threadlocal.get_current_request() |
|
583 | 583 | context = DummyContext() |
|
584 | 584 | context.resource = ApplicationService.by_id(1) |
|
585 | 585 | request.context = context |
|
586 | 586 | request.matched_route = route |
|
587 | 587 | PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5 |
|
588 | 588 | request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD), |
|
589 | 589 | copy.deepcopy(PYTHON_PAYLOAD)] |
|
590 | 590 | reports_create(request) |
|
591 | 591 | query = DBSession.query(ReportGroup) |
|
592 | 592 | report = query.first() |
|
593 | 593 | assert query.count() == 1 |
|
594 | 594 | assert report.total_reports == 2 |
|
595 | 595 | |
|
596 | 596 | def test_grouping_different_reports_0_5(self): |
|
597 | 597 | import appenlight.tests.payload_examples as payload_examples |
|
598 | 598 | from appenlight.views.api import reports_create |
|
599 | 599 | from appenlight.models.services.application import ApplicationService |
|
600 | 600 | from appenlight.models.report_group import ReportGroup |
|
601 | 601 | route = mock.Mock() |
|
602 | 602 | route.name = 'api_reports' |
|
603 | 603 | request = pyramid.threadlocal.get_current_request() |
|
604 | 604 | context = DummyContext() |
|
605 | 605 | context.resource = ApplicationService.by_id(1) |
|
606 | 606 | request.context = context |
|
607 | 607 | request.matched_route = route |
|
608 | 608 | PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5 |
|
609 | 609 | PARSED_REPORT_404 = payload_examples.PARSED_REPORT_404 |
|
610 | 610 | request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD), |
|
611 | 611 | copy.deepcopy(PARSED_REPORT_404)] |
|
612 | 612 | reports_create(request) |
|
613 | 613 | query = DBSession.query(ReportGroup) |
|
614 | 614 | report = query.first() |
|
615 | 615 | assert query.count() == 2 |
|
616 | 616 | assert report.total_reports == 1 |
|
617 | 617 | |
|
618 | 618 | |
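# Airbrake compatibility: the notifier XML is parsed with defusedxml and the
# resulting dict is run through the same ReportListSchema_0_5 as native
# payloads.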
|
619 | 619 | @pytest.mark.usefixtures('default_application') |
|
620 | 620 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
621 | 621 | class TestAirbrakeXMLView(object): |
|
622 | 622 | |
|
623 | 623 | def test_normal_payload_parsing(self): |
|
624 | 624 | import datetime |
|
625 | 625 | import defusedxml.ElementTree as ElementTree |
|
626 | 626 | import appenlight.tests.payload_examples as payload_examples |
|
627 | 627 | from appenlight.lib.utils.airbrake import parse_airbrake_xml |
|
628 | 628 | from appenlight.validators import ReportListSchema_0_5 |
|
629 | 629 | |
|
630 | 630 | context = DummyContext() |
|
631 | 631 | request = testing.DummyRequest( |
|
632 | 632 | headers={'Content-Type': 'application/xml'}) |
|
633 | 633 | request.context = context |
|
634 | 634 | request.context.possibly_public = False |
|
635 | 635 | root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE) |
|
636 | 636 | request.context.airbrake_xml_etree = root |
|
637 | 637 | error_dict = parse_airbrake_xml(request) |
|
638 | 638 | schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow()) |
|
639 | 639 | deserialized_report = schema.deserialize([error_dict])[0] |
|
640 | 640 | assert deserialized_report['client'] == 'Airbrake Notifier' |
|
641 | 641 | assert deserialized_report['error'] == 'NameError: undefined local variable or method `sdfdfdf\' for #<#<Class:0x000000039a8b90>:0x00000002c53df0>' |
|
642 | 642 | assert deserialized_report['http_status'] == 500 |
|
643 | 643 | assert deserialized_report['language'] == 'unknown' |
|
644 | 644 | assert deserialized_report['message'] == '' |
|
645 | 645 | assert deserialized_report['occurences'] == 1 |
|
646 | 646 | assert deserialized_report['priority'] == 5 |
|
647 | 647 | d_request = deserialized_report['request'] |
|
648 | 648 | assert d_request['GET'] == {'test': '1234'} |
|
649 | 649 | assert d_request['action_dispatch.request.parameters'] == { |
|
650 | 650 | 'action': 'index', |
|
651 | 651 | 'controller': 'welcome', |
|
652 | 652 | 'test': '1234'} |
|
653 | 653 | assert deserialized_report['request_id'] == 'c11b2267f3ad8b00a1768cae35559fa1' |
|
654 | 654 | assert deserialized_report['server'] == 'ergo-desktop' |
|
655 | 655 | assert deserialized_report['traceback'][0] == { |
|
656 | 656 | 'cline': 'block in start_thread', |
|
657 | 657 | 'file': '/home/ergo/.rbenv/versions/1.9.3-p327/lib/ruby/1.9.1/webrick/server.rb', |
|
658 | 658 | 'fn': 'block in start_thread', |
|
659 | 659 | 'line': '191', |
|
660 | 660 | 'module': '', |
|
661 | 661 | 'vars': {}} |
|
662 | 662 | assert deserialized_report['traceback'][-1] == { |
|
663 | 663 | 'cline': '_app_views_welcome_index_html_erb___2570061166873166679_31748940', |
|
664 | 664 | 'file': '[PROJECT_ROOT]/app/views/welcome/index.html.erb', |
|
665 | 665 | 'fn': '_app_views_welcome_index_html_erb___2570061166873166679_31748940', |
|
666 | 666 | 'line': '3', |
|
667 | 667 | 'module': '', |
|
668 | 668 | 'vars': {}} |
|
669 | 669 | assert deserialized_report['url'] == 'http://0.0.0.0:3000/welcome/index?test=1234' |
|
670 | 670 | assert deserialized_report['view_name'] == 'welcome:index' |
|
671 | 671 | |
|
672 | 672 | def test_normal_payload_view(self): |
|
673 | 673 | import defusedxml.ElementTree as ElementTree |
|
674 | 674 | import appenlight.tests.payload_examples as payload_examples |
|
675 | 675 | |
|
676 | 676 | from appenlight.models.services.application import ApplicationService |
|
677 | 677 | from appenlight.views.api import airbrake_xml_compat |
|
678 | 678 | |
|
679 | 679 | context = DummyContext() |
|
680 | 680 | context.resource = ApplicationService.by_id(1) |
|
681 | 681 | request = testing.DummyRequest( |
|
682 | 682 | headers={'Content-Type': 'application/xml'}) |
|
683 | 683 | request.context = context |
|
684 | 684 | request.context.possibly_public = False |
|
685 | 685 | root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE) |
|
686 | 686 | request.context.airbrake_xml_etree = root |
|
687 | 687 | route = mock.Mock() |
|
688 | 688 | route.name = 'api_airbrake' |
|
689 | 689 | request.matched_route = route |
|
690 | 690 | result = airbrake_xml_compat(request) |
|
691 | 691 | assert '<notice><id>' in result |
|
692 | 692 | |
|
693 | 693 | |
|
694 | 694 | @pytest.mark.usefixtures('default_application') |
|
695 | 695 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
696 | 696 | class TestAPILogView(object): |
|
697 | 697 | def test_no_json_payload(self, base_app): |
|
698 | 698 | import colander |
|
699 | 699 | from appenlight.models.services.application import ApplicationService |
|
700 | 700 | from appenlight.views.api import logs_create |
|
701 | 701 | |
|
702 | 702 | context = DummyContext() |
|
703 | 703 | context.resource = ApplicationService.by_id(1) |
|
704 | 704 | request = testing.DummyRequest( |
|
705 | 705 | headers={'Content-Type': 'application/json'}) |
|
706 | 706 | request.context = context |
|
707 | 707 | request.registry = base_app.registry |
|
708 | 708 | request.unsafe_json_body = '' |
|
709 | 709 | route = mock.Mock() |
|
710 | 710 | route.name = 'api_logs' |
|
711 | 711 | request.matched_route = route |
|
712 | 712 | with pytest.raises(colander.Invalid): |
|
713 | 713 | response = logs_create(request) |
|
714 | 714 | |
|
715 | 715 | def test_single_json_payload(self): |
|
716 | 716 | import appenlight.tests.payload_examples as payload_examples |
|
717 | 717 | from appenlight.models.log import Log |
|
718 | 718 | from appenlight.views.api import logs_create |
|
719 | 719 | from appenlight.models.services.application import ApplicationService |
|
720 | 720 | route = mock.Mock() |
|
721 | 721 | route.name = 'api_logs' |
|
722 | 722 | request = pyramid.threadlocal.get_current_request() |
|
723 | 723 | context = DummyContext() |
|
724 | 724 | context.resource = ApplicationService.by_id(1) |
|
725 | 725 | request.context = context |
|
726 | 726 | request.matched_route = route |
|
727 | 727 | request.unsafe_json_body = [copy.deepcopy( |
|
728 | 728 | payload_examples.LOG_EXAMPLES[0])] |
|
729 | 729 | logs_create(request) |
|
730 | 730 | query = DBSession.query(Log) |
|
731 | 731 | log = query.first() |
|
732 | 732 | assert query.count() == 1 |
|
733 | 733 | assert log.message == "OMG ValueError happened" |
|
734 | 734 | |
|
735 | 735 | def test_multiple_json_payload(self): |
|
736 | 736 | import appenlight.tests.payload_examples as payload_examples |
|
737 | 737 | from appenlight.models.log import Log |
|
738 | 738 | from appenlight.views.api import logs_create |
|
739 | 739 | from appenlight.models.services.application import ApplicationService |
|
740 | 740 | route = mock.Mock() |
|
741 | 741 | route.name = 'api_logs' |
|
742 | 742 | request = pyramid.threadlocal.get_current_request() |
|
743 | 743 | context = DummyContext() |
|
744 | 744 | context.resource = ApplicationService.by_id(1) |
|
745 | 745 | request.context = context |
|
746 | 746 | request.matched_route = route |
|
747 | 747 | LOG_PAYLOAD = payload_examples.LOG_EXAMPLES[0] |
|
748 | 748 | LOG_PAYLOAD2 = payload_examples.LOG_EXAMPLES[1] |
|
749 | 749 | request.unsafe_json_body = copy.deepcopy([LOG_PAYLOAD, LOG_PAYLOAD2]) |
|
750 | 750 | logs_create(request) |
|
751 | 751 | query = DBSession.query(Log).order_by(sa.asc(Log.log_id)) |
|
752 | 752 | assert query.count() == 2 |
|
753 | 753 | assert query[0].message == "OMG ValueError happened" |
|
754 | 754 | assert query[1].message == "OMG ValueError happened2" |
|
755 | 755 | |
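# Log entries sharing a primary_key overwrite one another, so only the last
# of the two payloads below survives ingestion.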
|
756 | 756 | def test_public_key_rewriting(self): |
|
757 | 757 | import appenlight.tests.payload_examples as payload_examples |
|
758 | 758 | from appenlight.models.log import Log |
|
759 | 759 | from appenlight.views.api import logs_create |
|
760 | 760 | from appenlight.models.services.application import ApplicationService |
|
761 | 761 | route = mock.Mock() |
|
762 | 762 | route.name = 'api_logs' |
|
763 | 763 | request = pyramid.threadlocal.get_current_request() |
|
764 | 764 | context = DummyContext() |
|
765 | 765 | context.resource = ApplicationService.by_id(1) |
|
766 | 766 | request.context = context |
|
767 | 767 | request.matched_route = route |
|
768 | 768 | |
|
769 | 769 | LOG_PAYLOAD = copy.deepcopy(payload_examples.LOG_EXAMPLES[0]) |
|
770 | 770 | LOG_PAYLOAD2 = copy.deepcopy(payload_examples.LOG_EXAMPLES[1]) |
|
771 | 771 | LOG_PAYLOAD['primary_key'] = 'X2' |
|
772 | 772 | LOG_PAYLOAD2['primary_key'] = 'X2' |
|
773 | 773 | request.unsafe_json_body = [LOG_PAYLOAD, LOG_PAYLOAD2] |
|
774 | 774 | logs_create(request) |
|
775 | 775 | |
|
776 | 776 | query = DBSession.query(Log).order_by(sa.asc(Log.log_id)) |
|
777 | 777 | assert query.count() == 1 |
|
778 | 778 | assert query[0].message == "OMG ValueError happened2" |
|
779 | 779 | |
|
780 | 780 | @pytest.mark.usefixtures('default_application') |
|
781 | 781 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
782 | 782 | class TestAPIGeneralMetricsView(object): |
|
783 | 783 | def test_no_json_payload(self, base_app): |
|
784 | 784 | import colander |
|
785 | 785 | from appenlight.models.services.application import ApplicationService |
|
786 | 786 | from appenlight.views.api import general_metrics_create |
|
787 | 787 | route = mock.Mock() |
|
788 | 788 | route.name = 'api_general_metrics' |
|
789 | 789 | context = DummyContext() |
|
790 | 790 | context.resource = ApplicationService.by_id(1) |
|
791 | 791 | request = testing.DummyRequest( |
|
792 | 792 | headers={'Content-Type': 'application/json'}) |
|
793 | 793 | request.context = context |
|
794 | 794 | request.registry = base_app.registry |
|
795 | 795 | request.unsafe_json_body = '' |
|
796 | 796 | request.matched_route = route |
|
797 | 797 | with pytest.raises(colander.Invalid): |
|
798 | 798 | general_metrics_create(request) |
|
799 | 799 | |
|
800 | 800 | def test_single_json_payload(self): |
|
801 | 801 | import appenlight.tests.payload_examples as payload_examples |
|
802 |     | from appenlight.models.

    | 802 | from appenlight.models.metric import Metric
|
803 | 803 | from appenlight.views.api import general_metrics_create |
|
804 | 804 | from appenlight.models.services.application import ApplicationService |
|
805 | 805 | route = mock.Mock() |
|
806 | 806 | route.name = 'api_general_metric' |
|
807 | 807 | request = pyramid.threadlocal.get_current_request() |
|
808 | 808 | request.matched_route = route |
|
809 | 809 | context = DummyContext() |
|
810 | 810 | context.resource = ApplicationService.by_id(1) |
|
811 | 811 | request.context = context |
|
812 | 812 | request.unsafe_json_body = payload_examples.METRICS_PAYLOAD |
|
813 | 813 | general_metrics_create(request) |
|
814 | 814 | query = DBSession.query(Metric) |
|
815 | 815 | metric = query.first() |
|
816 | 816 | assert query.count() == 1 |
|
817 | 817 | assert metric.namespace == 'some.monitor' |
|
818 | 818 | |
|
819 | 819 | def test_multiple_json_payload(self): |
|
820 | 820 | import appenlight.tests.payload_examples as payload_examples |
|
821 |     | from appenlight.models.

    | 821 | from appenlight.models.metric import Metric
|
822 | 822 | from appenlight.views.api import general_metrics_create |
|
823 | 823 | from appenlight.models.services.application import ApplicationService |
|
824 | 824 | route = mock.Mock() |
|
825 | 825 | route.name = 'api_general_metrics' |
|
826 | 826 | request = pyramid.threadlocal.get_current_request() |
|
827 | 827 | request.matched_route = route |
|
828 | 828 | context = DummyContext() |
|
829 | 829 | context.resource = ApplicationService.by_id(1) |
|
830 | 830 | request.context = context |
|
831 | 831 | request.unsafe_json_body = [ |
|
832 | 832 | copy.deepcopy(payload_examples.METRICS_PAYLOAD), |
|
833 | 833 | copy.deepcopy(payload_examples.METRICS_PAYLOAD), |
|
834 | 834 | ] |
|
835 | 835 | general_metrics_create(request) |
|
836 | 836 | query = DBSession.query(Metric) |
|
837 | 837 | metric = query.first() |
|
838 | 838 | assert query.count() == 2 |
|
839 | 839 | assert metric.namespace == 'some.monitor' |
|
840 | 840 | |
|
841 | 841 | |
|
842 | 842 | class TestGroupingMessageReplacements(object): |
|
843 | 843 | def replace_default_repr_python(self): |
|
844 | 844 | test_str = ''' |
|
845 | 845 | ConnectionError: ConnectionError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) caused by: ConnectTimeoutError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) |
|
846 | 846 | ''' |
|
847 | 847 | regex = r'<(.*?) object at (.*?)>' |
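# A minimal sketch (hypothetical, not part of the original suite) of how the
# replacement could be asserted: normalizing repr() memory addresses so that
# otherwise-identical error strings group together.
#     import re
#     normalized = re.sub(regex, r'<\1>', test_str)
#     assert 'object at 0x7f87a0ba9fd0' not in normalized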
|
848 | 848 | |
|
849 | 849 | |
|
850 | 850 | class TestRulesKeyGetter(object): |
|
851 | 851 | def test_default_dict_getter_top_key(self): |
|
852 | 852 | from appenlight.lib.rule import Rule |
|
853 | 853 | struct = { |
|
854 | 854 | "a": { |
|
855 | 855 | "b": 'b', |
|
856 | 856 | "c": { |
|
857 | 857 | "d": 'd', |
|
858 | 858 | "g": { |
|
859 | 859 | "h": 'h' |
|
860 | 860 | } |
|
861 | 861 | }, |
|
862 | 862 | "e": 'e' |
|
863 | 863 | }, |
|
864 | 864 | "f": 'f' |
|
865 | 865 | } |
|
866 | 866 | result = Rule.default_dict_struct_getter(struct, "a") |
|
867 | 867 | assert result == struct['a'] |
|
868 | 868 | |
|
869 | 869 | def test_default_dict_getter_sub_key(self): |
|
870 | 870 | from appenlight.lib.rule import Rule |
|
871 | 871 | struct = { |
|
872 | 872 | "a": { |
|
873 | 873 | "b": 'b', |
|
874 | 874 | "c": { |
|
875 | 875 | "d": 'd', |
|
876 | 876 | "g": { |
|
877 | 877 | "h": 'h' |
|
878 | 878 | } |
|
879 | 879 | }, |
|
880 | 880 | "e": 'e' |
|
881 | 881 | }, |
|
882 | 882 | "f": 'f' |
|
883 | 883 | } |
|
884 | 884 | result = Rule.default_dict_struct_getter(struct, 'a:b') |
|
885 | 885 | assert result == struct['a']['b'] |
|
886 | 886 | result = Rule.default_dict_struct_getter(struct, 'a:c:d') |
|
887 | 887 | assert result == struct['a']['c']['d'] |
|
888 | 888 | |
|
889 | 889 | def test_default_obj_getter_top_key(self): |
|
890 | 890 | from appenlight.lib.rule import Rule |
|
891 | 891 | class TestStruct(object): |
|
892 | 892 | def __init__(self, a, b): |
|
893 | 893 | self.a = a |
|
894 | 894 | self.b = b |
|
895 | 895 | |
|
896 | 896 | struct = TestStruct(a='a', |
|
897 | 897 | b=TestStruct(a='x', b='y')) |
|
898 | 898 | result = Rule.default_obj_struct_getter(struct, "a") |
|
899 | 899 | assert result == struct.a |
|
900 | 900 | |
|
901 | 901 | def test_default_obj_getter_sub_key(self): |
|
902 | 902 | from appenlight.lib.rule import Rule |
|
903 | 903 | class TestStruct(object): |
|
904 | 904 | def __init__(self, name, a, b): |
|
905 | 905 | self.name = name |
|
906 | 906 | self.a = a |
|
907 | 907 | self.b = b |
|
908 | 908 | |
|
909 | 909 | def __repr__(self): |
|
910 | 910 | return '<obj {}>'.format(self.name) |
|
911 | 911 | |
|
912 | 912 | c = TestStruct('c', a=5, b='z') |
|
913 | 913 | b = TestStruct('b', a=c, b='y') |
|
914 | 914 | struct = TestStruct('a', a='a', b=b) |
|
915 | 915 | result = Rule.default_obj_struct_getter(struct, 'b:b') |
|
916 | 916 | assert result == struct.b.b |
|
917 | 917 | result = Rule.default_obj_struct_getter(struct, 'b:a:b') |
|
918 | 918 | assert result == struct.b.a.b |
|
919 | 919 | |
|
920 | 920 | |
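# Rule matching: Rule(config, type_matrix) normalizes the inspected value to
# the field's declared type before applying the comparison operator, which
# is why string values such as "500" still compare numerically below.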
|
921 | 921 | @pytest.mark.usefixtures('report_type_matrix') |
|
922 | 922 | class TestRulesParsing(object):
|
923 | 923 | @pytest.mark.parametrize("op, struct_value, test_value, match_result", [ |
|
924 | 924 | ('eq', 500, 500, True), |
|
925 | 925 | ('eq', 600, 500, False), |
|
926 | 926 | ('eq', 300, 500, False), |
|
927 | 927 | ('eq', "300", 500, False), |
|
928 | 928 | ('eq', "600", 500, False), |
|
929 | 929 | ('eq', "500", 500, True), |
|
930 | 930 | ('ne', 500, 500, False), |
|
931 | 931 | ('ne', 600, 500, True), |
|
932 | 932 | ('ne', 300, 500, True), |
|
933 | 933 | ('ne', "300", 500, True), |
|
934 | 934 | ('ne', "600", 500, True), |
|
935 | 935 | ('ne', "500", 500, False), |
|
936 | 936 | ('ge', 500, 500, True), |
|
937 | 937 | ('ge', 600, 500, True), |
|
938 | 938 | ('ge', 499, 500, False), |
|
939 | 939 | ('gt', 499, 500, False), |
|
940 | 940 | ('gt', 500, 500, False), |
|
941 | 941 | ('gt', 501, 500, True), |
|
942 | 942 | ('le', 499, 500, True), |
|
943 | 943 | ('le', 500, 500, True), |
|
944 | 944 | ('le', 501, 500, False), |
|
945 | 945 | ('lt', 499, 500, True), |
|
946 | 946 | ('lt', 500, 500, False), |
|
947 | 947 | ('lt', 501, 500, False), |
|
948 | 948 | ]) |
|
949 | 949 | def test_single_op_int(self, op, struct_value, test_value, match_result, |
|
950 | 950 | report_type_matrix): |
|
951 | 951 | from appenlight.lib.rule import Rule |
|
952 | 952 | rule_config = { |
|
953 | 953 | "op": op, |
|
954 | 954 | "field": "http_status", |
|
955 | 955 | "value": test_value |
|
956 | 956 | } |
|
957 | 957 | rule = Rule(rule_config, report_type_matrix) |
|
958 | 958 | |
|
959 | 959 | data = { |
|
960 | 960 | "http_status": struct_value |
|
961 | 961 | } |
|
962 | 962 | assert rule.match(data) is match_result |
|
963 | 963 | |
|
964 | 964 | @pytest.mark.parametrize("op, struct_value, test_value, match_result", [ |
|
965 | 965 | ('ge', "500.01", 500, True), |
|
966 | 966 | ('ge', "500.01", 500.02, False), |
|
967 | 967 | ('le', "500.01", 500.02, True) |
|
968 | 968 | ]) |
|
969 | 969 | def test_single_op_float(self, op, struct_value, test_value, match_result, |
|
970 | 970 | report_type_matrix): |
|
971 | 971 | from appenlight.lib.rule import Rule |
|
972 | 972 | rule_config = { |
|
973 | 973 | "op": op, |
|
974 | 974 | "field": "duration", |
|
975 | 975 | "value": test_value |
|
976 | 976 | } |
|
977 | 977 | rule = Rule(rule_config, report_type_matrix) |
|
978 | 978 | |
|
979 | 979 | data = { |
|
980 | 980 | "duration": struct_value |
|
981 | 981 | } |
|
982 | 982 | assert rule.match(data) is match_result |
|
983 | 983 | |
|
984 | 984 | @pytest.mark.parametrize("op, struct_value, test_value, match_result", [ |
|
985 | 985 | ('contains', 'foo bar baz', 'foo', True), |
|
986 | 986 | ('contains', 'foo bar baz', 'bar', True), |
|
987 | 987 | ('contains', 'foo bar baz', 'dupa', False), |
|
988 | 988 | ('startswith', 'foo bar baz', 'foo', True), |
|
989 | 989 | ('startswith', 'foo bar baz', 'bar', False), |
|
990 | 990 | ('endswith', 'foo bar baz', 'baz', True), |
|
991 | 991 | ('endswith', 'foo bar baz', 'bar', False), |
|
992 | 992 | ]) |
|
993 | 993 | def test_single_op_string(self, op, struct_value, test_value, |
|
994 | 994 | match_result, report_type_matrix): |
|
995 | 995 | from appenlight.lib.rule import Rule |
|
996 | 996 | rule_config = { |
|
997 | 997 | "op": op, |
|
998 | 998 | "field": "error", |
|
999 | 999 | "value": test_value |
|
1000 | 1000 | } |
|
1001 | 1001 | rule = Rule(rule_config, report_type_matrix) |
|
1002 | 1002 | |
|
1003 | 1003 | data = { |
|
1004 | 1004 | "error": struct_value |
|
1005 | 1005 | } |
|
1006 | 1006 | assert rule.match(data) is match_result |
|
1007 | 1007 | |
|
1008 | 1008 | @pytest.mark.parametrize("field, value, s_type", [ |
|
1009 | 1009 | ('field_unicode', 500, str), |
|
1010 | 1010 | ('field_unicode', 500.0, str), |
|
1011 | 1011 | ('field_unicode', "500", str), |
|
1012 | 1012 | ('field_int', "500", int), |
|
1013 | 1013 | ('field_int', 500, int), |
|
1014 | 1014 | ('field_int', 500.0, int), |
|
1015 | 1015 | ('field_float', "500", float), |
|
1016 | 1016 | ('field_float', 500, float), |
|
1017 | 1017 | ('field_float', 500.0, float), |
|
1018 | 1018 | ]) |
|
1019 | 1019 | def test_type_normalization(self, field, value, s_type): |
|
1020 | 1020 | from appenlight.lib.rule import Rule |
|
1021 | 1021 | type_matrix = { |
|
1022 | 1022 | 'field_unicode': {"type": 'unicode'}, |
|
1023 | 1023 | 'field_float': {"type": 'float'}, |
|
1024 | 1024 | 'field_int': {"type": 'int'}, |
|
1025 | 1025 | } |
|
1026 | 1026 | |
|
1027 | 1027 | rule = Rule({}, type_matrix) |
|
1028 | 1028 | n_value = rule.normalized_type(field, value) |
|
1029 | 1029 | assert isinstance(n_value, s_type) is True |
|
1030 | 1030 | |
|
1031 | 1031 | |
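# Composite rules: when "field" is __AND__, __OR__ or __NOT__, the config
# carries a nested "rules" list and the children's match results are
# combined accordingly.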
|
1032 | 1032 | @pytest.mark.usefixtures('report_type_matrix') |
|
1033 | 1033 | class TestNestedRuleParsing(object):
|
1034 | 1034 | |
|
1035 | 1035 | @pytest.mark.parametrize("data, result", [ |
|
1036 | 1036 | ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, |
|
1037 | 1037 | False), |
|
1038 | 1038 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
1039 | 1039 | False), |
|
1040 | 1040 | ({"http_status": 500, "group": {"priority": 1, "occurences": 11}}, |
|
1041 | 1041 | False), |
|
1042 | 1042 | ({"http_status": 101, "group": {"priority": 3, "occurences": 5}}, |
|
1043 | 1043 | True), |
|
1044 | 1044 | ]) |
|
1045 | 1045 | def test_NOT_rule(self, data, result, report_type_matrix): |
|
1046 | 1046 | from appenlight.lib.rule import Rule |
|
1047 | 1047 | rule_config = { |
|
1048 | 1048 | "field": "__NOT__", |
|
1049 | 1049 | "rules": [ |
|
1050 | 1050 | { |
|
1051 | 1051 | "op": "ge", |
|
1052 | 1052 | "field": "group:occurences", |
|
1053 | 1053 | "value": "10" |
|
1054 | 1054 | }, |
|
1055 | 1055 | { |
|
1056 | 1056 | "op": "ge", |
|
1057 | 1057 | "field": "group:priority", |
|
1058 | 1058 | "value": "4" |
|
1059 | 1059 | } |
|
1060 | 1060 | ] |
|
1061 | 1061 | } |
|
1062 | 1062 | |
|
1063 | 1063 | rule = Rule(rule_config, report_type_matrix) |
|
1064 | 1064 | assert rule.match(data) is result |
|
1065 | 1065 | |
|
1066 | 1066 | @pytest.mark.parametrize("data, result", [ |
|
1067 | 1067 | ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, |
|
1068 | 1068 | True), |
|
1069 | 1069 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
1070 | 1070 | True), |
|
1071 | 1071 | ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, |
|
1072 | 1072 | True), |
|
1073 | 1073 | ({"http_status": 101, "group": {"priority": 3, "occurences": 11}}, |
|
1074 | 1074 | False), |
|
1075 | 1075 | ]) |
|
1076 | 1076 | def test_nested_OR_AND_rule(self, data, result, report_type_matrix): |
|
1077 | 1077 | from appenlight.lib.rule import Rule |
|
1078 | 1078 | rule_config = { |
|
1079 | 1079 | "field": "__OR__", |
|
1080 | 1080 | "rules": [ |
|
1081 | 1081 | { |
|
1082 | 1082 | "field": "__AND__", |
|
1083 | 1083 | "rules": [ |
|
1084 | 1084 | { |
|
1085 | 1085 | "op": "ge", |
|
1086 | 1086 | "field": "group:occurences", |
|
1087 | 1087 | "value": "10" |
|
1088 | 1088 | }, |
|
1089 | 1089 | { |
|
1090 | 1090 | "op": "ge", |
|
1091 | 1091 | "field": "group:priority", |
|
1092 | 1092 | "value": "4" |
|
1093 | 1093 | } |
|
1094 | 1094 | ] |
|
1095 | 1095 | }, |
|
1096 | 1096 | { |
|
1097 | 1097 | "op": "eq", |
|
1098 | 1098 | "field": "http_status", |
|
1099 | 1099 | "value": "500" |
|
1100 | 1100 | } |
|
1101 | 1101 | ] |
|
1102 | 1102 | } |
|
1103 | 1103 | |
|
1104 | 1104 | rule = Rule(rule_config, report_type_matrix) |
|
1105 | 1105 | assert rule.match(data) is result |
|
1106 | 1106 | |
|
1107 | 1107 | @pytest.mark.parametrize("data, result", [ |
|
1108 | 1108 | ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, |
|
1109 | 1109 | True), |
|
1110 | 1110 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
1111 | 1111 | True), |
|
1112 | 1112 | ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, |
|
1113 | 1113 | True), |
|
1114 | 1114 | ({"http_status": 101, "group": {"priority": 3, "occurences": 1}}, |
|
1115 | 1115 | False), |
|
1116 | 1116 | ]) |
|
1117 | 1117 | def test_nested_OR_OR_rule(self, data, result, report_type_matrix): |
|
1118 | 1118 | from appenlight.lib.rule import Rule |
|
1119 | 1119 | rule_config = { |
|
1120 | 1120 | "field": "__OR__", |
|
1121 | 1121 | "rules": [ |
|
1122 | 1122 | {"field": "__OR__", |
|
1123 | 1123 | "rules": [ |
|
1124 | 1124 | {"op": "ge", |
|
1125 | 1125 | "field": "group:occurences", |
|
1126 | 1126 | "value": "10" |
|
1127 | 1127 | }, |
|
1128 | 1128 | {"op": "ge", |
|
1129 | 1129 | "field": "group:priority", |
|
1130 | 1130 | "value": "4" |
|
1131 | 1131 | } |
|
1132 | 1132 | ] |
|
1133 | 1133 | }, |
|
1134 | 1134 | {"op": "eq", |
|
1135 | 1135 | "field": "http_status", |
|
1136 | 1136 | "value": "500" |
|
1137 | 1137 | } |
|
1138 | 1138 | ] |
|
1139 | 1139 | } |
|
1140 | 1140 | |
|
1141 | 1141 | rule = Rule(rule_config, report_type_matrix) |
|
1142 | 1142 | assert rule.match(data) is result |
|
1143 | 1143 | |
|
1144 | 1144 | @pytest.mark.parametrize("data, result", [ |
|
1145 | 1145 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}}, |
|
1146 | 1146 | True), |
|
1147 | 1147 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
1148 | 1148 | False), |
|
1149 | 1149 | ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, |
|
1150 | 1150 | False), |
|
1151 | 1151 | ({"http_status": 101, "group": {"priority": 3, "occurences": 1}}, |
|
1152 | 1152 | False), |
|
1153 | 1153 | ]) |
|
1154 | 1154 | def test_nested_AND_AND_rule(self, data, result, report_type_matrix): |
|
1155 | 1155 | from appenlight.lib.rule import Rule |
|
1156 | 1156 | rule_config = { |
|
1157 | 1157 | "field": "__AND__", |
|
1158 | 1158 | "rules": [ |
|
1159 | 1159 | {"field": "__AND__", |
|
1160 | 1160 | "rules": [ |
|
1161 | 1161 | {"op": "ge", |
|
1162 | 1162 | "field": "group:occurences", |
|
1163 | 1163 | "value": "10" |
|
1164 | 1164 | }, |
|
1165 | 1165 | {"op": "ge", |
|
1166 | 1166 | "field": "group:priority", |
|
1167 | 1167 | "value": "4" |
|
1168 | 1168 | }] |
|
1169 | 1169 | }, |
|
1170 | 1170 | {"op": "eq", |
|
1171 | 1171 | "field": "http_status", |
|
1172 | 1172 | "value": "500" |
|
1173 | 1173 | } |
|
1174 | 1174 | ] |
|
1175 | 1175 | } |
|
1176 | 1176 | |
|
1177 | 1177 | rule = Rule(rule_config, report_type_matrix) |
|
1178 | 1178 | assert rule.match(data) is result |
|
1179 | 1179 | |
|
1180 | 1180 | @pytest.mark.parametrize("data, result", [ |
|
1181 | 1181 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1182 | 1182 | "url_path": '/test/register', "error": "foo test bar"}, True), |
|
1183 | 1183 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1184 | 1184 | "url_path": '/test/register', "error": "foo INVALID bar"}, False), |
|
1185 | 1185 | ]) |
|
1186 | 1186 | def test_nested_AND_AND_AND_rule(self, data, result, report_type_matrix): |
|
1187 | 1187 | from appenlight.lib.rule import Rule |
|
1188 | 1188 | rule_config = { |
|
1189 | 1189 | "field": "__AND__", |
|
1190 | 1190 | "rules": [ |
|
1191 | 1191 | {"field": "__AND__", |
|
1192 | 1192 | "rules": [ |
|
1193 | 1193 | {"op": "ge", |
|
1194 | 1194 | "field": "group:occurences", |
|
1195 | 1195 | "value": "10" |
|
1196 | 1196 | }, |
|
1197 | 1197 | {"field": "__AND__", |
|
1198 | 1198 | "rules": [ |
|
1199 | 1199 | {"op": "endswith", |
|
1200 | 1200 | "field": "url_path", |
|
1201 | 1201 | "value": "register"}, |
|
1202 | 1202 | {"op": "contains", |
|
1203 | 1203 | "field": "error", |
|
1204 | 1204 | "value": "test"}]}] |
|
1205 | 1205 | }, |
|
1206 | 1206 | {"op": "eq", |
|
1207 | 1207 | "field": "http_status", |
|
1208 | 1208 | "value": "500" |
|
1209 | 1209 | } |
|
1210 | 1210 | ] |
|
1211 | 1211 | } |
|
1212 | 1212 | |
|
1213 | 1213 | rule = Rule(rule_config, report_type_matrix) |
|
1214 | 1214 | assert rule.match(data) is result |
|
1215 | 1215 | |
|
1216 | 1216 | @pytest.mark.parametrize("data, result", [ |
|
1217 | 1217 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1218 | 1218 | "url_path": 6, "error": 3}, False), |
|
1219 | 1219 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1220 | 1220 | "url_path": '/test/register', "error": "foo INVALID bar"}, True), |
|
1221 | 1221 | ]) |
|
1222 | 1222 | def test_nested_AND_AND_OR_rule(self, data, result, report_type_matrix): |
|
1223 | 1223 | from appenlight.lib.rule import Rule |
|
1224 | 1224 | rule_config = { |
|
1225 | 1225 | "field": "__AND__", |
|
1226 | 1226 | "rules": [ |
|
1227 | 1227 | {"field": "__AND__", |
|
1228 | 1228 | "rules": [ |
|
1229 | 1229 | {"op": "ge", |
|
1230 | 1230 | "field": "group:occurences", |
|
1231 | 1231 | "value": "10" |
|
1232 | 1232 | }, |
|
1233 | 1233 | {"field": "__OR__", |
|
1234 | 1234 | "rules": [ |
|
1235 | 1235 | {"op": "endswith", |
|
1236 | 1236 | "field": "url_path", |
|
1237 | 1237 | "value": "register" |
|
1238 | 1238 | }, |
|
1239 | 1239 | {"op": "contains", |
|
1240 | 1240 | "field": "error", |
|
1241 | 1241 | "value": "test" |
|
1242 | 1242 | }]}] |
|
1243 | 1243 | }, |
|
1244 | 1244 | {"op": "eq", |
|
1245 | 1245 | "field": "http_status", |
|
1246 | 1246 | "value": "500" |
|
1247 | 1247 | } |
|
1248 | 1248 | ] |
|
1249 | 1249 | } |
|
1250 | 1250 | |
|
1251 | 1251 | rule = Rule(rule_config, report_type_matrix) |
|
1252 | 1252 | assert rule.match(data) is result |
|
1253 | 1253 | |
|
1254 | 1254 | @pytest.mark.parametrize("op, field, value, should_fail", [ |
|
1255 | 1255 | ('eq', 'http_status', "1", False), |
|
1256 | 1256 | ('ne', 'http_status', "1", False), |
|
1257 | 1257 | ('ne', 'http_status', "foo", True), |
|
1258 | 1258 | ('startswith', 'http_status', "1", True), |
|
1259 | 1259 | ('eq', 'group:priority', "1", False), |
|
1260 | 1260 | ('ne', 'group:priority', "1", False), |
|
1261 | 1261 | ('ge', 'group:priority', "1", False), |
|
1262 | 1262 | ('le', 'group:priority', "1", False), |
|
1263 | 1263 | ('startswith', 'group:priority', "1", True), |
|
1264 | 1264 | ('eq', 'url_domain', "1", False), |
|
1265 | 1265 | ('ne', 'url_domain', "1", False), |
|
1266 | 1266 | ('startswith', 'url_domain', "1", False), |
|
1267 | 1267 | ('endswith', 'url_domain', "1", False), |
|
1268 | 1268 | ('contains', 'url_domain', "1", False), |
|
1269 | 1269 | ('ge', 'url_domain', "1", True), |
|
1270 | 1270 | ('eq', 'url_path', "1", False), |
|
1271 | 1271 | ('ne', 'url_path', "1", False), |
|
1272 | 1272 | ('startswith', 'url_path', "1", False), |
|
1273 | 1273 | ('endswith', 'url_path', "1", False), |
|
1274 | 1274 | ('contains', 'url_path', "1", False), |
|
1275 | 1275 | ('ge', 'url_path', "1", True), |
|
1276 | 1276 | ('eq', 'error', "1", False), |
|
1277 | 1277 | ('ne', 'error', "1", False), |
|
1278 | 1278 | ('startswith', 'error', "1", False), |
|
1279 | 1279 | ('endswith', 'error', "1", False), |
|
1280 | 1280 | ('contains', 'error', "1", False), |
|
1281 | 1281 | ('ge', 'error', "1", True), |
|
1282 | 1282 | ('ge', 'url_path', "1", True), |
|
1283 | 1283 | ('eq', 'tags:server_name', "1", False), |
|
1284 | 1284 | ('ne', 'tags:server_name', "1", False), |
|
1285 | 1285 | ('startswith', 'tags:server_name', "1", False), |
|
1286 | 1286 | ('endswith', 'tags:server_name', "1", False), |
|
1287 | 1287 | ('contains', 'tags:server_name', "1", False), |
|
1288 | 1288 | ('ge', 'tags:server_name', "1", True), |
|
1289 | 1289 | ('contains', 'traceback', "1", False), |
|
1290 | 1290 | ('ge', 'traceback', "1", True), |
|
1291 | 1291 | ('eq', 'group:occurences', "1", False), |
|
1292 | 1292 | ('ne', 'group:occurences', "1", False), |
|
1293 | 1293 | ('ge', 'group:occurences', "1", False), |
|
1294 | 1294 | ('le', 'group:occurences', "1", False), |
|
1295 | 1295 | ('contains', 'group:occurences', "1", True), |
|
1296 | 1296 | ]) |
|
1297 | 1297 | def test_rule_validation(self, op, field, value, should_fail, |
|
1298 | 1298 | report_type_matrix): |
|
1299 | 1299 | import colander |
|
1300 | 1300 | from appenlight.validators import build_rule_schema |
|
1301 | 1301 | rule_config = { |
|
1302 | 1302 | "op": op, |
|
1303 | 1303 | "field": field, |
|
1304 | 1304 | "value": value |
|
1305 | 1305 | } |
|
1306 | 1306 | |
|
1307 | 1307 | schema = build_rule_schema(rule_config, report_type_matrix) |
|
1308 | 1308 | if should_fail: |
|
1309 | 1309 | with pytest.raises(colander.Invalid): |
|
1310 | 1310 | schema.deserialize(rule_config) |
|
1311 | 1311 | else: |
|
1312 | 1312 | schema.deserialize(rule_config) |
|
1313 | 1313 | |
|
1314 | 1314 | def test_nested_proper_rule_validation(self, report_type_matrix): |
|
1315 | 1315 | from appenlight.validators import build_rule_schema |
|
1316 | 1316 | rule_config = { |
|
1317 | 1317 | "field": "__AND__", |
|
1318 | 1318 | "rules": [ |
|
1319 | 1319 | { |
|
1320 | 1320 | "field": "__AND__", |
|
1321 | 1321 | "rules": [ |
|
1322 | 1322 | { |
|
1323 | 1323 | "op": "ge", |
|
1324 | 1324 | "field": "group:occurences", |
|
1325 | 1325 | "value": "10" |
|
1326 | 1326 | }, |
|
1327 | 1327 | { |
|
1328 | 1328 | "field": "__OR__", |
|
1329 | 1329 | "rules": [ |
|
1330 | 1330 | { |
|
1331 | 1331 | "op": "endswith", |
|
1332 | 1332 | "field": "url_path", |
|
1333 | 1333 | "value": "register" |
|
1334 | 1334 | }, |
|
1335 | 1335 | { |
|
1336 | 1336 | "op": "contains", |
|
1337 | 1337 | "field": "error", |
|
1338 | 1338 | "value": "test" |
|
1339 | 1339 | } |
|
1340 | 1340 | ] |
|
1341 | 1341 | } |
|
1342 | 1342 | ] |
|
1343 | 1343 | }, |
|
1344 | 1344 | { |
|
1345 | 1345 | "op": "eq", |
|
1346 | 1346 | "field": "http_status", |
|
1347 | 1347 | "value": "500" |
|
1348 | 1348 | } |
|
1349 | 1349 | ] |
|
1350 | 1350 | } |
|
1351 | 1351 | |
|
1352 | 1352 | schema = build_rule_schema(rule_config, report_type_matrix) |
|
1353 | 1353 | schema.deserialize(rule_config)
|
1354 | 1354 | |
|
1355 | 1355 | def test_nested_bad_rule_validation(self, report_type_matrix): |
|
1356 | 1356 | import colander |
|
1357 | 1357 | from appenlight.validators import build_rule_schema |
|
1358 | 1358 | rule_config = { |
|
1359 | 1359 | "field": "__AND__", |
|
1360 | 1360 | "rules": [ |
|
1361 | 1361 | { |
|
1362 | 1362 | "field": "__AND__", |
|
1363 | 1363 | "rules": [ |
|
1364 | 1364 | { |
|
1365 | 1365 | "op": "ge", |
|
1366 | 1366 | "field": "group:occurences", |
|
1367 | 1367 | "value": "10" |
|
1368 | 1368 | }, |
|
1369 | 1369 | { |
|
1370 | 1370 | "field": "__OR__", |
|
1371 | 1371 | "rules": [ |
|
1372 | 1372 | { |
|
1373 | 1373 | "op": "gt", |
|
1374 | 1374 | "field": "url_path", |
|
1375 | 1375 | "value": "register" |
|
1376 | 1376 | }, |
|
1377 | 1377 | { |
|
1378 | 1378 | "op": "contains", |
|
1379 | 1379 | "field": "error", |
|
1380 | 1380 | "value": "test" |
|
1381 | 1381 | } |
|
1382 | 1382 | ] |
|
1383 | 1383 | } |
|
1384 | 1384 | ] |
|
1385 | 1385 | }, |
|
1386 | 1386 | { |
|
1387 | 1387 | "op": "eq", |
|
1388 | 1388 | "field": "http_status", |
|
1389 | 1389 | "value": "500" |
|
1390 | 1390 | } |
|
1391 | 1391 | ] |
|
1392 | 1392 | } |
|
1393 | 1393 | |
|
1394 | 1394 | schema = build_rule_schema(rule_config, report_type_matrix) |
|
1395 | 1395 | with pytest.raises(colander.Invalid): |
|
1396 | 1396 | schema.deserialize(rule_config)
|
1397 | 1397 | |
|
1398 | 1398 | def test_config_manipulator(self): |
|
1399 | 1399 | from appenlight.lib.rule import Rule |
|
1400 | 1400 | type_matrix = { |
|
1401 | 1401 | 'a': {"type": 'int', |
|
1402 | 1402 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1403 | 1403 | 'b': {"type": 'int', |
|
1404 | 1404 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1405 | 1405 | } |
|
1406 | 1406 | rule_config = { |
|
1407 | 1407 | "field": "__OR__", |
|
1408 | 1408 | "rules": [ |
|
1409 | 1409 | { |
|
1410 | 1410 | "field": "__OR__", |
|
1411 | 1411 | "rules": [ |
|
1412 | 1412 | { |
|
1413 | 1413 | "op": "ge", |
|
1414 | 1414 | "field": "a", |
|
1415 | 1415 | "value": "10" |
|
1416 | 1416 | } |
|
1417 | 1417 | ] |
|
1418 | 1418 | }, |
|
1419 | 1419 | { |
|
1420 | 1420 | "op": "eq", |
|
1421 | 1421 | "field": "b", |
|
1422 | 1422 | "value": "500" |
|
1423 | 1423 | } |
|
1424 | 1424 | ] |
|
1425 | 1425 | } |
|
1426 | 1426 | |
|
1427 | 1427 | def rule_manipulator(rule): |
|
1428 | 1428 | if 'value' in rule.config: |
|
1429 | 1429 | rule.config['value'] = "1" |
|
1430 | 1430 | |
|
1431 | 1431 | rule = Rule(rule_config, type_matrix, |
|
1432 | 1432 | config_manipulator=rule_manipulator) |
|
1433 | 1433 | rule.match({"a": 1, |
|
1434 | 1434 | "b": "2"}) |
|
1435 | 1435 | assert rule.config['rules'][0]['rules'][0]['value'] == "1" |
|
1436 | 1436 | assert rule.config['rules'][1]['value'] == "1" |
|
1437 | 1437 | assert rule.type_matrix["b"]['type'] == "int" |
|
1438 | 1438 | |
|
1439 | 1439 | def test_dynamic_config_manipulator(self): |
|
1440 | 1440 | from appenlight.lib.rule import Rule |
|
1441 | 1441 | rule_config = { |
|
1442 | 1442 | "field": "__OR__", |
|
1443 | 1443 | "rules": [ |
|
1444 | 1444 | { |
|
1445 | 1445 | "field": "__OR__", |
|
1446 | 1446 | "rules": [ |
|
1447 | 1447 | { |
|
1448 | 1448 | "op": "ge", |
|
1449 | 1449 | "field": "a", |
|
1450 | 1450 | "value": "10" |
|
1451 | 1451 | } |
|
1452 | 1452 | ] |
|
1453 | 1453 | }, |
|
1454 | 1454 | { |
|
1455 | 1455 | "op": "eq", |
|
1456 | 1456 | "field": "b", |
|
1457 | 1457 | "value": "500" |
|
1458 | 1458 | } |
|
1459 | 1459 | ] |
|
1460 | 1460 | } |
|
1461 | 1461 | |
|
1462 | 1462 | def rule_manipulator(rule): |
|
1463 | 1463 | rule.type_matrix = { |
|
1464 | 1464 | 'a': {"type": 'int', |
|
1465 | 1465 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1466 | 1466 | 'b': {"type": 'unicode', |
|
1467 | 1467 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1468 | 1468 | } |
|
1469 | 1469 | |
|
1470 | 1470 | if 'value' in rule.config: |
|
1471 | 1471 | if rule.config['field'] == 'a': |
|
1472 | 1472 | rule.config['value'] = "1" |
|
1473 | 1473 | elif rule.config['field'] == 'b': |
|
1474 | 1474 | rule.config['value'] = "2" |
|
1475 | 1475 | |
|
1476 | 1476 | rule = Rule(rule_config, {}, |
|
1477 | 1477 | config_manipulator=rule_manipulator) |
|
1478 | 1478 | rule.match({"a": 11, |
|
1479 | 1479 | "b": "55"}) |
|
1480 | 1480 | assert rule.config['rules'][0]['rules'][0]['value'] == "1" |
|
1481 | 1481 | assert rule.config['rules'][1]['value'] == "2" |
|
1482 | 1482 | assert rule.type_matrix["b"]['type'] == "unicode" |
|
1483 | 1483 | |
|
1484 | 1484 | |
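The tests above exercise the whole public surface of `Rule`: construction from a nested config plus a type matrix, `match()` against plain dicts (colon-separated fields such as `group:occurences` resolve through nested keys), and an optional `config_manipulator` callback applied to rule nodes as the tree is evaluated. A minimal standalone sketch of that API, using only calls visible in these tests:

    from appenlight.lib.rule import Rule

    # field types and their permitted operators, in the same shape as the
    # type_matrix used by test_config_manipulator above
    type_matrix = {
        'http_status': {"type": 'int', "ops": ('eq', 'ne', 'ge', 'le')},
        'group:priority': {"type": 'int', "ops": ('eq', 'ne', 'ge', 'le')},
    }

    rule_config = {
        "field": "__AND__",
        "rules": [
            {"op": "eq", "field": "http_status", "value": "500"},
            {"op": "ge", "field": "group:priority", "value": "4"},
        ],
    }

    rule = Rule(rule_config, type_matrix)
    # "group:priority" is looked up as data["group"]["priority"]
    assert rule.match({"http_status": 500, "group": {"priority": 7}}) is True
    assert rule.match({"http_status": 404, "group": {"priority": 7}}) is False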
|
1485 | 1485 | @pytest.mark.usefixtures('base_app', 'with_migrations') |
|
1486 | 1486 | class TestViewsWithForms(object): |
|
1487 | 1487 | def test_bad_csrf(self): |
|
1488 | 1488 | from appenlight.forms import CSRFException |
|
1489 | 1489 | from appenlight.views.index import register |
|
1490 | 1490 | post_data = {'dupa': 'dupa'} |
|
1491 | 1491 | request = testing.DummyRequest(post=post_data) |
|
1492 | 1492 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1493 | 1493 | with pytest.raises(CSRFException): |
|
1494 | 1494 | register(request) |
|
1495 | 1495 | |
|
1496 | 1496 | def test_proper_csrf(self): |
|
1497 | 1497 | from appenlight.views.index import register |
|
1498 | 1498 | request = pyramid.threadlocal.get_current_request() |
|
1499 | 1499 | post_data = {'dupa': 'dupa', |
|
1500 | 1500 | 'csrf_token': request.session.get_csrf_token()} |
|
1501 | 1501 | request = testing.DummyRequest(post=post_data) |
|
1502 | 1502 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1503 | 1503 | result = register(request) |
|
1504 | 1504 | assert result['form'].errors['email'][0] == 'This field is required.' |
|
1505 | 1505 | |
|
1506 | 1506 | |
|
1507 | 1507 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'default_data') |
|
1508 | 1508 | class TestRegistration(object): |
|
1509 | 1509 | def test_invalid_form(self): |
|
1510 | 1510 | from appenlight.views.index import register |
|
1511 | 1511 | request = pyramid.threadlocal.get_current_request() |
|
1512 | 1512 | post_data = {'user_name': '', |
|
1513 | 1513 | 'user_password': '', |
|
1514 | 1514 | 'email': '', |
|
1515 | 1515 | 'csrf_token': request.session.get_csrf_token()} |
|
1516 | 1516 | request = testing.DummyRequest(post=post_data) |
|
1517 | 1517 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1518 | 1518 | result = register(request) |
|
1519 | 1519 | assert result['form'].errors['user_name'][0] == \ |
|
1520 | 1520 | 'This field is required.' |
|
1521 | 1521 | |
|
1522 | 1522 | def test_valid_form(self): |
|
1523 | 1523 | from appenlight.views.index import register |
|
1524 | 1524 | from ziggurat_foundations.models.services.user import UserService |
|
1525 | 1525 | request = pyramid.threadlocal.get_current_request() |
|
1526 | 1526 | post_data = {'user_name': 'foo', |
|
1527 | 1527 | 'user_password': 'barr', |
|
1528 | 1528 | 'email': 'test@test.foo', |
|
1529 | 1529 | 'csrf_token': request.session.get_csrf_token()} |
|
1530 | 1530 | request = testing.DummyRequest(post=post_data) |
|
1531 | 1531 | request.add_flash_to_headers = mock.Mock() |
|
1532 | 1532 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1533 | 1533 | assert UserService.by_user_name('foo') is None |
|
1534 | 1534 | register(request) |
|
1535 | 1535 | user = UserService.by_user_name('foo') |
|
1536 | 1536 | assert user.user_name == 'foo' |
|
1537 | 1537 | assert len(user.user_password) == 60 |
|
1538 | 1538 | |
|
1539 | 1539 | |
|
1540 | 1540 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables', |
|
1541 | 1541 | 'default_user') |
|
1542 | 1542 | class TestApplicationCreation(object): |
|
1543 | 1543 | def test_wrong_data(self): |
|
1544 | 1544 | import appenlight.views.applications as applications |
|
1545 | 1545 | from ziggurat_foundations.models.services.user import UserService |
|
1546 | 1546 | request = pyramid.threadlocal.get_current_request() |
|
1547 | 1547 | request.user = UserService.by_user_name('testuser') |
|
1548 | 1548 | request.unsafe_json_body = {} |
|
1549 | 1549 | request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token() |
|
1550 | 1550 | response = applications.application_create(request) |
|
1551 | 1551 | assert response.code == 422 |
|
1552 | 1552 | |
|
1553 | 1553 | def test_proper_data(self): |
|
1554 | 1554 | import appenlight.views.applications as applications |
|
1555 | 1555 | from ziggurat_foundations.models.services.user import UserService |
|
1556 | 1556 | |
|
1557 | 1557 | request = pyramid.threadlocal.get_current_request() |
|
1558 | 1558 | request.user = UserService.by_user_name('testuser') |
|
1559 | 1559 | request.unsafe_json_body = {"resource_name": "app name", |
|
1560 | 1560 | "domains": "foo"} |
|
1561 | 1561 | request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token() |
|
1562 | 1562 | app_dict = applications.application_create(request) |
|
1563 | 1563 | assert app_dict['public_key'] is not None |
|
1564 | 1564 | assert app_dict['api_key'] is not None |
|
1565 | 1565 | assert app_dict['resource_name'] == 'app name' |
|
1566 | 1566 | assert app_dict['owner_group_id'] is None |
|
1567 | 1567 | assert app_dict['resource_id'] is not None |
|
1568 | 1568 | assert app_dict['default_grouping'] == 'url_traceback' |
|
1569 | 1569 | assert app_dict['possible_permissions'] == ('view', 'update_reports') |
|
1570 | 1570 | assert app_dict['slow_report_threshold'] == 10 |
|
1571 | 1571 | assert app_dict['owner_user_name'] == 'testuser' |
|
1572 | 1572 | assert app_dict['owner_user_id'] == request.user.id |
|
1573 | 1573 | assert app_dict['domains'] is 'foo' |
|
1574 | 1574 | assert app_dict['postprocessing_rules'] == [] |
|
1575 | 1575 | assert app_dict['error_report_threshold'] == 10 |
|
1576 | 1576 | assert app_dict['allow_permanent_storage'] is False |
|
1577 | 1577 | assert app_dict['resource_type'] == 'application' |
|
1578 | 1578 | assert app_dict['current_permissions'] == [] |
|
1579 | 1579 | |
|
1580 | 1580 | |
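The Sentry-compatibility tests below feed `sentry_compat` both plain JSON and encoded bodies. The encoded payloads appear to be base64-encoded, zlib-compressed JSON: the `eJy` prefix on the Java body is the base64 of a zlib header, and the Ruby/Python requests carry `Content-Encoding: deflate`. A minimal standard-library sketch of decoding such a body; this is an assumption about the wire format, not AppEnlight's actual decoder:

    import base64
    import json
    import zlib

    def decode_sentry_body(body):
        # Best-effort decode of a raven-style payload (assumed wire formats).
        for transform in (
            lambda b: zlib.decompress(b),                    # Content-Encoding: deflate
            lambda b: zlib.decompress(base64.b64decode(b)),  # base64(zlib(json))
            lambda b: b,                                     # already plain JSON
        ):
            try:
                return json.loads(transform(body))
            except (ValueError, zlib.error):
                continue
        raise ValueError("unrecognized payload encoding")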
|
1581 | 1581 | @pytest.mark.usefixtures('default_application') |
|
1582 | 1582 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
1583 | 1583 | class TestAPISentryView(object): |
|
1584 | 1584 | def test_no_payload(self, default_application): |
|
1585 | 1585 | import colander |
|
1586 | 1586 | from appenlight.models.services.application import ApplicationService |
|
1587 | 1587 | from appenlight.views.api import sentry_compat |
|
1588 | 1588 | from appenlight.lib.request import JSONException |
|
1589 | 1589 | |
|
1590 | 1590 | context = DummyContext() |
|
1591 | 1591 | context.resource = ApplicationService.by_id(1) |
|
1592 | 1592 | request = testing.DummyRequest( |
|
1593 | 1593 | headers={'Content-Type': 'application/json'}) |
|
1594 | 1594 | request.unsafe_json_body = '' |
|
1595 | 1595 | request.context = context |
|
1596 | 1596 | route = mock.Mock() |
|
1597 | 1597 | route.name = 'api_sentry' |
|
1598 | 1598 | request.matched_route = route |
|
1599 | 1599 | with pytest.raises(JSONException): |
|
1600 | 1600 | sentry_compat(request) |
|
1601 | 1601 | |
|
1602 | 1602 | def test_java_client_payload(self): |
|
1603 | 1603 | from appenlight.views.api import sentry_compat |
|
1604 | 1604 | from appenlight.models.services.application import ApplicationService |
|
1605 | 1605 | from appenlight.models.report_group import ReportGroup |
|
1606 | 1606 | route = mock.Mock() |
|
1607 | 1607 | route.name = 'api_sentry' |
|
1608 | 1608 | request = pyramid.threadlocal.get_current_request() |
|
1609 | 1609 | context = DummyContext() |
|
1610 | 1610 | context.resource = ApplicationService.by_id(1) |
|
1611 | 1611 | request.context = context |
|
1612 | 1612 | request.matched_route = route |
|
1613 | 1613 | request.body = b'eJy1UmFr2zAQ/S0T+7BCLOzYThp/C6xjG6SDLd/GCBf57Ki' \ |
|
1614 | 1614 | b'RJSHJJiXkv+/UlC7p2kAZA33Ru6f33t1pz3BAHVayZhWr87' \ |
|
1615 | 1615 | b'JMs+I6q3MsrifFep2vc1iXM1HMpgBTNmIdeg8tEvlmJ9AGa' \ |
|
1616 | 1616 | b'fQ7goOkQoDOUmGcZpMkLZO0WGZFRadMiaHIR1EVnTMu3k3b' \ |
|
1617 | 1617 | b'oiMgqJrXpgOpOVjLLTiPkWAVhMa4jih3MAAholfWyUDAksz' \ |
|
1618 | 1618 | b'm1iopICbg8fWH52B8VWXZVYwHrWfV/jBipD2gW2no8CFMa5' \ |
|
1619 | 1619 | b'JButCDSjoQG6mR6LgLDojPPn/7sbydL25ep34HGl+y3DiE+' \ |
|
1620 | 1620 | b'lH0xXBXjMzFBsXW99SS7pWKYXRw91zqgK4BgZ4/DZVVP/cs' \ |
|
1621 | 1621 | b'3NuzSZPfAKqP2Cdj4tw7U/cKH0fEFeiWQFqE2FIHAmMPjaN' \ |
|
1622 | 1622 | b'Y/kHvbzY/JqdHUq9o/KxqQHkcsabX4piDuT4aK+pXG1ZNi/' \ |
|
1623 | 1623 | b'IwOpEyruXC1LiB3vPO3BmOOxTUCIqv5LIg5H12oh9cf0l+P' \ |
|
1624 | 1624 | b'MvP5P8kddgoFIEvMGzM5cRSD2aLJ6qTdHKm6nv9pPcRFba0' \ |
|
1625 | 1625 | b'Kd0eleeCFuGN+9JZ9TaXIn/V5JYMBvxXg3L6PwzSE4dkfOb' \ |
|
1626 | 1626 | b'w7CtfWmP85SdCs8OvA53fUV19cg==' |
|
1627 | 1627 | sentry_compat(request) |
|
1628 | 1628 | query = DBSession.query(ReportGroup) |
|
1629 | 1629 | report = query.first() |
|
1630 | 1630 | assert query.count() == 1 |
|
1631 | 1631 | assert report.total_reports == 1 |
|
1632 | 1632 | |
|
1633 | 1633 | def test_ruby_client_payload(self): |
|
1634 | 1634 | from appenlight.views.api import sentry_compat |
|
1635 | 1635 | from appenlight.models.services.application import ApplicationService |
|
1636 | 1636 | from appenlight.models.report_group import ReportGroup |
|
1637 | 1637 | from appenlight.tests.payload_examples import SENTRY_RUBY_ENCODED |
|
1638 | 1638 | route = mock.Mock() |
|
1639 | 1639 | route.name = 'api_sentry' |
|
1640 | 1640 | request = testing.DummyRequest( |
|
1641 | 1641 | headers={'Content-Type': 'application/octet-stream', |
|
1642 | 1642 | 'User-Agent': 'sentry-ruby/1.0.0', |
|
1643 | 1643 | 'X-Sentry-Auth': 'Sentry sentry_version=5, ' |
|
1644 | 1644 | 'sentry_client=raven-ruby/1.0.0, ' |
|
1645 | 1645 | 'sentry_timestamp=1462378483, ' |
|
1646 | 1646 | 'sentry_key=xxx, sentry_secret=xxx' |
|
1647 | 1647 | }) |
|
1648 | 1648 | context = DummyContext() |
|
1649 | 1649 | context.resource = ApplicationService.by_id(1) |
|
1650 | 1650 | request.context = context |
|
1651 | 1651 | request.matched_route = route |
|
1652 | 1652 | request.body = SENTRY_RUBY_ENCODED |
|
1653 | 1653 | sentry_compat(request) |
|
1654 | 1654 | query = DBSession.query(ReportGroup) |
|
1655 | 1655 | report = query.first() |
|
1656 | 1656 | assert query.count() == 1 |
|
1657 | 1657 | assert report.total_reports == 1 |
|
1658 | 1658 | |
|
1659 | 1659 | def test_python_client_decoded_payload(self): |
|
1660 | 1660 | from appenlight.views.api import sentry_compat |
|
1661 | 1661 | from appenlight.models.services.application import ApplicationService |
|
1662 | 1662 | from appenlight.models.report_group import ReportGroup |
|
1663 | 1663 | from appenlight.tests.payload_examples import SENTRY_PYTHON_PAYLOAD_7 |
|
1664 | 1664 | route = mock.Mock() |
|
1665 | 1665 | route.name = 'api_sentry' |
|
1666 | 1666 | request = pyramid.threadlocal.get_current_request() |
|
1667 | 1667 | context = DummyContext() |
|
1668 | 1668 | context.resource = ApplicationService.by_id(1) |
|
1669 | 1669 | request.context = context |
|
1670 | 1670 | request.matched_route = route |
|
1671 | 1671 | request.body = json.dumps(SENTRY_PYTHON_PAYLOAD_7).encode('utf8') |
|
1672 | 1672 | sentry_compat(request) |
|
1673 | 1673 | query = DBSession.query(ReportGroup) |
|
1674 | 1674 | report = query.first() |
|
1675 | 1675 | assert query.count() == 1 |
|
1676 | 1676 | assert report.total_reports == 1 |
|
1677 | 1677 | |
|
1678 | 1678 | def test_python_client_encoded_payload(self): |
|
1679 | 1679 | from appenlight.views.api import sentry_compat |
|
1680 | 1680 | from appenlight.models.services.application import ApplicationService |
|
1681 | 1681 | from appenlight.models.report_group import ReportGroup |
|
1682 | 1682 | from appenlight.tests.payload_examples import SENTRY_PYTHON_ENCODED |
|
1683 | 1683 | route = mock.Mock() |
|
1684 | 1684 | route.name = 'api_sentry' |
|
1685 | 1685 | request = testing.DummyRequest( |
|
1686 | 1686 | headers={'Content-Type': 'application/octet-stream', |
|
1687 | 1687 | 'Content-Encoding': 'deflate', |
|
1688 | 1688 | 'User-Agent': 'sentry-ruby/1.0.0', |
|
1689 | 1689 | 'X-Sentry-Auth': 'Sentry sentry_version=5, ' |
|
1690 | 1690 | 'sentry_client=raven-ruby/1.0.0, ' |
|
1691 | 1691 | 'sentry_timestamp=1462378483, ' |
|
1692 | 1692 | 'sentry_key=xxx, sentry_secret=xxx' |
|
1693 | 1693 | }) |
|
1694 | 1694 | context = DummyContext() |
|
1695 | 1695 | context.resource = ApplicationService.by_id(1) |
|
1696 | 1696 | request.context = context |
|
1697 | 1697 | request.matched_route = route |
|
1698 | 1698 | request.body = SENTRY_PYTHON_ENCODED |
|
1699 | 1699 | sentry_compat(request) |
|
1700 | 1700 | query = DBSession.query(ReportGroup) |
|
1701 | 1701 | report = query.first() |
|
1702 | 1702 | assert query.count() == 1 |
|
1703 | 1703 | assert report.total_reports == 1 |