@@ -1,1593 +1,1597 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU Affero General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
19 | 19 | # services, and proprietary license terms, please see |
|
20 | 20 | # https://rhodecode.com/licenses/ |
|
21 | 21 | |
|
22 | 22 | import copy |
|
23 | 23 | import logging |
|
24 | 24 | import mock |
|
25 | 25 | import pyramid |
|
26 | 26 | import pytest |
|
27 | 27 | import sqlalchemy as sa |
|
28 | 28 | import webob |
|
29 | 29 | |
|
30 | 30 | from datetime import datetime |
|
31 | 31 | from pyramid import testing |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | from appenlight.models import DBSession |
|
35 | 35 | from appenlight.lib.ext_json import json |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | log = logging.getLogger(__name__) |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | class DummyContext(object): |
|
42 | 42 | pass |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | @pytest.mark.usefixtures('base_app') |
|
46 | 46 | class BasicTest(object): |
|
47 | 47 | pass |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | @pytest.mark.usefixtures('base_app') |
|
51 | 51 | class TestMigration(object): |
|
52 | 52 | def test_migration(self): |
|
53 | 53 | assert 1 == 1 |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | class TestSentryProto_7(object): |
|
57 | 57 | def test_log_payload(self): |
|
58 | 58 | import appenlight.tests.payload_examples as payload_examples |
|
59 | 59 | from appenlight.lib.enums import ParsedSentryEventType |
|
60 | 60 | from appenlight.lib.utils.sentry import parse_sentry_event |
|
61 | 61 | event_dict, event_type = parse_sentry_event( |
|
62 | 62 | payload_examples.SENTRY_LOG_PAYLOAD_7) |
|
63 | 63 | assert ParsedSentryEventType.LOG == event_type |
|
64 | 64 | assert event_dict['log_level'] == 'CRITICAL' |
|
65 | 65 | assert event_dict['message'] == 'TEST from django logging' |
|
66 | 66 | assert event_dict['namespace'] == 'testlogger' |
|
67 | 67 | assert event_dict['request_id'] == '9a6172f2e6d2444582f83a6c333d9cfb' |
|
68 | 68 | assert event_dict['server'] == 'ergo-virtual-machine' |
|
69 | 69 | assert event_dict['date'] == datetime.utcnow().date().strftime( |
|
70 | 70 | '%Y-%m-%dT%H:%M:%SZ') |
|
71 | 71 | tags = [('site', 'example.com'), |
|
72 | 72 | ('sys.argv', ["'manage.py'", "'runserver'"]), |
|
73 | 73 | ('price', 6), |
|
74 | 74 | ('tag', "'extra'"), |
|
75 | 75 | ('dupa', True), |
|
76 | 76 | ('project', 'sentry'), |
|
77 | 77 | ('sentry_culprit', 'testlogger in index'), |
|
78 | 78 | ('sentry_language', 'python'), |
|
79 | 79 | ('sentry_release', 'test')] |
|
80 | 80 | assert sorted(event_dict['tags']) == sorted(tags) |
|
81 | 81 | |
|
82 | 82 | def test_report_payload(self): |
|
83 | 83 | import appenlight.tests.payload_examples as payload_examples |
|
84 | 84 | from appenlight.lib.enums import ParsedSentryEventType |
|
85 | 85 | from appenlight.lib.utils.sentry import parse_sentry_event |
|
86 | 86 | utcnow = datetime.utcnow().date().strftime('%Y-%m-%dT%H:%M:%SZ') |
|
87 | 87 | event_dict, event_type = parse_sentry_event( |
|
88 | 88 | payload_examples.SENTRY_PYTHON_PAYLOAD_7) |
|
89 | 89 | assert ParsedSentryEventType.ERROR_REPORT == event_type |
|
90 | 90 | assert event_dict['client'] == 'sentry' |
|
91 | 91 | assert event_dict[ |
|
92 | 92 | 'error'] == 'Exception: test 500 ' \ |
|
93 | 93 | '\u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105' |
|
94 | 94 | assert event_dict['language'] == 'python' |
|
95 | 95 | assert event_dict['ip'] == '127.0.0.1' |
|
96 | 96 | assert event_dict['request_id'] == '9fae652c8c1c4d6a8eee09260f613a98' |
|
97 | 97 | assert event_dict['server'] == 'ergo-virtual-machine' |
|
98 | 98 | assert event_dict['start_time'] == utcnow |
|
99 | 99 | assert event_dict['url'] == 'http://127.0.0.1:8000/error' |
|
100 | 100 | assert event_dict['user_agent'] == 'Mozilla/5.0 (X11; Linux x86_64) ' \ |
|
101 | 101 | 'AppleWebKit/537.36 (KHTML, ' \ |
|
102 | 102 | 'like Gecko) Chrome/47.0.2526.106 ' \ |
|
103 | 103 | 'Safari/537.36' |
|
104 | 104 | assert event_dict['view_name'] == 'djangoapp.views in error' |
|
105 | 105 | tags = [('site', 'example.com'), ('sentry_release', 'test')] |
|
106 | 106 | assert sorted(event_dict['tags']) == sorted(tags) |
|
107 | 107 | extra = [('sys.argv', ["'manage.py'", "'runserver'"]), |
|
108 | 108 | ('project', 'sentry')] |
|
109 | 109 | assert sorted(event_dict['extra']) == sorted(extra) |
|
110 | 110 | request = event_dict['request'] |
|
111 | 111 | assert request['url'] == 'http://127.0.0.1:8000/error' |
|
112 | 112 | assert request['cookies'] == {'appenlight': 'X'} |
|
113 | 113 | assert request['data'] is None |
|
114 | 114 | assert request['method'] == 'GET' |
|
115 | 115 | assert request['query_string'] == '' |
|
116 | 116 | assert request['env'] == {'REMOTE_ADDR': '127.0.0.1', |
|
117 | 117 | 'SERVER_NAME': 'localhost', |
|
118 | 118 | 'SERVER_PORT': '8000'} |
|
119 | 119 | assert request['headers'] == { |
|
120 | 120 | 'Accept': 'text/html,application/xhtml+xml,' |
|
121 | 121 | 'application/xml;q=0.9,image/webp,*/*;q=0.8', |
|
122 | 122 | 'Accept-Encoding': 'gzip, deflate, sdch', |
|
123 | 123 | 'Accept-Language': 'en-US,en;q=0.8,pl;q=0.6', |
|
124 | 124 | 'Connection': 'keep-alive', |
|
125 | 125 | 'Content-Length': '', |
|
126 | 126 | 'Content-Type': 'text/plain', |
|
127 | 127 | 'Cookie': 'appenlight=X', |
|
128 | 128 | 'Dnt': '1', |
|
129 | 129 | 'Host': '127.0.0.1:8000', |
|
130 | 130 | 'Upgrade-Insecure-Requests': '1', |
|
131 | 131 | 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) ' |
|
132 | 132 | 'AppleWebKit/537.36 (KHTML, like Gecko) ' |
|
133 | 133 | 'Chrome/47.0.2526.106 Safari/537.36'} |
|
134 | 134 | traceback = event_dict['traceback'] |
|
135 | 135 | assert traceback[0]['cline'] == 'response = wrapped_callback(request, ' \ |
|
136 | 136 | '*callback_args, **callback_kwargs)' |
|
137 | 137 | assert traceback[0]['file'] == 'django/core/handlers/base.py' |
|
138 | 138 | assert traceback[0]['fn'] == 'get_response' |
|
139 | 139 | assert traceback[0]['line'] == 111 |
|
140 | 140 | assert traceback[0]['module'] == 'django.core.handlers.base' |
|
141 | 141 | |
|
142 | 142 | assert traceback[1]['cline'] == "raise Exception(u'test 500 " \ |
|
143 | 143 | "\u0142\xf3\u201c\u0107\u201c\u0107" \ |
|
144 | 144 | "\u017c\u0105')" |
|
145 | 145 | assert traceback[1]['file'] == 'djangoapp/views.py' |
|
146 | 146 | assert traceback[1]['fn'] == 'error' |
|
147 | 147 | assert traceback[1]['line'] == 84 |
|
148 | 148 | assert traceback[1]['module'] == 'djangoapp.views' |
|
149 | 149 | assert sorted(traceback[1]['vars']) == sorted([ |
|
150 | 150 | ('c', |
|
151 | 151 | '<sqlite3.Cursor object at 0x7fe7c82af8f0>'), |
|
152 | 152 | ('request', |
|
153 | 153 | '<WSGIRequest at 0x140633490316304>'), |
|
154 | 154 | ('conn', |
|
155 | 155 | '<sqlite3.Connection object at 0x7fe7c8b23bf8>')]) |
|
156 | 156 | |
|
157 | 157 | |
|
158 | 158 | class TestAPIReports_0_5_Validation(object): |
|
159 | 159 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
160 | 160 | def test_no_payload(self, dummy_json): |
|
161 | 161 | import colander |
|
162 | 162 | from appenlight.validators import ReportListSchema_0_5 |
|
163 | 163 | utcnow = datetime.utcnow() |
|
164 | 164 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
165 | 165 | with pytest.raises(colander.Invalid): |
|
166 | 166 | schema.deserialize(dummy_json) |
|
167 | 167 | |
|
168 | 168 | def test_empty_item_payload(self): |
|
169 | 169 | dummy_json = [{}] |
|
170 | 170 | import colander |
|
171 | 171 | from appenlight.validators import ReportListSchema_0_5 |
|
172 | 172 | utcnow = datetime.utcnow() |
|
173 | 173 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
174 | 174 | with pytest.raises(colander.Invalid): |
|
175 | 175 | schema.deserialize(dummy_json) |
|
176 | 176 | |
|
177 | 177 | def test_minimal_payload(self): |
|
178 | 178 | dummy_json = [{'report_details': [{}]}] |
|
179 | 179 | from appenlight.validators import ReportListSchema_0_5 |
|
180 | 180 | utcnow = datetime.utcnow() |
|
181 | 181 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
182 | 182 | |
|
183 | 183 | deserialized = schema.deserialize(dummy_json) |
|
184 | 184 | |
|
185 | 185 | expected_deserialization = [ |
|
186 | 186 | {'language': 'unknown', |
|
187 | 187 | 'server': 'unknown', |
|
188 | 188 | 'occurences': 1, |
|
189 | 189 | 'priority': 5, |
|
190 | 190 | 'view_name': '', |
|
191 | 191 | 'client': 'unknown', |
|
192 | 192 | 'http_status': 200, |
|
193 | 193 | 'error': '', |
|
194 | 194 | 'tags': None, |
|
195 | 195 | 'username': '', |
|
196 | 196 | 'traceback': None, |
|
197 | 197 | 'extra': None, |
|
198 | 198 | 'url': '', |
|
199 | 199 | 'ip': None, |
|
200 | 200 | 'start_time': utcnow, |
|
201 | 201 | 'group_string': None, |
|
202 | 202 | 'request': {}, |
|
203 | 203 | 'request_stats': None, |
|
204 | 204 | 'end_time': None, |
|
205 | 205 | 'request_id': '', |
|
206 | 206 | 'message': '', |
|
207 | 207 | 'slow_calls': [], |
|
208 | 208 | 'user_agent': '' |
|
209 | 209 | } |
|
210 | 210 | ] |
|
211 | 211 | assert deserialized == expected_deserialization |
|
212 | 212 | |
|
213 | 213 | def test_full_payload(self): |
|
214 | 214 | import appenlight.tests.payload_examples as payload_examples |
|
215 | 215 | from appenlight.validators import ReportListSchema_0_5 |
|
216 | 216 | PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_5) |
|
217 | 217 | utcnow = datetime.utcnow() |
|
218 | 218 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
219 | 219 | PYTHON_PAYLOAD["tags"] = [("foo", 1), ("action", "test"), ("baz", 1.1), |
|
220 | 220 | ("date", |
|
221 | 221 | utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))] |
|
222 | 222 | dummy_json = [PYTHON_PAYLOAD] |
|
223 | 223 | deserialized = schema.deserialize(dummy_json)[0] |
|
224 | 224 | assert deserialized['error'] == PYTHON_PAYLOAD['error'] |
|
225 | 225 | assert deserialized['language'] == PYTHON_PAYLOAD['language'] |
|
226 | 226 | assert deserialized['server'] == PYTHON_PAYLOAD['server'] |
|
227 | 227 | assert deserialized['priority'] == PYTHON_PAYLOAD['priority'] |
|
228 | 228 | assert deserialized['view_name'] == PYTHON_PAYLOAD['view_name'] |
|
229 | 229 | assert deserialized['client'] == PYTHON_PAYLOAD['client'] |
|
230 | 230 | assert deserialized['http_status'] == PYTHON_PAYLOAD['http_status'] |
|
231 | 231 | assert deserialized['error'] == PYTHON_PAYLOAD['error'] |
|
232 | 232 | assert deserialized['occurences'] == PYTHON_PAYLOAD['occurences'] |
|
233 | 233 | assert deserialized['username'] == PYTHON_PAYLOAD['username'] |
|
234 | 234 | assert deserialized['traceback'] == PYTHON_PAYLOAD['traceback'] |
|
235 | 235 | assert deserialized['url'] == PYTHON_PAYLOAD['url'] |
|
236 | 236 | assert deserialized['ip'] == PYTHON_PAYLOAD['ip'] |
|
237 | 237 | assert deserialized['start_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \ |
|
238 | 238 | PYTHON_PAYLOAD['start_time'] |
|
239 | 239 | assert deserialized['ip'] == PYTHON_PAYLOAD['ip'] |
|
240 | 240 | assert deserialized['group_string'] is None |
|
241 | 241 | assert deserialized['request_stats'] == PYTHON_PAYLOAD['request_stats'] |
|
242 | 242 | assert deserialized['end_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \ |
|
243 | 243 | PYTHON_PAYLOAD['end_time'] |
|
244 | 244 | assert deserialized['request_id'] == PYTHON_PAYLOAD['request_id'] |
|
245 | 245 | assert deserialized['message'] == PYTHON_PAYLOAD['message'] |
|
246 | 246 | assert deserialized['user_agent'] == PYTHON_PAYLOAD['user_agent'] |
|
247 | 247 | assert deserialized['slow_calls'][0]['start'].strftime( |
|
248 | 248 | '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][ |
|
249 | 249 | 'start'] |
|
250 | 250 | assert deserialized['slow_calls'][0]['end'].strftime( |
|
251 | 251 | '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][ |
|
252 | 252 | 'end'] |
|
253 | 253 | assert deserialized['slow_calls'][0]['statement'] == \ |
|
254 | 254 | PYTHON_PAYLOAD['slow_calls'][0]['statement'] |
|
255 | 255 | assert deserialized['slow_calls'][0]['parameters'] == \ |
|
256 | 256 | PYTHON_PAYLOAD['slow_calls'][0]['parameters'] |
|
257 | 257 | assert deserialized['slow_calls'][0]['type'] == \ |
|
258 | 258 | PYTHON_PAYLOAD['slow_calls'][0]['type'] |
|
259 | 259 | assert deserialized['slow_calls'][0]['subtype'] == \ |
|
260 | 260 | PYTHON_PAYLOAD['slow_calls'][0]['subtype'] |
|
261 | 261 | assert deserialized['slow_calls'][0]['location'] == '' |
|
262 | 262 | assert deserialized['tags'] == [ |
|
263 | 263 | ('foo', 1), ('action', 'test'), |
|
264 | 264 | ('baz', 1.1), ('date', utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))] |
|
265 | 265 | |
|
266 | 266 | |
|
267 | 267 | @pytest.mark.usefixtures('log_schema') |
|
268 | 268 | class TestAPILogsValidation(object): |
|
269 | 269 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
270 | 270 | def test_no_payload(self, dummy_json, log_schema): |
|
271 | 271 | import colander |
|
272 | 272 | |
|
273 | 273 | with pytest.raises(colander.Invalid): |
|
274 | 274 | log_schema.deserialize(dummy_json) |
|
275 | 275 | |
|
276 | 276 | def test_minimal_payload(self, log_schema): |
|
277 | 277 | dummy_json = [{}] |
|
278 | 278 | deserialized = log_schema.deserialize(dummy_json)[0] |
|
279 | 279 | expected = {'log_level': 'UNKNOWN', |
|
280 | 280 | 'namespace': '', |
|
281 | 281 | 'server': 'unknown', |
|
282 | 282 | 'request_id': '', |
|
283 | 283 | 'primary_key': None, |
|
284 | 284 | 'date': datetime.utcnow(), |
|
285 | 285 | 'message': '', |
|
286 | 286 | 'tags': None} |
|
287 | 287 | assert deserialized['log_level'] == expected['log_level'] |
|
288 | 288 | assert deserialized['message'] == expected['message'] |
|
289 | 289 | assert deserialized['namespace'] == expected['namespace'] |
|
290 | 290 | assert deserialized['request_id'] == expected['request_id'] |
|
291 | 291 | assert deserialized['server'] == expected['server'] |
|
292 | 292 | assert deserialized['tags'] == expected['tags'] |
|
293 | 293 | assert deserialized['primary_key'] == expected['primary_key'] |
|
294 | 294 | |
|
295 | 295 | def test_normal_payload(self, log_schema): |
|
296 | 296 | import appenlight.tests.payload_examples as payload_examples |
|
297 | 297 | deserialized = log_schema.deserialize(payload_examples.LOG_EXAMPLES)[0] |
|
298 | 298 | expected = payload_examples.LOG_EXAMPLES[0] |
|
299 | 299 | assert deserialized['log_level'] == expected['log_level'] |
|
300 | 300 | assert deserialized['message'] == expected['message'] |
|
301 | 301 | assert deserialized['namespace'] == expected['namespace'] |
|
302 | 302 | assert deserialized['request_id'] == expected['request_id'] |
|
303 | 303 | assert deserialized['server'] == expected['server'] |
|
304 | 304 | assert deserialized['date'].strftime('%Y-%m-%dT%H:%M:%S.%f') == \ |
|
305 | 305 | expected['date'] |
|
306 | 306 | assert deserialized['tags'][0][0] == "tag_name" |
|
307 | 307 | assert deserialized['tags'][0][1] == "tag_value" |
|
308 | 308 | assert deserialized['tags'][1][0] == "tag_name2" |
|
309 | 309 | assert deserialized['tags'][1][1] == 2 |
|
310 | 310 | |
|
311 | 311 | def test_normal_payload_date_without_microseconds(self, log_schema): |
|
312 | 312 | import appenlight.tests.payload_examples as payload_examples |
|
313 | 313 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
314 | 314 | LOG_EXAMPLE[0]['date'] = datetime.utcnow().strftime( |
|
315 | 315 | '%Y-%m-%dT%H:%M:%S') |
|
316 | 316 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
317 | 317 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M:%S') == \ |
|
318 | 318 | LOG_EXAMPLE[0]['date'] |
|
319 | 319 | |
|
320 | 320 | def test_normal_payload_date_without_seconds(self, log_schema): |
|
321 | 321 | import appenlight.tests.payload_examples as payload_examples |
|
322 | 322 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
323 | 323 | LOG_EXAMPLE[0]['date'] = datetime.utcnow().date().strftime( |
|
324 | 324 | '%Y-%m-%dT%H:%M') |
|
325 | 325 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
326 | 326 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') == \ |
|
327 | 327 | LOG_EXAMPLE[0]['date'] |
|
328 | 328 | |
|
329 | 329 | def test_payload_empty_date(self, log_schema): |
|
330 | 330 | import appenlight.tests.payload_examples as payload_examples |
|
331 | 331 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
332 | 332 | LOG_EXAMPLE[0]['date'] = None |
|
333 | 333 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
334 | 334 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None |
|
335 | 335 | |
|
336 | 336 | def test_payload_no_date(self, log_schema): |
|
337 | 337 | import appenlight.tests.payload_examples as payload_examples |
|
338 | 338 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
339 | 339 | LOG_EXAMPLE[0].pop('date', None) |
|
340 | 340 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
341 | 341 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None |
|
342 | 342 | |
|
343 | 343 | |
|
344 | 344 | @pytest.mark.usefixtures('general_metrics_schema') |
|
345 | 345 | class TestAPIGeneralMetricsValidation(object): |
|
346 | 346 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
347 | 347 | def test_no_payload(self, dummy_json, general_metrics_schema): |
|
348 | 348 | import colander |
|
349 | 349 | |
|
350 | 350 | with pytest.raises(colander.Invalid): |
|
351 | 351 | general_metrics_schema.deserialize(dummy_json) |
|
352 | 352 | |
|
353 | 353 | def test_minimal_payload(self, general_metrics_schema): |
|
354 | 354 | dummy_json = [{'tags': [['counter_a', 15.5], ['counter_b', 63]]}] |
|
355 | 355 | deserialized = general_metrics_schema.deserialize(dummy_json)[0] |
|
356 | 356 | expected = {'namespace': '', |
|
357 | 357 | 'server_name': 'unknown', |
|
358 | 358 | 'tags': [('counter_a', 15.5), ('counter_b', 63)], |
|
359 | 359 | 'timestamp': datetime.utcnow()} |
|
360 | 360 | assert deserialized['namespace'] == expected['namespace'] |
|
361 | 361 | assert deserialized['server_name'] == expected['server_name'] |
|
362 | 362 | assert deserialized['tags'] == expected['tags'] |
|
363 | 363 | |
|
364 | 364 | def test_normal_payload(self, general_metrics_schema): |
|
365 | 365 | import appenlight.tests.payload_examples as payload_examples |
|
366 | 366 | dummy_json = [payload_examples.METRICS_PAYLOAD] |
|
367 | 367 | deserialized = general_metrics_schema.deserialize(dummy_json)[0] |
|
368 | 368 | expected = {'namespace': 'some.monitor', |
|
369 | 369 | 'server_name': 'server.name', |
|
370 | 370 | 'tags': [('usage_foo', 15.5), ('usage_bar', 63)], |
|
371 | 371 | 'timestamp': datetime.utcnow()} |
|
372 | 372 | assert deserialized['namespace'] == expected['namespace'] |
|
373 | 373 | assert deserialized['server_name'] == expected['server_name'] |
|
374 | 374 | assert deserialized['tags'] == expected['tags'] |
|
375 | 375 | |
|
376 | 376 | |
|
377 | 377 | @pytest.mark.usefixtures('request_metrics_schema') |
|
378 | 378 | class TestAPIRequestMetricsValidation(object): |
|
379 | 379 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
380 | 380 | def test_no_payload(self, dummy_json, request_metrics_schema): |
|
381 | 381 | import colander |
|
382 | 382 | |
|
383 | 383 | with pytest.raises(colander.Invalid): |
|
384 | 384 | print(request_metrics_schema.deserialize(dummy_json)) |
|
385 | 385 | |
|
386 | 386 | def test_normal_payload(self, request_metrics_schema): |
|
387 | 387 | import appenlight.tests.payload_examples as payload_examples |
|
388 | 388 | dummy_json = payload_examples.REQUEST_METRICS_EXAMPLES |
|
389 | 389 | deserialized = request_metrics_schema.deserialize(dummy_json)[0] |
|
390 | 390 | expected = {'metrics': [('dir/module:func', |
|
391 | 391 | {'custom': 0.0, |
|
392 | 392 | 'custom_calls': 0.0, |
|
393 | 393 | 'main': 0.01664, |
|
394 | 394 | 'nosql': 0.00061, |
|
395 | 395 | 'nosql_calls': 23.0, |
|
396 | 396 | 'remote': 0.0, |
|
397 | 397 | 'remote_calls': 0.0, |
|
398 | 398 | 'requests': 1, |
|
399 | 399 | 'sql': 0.00105, |
|
400 | 400 | 'sql_calls': 2.0, |
|
401 | 401 | 'tmpl': 0.0, |
|
402 | 402 | 'tmpl_calls': 0.0}), |
|
403 | 403 | ('SomeView.function', |
|
404 | 404 | {'custom': 0.0, |
|
405 | 405 | 'custom_calls': 0.0, |
|
406 | 406 | 'main': 0.647261, |
|
407 | 407 | 'nosql': 0.306554, |
|
408 | 408 | 'nosql_calls': 140.0, |
|
409 | 409 | 'remote': 0.0, |
|
410 | 410 | 'remote_calls': 0.0, |
|
411 | 411 | 'requests': 28, |
|
412 | 412 | 'sql': 0.0, |
|
413 | 413 | 'sql_calls': 0.0, |
|
414 | 414 | 'tmpl': 0.0, |
|
415 | 415 | 'tmpl_calls': 0.0})], |
|
416 | 416 | 'server': 'some.server.hostname', |
|
417 | 417 | 'timestamp': datetime.utcnow()} |
|
418 | 418 | assert deserialized['server'] == expected['server'] |
|
419 | 419 | metric = deserialized['metrics'][0] |
|
420 | 420 | expected_metric = expected['metrics'][0] |
|
421 | 421 | assert metric[0] == expected_metric[0] |
|
422 | 422 | assert sorted(metric[1].items()) == sorted(expected_metric[1].items()) |
|
423 | 423 | |
|
424 | 424 | |
|
425 | 425 | @pytest.mark.usefixtures('default_application') |
|
426 | 426 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
427 | 427 | class TestAPIReportsView(object): |
|
428 | 428 | def test_no_json_payload(self, default_application): |
|
429 | 429 | import colander |
|
430 | 430 | from appenlight.models.services.application import ApplicationService |
|
431 | 431 | from appenlight.views.api import reports_create |
|
432 | 432 | |
|
433 | 433 | context = DummyContext() |
|
434 | 434 | context.resource = ApplicationService.by_id(1) |
|
435 | 435 | request = testing.DummyRequest( |
|
436 | 436 | headers={'Content-Type': 'application/json'}) |
|
437 | 437 | request.unsafe_json_body = '' |
|
438 | 438 | request.context = context |
|
439 | 439 | route = mock.Mock() |
|
440 | 440 | route.name = 'api_reports' |
|
441 | 441 | request.matched_route = route |
|
442 | 442 | with pytest.raises(colander.Invalid): |
|
443 | 443 | response = reports_create(request) |
|
444 | 444 | |
|
445 | 445 | def test_single_proper_json_0_5_payload(self): |
|
446 | 446 | import appenlight.tests.payload_examples as payload_examples |
|
447 | 447 | from appenlight.views.api import reports_create |
|
448 | 448 | from appenlight.models.services.application import ApplicationService |
|
449 | 449 | from appenlight.models.report_group import ReportGroup |
|
450 | 450 | route = mock.Mock() |
|
451 | 451 | route.name = 'api_reports' |
|
452 | 452 | request = pyramid.threadlocal.get_current_request() |
|
453 | 453 | context = DummyContext() |
|
454 | 454 | context.resource = ApplicationService.by_id(1) |
|
455 | 455 | request.context = context |
|
456 | 456 | request.matched_route = route |
|
457 | 457 | PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5 |
|
458 | 458 | request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD)] |
|
459 | 459 | reports_create(request) |
|
460 | 460 | query = DBSession.query(ReportGroup) |
|
461 | 461 | report = query.first() |
|
462 | 462 | assert query.count() == 1 |
|
463 | 463 | assert report.total_reports == 1 |
|
464 | 464 | |
|
465 | 465 | def test_grouping_0_5(self): |
|
466 | 466 | import appenlight.tests.payload_examples as payload_examples |
|
467 | 467 | from appenlight.views.api import reports_create |
|
468 | 468 | from appenlight.models.services.application import ApplicationService |
|
469 | 469 | from appenlight.models.report_group import ReportGroup |
|
470 | 470 | route = mock.Mock() |
|
471 | 471 | route.name = 'api_reports' |
|
472 | 472 | request = pyramid.threadlocal.get_current_request() |
|
473 | 473 | context = DummyContext() |
|
474 | 474 | context.resource = ApplicationService.by_id(1) |
|
475 | 475 | request.context = context |
|
476 | 476 | request.matched_route = route |
|
477 | 477 | PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5 |
|
478 | 478 | request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD), |
|
479 | 479 | copy.deepcopy(PYTHON_PAYLOAD)] |
|
480 | 480 | reports_create(request) |
|
481 | 481 | query = DBSession.query(ReportGroup) |
|
482 | 482 | report = query.first() |
|
483 | 483 | assert query.count() == 1 |
|
484 | 484 | assert report.total_reports == 2 |
|
485 | 485 | |
|
486 | 486 | def test_grouping_different_reports_0_5(self): |
|
487 | 487 | import appenlight.tests.payload_examples as payload_examples |
|
488 | 488 | from appenlight.views.api import reports_create |
|
489 | 489 | from appenlight.models.services.application import ApplicationService |
|
490 | 490 | from appenlight.models.report_group import ReportGroup |
|
491 | 491 | route = mock.Mock() |
|
492 | 492 | route.name = 'api_reports' |
|
493 | 493 | request = pyramid.threadlocal.get_current_request() |
|
494 | 494 | context = DummyContext() |
|
495 | 495 | context.resource = ApplicationService.by_id(1) |
|
496 | 496 | request.context = context |
|
497 | 497 | request.matched_route = route |
|
498 | 498 | PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5 |
|
499 | 499 | PARSED_REPORT_404 = payload_examples.PARSED_REPORT_404 |
|
500 | 500 | request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD), |
|
501 | 501 | copy.deepcopy(PARSED_REPORT_404)] |
|
502 | 502 | reports_create(request) |
|
503 | 503 | query = DBSession.query(ReportGroup) |
|
504 | 504 | report = query.first() |
|
505 | 505 | assert query.count() == 2 |
|
506 | 506 | assert report.total_reports == 1 |
|
507 | 507 | |
|
508 | 508 | |
|
509 | 509 | @pytest.mark.usefixtures('default_application') |
|
510 | 510 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
511 | 511 | class TestAirbrakeXMLView(object): |
|
512 | 512 | |
|
513 | 513 | def test_normal_payload_parsing(self): |
|
514 | 514 | import datetime |
|
515 | 515 | import defusedxml.ElementTree as ElementTree |
|
516 | 516 | import appenlight.tests.payload_examples as payload_examples |
|
517 | 517 | from appenlight.lib.utils.airbrake import parse_airbrake_xml |
|
518 | 518 | from appenlight.validators import ReportListSchema_0_5 |
|
519 | 519 | |
|
520 | 520 | context = DummyContext() |
|
521 | 521 | request = testing.DummyRequest( |
|
522 | 522 | headers={'Content-Type': 'application/xml'}) |
|
523 | 523 | request.context = context |
|
524 | 524 | request.context.possibly_public = False |
|
525 | 525 | root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE) |
|
526 | 526 | request.context.airbrake_xml_etree = root |
|
527 | 527 | error_dict = parse_airbrake_xml(request) |
|
528 | 528 | schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow()) |
|
529 | 529 | deserialized_report = schema.deserialize([error_dict])[0] |
|
530 | 530 | assert deserialized_report['client'] == 'Airbrake Notifier' |
|
531 | 531 | assert deserialized_report['error'] == 'NameError: undefined local variable or method `sdfdfdf\' for #<#<Class:0x000000039a8b90>:0x00000002c53df0>' |
|
532 | 532 | assert deserialized_report['http_status'] == 500 |
|
533 | 533 | assert deserialized_report['language'] == 'unknown' |
|
534 | 534 | assert deserialized_report['message'] == '' |
|
535 | 535 | assert deserialized_report['occurences'] == 1 |
|
536 | 536 | assert deserialized_report['priority'] == 5 |
|
537 | 537 | d_request = deserialized_report['request'] |
|
538 | 538 | assert d_request['GET'] == {'test': '1234'} |
|
539 | 539 | assert d_request['action_dispatch.request.parameters'] == { |
|
540 | 540 | 'action': 'index', |
|
541 | 541 | 'controller': 'welcome', |
|
542 | 542 | 'test': '1234'} |
|
543 | 543 | assert deserialized_report['request_id'] == 'c11b2267f3ad8b00a1768cae35559fa1' |
|
544 | 544 | assert deserialized_report['server'] == 'ergo-desktop' |
|
545 | 545 | assert deserialized_report['traceback'][0] == { |
|
546 | 546 | 'cline': 'block in start_thread', |
|
547 | 547 | 'file': '/home/ergo/.rbenv/versions/1.9.3-p327/lib/ruby/1.9.1/webrick/server.rb', |
|
548 | 548 | 'fn': 'block in start_thread', |
|
549 | 549 | 'line': '191', |
|
550 | 550 | 'module': '', |
|
551 | 551 | 'vars': {}} |
|
552 | 552 | assert deserialized_report['traceback'][-1] == { |
|
553 | 553 | 'cline': '_app_views_welcome_index_html_erb___2570061166873166679_31748940', |
|
554 | 554 | 'file': '[PROJECT_ROOT]/app/views/welcome/index.html.erb', |
|
555 | 555 | 'fn': '_app_views_welcome_index_html_erb___2570061166873166679_31748940', |
|
556 | 556 | 'line': '3', |
|
557 | 557 | 'module': '', |
|
558 | 558 | 'vars': {}} |
|
559 | 559 | assert deserialized_report['url'] == 'http://0.0.0.0:3000/welcome/index?test=1234' |
|
560 | 560 | assert deserialized_report['view_name'] == 'welcome:index' |
|
561 | 561 | |
|
562 | 562 | def test_normal_payload_view(self): |
|
563 | 563 | import defusedxml.ElementTree as ElementTree |
|
564 | 564 | import appenlight.tests.payload_examples as payload_examples |
|
565 | 565 | |
|
566 | 566 | from appenlight.models.services.application import ApplicationService |
|
567 | 567 | from appenlight.views.api import airbrake_xml_compat |
|
568 | 568 | |
|
569 | 569 | context = DummyContext() |
|
570 | 570 | context.resource = ApplicationService.by_id(1) |
|
571 | 571 | request = testing.DummyRequest( |
|
572 | 572 | headers={'Content-Type': 'application/xml'}) |
|
573 | 573 | request.context = context |
|
574 | 574 | request.context.possibly_public = False |
|
575 | 575 | root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE) |
|
576 | 576 | request.context.airbrake_xml_etree = root |
|
577 | 577 | route = mock.Mock() |
|
578 | 578 | route.name = 'api_airbrake' |
|
579 | 579 | request.matched_route = route |
|
580 | 580 | result = airbrake_xml_compat(request) |
|
581 | 581 | assert '<notice><id>' in result |
|
582 | 582 | |
|
583 | 583 | |
|
584 | 584 | @pytest.mark.usefixtures('default_application') |
|
585 | 585 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
586 | 586 | class TestAPILogView(object): |
|
587 | 587 | def test_no_json_payload(self, base_app): |
|
588 | 588 | import colander |
|
589 | 589 | from appenlight.models.services.application import ApplicationService |
|
590 | 590 | from appenlight.views.api import logs_create |
|
591 | 591 | |
|
592 | 592 | context = DummyContext() |
|
593 | 593 | context.resource = ApplicationService.by_id(1) |
|
594 | 594 | request = testing.DummyRequest( |
|
595 | 595 | headers={'Content-Type': 'application/json'}) |
|
596 | 596 | request.context = context |
|
597 | 597 | request.registry = base_app.registry |
|
598 | 598 | request.unsafe_json_body = '' |
|
599 | 599 | route = mock.Mock() |
|
600 | 600 | route.name = 'api_logs' |
|
601 | 601 | request.matched_route = route |
|
602 | 602 | with pytest.raises(colander.Invalid): |
|
603 | 603 | response = logs_create(request) |
|
604 | 604 | |
|
605 | 605 | def test_single_json_payload(self): |
|
606 | 606 | import appenlight.tests.payload_examples as payload_examples |
|
607 | 607 | from appenlight.models.log import Log |
|
608 | 608 | from appenlight.views.api import logs_create |
|
609 | 609 | from appenlight.models.services.application import ApplicationService |
|
610 | 610 | route = mock.Mock() |
|
611 | 611 | route.name = 'api_logs' |
|
612 | 612 | request = pyramid.threadlocal.get_current_request() |
|
613 | 613 | context = DummyContext() |
|
614 | 614 | context.resource = ApplicationService.by_id(1) |
|
615 | 615 | request.context = context |
|
616 | 616 | request.matched_route = route |
|
617 | 617 | request.unsafe_json_body = [copy.deepcopy( |
|
618 | 618 | payload_examples.LOG_EXAMPLES[0])] |
|
619 | 619 | logs_create(request) |
|
620 | 620 | query = DBSession.query(Log) |
|
621 | 621 | log = query.first() |
|
622 | 622 | assert query.count() == 1 |
|
623 | 623 | assert log.message == "OMG ValueError happened" |
|
624 | 624 | |
|
625 | 625 | def test_multiple_json_payload(self): |
|
626 | 626 | import appenlight.tests.payload_examples as payload_examples |
|
627 | 627 | from appenlight.models.log import Log |
|
628 | 628 | from appenlight.views.api import logs_create |
|
629 | 629 | from appenlight.models.services.application import ApplicationService |
|
630 | 630 | route = mock.Mock() |
|
631 | 631 | route.name = 'api_logs' |
|
632 | 632 | request = pyramid.threadlocal.get_current_request() |
|
633 | 633 | context = DummyContext() |
|
634 | 634 | context.resource = ApplicationService.by_id(1) |
|
635 | 635 | request.context = context |
|
636 | 636 | request.matched_route = route |
|
637 | 637 | LOG_PAYLOAD = payload_examples.LOG_EXAMPLES[0] |
|
638 | 638 | LOG_PAYLOAD2 = payload_examples.LOG_EXAMPLES[1] |
|
639 | 639 | request.unsafe_json_body = copy.deepcopy([LOG_PAYLOAD, LOG_PAYLOAD2]) |
|
640 | 640 | logs_create(request) |
|
641 | 641 | query = DBSession.query(Log).order_by(sa.asc(Log.log_id)) |
|
642 | 642 | assert query.count() == 2 |
|
643 | 643 | assert query[0].message == "OMG ValueError happened" |
|
644 | 644 | assert query[1].message == "OMG ValueError happened2" |
|
645 | 645 | |
|
646 | 646 | def test_public_key_rewriting(self): |
|
647 | 647 | import appenlight.tests.payload_examples as payload_examples |
|
648 | 648 | from appenlight.models.log import Log |
|
649 | 649 | from appenlight.views.api import logs_create |
|
650 | 650 | from appenlight.models.services.application import ApplicationService |
|
651 | 651 | route = mock.Mock() |
|
652 | 652 | route.name = 'api_logs' |
|
653 | 653 | request = pyramid.threadlocal.get_current_request() |
|
654 | 654 | context = DummyContext() |
|
655 | 655 | context.resource = ApplicationService.by_id(1) |
|
656 | 656 | request.context = context |
|
657 | 657 | request.matched_route = route |
|
658 | 658 | |
|
659 | 659 | LOG_PAYLOAD = copy.deepcopy(payload_examples.LOG_EXAMPLES[0]) |
|
660 | 660 | LOG_PAYLOAD2 = copy.deepcopy(payload_examples.LOG_EXAMPLES[1]) |
|
661 | 661 | LOG_PAYLOAD['primary_key'] = 'X2' |
|
662 | 662 | LOG_PAYLOAD2['primary_key'] = 'X2' |
|
663 | 663 | request.unsafe_json_body = [LOG_PAYLOAD, LOG_PAYLOAD2] |
|
664 | 664 | logs_create(request) |
|
665 | 665 | |
|
666 | 666 | query = DBSession.query(Log).order_by(sa.asc(Log.log_id)) |
|
667 | 667 | assert query.count() == 1 |
|
668 | 668 | assert query[0].message == "OMG ValueError happened2" |
|
669 | 669 | |
|
670 | 670 | @pytest.mark.usefixtures('default_application') |
|
671 | 671 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
672 | 672 | class TestAPIGeneralMetricsView(object): |
|
673 | 673 | def test_no_json_payload(self, base_app): |
|
674 | 674 | import colander |
|
675 | 675 | from appenlight.models.services.application import ApplicationService |
|
676 | 676 | from appenlight.views.api import general_metrics_create |
|
677 | 677 | route = mock.Mock() |
|
678 | 678 | route.name = 'api_general_metrics' |
|
679 | 679 | context = DummyContext() |
|
680 | 680 | context.resource = ApplicationService.by_id(1) |
|
681 | 681 | request = testing.DummyRequest( |
|
682 | 682 | headers={'Content-Type': 'application/json'}) |
|
683 | 683 | request.context = context |
|
684 | 684 | request.registry = base_app.registry |
|
685 | 685 | request.unsafe_json_body = '' |
|
686 | 686 | request.matched_route = route |
|
687 | 687 | with pytest.raises(colander.Invalid): |
|
688 | 688 | general_metrics_create(request) |
|
689 | 689 | |
|
690 | 690 | def test_single_json_payload(self): |
|
691 | 691 | import appenlight.tests.payload_examples as payload_examples |
|
692 | 692 | from appenlight.models.metric import Metric |
|
693 | 693 | from appenlight.views.api import general_metrics_create |
|
694 | 694 | from appenlight.models.services.application import ApplicationService |
|
695 | 695 | route = mock.Mock() |
|
696 | 696 | route.name = 'api_general_metric' |
|
697 | 697 | request = pyramid.threadlocal.get_current_request() |
|
698 | 698 | request.matched_route = route |
|
699 | 699 | context = DummyContext() |
|
700 | 700 | context.resource = ApplicationService.by_id(1) |
|
701 | 701 | request.context = context |
|
702 | 702 | request.unsafe_json_body = payload_examples.METRICS_PAYLOAD |
|
703 | 703 | general_metrics_create(request) |
|
704 | 704 | query = DBSession.query(Metric) |
|
705 | 705 | metric = query.first() |
|
706 | 706 | assert query.count() == 1 |
|
707 | 707 | assert metric.namespace == 'some.monitor' |
|
708 | 708 | |
|
709 | 709 | def test_multiple_json_payload(self): |
|
710 | 710 | import appenlight.tests.payload_examples as payload_examples |
|
711 | 711 | from appenlight.models.metric import Metric |
|
712 | 712 | from appenlight.views.api import general_metrics_create |
|
713 | 713 | from appenlight.models.services.application import ApplicationService |
|
714 | 714 | route = mock.Mock() |
|
715 | 715 | route.name = 'api_general_metrics' |
|
716 | 716 | request = pyramid.threadlocal.get_current_request() |
|
717 | 717 | request.matched_route = route |
|
718 | 718 | context = DummyContext() |
|
719 | 719 | context.resource = ApplicationService.by_id(1) |
|
720 | 720 | request.context = context |
|
721 | 721 | request.unsafe_json_body = [ |
|
722 | 722 | copy.deepcopy(payload_examples.METRICS_PAYLOAD), |
|
723 | 723 | copy.deepcopy(payload_examples.METRICS_PAYLOAD), |
|
724 | 724 | ] |
|
725 | 725 | general_metrics_create(request) |
|
726 | 726 | query = DBSession.query(Metric) |
|
727 | 727 | metric = query.first() |
|
728 | 728 | assert query.count() == 2 |
|
729 | 729 | assert metric.namespace == 'some.monitor' |
|
730 | 730 | |
|
731 | 731 | |
|
732 | 732 | class TestGroupingMessageReplacements(object): |
|
733 | 733 | def replace_default_repr_python(self): |
|
734 | 734 | test_str = ''' |
|
735 | 735 | ConnectionError: ConnectionError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) caused by: ConnectTimeoutError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) |
|
736 | 736 | ''' |
|
737 | 737 | regex = r'<(.*?) object at (.*?)>' |
|
738 | 738 | |
|
739 | 739 | |
|
740 | 740 | class TestRulesKeyGetter(object): |
|
741 | 741 | def test_default_dict_getter_top_key(self): |
|
742 | 742 | from appenlight.lib.rule import Rule |
|
743 | 743 | struct = { |
|
744 | 744 | "a": { |
|
745 | 745 | "b": 'b', |
|
746 | 746 | "c": { |
|
747 | 747 | "d": 'd', |
|
748 | 748 | "g": { |
|
749 | 749 | "h": 'h' |
|
750 | 750 | } |
|
751 | 751 | }, |
|
752 | 752 | "e": 'e' |
|
753 | 753 | }, |
|
754 | 754 | "f": 'f' |
|
755 | 755 | } |
|
756 | 756 | result = Rule.default_dict_struct_getter(struct, "a") |
|
757 | 757 | assert result == struct['a'] |
|
758 | 758 | |
|
759 | 759 | def test_default_dict_getter_sub_key(self): |
|
760 | 760 | from appenlight.lib.rule import Rule |
|
761 | 761 | struct = { |
|
762 | 762 | "a": { |
|
763 | 763 | "b": 'b', |
|
764 | 764 | "c": { |
|
765 | 765 | "d": 'd', |
|
766 | 766 | "g": { |
|
767 | 767 | "h": 'h' |
|
768 | 768 | } |
|
769 | 769 | }, |
|
770 | 770 | "e": 'e' |
|
771 | 771 | }, |
|
772 | 772 | "f": 'f' |
|
773 | 773 | } |
|
774 | 774 | result = Rule.default_dict_struct_getter(struct, 'a:b') |
|
775 | 775 | assert result == struct['a']['b'] |
|
776 | 776 | result = Rule.default_dict_struct_getter(struct, 'a:c:d') |
|
777 | 777 | assert result == struct['a']['c']['d'] |
|
778 | 778 | |
|
779 | 779 | def test_default_obj_getter_top_key(self): |
|
780 | 780 | from appenlight.lib.rule import Rule |
|
781 | 781 | class TestStruct(object): |
|
782 | 782 | def __init__(self, a, b): |
|
783 | 783 | self.a = a |
|
784 | 784 | self.b = b |
|
785 | 785 | |
|
786 | 786 | struct = TestStruct(a='a', |
|
787 | 787 | b=TestStruct(a='x', b='y')) |
|
788 | 788 | result = Rule.default_obj_struct_getter(struct, "a") |
|
789 | 789 | assert result == struct.a |
|
790 | 790 | |
|
791 | 791 | def test_default_obj_getter_sub_key(self): |
|
792 | 792 | from appenlight.lib.rule import Rule |
|
793 | 793 | class TestStruct(object): |
|
794 | 794 | def __init__(self, name, a, b): |
|
795 | 795 | self.name = name |
|
796 | 796 | self.a = a |
|
797 | 797 | self.b = b |
|
798 | 798 | |
|
799 | 799 | def __repr__(self): |
|
800 | 800 | return '<obj {}>'.format(self.name) |
|
801 | 801 | |
|
802 | 802 | c = TestStruct('c', a=5, b='z') |
|
803 | 803 | b = TestStruct('b', a=c, b='y') |
|
804 | 804 | struct = TestStruct('a', a='a', b=b) |
|
805 | 805 | result = Rule.default_obj_struct_getter(struct, 'b:b') |
|
806 | 806 | assert result == struct.b.b |
|
807 | 807 | result = Rule.default_obj_struct_getter(struct, 'b:a:b') |
|
808 | 808 | assert result == struct.b.a.b |
|
809 | 809 | |
|
810 | 810 | |
|
811 | 811 | @pytest.mark.usefixtures('report_type_matrix') |
|
812 | 812 | class TestRulesParsing(): |
|
813 | 813 | @pytest.mark.parametrize("op, struct_value, test_value, match_result", [ |
|
814 | 814 | ('eq', 500, 500, True), |
|
815 | 815 | ('eq', 600, 500, False), |
|
816 | 816 | ('eq', 300, 500, False), |
|
817 | 817 | ('eq', "300", 500, False), |
|
818 | 818 | ('eq', "600", 500, False), |
|
819 | 819 | ('eq', "500", 500, True), |
|
820 | 820 | ('ne', 500, 500, False), |
|
821 | 821 | ('ne', 600, 500, True), |
|
822 | 822 | ('ne', 300, 500, True), |
|
823 | 823 | ('ne', "300", 500, True), |
|
824 | 824 | ('ne', "600", 500, True), |
|
825 | 825 | ('ne', "500", 500, False), |
|
826 | 826 | ('ge', 500, 500, True), |
|
827 | 827 | ('ge', 600, 500, True), |
|
828 | 828 | ('ge', 499, 500, False), |
|
829 | 829 | ('gt', 499, 500, False), |
|
830 | 830 | ('gt', 500, 500, False), |
|
831 | 831 | ('gt', 501, 500, True), |
|
832 | 832 | ('le', 499, 500, True), |
|
833 | 833 | ('le', 500, 500, True), |
|
834 | 834 | ('le', 501, 500, False), |
|
835 | 835 | ('lt', 499, 500, True), |
|
836 | 836 | ('lt', 500, 500, False), |
|
837 | 837 | ('lt', 501, 500, False), |
|
838 | 838 | ]) |
|
839 | 839 | def test_single_op_int(self, op, struct_value, test_value, match_result, |
|
840 | 840 | report_type_matrix): |
|
841 | 841 | from appenlight.lib.rule import Rule |
|
842 | 842 | rule_config = { |
|
843 | 843 | "op": op, |
|
844 | 844 | "field": "http_status", |
|
845 | 845 | "value": test_value |
|
846 | 846 | } |
|
847 | 847 | rule = Rule(rule_config, report_type_matrix) |
|
848 | 848 | |
|
849 | 849 | data = { |
|
850 | 850 | "http_status": struct_value |
|
851 | 851 | } |
|
852 | 852 | assert rule.match(data) is match_result |
|
853 | 853 | |
|
854 | 854 | @pytest.mark.parametrize("op, struct_value, test_value, match_result", [ |
|
855 | 855 | ('ge', "500.01", 500, True), |
|
856 | 856 | ('ge', "500.01", 500.02, False), |
|
857 | 857 | ('le', "500.01", 500.02, True) |
|
858 | 858 | ]) |
|
859 | 859 | def test_single_op_float(self, op, struct_value, test_value, match_result, |
|
860 | 860 | report_type_matrix): |
|
861 | 861 | from appenlight.lib.rule import Rule |
|
862 | 862 | rule_config = { |
|
863 | 863 | "op": op, |
|
864 | 864 | "field": "duration", |
|
865 | 865 | "value": test_value |
|
866 | 866 | } |
|
867 | 867 | rule = Rule(rule_config, report_type_matrix) |
|
868 | 868 | |
|
869 | 869 | data = { |
|
870 | 870 | "duration": struct_value |
|
871 | 871 | } |
|
872 | 872 | assert rule.match(data) is match_result |
|
873 | 873 | |
|
874 | 874 | @pytest.mark.parametrize("op, struct_value, test_value, match_result", [ |
|
875 | 875 | ('contains', 'foo bar baz', 'foo', True), |
|
876 | 876 | ('contains', 'foo bar baz', 'bar', True), |
|
877 | 877 | ('contains', 'foo bar baz', 'dupa', False), |
|
878 | 878 | ('startswith', 'foo bar baz', 'foo', True), |
|
879 | 879 | ('startswith', 'foo bar baz', 'bar', False), |
|
880 | 880 | ('endswith', 'foo bar baz', 'baz', True), |
|
881 | 881 | ('endswith', 'foo bar baz', 'bar', False), |
|
882 | 882 | ]) |
|
883 | 883 | def test_single_op_string(self, op, struct_value, test_value, |
|
884 | 884 | match_result, report_type_matrix): |
|
885 | 885 | from appenlight.lib.rule import Rule |
|
886 | 886 | rule_config = { |
|
887 | 887 | "op": op, |
|
888 | 888 | "field": "error", |
|
889 | 889 | "value": test_value |
|
890 | 890 | } |
|
891 | 891 | rule = Rule(rule_config, report_type_matrix) |
|
892 | 892 | |
|
893 | 893 | data = { |
|
894 | 894 | "error": struct_value |
|
895 | 895 | } |
|
896 | 896 | assert rule.match(data) is match_result |
|
897 | 897 | |
|
898 | 898 | @pytest.mark.parametrize("field, value, s_type", [ |
|
899 | 899 | ('field_unicode', 500, str), |
|
900 | 900 | ('field_unicode', 500.0, str), |
|
901 | 901 | ('field_unicode', "500", str), |
|
902 | 902 | ('field_int', "500", int), |
|
903 | 903 | ('field_int', 500, int), |
|
904 | 904 | ('field_int', 500.0, int), |
|
905 | 905 | ('field_float', "500", float), |
|
906 | 906 | ('field_float', 500, float), |
|
907 | 907 | ('field_float', 500.0, float), |
|
908 | 908 | ]) |
|
909 | 909 | def test_type_normalization(self, field, value, s_type): |
|
910 | 910 | from appenlight.lib.rule import Rule |
|
911 | 911 | type_matrix = { |
|
912 | 912 | 'field_unicode': {"type": 'unicode'}, |
|
913 | 913 | 'field_float': {"type": 'float'}, |
|
914 | 914 | 'field_int': {"type": 'int'}, |
|
915 | 915 | } |
|
916 | 916 | |
|
917 | 917 | rule = Rule({}, type_matrix) |
|
918 | 918 | n_value = rule.normalized_type(field, value) |
|
919 | 919 | assert isinstance(n_value, s_type) is True |
|
920 | 920 | |
|
921 | 921 | |
|
922 | 922 | @pytest.mark.usefixtures('report_type_matrix') |
|
923 | 923 | class TestNestedRuleParsing(): |
|
924 | 924 | |
|
925 | 925 | @pytest.mark.parametrize("data, result", [ |
|
926 | 926 | ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, |
|
927 | 927 | False), |
|
928 | 928 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
929 | 929 | False), |
|
930 | 930 | ({"http_status": 500, "group": {"priority": 1, "occurences": 11}}, |
|
931 | 931 | False), |
|
932 | 932 | ({"http_status": 101, "group": {"priority": 3, "occurences": 5}}, |
|
933 | 933 | True), |
|
934 | 934 | ]) |
|
935 | 935 | def test_NOT_rule(self, data, result, report_type_matrix): |
|
936 | 936 | from appenlight.lib.rule import Rule |
|
937 | 937 | rule_config = { |
|
938 | 938 | "field": "__NOT__", |
|
939 | 939 | "rules": [ |
|
940 | 940 | { |
|
941 | 941 | "op": "ge", |
|
942 | 942 | "field": "group:occurences", |
|
943 | 943 | "value": "10" |
|
944 | 944 | }, |
|
945 | 945 | { |
|
946 | 946 | "op": "ge", |
|
947 | 947 | "field": "group:priority", |
|
948 | 948 | "value": "4" |
|
949 | 949 | } |
|
950 | 950 | ] |
|
951 | 951 | } |
|
952 | 952 | |
|
953 | 953 | rule = Rule(rule_config, report_type_matrix) |
|
954 | 954 | assert rule.match(data) is result |
|
955 | 955 | |
|
956 | 956 | @pytest.mark.parametrize("data, result", [ |
|
957 | 957 | ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, |
|
958 | 958 | True), |
|
959 | 959 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
960 | 960 | True), |
|
961 | 961 | ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, |
|
962 | 962 | True), |
|
963 | 963 | ({"http_status": 101, "group": {"priority": 3, "occurences": 11}}, |
|
964 | 964 | False), |
|
965 | 965 | ]) |
|
966 | 966 | def test_nested_OR_AND_rule(self, data, result, report_type_matrix): |
|
967 | 967 | from appenlight.lib.rule import Rule |
|
968 | 968 | rule_config = { |
|
969 | 969 | "field": "__OR__", |
|
970 | 970 | "rules": [ |
|
971 | 971 | { |
|
972 | 972 | "field": "__AND__", |
|
973 | 973 | "rules": [ |
|
974 | 974 | { |
|
975 | 975 | "op": "ge", |
|
976 | 976 | "field": "group:occurences", |
|
977 | 977 | "value": "10" |
|
978 | 978 | }, |
|
979 | 979 | { |
|
980 | 980 | "op": "ge", |
|
981 | 981 | "field": "group:priority", |
|
982 | 982 | "value": "4" |
|
983 | 983 | } |
|
984 | 984 | ] |
|
985 | 985 | }, |
|
986 | 986 | { |
|
987 | 987 | "op": "eq", |
|
988 | 988 | "field": "http_status", |
|
989 | 989 | "value": "500" |
|
990 | 990 | } |
|
991 | 991 | ] |
|
992 | 992 | } |
|
993 | 993 | |
|
994 | 994 | rule = Rule(rule_config, report_type_matrix) |
|
995 | 995 | assert rule.match(data) is result |
|
996 | 996 | |
|
997 | 997 | @pytest.mark.parametrize("data, result", [ |
|
998 | 998 | ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, |
|
999 | 999 | True), |
|
1000 | 1000 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
1001 | 1001 | True), |
|
1002 | 1002 | ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, |
|
1003 | 1003 | True), |
|
1004 | 1004 | ({"http_status": 101, "group": {"priority": 3, "occurences": 1}}, |
|
1005 | 1005 | False), |
|
1006 | 1006 | ]) |
|
1007 | 1007 | def test_nested_OR_OR_rule(self, data, result, report_type_matrix): |
|
1008 | 1008 | from appenlight.lib.rule import Rule |
|
1009 | 1009 | rule_config = { |
|
1010 | 1010 | "field": "__OR__", |
|
1011 | 1011 | "rules": [ |
|
1012 | 1012 | {"field": "__OR__", |
|
1013 | 1013 | "rules": [ |
|
1014 | 1014 | {"op": "ge", |
|
1015 | 1015 | "field": "group:occurences", |
|
1016 | 1016 | "value": "10" |
|
1017 | 1017 | }, |
|
1018 | 1018 | {"op": "ge", |
|
1019 | 1019 | "field": "group:priority", |
|
1020 | 1020 | "value": "4" |
|
1021 | 1021 | } |
|
1022 | 1022 | ] |
|
1023 | 1023 | }, |
|
1024 | 1024 | {"op": "eq", |
|
1025 | 1025 | "field": "http_status", |
|
1026 | 1026 | "value": "500" |
|
1027 | 1027 | } |
|
1028 | 1028 | ] |
|
1029 | 1029 | } |
|
1030 | 1030 | |
|
1031 | 1031 | rule = Rule(rule_config, report_type_matrix) |
|
1032 | 1032 | assert rule.match(data) is result |
|
1033 | 1033 | |
|
1034 | 1034 | @pytest.mark.parametrize("data, result", [ |
|
1035 | 1035 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}}, |
|
1036 | 1036 | True), |
|
1037 | 1037 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
1038 | 1038 | False), |
|
1039 | 1039 | ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, |
|
1040 | 1040 | False), |
|
1041 | 1041 | ({"http_status": 101, "group": {"priority": 3, "occurences": 1}}, |
|
1042 | 1042 | False), |
|
1043 | 1043 | ]) |
|
1044 | 1044 | def test_nested_AND_AND_rule(self, data, result, report_type_matrix): |
|
1045 | 1045 | from appenlight.lib.rule import Rule |
|
1046 | 1046 | rule_config = { |
|
1047 | 1047 | "field": "__AND__", |
|
1048 | 1048 | "rules": [ |
|
1049 | 1049 | {"field": "__AND__", |
|
1050 | 1050 | "rules": [ |
|
1051 | 1051 | {"op": "ge", |
|
1052 | 1052 | "field": "group:occurences", |
|
1053 | 1053 | "value": "10" |
|
1054 | 1054 | }, |
|
1055 | 1055 | {"op": "ge", |
|
1056 | 1056 | "field": "group:priority", |
|
1057 | 1057 | "value": "4" |
|
1058 | 1058 | }] |
|
1059 | 1059 | }, |
|
1060 | 1060 | {"op": "eq", |
|
1061 | 1061 | "field": "http_status", |
|
1062 | 1062 | "value": "500" |
|
1063 | 1063 | } |
|
1064 | 1064 | ] |
|
1065 | 1065 | } |
|
1066 | 1066 | |
|
1067 | 1067 | rule = Rule(rule_config, report_type_matrix) |
|
1068 | 1068 | assert rule.match(data) is result |
|
1069 | 1069 | |
|
1070 | 1070 | @pytest.mark.parametrize("data, result", [ |
|
1071 | 1071 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1072 | 1072 | "url_path": '/test/register', "error": "foo test bar"}, True), |
|
1073 | 1073 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1074 | 1074 | "url_path": '/test/register', "error": "foo INVALID bar"}, False), |
|
1075 | 1075 | ]) |
|
1076 | 1076 | def test_nested_AND_AND_AND_rule(self, data, result, report_type_matrix): |
|
1077 | 1077 | from appenlight.lib.rule import Rule |
|
1078 | 1078 | rule_config = { |
|
1079 | 1079 | "field": "__AND__", |
|
1080 | 1080 | "rules": [ |
|
1081 | 1081 | {"field": "__AND__", |
|
1082 | 1082 | "rules": [ |
|
1083 | 1083 | {"op": "ge", |
|
1084 | 1084 | "field": "group:occurences", |
|
1085 | 1085 | "value": "10" |
|
1086 | 1086 | }, |
|
1087 | 1087 | {"field": "__AND__", |
|
1088 | 1088 | "rules": [ |
|
1089 | 1089 | {"op": "endswith", |
|
1090 | 1090 | "field": "url_path", |
|
1091 | 1091 | "value": "register"}, |
|
1092 | 1092 | {"op": "contains", |
|
1093 | 1093 | "field": "error", |
|
1094 | 1094 | "value": "test"}]}] |
|
1095 | 1095 | }, |
|
1096 | 1096 | {"op": "eq", |
|
1097 | 1097 | "field": "http_status", |
|
1098 | 1098 | "value": "500" |
|
1099 | 1099 | } |
|
1100 | 1100 | ] |
|
1101 | 1101 | } |
|
1102 | 1102 | |
|
1103 | 1103 | rule = Rule(rule_config, report_type_matrix) |
|
1104 | 1104 | assert rule.match(data) is result |
|
1105 | 1105 | |
|
1106 | 1106 | @pytest.mark.parametrize("data, result", [ |
|
1107 | 1107 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1108 | 1108 | "url_path": 6, "error": 3}, False), |
|
1109 | 1109 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1110 | 1110 | "url_path": '/test/register', "error": "foo INVALID bar"}, True), |
|
1111 | 1111 | ]) |
|
1112 | 1112 | def test_nested_AND_AND_OR_rule(self, data, result, report_type_matrix): |
|
1113 | 1113 | from appenlight.lib.rule import Rule |
|
1114 | 1114 | rule_config = { |
|
1115 | 1115 | "field": "__AND__", |
|
1116 | 1116 | "rules": [ |
|
1117 | 1117 | {"field": "__AND__", |
|
1118 | 1118 | "rules": [ |
|
1119 | 1119 | {"op": "ge", |
|
1120 | 1120 | "field": "group:occurences", |
|
1121 | 1121 | "value": "10" |
|
1122 | 1122 | }, |
|
1123 | 1123 | {"field": "__OR__", |
|
1124 | 1124 | "rules": [ |
|
1125 | 1125 | {"op": "endswith", |
|
1126 | 1126 | "field": "url_path", |
|
1127 | 1127 | "value": "register" |
|
1128 | 1128 | }, |
|
1129 | 1129 | {"op": "contains", |
|
1130 | 1130 | "field": "error", |
|
1131 | 1131 | "value": "test" |
|
1132 | 1132 | }]}] |
|
1133 | 1133 | }, |
|
1134 | 1134 | {"op": "eq", |
|
1135 | 1135 | "field": "http_status", |
|
1136 | 1136 | "value": "500" |
|
1137 | 1137 | } |
|
1138 | 1138 | ] |
|
1139 | 1139 | } |
|
1140 | 1140 | |
|
1141 | 1141 | rule = Rule(rule_config, report_type_matrix) |
|
1142 | 1142 | assert rule.match(data) is result |
|
1143 | 1143 | |
|
1144 | 1144 | @pytest.mark.parametrize("op, field, value, should_fail", [ |
|
1145 | 1145 | ('eq', 'http_status', "1", False), |
|
1146 | 1146 | ('ne', 'http_status', "1", False), |
|
1147 | 1147 | ('ne', 'http_status', "foo", True), |
|
1148 | 1148 | ('startswith', 'http_status', "1", True), |
|
1149 | 1149 | ('eq', 'group:priority', "1", False), |
|
1150 | 1150 | ('ne', 'group:priority', "1", False), |
|
1151 | 1151 | ('ge', 'group:priority', "1", False), |
|
1152 | 1152 | ('le', 'group:priority', "1", False), |
|
1153 | 1153 | ('startswith', 'group:priority', "1", True), |
|
1154 | 1154 | ('eq', 'url_domain', "1", False), |
|
1155 | 1155 | ('ne', 'url_domain', "1", False), |
|
1156 | 1156 | ('startswith', 'url_domain', "1", False), |
|
1157 | 1157 | ('endswith', 'url_domain', "1", False), |
|
1158 | 1158 | ('contains', 'url_domain', "1", False), |
|
1159 | 1159 | ('ge', 'url_domain', "1", True), |
|
1160 | 1160 | ('eq', 'url_path', "1", False), |
|
1161 | 1161 | ('ne', 'url_path', "1", False), |
|
1162 | 1162 | ('startswith', 'url_path', "1", False), |
|
1163 | 1163 | ('endswith', 'url_path', "1", False), |
|
1164 | 1164 | ('contains', 'url_path', "1", False), |
|
1165 | 1165 | ('ge', 'url_path', "1", True), |
|
1166 | 1166 | ('eq', 'error', "1", False), |
|
1167 | 1167 | ('ne', 'error', "1", False), |
|
1168 | 1168 | ('startswith', 'error', "1", False), |
|
1169 | 1169 | ('endswith', 'error', "1", False), |
|
1170 | 1170 | ('contains', 'error', "1", False), |
|
1171 | 1171 | ('ge', 'error', "1", True), |
|
1172 | 1172 | ('ge', 'url_path', "1", True), |
|
1173 | 1173 | ('eq', 'tags:server_name', "1", False), |
|
1174 | 1174 | ('ne', 'tags:server_name', "1", False), |
|
1175 | 1175 | ('startswith', 'tags:server_name', "1", False), |
|
1176 | 1176 | ('endswith', 'tags:server_name', "1", False), |
|
1177 | 1177 | ('contains', 'tags:server_name', "1", False), |
|
1178 | 1178 | ('ge', 'tags:server_name', "1", True), |
|
1179 | 1179 | ('contains', 'traceback', "1", False), |
|
1180 | 1180 | ('ge', 'traceback', "1", True), |
|
1181 | 1181 | ('eq', 'group:occurences', "1", False), |
|
1182 | 1182 | ('ne', 'group:occurences', "1", False), |
|
1183 | 1183 | ('ge', 'group:occurences', "1", False), |
|
1184 | 1184 | ('le', 'group:occurences', "1", False), |
|
1185 | 1185 | ('contains', 'group:occurences', "1", True), |
|
1186 | 1186 | ]) |
|
1187 | 1187 | def test_rule_validation(self, op, field, value, should_fail, |
|
1188 | 1188 | report_type_matrix): |
|
1189 | 1189 | import colander |
|
1190 | 1190 | from appenlight.validators import build_rule_schema |
|
1191 | 1191 | rule_config = { |
|
1192 | 1192 | "op": op, |
|
1193 | 1193 | "field": field, |
|
1194 | 1194 | "value": value |
|
1195 | 1195 | } |
|
1196 | 1196 | |
|
1197 | 1197 | schema = build_rule_schema(rule_config, report_type_matrix) |
|
1198 | 1198 | if should_fail: |
|
1199 | 1199 | with pytest.raises(colander.Invalid): |
|
1200 | 1200 | schema.deserialize(rule_config) |
|
1201 | 1201 | else: |
|
1202 | 1202 | schema.deserialize(rule_config) |
|
1203 | 1203 | |
|
1204 | 1204 | def test_nested_proper_rule_validation(self, report_type_matrix): |
|
1205 | 1205 | from appenlight.validators import build_rule_schema |
|
1206 | 1206 | rule_config = { |
|
1207 | 1207 | "field": "__AND__", |
|
1208 | 1208 | "rules": [ |
|
1209 | 1209 | { |
|
1210 | 1210 | "field": "__AND__", |
|
1211 | 1211 | "rules": [ |
|
1212 | 1212 | { |
|
1213 | 1213 | "op": "ge", |
|
1214 | 1214 | "field": "group:occurences", |
|
1215 | 1215 | "value": "10" |
|
1216 | 1216 | }, |
|
1217 | 1217 | { |
|
1218 | 1218 | "field": "__OR__", |
|
1219 | 1219 | "rules": [ |
|
1220 | 1220 | { |
|
1221 | 1221 | "op": "endswith", |
|
1222 | 1222 | "field": "url_path", |
|
1223 | 1223 | "value": "register" |
|
1224 | 1224 | }, |
|
1225 | 1225 | { |
|
1226 | 1226 | "op": "contains", |
|
1227 | 1227 | "field": "error", |
|
1228 | 1228 | "value": "test" |
|
1229 | 1229 | } |
|
1230 | 1230 | ] |
|
1231 | 1231 | } |
|
1232 | 1232 | ] |
|
1233 | 1233 | }, |
|
1234 | 1234 | { |
|
1235 | 1235 | "op": "eq", |
|
1236 | 1236 | "field": "http_status", |
|
1237 | 1237 | "value": "500" |
|
1238 | 1238 | } |
|
1239 | 1239 | ] |
|
1240 | 1240 | } |
|
1241 | 1241 | |
|
1242 | 1242 | schema = build_rule_schema(rule_config, report_type_matrix) |
|
1243 | 1243 | deserialized = schema.deserialize(rule_config) |
|
1244 | 1244 | |
|
1245 | 1245 | def test_nested_bad_rule_validation(self, report_type_matrix): |
|
1246 | 1246 | import colander |
|
1247 | 1247 | from appenlight.validators import build_rule_schema |
|
1248 | 1248 | rule_config = { |
|
1249 | 1249 | "field": "__AND__", |
|
1250 | 1250 | "rules": [ |
|
1251 | 1251 | { |
|
1252 | 1252 | "field": "__AND__", |
|
1253 | 1253 | "rules": [ |
|
1254 | 1254 | { |
|
1255 | 1255 | "op": "ge", |
|
1256 | 1256 | "field": "group:occurences", |
|
1257 | 1257 | "value": "10" |
|
1258 | 1258 | }, |
|
1259 | 1259 | { |
|
1260 | 1260 | "field": "__OR__", |
|
1261 | 1261 | "rules": [ |
|
1262 | 1262 | { |
|
1263 | 1263 | "op": "gt", |
|
1264 | 1264 | "field": "url_path", |
|
1265 | 1265 | "value": "register" |
|
1266 | 1266 | }, |
|
1267 | 1267 | { |
|
1268 | 1268 | "op": "contains", |
|
1269 | 1269 | "field": "error", |
|
1270 | 1270 | "value": "test" |
|
1271 | 1271 | } |
|
1272 | 1272 | ] |
|
1273 | 1273 | } |
|
1274 | 1274 | ] |
|
1275 | 1275 | }, |
|
1276 | 1276 | { |
|
1277 | 1277 | "op": "eq", |
|
1278 | 1278 | "field": "http_status", |
|
1279 | 1279 | "value": "500" |
|
1280 | 1280 | } |
|
1281 | 1281 | ] |
|
1282 | 1282 | } |
|
1283 | 1283 | |
|
1284 | 1284 | schema = build_rule_schema(rule_config, report_type_matrix) |
|
1285 | 1285 | with pytest.raises(colander.Invalid): |
|
1286 | 1286 | deserialized = schema.deserialize(rule_config) |
|
1287 | 1287 | |
|
1288 | 1288 | def test_config_manipulator(self): |
|
1289 | 1289 | from appenlight.lib.rule import Rule |
|
1290 | 1290 | type_matrix = { |
|
1291 | 1291 | 'a': {"type": 'int', |
|
1292 | 1292 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1293 | 1293 | 'b': {"type": 'int', |
|
1294 | 1294 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1295 | 1295 | } |
|
1296 | 1296 | rule_config = { |
|
1297 | 1297 | "field": "__OR__", |
|
1298 | 1298 | "rules": [ |
|
1299 | 1299 | { |
|
1300 | 1300 | "field": "__OR__", |
|
1301 | 1301 | "rules": [ |
|
1302 | 1302 | { |
|
1303 | 1303 | "op": "ge", |
|
1304 | 1304 | "field": "a", |
|
1305 | 1305 | "value": "10" |
|
1306 | 1306 | } |
|
1307 | 1307 | ] |
|
1308 | 1308 | }, |
|
1309 | 1309 | { |
|
1310 | 1310 | "op": "eq", |
|
1311 | 1311 | "field": "b", |
|
1312 | 1312 | "value": "500" |
|
1313 | 1313 | } |
|
1314 | 1314 | ] |
|
1315 | 1315 | } |
|
1316 | 1316 | |
|
1317 | 1317 | def rule_manipulator(rule): |
|
1318 | 1318 | if 'value' in rule.config: |
|
1319 | 1319 | rule.config['value'] = "1" |
|
1320 | 1320 | |
|
1321 | 1321 | rule = Rule(rule_config, type_matrix, |
|
1322 | 1322 | config_manipulator=rule_manipulator) |
|
1323 | 1323 | rule.match({"a": 1, |
|
1324 | 1324 | "b": "2"}) |
|
1325 | 1325 | assert rule.config['rules'][0]['rules'][0]['value'] == "1" |
|
1326 | 1326 | assert rule.config['rules'][1]['value'] == "1" |
|
1327 | 1327 | assert rule.type_matrix["b"]['type'] == "int" |
|
1328 | 1328 | |
|
1329 | 1329 | def test_dynamic_config_manipulator(self): |
|
1330 | 1330 | from appenlight.lib.rule import Rule |
|
1331 | 1331 | rule_config = { |
|
1332 | 1332 | "field": "__OR__", |
|
1333 | 1333 | "rules": [ |
|
1334 | 1334 | { |
|
1335 | 1335 | "field": "__OR__", |
|
1336 | 1336 | "rules": [ |
|
1337 | 1337 | { |
|
1338 | 1338 | "op": "ge", |
|
1339 | 1339 | "field": "a", |
|
1340 | 1340 | "value": "10" |
|
1341 | 1341 | } |
|
1342 | 1342 | ] |
|
1343 | 1343 | }, |
|
1344 | 1344 | { |
|
1345 | 1345 | "op": "eq", |
|
1346 | 1346 | "field": "b", |
|
1347 | 1347 | "value": "500" |
|
1348 | 1348 | } |
|
1349 | 1349 | ] |
|
1350 | 1350 | } |
|
1351 | 1351 | |
|
1352 | 1352 | def rule_manipulator(rule): |
|
1353 | 1353 | rule.type_matrix = { |
|
1354 | 1354 | 'a': {"type": 'int', |
|
1355 | 1355 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1356 | 1356 | 'b': {"type": 'unicode', |
|
1357 | 1357 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1358 | 1358 | } |
|
1359 | 1359 | |
|
1360 | 1360 | if 'value' in rule.config: |
|
1361 | 1361 | if rule.config['field'] == 'a': |
|
1362 | 1362 | rule.config['value'] = "1" |
|
1363 | 1363 | elif rule.config['field'] == 'b': |
|
1364 | 1364 | rule.config['value'] = "2" |
|
1365 | 1365 | |
|
1366 | 1366 | rule = Rule(rule_config, {}, |
|
1367 | 1367 | config_manipulator=rule_manipulator) |
|
1368 | 1368 | rule.match({"a": 11, |
|
1369 | 1369 | "b": "55"}) |
|
1370 | 1370 | assert rule.config['rules'][0]['rules'][0]['value'] == "1" |
|
1371 | 1371 | assert rule.config['rules'][1]['value'] == "2" |
|
1372 | 1372 | assert rule.type_matrix["b"]['type'] == "unicode" |
|
1373 | 1373 | |
|
1374 | 1374 | |
|
1375 | 1375 | @pytest.mark.usefixtures('base_app', 'with_migrations') |
|
1376 | 1376 | class TestViewsWithForms(object): |
|
1377 | 1377 | def test_bad_csrf(self): |
|
1378 | 1378 | from appenlight.forms import CSRFException |
|
1379 | 1379 | from appenlight.views.index import register |
|
1380 | 1380 | post_data = {'dupa': 'dupa'} |
|
1381 | 1381 | request = testing.DummyRequest(post=post_data) |
|
1382 | 1382 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1383 | 1383 | with pytest.raises(CSRFException): |
|
1384 | 1384 | register(request) |
|
1385 | 1385 | |
|
1386 | 1386 | def test_proper_csrf(self): |
|
1387 | 1387 | from appenlight.views.index import register |
|
1388 | 1388 | request = pyramid.threadlocal.get_current_request() |
|
1389 | 1389 | post_data = {'dupa': 'dupa', |
|
1390 | 1390 | 'csrf_token': request.session.get_csrf_token()} |
|
1391 | 1391 | request = testing.DummyRequest(post=post_data) |
|
1392 | 1392 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1393 | 1393 | result = register(request) |
|
1394 | 1394 | assert result['form'].errors['email'][0] == 'This field is required.' |
|
1395 | 1395 | |
|
1396 | 1396 | |
|
1397 | 1397 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'default_data') |
|
1398 | 1398 | class TestRegistration(object): |
|
1399 | 1399 | def test_invalid_form(self): |
|
1400 | 1400 | from appenlight.views.index import register |
|
1401 | 1401 | request = pyramid.threadlocal.get_current_request() |
|
1402 | 1402 | post_data = {'user_name': '', |
|
1403 | 1403 | 'user_password': '', |
|
1404 | 1404 | 'email': '', |
|
1405 | 1405 | 'csrf_token': request.session.get_csrf_token()} |
|
1406 | 1406 | request = testing.DummyRequest(post=post_data) |
|
1407 | 1407 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1408 | 1408 | result = register(request) |
|
1409 | 1409 | assert result['form'].errors['user_name'][0] == \ |
|
1410 | 1410 | 'This field is required.' |
|
1411 | 1411 | |
|
1412 | 1412 | def test_valid_form(self): |
|
1413 | 1413 | from appenlight.views.index import register |
|
1414 | 1414 | from ziggurat_foundations.models.services.user import UserService |
|
1415 | 1415 | request = pyramid.threadlocal.get_current_request() |
|
1416 | 1416 | post_data = {'user_name': 'foo', |
|
1417 | 1417 | 'user_password': 'barr', |
|
1418 | 1418 | 'email': 'test@test.foo', |
|
1419 | 1419 | 'csrf_token': request.session.get_csrf_token()} |
|
1420 | 1420 | request = testing.DummyRequest(post=post_data) |
|
1421 | 1421 | request.add_flash_to_headers = mock.Mock() |
|
1422 | 1422 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1423 | 1423 | assert UserService.by_user_name('foo') is None |
|
1424 | 1424 | register(request) |
|
1425 | 1425 | user = UserService.by_user_name('foo') |
|
1426 | 1426 | assert user.user_name == 'foo' |
|
1427 | 1427 | assert len(user.user_password) == 60 |
|
1428 | 1428 | |
|
1429 | 1429 | |
|
1430 | 1430 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables', |
|
1431 | 1431 | 'default_user') |
|
1432 | 1432 | class TestApplicationCreation(object): |
|
1433 | 1433 | def test_wrong_data(self): |
|
1434 | 1434 | import appenlight.views.applications as applications |
|
1435 | 1435 | from ziggurat_foundations.models.services.user import UserService |
|
1436 | 1436 | request = pyramid.threadlocal.get_current_request() |
|
1437 | 1437 | request.user = UserService.by_user_name('testuser') |
|
1438 | 1438 | request.unsafe_json_body = {} |
|
1439 | 1439 | request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token() |
|
1440 | 1440 | response = applications.application_create(request) |
|
1441 | 1441 | assert response.code == 422 |
|
1442 | 1442 | |
|
1443 | 1443 | def test_proper_data(self): |
|
1444 | 1444 | import appenlight.views.applications as applications |
|
1445 | 1445 | from ziggurat_foundations.models.services.user import UserService |
|
1446 | 1446 | |
|
1447 | 1447 | request = pyramid.threadlocal.get_current_request() |
|
1448 | 1448 | request.user = UserService.by_user_name('testuser') |
|
1449 | 1449 | request.unsafe_json_body = {"resource_name": "app name", |
|
1450 | 1450 | "domains": "foo"} |
|
1451 | 1451 | request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token() |
|
1452 | 1452 | app_dict = applications.application_create(request) |
|
1453 | 1453 | assert app_dict['public_key'] is not None |
|
1454 | 1454 | assert app_dict['api_key'] is not None |
|
1455 | 1455 | assert app_dict['resource_name'] == 'app name' |
|
1456 | 1456 | assert app_dict['owner_group_id'] is None |
|
1457 | 1457 | assert app_dict['resource_id'] is not None |
|
1458 | 1458 | assert app_dict['default_grouping'] == 'url_traceback' |
|
1459 | 1459 | assert app_dict['possible_permissions'] == ('view', 'update_reports') |
|
1460 | 1460 | assert app_dict['slow_report_threshold'] == 10 |
|
1461 | 1461 | assert app_dict['owner_user_name'] == 'testuser' |
|
1462 | 1462 | assert app_dict['owner_user_id'] == request.user.id |
|
1463 | 1463 | assert app_dict['domains'] == 'foo' |
|
1464 | 1464 | assert app_dict['postprocessing_rules'] == [] |
|
1465 | 1465 | assert app_dict['error_report_threshold'] == 10 |
|
1466 | 1466 | assert app_dict['allow_permanent_storage'] is False |
|
1467 | 1467 | assert app_dict['resource_type'] == 'application' |
|
1468 | 1468 | assert app_dict['current_permissions'] == [] |
|
1469 | 1469 | |
|
1470 | 1470 | |
|
1471 | 1471 | @pytest.mark.usefixtures('default_application') |
|
1472 | 1472 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
1473 | 1473 | class TestAPISentryView(object): |
|
1474 | 1474 | def test_no_payload(self, default_application): |
|
1475 | 1475 | import colander |
|
1476 | 1476 | from appenlight.models.services.application import ApplicationService |
|
1477 | 1477 | from appenlight.views.api import sentry_compat |
|
1478 | 1478 | from appenlight.lib.request import JSONException |
|
1479 | 1479 | |
|
1480 | 1480 | context = DummyContext() |
|
1481 | 1481 | context.resource = ApplicationService.by_id(1) |
|
1482 | 1482 | request = testing.DummyRequest( |
|
1483 | 1483 | headers={'Content-Type': 'application/json'}) |
|
1484 | 1484 | request.unsafe_json_body = '' |
|
1485 | 1485 | request.context = context |
|
1486 | 1486 | route = mock.Mock() |
|
1487 | 1487 | route.name = 'api_sentry' |
|
1488 | 1488 | request.matched_route = route |
|
1489 | 1489 | with pytest.raises(JSONException): |
|
1490 | 1490 | sentry_compat(request) |
|
1491 | 1491 | |
|
1492 | 1492 | def test_java_client_payload(self): |
|
1493 | 1493 | from appenlight.views.api import sentry_compat |
|
1494 | 1494 | from appenlight.models.services.application import ApplicationService |
|
1495 | 1495 | from appenlight.models.report_group import ReportGroup |
|
1496 | 1496 | route = mock.Mock() |
|
1497 | 1497 | route.name = 'api_sentry' |
|
1498 | 1498 | request = pyramid.threadlocal.get_current_request() |
|
1499 | 1499 | context = DummyContext() |
|
1500 | 1500 | context.resource = ApplicationService.by_id(1) |
|
1501 | context.resource.allow_permanent_storage = True | |
|
1501 | 1502 | request.context = context |
|
1502 | 1503 | request.matched_route = route |
|
1503 | 1504 | request.body = b'eJy1UmFr2zAQ/S0T+7BCLOzYThp/C6xjG6SDLd/GCBf57Ki' \ |
|
1504 | 1505 | b'RJSHJJiXkv+/UlC7p2kAZA33Ru6f33t1pz3BAHVayZhWr87' \ |
|
1505 | 1506 | b'JMs+I6q3MsrifFep2vc1iXM1HMpgBTNmIdeg8tEvlmJ9AGa' \ |
|
1506 | 1507 | b'fQ7goOkQoDOUmGcZpMkLZO0WGZFRadMiaHIR1EVnTMu3k3b' \ |
|
1507 | 1508 | b'oiMgqJrXpgOpOVjLLTiPkWAVhMa4jih3MAAholfWyUDAksz' \ |
|
1508 | 1509 | b'm1iopICbg8fWH52B8VWXZVYwHrWfV/jBipD2gW2no8CFMa5' \ |
|
1509 | 1510 | b'JButCDSjoQG6mR6LgLDojPPn/7sbydL25ep34HGl+y3DiE+' \ |
|
1510 | 1511 | b'lH0xXBXjMzFBsXW99SS7pWKYXRw91zqgK4BgZ4/DZVVP/cs' \ |
|
1511 | 1512 | b'3NuzSZPfAKqP2Cdj4tw7U/cKH0fEFeiWQFqE2FIHAmMPjaN' \ |
|
1512 | 1513 | b'Y/kHvbzY/JqdHUq9o/KxqQHkcsabX4piDuT4aK+pXG1ZNi/' \ |
|
1513 | 1514 | b'IwOpEyruXC1LiB3vPO3BmOOxTUCIqv5LIg5H12oh9cf0l+P' \ |
|
1514 | 1515 | b'MvP5P8kddgoFIEvMGzM5cRSD2aLJ6qTdHKm6nv9pPcRFba0' \ |
|
1515 | 1516 | b'Kd0eleeCFuGN+9JZ9TaXIn/V5JYMBvxXg3L6PwzSE4dkfOb' \ |
|
1516 | 1517 | b'w7CtfWmP85SdCs8OvA53fUV19cg==' |
|
1517 | 1518 | sentry_compat(request) |
|
1518 | 1519 | query = DBSession.query(ReportGroup) |
|
1519 | 1520 | report = query.first() |
|
1520 | 1521 | assert query.count() == 1 |
|
1521 | 1522 | assert report.total_reports == 1 |
|
1522 | 1523 | |
|
1523 | 1524 | def test_ruby_client_payload(self): |
|
1524 | 1525 | from appenlight.views.api import sentry_compat |
|
1525 | 1526 | from appenlight.models.services.application import ApplicationService |
|
1526 | 1527 | from appenlight.models.report_group import ReportGroup |
|
1527 | 1528 | from appenlight.tests.payload_examples import SENTRY_RUBY_ENCODED |
|
1528 | 1529 | route = mock.Mock() |
|
1529 | 1530 | route.name = 'api_sentry' |
|
1530 | 1531 | request = testing.DummyRequest( |
|
1531 | 1532 | headers={'Content-Type': 'application/octet-stream', |
|
1532 | 1533 | 'User-Agent': 'sentry-ruby/1.0.0', |
|
1533 | 1534 | 'X-Sentry-Auth': 'Sentry sentry_version=5, ' |
|
1534 | 1535 | 'sentry_client=raven-ruby/1.0.0, ' |
|
1535 | 1536 | 'sentry_timestamp=1462378483, ' |
|
1536 | 1537 | 'sentry_key=xxx, sentry_secret=xxx' |
|
1537 | 1538 | }) |
|
1538 | 1539 | context = DummyContext() |
|
1539 | 1540 | context.resource = ApplicationService.by_id(1) |
|
1541 | context.resource.allow_permanent_storage = True | |
|
1540 | 1542 | request.context = context |
|
1541 | 1543 | request.matched_route = route |
|
1542 | 1544 | request.body = SENTRY_RUBY_ENCODED |
|
1543 | 1545 | sentry_compat(request) |
|
1544 | 1546 | query = DBSession.query(ReportGroup) |
|
1545 | 1547 | report = query.first() |
|
1546 | 1548 | assert query.count() == 1 |
|
1547 | 1549 | assert report.total_reports == 1 |
|
1548 | 1550 | |
|
1549 | 1551 | def test_python_client_decoded_payload(self): |
|
1550 | 1552 | from appenlight.views.api import sentry_compat |
|
1551 | 1553 | from appenlight.models.services.application import ApplicationService |
|
1552 | 1554 | from appenlight.models.report_group import ReportGroup |
|
1553 | 1555 | from appenlight.tests.payload_examples import SENTRY_PYTHON_PAYLOAD_7 |
|
1554 | 1556 | route = mock.Mock() |
|
1555 | 1557 | route.name = 'api_sentry' |
|
1556 | 1558 | request = pyramid.threadlocal.get_current_request() |
|
1557 | 1559 | context = DummyContext() |
|
1558 | 1560 | context.resource = ApplicationService.by_id(1) |
|
1561 | context.resource.allow_permanent_storage = True | |
|
1559 | 1562 | request.context = context |
|
1560 | 1563 | request.matched_route = route |
|
1561 | 1564 | request.body = json.dumps(SENTRY_PYTHON_PAYLOAD_7).encode('utf8') |
|
1562 | 1565 | sentry_compat(request) |
|
1563 | 1566 | query = DBSession.query(ReportGroup) |
|
1564 | 1567 | report = query.first() |
|
1565 | 1568 | assert query.count() == 1 |
|
1566 | 1569 | assert report.total_reports == 1 |
|
1567 | 1570 | |
|
1568 | 1571 | def test_python_client_encoded_payload(self): |
|
1569 | 1572 | from appenlight.views.api import sentry_compat |
|
1570 | 1573 | from appenlight.models.services.application import ApplicationService |
|
1571 | 1574 | from appenlight.models.report_group import ReportGroup |
|
1572 | 1575 | from appenlight.tests.payload_examples import SENTRY_PYTHON_ENCODED |
|
1573 | 1576 | route = mock.Mock() |
|
1574 | 1577 | route.name = 'api_sentry' |
|
1575 | 1578 | request = testing.DummyRequest( |
|
1576 | 1579 | headers={'Content-Type': 'application/octet-stream', |
|
1577 | 1580 | 'Content-Encoding': 'deflate', |
|
1578 | 1581 | 'User-Agent': 'sentry-ruby/1.0.0', |
|
1579 | 1582 | 'X-Sentry-Auth': 'Sentry sentry_version=5, ' |
|
1580 | 1583 | 'sentry_client=raven-ruby/1.0.0, ' |
|
1581 | 1584 | 'sentry_timestamp=1462378483, ' |
|
1582 | 1585 | 'sentry_key=xxx, sentry_secret=xxx' |
|
1583 | 1586 | }) |
|
1584 | 1587 | context = DummyContext() |
|
1585 | 1588 | context.resource = ApplicationService.by_id(1) |
|
1589 | context.resource.allow_permanent_storage = True | |
|
1586 | 1590 | request.context = context |
|
1587 | 1591 | request.matched_route = route |
|
1588 | 1592 | request.body = SENTRY_PYTHON_ENCODED |
|
1589 | 1593 | sentry_compat(request) |
|
1590 | 1594 | query = DBSession.query(ReportGroup) |
|
1591 | 1595 | report = query.first() |
|
1592 | 1596 | assert query.count() == 1 |
|
1593 | 1597 | assert report.total_reports == 1 |
@@ -1,765 +1,773 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
19 | 19 | # services, and proprietary license terms, please see |
|
20 | 20 | # https://rhodecode.com/licenses/ |
|
21 | 21 | |
|
22 | 22 | import datetime |
|
23 | 23 | |
|
24 | 24 | import colander |
|
25 | 25 | from colander import null |
|
26 | 26 | |
|
27 | 27 | # those keywords are here so we can distinguish between searching for tags and |
|
28 | 28 | # normal properties of reports/logs |
|
29 | 29 | accepted_search_params = ['resource', |
|
30 | 30 | 'request_id', |
|
31 | 31 | 'start_date', |
|
32 | 32 | 'end_date', |
|
33 | 33 | 'page', |
|
34 | 34 | 'min_occurences', |
|
35 | 35 | 'http_status', |
|
36 | 36 | 'priority', |
|
37 | 37 | 'error', |
|
38 | 38 | 'url_path', |
|
39 | 39 | 'url_domain', |
|
40 | 40 | 'report_status', |
|
41 | 41 | 'min_duration', |
|
42 | 42 | 'max_duration', |
|
43 | 43 | 'message', |
|
44 | 44 | 'level', |
|
45 | 45 | 'namespace'] |
|
46 | 46 | |
|
47 | 47 | |
|
48 | 48 | @colander.deferred |
|
49 | 49 | def deferred_utcnow(node, kw): |
|
50 | 50 | return kw['utcnow'] |
|
51 | 51 | |
|
52 | 52 | |
|
53 | @colander.deferred | |
|
54 | def optional_limited_date(node, kw): | |
|
55 | if not kw.get('allow_permanent_storage'): | |
|
56 | return limited_date | |
|
57 | ||
|
58 | ||
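
A minimal sketch of how this deferred resolves at bind time: colander swaps the deferred for whatever the function returns, so a truthy allow_permanent_storage leaves the date nodes with no validator, while anything else falls back to limited_date. The bind keywords below are illustrative; this changeset itself only shows utcnow being bound for the log schemas.

    import datetime
    # Illustrative only; ReportDetailBaseSchema is defined later in this file.
    bound = ReportDetailBaseSchema().bind(
        utcnow=datetime.datetime.utcnow(),      # resolves deferred_utcnow for missing dates
        allow_permanent_storage=False)          # optional_limited_date -> limited_date
    # With allow_permanent_storage=True the deferred returns None and no range check runs.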
|
53 | 59 | def lowercase_preparer(input_data): |
|
54 | 60 | """ |
|
55 | 61 | Transforms a list of string entries to lowercase |
|
56 | 62 | Used in search query validation |
|
57 | 63 | """ |
|
58 | 64 | if not input_data: |
|
59 | 65 | return input_data |
|
60 | 66 | return [x.lower() for x in input_data] |
|
61 | 67 | |
|
62 | 68 | |
|
63 | 69 | def shortener_factory(cutoff_size=32): |
|
64 | 70 | """ |
|
65 | 71 | Limits the input data to specific character count |
|
66 | 72 | :arg cutoff_size How many characters to store |
|
67 | 73 | |
|
68 | 74 | """ |
|
69 | 75 | |
|
70 | 76 | def shortener(input_data): |
|
71 | 77 | if not input_data: |
|
72 | 78 | return input_data |
|
73 | 79 | else: |
|
74 | 80 | if isinstance(input_data, str): |
|
75 | 81 | return input_data[:cutoff_size] |
|
76 | 82 | else: |
|
77 | 83 | return input_data |
|
78 | 84 | |
|
79 | 85 | return shortener |
|
80 | 86 | |
|
81 | 87 | |
|
82 | 88 | def cast_to_unicode_or_null(value): |
|
83 | 89 | if value is not colander.null: |
|
84 | 90 | return str(value) |
|
85 | 91 | return None |
|
86 | 92 | |
|
87 | 93 | |
|
88 | 94 | class NonTZDate(colander.DateTime): |
|
89 | 95 | """ Returns null for incorrect date format - also removes tz info""" |
|
90 | 96 | |
|
91 | 97 | def deserialize(self, node, cstruct): |
|
92 | 98 | # disabled for now |
|
93 | 99 | # if cstruct and isinstance(cstruct, str): |
|
94 | 100 | # if ':' not in cstruct: |
|
95 | 101 | # cstruct += ':0.0' |
|
96 | 102 | # if '.' not in cstruct: |
|
97 | 103 | # cstruct += '.0' |
|
98 | 104 | value = super(NonTZDate, self).deserialize(node, cstruct) |
|
99 | 105 | if value: |
|
100 | 106 | return value.replace(tzinfo=None) |
|
101 | 107 | return value |
|
102 | 108 | |
|
103 | 109 | |
|
104 | 110 | class UnknownType(object): |
|
105 | 111 | """ |
|
106 | 112 | Universal type that will accept a deserialized JSON object and store it unaltered |
|
107 | 113 | """ |
|
108 | 114 | |
|
109 | 115 | def serialize(self, node, appstruct): |
|
110 | 116 | if appstruct is null: |
|
111 | 117 | return null |
|
112 | 118 | return appstruct |
|
113 | 119 | |
|
114 | 120 | def deserialize(self, node, cstruct): |
|
115 | 121 | if cstruct is null: |
|
116 | 122 | return null |
|
117 | 123 | return cstruct |
|
118 | 124 | |
|
119 | 125 | def cstruct_children(self): |
|
120 | 126 | return [] |
|
121 | 127 | |
|
122 | 128 | |
|
123 | 129 | # SLOW REPORT SCHEMA |
|
124 | 130 | |
|
125 | 131 | def rewrite_type(input_data): |
|
126 | 132 | """ |
|
127 | 133 | Fix for legacy appenlight clients |
|
128 | 134 | """ |
|
129 | 135 | if input_data == 'remote_call': |
|
130 | 136 | return 'remote' |
|
131 | 137 | return input_data |
|
132 | 138 | |
|
133 | 139 | |
|
134 | 140 | class ExtraTupleSchema(colander.TupleSchema): |
|
135 | 141 | name = colander.SchemaNode(colander.String(), |
|
136 | 142 | validator=colander.Length(1, 64)) |
|
137 | 143 | value = colander.SchemaNode(UnknownType(), |
|
138 | 144 | preparer=shortener_factory(512), |
|
139 | 145 | missing=None) |
|
140 | 146 | |
|
141 | 147 | |
|
142 | 148 | class ExtraSchemaList(colander.SequenceSchema): |
|
143 | 149 | tag = ExtraTupleSchema() |
|
144 | 150 | missing = None |
|
145 | 151 | |
|
146 | 152 | |
|
147 | 153 | class TagsTupleSchema(colander.TupleSchema): |
|
148 | 154 | name = colander.SchemaNode(colander.String(), |
|
149 | 155 | validator=colander.Length(1, 128)) |
|
150 | 156 | value = colander.SchemaNode(UnknownType(), |
|
151 | 157 | preparer=shortener_factory(128), |
|
152 | 158 | missing=None) |
|
153 | 159 | |
|
154 | 160 | |
|
155 | 161 | class TagSchemaList(colander.SequenceSchema): |
|
156 | 162 | tag = TagsTupleSchema() |
|
157 | 163 | missing = None |
|
158 | 164 | |
|
159 | 165 | |
|
160 | 166 | class NumericTagsTupleSchema(colander.TupleSchema): |
|
161 | 167 | name = colander.SchemaNode(colander.String(), |
|
162 | 168 | validator=colander.Length(1, 128)) |
|
163 | 169 | value = colander.SchemaNode(colander.Float(), missing=0) |
|
164 | 170 | |
|
165 | 171 | |
|
166 | 172 | class NumericTagSchemaList(colander.SequenceSchema): |
|
167 | 173 | tag = NumericTagsTupleSchema() |
|
168 | 174 | missing = None |
|
169 | 175 | |
|
170 | 176 | |
|
171 | 177 | class SlowCallSchema(colander.MappingSchema): |
|
172 | 178 | """ |
|
173 | 179 | Validates slow call format in slow call list |
|
174 | 180 | """ |
|
175 | 181 | start = colander.SchemaNode(NonTZDate()) |
|
176 | 182 | end = colander.SchemaNode(NonTZDate()) |
|
177 | 183 | statement = colander.SchemaNode(colander.String(), missing='') |
|
178 | 184 | parameters = colander.SchemaNode(UnknownType(), missing=None) |
|
179 | 185 | type = colander.SchemaNode( |
|
180 | 186 | colander.String(), |
|
181 | 187 | preparer=rewrite_type, |
|
182 | 188 | validator=colander.OneOf( |
|
183 | 189 | ['tmpl', 'sql', 'nosql', 'remote', 'unknown', 'custom']), |
|
184 | 190 | missing='unknown') |
|
185 | 191 | subtype = colander.SchemaNode(colander.String(), |
|
186 | 192 | validator=colander.Length(1, 16), |
|
187 | 193 | missing='unknown') |
|
188 | 194 | location = colander.SchemaNode(colander.String(), |
|
189 | 195 | validator=colander.Length(1, 255), |
|
190 | 196 | missing='') |
|
191 | 197 | |
|
192 | 198 | |
|
193 | 199 | def limited_date(node, value): |
|
194 | 200 | """ checks to make sure that the value is not older/newer than 2h """ |
|
195 | 201 | past_hours = 72 |
|
196 | 202 | future_hours = 2 |
|
197 | 203 | min_time = datetime.datetime.utcnow() - datetime.timedelta( |
|
198 | 204 | hours=past_hours) |
|
199 | 205 | max_time = datetime.datetime.utcnow() + datetime.timedelta( |
|
200 | 206 | hours=future_hours) |
|
201 | 207 | if min_time > value: |
|
202 | 208 | msg = '%r is older than current UTC time by ' + str(past_hours) |

203 | 209 | msg += ' hours. Ask the administrator to enable permanent logging for ' \ |

204 | 210 | 'your application to store logs with dates in the past.' |
|
205 | 211 | raise colander.Invalid(node, msg % value) |
|
206 | 212 | if max_time < value: |
|
207 | 213 | msg = '%r is newer than current UTC time by ' + str(future_hours) |

208 | 214 | msg += ' hours. Ask the administrator to enable permanent logging for ' \ |

209 | 215 | 'your application to store logs with dates in the future.' |
|
210 | 216 | raise colander.Invalid(node, msg % value) |
|
211 | 217 | |
|
212 | 218 | |
|
213 | 219 | class SlowCallListSchema(colander.SequenceSchema): |
|
214 | 220 | """ |
|
215 | 221 | Validates list of individual slow calls |
|
216 | 222 | """ |
|
217 | 223 | slow_call = SlowCallSchema() |
|
218 | 224 | |
|
219 | 225 | |
|
220 | 226 | class RequestStatsSchema(colander.MappingSchema): |
|
221 | 227 | """ |
|
222 | 228 | Validates format of requests statistics dictionary |
|
223 | 229 | """ |
|
224 | 230 | main = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
225 | 231 | missing=0) |
|
226 | 232 | sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
227 | 233 | missing=0) |
|
228 | 234 | nosql = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
229 | 235 | missing=0) |
|
230 | 236 | remote = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
231 | 237 | missing=0) |
|
232 | 238 | tmpl = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
233 | 239 | missing=0) |
|
234 | 240 | custom = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
235 | 241 | missing=0) |
|
236 | 242 | sql_calls = colander.SchemaNode(colander.Float(), |
|
237 | 243 | validator=colander.Range(0), |
|
238 | 244 | missing=0) |
|
239 | 245 | nosql_calls = colander.SchemaNode(colander.Float(), |
|
240 | 246 | validator=colander.Range(0), |
|
241 | 247 | missing=0) |
|
242 | 248 | remote_calls = colander.SchemaNode(colander.Float(), |
|
243 | 249 | validator=colander.Range(0), |
|
244 | 250 | missing=0) |
|
245 | 251 | tmpl_calls = colander.SchemaNode(colander.Float(), |
|
246 | 252 | validator=colander.Range(0), |
|
247 | 253 | missing=0) |
|
248 | 254 | custom_calls = colander.SchemaNode(colander.Float(), |
|
249 | 255 | validator=colander.Range(0), |
|
250 | 256 | missing=0) |
|
251 | 257 | |
|
252 | 258 | |
|
253 | 259 | class FrameInfoVarSchema(colander.SequenceSchema): |
|
254 | 260 | """ |
|
255 | 261 | Validates format of frame variables of a traceback |
|
256 | 262 | """ |
|
257 | 263 | vars = colander.SchemaNode(UnknownType(), |
|
258 | 264 | validator=colander.Length(2, 2)) |
|
259 | 265 | |
|
260 | 266 | |
|
261 | 267 | class FrameInfoSchema(colander.MappingSchema): |
|
262 | 268 | """ |
|
263 | 269 | Validates format of a traceback line |
|
264 | 270 | """ |
|
265 | 271 | cline = colander.SchemaNode(colander.String(), missing='') |
|
266 | 272 | module = colander.SchemaNode(colander.String(), missing='') |
|
267 | 273 | line = colander.SchemaNode(colander.String(), missing='') |
|
268 | 274 | file = colander.SchemaNode(colander.String(), missing='') |
|
269 | 275 | fn = colander.SchemaNode(colander.String(), missing='') |
|
270 | 276 | vars = FrameInfoVarSchema() |
|
271 | 277 | |
|
272 | 278 | |
|
273 | 279 | class FrameInfoListSchema(colander.SequenceSchema): |
|
274 | 280 | """ |
|
275 | 281 | Validates format of list of traceback lines |
|
276 | 282 | """ |
|
277 | 283 | frame = colander.SchemaNode(UnknownType()) |
|
278 | 284 | |
|
279 | 285 | |
|
280 | 286 | class ReportDetailBaseSchema(colander.MappingSchema): |
|
281 | 287 | """ |
|
282 | 288 | Validates format of report - ie. request parameters and stats for a request in report group |
|
283 | 289 | """ |
|
284 | 290 | username = colander.SchemaNode(colander.String(), |
|
285 | 291 | preparer=[shortener_factory(255), |
|
286 | 292 | lambda x: x or ''], |
|
287 | 293 | missing='') |
|
288 | 294 | request_id = colander.SchemaNode(colander.String(), |
|
289 | 295 | preparer=shortener_factory(40), |
|
290 | 296 | missing='') |
|
291 | 297 | url = colander.SchemaNode(colander.String(), |
|
292 | 298 | preparer=shortener_factory(1024), missing='') |
|
293 | 299 | ip = colander.SchemaNode(colander.String(), preparer=shortener_factory(39), |
|
294 | 300 | missing=None) |
|
295 | start_time = colander.SchemaNode(NonTZDate(), |

301 | start_time = colander.SchemaNode(NonTZDate(), | |
|
302 | validator=optional_limited_date, | |
|
296 | 303 | missing=deferred_utcnow) |
|
297 | end_time = colander.SchemaNode(NonTZDate(), |

304 | end_time = colander.SchemaNode(NonTZDate(), | |
|
305 | validator=optional_limited_date, | |
|
298 | 306 | missing=None) |
|
299 | 307 | user_agent = colander.SchemaNode(colander.String(), |
|
300 | 308 | preparer=[shortener_factory(512), |
|
301 | 309 | lambda x: x or ''], |
|
302 | 310 | missing='') |
|
303 | 311 | message = colander.SchemaNode(colander.String(), |
|
304 | 312 | preparer=shortener_factory(2048), |
|
305 | 313 | missing='') |
|
306 | 314 | group_string = colander.SchemaNode(colander.String(), |
|
307 | 315 | validator=colander.Length(1, 512), |
|
308 | 316 | missing=None) |
|
309 | 317 | request_stats = RequestStatsSchema(missing=None) |
|
310 | 318 | request = colander.SchemaNode(colander.Mapping(unknown='preserve'), |
|
311 | 319 | missing={}) |
|
312 | 320 | traceback = FrameInfoListSchema(missing=None) |
|
313 | 321 | slow_calls = SlowCallListSchema(missing=[]) |
|
314 | 322 | extra = ExtraSchemaList() |
|
315 | 323 | |
|
316 | 324 | |
|
317 | 325 | class ReportDetailSchema_0_5(ReportDetailBaseSchema): |
|
318 | 326 | pass |
|
319 | 327 | |
|
320 | 328 | |
|
321 | 329 | class ReportDetailSchemaPermissiveDate_0_5(ReportDetailSchema_0_5): |
|
322 | 330 | start_time = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow) |
|
323 | 331 | end_time = colander.SchemaNode(NonTZDate(), missing=None) |
|
324 | 332 | |
|
325 | 333 | |
|
326 | 334 | class ReportSchemaBase(colander.MappingSchema): |
|
327 | 335 | """ |
|
328 | 336 | Validates format of report group |
|
329 | 337 | """ |
|
330 | 338 | client = colander.SchemaNode(colander.String(), |
|
331 | 339 | preparer=lambda x: x or 'unknown') |
|
332 | 340 | server = colander.SchemaNode( |
|
333 | 341 | colander.String(), |
|
334 | 342 | preparer=[ |
|
335 | 343 | lambda x: x.lower() if x else 'unknown', shortener_factory(128)], |
|
336 | 344 | missing='unknown') |
|
337 | 345 | priority = colander.SchemaNode(colander.Int(), |
|
338 | 346 | preparer=[lambda x: x or 5], |
|
339 | 347 | validator=colander.Range(1, 10), |
|
340 | 348 | missing=5) |
|
341 | 349 | language = colander.SchemaNode(colander.String(), missing='unknown') |
|
342 | 350 | error = colander.SchemaNode(colander.String(), |
|
343 | 351 | preparer=shortener_factory(512), |
|
344 | 352 | missing='') |
|
345 | 353 | view_name = colander.SchemaNode(colander.String(), |
|
346 | 354 | preparer=[shortener_factory(128), |
|
347 | 355 | lambda x: x or ''], |
|
348 | 356 | missing='') |
|
349 | 357 | http_status = colander.SchemaNode(colander.Int(), |
|
350 | 358 | preparer=[lambda x: x or 200], |
|
351 | 359 | validator=colander.Range(1)) |
|
352 | 360 | |
|
353 | 361 | occurences = colander.SchemaNode(colander.Int(), |
|
354 | 362 | validator=colander.Range(1, 99999999999), |
|
355 | 363 | missing=1) |
|
356 | 364 | tags = TagSchemaList() |
|
357 | 365 | |
|
358 | 366 | |
|
359 | 367 | class ReportSchema_0_5(ReportSchemaBase, ReportDetailSchema_0_5): |
|
360 | 368 | pass |
|
361 | 369 | |
|
362 | 370 | |
|
363 | 371 | class ReportSchemaPermissiveDate_0_5(ReportSchemaBase, |
|
364 | 372 | ReportDetailSchemaPermissiveDate_0_5): |
|
365 | 373 | pass |
|
366 | 374 | |
|
367 | 375 | |
|
368 | 376 | class ReportListSchema_0_5(colander.SequenceSchema): |
|
369 | 377 | """ |
|
370 | 378 | Validates format of list of report groups |
|
371 | 379 | """ |
|
372 | 380 | report = ReportSchema_0_5() |
|
373 | 381 | validator = colander.Length(1) |
|
374 | 382 | |
|
375 | 383 | |
|
376 | 384 | class ReportListPermissiveDateSchema_0_5(colander.SequenceSchema): |
|
377 | 385 | """ |
|
378 | 386 | Validates format of list of report groups |
|
379 | 387 | """ |
|
380 | 388 | report = ReportSchemaPermissiveDate_0_5() |
|
381 | 389 | validator = colander.Length(1) |
|
382 | 390 | |
|
383 | 391 | |
|
384 | 392 | class LogSchema(colander.MappingSchema): |
|
385 | 393 | """ |
|
386 | 394 | Validates format of individual log entry |
|
387 | 395 | """ |
|
388 | 396 | primary_key = colander.SchemaNode(UnknownType(), |
|
389 | 397 | preparer=[cast_to_unicode_or_null, |
|
390 | 398 | shortener_factory(128)], |
|
391 | 399 | missing=None) |
|
392 | 400 | log_level = colander.SchemaNode(colander.String(), |
|
393 | 401 | preparer=shortener_factory(10), |
|
394 | 402 | missing='UNKNOWN') |
|
395 | 403 | message = colander.SchemaNode(colander.String(), |
|
396 | 404 | preparer=shortener_factory(4096), |
|
397 | 405 | missing='') |
|
398 | 406 | namespace = colander.SchemaNode(colander.String(), |
|
399 | 407 | preparer=shortener_factory(128), |
|
400 | 408 | missing='') |
|
401 | 409 | request_id = colander.SchemaNode(colander.String(), |
|
402 | 410 | preparer=shortener_factory(40), |
|
403 | 411 | missing='') |
|
404 | 412 | server = colander.SchemaNode(colander.String(), |
|
405 | 413 | preparer=shortener_factory(128), |
|
406 | 414 | missing='unknown') |
|
407 | 415 | date = colander.SchemaNode(NonTZDate(), |
|
408 | 416 | validator=limited_date, |
|
409 | 417 | missing=deferred_utcnow) |
|
410 | 418 | tags = TagSchemaList() |
|
411 | 419 | |
|
412 | 420 | |
|
413 | 421 | class LogSchemaPermanent(LogSchema): |
|
414 | 422 | date = colander.SchemaNode(NonTZDate(), |
|
415 | 423 | missing=deferred_utcnow) |
|
416 | 424 | permanent = colander.SchemaNode(colander.Boolean(), missing=False) |
|
417 | 425 | |
|
418 | 426 | |
|
419 | 427 | class LogListSchema(colander.SequenceSchema): |
|
420 | 428 | """ |
|
421 | 429 | Validates format of list of log entries |
|
422 | 430 | """ |
|
423 | 431 | log = LogSchema() |
|
424 | 432 | validator = colander.Length(1) |
|
425 | 433 | |
|
426 | 434 | |
|
427 | 435 | class LogListPermanentSchema(colander.SequenceSchema): |
|
428 | 436 | """ |
|
429 | 437 | Validates format of list of log entries |
|
430 | 438 | """ |
|
431 | 439 | log = LogSchemaPermanent() |
|
432 | 440 | validator = colander.Length(1) |
|
433 | 441 | |
|
434 | 442 | |
|
435 | 443 | class ViewRequestStatsSchema(RequestStatsSchema): |
|
436 | 444 | requests = colander.SchemaNode(colander.Integer(), |
|
437 | 445 | validator=colander.Range(0), |
|
438 | 446 | missing=0) |
|
439 | 447 | |
|
440 | 448 | |
|
441 | 449 | class ViewMetricTupleSchema(colander.TupleSchema): |
|
442 | 450 | """ |
|
443 | 451 | Validates list of views and their corresponding request stats object ie: |
|
444 | 452 | ["dir/module:func",{"custom": 0.0..}] |
|
445 | 453 | """ |
|
446 | 454 | view_name = colander.SchemaNode(colander.String(), |
|
447 | 455 | preparer=[shortener_factory(128), |
|
448 | 456 | lambda x: x or 'unknown'], |
|
449 | 457 | missing='unknown') |
|
450 | 458 | metrics = ViewRequestStatsSchema() |
|
451 | 459 | |
|
452 | 460 | |
|
453 | 461 | class ViewMetricListSchema(colander.SequenceSchema): |
|
454 | 462 | """ |
|
455 | 463 | Validates the list of view breakdown stats objects |

456 | 464 | (the "metrics" key of a server/time object) |
|
457 | 465 | """ |
|
458 | 466 | view_tuple = ViewMetricTupleSchema() |
|
459 | 467 | validator = colander.Length(1) |
|
460 | 468 | |
|
461 | 469 | |
|
462 | 470 | class ViewMetricSchema(colander.MappingSchema): |
|
463 | 471 | """ |
|
464 | 472 | Validates server/timeinterval object, ie: |
|
465 | 473 | {server/time object} |
|
466 | 474 | |
|
467 | 475 | """ |
|
468 | 476 | timestamp = colander.SchemaNode(NonTZDate(), |
|
469 | 477 | validator=limited_date, |
|
470 | 478 | missing=None) |
|
471 | 479 | server = colander.SchemaNode(colander.String(), |
|
472 | 480 | preparer=[shortener_factory(128), |
|
473 | 481 | lambda x: x or 'unknown'], |
|
474 | 482 | missing='unknown') |
|
475 | 483 | metrics = ViewMetricListSchema() |
|
476 | 484 | |
|
477 | 485 | |
|
478 | 486 | class GeneralMetricSchema(colander.MappingSchema): |
|
479 | 487 | """ |
|
480 | 488 | Validates universal metric schema |
|
481 | 489 | |
|
482 | 490 | """ |
|
483 | 491 | namespace = colander.SchemaNode(colander.String(), missing='', |
|
484 | 492 | preparer=shortener_factory(128)) |
|
485 | 493 | |
|
486 | 494 | server_name = colander.SchemaNode(colander.String(), |
|
487 | 495 | preparer=[shortener_factory(128), |
|
488 | 496 | lambda x: x or 'unknown'], |
|
489 | 497 | missing='unknown') |
|
490 | 498 | timestamp = colander.SchemaNode(NonTZDate(), validator=limited_date, |
|
491 | 499 | missing=deferred_utcnow) |
|
492 | 500 | tags = TagSchemaList(missing=colander.required) |
|
493 | 501 | |
|
494 | 502 | |
|
495 | 503 | class GeneralMetricPermanentSchema(GeneralMetricSchema): |
|
496 | 504 | """ |
|
497 | 505 | Validates universal metric schema |
|
498 | 506 | |
|
499 | 507 | """ |
|
500 | 508 | timestamp = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow) |
|
501 | 509 | |
|
502 | 510 | |
|
503 | 511 | class GeneralMetricsListSchema(colander.SequenceSchema): |
|
504 | 512 | metric = GeneralMetricSchema() |
|
505 | 513 | validator = colander.Length(1) |
|
506 | 514 | |
|
507 | 515 | |
|
508 | 516 | class GeneralMetricsPermanentListSchema(colander.SequenceSchema): |
|
509 | 517 | metric = GeneralMetricPermanentSchema() |
|
510 | 518 | validator = colander.Length(1) |
|
511 | 519 | |
|
512 | 520 | |
|
513 | 521 | class MetricsListSchema(colander.SequenceSchema): |
|
514 | 522 | """ |
|
515 | 523 | Validates list of metrics objects ie: |
|
516 | 524 | [{server/time object}, ] part |
|
517 | 525 | |
|
518 | 526 | |
|
519 | 527 | """ |
|
520 | 528 | metric = ViewMetricSchema() |
|
521 | 529 | validator = colander.Length(1) |
|
522 | 530 | |
|
523 | 531 | |
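
An illustrative payload (all values invented) that MetricsListSchema above accepts; each inner list item maps to ViewMetricTupleSchema, and any counters omitted from the stats dict default to 0:

    import datetime
    payload = [{
        "server": "web01",                                    # hypothetical host name
        "timestamp": datetime.datetime.utcnow().isoformat(),  # must be recent to pass limited_date
        "metrics": [
            ["users:index", {"requests": 12, "main": 1.5, "sql": 0.4, "sql_calls": 3}],
        ],
    }]
    dataset = MetricsListSchema().deserialize(payload)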
|
524 | 532 | class StringToAppList(object): |
|
525 | 533 | """ |
|
526 | 534 | Returns a validated list of application ids from the user query and the |

527 | 535 | set of applications the user is allowed to look at; |

528 | 536 | transforms a bare string into a list containing a single integer |
|
529 | 537 | """ |
|
530 | 538 | |
|
531 | 539 | def serialize(self, node, appstruct): |
|
532 | 540 | if appstruct is null: |
|
533 | 541 | return null |
|
534 | 542 | return appstruct |
|
535 | 543 | |
|
536 | 544 | def deserialize(self, node, cstruct): |
|
537 | 545 | if cstruct is null: |
|
538 | 546 | return null |
|
539 | 547 | |
|
540 | 548 | apps = set([int(a) for a in node.bindings['resources']]) |
|
541 | 549 | |
|
542 | 550 | if isinstance(cstruct, str): |
|
543 | 551 | cstruct = [cstruct] |
|
544 | 552 | |
|
545 | 553 | cstruct = [int(a) for a in cstruct] |
|
546 | 554 | |
|
547 | 555 | valid_apps = list(apps.intersection(set(cstruct))) |
|
548 | 556 | if valid_apps: |
|
549 | 557 | return valid_apps |
|
550 | 558 | return null |
|
551 | 559 | |
|
552 | 560 | def cstruct_children(self): |
|
553 | 561 | return [] |
|
554 | 562 | |
|
555 | 563 | |
|
556 | 564 | @colander.deferred |
|
557 | 565 | def possible_applications_validator(node, kw): |
|
558 | 566 | possible_apps = [int(a) for a in kw['resources']] |
|
559 | 567 | return colander.All(colander.ContainsOnly(possible_apps), |
|
560 | 568 | colander.Length(1)) |
|
561 | 569 | |
|
562 | 570 | |
|
563 | 571 | @colander.deferred |
|
564 | 572 | def possible_applications(node, kw): |
|
565 | 573 | return [int(a) for a in kw['resources']] |
|
566 | 574 | |
|
567 | 575 | |
|
568 | 576 | @colander.deferred |
|
569 | 577 | def today_start(node, kw): |
|
570 | 578 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, |
|
571 | 579 | minute=0, |
|
572 | 580 | hour=0) |
|
573 | 581 | |
|
574 | 582 | |
|
575 | 583 | @colander.deferred |
|
576 | 584 | def today_end(node, kw): |
|
577 | 585 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, |
|
578 | 586 | minute=59, hour=23) |
|
579 | 587 | |
|
580 | 588 | |
|
581 | 589 | @colander.deferred |
|
582 | 590 | def old_start(node, kw): |
|
583 | 591 | t_delta = datetime.timedelta(days=90) |
|
584 | 592 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, |
|
585 | 593 | minute=0, |
|
586 | 594 | hour=0) - t_delta |
|
587 | 595 | |
|
588 | 596 | |
|
589 | 597 | @colander.deferred |
|
590 | 598 | def today_end(node, kw): |
|
591 | 599 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, |
|
592 | 600 | minute=59, hour=23) |
|
593 | 601 | |
|
594 | 602 | |
|
595 | 603 | class PermissiveDate(colander.DateTime): |
|
596 | 604 | """ Returns null for incorrect date format - also removes tz info""" |
|
597 | 605 | |
|
598 | 606 | def deserialize(self, node, cstruct): |
|
599 | 607 | if not cstruct: |
|
600 | 608 | return null |
|
601 | 609 | |
|
602 | 610 | try: |
|
603 | 611 | result = colander.iso8601.parse_date( |
|
604 | 612 | cstruct, default_timezone=self.default_tzinfo) |
|
605 | 613 | except colander.iso8601.ParseError: |
|
606 | 614 | return null |
|
607 | 615 | return result.replace(tzinfo=None) |
|
608 | 616 | |
|
609 | 617 | |
|
610 | 618 | class LogSearchSchema(colander.MappingSchema): |
|
611 | 619 | def schema_type(self, **kw): |
|
612 | 620 | return colander.Mapping(unknown='preserve') |
|
613 | 621 | |
|
614 | 622 | resource = colander.SchemaNode(StringToAppList(), |
|
615 | 623 | validator=possible_applications_validator, |
|
616 | 624 | missing=possible_applications) |
|
617 | 625 | |
|
618 | 626 | message = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
619 | 627 | colander.SchemaNode(colander.String()), |
|
620 | 628 | missing=None) |
|
621 | 629 | level = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
622 | 630 | colander.SchemaNode(colander.String()), |
|
623 | 631 | preparer=lowercase_preparer, |
|
624 | 632 | missing=None) |
|
625 | 633 | namespace = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
626 | 634 | colander.SchemaNode(colander.String()), |
|
627 | 635 | preparer=lowercase_preparer, |
|
628 | 636 | missing=None) |
|
629 | 637 | request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
630 | 638 | colander.SchemaNode(colander.String()), |
|
631 | 639 | preparer=lowercase_preparer, |
|
632 | 640 | missing=None) |
|
633 | 641 | start_date = colander.SchemaNode(PermissiveDate(), |
|
634 | 642 | missing=None) |
|
635 | 643 | end_date = colander.SchemaNode(PermissiveDate(), |
|
636 | 644 | missing=None) |
|
637 | 645 | page = colander.SchemaNode(colander.Integer(), |
|
638 | 646 | validator=colander.Range(min=1), |
|
639 | 647 | missing=1) |
|
640 | 648 | |
|
641 | 649 | |
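
A usage sketch, assuming the schema is bound with the ids of applications the current user may query (both the deferred validators and StringToAppList read resources from the bind keywords); the ids and query values are invented:

    search = LogSearchSchema().bind(resources=[1, 2])
    filters = search.deserialize({"resource": "1", "page": "2", "level": ["WARNING"]})
    # filters['resource'] == [1], filters['page'] == 2,
    # filters['level'] == ['warning'] after lowercase_preparer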
|
642 | 650 | class ReportSearchSchema(colander.MappingSchema): |
|
643 | 651 | def schema_type(self, **kw): |
|
644 | 652 | return colander.Mapping(unknown='preserve') |
|
645 | 653 | |
|
646 | 654 | resource = colander.SchemaNode(StringToAppList(), |
|
647 | 655 | validator=possible_applications_validator, |
|
648 | 656 | missing=possible_applications) |
|
649 | 657 | request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
650 | 658 | colander.SchemaNode(colander.String()), |
|
651 | 659 | missing=None) |
|
652 | 660 | start_date = colander.SchemaNode(PermissiveDate(), |
|
653 | 661 | missing=None) |
|
654 | 662 | end_date = colander.SchemaNode(PermissiveDate(), |
|
655 | 663 | missing=None) |
|
656 | 664 | page = colander.SchemaNode(colander.Integer(), |
|
657 | 665 | validator=colander.Range(min=1), |
|
658 | 666 | missing=1) |
|
659 | 667 | |
|
660 | 668 | min_occurences = colander.SchemaNode( |
|
661 | 669 | colander.Sequence(accept_scalar=True), |
|
662 | 670 | colander.SchemaNode(colander.Integer()), |
|
663 | 671 | missing=None) |
|
664 | 672 | |
|
665 | 673 | http_status = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
666 | 674 | colander.SchemaNode(colander.Integer()), |
|
667 | 675 | missing=None) |
|
668 | 676 | priority = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
669 | 677 | colander.SchemaNode(colander.Integer()), |
|
670 | 678 | missing=None) |
|
671 | 679 | error = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
672 | 680 | colander.SchemaNode(colander.String()), |
|
673 | 681 | missing=None) |
|
674 | 682 | url_path = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
675 | 683 | colander.SchemaNode(colander.String()), |
|
676 | 684 | missing=None) |
|
677 | 685 | url_domain = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
678 | 686 | colander.SchemaNode(colander.String()), |
|
679 | 687 | missing=None) |
|
680 | 688 | report_status = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
681 | 689 | colander.SchemaNode(colander.String()), |
|
682 | 690 | missing=None) |
|
683 | 691 | min_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
684 | 692 | colander.SchemaNode(colander.Float()), |
|
685 | 693 | missing=None) |
|
686 | 694 | max_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
687 | 695 | colander.SchemaNode(colander.Float()), |
|
688 | 696 | missing=None) |
|
689 | 697 | |
|
690 | 698 | |
|
691 | 699 | class TagSchema(colander.MappingSchema): |
|
692 | 700 | """ |
|
693 | 701 | Used in log search |
|
694 | 702 | """ |
|
695 | 703 | name = colander.SchemaNode(colander.String(), |
|
696 | 704 | validator=colander.Length(1, 32)) |
|
697 | 705 | value = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
698 | 706 | colander.SchemaNode(colander.String(), |
|
699 | 707 | validator=colander.Length( |
|
700 | 708 | 1, 128)), |
|
701 | 709 | missing=None) |
|
702 | 710 | op = colander.SchemaNode(colander.String(), |
|
703 | 711 | validator=colander.Length(1, 128), |
|
704 | 712 | missing=None) |
|
705 | 713 | |
|
706 | 714 | |
|
707 | 715 | class TagListSchema(colander.SequenceSchema): |
|
708 | 716 | tag = TagSchema() |
|
709 | 717 | |
|
710 | 718 | |
|
711 | 719 | class RuleFieldType(object): |
|
712 | 720 | """ Validator which succeeds if the value passed to it is one of |
|
713 | 721 | a fixed set of values """ |
|
714 | 722 | |
|
715 | 723 | def __init__(self, cast_to): |
|
716 | 724 | self.cast_to = cast_to |
|
717 | 725 | |
|
718 | 726 | def __call__(self, node, value): |
|
719 | 727 | try: |
|
720 | 728 | if self.cast_to == 'int': |
|
721 | 729 | int(value) |
|
722 | 730 | elif self.cast_to == 'float': |
|
723 | 731 | float(value) |
|
724 | 732 | elif self.cast_to == 'unicode': |
|
725 | 733 | str(value) |
|
726 | 734 | except (ValueError, TypeError): |
|
727 | 735 | raise colander.Invalid(node, |
|
728 | 736 | "Can't cast {} to {}".format( |
|
729 | 737 | value, self.cast_to)) |
|
730 | 738 | |
|
731 | 739 | |
|
732 | 740 | def build_rule_schema(ruleset, check_matrix): |
|
733 | 741 | """ |
|
734 | 742 | Accepts ruleset and a map of fields/possible operations and builds |
|
735 | 743 | validation class |
|
736 | 744 | """ |
|
737 | 745 | |
|
738 | 746 | schema = colander.SchemaNode(colander.Mapping()) |
|
739 | 747 | schema.add(colander.SchemaNode(colander.String(), name='field')) |
|
740 | 748 | |
|
741 | 749 | if ruleset['field'] in ['__AND__', '__OR__', '__NOT__']: |
|
742 | 750 | subrules = colander.SchemaNode(colander.Tuple(), name='rules') |
|
743 | 751 | for rule in ruleset['rules']: |
|
744 | 752 | subrules.add(build_rule_schema(rule, check_matrix)) |
|
745 | 753 | schema.add(subrules) |
|
746 | 754 | else: |
|
747 | 755 | op_choices = check_matrix[ruleset['field']]['ops'] |
|
748 | 756 | cast_to = check_matrix[ruleset['field']]['type'] |
|
749 | 757 | schema.add(colander.SchemaNode(colander.String(), |
|
750 | 758 | validator=colander.OneOf(op_choices), |
|
751 | 759 | name='op')) |
|
752 | 760 | |
|
753 | 761 | schema.add(colander.SchemaNode(colander.String(), |
|
754 | 762 | name='value', |
|
755 | 763 | validator=RuleFieldType(cast_to))) |
|
756 | 764 | return schema |
|
757 | 765 | |
|
758 | 766 | |
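
A small sketch of the builder above for a single leaf rule; the one-field type matrix is a stand-in for the report_type_matrix fixture used by the tests earlier in this changeset:

    matrix = {'http_status': {"type": 'int', "ops": ('eq', 'ne')}}   # assumed minimal matrix
    leaf_rule = {"field": "http_status", "op": "eq", "value": "500"}
    schema = build_rule_schema(leaf_rule, matrix)
    clean = schema.deserialize(leaf_rule)   # raises colander.Invalid for an unknown op or uncastable value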
|
759 | 767 | class ConfigTypeSchema(colander.MappingSchema): |
|
760 | 768 | type = colander.SchemaNode(colander.String(), missing=None) |
|
761 | 769 | config = colander.SchemaNode(UnknownType(), missing=None) |
|
762 | 770 | |
|
763 | 771 | |
|
764 | 772 | class MappingListSchema(colander.SequenceSchema): |
|
765 | 773 | config = colander.SchemaNode(UnknownType()) |
@@ -1,438 +1,440 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
19 | 19 | # services, and proprietary license terms, please see |
|
20 | 20 | # https://rhodecode.com/licenses/ |
|
21 | 21 | |
|
22 | 22 | import base64 |
|
23 | 23 | import io |
|
24 | 24 | import datetime |
|
25 | 25 | import json |
|
26 | 26 | import logging |
|
27 | 27 | import urllib.request, urllib.parse, urllib.error |
|
28 | 28 | import zlib |
|
29 | 29 | |
|
30 | 30 | from gzip import GzipFile |
|
31 | 31 | from pyramid.view import view_config |
|
32 | 32 | from pyramid.httpexceptions import HTTPBadRequest |
|
33 | 33 | |
|
34 | 34 | import appenlight.celery.tasks as tasks |
|
35 | 35 | from appenlight.lib.api import rate_limiting, check_cors |
|
36 | 36 | from appenlight.lib.enums import ParsedSentryEventType |
|
37 | 37 | from appenlight.lib.utils import parse_proto |
|
38 | 38 | from appenlight.lib.utils.airbrake import parse_airbrake_xml |
|
39 | 39 | from appenlight.lib.utils.date_utils import convert_date |
|
40 | 40 | from appenlight.lib.utils.sentry import parse_sentry_event |
|
41 | 41 | from appenlight.lib.request import JSONException |
|
42 | 42 | from appenlight.validators import (LogListSchema, |
|
43 | 43 | MetricsListSchema, |
|
44 | 44 | GeneralMetricsListSchema, |
|
45 | 45 | GeneralMetricsPermanentListSchema, |
|
46 | 46 | GeneralMetricSchema, |
|
47 | 47 | GeneralMetricPermanentSchema, |
|
48 | 48 | LogListPermanentSchema, |
|
49 | 49 | ReportListSchema_0_5, |
|
50 | 50 | LogSchema, |
|
51 | 51 | LogSchemaPermanent, |
|
52 | 52 | ReportSchema_0_5) |
|
53 | 53 | |
|
54 | 54 | log = logging.getLogger(__name__) |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | @view_config(route_name='api_logs', renderer='string', permission='create', |
|
58 | 58 | require_csrf=False) |
|
59 | 59 | @view_config(route_name='api_log', renderer='string', permission='create', |
|
60 | 60 | require_csrf=False) |
|
61 | 61 | def logs_create(request): |
|
62 | 62 | """ |
|
63 | 63 | Endpoint for log aggregation |
|
64 | 64 | """ |
|
65 | 65 | application = request.context.resource |
|
66 | 66 | if request.method.upper() == 'OPTIONS': |
|
67 | 67 | return check_cors(request, application) |
|
68 | 68 | else: |
|
69 | 69 | check_cors(request, application, should_return=False) |
|
70 | 70 | |
|
71 | 71 | params = dict(request.params.copy()) |
|
72 | 72 | proto_version = parse_proto(params.get('protocol_version', '')) |
|
73 | 73 | payload = request.unsafe_json_body |
|
74 | 74 | sequence_accepted = request.matched_route.name == 'api_logs' |
|
75 | 75 | |
|
76 | 76 | if sequence_accepted: |
|
77 | 77 | if application.allow_permanent_storage: |
|
78 | 78 | schema = LogListPermanentSchema().bind( |
|
79 | 79 | utcnow=datetime.datetime.utcnow()) |
|
80 | 80 | else: |
|
81 | 81 | schema = LogListSchema().bind( |
|
82 | 82 | utcnow=datetime.datetime.utcnow()) |
|
83 | 83 | else: |
|
84 | 84 | if application.allow_permanent_storage: |
|
85 | 85 | schema = LogSchemaPermanent().bind( |
|
86 | 86 | utcnow=datetime.datetime.utcnow()) |
|
87 | 87 | else: |
|
88 | 88 | schema = LogSchema().bind( |
|
89 | 89 | utcnow=datetime.datetime.utcnow()) |
|
90 | 90 | |
|
91 | 91 | deserialized_logs = schema.deserialize(payload) |
|
92 | 92 | if sequence_accepted is False: |
|
93 | 93 | deserialized_logs = [deserialized_logs] |
|
94 | 94 | |
|
95 | 95 | rate_limiting(request, application, 'per_application_logs_rate_limit', |
|
96 | 96 | len(deserialized_logs)) |
|
97 | 97 | |
|
98 | 98 | # pprint.pprint(deserialized_logs) |
|
99 | 99 | |
|
100 | 100 | # we need to split those out so we can process the pkey ones one by one |
|
101 | 101 | non_pkey_logs = [log_dict for log_dict in deserialized_logs |
|
102 | 102 | if not log_dict['primary_key']] |
|
103 | 103 | pkey_dict = {} |
|
104 | 104 | # try to process the logs as best as we can and group together to reduce |
|
105 | 105 | # the amount of |
|
106 | 106 | for log_dict in deserialized_logs: |
|
107 | 107 | if log_dict['primary_key']: |
|
108 | 108 | key = (log_dict['primary_key'], log_dict['namespace'],) |
|
109 | 109 | if key not in pkey_dict:
|
110 | 110 | pkey_dict[key] = [] |
|
111 | 111 | pkey_dict[key].append(log_dict) |
|
112 | 112 | |
|
113 | 113 | if non_pkey_logs: |
|
114 | 114 | log.debug('%s non-pkey logs received: %s' % (application, |
|
115 | 115 | len(non_pkey_logs))) |
|
116 | 116 | tasks.add_logs.delay(application.resource_id, params, non_pkey_logs) |
|
117 | 117 | if pkey_dict: |
|
118 | 118 | logs_to_insert = [] |
|
119 | 119 | for primary_key_tuple, payload in pkey_dict.items(): |
|
120 | 120 | sorted_logs = sorted(payload, key=lambda x: x['date']) |
|
121 | 121 | logs_to_insert.append(sorted_logs[-1]) |
|
122 | 122 | log.debug('%s pkey logs received: %s' % (application, |
|
123 | 123 | len(logs_to_insert))) |
|
124 | 124 | tasks.add_logs.delay(application.resource_id, params, logs_to_insert) |
|
125 | 125 | |
|
126 | 126 | log.info('LOG call %s %s client:%s' % ( |
|
127 | 127 | application, proto_version, request.headers.get('user_agent'))) |
|
128 | 128 | return 'OK: Logs accepted' |
|
129 | 129 | |
|
130 | 130 | |
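The primary-key branch above deduplicates entries before they reach the task queue: logs sharing a (primary_key, namespace) pair are grouped together and only the newest one by date is kept. A minimal standalone sketch of that grouping, using made-up log dicts that carry only the fields the loop actually reads:

    from datetime import datetime

    # hypothetical deserialized log entries; only the fields used by the
    # grouping logic above are present
    logs = [
        {'primary_key': 'task-1', 'namespace': 'worker',
         'date': datetime(2016, 1, 1, 10, 0)},
        {'primary_key': 'task-1', 'namespace': 'worker',
         'date': datetime(2016, 1, 1, 10, 5)},
        {'primary_key': None, 'namespace': 'worker',
         'date': datetime(2016, 1, 1, 10, 1)},
    ]

    pkey_dict = {}
    for log_dict in logs:
        if log_dict['primary_key']:
            key = (log_dict['primary_key'], log_dict['namespace'])
            pkey_dict.setdefault(key, []).append(log_dict)

    # keep only the most recent entry per (primary_key, namespace) pair
    logs_to_insert = [sorted(group, key=lambda x: x['date'])[-1]
                      for group in pkey_dict.values()]
    print(len(logs_to_insert))  # 1 - only the 10:05 entry survives

Entries without a primary key skip this grouping entirely and are queued as-is, which is why the view splits the payload into the two lists first.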
|
131 | 131 | @view_config(route_name='api_request_stats', renderer='string', |
|
132 | 132 | permission='create', require_csrf=False) |
|
133 | 133 | @view_config(route_name='api_metrics', renderer='string', |
|
134 | 134 | permission='create', require_csrf=False) |
|
135 | 135 | def request_metrics_create(request): |
|
136 | 136 | """ |
|
137 | 137 | Endpoint for performance metrics, aggregates view performance stats |
|
138 | 138 | and converts them to general metric rows
|
139 | 139 | """ |
|
140 | 140 | application = request.context.resource |
|
141 | 141 | if request.method.upper() == 'OPTIONS': |
|
142 | 142 | return check_cors(request, application) |
|
143 | 143 | else: |
|
144 | 144 | check_cors(request, application, should_return=False) |
|
145 | 145 | |
|
146 | 146 | params = dict(request.params.copy()) |
|
147 | 147 | proto_version = parse_proto(params.get('protocol_version', '')) |
|
148 | 148 | |
|
149 | 149 | payload = request.unsafe_json_body |
|
150 | 150 | schema = MetricsListSchema() |
|
151 | 151 | dataset = schema.deserialize(payload) |
|
152 | 152 | |
|
153 | 153 | rate_limiting(request, application, 'per_application_metrics_rate_limit', |
|
154 | 154 | len(dataset)) |
|
155 | 155 | |
|
156 | 156 | # looping report data |
|
157 | 157 | metrics = {} |
|
158 | 158 | for metric in dataset: |
|
159 | 159 | server_name = metric.get('server', '').lower() or 'unknown' |
|
160 | 160 | start_interval = convert_date(metric['timestamp']) |
|
161 | 161 | start_interval = start_interval.replace(second=0, microsecond=0) |
|
162 | 162 | |
|
163 | 163 | for view_name, view_metrics in metric['metrics']: |
|
164 | 164 | key = '%s%s%s' % (server_name, start_interval, view_name)

165 | 165 | if key not in metrics:
|
166 | 166 | metrics[key] = {"requests": 0, "main": 0, "sql": 0, |
|
167 | 167 | "nosql": 0, "remote": 0, "tmpl": 0, |
|
168 | 168 | "custom": 0, 'sql_calls': 0, |
|
169 | 169 | 'nosql_calls': 0, |
|
170 | 170 | 'remote_calls': 0, 'tmpl_calls': 0, |
|
171 | 171 | 'custom_calls': 0, |
|
172 | 172 | "start_interval": start_interval, |
|
173 | 173 | "server_name": server_name, |
|
174 | 174 | "view_name": view_name |
|
175 | 175 | } |
|
176 | 176 | metrics[key]["requests"] += int(view_metrics['requests']) |
|
177 | 177 | metrics[key]["main"] += round(view_metrics['main'], 5) |
|
178 | 178 | metrics[key]["sql"] += round(view_metrics['sql'], 5) |
|
179 | 179 | metrics[key]["nosql"] += round(view_metrics['nosql'], 5) |
|
180 | 180 | metrics[key]["remote"] += round(view_metrics['remote'], 5) |
|
181 | 181 | metrics[key]["tmpl"] += round(view_metrics['tmpl'], 5) |
|
182 | 182 | metrics[key]["custom"] += round(view_metrics.get('custom', 0.0), |
|
183 | 183 | 5) |
|
184 | 184 | metrics[key]["sql_calls"] += int( |
|
185 | 185 | view_metrics.get('sql_calls', 0)) |
|
186 | 186 | metrics[key]["nosql_calls"] += int( |
|
187 | 187 | view_metrics.get('nosql_calls', 0)) |
|
188 | 188 | metrics[key]["remote_calls"] += int( |
|
189 | 189 | view_metrics.get('remote_calls', 0)) |
|
190 | 190 | metrics[key]["tmpl_calls"] += int( |
|
191 | 191 | view_metrics.get('tmpl_calls', 0)) |
|
192 | 192 | metrics[key]["custom_calls"] += int( |
|
193 | 193 | view_metrics.get('custom_calls', 0)) |
|
194 | 194 | |
|
195 | 195 | if not metrics[key]["requests"]: |
|
196 | 196 | # default to 1 request here because the validator can't enforce a minimum
|
197 | 197 | metrics[key]["requests"] = 1 |
|
198 | 198 | # metrics dict is being built to minimize |
|
199 | 199 | # the amount of queries used |
|
200 | 200 | # in case we get multiple rows from the same minute
|
201 | 201 | |
|
202 | 202 | normalized_metrics = [] |
|
203 | 203 | for metric in metrics.values(): |
|
204 | 204 | new_metric = { |
|
205 | 205 | 'namespace': 'appenlight.request_metric', |
|
206 | 206 | 'timestamp': metric.pop('start_interval'), |
|
207 | 207 | 'server_name': metric['server_name'], |
|
208 | 208 | 'tags': list(metric.items()) |
|
209 | 209 | } |
|
210 | 210 | normalized_metrics.append(new_metric) |
|
211 | 211 | |
|
212 | 212 | tasks.add_metrics.delay(application.resource_id, params, |
|
213 | 213 | normalized_metrics, proto_version) |
|
214 | 214 | |
|
215 | 215 | log.info('REQUEST METRICS call {} {} client:{}'.format( |
|
216 | 216 | application.resource_name, proto_version, |
|
217 | 217 | request.headers.get('user_agent'))) |
|
218 | 218 | return 'OK: request metrics accepted' |
|
219 | 219 | |
|
220 | 220 | |
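The aggregation loop above exists so that a single task call carries one row per (server, minute, view) bucket even when the client reports the same minute several times. A simplified sketch of the bucketing with invented sample data, trimmed down to the requests counter and the main timing:

    from datetime import datetime

    # hypothetical client payload, reduced to the fields this sketch uses
    dataset = [
        {'server': 'web01', 'timestamp': datetime(2016, 1, 1, 10, 0, 42),
         'metrics': [('home', {'requests': 2, 'main': 0.5})]},
        {'server': 'web01', 'timestamp': datetime(2016, 1, 1, 10, 0, 58),
         'metrics': [('home', {'requests': 1, 'main': 0.2})]},
    ]

    metrics = {}
    for entry in dataset:
        server_name = entry.get('server', '').lower() or 'unknown'
        start_interval = entry['timestamp'].replace(second=0, microsecond=0)
        for view_name, view_metrics in entry['metrics']:
            key = '%s%s%s' % (server_name, start_interval, view_name)
            if key not in metrics:
                metrics[key] = {'requests': 0, 'main': 0,
                                'server_name': server_name,
                                'start_interval': start_interval,
                                'view_name': view_name}
            metrics[key]['requests'] += int(view_metrics['requests'])
            metrics[key]['main'] += round(view_metrics['main'], 5)

    # one bucket: 3 requests and ~0.7s of main time for web01 / home
    print(list(metrics.values()))

Both samples fall into the same minute, so they collapse into a single normalized metric instead of two.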
|
221 | 221 | @view_config(route_name='api_general_metrics', renderer='string', |
|
222 | 222 | permission='create', require_csrf=False) |
|
223 | 223 | @view_config(route_name='api_general_metric', renderer='string', |
|
224 | 224 | permission='create', require_csrf=False) |
|
225 | 225 | def general_metrics_create(request): |
|
226 | 226 | """ |
|
227 | 227 | Endpoint for general metrics aggregation |
|
228 | 228 | """ |
|
229 | 229 | application = request.context.resource |
|
230 | 230 | if request.method.upper() == 'OPTIONS': |
|
231 | 231 | return check_cors(request, application) |
|
232 | 232 | else: |
|
233 | 233 | check_cors(request, application, should_return=False) |
|
234 | 234 | |
|
235 | 235 | params = dict(request.params.copy()) |
|
236 | 236 | proto_version = parse_proto(params.get('protocol_version', '')) |
|
237 | 237 | payload = request.unsafe_json_body |
|
238 | 238 | sequence_accepted = request.matched_route.name == 'api_general_metrics' |
|
239 | 239 | if sequence_accepted: |
|
240 | 240 | if application.allow_permanent_storage: |
|
241 | 241 | schema = GeneralMetricsPermanentListSchema().bind( |
|
242 | 242 | utcnow=datetime.datetime.utcnow()) |
|
243 | 243 | else: |
|
244 | 244 | schema = GeneralMetricsListSchema().bind( |
|
245 | 245 | utcnow=datetime.datetime.utcnow()) |
|
246 | 246 | else: |
|
247 | 247 | if application.allow_permanent_storage: |
|
248 | 248 | schema = GeneralMetricPermanentSchema().bind( |
|
249 | 249 | utcnow=datetime.datetime.utcnow()) |
|
250 | 250 | else: |
|
251 | 251 | schema = GeneralMetricSchema().bind( |
|
252 | 252 | utcnow=datetime.datetime.utcnow()) |
|
253 | 253 | |
|
254 | 254 | deserialized_metrics = schema.deserialize(payload) |
|
255 | 255 | if sequence_accepted is False: |
|
256 | 256 | deserialized_metrics = [deserialized_metrics] |
|
257 | 257 | |
|
258 | 258 | rate_limiting(request, application, 'per_application_metrics_rate_limit', |
|
259 | 259 | len(deserialized_metrics)) |
|
260 | 260 | |
|
261 | 261 | tasks.add_metrics.delay(application.resource_id, params, |
|
262 | 262 | deserialized_metrics, proto_version) |
|
263 | 263 | |
|
264 | 264 | log.info('METRICS call {} {} client:{}'.format( |
|
265 | 265 | application.resource_name, proto_version, |
|
266 | 266 | request.headers.get('user_agent'))) |
|
267 | 267 | return 'OK: Metrics accepted' |
|
268 | 268 | |
|
269 | 269 | |
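For orientation, a general metric handed to tasks.add_metrics ends up in roughly the same shape as the normalized request metrics built in the previous view; the values below are invented and the authoritative field list lives in the GeneralMetric* schemas, not in this sketch:

    import datetime

    # hypothetical general metric row: namespace, timestamp, server name
    # and a list of (tag, value) pairs
    example_metric = {
        'namespace': 'myapp.queue_depth',
        'timestamp': datetime.datetime.utcnow(),
        'server_name': 'worker01',
        'tags': [('queue', 'emails'), ('depth', 42)],
    }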
|
270 | 270 | @view_config(route_name='api_reports', renderer='string', permission='create', |
|
271 | 271 | require_csrf=False) |
|
272 | 272 | @view_config(route_name='api_slow_reports', renderer='string', |
|
273 | 273 | permission='create', require_csrf=False) |
|
274 | 274 | @view_config(route_name='api_report', renderer='string', permission='create', |
|
275 | 275 | require_csrf=False) |
|
276 | 276 | def reports_create(request): |
|
277 | 277 | """ |
|
278 | 278 | Endpoint for exception and slowness reports |
|
279 | 279 | """ |
|
280 | 280 | # route_url('reports') |
|
281 | 281 | application = request.context.resource |
|
282 | 282 | if request.method.upper() == 'OPTIONS': |
|
283 | 283 | return check_cors(request, application) |
|
284 | 284 | else: |
|
285 | 285 | check_cors(request, application, should_return=False) |
|
286 | 286 | params = dict(request.params.copy()) |
|
287 | 287 | proto_version = parse_proto(params.get('protocol_version', '')) |
|
288 | 288 | payload = request.unsafe_json_body |
|
289 | 289 | sequence_accepted = request.matched_route.name == 'api_reports' |
|
290 | 290 | |
|
291 | 291 | if sequence_accepted: |
|
292 | 292 | schema = ReportListSchema_0_5().bind( |
|
293 | 293 | utcnow=datetime.datetime.utcnow()) |
|
294 | 294 | else: |
|
295 | 295 | schema = ReportSchema_0_5().bind( |
|
296 | 296 | utcnow=datetime.datetime.utcnow()) |
|
297 | 297 | |
|
298 | 298 | deserialized_reports = schema.deserialize(payload) |
|
299 | 299 | if sequence_accepted is False: |
|
300 | 300 | deserialized_reports = [deserialized_reports] |
|
301 | 301 | if deserialized_reports: |
|
302 | 302 | rate_limiting(request, application, |
|
303 | 303 | 'per_application_reports_rate_limit', |
|
304 | 304 | len(deserialized_reports)) |
|
305 | 305 | |
|
306 | 306 | # pprint.pprint(deserialized_reports) |
|
307 | 307 | tasks.add_reports.delay(application.resource_id, params, |
|
308 | 308 | deserialized_reports) |
|
309 | 309 | log.info('REPORT call %s, %s client:%s' % ( |
|
310 | 310 | application, |
|
311 | 311 | proto_version, |
|
312 | 312 | request.headers.get('user_agent')) |
|
313 | 313 | ) |
|
314 | 314 | return 'OK: Reports accepted' |
|
315 | 315 | |
|
316 | 316 | |
|
317 | 317 | @view_config(route_name='api_airbrake', renderer='string', permission='create', |
|
318 | 318 | require_csrf=False) |
|
319 | 319 | def airbrake_xml_compat(request): |
|
320 | 320 | """ |
|
321 | 321 | Airbrake compatible endpoint for XML reports |
|
322 | 322 | """ |
|
323 | 323 | application = request.context.resource |
|
324 | 324 | if request.method.upper() == 'OPTIONS': |
|
325 | 325 | return check_cors(request, application) |
|
326 | 326 | else: |
|
327 | 327 | check_cors(request, application, should_return=False) |
|
328 | 328 | |
|
329 | 329 | params = dict(request.params.copy()) |
|
330 | 330 | |
|
331 | 331 | error_dict = parse_airbrake_xml(request) |
|
332 | 332 | schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow()) |
|
333 | 333 | deserialized_reports = schema.deserialize([error_dict]) |
|
334 | 334 | rate_limiting(request, application, 'per_application_reports_rate_limit', |
|
335 | 335 | len(deserialized_reports)) |
|
336 | 336 | |
|
337 | 337 | tasks.add_reports.delay(application.resource_id, params, |
|
338 | 338 | deserialized_reports) |
|
339 | 339 | log.info('%s AIRBRAKE call for application %s, api_ver:%s client:%s' % ( |
|
340 | 340 | 500, application.resource_name, |
|
341 | 341 | request.params.get('protocol_version', 'unknown'), |
|
342 | 342 | request.headers.get('user_agent')) |
|
343 | 343 | ) |
|
344 | 344 | return '<notice><id>no-id</id><url>%s</url></notice>' % \ |
|
345 | 345 | request.registry.settings['mailing.app_url'] |
|
346 | 346 | |
|
347 | 347 | |
|
348 | 348 | def decompress_gzip(data): |
|
349 | 349 | try: |
|
350 | 350 | fp = io.BytesIO(data)
|
351 | 351 | with GzipFile(fileobj=fp) as f: |
|
352 | 352 | return f.read() |
|
353 | 353 | except Exception as exc: |
|
354 | 354 | # log the error and reject the malformed payload
|
355 | 355 | log.error(exc) |
|
356 | 356 | raise HTTPBadRequest() |
|
357 | 357 | |
|
358 | 358 | |
|
359 | 359 | def decompress_zlib(data): |
|
360 | 360 | try: |
|
361 | 361 | return zlib.decompress(data) |
|
362 | 362 | except Exception as exc: |
|
363 | 363 | # log the error and reject the malformed payload
|
364 | 364 | log.error(exc) |
|
365 | 365 | raise HTTPBadRequest() |
|
366 | 366 | |
|
367 | 367 | |
|
368 | 368 | def decode_b64(data): |
|
369 | 369 | try: |
|
370 | 370 | return base64.b64decode(data) |
|
371 | 371 | except Exception as exc: |
|
372 | 372 | # log the error and reject the malformed payload
|
373 | 373 | log.error(exc) |
|
374 | 374 | raise HTTPBadRequest() |
|
375 | 375 | |
|
376 | 376 | |
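All three helpers above follow the same pattern: attempt one decoding step and convert any failure into a logged HTTPBadRequest rather than an unhandled traceback. A quick usage sketch, assuming the helpers and HTTPBadRequest are imported from this module:

    import base64
    import zlib

    payload = b'{"message": "hello"}'
    packed = base64.b64encode(zlib.compress(payload))

    # round-trip through the helpers defined above
    assert decompress_zlib(decode_b64(packed)) == payload

    # malformed input is logged and rejected with a 400 response
    try:
        decompress_zlib(b'not zlib data')
    except HTTPBadRequest:
        pass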
|
377 | 377 | @view_config(route_name='api_sentry', renderer='string', permission='create', |
|
378 | 378 | require_csrf=False) |
|
379 | 379 | @view_config(route_name='api_sentry_slash', renderer='string', |
|
380 | 380 | permission='create', require_csrf=False) |
|
381 | 381 | def sentry_compat(request): |
|
382 | 382 | """ |
|
383 | 383 | Sentry compatible endpoint |
|
384 | 384 | """ |
|
385 | 385 | application = request.context.resource |
|
386 | 386 | if request.method.upper() == 'OPTIONS': |
|
387 | 387 | return check_cors(request, application) |
|
388 | 388 | else: |
|
389 | 389 | check_cors(request, application, should_return=False) |
|
390 | 390 | |
|
391 | 391 | # handle various report encoding |
|
392 | 392 | content_encoding = request.headers.get('Content-Encoding') |
|
393 | 393 | content_type = request.headers.get('Content-Type') |
|
394 | 394 | if content_encoding == 'gzip': |
|
395 | 395 | body = decompress_gzip(request.body) |
|
396 | 396 | elif content_encoding == 'deflate': |
|
397 | 397 | body = decompress_zlib(request.body) |
|
398 | 398 | else: |
|
399 | 399 | body = request.body |
|
400 | 400 | # attempt to fix the string before decoding for misbehaving clients
|
401 | 401 | if content_type == 'application/x-www-form-urlencoded': |
|
402 | 402 | body = urllib.parse.unquote(body.decode('utf8')) |
|
403 | 403 | check_char = '{' if isinstance(body, str) else b'{' |
|
404 | 404 | if not body.startswith(check_char): |
|
405 | 405 | try: |
|
406 | 406 | body = decode_b64(body) |
|
407 | 407 | body = decompress_zlib(body) |
|
408 | 408 | except Exception as exc: |
|
409 | 409 | log.info(exc) |
|
410 | 410 | |
|
411 | 411 | try: |
|
412 | 412 | json_body = json.loads(body.decode('utf8')) |
|
413 | 413 | except ValueError: |
|
414 | 414 | raise JSONException("Incorrect JSON") |
|
415 | 415 | |
|
416 | 416 | event, event_type = parse_sentry_event(json_body) |
|
417 | 417 | |
|
418 | 418 | if event_type == ParsedSentryEventType.LOG: |
|
419 | 419 | if application.allow_permanent_storage: |
|
420 | 420 | schema = LogSchemaPermanent().bind( |
|
421 | 421 | utcnow=datetime.datetime.utcnow()) |
|
422 | 422 | else: |
|
423 | 423 | schema = LogSchema().bind( |
|
424 | 424 | utcnow=datetime.datetime.utcnow()) |
|
425 | 425 | deserialized_logs = schema.deserialize(event) |
|
426 | 426 | non_pkey_logs = [deserialized_logs] |
|
427 | 427 | log.debug('%s non-pkey logs received: %s' % (application, |
|
428 | 428 | len(non_pkey_logs))) |
|
429 | 429 | tasks.add_logs.delay(application.resource_id, {}, non_pkey_logs) |
|
430 | 430 | if event_type == ParsedSentryEventType.ERROR_REPORT: |
|
431 |     | schema = ReportSchema_0_5().bind(

    | 431 | schema = ReportSchema_0_5().bind(

    | 432 | utcnow=datetime.datetime.utcnow(),

    | 433 | allow_permanent_storage=application.allow_permanent_storage)
|
432 | 434 | deserialized_reports = [schema.deserialize(event)] |
|
433 | 435 | rate_limiting(request, application, |
|
434 | 436 | 'per_application_reports_rate_limit', |
|
435 | 437 | len(deserialized_reports)) |
|
436 | 438 | tasks.add_reports.delay(application.resource_id, {}, |
|
437 | 439 | deserialized_reports) |
|
438 | 440 | return 'OK: Events accepted' |
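One body shape the fallback branch above can unpack is the legacy Sentry transport, where the JSON event is zlib-compressed and then base64-encoded; because such a body does not start with '{', the view tries decode_b64() followed by decompress_zlib() before parsing. A hedged client-side sketch (the event fields are illustrative only):

    import base64
    import json
    import zlib

    event = {'message': 'something broke', 'level': 'error'}

    # compress then base64-encode; the endpoint reverses these two steps
    # before handing the JSON to parse_sentry_event()
    body = base64.b64encode(zlib.compress(json.dumps(event).encode('utf8')))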