@@ -1,1593 +1,1597 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# AppEnlight Enterprise Edition, including its added features, Support
# services, and proprietary license terms, please see
# https://rhodecode.com/licenses/

import copy
import logging
import mock
import pyramid
import pytest
import sqlalchemy as sa
import webob

from datetime import datetime
from pyramid import testing


from appenlight.models import DBSession
from appenlight.lib.ext_json import json


log = logging.getLogger(__name__)


class DummyContext(object):
    pass


@pytest.mark.usefixtures('base_app')
class BasicTest(object):
    pass


@pytest.mark.usefixtures('base_app')
class TestMigration(object):
    def test_migration(self):
        assert 1 == 1


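# Parsing of Sentry protocol v7 payloads into AppEnlight log/report dicts.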
class TestSentryProto_7(object):
    def test_log_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.lib.enums import ParsedSentryEventType
        from appenlight.lib.utils.sentry import parse_sentry_event
        event_dict, event_type = parse_sentry_event(
            payload_examples.SENTRY_LOG_PAYLOAD_7)
        assert ParsedSentryEventType.LOG == event_type
        assert event_dict['log_level'] == 'CRITICAL'
        assert event_dict['message'] == 'TEST from django logging'
        assert event_dict['namespace'] == 'testlogger'
        assert event_dict['request_id'] == '9a6172f2e6d2444582f83a6c333d9cfb'
        assert event_dict['server'] == 'ergo-virtual-machine'
        assert event_dict['date'] == datetime.utcnow().date().strftime(
            '%Y-%m-%dT%H:%M:%SZ')
        tags = [('site', 'example.com'),
                ('sys.argv', ["'manage.py'", "'runserver'"]),
                ('price', 6),
                ('tag', "'extra'"),
                ('dupa', True),
                ('project', 'sentry'),
                ('sentry_culprit', 'testlogger in index'),
                ('sentry_language', 'python'),
                ('sentry_release', 'test')]
        assert sorted(event_dict['tags']) == sorted(tags)

    def test_report_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.lib.enums import ParsedSentryEventType
        from appenlight.lib.utils.sentry import parse_sentry_event
        utcnow = datetime.utcnow().date().strftime('%Y-%m-%dT%H:%M:%SZ')
        event_dict, event_type = parse_sentry_event(
            payload_examples.SENTRY_PYTHON_PAYLOAD_7)
        assert ParsedSentryEventType.ERROR_REPORT == event_type
        assert event_dict['client'] == 'sentry'
        assert event_dict[
                   'error'] == 'Exception: test 500 ' \
                               '\u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105'
        assert event_dict['language'] == 'python'
        assert event_dict['ip'] == '127.0.0.1'
        assert event_dict['request_id'] == '9fae652c8c1c4d6a8eee09260f613a98'
        assert event_dict['server'] == 'ergo-virtual-machine'
        assert event_dict['start_time'] == utcnow
        assert event_dict['url'] == 'http://127.0.0.1:8000/error'
        assert event_dict['user_agent'] == 'Mozilla/5.0 (X11; Linux x86_64) ' \
                                           'AppleWebKit/537.36 (KHTML, ' \
                                           'like Gecko) Chrome/47.0.2526.106 ' \
                                           'Safari/537.36'
        assert event_dict['view_name'] == 'djangoapp.views in error'
        tags = [('site', 'example.com'), ('sentry_release', 'test')]
        assert sorted(event_dict['tags']) == sorted(tags)
        extra = [('sys.argv', ["'manage.py'", "'runserver'"]),
                 ('project', 'sentry')]
        assert sorted(event_dict['extra']) == sorted(extra)
        request = event_dict['request']
        assert request['url'] == 'http://127.0.0.1:8000/error'
        assert request['cookies'] == {'appenlight': 'X'}
        assert request['data'] is None
        assert request['method'] == 'GET'
        assert request['query_string'] == ''
        assert request['env'] == {'REMOTE_ADDR': '127.0.0.1',
                                  'SERVER_NAME': 'localhost',
                                  'SERVER_PORT': '8000'}
        assert request['headers'] == {
            'Accept': 'text/html,application/xhtml+xml,'
                      'application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Accept-Encoding': 'gzip, deflate, sdch',
            'Accept-Language': 'en-US,en;q=0.8,pl;q=0.6',
            'Connection': 'keep-alive',
            'Content-Length': '',
            'Content-Type': 'text/plain',
            'Cookie': 'appenlight=X',
            'Dnt': '1',
            'Host': '127.0.0.1:8000',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) '
                          'AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/47.0.2526.106 Safari/537.36'}
        traceback = event_dict['traceback']
        assert traceback[0]['cline'] == 'response = wrapped_callback(request, ' \
                                        '*callback_args, **callback_kwargs)'
        assert traceback[0]['file'] == 'django/core/handlers/base.py'
        assert traceback[0]['fn'] == 'get_response'
        assert traceback[0]['line'] == 111
        assert traceback[0]['module'] == 'django.core.handlers.base'

        assert traceback[1]['cline'] == "raise Exception(u'test 500 " \
                                        "\u0142\xf3\u201c\u0107\u201c\u0107" \
                                        "\u017c\u0105')"
        assert traceback[1]['file'] == 'djangoapp/views.py'
        assert traceback[1]['fn'] == 'error'
        assert traceback[1]['line'] == 84
        assert traceback[1]['module'] == 'djangoapp.views'
        assert sorted(traceback[1]['vars']) == sorted([
            ('c',
             '<sqlite3.Cursor object at 0x7fe7c82af8f0>'),
            ('request',
             '<WSGIRequest at 0x140633490316304>'),
            ('conn',
             '<sqlite3.Connection object at 0x7fe7c8b23bf8>')])


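# colander schema validation of report payloads for API protocol v0.5.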
class TestAPIReports_0_5_Validation(object):
    @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
    def test_no_payload(self, dummy_json):
        import colander
        from appenlight.validators import ReportListSchema_0_5
        utcnow = datetime.utcnow()
        schema = ReportListSchema_0_5().bind(utcnow=utcnow)
        with pytest.raises(colander.Invalid):
            schema.deserialize(dummy_json)

    def test_minimal_payload(self):
        dummy_json = [{}]
        import colander
        from appenlight.validators import ReportListSchema_0_5
        utcnow = datetime.utcnow()
        schema = ReportListSchema_0_5().bind(utcnow=utcnow)
        with pytest.raises(colander.Invalid):
            schema.deserialize(dummy_json)

    def test_minimal_payload_with_report_details(self):
        dummy_json = [{'report_details': [{}]}]
        from appenlight.validators import ReportListSchema_0_5
        utcnow = datetime.utcnow()
        schema = ReportListSchema_0_5().bind(utcnow=utcnow)

        deserialized = schema.deserialize(dummy_json)

        expected_deserialization = [
            {'language': 'unknown',
             'server': 'unknown',
             'occurences': 1,
             'priority': 5,
             'view_name': '',
             'client': 'unknown',
             'http_status': 200,
             'error': '',
             'tags': None,
             'username': '',
             'traceback': None,
             'extra': None,
             'url': '',
             'ip': None,
             'start_time': utcnow,
             'group_string': None,
             'request': {},
             'request_stats': None,
             'end_time': None,
             'request_id': '',
             'message': '',
             'slow_calls': [],
             'user_agent': ''
             }
        ]
        assert deserialized == expected_deserialization

    def test_full_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.validators import ReportListSchema_0_5
        PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_5)
        utcnow = datetime.utcnow()
        schema = ReportListSchema_0_5().bind(utcnow=utcnow)
        PYTHON_PAYLOAD["tags"] = [("foo", 1), ("action", "test"), ("baz", 1.1),
                                  ("date",
                                   utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))]
        dummy_json = [PYTHON_PAYLOAD]
        deserialized = schema.deserialize(dummy_json)[0]
        assert deserialized['error'] == PYTHON_PAYLOAD['error']
        assert deserialized['language'] == PYTHON_PAYLOAD['language']
        assert deserialized['server'] == PYTHON_PAYLOAD['server']
        assert deserialized['priority'] == PYTHON_PAYLOAD['priority']
        assert deserialized['view_name'] == PYTHON_PAYLOAD['view_name']
        assert deserialized['client'] == PYTHON_PAYLOAD['client']
        assert deserialized['http_status'] == PYTHON_PAYLOAD['http_status']
        assert deserialized['error'] == PYTHON_PAYLOAD['error']
        assert deserialized['occurences'] == PYTHON_PAYLOAD['occurences']
        assert deserialized['username'] == PYTHON_PAYLOAD['username']
        assert deserialized['traceback'] == PYTHON_PAYLOAD['traceback']
        assert deserialized['url'] == PYTHON_PAYLOAD['url']
        assert deserialized['ip'] == PYTHON_PAYLOAD['ip']
        assert deserialized['start_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
            PYTHON_PAYLOAD['start_time']
        assert deserialized['ip'] == PYTHON_PAYLOAD['ip']
        assert deserialized['group_string'] is None
        assert deserialized['request_stats'] == PYTHON_PAYLOAD['request_stats']
        assert deserialized['end_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
            PYTHON_PAYLOAD['end_time']
        assert deserialized['request_id'] == PYTHON_PAYLOAD['request_id']
        assert deserialized['message'] == PYTHON_PAYLOAD['message']
        assert deserialized['user_agent'] == PYTHON_PAYLOAD['user_agent']
        assert deserialized['slow_calls'][0]['start'].strftime(
            '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][
            'start']
        assert deserialized['slow_calls'][0]['end'].strftime(
            '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][
            'end']
        assert deserialized['slow_calls'][0]['statement'] == \
            PYTHON_PAYLOAD['slow_calls'][0]['statement']
        assert deserialized['slow_calls'][0]['parameters'] == \
            PYTHON_PAYLOAD['slow_calls'][0]['parameters']
        assert deserialized['slow_calls'][0]['type'] == \
            PYTHON_PAYLOAD['slow_calls'][0]['type']
        assert deserialized['slow_calls'][0]['subtype'] == \
            PYTHON_PAYLOAD['slow_calls'][0]['subtype']
        assert deserialized['slow_calls'][0]['location'] == ''
        assert deserialized['tags'] == [
            ('foo', 1), ('action', 'test'),
            ('baz', 1.1), ('date', utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))]


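# Schema validation of log entry payloads, including date parsing edge cases.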
@pytest.mark.usefixtures('log_schema')
class TestAPILogsValidation(object):
    @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
    def test_no_payload(self, dummy_json, log_schema):
        import colander

        with pytest.raises(colander.Invalid):
            log_schema.deserialize(dummy_json)

    def test_minimal_payload(self, log_schema):
        dummy_json = [{}]
        deserialized = log_schema.deserialize(dummy_json)[0]
        expected = {'log_level': 'UNKNOWN',
                    'namespace': '',
                    'server': 'unknown',
                    'request_id': '',
                    'primary_key': None,
                    'date': datetime.utcnow(),
                    'message': '',
                    'tags': None}
        assert deserialized['log_level'] == expected['log_level']
        assert deserialized['message'] == expected['message']
        assert deserialized['namespace'] == expected['namespace']
        assert deserialized['request_id'] == expected['request_id']
        assert deserialized['server'] == expected['server']
        assert deserialized['tags'] == expected['tags']
        assert deserialized['primary_key'] == expected['primary_key']

    def test_normal_payload(self, log_schema):
        import appenlight.tests.payload_examples as payload_examples
        deserialized = log_schema.deserialize(payload_examples.LOG_EXAMPLES)[0]
        expected = payload_examples.LOG_EXAMPLES[0]
        assert deserialized['log_level'] == expected['log_level']
        assert deserialized['message'] == expected['message']
        assert deserialized['namespace'] == expected['namespace']
        assert deserialized['request_id'] == expected['request_id']
        assert deserialized['server'] == expected['server']
        assert deserialized['date'].strftime('%Y-%m-%dT%H:%M:%S.%f') == \
            expected['date']
        assert deserialized['tags'][0][0] == "tag_name"
        assert deserialized['tags'][0][1] == "tag_value"
        assert deserialized['tags'][1][0] == "tag_name2"
        assert deserialized['tags'][1][1] == 2

    def test_normal_payload_date_without_microseconds(self, log_schema):
        import appenlight.tests.payload_examples as payload_examples
        LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
        LOG_EXAMPLE[0]['date'] = datetime.utcnow().strftime(
            '%Y-%m-%dT%H:%M:%S')
        deserialized = log_schema.deserialize(LOG_EXAMPLE)
        assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M:%S') == \
            LOG_EXAMPLE[0]['date']

    def test_normal_payload_date_without_seconds(self, log_schema):
        import appenlight.tests.payload_examples as payload_examples
        LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
        LOG_EXAMPLE[0]['date'] = datetime.utcnow().date().strftime(
            '%Y-%m-%dT%H:%M')
        deserialized = log_schema.deserialize(LOG_EXAMPLE)
        assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') == \
            LOG_EXAMPLE[0]['date']

    def test_payload_empty_date(self, log_schema):
        import appenlight.tests.payload_examples as payload_examples
        LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
        LOG_EXAMPLE[0]['date'] = None
        deserialized = log_schema.deserialize(LOG_EXAMPLE)
        assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None

    def test_payload_no_date(self, log_schema):
        import appenlight.tests.payload_examples as payload_examples
        LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
        LOG_EXAMPLE[0].pop('date', None)
        deserialized = log_schema.deserialize(LOG_EXAMPLE)
        assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None


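# Schema validation of general (custom counter) metrics payloads.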
@pytest.mark.usefixtures('general_metrics_schema')
class TestAPIGeneralMetricsValidation(object):
    @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
    def test_no_payload(self, dummy_json, general_metrics_schema):
        import colander

        with pytest.raises(colander.Invalid):
            general_metrics_schema.deserialize(dummy_json)

    def test_minimal_payload(self, general_metrics_schema):
        dummy_json = [{'tags': [['counter_a', 15.5], ['counter_b', 63]]}]
        deserialized = general_metrics_schema.deserialize(dummy_json)[0]
        expected = {'namespace': '',
                    'server_name': 'unknown',
                    'tags': [('counter_a', 15.5), ('counter_b', 63)],
                    'timestamp': datetime.utcnow()}
        assert deserialized['namespace'] == expected['namespace']
        assert deserialized['server_name'] == expected['server_name']
        assert deserialized['tags'] == expected['tags']

    def test_normal_payload(self, general_metrics_schema):
        import appenlight.tests.payload_examples as payload_examples
        dummy_json = [payload_examples.METRICS_PAYLOAD]
        deserialized = general_metrics_schema.deserialize(dummy_json)[0]
        expected = {'namespace': 'some.monitor',
                    'server_name': 'server.name',
                    'tags': [('usage_foo', 15.5), ('usage_bar', 63)],
                    'timestamp': datetime.utcnow()}
        assert deserialized['namespace'] == expected['namespace']
        assert deserialized['server_name'] == expected['server_name']
        assert deserialized['tags'] == expected['tags']


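# Schema validation of per-view request timing metrics payloads.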
@pytest.mark.usefixtures('request_metrics_schema')
class TestAPIRequestMetricsValidation(object):
    @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
    def test_no_payload(self, dummy_json, request_metrics_schema):
        import colander

        with pytest.raises(colander.Invalid):
            print(request_metrics_schema.deserialize(dummy_json))

    def test_normal_payload(self, request_metrics_schema):
        import appenlight.tests.payload_examples as payload_examples
        dummy_json = payload_examples.REQUEST_METRICS_EXAMPLES
        deserialized = request_metrics_schema.deserialize(dummy_json)[0]
        expected = {'metrics': [('dir/module:func',
                                 {'custom': 0.0,
                                  'custom_calls': 0.0,
                                  'main': 0.01664,
                                  'nosql': 0.00061,
                                  'nosql_calls': 23.0,
                                  'remote': 0.0,
                                  'remote_calls': 0.0,
                                  'requests': 1,
                                  'sql': 0.00105,
                                  'sql_calls': 2.0,
                                  'tmpl': 0.0,
                                  'tmpl_calls': 0.0}),
                                ('SomeView.function',
                                 {'custom': 0.0,
                                  'custom_calls': 0.0,
                                  'main': 0.647261,
                                  'nosql': 0.306554,
                                  'nosql_calls': 140.0,
                                  'remote': 0.0,
                                  'remote_calls': 0.0,
                                  'requests': 28,
                                  'sql': 0.0,
                                  'sql_calls': 0.0,
                                  'tmpl': 0.0,
                                  'tmpl_calls': 0.0})],
                    'server': 'some.server.hostname',
                    'timestamp': datetime.utcnow()}
        assert deserialized['server'] == expected['server']
        metric = deserialized['metrics'][0]
        expected_metric = expected['metrics'][0]
        assert metric[0] == expected_metric[0]
        assert sorted(metric[1].items()) == sorted(expected_metric[1].items())


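# End-to-end tests of the reports_create API view, including report grouping.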
@pytest.mark.usefixtures('default_application')
@pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
class TestAPIReportsView(object):
    def test_no_json_payload(self, default_application):
        import colander
        from appenlight.models.services.application import ApplicationService
        from appenlight.views.api import reports_create

        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request = testing.DummyRequest(
            headers={'Content-Type': 'application/json'})
        request.unsafe_json_body = ''
        request.context = context
        route = mock.Mock()
        route.name = 'api_reports'
        request.matched_route = route
        with pytest.raises(colander.Invalid):
            response = reports_create(request)

    def test_single_proper_json_0_5_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.views.api import reports_create
        from appenlight.models.services.application import ApplicationService
        from appenlight.models.report_group import ReportGroup
        route = mock.Mock()
        route.name = 'api_reports'
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.matched_route = route
        PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
        request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD)]
        reports_create(request)
        query = DBSession.query(ReportGroup)
        report = query.first()
        assert query.count() == 1
        assert report.total_reports == 1

    def test_grouping_0_5(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.views.api import reports_create
        from appenlight.models.services.application import ApplicationService
        from appenlight.models.report_group import ReportGroup
        route = mock.Mock()
        route.name = 'api_reports'
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.matched_route = route
        PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
        request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD),
                                    copy.deepcopy(PYTHON_PAYLOAD)]
        reports_create(request)
        query = DBSession.query(ReportGroup)
        report = query.first()
        assert query.count() == 1
        assert report.total_reports == 2

    def test_grouping_different_reports_0_5(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.views.api import reports_create
        from appenlight.models.services.application import ApplicationService
        from appenlight.models.report_group import ReportGroup
        route = mock.Mock()
        route.name = 'api_reports'
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.matched_route = route
        PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
        PARSED_REPORT_404 = payload_examples.PARSED_REPORT_404
        request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD),
                                    copy.deepcopy(PARSED_REPORT_404)]
        reports_create(request)
        query = DBSession.query(ReportGroup)
        report = query.first()
        assert query.count() == 2
        assert report.total_reports == 1


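# Airbrake XML compatibility endpoint: payload parsing and view response.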
@pytest.mark.usefixtures('default_application')
@pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
class TestAirbrakeXMLView(object):

    def test_normal_payload_parsing(self):
        import datetime
        import defusedxml.ElementTree as ElementTree
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.lib.utils.airbrake import parse_airbrake_xml
        from appenlight.validators import ReportListSchema_0_5

        context = DummyContext()
        request = testing.DummyRequest(
            headers={'Content-Type': 'application/xml'})
        request.context = context
        request.context.possibly_public = False
        root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE)
        request.context.airbrake_xml_etree = root
        error_dict = parse_airbrake_xml(request)
        schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
        deserialized_report = schema.deserialize([error_dict])[0]
        assert deserialized_report['client'] == 'Airbrake Notifier'
        assert deserialized_report['error'] == 'NameError: undefined local variable or method `sdfdfdf\' for #<#<Class:0x000000039a8b90>:0x00000002c53df0>'
        assert deserialized_report['http_status'] == 500
        assert deserialized_report['language'] == 'unknown'
        assert deserialized_report['message'] == ''
        assert deserialized_report['occurences'] == 1
        assert deserialized_report['priority'] == 5
        d_request = deserialized_report['request']
        assert d_request['GET'] == {'test': '1234'}
        assert d_request['action_dispatch.request.parameters'] == {
            'action': 'index',
            'controller': 'welcome',
            'test': '1234'}
        assert deserialized_report['request_id'] == 'c11b2267f3ad8b00a1768cae35559fa1'
        assert deserialized_report['server'] == 'ergo-desktop'
        assert deserialized_report['traceback'][0] == {
            'cline': 'block in start_thread',
            'file': '/home/ergo/.rbenv/versions/1.9.3-p327/lib/ruby/1.9.1/webrick/server.rb',
            'fn': 'block in start_thread',
            'line': '191',
            'module': '',
            'vars': {}}
        assert deserialized_report['traceback'][-1] == {
            'cline': '_app_views_welcome_index_html_erb___2570061166873166679_31748940',
            'file': '[PROJECT_ROOT]/app/views/welcome/index.html.erb',
            'fn': '_app_views_welcome_index_html_erb___2570061166873166679_31748940',
            'line': '3',
            'module': '',
            'vars': {}}
        assert deserialized_report['url'] == 'http://0.0.0.0:3000/welcome/index?test=1234'
        assert deserialized_report['view_name'] == 'welcome:index'

    def test_normal_payload_view(self):
        import defusedxml.ElementTree as ElementTree
        import appenlight.tests.payload_examples as payload_examples

        from appenlight.models.services.application import ApplicationService
        from appenlight.views.api import airbrake_xml_compat

        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request = testing.DummyRequest(
            headers={'Content-Type': 'application/xml'})
        request.context = context
        request.context.possibly_public = False
        root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE)
        request.context.airbrake_xml_etree = root
        route = mock.Mock()
        route.name = 'api_airbrake'
        request.matched_route = route
        result = airbrake_xml_compat(request)
        assert '<notice><id>' in result


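# End-to-end tests of the logs_create API view.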
@pytest.mark.usefixtures('default_application')
@pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
class TestAPILogView(object):
    def test_no_json_payload(self, base_app):
        import colander
        from appenlight.models.services.application import ApplicationService
        from appenlight.views.api import logs_create

        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request = testing.DummyRequest(
            headers={'Content-Type': 'application/json'})
        request.context = context
        request.registry = base_app.registry
        request.unsafe_json_body = ''
        route = mock.Mock()
        route.name = 'api_logs'
        request.matched_route = route
        with pytest.raises(colander.Invalid):
            response = logs_create(request)

    def test_single_json_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.models.log import Log
        from appenlight.views.api import logs_create
        from appenlight.models.services.application import ApplicationService
        route = mock.Mock()
        route.name = 'api_logs'
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.matched_route = route
        request.unsafe_json_body = [copy.deepcopy(
            payload_examples.LOG_EXAMPLES[0])]
        logs_create(request)
        query = DBSession.query(Log)
        log = query.first()
        assert query.count() == 1
        assert log.message == "OMG ValueError happened"

    def test_multiple_json_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.models.log import Log
        from appenlight.views.api import logs_create
        from appenlight.models.services.application import ApplicationService
        route = mock.Mock()
        route.name = 'api_logs'
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.matched_route = route
        LOG_PAYLOAD = payload_examples.LOG_EXAMPLES[0]
        LOG_PAYLOAD2 = payload_examples.LOG_EXAMPLES[1]
        request.unsafe_json_body = copy.deepcopy([LOG_PAYLOAD, LOG_PAYLOAD2])
        logs_create(request)
        query = DBSession.query(Log).order_by(sa.asc(Log.log_id))
        assert query.count() == 2
        assert query[0].message == "OMG ValueError happened"
        assert query[1].message == "OMG ValueError happened2"

    def test_public_key_rewriting(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.models.log import Log
        from appenlight.views.api import logs_create
        from appenlight.models.services.application import ApplicationService
        route = mock.Mock()
        route.name = 'api_logs'
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.matched_route = route

        LOG_PAYLOAD = copy.deepcopy(payload_examples.LOG_EXAMPLES[0])
        LOG_PAYLOAD2 = copy.deepcopy(payload_examples.LOG_EXAMPLES[1])
        LOG_PAYLOAD['primary_key'] = 'X2'
        LOG_PAYLOAD2['primary_key'] = 'X2'
        request.unsafe_json_body = [LOG_PAYLOAD, LOG_PAYLOAD2]
        logs_create(request)

        query = DBSession.query(Log).order_by(sa.asc(Log.log_id))
        assert query.count() == 1
        assert query[0].message == "OMG ValueError happened2"

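# Tests for the general metrics ingestion endpoint (general_metrics_create):
# payload validation plus storing single and batched metric payloads.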
@pytest.mark.usefixtures('default_application')
@pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
class TestAPIGeneralMetricsView(object):
    def test_no_json_payload(self, base_app):
        import colander
        from appenlight.models.services.application import ApplicationService
        from appenlight.views.api import general_metrics_create
        route = mock.Mock()
        route.name = 'api_general_metrics'
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request = testing.DummyRequest(
            headers={'Content-Type': 'application/json'})
        request.context = context
        request.registry = base_app.registry
        request.unsafe_json_body = ''
        request.matched_route = route
        with pytest.raises(colander.Invalid):
            general_metrics_create(request)

    def test_single_json_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.models.metric import Metric
        from appenlight.views.api import general_metrics_create
        from appenlight.models.services.application import ApplicationService
        route = mock.Mock()
        route.name = 'api_general_metric'
        request = pyramid.threadlocal.get_current_request()
        request.matched_route = route
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.unsafe_json_body = payload_examples.METRICS_PAYLOAD
        general_metrics_create(request)
        query = DBSession.query(Metric)
        metric = query.first()
        assert query.count() == 1
        assert metric.namespace == 'some.monitor'

    def test_multiple_json_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.models.metric import Metric
        from appenlight.views.api import general_metrics_create
        from appenlight.models.services.application import ApplicationService
        route = mock.Mock()
        route.name = 'api_general_metrics'
        request = pyramid.threadlocal.get_current_request()
        request.matched_route = route
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.unsafe_json_body = [
            copy.deepcopy(payload_examples.METRICS_PAYLOAD),
            copy.deepcopy(payload_examples.METRICS_PAYLOAD),
        ]
        general_metrics_create(request)
        query = DBSession.query(Metric)
        metric = query.first()
        assert query.count() == 2
        assert metric.namespace == 'some.monitor'


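# Message normalization used for grouping: object reprs that embed memory
# addresses (e.g. "<... object at 0x...>") should collapse to a stable form
# so otherwise-identical error messages can be grouped together.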
class TestGroupingMessageReplacements(object):
    def replace_default_repr_python(self):
        import re
        test_str = '''
ConnectionError: ConnectionError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) caused by: ConnectTimeoutError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)'))
'''
        regex = r'<(.*?) object at (.*?)>'
        # Illustrative normalization (the replacement token is an assumption,
        # not the shipped grouping code): strip the memory addresses so the
        # two otherwise-identical messages compare equal.
        normalized = re.sub(regex, r'<\1 object at ADDR>', test_str)
        assert '0x7f87a0ba9fd0' not in normalized


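# Rule.default_dict_struct_getter / default_obj_struct_getter resolve
# colon-separated paths (e.g. "a:c:d") against nested dicts and objects.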
class TestRulesKeyGetter(object):
    def test_default_dict_getter_top_key(self):
        from appenlight.lib.rule import Rule
        struct = {
            "a": {
                "b": 'b',
                "c": {
                    "d": 'd',
                    "g": {
                        "h": 'h'
                    }
                },
                "e": 'e'
            },
            "f": 'f'
        }
        result = Rule.default_dict_struct_getter(struct, "a")
        assert result == struct['a']

    def test_default_dict_getter_sub_key(self):
        from appenlight.lib.rule import Rule
        struct = {
            "a": {
                "b": 'b',
                "c": {
                    "d": 'd',
                    "g": {
                        "h": 'h'
                    }
                },
                "e": 'e'
            },
            "f": 'f'
        }
        result = Rule.default_dict_struct_getter(struct, 'a:b')
        assert result == struct['a']['b']
        result = Rule.default_dict_struct_getter(struct, 'a:c:d')
        assert result == struct['a']['c']['d']

    def test_default_obj_getter_top_key(self):
        from appenlight.lib.rule import Rule

        class TestStruct(object):
            def __init__(self, a, b):
                self.a = a
                self.b = b

        struct = TestStruct(a='a',
                            b=TestStruct(a='x', b='y'))
        result = Rule.default_obj_struct_getter(struct, "a")
        assert result == struct.a

    def test_default_obj_getter_sub_key(self):
        from appenlight.lib.rule import Rule

        class TestStruct(object):
            def __init__(self, name, a, b):
                self.name = name
                self.a = a
                self.b = b

            def __repr__(self):
                return '<obj {}>'.format(self.name)

        c = TestStruct('c', a=5, b='z')
        b = TestStruct('b', a=c, b='y')
        struct = TestStruct('a', a='a', b=b)
        result = Rule.default_obj_struct_getter(struct, 'b:b')
        assert result == struct.b.b
        result = Rule.default_obj_struct_getter(struct, 'b:a:b')
        assert result == struct.b.a.b


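# Single-field Rule matching: comparison operators against int, float and
# string values, plus normalized_type() coercion driven by the type matrix.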
@pytest.mark.usefixtures('report_type_matrix')
class TestRulesParsing():
    @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
        ('eq', 500, 500, True),
        ('eq', 600, 500, False),
        ('eq', 300, 500, False),
        ('eq', "300", 500, False),
        ('eq', "600", 500, False),
        ('eq', "500", 500, True),
        ('ne', 500, 500, False),
        ('ne', 600, 500, True),
        ('ne', 300, 500, True),
        ('ne', "300", 500, True),
        ('ne', "600", 500, True),
        ('ne', "500", 500, False),
        ('ge', 500, 500, True),
        ('ge', 600, 500, True),
        ('ge', 499, 500, False),
        ('gt', 499, 500, False),
        ('gt', 500, 500, False),
        ('gt', 501, 500, True),
        ('le', 499, 500, True),
        ('le', 500, 500, True),
        ('le', 501, 500, False),
        ('lt', 499, 500, True),
        ('lt', 500, 500, False),
        ('lt', 501, 500, False),
    ])
    def test_single_op_int(self, op, struct_value, test_value, match_result,
                           report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "op": op,
            "field": "http_status",
            "value": test_value
        }
        rule = Rule(rule_config, report_type_matrix)

        data = {
            "http_status": struct_value
        }
        assert rule.match(data) is match_result

    @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
        ('ge', "500.01", 500, True),
        ('ge', "500.01", 500.02, False),
        ('le', "500.01", 500.02, True)
    ])
    def test_single_op_float(self, op, struct_value, test_value, match_result,
                             report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "op": op,
            "field": "duration",
            "value": test_value
        }
        rule = Rule(rule_config, report_type_matrix)

        data = {
            "duration": struct_value
        }
        assert rule.match(data) is match_result

    @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
        ('contains', 'foo bar baz', 'foo', True),
        ('contains', 'foo bar baz', 'bar', True),
        ('contains', 'foo bar baz', 'dupa', False),
        ('startswith', 'foo bar baz', 'foo', True),
        ('startswith', 'foo bar baz', 'bar', False),
        ('endswith', 'foo bar baz', 'baz', True),
        ('endswith', 'foo bar baz', 'bar', False),
    ])
    def test_single_op_string(self, op, struct_value, test_value,
                              match_result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "op": op,
            "field": "error",
            "value": test_value
        }
        rule = Rule(rule_config, report_type_matrix)

        data = {
            "error": struct_value
        }
        assert rule.match(data) is match_result

    @pytest.mark.parametrize("field, value, s_type", [
        ('field_unicode', 500, str),
        ('field_unicode', 500.0, str),
        ('field_unicode', "500", str),
        ('field_int', "500", int),
        ('field_int', 500, int),
        ('field_int', 500.0, int),
        ('field_float', "500", float),
        ('field_float', 500, float),
        ('field_float', 500.0, float),
    ])
    def test_type_normalization(self, field, value, s_type):
        from appenlight.lib.rule import Rule
        type_matrix = {
            'field_unicode': {"type": 'unicode'},
            'field_float': {"type": 'float'},
            'field_int': {"type": 'int'},
        }

        rule = Rule({}, type_matrix)
        n_value = rule.normalized_type(field, value)
        assert isinstance(n_value, s_type) is True


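# Composite rules: __NOT__/__OR__/__AND__ nodes combine leaf comparisons,
# and build_rule_schema() validates operator/field combinations.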
@pytest.mark.usefixtures('report_type_matrix')
class TestNestedRuleParsing():

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
         False),
        ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
         False),
        ({"http_status": 500, "group": {"priority": 1, "occurences": 11}},
         False),
        ({"http_status": 101, "group": {"priority": 3, "occurences": 5}},
         True),
    ])
    def test_NOT_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__NOT__",
            "rules": [
                {
                    "op": "ge",
                    "field": "group:occurences",
                    "value": "10"
                },
                {
                    "op": "ge",
                    "field": "group:priority",
                    "value": "4"
                }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
         True),
        ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
         True),
        ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
         True),
        ({"http_status": 101, "group": {"priority": 3, "occurences": 11}},
         False),
    ])
    def test_nested_OR_AND_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__OR__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "group:occurences",
                            "value": "10"
                        },
                        {
                            "op": "ge",
                            "field": "group:priority",
                            "value": "4"
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "http_status",
                    "value": "500"
                }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
         True),
        ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
         True),
        ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
         True),
        ({"http_status": 101, "group": {"priority": 3, "occurences": 1}},
         False),
    ])
    def test_nested_OR_OR_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__OR__",
            "rules": [
                {"field": "__OR__",
                 "rules": [
                     {"op": "ge",
                      "field": "group:occurences",
                      "value": "10"
                      },
                     {"op": "ge",
                      "field": "group:priority",
                      "value": "4"
                      }
                 ]
                 },
                {"op": "eq",
                 "field": "http_status",
                 "value": "500"
                 }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 500, "group": {"priority": 7, "occurences": 11}},
         True),
        ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
         False),
        ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
         False),
        ({"http_status": 101, "group": {"priority": 3, "occurences": 1}},
         False),
    ])
    def test_nested_AND_AND_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__AND__",
            "rules": [
                {"field": "__AND__",
                 "rules": [
                     {"op": "ge",
                      "field": "group:occurences",
                      "value": "10"
                      },
                     {"op": "ge",
                      "field": "group:priority",
                      "value": "4"
                      }]
                 },
                {"op": "eq",
                 "field": "http_status",
                 "value": "500"
                 }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
          "url_path": '/test/register', "error": "foo test bar"}, True),
        ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
          "url_path": '/test/register', "error": "foo INVALID bar"}, False),
    ])
    def test_nested_AND_AND_AND_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__AND__",
            "rules": [
                {"field": "__AND__",
                 "rules": [
                     {"op": "ge",
                      "field": "group:occurences",
                      "value": "10"
                      },
                     {"field": "__AND__",
                      "rules": [
                          {"op": "endswith",
                           "field": "url_path",
                           "value": "register"},
                          {"op": "contains",
                           "field": "error",
                           "value": "test"}]}]
                 },
                {"op": "eq",
                 "field": "http_status",
                 "value": "500"
                 }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
          "url_path": 6, "error": 3}, False),
        ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
          "url_path": '/test/register', "error": "foo INVALID bar"}, True),
    ])
    def test_nested_AND_AND_OR_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__AND__",
            "rules": [
                {"field": "__AND__",
                 "rules": [
                     {"op": "ge",
                      "field": "group:occurences",
                      "value": "10"
                      },
                     {"field": "__OR__",
                      "rules": [
                          {"op": "endswith",
                           "field": "url_path",
                           "value": "register"
                           },
                          {"op": "contains",
                           "field": "error",
                           "value": "test"
                           }]}]
                 },
                {"op": "eq",
                 "field": "http_status",
                 "value": "500"
                 }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

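    # Schema validation matrix: each (op, field, value) combination below is
    # run through build_rule_schema(); rows marked should_fail must raise
    # colander.Invalid on deserialize().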
    @pytest.mark.parametrize("op, field, value, should_fail", [
        ('eq', 'http_status', "1", False),
        ('ne', 'http_status', "1", False),
        ('ne', 'http_status', "foo", True),
        ('startswith', 'http_status', "1", True),
        ('eq', 'group:priority', "1", False),
        ('ne', 'group:priority', "1", False),
        ('ge', 'group:priority', "1", False),
        ('le', 'group:priority', "1", False),
        ('startswith', 'group:priority', "1", True),
        ('eq', 'url_domain', "1", False),
        ('ne', 'url_domain', "1", False),
        ('startswith', 'url_domain', "1", False),
        ('endswith', 'url_domain', "1", False),
        ('contains', 'url_domain', "1", False),
        ('ge', 'url_domain', "1", True),
        ('eq', 'url_path', "1", False),
        ('ne', 'url_path', "1", False),
        ('startswith', 'url_path', "1", False),
        ('endswith', 'url_path', "1", False),
        ('contains', 'url_path', "1", False),
        ('ge', 'url_path', "1", True),
        ('eq', 'error', "1", False),
        ('ne', 'error', "1", False),
        ('startswith', 'error', "1", False),
        ('endswith', 'error', "1", False),
        ('contains', 'error', "1", False),
        ('ge', 'error', "1", True),
        ('ge', 'url_path', "1", True),
        ('eq', 'tags:server_name', "1", False),
        ('ne', 'tags:server_name', "1", False),
        ('startswith', 'tags:server_name', "1", False),
        ('endswith', 'tags:server_name', "1", False),
        ('contains', 'tags:server_name', "1", False),
        ('ge', 'tags:server_name', "1", True),
        ('contains', 'traceback', "1", False),
        ('ge', 'traceback', "1", True),
        ('eq', 'group:occurences', "1", False),
        ('ne', 'group:occurences', "1", False),
        ('ge', 'group:occurences', "1", False),
        ('le', 'group:occurences', "1", False),
        ('contains', 'group:occurences', "1", True),
    ])
    def test_rule_validation(self, op, field, value, should_fail,
                             report_type_matrix):
        import colander
        from appenlight.validators import build_rule_schema
        rule_config = {
            "op": op,
            "field": field,
            "value": value
        }

        schema = build_rule_schema(rule_config, report_type_matrix)
        if should_fail:
            with pytest.raises(colander.Invalid):
                schema.deserialize(rule_config)
        else:
            schema.deserialize(rule_config)

    def test_nested_proper_rule_validation(self, report_type_matrix):
        from appenlight.validators import build_rule_schema
        rule_config = {
            "field": "__AND__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "group:occurences",
                            "value": "10"
                        },
                        {
                            "field": "__OR__",
                            "rules": [
                                {
                                    "op": "endswith",
                                    "field": "url_path",
                                    "value": "register"
                                },
                                {
                                    "op": "contains",
                                    "field": "error",
                                    "value": "test"
                                }
                            ]
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "http_status",
                    "value": "500"
                }
            ]
        }

        schema = build_rule_schema(rule_config, report_type_matrix)
        deserialized = schema.deserialize(rule_config)

    def test_nested_bad_rule_validation(self, report_type_matrix):
        import colander
        from appenlight.validators import build_rule_schema
        rule_config = {
            "field": "__AND__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "group:occurences",
                            "value": "10"
                        },
                        {
                            "field": "__OR__",
                            "rules": [
                                {
                                    "op": "gt",
                                    "field": "url_path",
                                    "value": "register"
                                },
                                {
                                    "op": "contains",
                                    "field": "error",
                                    "value": "test"
                                }
                            ]
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "http_status",
                    "value": "500"
                }
            ]
        }

        schema = build_rule_schema(rule_config, report_type_matrix)
        with pytest.raises(colander.Invalid):
            deserialized = schema.deserialize(rule_config)

    def test_config_manipulator(self):
        from appenlight.lib.rule import Rule
        type_matrix = {
            'a': {"type": 'int',
                  "ops": ('eq', 'ne', 'ge', 'le',)},
            'b': {"type": 'int',
                  "ops": ('eq', 'ne', 'ge', 'le',)},
        }
        rule_config = {
            "field": "__OR__",
            "rules": [
                {
                    "field": "__OR__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "a",
                            "value": "10"
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "b",
                    "value": "500"
                }
            ]
        }

        def rule_manipulator(rule):
            if 'value' in rule.config:
                rule.config['value'] = "1"

        rule = Rule(rule_config, type_matrix,
                    config_manipulator=rule_manipulator)
        rule.match({"a": 1,
                    "b": "2"})
        assert rule.config['rules'][0]['rules'][0]['value'] == "1"
        assert rule.config['rules'][1]['value'] == "1"
        assert rule.type_matrix["b"]['type'] == "int"

    def test_dynamic_config_manipulator(self):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__OR__",
            "rules": [
                {
                    "field": "__OR__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "a",
                            "value": "10"
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "b",
                    "value": "500"
                }
            ]
        }

        def rule_manipulator(rule):
            rule.type_matrix = {
                'a': {"type": 'int',
                      "ops": ('eq', 'ne', 'ge', 'le',)},
                'b': {"type": 'unicode',
                      "ops": ('eq', 'ne', 'ge', 'le',)},
            }

            if 'value' in rule.config:
                if rule.config['field'] == 'a':
                    rule.config['value'] = "1"
                elif rule.config['field'] == 'b':
                    rule.config['value'] = "2"

        rule = Rule(rule_config, {},
                    config_manipulator=rule_manipulator)
        rule.match({"a": 11,
                    "b": "55"})
        assert rule.config['rules'][0]['rules'][0]['value'] == "1"
        assert rule.config['rules'][1]['value'] == "2"
        assert rule.type_matrix["b"]['type'] == "unicode"


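# Form views: registration rejects requests without a valid CSRF token and
# falls through to normal form validation when the token is present.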
@pytest.mark.usefixtures('base_app', 'with_migrations')
class TestViewsWithForms(object):
    def test_bad_csrf(self):
        from appenlight.forms import CSRFException
        from appenlight.views.index import register
        post_data = {'dupa': 'dupa'}
        request = testing.DummyRequest(post=post_data)
        request.POST = webob.multidict.MultiDict(request.POST)
        with pytest.raises(CSRFException):
            register(request)

    def test_proper_csrf(self):
        from appenlight.views.index import register
        request = pyramid.threadlocal.get_current_request()
        post_data = {'dupa': 'dupa',
                     'csrf_token': request.session.get_csrf_token()}
        request = testing.DummyRequest(post=post_data)
        request.POST = webob.multidict.MultiDict(request.POST)
        result = register(request)
        assert result['form'].errors['email'][0] == 'This field is required.'


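# Registration view: empty submissions surface field-level form errors,
# while a valid submission creates the user with a hashed (60-character)
# password.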
@pytest.mark.usefixtures('base_app', 'with_migrations', 'default_data')
class TestRegistration(object):
    def test_invalid_form(self):
        from appenlight.views.index import register
        request = pyramid.threadlocal.get_current_request()
        post_data = {'user_name': '',
                     'user_password': '',
                     'email': '',
                     'csrf_token': request.session.get_csrf_token()}
        request = testing.DummyRequest(post=post_data)
        request.POST = webob.multidict.MultiDict(request.POST)
        result = register(request)
        assert result['form'].errors['user_name'][0] == \
            'This field is required.'

    def test_valid_form(self):
        from appenlight.views.index import register
        from ziggurat_foundations.models.services.user import UserService
        request = pyramid.threadlocal.get_current_request()
        post_data = {'user_name': 'foo',
                     'user_password': 'barr',
                     'email': 'test@test.foo',
                     'csrf_token': request.session.get_csrf_token()}
        request = testing.DummyRequest(post=post_data)
        request.add_flash_to_headers = mock.Mock()
        request.POST = webob.multidict.MultiDict(request.POST)
        assert UserService.by_user_name('foo') is None
        register(request)
        user = UserService.by_user_name('foo')
        assert user.user_name == 'foo'
        assert len(user.user_password) == 60


1430 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables', |
|
1430 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables', | |
1431 | 'default_user') |
|
1431 | 'default_user') | |
1432 | class TestApplicationCreation(object): |
|
1432 | class TestApplicationCreation(object): | |
1433 | def test_wrong_data(self): |
|
1433 | def test_wrong_data(self): | |
1434 | import appenlight.views.applications as applications |
|
1434 | import appenlight.views.applications as applications | |
1435 | from ziggurat_foundations.models.services.user import UserService |
|
1435 | from ziggurat_foundations.models.services.user import UserService | |
1436 | request = pyramid.threadlocal.get_current_request() |
|
1436 | request = pyramid.threadlocal.get_current_request() | |
1437 | request.user = UserService.by_user_name('testuser') |
|
1437 | request.user = UserService.by_user_name('testuser') | |
1438 | request.unsafe_json_body = {} |
|
1438 | request.unsafe_json_body = {} | |
1439 | request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token() |
|
1439 | request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token() | |
1440 | response = applications.application_create(request) |
|
1440 | response = applications.application_create(request) | |
1441 | assert response.code == 422 |
|
1441 | assert response.code == 422 | |
1442 |
|
1442 | |||
1443 | def test_proper_data(self): |
|
1443 | def test_proper_data(self): | |
1444 | import appenlight.views.applications as applications |
|
1444 | import appenlight.views.applications as applications | |
1445 | from ziggurat_foundations.models.services.user import UserService |
|
1445 | from ziggurat_foundations.models.services.user import UserService | |
1446 |
|
1446 | |||
1447 | request = pyramid.threadlocal.get_current_request() |
|
1447 | request = pyramid.threadlocal.get_current_request() | |
1448 | request.user = UserService.by_user_name('testuser') |
|
1448 | request.user = UserService.by_user_name('testuser') | |
1449 | request.unsafe_json_body = {"resource_name": "app name", |
|
1449 | request.unsafe_json_body = {"resource_name": "app name", | |
1450 | "domains": "foo"} |
|
1450 | "domains": "foo"} | |
1451 | request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token() |
|
1451 | request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token() | |
1452 | app_dict = applications.application_create(request) |
|
1452 | app_dict = applications.application_create(request) | |
1453 | assert app_dict['public_key'] is not None |
|
1453 | assert app_dict['public_key'] is not None | |
1454 | assert app_dict['api_key'] is not None |
|
1454 | assert app_dict['api_key'] is not None | |
1455 | assert app_dict['resource_name'] == 'app name' |
|
1455 | assert app_dict['resource_name'] == 'app name' | |
1456 | assert app_dict['owner_group_id'] is None |
|
1456 | assert app_dict['owner_group_id'] is None | |
1457 | assert app_dict['resource_id'] is not None |
|
1457 | assert app_dict['resource_id'] is not None | |
1458 | assert app_dict['default_grouping'] == 'url_traceback' |
|
1458 | assert app_dict['default_grouping'] == 'url_traceback' | |
1459 | assert app_dict['possible_permissions'] == ('view', 'update_reports') |
|
1459 | assert app_dict['possible_permissions'] == ('view', 'update_reports') | |
1460 | assert app_dict['slow_report_threshold'] == 10 |
|
1460 | assert app_dict['slow_report_threshold'] == 10 | |
1461 | assert app_dict['owner_user_name'] == 'testuser' |
|
1461 | assert app_dict['owner_user_name'] == 'testuser' | |
1462 | assert app_dict['owner_user_id'] == request.user.id |
|
1462 | assert app_dict['owner_user_id'] == request.user.id | |
1463 | assert app_dict['domains'] == 'foo' |
|
1463 | assert app_dict['domains'] == 'foo' | |
1464 | assert app_dict['postprocessing_rules'] == [] |
|
1464 | assert app_dict['postprocessing_rules'] == [] | |
1465 | assert app_dict['error_report_threshold'] == 10 |
|
1465 | assert app_dict['error_report_threshold'] == 10 | |
1466 | assert app_dict['allow_permanent_storage'] is False |
|
1466 | assert app_dict['allow_permanent_storage'] is False | |
1467 | assert app_dict['resource_type'] == 'application' |
|
1467 | assert app_dict['resource_type'] == 'application' | |
1468 | assert app_dict['current_permissions'] == [] |
|
1468 | assert app_dict['current_permissions'] == [] | |
1469 |
|
1469 | |||
1470 |
|
1470 | |||
1471 | @pytest.mark.usefixtures('default_application') |
|
1471 | @pytest.mark.usefixtures('default_application') | |
1472 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
1472 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') | |
1473 | class TestAPISentryView(object): |
|
1473 | class TestAPISentryView(object): | |
1474 | def test_no_payload(self, default_application): |
|
1474 | def test_no_payload(self, default_application): | |
1475 | import colander |
|
1475 | import colander | |
1476 | from appenlight.models.services.application import ApplicationService |
|
1476 | from appenlight.models.services.application import ApplicationService | |
1477 | from appenlight.views.api import sentry_compat |
|
1477 | from appenlight.views.api import sentry_compat | |
1478 | from appenlight.lib.request import JSONException |
|
1478 | from appenlight.lib.request import JSONException | |
1479 |
|
1479 | |||
1480 | context = DummyContext() |
|
1480 | context = DummyContext() | |
1481 | context.resource = ApplicationService.by_id(1) |
|
1481 | context.resource = ApplicationService.by_id(1) | |
1482 | request = testing.DummyRequest( |
|
1482 | request = testing.DummyRequest( | |
1483 | headers={'Content-Type': 'application/json'}) |
|
1483 | headers={'Content-Type': 'application/json'}) | |
1484 | request.unsafe_json_body = '' |
|
1484 | request.unsafe_json_body = '' | |
1485 | request.context = context |
|
1485 | request.context = context | |
1486 | route = mock.Mock() |
|
1486 | route = mock.Mock() | |
1487 | route.name = 'api_sentry' |
|
1487 | route.name = 'api_sentry' | |
1488 | request.matched_route = route |
|
1488 | request.matched_route = route | |
1489 | with pytest.raises(JSONException): |
|
1489 | with pytest.raises(JSONException): | |
1490 | sentry_compat(request) |
|
1490 | sentry_compat(request) | |
1491 |
|
1491 | |||
1492 | def test_java_client_payload(self): |
|
1492 | def test_java_client_payload(self): | |
1493 | from appenlight.views.api import sentry_compat |
|
1493 | from appenlight.views.api import sentry_compat | |
1494 | from appenlight.models.services.application import ApplicationService |
|
1494 | from appenlight.models.services.application import ApplicationService | |
1495 | from appenlight.models.report_group import ReportGroup |
|
1495 | from appenlight.models.report_group import ReportGroup | |
1496 | route = mock.Mock() |
|
1496 | route = mock.Mock() | |
1497 | route.name = 'api_sentry' |
|
1497 | route.name = 'api_sentry' | |
1498 | request = pyramid.threadlocal.get_current_request() |
|
1498 | request = pyramid.threadlocal.get_current_request() | |
1499 | context = DummyContext() |
|
1499 | context = DummyContext() | |
1500 | context.resource = ApplicationService.by_id(1) |
|
1500 | context.resource = ApplicationService.by_id(1) | |
|
1501 | context.resource.allow_permanent_storage = True | |||
1501 | request.context = context |
|
1502 | request.context = context | |
1502 | request.matched_route = route |
|
1503 | request.matched_route = route | |
1503 | request.body = b'eJy1UmFr2zAQ/S0T+7BCLOzYThp/C6xjG6SDLd/GCBf57Ki' \ |
|
1504 | request.body = b'eJy1UmFr2zAQ/S0T+7BCLOzYThp/C6xjG6SDLd/GCBf57Ki' \ | |
1504 | b'RJSHJJiXkv+/UlC7p2kAZA33Ru6f33t1pz3BAHVayZhWr87' \ |
|
1505 | b'RJSHJJiXkv+/UlC7p2kAZA33Ru6f33t1pz3BAHVayZhWr87' \ | |
1505 | b'JMs+I6q3MsrifFep2vc1iXM1HMpgBTNmIdeg8tEvlmJ9AGa' \ |
|
1506 | b'JMs+I6q3MsrifFep2vc1iXM1HMpgBTNmIdeg8tEvlmJ9AGa' \ | |
1506 | b'fQ7goOkQoDOUmGcZpMkLZO0WGZFRadMiaHIR1EVnTMu3k3b' \ |
|
1507 | b'fQ7goOkQoDOUmGcZpMkLZO0WGZFRadMiaHIR1EVnTMu3k3b' \ | |
1507 | b'oiMgqJrXpgOpOVjLLTiPkWAVhMa4jih3MAAholfWyUDAksz' \ |
|
1508 | b'oiMgqJrXpgOpOVjLLTiPkWAVhMa4jih3MAAholfWyUDAksz' \ | |
1508 | b'm1iopICbg8fWH52B8VWXZVYwHrWfV/jBipD2gW2no8CFMa5' \ |
|
1509 | b'm1iopICbg8fWH52B8VWXZVYwHrWfV/jBipD2gW2no8CFMa5' \ | |
1509 | b'JButCDSjoQG6mR6LgLDojPPn/7sbydL25ep34HGl+y3DiE+' \ |
|
1510 | b'JButCDSjoQG6mR6LgLDojPPn/7sbydL25ep34HGl+y3DiE+' \ | |
1510 | b'lH0xXBXjMzFBsXW99SS7pWKYXRw91zqgK4BgZ4/DZVVP/cs' \ |
|
1511 | b'lH0xXBXjMzFBsXW99SS7pWKYXRw91zqgK4BgZ4/DZVVP/cs' \ | |
1511 | b'3NuzSZPfAKqP2Cdj4tw7U/cKH0fEFeiWQFqE2FIHAmMPjaN' \ |
|
1512 | b'3NuzSZPfAKqP2Cdj4tw7U/cKH0fEFeiWQFqE2FIHAmMPjaN' \ | |
1512 | b'Y/kHvbzY/JqdHUq9o/KxqQHkcsabX4piDuT4aK+pXG1ZNi/' \ |
|
1513 | b'Y/kHvbzY/JqdHUq9o/KxqQHkcsabX4piDuT4aK+pXG1ZNi/' \ | |
1513 | b'IwOpEyruXC1LiB3vPO3BmOOxTUCIqv5LIg5H12oh9cf0l+P' \ |
|
1514 | b'IwOpEyruXC1LiB3vPO3BmOOxTUCIqv5LIg5H12oh9cf0l+P' \ | |
1514 | b'MvP5P8kddgoFIEvMGzM5cRSD2aLJ6qTdHKm6nv9pPcRFba0' \ |
|
1515 | b'MvP5P8kddgoFIEvMGzM5cRSD2aLJ6qTdHKm6nv9pPcRFba0' \ | |
1515 | b'Kd0eleeCFuGN+9JZ9TaXIn/V5JYMBvxXg3L6PwzSE4dkfOb' \ |
|
1516 | b'Kd0eleeCFuGN+9JZ9TaXIn/V5JYMBvxXg3L6PwzSE4dkfOb' \ | |
1516 | b'w7CtfWmP85SdCs8OvA53fUV19cg==' |
|
1517 | b'w7CtfWmP85SdCs8OvA53fUV19cg==' | |
1517 | sentry_compat(request) |
|
1518 | sentry_compat(request) | |
1518 | query = DBSession.query(ReportGroup) |
|
1519 | query = DBSession.query(ReportGroup) | |
1519 | report = query.first() |
|
1520 | report = query.first() | |
1520 | assert query.count() == 1 |
|
1521 | assert query.count() == 1 | |
1521 | assert report.total_reports == 1 |
|
1522 | assert report.total_reports == 1 | |
1522 |
|
1523 | |||
1523 | def test_ruby_client_payload(self): |
|
1524 | def test_ruby_client_payload(self): | |
1524 | from appenlight.views.api import sentry_compat |
|
1525 | from appenlight.views.api import sentry_compat | |
1525 | from appenlight.models.services.application import ApplicationService |
|
1526 | from appenlight.models.services.application import ApplicationService | |
1526 | from appenlight.models.report_group import ReportGroup |
|
1527 | from appenlight.models.report_group import ReportGroup | |
1527 | from appenlight.tests.payload_examples import SENTRY_RUBY_ENCODED |
|
1528 | from appenlight.tests.payload_examples import SENTRY_RUBY_ENCODED | |
1528 | route = mock.Mock() |
|
1529 | route = mock.Mock() | |
1529 | route.name = 'api_sentry' |
|
1530 | route.name = 'api_sentry' | |
1530 | request = testing.DummyRequest( |
|
1531 | request = testing.DummyRequest( | |
1531 | headers={'Content-Type': 'application/octet-stream', |
|
1532 | headers={'Content-Type': 'application/octet-stream', | |
1532 | 'User-Agent': 'sentry-ruby/1.0.0', |
|
1533 | 'User-Agent': 'sentry-ruby/1.0.0', | |
1533 | 'X-Sentry-Auth': 'Sentry sentry_version=5, ' |
|
1534 | 'X-Sentry-Auth': 'Sentry sentry_version=5, ' | |
1534 | 'sentry_client=raven-ruby/1.0.0, ' |
|
1535 | 'sentry_client=raven-ruby/1.0.0, ' | |
1535 | 'sentry_timestamp=1462378483, ' |
|
1536 | 'sentry_timestamp=1462378483, ' | |
1536 | 'sentry_key=xxx, sentry_secret=xxx' |
|
1537 | 'sentry_key=xxx, sentry_secret=xxx' | |
1537 | }) |
|
1538 | }) | |
1538 | context = DummyContext() |
|
1539 | context = DummyContext() | |
1539 | context.resource = ApplicationService.by_id(1) |
|
1540 | context.resource = ApplicationService.by_id(1) | |
|
1541 | context.resource.allow_permanent_storage = True | |||
1540 | request.context = context |
|
1542 | request.context = context | |
1541 | request.matched_route = route |
|
1543 | request.matched_route = route | |
1542 | request.body = SENTRY_RUBY_ENCODED |
|
1544 | request.body = SENTRY_RUBY_ENCODED | |
1543 | sentry_compat(request) |
|
1545 | sentry_compat(request) | |
1544 | query = DBSession.query(ReportGroup) |
|
1546 | query = DBSession.query(ReportGroup) | |
1545 | report = query.first() |
|
1547 | report = query.first() | |
1546 | assert query.count() == 1 |
|
1548 | assert query.count() == 1 | |
1547 | assert report.total_reports == 1 |
|
1549 | assert report.total_reports == 1 | |
1548 |
|
1550 | |||
1549 | def test_python_client_decoded_payload(self): |
|
1551 | def test_python_client_decoded_payload(self): | |
1550 | from appenlight.views.api import sentry_compat |
|
1552 | from appenlight.views.api import sentry_compat | |
1551 | from appenlight.models.services.application import ApplicationService |
|
1553 | from appenlight.models.services.application import ApplicationService | |
1552 | from appenlight.models.report_group import ReportGroup |
|
1554 | from appenlight.models.report_group import ReportGroup | |
1553 | from appenlight.tests.payload_examples import SENTRY_PYTHON_PAYLOAD_7 |
|
1555 | from appenlight.tests.payload_examples import SENTRY_PYTHON_PAYLOAD_7 | |
1554 | route = mock.Mock() |
|
1556 | route = mock.Mock() | |
1555 | route.name = 'api_sentry' |
|
1557 | route.name = 'api_sentry' | |
1556 | request = pyramid.threadlocal.get_current_request() |
|
1558 | request = pyramid.threadlocal.get_current_request() | |
1557 | context = DummyContext() |
|
1559 | context = DummyContext() | |
1558 | context.resource = ApplicationService.by_id(1) |
|
1560 | context.resource = ApplicationService.by_id(1) | |
|
1561 | context.resource.allow_permanent_storage = True | |||
1559 | request.context = context |
|
1562 | request.context = context | |
1560 | request.matched_route = route |
|
1563 | request.matched_route = route | |
1561 | request.body = json.dumps(SENTRY_PYTHON_PAYLOAD_7).encode('utf8') |
|
1564 | request.body = json.dumps(SENTRY_PYTHON_PAYLOAD_7).encode('utf8') | |
1562 | sentry_compat(request) |
|
1565 | sentry_compat(request) | |
1563 | query = DBSession.query(ReportGroup) |
|
1566 | query = DBSession.query(ReportGroup) | |
1564 | report = query.first() |
|
1567 | report = query.first() | |
1565 | assert query.count() == 1 |
|
1568 | assert query.count() == 1 | |
1566 | assert report.total_reports == 1 |
|
1569 | assert report.total_reports == 1 | |
1567 |
|
1570 | |||
1568 | def test_python_client_encoded_payload(self): |
|
1571 | def test_python_client_encoded_payload(self): | |
1569 | from appenlight.views.api import sentry_compat |
|
1572 | from appenlight.views.api import sentry_compat | |
1570 | from appenlight.models.services.application import ApplicationService |
|
1573 | from appenlight.models.services.application import ApplicationService | |
1571 | from appenlight.models.report_group import ReportGroup |
|
1574 | from appenlight.models.report_group import ReportGroup | |
1572 | from appenlight.tests.payload_examples import SENTRY_PYTHON_ENCODED |
|
1575 | from appenlight.tests.payload_examples import SENTRY_PYTHON_ENCODED | |
1573 | route = mock.Mock() |
|
1576 | route = mock.Mock() | |
1574 | route.name = 'api_sentry' |
|
1577 | route.name = 'api_sentry' | |
1575 | request = testing.DummyRequest( |
|
1578 | request = testing.DummyRequest( | |
1576 | headers={'Content-Type': 'application/octet-stream', |
|
1579 | headers={'Content-Type': 'application/octet-stream', | |
1577 | 'Content-Encoding': 'deflate', |
|
1580 | 'Content-Encoding': 'deflate', | |
1578 | 'User-Agent': 'sentry-ruby/1.0.0', |
|
1581 | 'User-Agent': 'sentry-ruby/1.0.0', | |
1579 | 'X-Sentry-Auth': 'Sentry sentry_version=5, ' |
|
1582 | 'X-Sentry-Auth': 'Sentry sentry_version=5, ' | |
1580 | 'sentry_client=raven-ruby/1.0.0, ' |
|
1583 | 'sentry_client=raven-ruby/1.0.0, ' | |
1581 | 'sentry_timestamp=1462378483, ' |
|
1584 | 'sentry_timestamp=1462378483, ' | |
1582 | 'sentry_key=xxx, sentry_secret=xxx' |
|
1585 | 'sentry_key=xxx, sentry_secret=xxx' | |
1583 | }) |
|
1586 | }) | |
1584 | context = DummyContext() |
|
1587 | context = DummyContext() | |
1585 | context.resource = ApplicationService.by_id(1) |
|
1588 | context.resource = ApplicationService.by_id(1) | |
|
1589 | context.resource.allow_permanent_storage = True | |||
1586 | request.context = context |
|
1590 | request.context = context | |
1587 | request.matched_route = route |
|
1591 | request.matched_route = route | |
1588 | request.body = SENTRY_PYTHON_ENCODED |
|
1592 | request.body = SENTRY_PYTHON_ENCODED | |
1589 | sentry_compat(request) |
|
1593 | sentry_compat(request) | |
1590 | query = DBSession.query(ReportGroup) |
|
1594 | query = DBSession.query(ReportGroup) | |
1591 | report = query.first() |
|
1595 | report = query.first() | |
1592 | assert query.count() == 1 |
|
1596 | assert query.count() == 1 | |
1593 | assert report.total_reports == 1 |
|
1597 | assert report.total_reports == 1 |
@@ -1,765 +1,773 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
18 | # AppEnlight Enterprise Edition, including its added features, Support | |
19 | # services, and proprietary license terms, please see |
|
19 | # services, and proprietary license terms, please see | |
20 | # https://rhodecode.com/licenses/ |
|
20 | # https://rhodecode.com/licenses/ | |
21 |
|
21 | |||
22 | import datetime |
|
22 | import datetime | |
23 |
|
23 | |||
24 | import colander |
|
24 | import colander | |
25 | from colander import null |
|
25 | from colander import null | |
26 |
|
26 | |||
27 | # those keywords are here so we can distinguish between searching for tags and |
|
27 | # those keywords are here so we can distinguish between searching for tags and | |
28 | # normal properties of reports/logs |
|
28 | # normal properties of reports/logs | |
29 | accepted_search_params = ['resource', |
|
29 | accepted_search_params = ['resource', | |
30 | 'request_id', |
|
30 | 'request_id', | |
31 | 'start_date', |
|
31 | 'start_date', | |
32 | 'end_date', |
|
32 | 'end_date', | |
33 | 'page', |
|
33 | 'page', | |
34 | 'min_occurences', |
|
34 | 'min_occurences', | |
35 | 'http_status', |
|
35 | 'http_status', | |
36 | 'priority', |
|
36 | 'priority', | |
37 | 'error', |
|
37 | 'error', | |
38 | 'url_path', |
|
38 | 'url_path', | |
39 | 'url_domain', |
|
39 | 'url_domain', | |
40 | 'report_status', |
|
40 | 'report_status', | |
41 | 'min_duration', |
|
41 | 'min_duration', | |
42 | 'max_duration', |
|
42 | 'max_duration', | |
43 | 'message', |
|
43 | 'message', | |
44 | 'level', |
|
44 | 'level', | |
45 | 'namespace'] |
|
45 | 'namespace'] | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | @colander.deferred |
|
48 | @colander.deferred | |
49 | def deferred_utcnow(node, kw): |
|
49 | def deferred_utcnow(node, kw): | |
50 | return kw['utcnow'] |
|
50 | return kw['utcnow'] | |
51 |
|
51 | |||
52 |
|
52 | |||
|
53 | @colander.deferred | |||
|
54 | def optional_limited_date(node, kw): | |||
|
55 | if not kw.get('allow_permanent_storage'): | |||
|
56 | return limited_date | |||
|
57 | ||||
|
58 | ||||
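Note: optional_limited_date is a colander deferred, so the date validator is only chosen when the schema is bound, not at import time (NonTZDate and limited_date are defined further down in this module). A minimal sketch of the idea; the bind call below is illustrative, not the actual call site in the API views:

    import colander

    # Illustrative only: the same node validates strictly or permissively
    # depending on the flag supplied at bind() time.
    node = colander.SchemaNode(NonTZDate(), validator=optional_limited_date)
    strict = node.bind(allow_permanent_storage=False)
    relaxed = node.bind(allow_permanent_storage=True)
    # strict.validator is limited_date; relaxed.validator is None, so
    # out-of-range dates are only rejected when permanent storage is off.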
53 | def lowercase_preparer(input_data): |
|
59 | def lowercase_preparer(input_data): | |
54 | """ |
|
60 | """ | |
55 | Transforms a list of string entries to lowercase |
|
61 | Transforms a list of string entries to lowercase | |
56 | Used in search query validation |
|
62 | Used in search query validation | |
57 | """ |
|
63 | """ | |
58 | if not input_data: |
|
64 | if not input_data: | |
59 | return input_data |
|
65 | return input_data | |
60 | return [x.lower() for x in input_data] |
|
66 | return [x.lower() for x in input_data] | |
61 |
|
67 | |||
62 |
|
68 | |||
63 | def shortener_factory(cutoff_size=32): |
|
69 | def shortener_factory(cutoff_size=32): | |
64 | """ |
|
70 | """ | |
65 | Limits the input data to a specific character count |
|
71 | Limits the input data to a specific character count | |
66 | :arg cutoff_size: How many characters to store |
|
72 | :arg cutoff_size: How many characters to store | |
67 |
|
73 | |||
68 | """ |
|
74 | """ | |
69 |
|
75 | |||
70 | def shortener(input_data): |
|
76 | def shortener(input_data): | |
71 | if not input_data: |
|
77 | if not input_data: | |
72 | return input_data |
|
78 | return input_data | |
73 | else: |
|
79 | else: | |
74 | if isinstance(input_data, str): |
|
80 | if isinstance(input_data, str): | |
75 | return input_data[:cutoff_size] |
|
81 | return input_data[:cutoff_size] | |
76 | else: |
|
82 | else: | |
77 | return input_data |
|
83 | return input_data | |
78 |
|
84 | |||
79 | return shortener |
|
85 | return shortener | |
80 |
|
86 | |||
81 |
|
87 | |||
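As a quick illustration of the preparer produced by shortener_factory (the values below are made up):

    shorten = shortener_factory(8)
    shorten('a very long value')   # -> 'a very l' (strings are cut to 8 characters)
    shorten(12345)                 # -> 12345 (non-string input passes through unchanged)
    shorten(None)                  # -> None (falsy input is returned as-is)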
82 | def cast_to_unicode_or_null(value): |
|
88 | def cast_to_unicode_or_null(value): | |
83 | if value is not colander.null: |
|
89 | if value is not colander.null: | |
84 | return str(value) |
|
90 | return str(value) | |
85 | return None |
|
91 | return None | |
86 |
|
92 | |||
87 |
|
93 | |||
88 | class NonTZDate(colander.DateTime): |
|
94 | class NonTZDate(colander.DateTime): | |
89 | """ Returns null for incorrect date format - also removes tz info""" |
|
95 | """ Returns null for incorrect date format - also removes tz info""" | |
90 |
|
96 | |||
91 | def deserialize(self, node, cstruct): |
|
97 | def deserialize(self, node, cstruct): | |
92 | # disabled for now |
|
98 | # disabled for now | |
93 | # if cstruct and isinstance(cstruct, str): |
|
99 | # if cstruct and isinstance(cstruct, str): | |
94 | # if ':' not in cstruct: |
|
100 | # if ':' not in cstruct: | |
95 | # cstruct += ':0.0' |
|
101 | # cstruct += ':0.0' | |
96 | # if '.' not in cstruct: |
|
102 | # if '.' not in cstruct: | |
97 | # cstruct += '.0' |
|
103 | # cstruct += '.0' | |
98 | value = super(NonTZDate, self).deserialize(node, cstruct) |
|
104 | value = super(NonTZDate, self).deserialize(node, cstruct) | |
99 | if value: |
|
105 | if value: | |
100 | return value.replace(tzinfo=None) |
|
106 | return value.replace(tzinfo=None) | |
101 | return value |
|
107 | return value | |
102 |
|
108 | |||
103 |
|
109 | |||
104 | class UnknownType(object): |
|
110 | class UnknownType(object): | |
105 | """ |
|
111 | """ | |
106 | Universal type that will accept a deserialized JSON object and store it unaltered |
|
112 | Universal type that will accept a deserialized JSON object and store it unaltered | |
107 | """ |
|
113 | """ | |
108 |
|
114 | |||
109 | def serialize(self, node, appstruct): |
|
115 | def serialize(self, node, appstruct): | |
110 | if appstruct is null: |
|
116 | if appstruct is null: | |
111 | return null |
|
117 | return null | |
112 | return appstruct |
|
118 | return appstruct | |
113 |
|
119 | |||
114 | def deserialize(self, node, cstruct): |
|
120 | def deserialize(self, node, cstruct): | |
115 | if cstruct is null: |
|
121 | if cstruct is null: | |
116 | return null |
|
122 | return null | |
117 | return cstruct |
|
123 | return cstruct | |
118 |
|
124 | |||
119 | def cstruct_children(self): |
|
125 | def cstruct_children(self): | |
120 | return [] |
|
126 | return [] | |
121 |
|
127 | |||
122 |
|
128 | |||
123 | # SLOW REPORT SCHEMA |
|
129 | # SLOW REPORT SCHEMA | |
124 |
|
130 | |||
125 | def rewrite_type(input_data): |
|
131 | def rewrite_type(input_data): | |
126 | """ |
|
132 | """ | |
127 | Fix for legacy appenlight clients |
|
133 | Fix for legacy appenlight clients | |
128 | """ |
|
134 | """ | |
129 | if input_data == 'remote_call': |
|
135 | if input_data == 'remote_call': | |
130 | return 'remote' |
|
136 | return 'remote' | |
131 | return input_data |
|
137 | return input_data | |
132 |
|
138 | |||
133 |
|
139 | |||
134 | class ExtraTupleSchema(colander.TupleSchema): |
|
140 | class ExtraTupleSchema(colander.TupleSchema): | |
135 | name = colander.SchemaNode(colander.String(), |
|
141 | name = colander.SchemaNode(colander.String(), | |
136 | validator=colander.Length(1, 64)) |
|
142 | validator=colander.Length(1, 64)) | |
137 | value = colander.SchemaNode(UnknownType(), |
|
143 | value = colander.SchemaNode(UnknownType(), | |
138 | preparer=shortener_factory(512), |
|
144 | preparer=shortener_factory(512), | |
139 | missing=None) |
|
145 | missing=None) | |
140 |
|
146 | |||
141 |
|
147 | |||
142 | class ExtraSchemaList(colander.SequenceSchema): |
|
148 | class ExtraSchemaList(colander.SequenceSchema): | |
143 | tag = ExtraTupleSchema() |
|
149 | tag = ExtraTupleSchema() | |
144 | missing = None |
|
150 | missing = None | |
145 |
|
151 | |||
146 |
|
152 | |||
147 | class TagsTupleSchema(colander.TupleSchema): |
|
153 | class TagsTupleSchema(colander.TupleSchema): | |
148 | name = colander.SchemaNode(colander.String(), |
|
154 | name = colander.SchemaNode(colander.String(), | |
149 | validator=colander.Length(1, 128)) |
|
155 | validator=colander.Length(1, 128)) | |
150 | value = colander.SchemaNode(UnknownType(), |
|
156 | value = colander.SchemaNode(UnknownType(), | |
151 | preparer=shortener_factory(128), |
|
157 | preparer=shortener_factory(128), | |
152 | missing=None) |
|
158 | missing=None) | |
153 |
|
159 | |||
154 |
|
160 | |||
155 | class TagSchemaList(colander.SequenceSchema): |
|
161 | class TagSchemaList(colander.SequenceSchema): | |
156 | tag = TagsTupleSchema() |
|
162 | tag = TagsTupleSchema() | |
157 | missing = None |
|
163 | missing = None | |
158 |
|
164 | |||
159 |
|
165 | |||
160 | class NumericTagsTupleSchema(colander.TupleSchema): |
|
166 | class NumericTagsTupleSchema(colander.TupleSchema): | |
161 | name = colander.SchemaNode(colander.String(), |
|
167 | name = colander.SchemaNode(colander.String(), | |
162 | validator=colander.Length(1, 128)) |
|
168 | validator=colander.Length(1, 128)) | |
163 | value = colander.SchemaNode(colander.Float(), missing=0) |
|
169 | value = colander.SchemaNode(colander.Float(), missing=0) | |
164 |
|
170 | |||
165 |
|
171 | |||
166 | class NumericTagSchemaList(colander.SequenceSchema): |
|
172 | class NumericTagSchemaList(colander.SequenceSchema): | |
167 | tag = NumericTagsTupleSchema() |
|
173 | tag = NumericTagsTupleSchema() | |
168 | missing = None |
|
174 | missing = None | |
169 |
|
175 | |||
170 |
|
176 | |||
171 | class SlowCallSchema(colander.MappingSchema): |
|
177 | class SlowCallSchema(colander.MappingSchema): | |
172 | """ |
|
178 | """ | |
173 | Validates slow call format in slow call list |
|
179 | Validates slow call format in slow call list | |
174 | """ |
|
180 | """ | |
175 | start = colander.SchemaNode(NonTZDate()) |
|
181 | start = colander.SchemaNode(NonTZDate()) | |
176 | end = colander.SchemaNode(NonTZDate()) |
|
182 | end = colander.SchemaNode(NonTZDate()) | |
177 | statement = colander.SchemaNode(colander.String(), missing='') |
|
183 | statement = colander.SchemaNode(colander.String(), missing='') | |
178 | parameters = colander.SchemaNode(UnknownType(), missing=None) |
|
184 | parameters = colander.SchemaNode(UnknownType(), missing=None) | |
179 | type = colander.SchemaNode( |
|
185 | type = colander.SchemaNode( | |
180 | colander.String(), |
|
186 | colander.String(), | |
181 | preparer=rewrite_type, |
|
187 | preparer=rewrite_type, | |
182 | validator=colander.OneOf( |
|
188 | validator=colander.OneOf( | |
183 | ['tmpl', 'sql', 'nosql', 'remote', 'unknown', 'custom']), |
|
189 | ['tmpl', 'sql', 'nosql', 'remote', 'unknown', 'custom']), | |
184 | missing='unknown') |
|
190 | missing='unknown') | |
185 | subtype = colander.SchemaNode(colander.String(), |
|
191 | subtype = colander.SchemaNode(colander.String(), | |
186 | validator=colander.Length(1, 16), |
|
192 | validator=colander.Length(1, 16), | |
187 | missing='unknown') |
|
193 | missing='unknown') | |
188 | location = colander.SchemaNode(colander.String(), |
|
194 | location = colander.SchemaNode(colander.String(), | |
189 | validator=colander.Length(1, 255), |
|
195 | validator=colander.Length(1, 255), | |
190 | missing='') |
|
196 | missing='') | |
191 |
|
197 | |||
192 |
|
198 | |||
193 | def limited_date(node, value): |
|
199 | def limited_date(node, value): | |
194 | """ checks to make sure that the value is not older/newer than 2h """ |
|
200 | """ checks to make sure that the value is not older/newer than 2h """ | |
195 | past_hours = 72 |
|
201 | past_hours = 72 | |
196 | future_hours = 2 |
|
202 | future_hours = 2 | |
197 | min_time = datetime.datetime.utcnow() - datetime.timedelta( |
|
203 | min_time = datetime.datetime.utcnow() - datetime.timedelta( | |
198 | hours=past_hours) |
|
204 | hours=past_hours) | |
199 | max_time = datetime.datetime.utcnow() + datetime.timedelta( |
|
205 | max_time = datetime.datetime.utcnow() + datetime.timedelta( | |
200 | hours=future_hours) |
|
206 | hours=future_hours) | |
201 | if min_time > value: |
|
207 | if min_time > value: | |
202 | msg = '%r is older than current UTC time by ' + str(past_hours) |
|
208 | msg = '%r is older than current UTC time by ' + str(past_hours) | |
203 | msg += ' hours. Ask the administrator to enable permanent logging for ' \ |
|
209 | msg += ' hours. Ask the administrator to enable permanent logging for ' \ | |
204 | 'your application to store logs with dates in the past.' |
|
210 | 'your application to store logs with dates in the past.' | |
205 | raise colander.Invalid(node, msg % value) |
|
211 | raise colander.Invalid(node, msg % value) | |
206 | if max_time < value: |
|
212 | if max_time < value: | |
207 | msg = '%r is newer than current UTC time by ' + str(future_hours) |
|
213 | msg = '%r is newer than current UTC time by ' + str(future_hours) | |
208 | msg += ' hours. Ask the administrator to enable permanent logging for ' \ |
|
214 | msg += ' hours. Ask the administrator to enable permanent logging for ' \ | |
209 | 'your application to store logs with dates in the future.' |
|
215 | 'your application to store logs with dates in the future.' | |
210 | raise colander.Invalid(node, msg % value) |
|
216 | raise colander.Invalid(node, msg % value) | |
211 |
|
217 | |||
212 |
|
218 | |||
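An illustrative standalone use of limited_date (the node name and the 100-hour offset are arbitrary):

    import datetime
    import colander

    node = colander.SchemaNode(NonTZDate(), name='start_time')
    too_old = datetime.datetime.utcnow() - datetime.timedelta(hours=100)
    try:
        limited_date(node, too_old)
    except colander.Invalid as exc:
        # dates more than 72h in the past (or more than 2h in the future)
        # are rejected unless permanent logging is enabled
        print(exc.asdict())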
213 | class SlowCallListSchema(colander.SequenceSchema): |
|
219 | class SlowCallListSchema(colander.SequenceSchema): | |
214 | """ |
|
220 | """ | |
215 | Validates list of individual slow calls |
|
221 | Validates list of individual slow calls | |
216 | """ |
|
222 | """ | |
217 | slow_call = SlowCallSchema() |
|
223 | slow_call = SlowCallSchema() | |
218 |
|
224 | |||
219 |
|
225 | |||
220 | class RequestStatsSchema(colander.MappingSchema): |
|
226 | class RequestStatsSchema(colander.MappingSchema): | |
221 | """ |
|
227 | """ | |
222 | Validates format of requests statistics dictionary |
|
228 | Validates format of requests statistics dictionary | |
223 | """ |
|
229 | """ | |
224 | main = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
230 | main = colander.SchemaNode(colander.Float(), validator=colander.Range(0), | |
225 | missing=0) |
|
231 | missing=0) | |
226 | sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
232 | sql = colander.SchemaNode(colander.Float(), validator=colander.Range(0), | |
227 | missing=0) |
|
233 | missing=0) | |
228 | nosql = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
234 | nosql = colander.SchemaNode(colander.Float(), validator=colander.Range(0), | |
229 | missing=0) |
|
235 | missing=0) | |
230 | remote = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
236 | remote = colander.SchemaNode(colander.Float(), validator=colander.Range(0), | |
231 | missing=0) |
|
237 | missing=0) | |
232 | tmpl = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
238 | tmpl = colander.SchemaNode(colander.Float(), validator=colander.Range(0), | |
233 | missing=0) |
|
239 | missing=0) | |
234 | custom = colander.SchemaNode(colander.Float(), validator=colander.Range(0), |
|
240 | custom = colander.SchemaNode(colander.Float(), validator=colander.Range(0), | |
235 | missing=0) |
|
241 | missing=0) | |
236 | sql_calls = colander.SchemaNode(colander.Float(), |
|
242 | sql_calls = colander.SchemaNode(colander.Float(), | |
237 | validator=colander.Range(0), |
|
243 | validator=colander.Range(0), | |
238 | missing=0) |
|
244 | missing=0) | |
239 | nosql_calls = colander.SchemaNode(colander.Float(), |
|
245 | nosql_calls = colander.SchemaNode(colander.Float(), | |
240 | validator=colander.Range(0), |
|
246 | validator=colander.Range(0), | |
241 | missing=0) |
|
247 | missing=0) | |
242 | remote_calls = colander.SchemaNode(colander.Float(), |
|
248 | remote_calls = colander.SchemaNode(colander.Float(), | |
243 | validator=colander.Range(0), |
|
249 | validator=colander.Range(0), | |
244 | missing=0) |
|
250 | missing=0) | |
245 | tmpl_calls = colander.SchemaNode(colander.Float(), |
|
251 | tmpl_calls = colander.SchemaNode(colander.Float(), | |
246 | validator=colander.Range(0), |
|
252 | validator=colander.Range(0), | |
247 | missing=0) |
|
253 | missing=0) | |
248 | custom_calls = colander.SchemaNode(colander.Float(), |
|
254 | custom_calls = colander.SchemaNode(colander.Float(), | |
249 | validator=colander.Range(0), |
|
255 | validator=colander.Range(0), | |
250 | missing=0) |
|
256 | missing=0) | |
251 |
|
257 | |||
252 |
|
258 | |||
253 | class FrameInfoVarSchema(colander.SequenceSchema): |
|
259 | class FrameInfoVarSchema(colander.SequenceSchema): | |
254 | """ |
|
260 | """ | |
255 | Validates format of frame variables of a traceback |
|
261 | Validates format of frame variables of a traceback | |
256 | """ |
|
262 | """ | |
257 | vars = colander.SchemaNode(UnknownType(), |
|
263 | vars = colander.SchemaNode(UnknownType(), | |
258 | validator=colander.Length(2, 2)) |
|
264 | validator=colander.Length(2, 2)) | |
259 |
|
265 | |||
260 |
|
266 | |||
261 | class FrameInfoSchema(colander.MappingSchema): |
|
267 | class FrameInfoSchema(colander.MappingSchema): | |
262 | """ |
|
268 | """ | |
263 | Validates format of a traceback line |
|
269 | Validates format of a traceback line | |
264 | """ |
|
270 | """ | |
265 | cline = colander.SchemaNode(colander.String(), missing='') |
|
271 | cline = colander.SchemaNode(colander.String(), missing='') | |
266 | module = colander.SchemaNode(colander.String(), missing='') |
|
272 | module = colander.SchemaNode(colander.String(), missing='') | |
267 | line = colander.SchemaNode(colander.String(), missing='') |
|
273 | line = colander.SchemaNode(colander.String(), missing='') | |
268 | file = colander.SchemaNode(colander.String(), missing='') |
|
274 | file = colander.SchemaNode(colander.String(), missing='') | |
269 | fn = colander.SchemaNode(colander.String(), missing='') |
|
275 | fn = colander.SchemaNode(colander.String(), missing='') | |
270 | vars = FrameInfoVarSchema() |
|
276 | vars = FrameInfoVarSchema() | |
271 |
|
277 | |||
272 |
|
278 | |||
273 | class FrameInfoListSchema(colander.SequenceSchema): |
|
279 | class FrameInfoListSchema(colander.SequenceSchema): | |
274 | """ |
|
280 | """ | |
275 | Validates format of list of traceback lines |
|
281 | Validates format of list of traceback lines | |
276 | """ |
|
282 | """ | |
277 | frame = colander.SchemaNode(UnknownType()) |
|
283 | frame = colander.SchemaNode(UnknownType()) | |
278 |
|
284 | |||
279 |
|
285 | |||
280 | class ReportDetailBaseSchema(colander.MappingSchema): |
|
286 | class ReportDetailBaseSchema(colander.MappingSchema): | |
281 | """ |
|
287 | """ | |
282 | Validates format of report - ie. request parameters and stats for a request in report group |
|
288 | Validates format of report - ie. request parameters and stats for a request in report group | |
283 | """ |
|
289 | """ | |
284 | username = colander.SchemaNode(colander.String(), |
|
290 | username = colander.SchemaNode(colander.String(), | |
285 | preparer=[shortener_factory(255), |
|
291 | preparer=[shortener_factory(255), | |
286 | lambda x: x or ''], |
|
292 | lambda x: x or ''], | |
287 | missing='') |
|
293 | missing='') | |
288 | request_id = colander.SchemaNode(colander.String(), |
|
294 | request_id = colander.SchemaNode(colander.String(), | |
289 | preparer=shortener_factory(40), |
|
295 | preparer=shortener_factory(40), | |
290 | missing='') |
|
296 | missing='') | |
291 | url = colander.SchemaNode(colander.String(), |
|
297 | url = colander.SchemaNode(colander.String(), | |
292 | preparer=shortener_factory(1024), missing='') |
|
298 | preparer=shortener_factory(1024), missing='') | |
293 | ip = colander.SchemaNode(colander.String(), preparer=shortener_factory(39), |
|
299 | ip = colander.SchemaNode(colander.String(), preparer=shortener_factory(39), | |
294 | missing=None) |
|
300 | missing=None) | |
295 |
start_time = colander.SchemaNode(NonTZDate(), |
|
301 | start_time = colander.SchemaNode(NonTZDate(), | |
|
302 | validator=optional_limited_date, | |||
296 | missing=deferred_utcnow) |
|
303 | missing=deferred_utcnow) | |
297 |
end_time = colander.SchemaNode(NonTZDate(), |
|
304 | end_time = colander.SchemaNode(NonTZDate(), | |
|
305 | validator=optional_limited_date, | |||
298 | missing=None) |
|
306 | missing=None) | |
299 | user_agent = colander.SchemaNode(colander.String(), |
|
307 | user_agent = colander.SchemaNode(colander.String(), | |
300 | preparer=[shortener_factory(512), |
|
308 | preparer=[shortener_factory(512), | |
301 | lambda x: x or ''], |
|
309 | lambda x: x or ''], | |
302 | missing='') |
|
310 | missing='') | |
303 | message = colander.SchemaNode(colander.String(), |
|
311 | message = colander.SchemaNode(colander.String(), | |
304 | preparer=shortener_factory(2048), |
|
312 | preparer=shortener_factory(2048), | |
305 | missing='') |
|
313 | missing='') | |
306 | group_string = colander.SchemaNode(colander.String(), |
|
314 | group_string = colander.SchemaNode(colander.String(), | |
307 | validator=colander.Length(1, 512), |
|
315 | validator=colander.Length(1, 512), | |
308 | missing=None) |
|
316 | missing=None) | |
309 | request_stats = RequestStatsSchema(missing=None) |
|
317 | request_stats = RequestStatsSchema(missing=None) | |
310 | request = colander.SchemaNode(colander.Mapping(unknown='preserve'), |
|
318 | request = colander.SchemaNode(colander.Mapping(unknown='preserve'), | |
311 | missing={}) |
|
319 | missing={}) | |
312 | traceback = FrameInfoListSchema(missing=None) |
|
320 | traceback = FrameInfoListSchema(missing=None) | |
313 | slow_calls = SlowCallListSchema(missing=[]) |
|
321 | slow_calls = SlowCallListSchema(missing=[]) | |
314 | extra = ExtraSchemaList() |
|
322 | extra = ExtraSchemaList() | |
315 |
|
323 | |||
316 |
|
324 | |||
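Because start_time and end_time now use the deferred optional_limited_date validator and missing=deferred_utcnow, the schema has to be bound before deserializing a payload. A hedged sketch of a typical call; the exact bind site in the API layer, and the resource and payload names, are assumptions here:

    import datetime

    # resource and payload are placeholders for the application row and the
    # decoded report dict handled by the API view.
    schema = ReportDetailBaseSchema().bind(
        utcnow=datetime.datetime.utcnow(),
        allow_permanent_storage=resource.allow_permanent_storage)
    report = schema.deserialize(payload)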
317 | class ReportDetailSchema_0_5(ReportDetailBaseSchema): |
|
325 | class ReportDetailSchema_0_5(ReportDetailBaseSchema): | |
318 | pass |
|
326 | pass | |
319 |
|
327 | |||
320 |
|
328 | |||
321 | class ReportDetailSchemaPermissiveDate_0_5(ReportDetailSchema_0_5): |
|
329 | class ReportDetailSchemaPermissiveDate_0_5(ReportDetailSchema_0_5): | |
322 | start_time = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow) |
|
330 | start_time = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow) | |
323 | end_time = colander.SchemaNode(NonTZDate(), missing=None) |
|
331 | end_time = colander.SchemaNode(NonTZDate(), missing=None) | |
324 |
|
332 | |||
325 |
|
333 | |||
326 | class ReportSchemaBase(colander.MappingSchema): |
|
334 | class ReportSchemaBase(colander.MappingSchema): | |
327 | """ |
|
335 | """ | |
328 | Validates format of report group |
|
336 | Validates format of report group | |
329 | """ |
|
337 | """ | |
330 | client = colander.SchemaNode(colander.String(), |
|
338 | client = colander.SchemaNode(colander.String(), | |
331 | preparer=lambda x: x or 'unknown') |
|
339 | preparer=lambda x: x or 'unknown') | |
332 | server = colander.SchemaNode( |
|
340 | server = colander.SchemaNode( | |
333 | colander.String(), |
|
341 | colander.String(), | |
334 | preparer=[ |
|
342 | preparer=[ | |
335 | lambda x: x.lower() if x else 'unknown', shortener_factory(128)], |
|
343 | lambda x: x.lower() if x else 'unknown', shortener_factory(128)], | |
336 | missing='unknown') |
|
344 | missing='unknown') | |
337 | priority = colander.SchemaNode(colander.Int(), |
|
345 | priority = colander.SchemaNode(colander.Int(), | |
338 | preparer=[lambda x: x or 5], |
|
346 | preparer=[lambda x: x or 5], | |
339 | validator=colander.Range(1, 10), |
|
347 | validator=colander.Range(1, 10), | |
340 | missing=5) |
|
348 | missing=5) | |
341 | language = colander.SchemaNode(colander.String(), missing='unknown') |
|
349 | language = colander.SchemaNode(colander.String(), missing='unknown') | |
342 | error = colander.SchemaNode(colander.String(), |
|
350 | error = colander.SchemaNode(colander.String(), | |
343 | preparer=shortener_factory(512), |
|
351 | preparer=shortener_factory(512), | |
344 | missing='') |
|
352 | missing='') | |
345 | view_name = colander.SchemaNode(colander.String(), |
|
353 | view_name = colander.SchemaNode(colander.String(), | |
346 | preparer=[shortener_factory(128), |
|
354 | preparer=[shortener_factory(128), | |
347 | lambda x: x or ''], |
|
355 | lambda x: x or ''], | |
348 | missing='') |
|
356 | missing='') | |
349 | http_status = colander.SchemaNode(colander.Int(), |
|
357 | http_status = colander.SchemaNode(colander.Int(), | |
350 | preparer=[lambda x: x or 200], |
|
358 | preparer=[lambda x: x or 200], | |
351 | validator=colander.Range(1)) |
|
359 | validator=colander.Range(1)) | |
352 |
|
360 | |||
353 | occurences = colander.SchemaNode(colander.Int(), |
|
361 | occurences = colander.SchemaNode(colander.Int(), | |
354 | validator=colander.Range(1, 99999999999), |
|
362 | validator=colander.Range(1, 99999999999), | |
355 | missing=1) |
|
363 | missing=1) | |
356 | tags = TagSchemaList() |
|
364 | tags = TagSchemaList() | |
357 |
|
365 | |||
358 |
|
366 | |||
359 | class ReportSchema_0_5(ReportSchemaBase, ReportDetailSchema_0_5): |
|
367 | class ReportSchema_0_5(ReportSchemaBase, ReportDetailSchema_0_5): | |
360 | pass |
|
368 | pass | |
361 |
|
369 | |||
362 |
|
370 | |||
363 | class ReportSchemaPermissiveDate_0_5(ReportSchemaBase, |
|
371 | class ReportSchemaPermissiveDate_0_5(ReportSchemaBase, | |
364 | ReportDetailSchemaPermissiveDate_0_5): |
|
372 | ReportDetailSchemaPermissiveDate_0_5): | |
365 | pass |
|
373 | pass | |
366 |
|
374 | |||
367 |
|
375 | |||
368 | class ReportListSchema_0_5(colander.SequenceSchema): |
|
376 | class ReportListSchema_0_5(colander.SequenceSchema): | |
369 | """ |
|
377 | """ | |
370 | Validates format of list of report groups |
|
378 | Validates format of list of report groups | |
371 | """ |
|
379 | """ | |
372 | report = ReportSchema_0_5() |
|
380 | report = ReportSchema_0_5() | |
373 | validator = colander.Length(1) |
|
381 | validator = colander.Length(1) | |
374 |
|
382 | |||
375 |
|
383 | |||
376 | class ReportListPermissiveDateSchema_0_5(colander.SequenceSchema): |
|
384 | class ReportListPermissiveDateSchema_0_5(colander.SequenceSchema): | |
377 | """ |
|
385 | """ | |
378 | Validates format of list of report groups |
|
386 | Validates format of list of report groups | |
379 | """ |
|
387 | """ | |
380 | report = ReportSchemaPermissiveDate_0_5() |
|
388 | report = ReportSchemaPermissiveDate_0_5() | |
381 | validator = colander.Length(1) |
|
389 | validator = colander.Length(1) | |
382 |
|
390 | |||
383 |
|
391 | |||
384 | class LogSchema(colander.MappingSchema): |
|
392 | class LogSchema(colander.MappingSchema): | |
385 | """ |
|
393 | """ | |
386 | Validates format of individual log entry |
|
394 | Validates format of individual log entry | |
387 | """ |
|
395 | """ | |
388 | primary_key = colander.SchemaNode(UnknownType(), |
|
396 | primary_key = colander.SchemaNode(UnknownType(), | |
389 | preparer=[cast_to_unicode_or_null, |
|
397 | preparer=[cast_to_unicode_or_null, | |
390 | shortener_factory(128)], |
|
398 | shortener_factory(128)], | |
391 | missing=None) |
|
399 | missing=None) | |
392 | log_level = colander.SchemaNode(colander.String(), |
|
400 | log_level = colander.SchemaNode(colander.String(), | |
393 | preparer=shortener_factory(10), |
|
401 | preparer=shortener_factory(10), | |
394 | missing='UNKNOWN') |
|
402 | missing='UNKNOWN') | |
395 | message = colander.SchemaNode(colander.String(), |
|
403 | message = colander.SchemaNode(colander.String(), | |
396 | preparer=shortener_factory(4096), |
|
404 | preparer=shortener_factory(4096), | |
397 | missing='') |
|
405 | missing='') | |
398 | namespace = colander.SchemaNode(colander.String(), |
|
406 | namespace = colander.SchemaNode(colander.String(), | |
399 | preparer=shortener_factory(128), |
|
407 | preparer=shortener_factory(128), | |
400 | missing='') |
|
408 | missing='') | |
401 | request_id = colander.SchemaNode(colander.String(), |
|
409 | request_id = colander.SchemaNode(colander.String(), | |
402 | preparer=shortener_factory(40), |
|
410 | preparer=shortener_factory(40), | |
403 | missing='') |
|
411 | missing='') | |
404 | server = colander.SchemaNode(colander.String(), |
|
412 | server = colander.SchemaNode(colander.String(), | |
405 | preparer=shortener_factory(128), |
|
413 | preparer=shortener_factory(128), | |
406 | missing='unknown') |
|
414 | missing='unknown') | |
407 | date = colander.SchemaNode(NonTZDate(), |
|
415 | date = colander.SchemaNode(NonTZDate(), | |
408 | validator=limited_date, |
|
416 | validator=limited_date, | |
409 | missing=deferred_utcnow) |
|
417 | missing=deferred_utcnow) | |
410 | tags = TagSchemaList() |
|
418 | tags = TagSchemaList() | |
411 |
|
419 | |||
412 |
|
420 | |||
413 | class LogSchemaPermanent(LogSchema): |
|
421 | class LogSchemaPermanent(LogSchema): | |
414 | date = colander.SchemaNode(NonTZDate(), |
|
422 | date = colander.SchemaNode(NonTZDate(), | |
415 | missing=deferred_utcnow) |
|
423 | missing=deferred_utcnow) | |
416 | permanent = colander.SchemaNode(colander.Boolean(), missing=False) |
|
424 | permanent = colander.SchemaNode(colander.Boolean(), missing=False) | |
417 |
|
425 | |||
418 |
|
426 | |||
419 | class LogListSchema(colander.SequenceSchema): |
|
427 | class LogListSchema(colander.SequenceSchema): | |
420 | """ |
|
428 | """ | |
421 | Validates format of list of log entries |
|
429 | Validates format of list of log entries | |
422 | """ |
|
430 | """ | |
423 | log = LogSchema() |
|
431 | log = LogSchema() | |
424 | validator = colander.Length(1) |
|
432 | validator = colander.Length(1) | |
425 |
|
433 | |||
426 |
|
434 | |||
427 | class LogListPermanentSchema(colander.SequenceSchema): |
|
435 | class LogListPermanentSchema(colander.SequenceSchema): | |
428 | """ |
|
436 | """ | |
429 | Validates format of list of log entries |
|
437 | Validates format of list of log entries | |
430 | """ |
|
438 | """ | |
431 | log = LogSchemaPermanent() |
|
439 | log = LogSchemaPermanent() | |
432 | validator = colander.Length(1) |
|
440 | validator = colander.Length(1) | |
433 |
|
441 | |||
434 |
|
442 | |||
435 | class ViewRequestStatsSchema(RequestStatsSchema): |
|
443 | class ViewRequestStatsSchema(RequestStatsSchema): | |
436 | requests = colander.SchemaNode(colander.Integer(), |
|
444 | requests = colander.SchemaNode(colander.Integer(), | |
437 | validator=colander.Range(0), |
|
445 | validator=colander.Range(0), | |
438 | missing=0) |
|
446 | missing=0) | |
439 |
|
447 | |||
440 |
|
448 | |||
441 | class ViewMetricTupleSchema(colander.TupleSchema): |
|
449 | class ViewMetricTupleSchema(colander.TupleSchema): | |
442 | """ |
|
450 | """ | |
443 | Validates list of views and their corresponding request stats object ie: |
|
451 | Validates list of views and their corresponding request stats object ie: | |
444 | ["dir/module:func",{"custom": 0.0..}] |
|
452 | ["dir/module:func",{"custom": 0.0..}] | |
445 | """ |
|
453 | """ | |
446 | view_name = colander.SchemaNode(colander.String(), |
|
454 | view_name = colander.SchemaNode(colander.String(), | |
447 | preparer=[shortener_factory(128), |
|
455 | preparer=[shortener_factory(128), | |
448 | lambda x: x or 'unknown'], |
|
456 | lambda x: x or 'unknown'], | |
449 | missing='unknown') |
|
457 | missing='unknown') | |
450 | metrics = ViewRequestStatsSchema() |
|
458 | metrics = ViewRequestStatsSchema() | |
451 |
|
459 | |||
452 |
|
460 | |||
453 | class ViewMetricListSchema(colander.SequenceSchema): |
|
461 | class ViewMetricListSchema(colander.SequenceSchema): | |
454 | """ |
|
462 | """ | |
455 | Validates view breakdown stats objects list |
|
463 | Validates view breakdown stats objects list | |
456 | {metrics key of server/time object} |
|
464 | {metrics key of server/time object} | |
457 | """ |
|
465 | """ | |
458 | view_tuple = ViewMetricTupleSchema() |
|
466 | view_tuple = ViewMetricTupleSchema() | |
459 | validator = colander.Length(1) |
|
467 | validator = colander.Length(1) | |
460 |
|
468 | |||
461 |
|
469 | |||
462 | class ViewMetricSchema(colander.MappingSchema): |
|
470 | class ViewMetricSchema(colander.MappingSchema): | |
463 | """ |
|
471 | """ | |
464 | Validates server/timeinterval object, ie: |
|
472 | Validates server/timeinterval object, ie: | |
465 | {server/time object} |
|
473 | {server/time object} | |
466 |
|
474 | |||
467 | """ |
|
475 | """ | |
468 | timestamp = colander.SchemaNode(NonTZDate(), |
|
476 | timestamp = colander.SchemaNode(NonTZDate(), | |
469 | validator=limited_date, |
|
477 | validator=limited_date, | |
470 | missing=None) |
|
478 | missing=None) | |
471 | server = colander.SchemaNode(colander.String(), |
|
479 | server = colander.SchemaNode(colander.String(), | |
472 | preparer=[shortener_factory(128), |
|
480 | preparer=[shortener_factory(128), | |
473 | lambda x: x or 'unknown'], |
|
481 | lambda x: x or 'unknown'], | |
474 | missing='unknown') |
|
482 | missing='unknown') | |
475 | metrics = ViewMetricListSchema() |
|
483 | metrics = ViewMetricListSchema() | |
476 |
|
484 | |||
477 |
|
485 | |||
478 | class GeneralMetricSchema(colander.MappingSchema): |
|
486 | class GeneralMetricSchema(colander.MappingSchema): | |
479 | """ |
|
487 | """ | |
480 | Validates universal metric schema |
|
488 | Validates universal metric schema | |
481 |
|
489 | |||
482 | """ |
|
490 | """ | |
483 | namespace = colander.SchemaNode(colander.String(), missing='', |
|
491 | namespace = colander.SchemaNode(colander.String(), missing='', | |
484 | preparer=shortener_factory(128)) |
|
492 | preparer=shortener_factory(128)) | |
485 |
|
493 | |||
486 | server_name = colander.SchemaNode(colander.String(), |
|
494 | server_name = colander.SchemaNode(colander.String(), | |
487 | preparer=[shortener_factory(128), |
|
495 | preparer=[shortener_factory(128), | |
488 | lambda x: x or 'unknown'], |
|
496 | lambda x: x or 'unknown'], | |
489 | missing='unknown') |
|
497 | missing='unknown') | |
490 | timestamp = colander.SchemaNode(NonTZDate(), validator=limited_date, |
|
498 | timestamp = colander.SchemaNode(NonTZDate(), validator=limited_date, | |
491 | missing=deferred_utcnow) |
|
499 | missing=deferred_utcnow) | |
492 | tags = TagSchemaList(missing=colander.required) |
|
500 | tags = TagSchemaList(missing=colander.required) | |
493 |
|
501 | |||
494 |
|
502 | |||
495 | class GeneralMetricPermanentSchema(GeneralMetricSchema): |
|
503 | class GeneralMetricPermanentSchema(GeneralMetricSchema): | |
496 | """ |
|
504 | """ | |
497 | Validates universal metric schema |
|
505 | Validates universal metric schema | |
498 |
|
506 | |||
499 | """ |
|
507 | """ | |
500 | timestamp = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow) |
|
508 | timestamp = colander.SchemaNode(NonTZDate(), missing=deferred_utcnow) | |
501 |
|
509 | |||
502 |
|
510 | |||
503 | class GeneralMetricsListSchema(colander.SequenceSchema): |
|
511 | class GeneralMetricsListSchema(colander.SequenceSchema): | |
504 | metric = GeneralMetricSchema() |
|
512 | metric = GeneralMetricSchema() | |
505 | validator = colander.Length(1) |
|
513 | validator = colander.Length(1) | |
506 |
|
514 | |||
507 |
|
515 | |||
508 | class GeneralMetricsPermanentListSchema(colander.SequenceSchema): |
|
516 | class GeneralMetricsPermanentListSchema(colander.SequenceSchema): | |
509 | metric = GeneralMetricPermanentSchema() |
|
517 | metric = GeneralMetricPermanentSchema() | |
510 | validator = colander.Length(1) |
|
518 | validator = colander.Length(1) | |
511 |
|
519 | |||
512 |
|
520 | |||
513 | class MetricsListSchema(colander.SequenceSchema): |
|
521 | class MetricsListSchema(colander.SequenceSchema): | |
514 | """ |
|
522 | """ | |
515 | Validates list of metrics objects ie: |
|
523 | Validates list of metrics objects ie: | |
516 | [{server/time object}, ] part |
|
524 | [{server/time object}, ] part | |
517 |
|
525 | |||
518 |
|
526 | |||
519 | """ |
|
527 | """ | |
520 | metric = ViewMetricSchema() |
|
528 | metric = ViewMetricSchema() | |
521 | validator = colander.Length(1) |
|
529 | validator = colander.Length(1) | |
522 |
|
530 | |||
523 |
|
531 | |||
524 | class StringToAppList(object): |
|
532 | class StringToAppList(object): | |
525 | """ |
|
533 | """ | |
526 | Returns a validated list of application ids from the user query, |
|
534 | Returns a validated list of application ids from the user query, | |
527 | restricted to the set of applications the user is allowed to look at; |
|
535 | restricted to the set of applications the user is allowed to look at; | |
528 | transforms a single string into a list containing a single integer |
|
536 | transforms a single string into a list containing a single integer | |
529 | """ |
|
537 | """ | |
530 |
|
538 | |||
531 | def serialize(self, node, appstruct): |
|
539 | def serialize(self, node, appstruct): | |
532 | if appstruct is null: |
|
540 | if appstruct is null: | |
533 | return null |
|
541 | return null | |
534 | return appstruct |
|
542 | return appstruct | |
535 |
|
543 | |||
536 | def deserialize(self, node, cstruct): |
|
544 | def deserialize(self, node, cstruct): | |
537 | if cstruct is null: |
|
545 | if cstruct is null: | |
538 | return null |
|
546 | return null | |
539 |
|
547 | |||
540 | apps = set([int(a) for a in node.bindings['resources']]) |
|
548 | apps = set([int(a) for a in node.bindings['resources']]) | |
541 |
|
549 | |||
542 | if isinstance(cstruct, str): |
|
550 | if isinstance(cstruct, str): | |
543 | cstruct = [cstruct] |
|
551 | cstruct = [cstruct] | |
544 |
|
552 | |||
545 | cstruct = [int(a) for a in cstruct] |
|
553 | cstruct = [int(a) for a in cstruct] | |
546 |
|
554 | |||
547 | valid_apps = list(apps.intersection(set(cstruct))) |
|
555 | valid_apps = list(apps.intersection(set(cstruct))) | |
548 | if valid_apps: |
|
556 | if valid_apps: | |
549 | return valid_apps |
|
557 | return valid_apps | |
550 | return null |
|
558 | return null | |
551 |
|
559 | |||
552 | def cstruct_children(self): |
|
560 | def cstruct_children(self): | |
553 | return [] |
|
561 | return [] | |
554 |
|
562 | |||
555 |
|
563 | |||
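
A minimal sketch of the type above in isolation, assuming it is imported from this validators module; the `resources` binding plays the role of the per-user application list that the search schemas below supply:

    import colander
    from appenlight.validators import StringToAppList

    # in the search schemas below the node also gets a deferred `missing`,
    # so an empty intersection falls back to every allowed application
    node = colander.SchemaNode(StringToAppList(), name='resource', missing=None)
    bound = node.bind(resources=[1, 5, 9])   # apps the current user may query

    print(bound.deserialize('5'))            # '5' -> ['5'] -> [5]; 5 is allowed
    print(bound.deserialize(['5', '7']))     # -> [5]; 7 is filtered out
    print(bound.deserialize(['7']))          # type returns null -> falls back to missing (None here)
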
556 | @colander.deferred |
|
564 | @colander.deferred | |
557 | def possible_applications_validator(node, kw): |
|
565 | def possible_applications_validator(node, kw): | |
558 | possible_apps = [int(a) for a in kw['resources']] |
|
566 | possible_apps = [int(a) for a in kw['resources']] | |
559 | return colander.All(colander.ContainsOnly(possible_apps), |
|
567 | return colander.All(colander.ContainsOnly(possible_apps), | |
560 | colander.Length(1)) |
|
568 | colander.Length(1)) | |
561 |
|
569 | |||
562 |
|
570 | |||
563 | @colander.deferred |
|
571 | @colander.deferred | |
564 | def possible_applications(node, kw): |
|
572 | def possible_applications(node, kw): | |
565 | return [int(a) for a in kw['resources']] |
|
573 | return [int(a) for a in kw['resources']] | |
566 |
|
574 | |||
567 |
|
575 | |||
568 | @colander.deferred |
|
576 | @colander.deferred | |
569 | def today_start(node, kw): |
|
577 | def today_start(node, kw): | |
570 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, |
|
578 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, | |
571 | minute=0, |
|
579 | minute=0, | |
572 | hour=0) |
|
580 | hour=0) | |
573 |
|
581 | |||
574 |
|
582 | |||
575 | @colander.deferred |
|
583 | @colander.deferred | |
576 | def today_end(node, kw): |
|
584 | def today_end(node, kw): | |
577 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, |
|
585 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, | |
578 | minute=59, hour=23) |
|
586 | minute=59, hour=23) | |
579 |
|
587 | |||
580 |
|
588 | |||
581 | @colander.deferred |
|
589 | @colander.deferred | |
582 | def old_start(node, kw): |
|
590 | def old_start(node, kw): | |
583 | t_delta = datetime.timedelta(days=90) |
|
591 | t_delta = datetime.timedelta(days=90) | |
584 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, |
|
592 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, | |
585 | minute=0, |
|
593 | minute=0, | |
586 | hour=0) - t_delta |
|
594 | hour=0) - t_delta | |
587 |
|
595 | |||
588 |
|
596 | |||
589 | @colander.deferred |
|
597 | @colander.deferred | |
590 | def today_end(node, kw): |
|
598 | def today_end(node, kw): | |
591 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, |
|
599 | return datetime.datetime.utcnow().replace(second=0, microsecond=0, | |
592 | minute=59, hour=23) |
|
600 | minute=59, hour=23) | |
593 |
|
601 | |||
594 |
|
602 | |||
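
The deferred helpers above are evaluated when a schema or node is bound, not at import time, so each bound schema sees the current day's boundaries. A small sketch, assuming `today_start` is importable from this module:

    import colander
    from appenlight.validators import today_start

    node = colander.SchemaNode(colander.DateTime(), missing=today_start)
    bound = node.bind()      # deferreds are resolved here, per bind() call
    print(bound.missing)     # midnight UTC of the current day
    # today_end is analogous but clamps to 23:59 of the same day
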
595 | class PermissiveDate(colander.DateTime): |
|
603 | class PermissiveDate(colander.DateTime): | |
596 | """ Returns null for incorrect date format - also removes tz info""" |
|
604 | """ Returns null for incorrect date format - also removes tz info""" | |
597 |
|
605 | |||
598 | def deserialize(self, node, cstruct): |
|
606 | def deserialize(self, node, cstruct): | |
599 | if not cstruct: |
|
607 | if not cstruct: | |
600 | return null |
|
608 | return null | |
601 |
|
609 | |||
602 | try: |
|
610 | try: | |
603 | result = colander.iso8601.parse_date( |
|
611 | result = colander.iso8601.parse_date( | |
604 | cstruct, default_timezone=self.default_tzinfo) |
|
612 | cstruct, default_timezone=self.default_tzinfo) | |
605 | except colander.iso8601.ParseError: |
|
613 | except colander.iso8601.ParseError: | |
606 | return null |
|
614 | return null | |
607 | return result.replace(tzinfo=None) |
|
615 | return result.replace(tzinfo=None) | |
608 |
|
616 | |||
609 |
|
617 | |||
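
PermissiveDate deliberately swallows parse errors instead of raising `colander.Invalid`, and strips timezone info from whatever it does parse. A short sketch, assuming an import from this module:

    import colander
    from appenlight.validators import PermissiveDate

    node = colander.SchemaNode(PermissiveDate(), missing=None)

    print(node.deserialize('2016-05-12T10:00:00+02:00'))
    # -> datetime.datetime(2016, 5, 12, 10, 0)  (tzinfo removed, value not converted)

    print(node.deserialize('not-a-date'))
    # -> None: the type returns null, which falls back to the node's `missing`
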
610 | class LogSearchSchema(colander.MappingSchema): |
|
618 | class LogSearchSchema(colander.MappingSchema): | |
611 | def schema_type(self, **kw): |
|
619 | def schema_type(self, **kw): | |
612 | return colander.Mapping(unknown='preserve') |
|
620 | return colander.Mapping(unknown='preserve') | |
613 |
|
621 | |||
614 | resource = colander.SchemaNode(StringToAppList(), |
|
622 | resource = colander.SchemaNode(StringToAppList(), | |
615 | validator=possible_applications_validator, |
|
623 | validator=possible_applications_validator, | |
616 | missing=possible_applications) |
|
624 | missing=possible_applications) | |
617 |
|
625 | |||
618 | message = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
626 | message = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
619 | colander.SchemaNode(colander.String()), |
|
627 | colander.SchemaNode(colander.String()), | |
620 | missing=None) |
|
628 | missing=None) | |
621 | level = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
629 | level = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
622 | colander.SchemaNode(colander.String()), |
|
630 | colander.SchemaNode(colander.String()), | |
623 | preparer=lowercase_preparer, |
|
631 | preparer=lowercase_preparer, | |
624 | missing=None) |
|
632 | missing=None) | |
625 | namespace = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
633 | namespace = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
626 | colander.SchemaNode(colander.String()), |
|
634 | colander.SchemaNode(colander.String()), | |
627 | preparer=lowercase_preparer, |
|
635 | preparer=lowercase_preparer, | |
628 | missing=None) |
|
636 | missing=None) | |
629 | request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
637 | request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
630 | colander.SchemaNode(colander.String()), |
|
638 | colander.SchemaNode(colander.String()), | |
631 | preparer=lowercase_preparer, |
|
639 | preparer=lowercase_preparer, | |
632 | missing=None) |
|
640 | missing=None) | |
633 | start_date = colander.SchemaNode(PermissiveDate(), |
|
641 | start_date = colander.SchemaNode(PermissiveDate(), | |
634 | missing=None) |
|
642 | missing=None) | |
635 | end_date = colander.SchemaNode(PermissiveDate(), |
|
643 | end_date = colander.SchemaNode(PermissiveDate(), | |
636 | missing=None) |
|
644 | missing=None) | |
637 | page = colander.SchemaNode(colander.Integer(), |
|
645 | page = colander.SchemaNode(colander.Integer(), | |
638 | validator=colander.Range(min=1), |
|
646 | validator=colander.Range(min=1), | |
639 | missing=1) |
|
647 | missing=1) | |
640 |
|
648 | |||
641 |
|
649 | |||
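
A sketch of how the search schema is meant to be used: it is bound with the ids of the applications the current user may query, which feeds both the StringToAppList type and the deferred validator/missing on `resource`. The field values below are hypothetical, and `lowercase_preparer` (defined earlier in this module) is assumed to lowercase the submitted values:

    from appenlight.validators import LogSearchSchema

    schema = LogSearchSchema().bind(resources=[1, 5, 9])
    cleaned = schema.deserialize({'resource': '5', 'level': 'ERROR', 'page': '2'})

    # cleaned['resource'] == [5]   only applications the user may see survive
    # cleaned['level']: scalar wrapped into a list; lowercase_preparer (assumed)
    #                   yields ['error']
    # cleaned['page'] == 2         coerced to int, Range(min=1) enforced
    # fields not submitted (message, start_date, ...) fall back to their `missing`
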
642 | class ReportSearchSchema(colander.MappingSchema): |
|
650 | class ReportSearchSchema(colander.MappingSchema): | |
643 | def schema_type(self, **kw): |
|
651 | def schema_type(self, **kw): | |
644 | return colander.Mapping(unknown='preserve') |
|
652 | return colander.Mapping(unknown='preserve') | |
645 |
|
653 | |||
646 | resource = colander.SchemaNode(StringToAppList(), |
|
654 | resource = colander.SchemaNode(StringToAppList(), | |
647 | validator=possible_applications_validator, |
|
655 | validator=possible_applications_validator, | |
648 | missing=possible_applications) |
|
656 | missing=possible_applications) | |
649 | request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
657 | request_id = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
650 | colander.SchemaNode(colander.String()), |
|
658 | colander.SchemaNode(colander.String()), | |
651 | missing=None) |
|
659 | missing=None) | |
652 | start_date = colander.SchemaNode(PermissiveDate(), |
|
660 | start_date = colander.SchemaNode(PermissiveDate(), | |
653 | missing=None) |
|
661 | missing=None) | |
654 | end_date = colander.SchemaNode(PermissiveDate(), |
|
662 | end_date = colander.SchemaNode(PermissiveDate(), | |
655 | missing=None) |
|
663 | missing=None) | |
656 | page = colander.SchemaNode(colander.Integer(), |
|
664 | page = colander.SchemaNode(colander.Integer(), | |
657 | validator=colander.Range(min=1), |
|
665 | validator=colander.Range(min=1), | |
658 | missing=1) |
|
666 | missing=1) | |
659 |
|
667 | |||
660 | min_occurences = colander.SchemaNode( |
|
668 | min_occurences = colander.SchemaNode( | |
661 | colander.Sequence(accept_scalar=True), |
|
669 | colander.Sequence(accept_scalar=True), | |
662 | colander.SchemaNode(colander.Integer()), |
|
670 | colander.SchemaNode(colander.Integer()), | |
663 | missing=None) |
|
671 | missing=None) | |
664 |
|
672 | |||
665 | http_status = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
673 | http_status = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
666 | colander.SchemaNode(colander.Integer()), |
|
674 | colander.SchemaNode(colander.Integer()), | |
667 | missing=None) |
|
675 | missing=None) | |
668 | priority = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
676 | priority = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
669 | colander.SchemaNode(colander.Integer()), |
|
677 | colander.SchemaNode(colander.Integer()), | |
670 | missing=None) |
|
678 | missing=None) | |
671 | error = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
679 | error = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
672 | colander.SchemaNode(colander.String()), |
|
680 | colander.SchemaNode(colander.String()), | |
673 | missing=None) |
|
681 | missing=None) | |
674 | url_path = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
682 | url_path = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
675 | colander.SchemaNode(colander.String()), |
|
683 | colander.SchemaNode(colander.String()), | |
676 | missing=None) |
|
684 | missing=None) | |
677 | url_domain = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
685 | url_domain = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
678 | colander.SchemaNode(colander.String()), |
|
686 | colander.SchemaNode(colander.String()), | |
679 | missing=None) |
|
687 | missing=None) | |
680 | report_status = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
688 | report_status = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
681 | colander.SchemaNode(colander.String()), |
|
689 | colander.SchemaNode(colander.String()), | |
682 | missing=None) |
|
690 | missing=None) | |
683 | min_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
691 | min_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
684 | colander.SchemaNode(colander.Float()), |
|
692 | colander.SchemaNode(colander.Float()), | |
685 | missing=None) |
|
693 | missing=None) | |
686 | max_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
694 | max_duration = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
687 | colander.SchemaNode(colander.Float()), |
|
695 | colander.SchemaNode(colander.Float()), | |
688 | missing=None) |
|
696 | missing=None) | |
689 |
|
697 | |||
690 |
|
698 | |||
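
Most filter fields above use `colander.Sequence(accept_scalar=True)`, so a single query-string value and a repeated one deserialize to the same list shape. A short hypothetical example, assuming imports from this module:

    from appenlight.validators import ReportSearchSchema

    schema = ReportSearchSchema().bind(resources=[3])
    out = schema.deserialize({'http_status': '500', 'min_duration': ['0.5', '1.5']})

    # out['http_status']  == [500]       scalar promoted to a one-element list of ints
    # out['min_duration'] == [0.5, 1.5]  lists pass through, coerced to floats
    # out['resource']     == [3]         nothing submitted, so the deferred missing
    #                                    (possible_applications) supplies all allowed apps
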
691 | class TagSchema(colander.MappingSchema): |
|
699 | class TagSchema(colander.MappingSchema): | |
692 | """ |
|
700 | """ | |
693 | Used in log search |
|
701 | Used in log search | |
694 | """ |
|
702 | """ | |
695 | name = colander.SchemaNode(colander.String(), |
|
703 | name = colander.SchemaNode(colander.String(), | |
696 | validator=colander.Length(1, 32)) |
|
704 | validator=colander.Length(1, 32)) | |
697 | value = colander.SchemaNode(colander.Sequence(accept_scalar=True), |
|
705 | value = colander.SchemaNode(colander.Sequence(accept_scalar=True), | |
698 | colander.SchemaNode(colander.String(), |
|
706 | colander.SchemaNode(colander.String(), | |
699 | validator=colander.Length( |
|
707 | validator=colander.Length( | |
700 | 1, 128)), |
|
708 | 1, 128)), | |
701 | missing=None) |
|
709 | missing=None) | |
702 | op = colander.SchemaNode(colander.String(), |
|
710 | op = colander.SchemaNode(colander.String(), | |
703 | validator=colander.Length(1, 128), |
|
711 | validator=colander.Length(1, 128), | |
704 | missing=None) |
|
712 | missing=None) | |
705 |
|
713 | |||
706 |
|
714 | |||
707 | class TagListSchema(colander.SequenceSchema): |
|
715 | class TagListSchema(colander.SequenceSchema): | |
708 | tag = TagSchema() |
|
716 | tag = TagSchema() | |
709 |
|
717 | |||
710 |
|
718 | |||
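
A short example of the tag filter structure the log search accepts (values are hypothetical; imports assumed from this module):

    from appenlight.validators import TagListSchema

    tags = TagListSchema().deserialize([
        {'name': 'requests', 'value': '100', 'op': 'ge'},
        {'name': 'server_name', 'value': ['web01', 'web02']},
    ])
    # -> [{'name': 'requests',    'value': ['100'],            'op': 'ge'},
    #     {'name': 'server_name', 'value': ['web01', 'web02'], 'op': None}]
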
711 | class RuleFieldType(object): |
|
719 | class RuleFieldType(object): | |
712 | """ Validator which succeeds if the value passed to it is one of |
|
720 | """ Validator which succeeds if the value passed to it is one of | |
713 | a fixed set of values """ |
|
721 | a fixed set of values """ | |
714 |
|
722 | |||
715 | def __init__(self, cast_to): |
|
723 | def __init__(self, cast_to): | |
716 | self.cast_to = cast_to |
|
724 | self.cast_to = cast_to | |
717 |
|
725 | |||
718 | def __call__(self, node, value): |
|
726 | def __call__(self, node, value): | |
719 | try: |
|
727 | try: | |
720 | if self.cast_to == 'int': |
|
728 | if self.cast_to == 'int': | |
721 | int(value) |
|
729 | int(value) | |
722 | elif self.cast_to == 'float': |
|
730 | elif self.cast_to == 'float': | |
723 | float(value) |
|
731 | float(value) | |
724 | elif self.cast_to == 'unicode': |
|
732 | elif self.cast_to == 'unicode': | |
725 | str(value) |
|
733 | str(value) | |
726 | except: |
|
734 | except: | |
727 | raise colander.Invalid(node, |
|
735 | raise colander.Invalid(node, | |
728 | "Can't cast {} to {}".format( |
|
736 | "Can't cast {} to {}".format( | |
729 | value, self.cast_to)) |
|
737 | value, self.cast_to)) | |
730 |
|
738 | |||
731 |
|
739 | |||
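
RuleFieldType only checks that the raw string can be cast; it does not convert it. A minimal illustration, assuming the class is imported from this module:

    import colander
    from appenlight.validators import RuleFieldType

    node = colander.SchemaNode(colander.String(), validator=RuleFieldType('int'))

    node.deserialize('500')    # passes validation, still returns the string '500'
    try:
        node.deserialize('50x')
    except colander.Invalid as exc:
        print(exc.asdict())    # reports "Can't cast 50x to int"
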
732 | def build_rule_schema(ruleset, check_matrix): |
|
740 | def build_rule_schema(ruleset, check_matrix): | |
733 | """ |
|
741 | """ | |
734 | Accepts ruleset and a map of fields/possible operations and builds |
|
742 | Accepts ruleset and a map of fields/possible operations and builds | |
735 | validation class |
|
743 | validation class | |
736 | """ |
|
744 | """ | |
737 |
|
745 | |||
738 | schema = colander.SchemaNode(colander.Mapping()) |
|
746 | schema = colander.SchemaNode(colander.Mapping()) | |
739 | schema.add(colander.SchemaNode(colander.String(), name='field')) |
|
747 | schema.add(colander.SchemaNode(colander.String(), name='field')) | |
740 |
|
748 | |||
741 | if ruleset['field'] in ['__AND__', '__OR__', '__NOT__']: |
|
749 | if ruleset['field'] in ['__AND__', '__OR__', '__NOT__']: | |
742 | subrules = colander.SchemaNode(colander.Tuple(), name='rules') |
|
750 | subrules = colander.SchemaNode(colander.Tuple(), name='rules') | |
743 | for rule in ruleset['rules']: |
|
751 | for rule in ruleset['rules']: | |
744 | subrules.add(build_rule_schema(rule, check_matrix)) |
|
752 | subrules.add(build_rule_schema(rule, check_matrix)) | |
745 | schema.add(subrules) |
|
753 | schema.add(subrules) | |
746 | else: |
|
754 | else: | |
747 | op_choices = check_matrix[ruleset['field']]['ops'] |
|
755 | op_choices = check_matrix[ruleset['field']]['ops'] | |
748 | cast_to = check_matrix[ruleset['field']]['type'] |
|
756 | cast_to = check_matrix[ruleset['field']]['type'] | |
749 | schema.add(colander.SchemaNode(colander.String(), |
|
757 | schema.add(colander.SchemaNode(colander.String(), | |
750 | validator=colander.OneOf(op_choices), |
|
758 | validator=colander.OneOf(op_choices), | |
751 | name='op')) |
|
759 | name='op')) | |
752 |
|
760 | |||
753 | schema.add(colander.SchemaNode(colander.String(), |
|
761 | schema.add(colander.SchemaNode(colander.String(), | |
754 | name='value', |
|
762 | name='value', | |
755 | validator=RuleFieldType(cast_to))) |
|
763 | validator=RuleFieldType(cast_to))) | |
756 | return schema |
|
764 | return schema | |
757 |
|
765 | |||
758 |
|
766 | |||
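
`build_rule_schema` builds the validator dynamically from the ruleset being checked: leaf rules get `field`/`op`/`value` nodes constrained by the check matrix, while `__AND__`/`__OR__`/`__NOT__` nodes recurse into their sub-rules. A hypothetical leaf-rule example (the check matrix entries are made up for illustration):

    from appenlight.validators import build_rule_schema

    check_matrix = {'http_status': {'ops': ('eq', 'ne', 'ge'), 'type': 'int'}}
    ruleset = {'field': 'http_status', 'op': 'ge', 'value': '500'}

    schema = build_rule_schema(ruleset, check_matrix)
    print(schema.deserialize(ruleset))
    # -> {'field': 'http_status', 'op': 'ge', 'value': '500'}
    # an op outside ('eq', 'ne', 'ge') or a value that cannot be cast to int
    # would raise colander.Invalid instead
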
759 | class ConfigTypeSchema(colander.MappingSchema): |
|
767 | class ConfigTypeSchema(colander.MappingSchema): | |
760 | type = colander.SchemaNode(colander.String(), missing=None) |
|
768 | type = colander.SchemaNode(colander.String(), missing=None) | |
761 | config = colander.SchemaNode(UnknownType(), missing=None) |
|
769 | config = colander.SchemaNode(UnknownType(), missing=None) | |
762 |
|
770 | |||
763 |
|
771 | |||
764 | class MappingListSchema(colander.SequenceSchema): |
|
772 | class MappingListSchema(colander.SequenceSchema): | |
765 | config = colander.SchemaNode(UnknownType()) |
|
773 | config = colander.SchemaNode(UnknownType()) |
@@ -1,438 +1,440 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
18 | # AppEnlight Enterprise Edition, including its added features, Support | |
19 | # services, and proprietary license terms, please see |
|
19 | # services, and proprietary license terms, please see | |
20 | # https://rhodecode.com/licenses/ |
|
20 | # https://rhodecode.com/licenses/ | |
21 |
|
21 | |||
22 | import base64 |
|
22 | import base64 | |
23 | import io |
|
23 | import io | |
24 | import datetime |
|
24 | import datetime | |
25 | import json |
|
25 | import json | |
26 | import logging |
|
26 | import logging | |
27 | import urllib.request, urllib.parse, urllib.error |
|
27 | import urllib.request, urllib.parse, urllib.error | |
28 | import zlib |
|
28 | import zlib | |
29 |
|
29 | |||
30 | from gzip import GzipFile |
|
30 | from gzip import GzipFile | |
31 | from pyramid.view import view_config |
|
31 | from pyramid.view import view_config | |
32 | from pyramid.httpexceptions import HTTPBadRequest |
|
32 | from pyramid.httpexceptions import HTTPBadRequest | |
33 |
|
33 | |||
34 | import appenlight.celery.tasks as tasks |
|
34 | import appenlight.celery.tasks as tasks | |
35 | from appenlight.lib.api import rate_limiting, check_cors |
|
35 | from appenlight.lib.api import rate_limiting, check_cors | |
36 | from appenlight.lib.enums import ParsedSentryEventType |
|
36 | from appenlight.lib.enums import ParsedSentryEventType | |
37 | from appenlight.lib.utils import parse_proto |
|
37 | from appenlight.lib.utils import parse_proto | |
38 | from appenlight.lib.utils.airbrake import parse_airbrake_xml |
|
38 | from appenlight.lib.utils.airbrake import parse_airbrake_xml | |
39 | from appenlight.lib.utils.date_utils import convert_date |
|
39 | from appenlight.lib.utils.date_utils import convert_date | |
40 | from appenlight.lib.utils.sentry import parse_sentry_event |
|
40 | from appenlight.lib.utils.sentry import parse_sentry_event | |
41 | from appenlight.lib.request import JSONException |
|
41 | from appenlight.lib.request import JSONException | |
42 | from appenlight.validators import (LogListSchema, |
|
42 | from appenlight.validators import (LogListSchema, | |
43 | MetricsListSchema, |
|
43 | MetricsListSchema, | |
44 | GeneralMetricsListSchema, |
|
44 | GeneralMetricsListSchema, | |
45 | GeneralMetricsPermanentListSchema, |
|
45 | GeneralMetricsPermanentListSchema, | |
46 | GeneralMetricSchema, |
|
46 | GeneralMetricSchema, | |
47 | GeneralMetricPermanentSchema, |
|
47 | GeneralMetricPermanentSchema, | |
48 | LogListPermanentSchema, |
|
48 | LogListPermanentSchema, | |
49 | ReportListSchema_0_5, |
|
49 | ReportListSchema_0_5, | |
50 | LogSchema, |
|
50 | LogSchema, | |
51 | LogSchemaPermanent, |
|
51 | LogSchemaPermanent, | |
52 | ReportSchema_0_5) |
|
52 | ReportSchema_0_5) | |
53 |
|
53 | |||
54 | log = logging.getLogger(__name__) |
|
54 | log = logging.getLogger(__name__) | |
55 |
|
55 | |||
56 |
|
56 | |||
57 | @view_config(route_name='api_logs', renderer='string', permission='create', |
|
57 | @view_config(route_name='api_logs', renderer='string', permission='create', | |
58 | require_csrf=False) |
|
58 | require_csrf=False) | |
59 | @view_config(route_name='api_log', renderer='string', permission='create', |
|
59 | @view_config(route_name='api_log', renderer='string', permission='create', | |
60 | require_csrf=False) |
|
60 | require_csrf=False) | |
61 | def logs_create(request): |
|
61 | def logs_create(request): | |
62 | """ |
|
62 | """ | |
63 | Endpoint for log aggregation |
|
63 | Endpoint for log aggregation | |
64 | """ |
|
64 | """ | |
65 | application = request.context.resource |
|
65 | application = request.context.resource | |
66 | if request.method.upper() == 'OPTIONS': |
|
66 | if request.method.upper() == 'OPTIONS': | |
67 | return check_cors(request, application) |
|
67 | return check_cors(request, application) | |
68 | else: |
|
68 | else: | |
69 | check_cors(request, application, should_return=False) |
|
69 | check_cors(request, application, should_return=False) | |
70 |
|
70 | |||
71 | params = dict(request.params.copy()) |
|
71 | params = dict(request.params.copy()) | |
72 | proto_version = parse_proto(params.get('protocol_version', '')) |
|
72 | proto_version = parse_proto(params.get('protocol_version', '')) | |
73 | payload = request.unsafe_json_body |
|
73 | payload = request.unsafe_json_body | |
74 | sequence_accepted = request.matched_route.name == 'api_logs' |
|
74 | sequence_accepted = request.matched_route.name == 'api_logs' | |
75 |
|
75 | |||
76 | if sequence_accepted: |
|
76 | if sequence_accepted: | |
77 | if application.allow_permanent_storage: |
|
77 | if application.allow_permanent_storage: | |
78 | schema = LogListPermanentSchema().bind( |
|
78 | schema = LogListPermanentSchema().bind( | |
79 | utcnow=datetime.datetime.utcnow()) |
|
79 | utcnow=datetime.datetime.utcnow()) | |
80 | else: |
|
80 | else: | |
81 | schema = LogListSchema().bind( |
|
81 | schema = LogListSchema().bind( | |
82 | utcnow=datetime.datetime.utcnow()) |
|
82 | utcnow=datetime.datetime.utcnow()) | |
83 | else: |
|
83 | else: | |
84 | if application.allow_permanent_storage: |
|
84 | if application.allow_permanent_storage: | |
85 | schema = LogSchemaPermanent().bind( |
|
85 | schema = LogSchemaPermanent().bind( | |
86 | utcnow=datetime.datetime.utcnow()) |
|
86 | utcnow=datetime.datetime.utcnow()) | |
87 | else: |
|
87 | else: | |
88 | schema = LogSchema().bind( |
|
88 | schema = LogSchema().bind( | |
89 | utcnow=datetime.datetime.utcnow()) |
|
89 | utcnow=datetime.datetime.utcnow()) | |
90 |
|
90 | |||
91 | deserialized_logs = schema.deserialize(payload) |
|
91 | deserialized_logs = schema.deserialize(payload) | |
92 | if sequence_accepted is False: |
|
92 | if sequence_accepted is False: | |
93 | deserialized_logs = [deserialized_logs] |
|
93 | deserialized_logs = [deserialized_logs] | |
94 |
|
94 | |||
95 | rate_limiting(request, application, 'per_application_logs_rate_limit', |
|
95 | rate_limiting(request, application, 'per_application_logs_rate_limit', | |
96 | len(deserialized_logs)) |
|
96 | len(deserialized_logs)) | |
97 |
|
97 | |||
98 | # pprint.pprint(deserialized_logs) |
|
98 | # pprint.pprint(deserialized_logs) | |
99 |
|
99 | |||
100 | # we need to split those out so we can process the pkey ones one by one |
|
100 | # we need to split those out so we can process the pkey ones one by one | |
101 | non_pkey_logs = [log_dict for log_dict in deserialized_logs |
|
101 | non_pkey_logs = [log_dict for log_dict in deserialized_logs | |
102 | if not log_dict['primary_key']] |
|
102 | if not log_dict['primary_key']] | |
103 | pkey_dict = {} |
|
103 | pkey_dict = {} | |
104 | # try to process the logs as best as we can and group together to reduce |
|
104 | # try to process the logs as best as we can and group together to reduce | |
105 | # the amount of |
|
105 | # the amount of | |
106 | for log_dict in deserialized_logs: |
|
106 | for log_dict in deserialized_logs: | |
107 | if log_dict['primary_key']: |
|
107 | if log_dict['primary_key']: | |
108 | key = (log_dict['primary_key'], log_dict['namespace'],) |
|
108 | key = (log_dict['primary_key'], log_dict['namespace'],) | |
109 | if not key in pkey_dict: |
|
109 | if not key in pkey_dict: | |
110 | pkey_dict[key] = [] |
|
110 | pkey_dict[key] = [] | |
111 | pkey_dict[key].append(log_dict) |
|
111 | pkey_dict[key].append(log_dict) | |
112 |
|
112 | |||
113 | if non_pkey_logs: |
|
113 | if non_pkey_logs: | |
114 | log.debug('%s non-pkey logs received: %s' % (application, |
|
114 | log.debug('%s non-pkey logs received: %s' % (application, | |
115 | len(non_pkey_logs))) |
|
115 | len(non_pkey_logs))) | |
116 | tasks.add_logs.delay(application.resource_id, params, non_pkey_logs) |
|
116 | tasks.add_logs.delay(application.resource_id, params, non_pkey_logs) | |
117 | if pkey_dict: |
|
117 | if pkey_dict: | |
118 | logs_to_insert = [] |
|
118 | logs_to_insert = [] | |
119 | for primary_key_tuple, payload in pkey_dict.items(): |
|
119 | for primary_key_tuple, payload in pkey_dict.items(): | |
120 | sorted_logs = sorted(payload, key=lambda x: x['date']) |
|
120 | sorted_logs = sorted(payload, key=lambda x: x['date']) | |
121 | logs_to_insert.append(sorted_logs[-1]) |
|
121 | logs_to_insert.append(sorted_logs[-1]) | |
122 | log.debug('%s pkey logs received: %s' % (application, |
|
122 | log.debug('%s pkey logs received: %s' % (application, | |
123 | len(logs_to_insert))) |
|
123 | len(logs_to_insert))) | |
124 | tasks.add_logs.delay(application.resource_id, params, logs_to_insert) |
|
124 | tasks.add_logs.delay(application.resource_id, params, logs_to_insert) | |
125 |
|
125 | |||
126 | log.info('LOG call %s %s client:%s' % ( |
|
126 | log.info('LOG call %s %s client:%s' % ( | |
127 | application, proto_version, request.headers.get('user_agent'))) |
|
127 | application, proto_version, request.headers.get('user_agent'))) | |
128 | return 'OK: Logs accepted' |
|
128 | return 'OK: Logs accepted' | |
129 |
|
129 | |||
130 |
|
130 | |||
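
The grouping above keeps only the newest row per `(primary_key, namespace)` pair before handing work to the Celery task. A standalone sketch of that reduction with hypothetical log entries (ISO strings stand in for the datetimes the schema produces):

    # entries sharing a primary key collapse to the most recent one;
    # entries without a primary key are forwarded as-is
    logs = [
        {'primary_key': 'job-1', 'namespace': 'worker', 'date': '2016-05-12T10:00:00'},
        {'primary_key': 'job-1', 'namespace': 'worker', 'date': '2016-05-12T10:05:00'},
        {'primary_key': None,    'namespace': 'worker', 'date': '2016-05-12T10:01:00'},
    ]

    pkey_dict = {}
    for log_dict in logs:
        if log_dict['primary_key']:
            key = (log_dict['primary_key'], log_dict['namespace'])
            pkey_dict.setdefault(key, []).append(log_dict)

    logs_to_insert = [sorted(group, key=lambda x: x['date'])[-1]
                      for group in pkey_dict.values()]
    # logs_to_insert holds only the 10:05 entry; the None-pkey row would be
    # sent to tasks.add_logs in the non_pkey_logs batch
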
131 | @view_config(route_name='api_request_stats', renderer='string', |
|
131 | @view_config(route_name='api_request_stats', renderer='string', | |
132 | permission='create', require_csrf=False) |
|
132 | permission='create', require_csrf=False) | |
133 | @view_config(route_name='api_metrics', renderer='string', |
|
133 | @view_config(route_name='api_metrics', renderer='string', | |
134 | permission='create', require_csrf=False) |
|
134 | permission='create', require_csrf=False) | |
135 | def request_metrics_create(request): |
|
135 | def request_metrics_create(request): | |
136 | """ |
|
136 | """ | |
137 | Endpoint for performance metrics, aggregates view performance stats |
|
137 | Endpoint for performance metrics, aggregates view performance stats | |
138 | and converts them to general metric row |
|
138 | and converts them to general metric row | |
139 | """ |
|
139 | """ | |
140 | application = request.context.resource |
|
140 | application = request.context.resource | |
141 | if request.method.upper() == 'OPTIONS': |
|
141 | if request.method.upper() == 'OPTIONS': | |
142 | return check_cors(request, application) |
|
142 | return check_cors(request, application) | |
143 | else: |
|
143 | else: | |
144 | check_cors(request, application, should_return=False) |
|
144 | check_cors(request, application, should_return=False) | |
145 |
|
145 | |||
146 | params = dict(request.params.copy()) |
|
146 | params = dict(request.params.copy()) | |
147 | proto_version = parse_proto(params.get('protocol_version', '')) |
|
147 | proto_version = parse_proto(params.get('protocol_version', '')) | |
148 |
|
148 | |||
149 | payload = request.unsafe_json_body |
|
149 | payload = request.unsafe_json_body | |
150 | schema = MetricsListSchema() |
|
150 | schema = MetricsListSchema() | |
151 | dataset = schema.deserialize(payload) |
|
151 | dataset = schema.deserialize(payload) | |
152 |
|
152 | |||
153 | rate_limiting(request, application, 'per_application_metrics_rate_limit', |
|
153 | rate_limiting(request, application, 'per_application_metrics_rate_limit', | |
154 | len(dataset)) |
|
154 | len(dataset)) | |
155 |
|
155 | |||
156 | # looping report data |
|
156 | # looping report data | |
157 | metrics = {} |
|
157 | metrics = {} | |
158 | for metric in dataset: |
|
158 | for metric in dataset: | |
159 | server_name = metric.get('server', '').lower() or 'unknown' |
|
159 | server_name = metric.get('server', '').lower() or 'unknown' | |
160 | start_interval = convert_date(metric['timestamp']) |
|
160 | start_interval = convert_date(metric['timestamp']) | |
161 | start_interval = start_interval.replace(second=0, microsecond=0) |
|
161 | start_interval = start_interval.replace(second=0, microsecond=0) | |
162 |
|
162 | |||
163 | for view_name, view_metrics in metric['metrics']: |
|
163 | for view_name, view_metrics in metric['metrics']: | |
164 | key = '%s%s%s' % (metric['server'], start_interval, view_name) |
|
164 | key = '%s%s%s' % (metric['server'], start_interval, view_name) | |
165 | if start_interval not in metrics: |
|
165 | if start_interval not in metrics: | |
166 | metrics[key] = {"requests": 0, "main": 0, "sql": 0, |
|
166 | metrics[key] = {"requests": 0, "main": 0, "sql": 0, | |
167 | "nosql": 0, "remote": 0, "tmpl": 0, |
|
167 | "nosql": 0, "remote": 0, "tmpl": 0, | |
168 | "custom": 0, 'sql_calls': 0, |
|
168 | "custom": 0, 'sql_calls': 0, | |
169 | 'nosql_calls': 0, |
|
169 | 'nosql_calls': 0, | |
170 | 'remote_calls': 0, 'tmpl_calls': 0, |
|
170 | 'remote_calls': 0, 'tmpl_calls': 0, | |
171 | 'custom_calls': 0, |
|
171 | 'custom_calls': 0, | |
172 | "start_interval": start_interval, |
|
172 | "start_interval": start_interval, | |
173 | "server_name": server_name, |
|
173 | "server_name": server_name, | |
174 | "view_name": view_name |
|
174 | "view_name": view_name | |
175 | } |
|
175 | } | |
176 | metrics[key]["requests"] += int(view_metrics['requests']) |
|
176 | metrics[key]["requests"] += int(view_metrics['requests']) | |
177 | metrics[key]["main"] += round(view_metrics['main'], 5) |
|
177 | metrics[key]["main"] += round(view_metrics['main'], 5) | |
178 | metrics[key]["sql"] += round(view_metrics['sql'], 5) |
|
178 | metrics[key]["sql"] += round(view_metrics['sql'], 5) | |
179 | metrics[key]["nosql"] += round(view_metrics['nosql'], 5) |
|
179 | metrics[key]["nosql"] += round(view_metrics['nosql'], 5) | |
180 | metrics[key]["remote"] += round(view_metrics['remote'], 5) |
|
180 | metrics[key]["remote"] += round(view_metrics['remote'], 5) | |
181 | metrics[key]["tmpl"] += round(view_metrics['tmpl'], 5) |
|
181 | metrics[key]["tmpl"] += round(view_metrics['tmpl'], 5) | |
182 | metrics[key]["custom"] += round(view_metrics.get('custom', 0.0), |
|
182 | metrics[key]["custom"] += round(view_metrics.get('custom', 0.0), | |
183 | 5) |
|
183 | 5) | |
184 | metrics[key]["sql_calls"] += int( |
|
184 | metrics[key]["sql_calls"] += int( | |
185 | view_metrics.get('sql_calls', 0)) |
|
185 | view_metrics.get('sql_calls', 0)) | |
186 | metrics[key]["nosql_calls"] += int( |
|
186 | metrics[key]["nosql_calls"] += int( | |
187 | view_metrics.get('nosql_calls', 0)) |
|
187 | view_metrics.get('nosql_calls', 0)) | |
188 | metrics[key]["remote_calls"] += int( |
|
188 | metrics[key]["remote_calls"] += int( | |
189 | view_metrics.get('remote_calls', 0)) |
|
189 | view_metrics.get('remote_calls', 0)) | |
190 | metrics[key]["tmpl_calls"] += int( |
|
190 | metrics[key]["tmpl_calls"] += int( | |
191 | view_metrics.get('tmpl_calls', 0)) |
|
191 | view_metrics.get('tmpl_calls', 0)) | |
192 | metrics[key]["custom_calls"] += int( |
|
192 | metrics[key]["custom_calls"] += int( | |
193 | view_metrics.get('custom_calls', 0)) |
|
193 | view_metrics.get('custom_calls', 0)) | |
194 |
|
194 | |||
195 | if not metrics[key]["requests"]: |
|
195 | if not metrics[key]["requests"]: | |
196 | # fix this here because validator can't |
|
196 | # fix this here because validator can't | |
197 | metrics[key]["requests"] = 1 |
|
197 | metrics[key]["requests"] = 1 | |
198 | # metrics dict is being built to minimize |
|
198 | # metrics dict is being built to minimize | |
199 | # the amount of queries used |
|
199 | # the amount of queries used | |
200 | # in case we get multiple rows from same minute |
|
200 | # in case we get multiple rows from same minute | |
201 |
|
201 | |||
202 | normalized_metrics = [] |
|
202 | normalized_metrics = [] | |
203 | for metric in metrics.values(): |
|
203 | for metric in metrics.values(): | |
204 | new_metric = { |
|
204 | new_metric = { | |
205 | 'namespace': 'appenlight.request_metric', |
|
205 | 'namespace': 'appenlight.request_metric', | |
206 | 'timestamp': metric.pop('start_interval'), |
|
206 | 'timestamp': metric.pop('start_interval'), | |
207 | 'server_name': metric['server_name'], |
|
207 | 'server_name': metric['server_name'], | |
208 | 'tags': list(metric.items()) |
|
208 | 'tags': list(metric.items()) | |
209 | } |
|
209 | } | |
210 | normalized_metrics.append(new_metric) |
|
210 | normalized_metrics.append(new_metric) | |
211 |
|
211 | |||
212 | tasks.add_metrics.delay(application.resource_id, params, |
|
212 | tasks.add_metrics.delay(application.resource_id, params, | |
213 | normalized_metrics, proto_version) |
|
213 | normalized_metrics, proto_version) | |
214 |
|
214 | |||
215 | log.info('REQUEST METRICS call {} {} client:{}'.format( |
|
215 | log.info('REQUEST METRICS call {} {} client:{}'.format( | |
216 | application.resource_name, proto_version, |
|
216 | application.resource_name, proto_version, | |
217 | request.headers.get('user_agent'))) |
|
217 | request.headers.get('user_agent'))) | |
218 | return 'OK: request metrics accepted' |
|
218 | return 'OK: request metrics accepted' | |
219 |
|
219 | |||
220 |
|
220 | |||
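
The view above folds per-view timings into one general metric row per (server, minute, view). Roughly, an incoming item and the row handed to `tasks.add_metrics` look like this (values hypothetical, field names as in the code above):

    import datetime

    incoming = {
        'server': 'web01',
        'timestamp': '2016-05-12T10:00:23',
        'metrics': [
            ['home_view', {'requests': 2, 'main': 0.8, 'sql': 0.2, 'nosql': 0,
                           'remote': 0, 'tmpl': 0.1, 'sql_calls': 4}],
        ],
    }

    # after aggregation the normalized row carries the counters as tag pairs
    # (abridged; the real row also includes nosql/remote/tmpl/custom counters
    # plus view_name and server_name among the tags)
    normalized = {
        'namespace': 'appenlight.request_metric',
        'timestamp': datetime.datetime(2016, 5, 12, 10, 0),  # seconds/microseconds zeroed
        'server_name': 'web01',
        'tags': [('requests', 2), ('main', 0.8), ('sql', 0.2), ('sql_calls', 4)],
    }
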
221 | @view_config(route_name='api_general_metrics', renderer='string', |
|
221 | @view_config(route_name='api_general_metrics', renderer='string', | |
222 | permission='create', require_csrf=False) |
|
222 | permission='create', require_csrf=False) | |
223 | @view_config(route_name='api_general_metric', renderer='string', |
|
223 | @view_config(route_name='api_general_metric', renderer='string', | |
224 | permission='create', require_csrf=False) |
|
224 | permission='create', require_csrf=False) | |
225 | def general_metrics_create(request): |
|
225 | def general_metrics_create(request): | |
226 | """ |
|
226 | """ | |
227 | Endpoint for general metrics aggregation |
|
227 | Endpoint for general metrics aggregation | |
228 | """ |
|
228 | """ | |
229 | application = request.context.resource |
|
229 | application = request.context.resource | |
230 | if request.method.upper() == 'OPTIONS': |
|
230 | if request.method.upper() == 'OPTIONS': | |
231 | return check_cors(request, application) |
|
231 | return check_cors(request, application) | |
232 | else: |
|
232 | else: | |
233 | check_cors(request, application, should_return=False) |
|
233 | check_cors(request, application, should_return=False) | |
234 |
|
234 | |||
235 | params = dict(request.params.copy()) |
|
235 | params = dict(request.params.copy()) | |
236 | proto_version = parse_proto(params.get('protocol_version', '')) |
|
236 | proto_version = parse_proto(params.get('protocol_version', '')) | |
237 | payload = request.unsafe_json_body |
|
237 | payload = request.unsafe_json_body | |
238 | sequence_accepted = request.matched_route.name == 'api_general_metrics' |
|
238 | sequence_accepted = request.matched_route.name == 'api_general_metrics' | |
239 | if sequence_accepted: |
|
239 | if sequence_accepted: | |
240 | if application.allow_permanent_storage: |
|
240 | if application.allow_permanent_storage: | |
241 | schema = GeneralMetricsPermanentListSchema().bind( |
|
241 | schema = GeneralMetricsPermanentListSchema().bind( | |
242 | utcnow=datetime.datetime.utcnow()) |
|
242 | utcnow=datetime.datetime.utcnow()) | |
243 | else: |
|
243 | else: | |
244 | schema = GeneralMetricsListSchema().bind( |
|
244 | schema = GeneralMetricsListSchema().bind( | |
245 | utcnow=datetime.datetime.utcnow()) |
|
245 | utcnow=datetime.datetime.utcnow()) | |
246 | else: |
|
246 | else: | |
247 | if application.allow_permanent_storage: |
|
247 | if application.allow_permanent_storage: | |
248 | schema = GeneralMetricPermanentSchema().bind( |
|
248 | schema = GeneralMetricPermanentSchema().bind( | |
249 | utcnow=datetime.datetime.utcnow()) |
|
249 | utcnow=datetime.datetime.utcnow()) | |
250 | else: |
|
250 | else: | |
251 | schema = GeneralMetricSchema().bind( |
|
251 | schema = GeneralMetricSchema().bind( | |
252 | utcnow=datetime.datetime.utcnow()) |
|
252 | utcnow=datetime.datetime.utcnow()) | |
253 |
|
253 | |||
254 | deserialized_metrics = schema.deserialize(payload) |
|
254 | deserialized_metrics = schema.deserialize(payload) | |
255 | if sequence_accepted is False: |
|
255 | if sequence_accepted is False: | |
256 | deserialized_metrics = [deserialized_metrics] |
|
256 | deserialized_metrics = [deserialized_metrics] | |
257 |
|
257 | |||
258 | rate_limiting(request, application, 'per_application_metrics_rate_limit', |
|
258 | rate_limiting(request, application, 'per_application_metrics_rate_limit', | |
259 | len(deserialized_metrics)) |
|
259 | len(deserialized_metrics)) | |
260 |
|
260 | |||
261 | tasks.add_metrics.delay(application.resource_id, params, |
|
261 | tasks.add_metrics.delay(application.resource_id, params, | |
262 | deserialized_metrics, proto_version) |
|
262 | deserialized_metrics, proto_version) | |
263 |
|
263 | |||
264 | log.info('METRICS call {} {} client:{}'.format( |
|
264 | log.info('METRICS call {} {} client:{}'.format( | |
265 | application.resource_name, proto_version, |
|
265 | application.resource_name, proto_version, | |
266 | request.headers.get('user_agent'))) |
|
266 | request.headers.get('user_agent'))) | |
267 | return 'OK: Metrics accepted' |
|
267 | return 'OK: Metrics accepted' | |
268 |
|
268 | |||
269 |
|
269 | |||
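
For the general metrics endpoint the client ships rows already in that normalized shape. A hypothetical single-item payload is sketched below; field names other than `timestamp` and `tags` are inferred from the request-metric normalization above, and the exact tag structure accepted by TagSchemaList is not shown in this diff:

    payload = [
        {
            'namespace': 'myapp.queue',
            'timestamp': '2016-05-12T10:00:00',
            'server_name': 'worker01',
            'tags': [['queue_size', 42], ['latency', 0.37]],
        },
    ]
    # posted as JSON to the api_general_metrics route; a missing timestamp falls
    # back to the bound utcnow value, while missing tags fail validation because
    # TagSchemaList is declared with missing=colander.required
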
270 | @view_config(route_name='api_reports', renderer='string', permission='create', |
|
270 | @view_config(route_name='api_reports', renderer='string', permission='create', | |
271 | require_csrf=False) |
|
271 | require_csrf=False) | |
272 | @view_config(route_name='api_slow_reports', renderer='string', |
|
272 | @view_config(route_name='api_slow_reports', renderer='string', | |
273 | permission='create', require_csrf=False) |
|
273 | permission='create', require_csrf=False) | |
274 | @view_config(route_name='api_report', renderer='string', permission='create', |
|
274 | @view_config(route_name='api_report', renderer='string', permission='create', | |
275 | require_csrf=False) |
|
275 | require_csrf=False) | |
276 | def reports_create(request): |
|
276 | def reports_create(request): | |
277 | """ |
|
277 | """ | |
278 | Endpoint for exception and slowness reports |
|
278 | Endpoint for exception and slowness reports | |
279 | """ |
|
279 | """ | |
280 | # route_url('reports') |
|
280 | # route_url('reports') | |
281 | application = request.context.resource |
|
281 | application = request.context.resource | |
282 | if request.method.upper() == 'OPTIONS': |
|
282 | if request.method.upper() == 'OPTIONS': | |
283 | return check_cors(request, application) |
|
283 | return check_cors(request, application) | |
284 | else: |
|
284 | else: | |
285 | check_cors(request, application, should_return=False) |
|
285 | check_cors(request, application, should_return=False) | |
286 | params = dict(request.params.copy()) |
|
286 | params = dict(request.params.copy()) | |
287 | proto_version = parse_proto(params.get('protocol_version', '')) |
|
287 | proto_version = parse_proto(params.get('protocol_version', '')) | |
288 | payload = request.unsafe_json_body |
|
288 | payload = request.unsafe_json_body | |
289 | sequence_accepted = request.matched_route.name == 'api_reports' |
|
289 | sequence_accepted = request.matched_route.name == 'api_reports' | |
290 |
|
290 | |||
291 | if sequence_accepted: |
|
291 | if sequence_accepted: | |
292 | schema = ReportListSchema_0_5().bind( |
|
292 | schema = ReportListSchema_0_5().bind( | |
293 | utcnow=datetime.datetime.utcnow()) |
|
293 | utcnow=datetime.datetime.utcnow()) | |
294 | else: |
|
294 | else: | |
295 | schema = ReportSchema_0_5().bind( |
|
295 | schema = ReportSchema_0_5().bind( | |
296 | utcnow=datetime.datetime.utcnow()) |
|
296 | utcnow=datetime.datetime.utcnow()) | |
297 |
|
297 | |||
298 | deserialized_reports = schema.deserialize(payload) |
|
298 | deserialized_reports = schema.deserialize(payload) | |
299 | if sequence_accepted is False: |
|
299 | if sequence_accepted is False: | |
300 | deserialized_reports = [deserialized_reports] |
|
300 | deserialized_reports = [deserialized_reports] | |
301 | if deserialized_reports: |
|
301 | if deserialized_reports: | |
302 | rate_limiting(request, application, |
|
302 | rate_limiting(request, application, | |
303 | 'per_application_reports_rate_limit', |
|
303 | 'per_application_reports_rate_limit', | |
304 | len(deserialized_reports)) |
|
304 | len(deserialized_reports)) | |
305 |
|
305 | |||
306 | # pprint.pprint(deserialized_reports) |
|
306 | # pprint.pprint(deserialized_reports) | |
307 | tasks.add_reports.delay(application.resource_id, params, |
|
307 | tasks.add_reports.delay(application.resource_id, params, | |
308 | deserialized_reports) |
|
308 | deserialized_reports) | |
309 | log.info('REPORT call %s, %s client:%s' % ( |
|
309 | log.info('REPORT call %s, %s client:%s' % ( | |
310 | application, |
|
310 | application, | |
311 | proto_version, |
|
311 | proto_version, | |
312 | request.headers.get('user_agent')) |
|
312 | request.headers.get('user_agent')) | |
313 | ) |
|
313 | ) | |
314 | return 'OK: Reports accepted' |
|
314 | return 'OK: Reports accepted' | |
315 |
|
315 | |||
316 |
|
316 | |||
317 | @view_config(route_name='api_airbrake', renderer='string', permission='create', |
|
317 | @view_config(route_name='api_airbrake', renderer='string', permission='create', | |
318 | require_csrf=False) |
|
318 | require_csrf=False) | |
319 | def airbrake_xml_compat(request): |
|
319 | def airbrake_xml_compat(request): | |
320 | """ |
|
320 | """ | |
321 | Airbrake compatible endpoint for XML reports |
|
321 | Airbrake compatible endpoint for XML reports | |
322 | """ |
|
322 | """ | |
323 | application = request.context.resource |
|
323 | application = request.context.resource | |
324 | if request.method.upper() == 'OPTIONS': |
|
324 | if request.method.upper() == 'OPTIONS': | |
325 | return check_cors(request, application) |
|
325 | return check_cors(request, application) | |
326 | else: |
|
326 | else: | |
327 | check_cors(request, application, should_return=False) |
|
327 | check_cors(request, application, should_return=False) | |
328 |
|
328 | |||
329 | params = dict(request.params.copy()) |
|
329 | params = dict(request.params.copy()) | |
330 |
|
330 | |||
331 | error_dict = parse_airbrake_xml(request) |
|
331 | error_dict = parse_airbrake_xml(request) | |
332 | schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow()) |
|
332 | schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow()) | |
333 | deserialized_reports = schema.deserialize([error_dict]) |
|
333 | deserialized_reports = schema.deserialize([error_dict]) | |
334 | rate_limiting(request, application, 'per_application_reports_rate_limit', |
|
334 | rate_limiting(request, application, 'per_application_reports_rate_limit', | |
335 | len(deserialized_reports)) |
|
335 | len(deserialized_reports)) | |
336 |
|
336 | |||
337 | tasks.add_reports.delay(application.resource_id, params, |
|
337 | tasks.add_reports.delay(application.resource_id, params, | |
338 | deserialized_reports) |
|
338 | deserialized_reports) | |
339 | log.info('%s AIRBRAKE call for application %s, api_ver:%s client:%s' % ( |
|
339 | log.info('%s AIRBRAKE call for application %s, api_ver:%s client:%s' % ( | |
340 | 500, application.resource_name, |
|
340 | 500, application.resource_name, | |
341 | request.params.get('protocol_version', 'unknown'), |
|
341 | request.params.get('protocol_version', 'unknown'), | |
342 | request.headers.get('user_agent')) |
|
342 | request.headers.get('user_agent')) | |
343 | ) |
|
343 | ) | |
344 | return '<notice><id>no-id</id><url>%s</url></notice>' % \ |
|
344 | return '<notice><id>no-id</id><url>%s</url></notice>' % \ | |
345 | request.registry.settings['mailing.app_url'] |
|
345 | request.registry.settings['mailing.app_url'] | |
346 |
|
346 | |||
347 |
|
347 | |||
348 | def decompress_gzip(data): |
|
348 | def decompress_gzip(data): | |
349 | try: |
|
349 | try: | |
350 | fp = io.StringIO(data) |
|
350 | fp = io.StringIO(data) | |
351 | with GzipFile(fileobj=fp) as f: |
|
351 | with GzipFile(fileobj=fp) as f: | |
352 | return f.read() |
|
352 | return f.read() | |
353 | except Exception as exc: |
|
353 | except Exception as exc: | |
354 | raise |
|
354 | raise | |
355 | log.error(exc) |
|
355 | log.error(exc) | |
356 | raise HTTPBadRequest() |
|
356 | raise HTTPBadRequest() | |
357 |
|
357 | |||
358 |
|
358 | |||
359 | def decompress_zlib(data): |
|
359 | def decompress_zlib(data): | |
360 | try: |
|
360 | try: | |
361 | return zlib.decompress(data) |
|
361 | return zlib.decompress(data) | |
362 | except Exception as exc: |
|
362 | except Exception as exc: | |
363 | raise |
|
363 | raise | |
364 | log.error(exc) |
|
364 | log.error(exc) | |
365 | raise HTTPBadRequest() |
|
365 | raise HTTPBadRequest() | |
366 |
|
366 | |||
367 |
|
367 | |||
368 | def decode_b64(data): |
|
368 | def decode_b64(data): | |
369 | try: |
|
369 | try: | |
370 | return base64.b64decode(data) |
|
370 | return base64.b64decode(data) | |
371 | except Exception as exc: |
|
371 | except Exception as exc: | |
372 | raise |
|
372 | raise | |
373 | log.error(exc) |
|
373 | log.error(exc) | |
374 | raise HTTPBadRequest() |
|
374 | raise HTTPBadRequest() | |
375 |
|
375 | |||
376 |
|
376 | |||
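
The helpers above feed the body-decoding fall-through in `sentry_compat` below: explicit Content-Encoding first, then a base64+zlib attempt for bodies that do not look like plain JSON. A self-contained sketch of that last path (the sample data is made up; real Sentry packets carry event payloads):

    import base64
    import json
    import zlib

    raw_event = json.dumps({'message': 'boom'}).encode('utf8')
    body = base64.b64encode(zlib.compress(raw_event))   # what an older client may POST

    check_char = b'{'
    if not body.startswith(check_char):
        body = zlib.decompress(base64.b64decode(body))  # decode_b64 + decompress_zlib

    print(json.loads(body.decode('utf8')))              # {'message': 'boom'}
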
377 | @view_config(route_name='api_sentry', renderer='string', permission='create', |
|
377 | @view_config(route_name='api_sentry', renderer='string', permission='create', | |
378 | require_csrf=False) |
|
378 | require_csrf=False) | |
379 | @view_config(route_name='api_sentry_slash', renderer='string', |
|
379 | @view_config(route_name='api_sentry_slash', renderer='string', | |
380 | permission='create', require_csrf=False) |
|
380 | permission='create', require_csrf=False) | |
381 | def sentry_compat(request): |
|
381 | def sentry_compat(request): | |
382 | """ |
|
382 | """ | |
383 | Sentry compatible endpoint |
|
383 | Sentry compatible endpoint | |
384 | """ |
|
384 | """ | |
385 | application = request.context.resource |
|
385 | application = request.context.resource | |
386 | if request.method.upper() == 'OPTIONS': |
|
386 | if request.method.upper() == 'OPTIONS': | |
387 | return check_cors(request, application) |
|
387 | return check_cors(request, application) | |
388 | else: |
|
388 | else: | |
389 | check_cors(request, application, should_return=False) |
|
389 | check_cors(request, application, should_return=False) | |
390 |
|
390 | |||
391 | # handle various report encoding |
|
391 | # handle various report encoding | |
392 | content_encoding = request.headers.get('Content-Encoding') |
|
392 | content_encoding = request.headers.get('Content-Encoding') | |
393 | content_type = request.headers.get('Content-Type') |
|
393 | content_type = request.headers.get('Content-Type') | |
394 | if content_encoding == 'gzip': |
|
394 | if content_encoding == 'gzip': | |
395 | body = decompress_gzip(request.body) |
|
395 | body = decompress_gzip(request.body) | |
396 | elif content_encoding == 'deflate': |
|
396 | elif content_encoding == 'deflate': | |
397 | body = decompress_zlib(request.body) |
|
397 | body = decompress_zlib(request.body) | |
398 | else: |
|
398 | else: | |
399 | body = request.body |
|
399 | body = request.body | |
400 | # attempt to fix string before decoding for stupid clients |
|
400 | # attempt to fix string before decoding for stupid clients | |
401 | if content_type == 'application/x-www-form-urlencoded': |
|
401 | if content_type == 'application/x-www-form-urlencoded': | |
402 | body = urllib.parse.unquote(body.decode('utf8')) |
|
402 | body = urllib.parse.unquote(body.decode('utf8')) | |
403 | check_char = '{' if isinstance(body, str) else b'{' |
|
403 | check_char = '{' if isinstance(body, str) else b'{' | |
404 | if not body.startswith(check_char): |
|
404 | if not body.startswith(check_char): | |
405 | try: |
|
405 | try: | |
406 | body = decode_b64(body) |
|
406 | body = decode_b64(body) | |
407 | body = decompress_zlib(body) |
|
407 | body = decompress_zlib(body) | |
408 | except Exception as exc: |
|
408 | except Exception as exc: | |
409 | log.info(exc) |
|
409 | log.info(exc) | |
410 |
|
410 | |||
411 | try: |
|
411 | try: | |
412 | json_body = json.loads(body.decode('utf8')) |
|
412 | json_body = json.loads(body.decode('utf8')) | |
413 | except ValueError: |
|
413 | except ValueError: | |
414 | raise JSONException("Incorrect JSON") |
|
414 | raise JSONException("Incorrect JSON") | |
415 |
|
415 | |||
416 | event, event_type = parse_sentry_event(json_body) |
|
416 | event, event_type = parse_sentry_event(json_body) | |
417 |
|
417 | |||
418 | if event_type == ParsedSentryEventType.LOG: |
|
418 | if event_type == ParsedSentryEventType.LOG: | |
419 | if application.allow_permanent_storage: |
|
419 | if application.allow_permanent_storage: | |
420 | schema = LogSchemaPermanent().bind( |
|
420 | schema = LogSchemaPermanent().bind( | |
421 | utcnow=datetime.datetime.utcnow()) |
|
421 | utcnow=datetime.datetime.utcnow()) | |
422 | else: |
|
422 | else: | |
423 | schema = LogSchema().bind( |
|
423 | schema = LogSchema().bind( | |
424 | utcnow=datetime.datetime.utcnow()) |
|
424 | utcnow=datetime.datetime.utcnow()) | |
425 | deserialized_logs = schema.deserialize(event) |
|
425 | deserialized_logs = schema.deserialize(event) | |
426 | non_pkey_logs = [deserialized_logs] |
|
426 | non_pkey_logs = [deserialized_logs] | |
427 | log.debug('%s non-pkey logs received: %s' % (application, |
|
427 | log.debug('%s non-pkey logs received: %s' % (application, | |
428 | len(non_pkey_logs))) |
|
428 | len(non_pkey_logs))) | |
429 | tasks.add_logs.delay(application.resource_id, {}, non_pkey_logs) |
|
429 | tasks.add_logs.delay(application.resource_id, {}, non_pkey_logs) | |
430 | if event_type == ParsedSentryEventType.ERROR_REPORT: |
|
430 | if event_type == ParsedSentryEventType.ERROR_REPORT: | |
431 | schema = ReportSchema_0_5().bind(
|
431 | schema = ReportSchema_0_5().bind( | |
|
432 | utcnow=datetime.datetime.utcnow(), | |||
|
433 | allow_permanent_storage=application.allow_permanent_storage) | |||
432 | deserialized_reports = [schema.deserialize(event)] |
|
434 | deserialized_reports = [schema.deserialize(event)] | |
433 | rate_limiting(request, application, |
|
435 | rate_limiting(request, application, | |
434 | 'per_application_reports_rate_limit', |
|
436 | 'per_application_reports_rate_limit', | |
435 | len(deserialized_reports)) |
|
437 | len(deserialized_reports)) | |
436 | tasks.add_reports.delay(application.resource_id, {}, |
|
438 | tasks.add_reports.delay(application.resource_id, {}, | |
437 | deserialized_reports) |
|
439 | deserialized_reports) | |
438 | return 'OK: Events accepted' |
|
440 | return 'OK: Events accepted' |