@@ -1,47 +1,47 @@
# appenlight README

To run the app you need to meet these prerequisites (one way to start them locally is sketched below):

- running elasticsearch (2.3+ tested)
- running postgresql (9.5+ required)
- running redis
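
If you don't already run these services, one quick way to start them locally is Docker. This is a minimal sketch, not part of the official setup; the image tags and the postgres password are illustrative and should be adjusted for your environment:

    docker run -d -p 9200:9200 elasticsearch:2.3
    docker run -d -p 5432:5432 -e POSTGRES_PASSWORD=secret postgres:9.5
    docker run -d -p 6379:6379 redis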

# Setup basics

Set up the basic application database schema:

    appenlight_initialize_db config.ini

Set up the basic elasticsearch schema:

    appenlight-reindex-elasticsearch -c config.ini -t all

Install the appenlight uptime plugin.

# Running

To run the application itself:

    pserve --reload development.ini

To run celery queue processing:

    celery worker -A appenlight.celery -Q "reports,logs,metrics,default" --ini=development.ini

To run the celery beat scheduler:

    celery beat -A appenlight.celery --ini=development.ini

You should also run the `channelstream` websocket server for real-time notifications:

    channelstream -i filename.ini

# Testing

To run the test suite (this looks for testing.ini in the repo root):

    py.test appenlight/tests/tests.py --cov appenlight

WARNING!!!
Some tests will insert data into elasticsearch or redis based on testing.ini.
@@ -1,634 +1,634 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# AppEnlight Enterprise Edition, including its added features, Support
# services, and proprietary license terms, please see
# https://rhodecode.com/licenses/

import bisect
import collections
import math
from datetime import datetime, timedelta

import sqlalchemy as sa
import pyelasticsearch

from celery.utils.log import get_task_logger
from zope.sqlalchemy import mark_changed
from pyramid.threadlocal import get_current_request, get_current_registry
from appenlight.celery import celery
from appenlight.models.report_group import ReportGroup
from appenlight.models import DBSession, Datastores
from appenlight.models.report import Report
from appenlight.models.log import Log
from appenlight.models.metric import Metric
from appenlight.models.event import Event

from appenlight.models.services.application import ApplicationService
from appenlight.models.services.event import EventService
from appenlight.models.services.log import LogService
from appenlight.models.services.report import ReportService
from appenlight.models.services.report_group import ReportGroupService
from appenlight.models.services.user import UserService
from appenlight.models.tag import Tag
from appenlight.lib import print_traceback
from appenlight.lib.utils import parse_proto, in_batches
from appenlight.lib.ext_json import json
from appenlight.lib.redis_keys import REDIS_KEYS
from appenlight.lib.enums import ReportType

log = get_task_logger(__name__)

sample_boundries = list(range(100, 1000, 100)) + \
                   list(range(1000, 10000, 1000)) + \
                   list(range(10000, 100000, 5000))


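# pick_sample decides whether a single report is still stored verbatim once
# its group has grown large: below the first boundary every occurrence is
# kept, afterwards only every Nth one is sampled, and not_found (404-style)
# reports are thinned out ten times more aggressively than other reports.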
def pick_sample(total_occurences, report_type=None):
    every = 1.0
    position = bisect.bisect_left(sample_boundries, total_occurences)
    if position > 0:
        if report_type == ReportType.not_found:
            divide = 10.0
        else:
            divide = 100.0
        every = sample_boundries[position - 1] / divide
    return total_occurences % every == 0


@celery.task(queue="default", default_retry_delay=1, max_retries=2)
def test_exception_task():
    log.error('test celery log', extra={'location': 'celery'})
    log.warning('test celery log', extra={'location': 'celery'})
    raise Exception('Celery exception test')


@celery.task(queue="default", default_retry_delay=1, max_retries=2)
def test_retry_exception_task():
    try:
        import time

        time.sleep(1.3)
        log.error('test retry celery log', extra={'location': 'celery'})
        log.warning('test retry celery log', extra={'location': 'celery'})
        raise Exception('Celery exception test')
    except Exception as exc:
        test_retry_exception_task.retry(exc=exc)


93 | @celery.task(queue="reports", default_retry_delay=600, max_retries=144) |
|
93 | @celery.task(queue="reports", default_retry_delay=600, max_retries=144) | |
94 | def add_reports(resource_id, params, dataset, environ=None, **kwargs): |
|
94 | def add_reports(resource_id, params, dataset, environ=None, **kwargs): | |
95 | proto_version = parse_proto(params.get('protocol_version', '')) |
|
95 | proto_version = parse_proto(params.get('protocol_version', '')) | |
96 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
96 | current_time = datetime.utcnow().replace(second=0, microsecond=0) | |
97 | try: |
|
97 | try: | |
98 | # we will store solr docs here for single insert |
|
98 | # we will store solr docs here for single insert | |
99 | es_report_docs = {} |
|
99 | es_report_docs = {} | |
100 | es_report_group_docs = {} |
|
100 | es_report_group_docs = {} | |
101 | resource = ApplicationService.by_id(resource_id) |
|
101 | resource = ApplicationService.by_id(resource_id) | |
102 |
|
102 | |||
103 | tags = [] |
|
103 | tags = [] | |
104 | es_slow_calls_docs = {} |
|
104 | es_slow_calls_docs = {} | |
105 | es_reports_stats_rows = {} |
|
105 | es_reports_stats_rows = {} | |
106 | for report_data in dataset: |
|
106 | for report_data in dataset: | |
107 | # build report details for later |
|
107 | # build report details for later | |
108 | added_details = 0 |
|
108 | added_details = 0 | |
109 | report = Report() |
|
109 | report = Report() | |
110 | report.set_data(report_data, resource, proto_version) |
|
110 | report.set_data(report_data, resource, proto_version) | |
111 | report._skip_ft_index = True |
|
111 | report._skip_ft_index = True | |
112 |
|
112 | |||
113 | report_group = ReportGroupService.by_hash_and_resource( |
|
113 | report_group = ReportGroupService.by_hash_and_resource( | |
114 | report.resource_id, |
|
114 | report.resource_id, | |
115 | report.grouping_hash |
|
115 | report.grouping_hash | |
116 | ) |
|
116 | ) | |
117 | occurences = report_data.get('occurences', 1) |
|
117 | occurences = report_data.get('occurences', 1) | |
118 | if not report_group: |
|
118 | if not report_group: | |
119 | # total reports will be +1 moment later |
|
119 | # total reports will be +1 moment later | |
120 | report_group = ReportGroup(grouping_hash=report.grouping_hash, |
|
120 | report_group = ReportGroup(grouping_hash=report.grouping_hash, | |
121 | occurences=0, total_reports=0, |
|
121 | occurences=0, total_reports=0, | |
122 | last_report=0, |
|
122 | last_report=0, | |
123 | priority=report.priority, |
|
123 | priority=report.priority, | |
124 | error=report.error, |
|
124 | error=report.error, | |
125 | first_timestamp=report.start_time) |
|
125 | first_timestamp=report.start_time) | |
126 | report_group._skip_ft_index = True |
|
126 | report_group._skip_ft_index = True | |
127 | report_group.report_type = report.report_type |
|
127 | report_group.report_type = report.report_type | |
128 | report.report_group_time = report_group.first_timestamp |
|
128 | report.report_group_time = report_group.first_timestamp | |
129 | add_sample = pick_sample(report_group.occurences, |
|
129 | add_sample = pick_sample(report_group.occurences, | |
130 | report_type=report_group.report_type) |
|
130 | report_type=report_group.report_type) | |
131 | if add_sample: |
|
131 | if add_sample: | |
132 | resource.report_groups.append(report_group) |
|
132 | resource.report_groups.append(report_group) | |
133 | report_group.reports.append(report) |
|
133 | report_group.reports.append(report) | |
134 | added_details += 1 |
|
134 | added_details += 1 | |
135 | DBSession.flush() |
|
135 | DBSession.flush() | |
136 | if report.partition_id not in es_report_docs: |
|
136 | if report.partition_id not in es_report_docs: | |
137 | es_report_docs[report.partition_id] = [] |
|
137 | es_report_docs[report.partition_id] = [] | |
138 | es_report_docs[report.partition_id].append(report.es_doc()) |
|
138 | es_report_docs[report.partition_id].append(report.es_doc()) | |
139 | tags.extend(list(report.tags.items())) |
|
139 | tags.extend(list(report.tags.items())) | |
140 | slow_calls = report.add_slow_calls(report_data, report_group) |
|
140 | slow_calls = report.add_slow_calls(report_data, report_group) | |
141 | DBSession.flush() |
|
141 | DBSession.flush() | |
142 | for s_call in slow_calls: |
|
142 | for s_call in slow_calls: | |
143 | if s_call.partition_id not in es_slow_calls_docs: |
|
143 | if s_call.partition_id not in es_slow_calls_docs: | |
144 | es_slow_calls_docs[s_call.partition_id] = [] |
|
144 | es_slow_calls_docs[s_call.partition_id] = [] | |
145 | es_slow_calls_docs[s_call.partition_id].append( |
|
145 | es_slow_calls_docs[s_call.partition_id].append( | |
146 | s_call.es_doc()) |
|
146 | s_call.es_doc()) | |
147 | # try generating new stat rows if needed |
|
147 | # try generating new stat rows if needed | |
148 | else: |
|
148 | else: | |
149 | # required for postprocessing to not fail later |
|
149 | # required for postprocessing to not fail later | |
150 | report.report_group = report_group |
|
150 | report.report_group = report_group | |
151 |
|
151 | |||
152 | stat_row = ReportService.generate_stat_rows( |
|
152 | stat_row = ReportService.generate_stat_rows( | |
153 | report, resource, report_group) |
|
153 | report, resource, report_group) | |
154 | if stat_row.partition_id not in es_reports_stats_rows: |
|
154 | if stat_row.partition_id not in es_reports_stats_rows: | |
155 | es_reports_stats_rows[stat_row.partition_id] = [] |
|
155 | es_reports_stats_rows[stat_row.partition_id] = [] | |
156 | es_reports_stats_rows[stat_row.partition_id].append( |
|
156 | es_reports_stats_rows[stat_row.partition_id].append( | |
157 | stat_row.es_doc()) |
|
157 | stat_row.es_doc()) | |
158 |
|
158 | |||
            # see if we should mark 10th occurence of report
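            # e.g. a group at 18 occurrences receiving 3 more crosses 20:
            # floor(18 / 10) == 1 != floor(21 / 10) == 2, so the 10-boundary
            # flag fires; the 100-boundary check works the same way.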
            last_occurences_10 = int(math.floor(report_group.occurences / 10))
            curr_occurences_10 = int(math.floor(
                (report_group.occurences + report.occurences) / 10))
            last_occurences_100 = int(
                math.floor(report_group.occurences / 100))
            curr_occurences_100 = int(math.floor(
                (report_group.occurences + report.occurences) / 100))
            notify_occurences_10 = last_occurences_10 != curr_occurences_10
            notify_occurences_100 = last_occurences_100 != curr_occurences_100
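            # NOTE: assigning `ReportGroup.<column> + value` (class attribute,
            # not instance attribute) produces a SQL expression, so the
            # increments below run server-side in the UPDATE statement rather
            # than as a racy read-modify-write in Python.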
            report_group.occurences = ReportGroup.occurences + occurences
            report_group.last_timestamp = report.start_time
            report_group.summed_duration = ReportGroup.summed_duration + report.duration
            summed_duration = ReportGroup.summed_duration + report.duration
            summed_occurences = ReportGroup.occurences + occurences
            report_group.average_duration = summed_duration / summed_occurences
            report_group.run_postprocessing(report)
            if added_details:
                report_group.total_reports = ReportGroup.total_reports + 1
                report_group.last_report = report.id
            report_group.set_notification_info(notify_10=notify_occurences_10,
                                               notify_100=notify_occurences_100)
            DBSession.flush()
            report_group.get_report().notify_channel(report_group)
            if report_group.partition_id not in es_report_group_docs:
                es_report_group_docs[report_group.partition_id] = []
            es_report_group_docs[report_group.partition_id].append(
                report_group.es_doc())

            action = 'REPORT'
            log_msg = '%s: %s %s, client: %s, proto: %s' % (
                action,
                report_data.get('http_status', 'unknown'),
                str(resource),
                report_data.get('client'),
                proto_version)
            log.info(log_msg)
        total_reports = len(dataset)
        key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time)
        Datastores.redis.incr(key, total_reports)
        Datastores.redis.expire(key, 3600 * 24)
        key = REDIS_KEYS['counters']['reports_per_minute_per_app'].format(
            resource_id, current_time)
        Datastores.redis.incr(key, total_reports)
        Datastores.redis.expire(key, 3600 * 24)

        add_reports_es(es_report_group_docs, es_report_docs)
        add_reports_slow_calls_es(es_slow_calls_docs)
        add_reports_stats_rows_es(es_reports_stats_rows)
        return True
    except Exception as exc:
        print_traceback(log)
        add_reports.retry(exc=exc)


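# Bulk-index helpers: the dict keys are ES index partitions; report documents
# are indexed as children of their report_group via the _id/_parent fields.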
214 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
214 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
215 | def add_reports_es(report_group_docs, report_docs): |
|
215 | def add_reports_es(report_group_docs, report_docs): | |
216 | for k, v in report_group_docs.items(): |
|
216 | for k, v in report_group_docs.items(): | |
217 | Datastores.es.bulk_index(k, 'report_group', v, id_field="_id") |
|
217 | Datastores.es.bulk_index(k, 'report_group', v, id_field="_id") | |
218 | for k, v in report_docs.items(): |
|
218 | for k, v in report_docs.items(): | |
219 | Datastores.es.bulk_index(k, 'report', v, id_field="_id", |
|
219 | Datastores.es.bulk_index(k, 'report', v, id_field="_id", | |
220 | parent_field='_parent') |
|
220 | parent_field='_parent') | |
221 |
|
221 | |||
222 |
|
222 | |||
223 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
223 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
224 | def add_reports_slow_calls_es(es_docs): |
|
224 | def add_reports_slow_calls_es(es_docs): | |
225 | for k, v in es_docs.items(): |
|
225 | for k, v in es_docs.items(): | |
226 | Datastores.es.bulk_index(k, 'log', v) |
|
226 | Datastores.es.bulk_index(k, 'log', v) | |
227 |
|
227 | |||
228 |
|
228 | |||
229 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
229 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
230 | def add_reports_stats_rows_es(es_docs): |
|
230 | def add_reports_stats_rows_es(es_docs): | |
231 | for k, v in es_docs.items(): |
|
231 | for k, v in es_docs.items(): | |
232 | Datastores.es.bulk_index(k, 'log', v) |
|
232 | Datastores.es.bulk_index(k, 'log', v) | |
233 |
|
233 | |||
234 |
|
234 | |||
235 | @celery.task(queue="logs", default_retry_delay=600, max_retries=144) |
|
235 | @celery.task(queue="logs", default_retry_delay=600, max_retries=144) | |
236 | def add_logs(resource_id, request, dataset, environ=None, **kwargs): |
|
236 | def add_logs(resource_id, request, dataset, environ=None, **kwargs): | |
237 | proto_version = request.get('protocol_version') |
|
237 | proto_version = request.get('protocol_version') | |
238 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
238 | current_time = datetime.utcnow().replace(second=0, microsecond=0) | |
239 |
|
239 | |||
240 | try: |
|
240 | try: | |
241 | es_docs = collections.defaultdict(list) |
|
241 | es_docs = collections.defaultdict(list) | |
242 | application = ApplicationService.by_id(resource_id) |
|
242 | application = ApplicationService.by_id(resource_id) | |
243 | ns_pairs = [] |
|
243 | ns_pairs = [] | |
244 | for entry in dataset: |
|
244 | for entry in dataset: | |
245 | # gather pk and ns so we can remove older versions of row later |
|
245 | # gather pk and ns so we can remove older versions of row later | |
246 | if entry['primary_key'] is not None: |
|
246 | if entry['primary_key'] is not None: | |
247 | ns_pairs.append({"pk": entry['primary_key'], |
|
247 | ns_pairs.append({"pk": entry['primary_key'], | |
248 | "ns": entry['namespace']}) |
|
248 | "ns": entry['namespace']}) | |
249 | log_entry = Log() |
|
249 | log_entry = Log() | |
250 | log_entry.set_data(entry, resource=application) |
|
250 | log_entry.set_data(entry, resource=application) | |
251 | log_entry._skip_ft_index = True |
|
251 | log_entry._skip_ft_index = True | |
252 | application.logs.append(log_entry) |
|
252 | application.logs.append(log_entry) | |
253 | DBSession.flush() |
|
253 | DBSession.flush() | |
254 | # insert non pk rows first |
|
254 | # insert non pk rows first | |
255 | if entry['primary_key'] is None: |
|
255 | if entry['primary_key'] is None: | |
256 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) |
|
256 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) | |
257 |
|
257 | |||
        # 2nd pass to delete all log entries from db for the same pk/ns pair
        if ns_pairs:
            ids_to_delete = []
            es_docs = collections.defaultdict(list)
            es_docs_to_delete = collections.defaultdict(list)
            found_pkey_logs = LogService.query_by_primary_key_and_namespace(
                list_of_pairs=ns_pairs)
            log_dict = {}
            for log_entry in found_pkey_logs:
                log_key = (log_entry.primary_key, log_entry.namespace)
                if log_key not in log_dict:
                    log_dict[log_key] = []
                log_dict[log_key].append(log_entry)

            for ns, entry_list in log_dict.items():
                entry_list = sorted(entry_list, key=lambda x: x.timestamp)
                # newest row needs to be indexed in es
                log_entry = entry_list[-1]
                # delete everything from pg and ES, leave the last row in pg
                for e in entry_list[:-1]:
                    ids_to_delete.append(e.log_id)
                    es_docs_to_delete[e.partition_id].append(e.delete_hash)

                es_docs_to_delete[log_entry.partition_id].append(
                    log_entry.delete_hash)

                es_docs[log_entry.partition_id].append(log_entry.es_doc())

            if ids_to_delete:
                query = DBSession.query(Log).filter(
                    Log.log_id.in_(ids_to_delete))
                query.delete(synchronize_session=False)
            if es_docs_to_delete:
                # batch this to avoid problems with default ES bulk limits
                for es_index in es_docs_to_delete.keys():
                    for batch in in_batches(es_docs_to_delete[es_index], 20):
                        query = {'terms': {'delete_hash': batch}}

                        try:
                            Datastores.es.delete_by_query(
                                es_index, 'log', query)
                        except pyelasticsearch.ElasticHttpNotFoundError as exc:
                            log.error(exc)

        total_logs = len(dataset)

        log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % (
            str(application),
            total_logs,
            proto_version)
        log.info(log_msg)
        # mark_changed(session)
        key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time)
        Datastores.redis.incr(key, total_logs)
        Datastores.redis.expire(key, 3600 * 24)
        key = REDIS_KEYS['counters']['logs_per_minute_per_app'].format(
            resource_id, current_time)
        Datastores.redis.incr(key, total_logs)
        Datastores.redis.expire(key, 3600 * 24)
        add_logs_es(es_docs)
        return True
    except Exception as exc:
        print_traceback(log)
        add_logs.retry(exc=exc)


324 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
324 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
325 | def add_logs_es(es_docs): |
|
325 | def add_logs_es(es_docs): | |
326 | for k, v in es_docs.items(): |
|
326 | for k, v in es_docs.items(): | |
327 | Datastores.es.bulk_index(k, 'log', v) |
|
327 | Datastores.es.bulk_index(k, 'log', v) | |
328 |
|
328 | |||
329 |
|
329 | |||
330 | @celery.task(queue="metrics", default_retry_delay=600, max_retries=144) |
|
330 | @celery.task(queue="metrics", default_retry_delay=600, max_retries=144) | |
331 | def add_metrics(resource_id, request, dataset, proto_version): |
|
331 | def add_metrics(resource_id, request, dataset, proto_version): | |
332 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
332 | current_time = datetime.utcnow().replace(second=0, microsecond=0) | |
333 | try: |
|
333 | try: | |
334 | application = ApplicationService.by_id_cached()(resource_id) |
|
334 | application = ApplicationService.by_id_cached()(resource_id) | |
335 | application = DBSession.merge(application, load=False) |
|
335 | application = DBSession.merge(application, load=False) | |
336 | es_docs = [] |
|
336 | es_docs = [] | |
337 | rows = [] |
|
337 | rows = [] | |
338 | for metric in dataset: |
|
338 | for metric in dataset: | |
339 | tags = dict(metric['tags']) |
|
339 | tags = dict(metric['tags']) | |
340 | server_n = tags.get('server_name', metric['server_name']).lower() |
|
340 | server_n = tags.get('server_name', metric['server_name']).lower() | |
341 | tags['server_name'] = server_n or 'unknown' |
|
341 | tags['server_name'] = server_n or 'unknown' | |
342 | new_metric = Metric( |
|
342 | new_metric = Metric( | |
343 | timestamp=metric['timestamp'], |
|
343 | timestamp=metric['timestamp'], | |
344 | resource_id=application.resource_id, |
|
344 | resource_id=application.resource_id, | |
345 | namespace=metric['namespace'], |
|
345 | namespace=metric['namespace'], | |
346 | tags=tags) |
|
346 | tags=tags) | |
347 | rows.append(new_metric) |
|
347 | rows.append(new_metric) | |
348 | es_docs.append(new_metric.es_doc()) |
|
348 | es_docs.append(new_metric.es_doc()) | |
349 | session = DBSession() |
|
349 | session = DBSession() | |
350 | session.bulk_save_objects(rows) |
|
350 | session.bulk_save_objects(rows) | |
351 | session.flush() |
|
351 | session.flush() | |
352 |
|
352 | |||
353 | action = 'METRICS' |
|
353 | action = 'METRICS' | |
354 | metrics_msg = '%s: %s, metrics: %s, proto:%s' % ( |
|
354 | metrics_msg = '%s: %s, metrics: %s, proto:%s' % ( | |
355 | action, |
|
355 | action, | |
356 | str(application), |
|
356 | str(application), | |
357 | len(dataset), |
|
357 | len(dataset), | |
358 | proto_version |
|
358 | proto_version | |
359 | ) |
|
359 | ) | |
360 | log.info(metrics_msg) |
|
360 | log.info(metrics_msg) | |
361 |
|
361 | |||
362 | mark_changed(session) |
|
362 | mark_changed(session) | |
363 | key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time) |
|
363 | key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time) | |
364 | Datastores.redis.incr(key, len(rows)) |
|
364 | Datastores.redis.incr(key, len(rows)) | |
365 | Datastores.redis.expire(key, 3600 * 24) |
|
365 | Datastores.redis.expire(key, 3600 * 24) | |
366 | key = REDIS_KEYS['counters']['metrics_per_minute_per_app'].format( |
|
366 | key = REDIS_KEYS['counters']['metrics_per_minute_per_app'].format( | |
367 | resource_id, current_time) |
|
367 | resource_id, current_time) | |
368 | Datastores.redis.incr(key, len(rows)) |
|
368 | Datastores.redis.incr(key, len(rows)) | |
369 | Datastores.redis.expire(key, 3600 * 24) |
|
369 | Datastores.redis.expire(key, 3600 * 24) | |
370 | add_metrics_es(es_docs) |
|
370 | add_metrics_es(es_docs) | |
371 | return True |
|
371 | return True | |
372 | except Exception as exc: |
|
372 | except Exception as exc: | |
373 | print_traceback(log) |
|
373 | print_traceback(log) | |
374 | add_metrics.retry(exc=exc) |
|
374 | add_metrics.retry(exc=exc) | |
375 |
|
375 | |||
376 |
|
376 | |||
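# Metric documents go to daily indices named rcae_m_YYYY_MM_DD, derived from
# each document's own timestamp.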
377 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
377 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) | |
378 | def add_metrics_es(es_docs): |
|
378 | def add_metrics_es(es_docs): | |
379 | for doc in es_docs: |
|
379 | for doc in es_docs: | |
380 | partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d') |
|
380 | partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d') | |
381 | Datastores.es.index(partition, 'log', doc) |
|
381 | Datastores.es.index(partition, 'log', doc) | |
382 |
|
382 | |||
383 |
|
383 | |||
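# The notification/alert tasks below consume state left in redis by the
# ingestion tasks: they read the pending report-group sets, delete the keys so
# each batch is handled exactly once, then fan out per-user notifications.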
384 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
384 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) | |
385 | def check_user_report_notifications(resource_id): |
|
385 | def check_user_report_notifications(resource_id): | |
386 | since_when = datetime.utcnow() |
|
386 | since_when = datetime.utcnow() | |
387 | try: |
|
387 | try: | |
388 | request = get_current_request() |
|
388 | request = get_current_request() | |
389 | application = ApplicationService.by_id(resource_id) |
|
389 | application = ApplicationService.by_id(resource_id) | |
390 | if not application: |
|
390 | if not application: | |
391 | return |
|
391 | return | |
392 | error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
392 | error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( | |
393 | ReportType.error, resource_id) |
|
393 | ReportType.error, resource_id) | |
394 | slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
394 | slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( | |
395 | ReportType.slow, resource_id) |
|
395 | ReportType.slow, resource_id) | |
396 | error_group_ids = Datastores.redis.smembers(error_key) |
|
396 | error_group_ids = Datastores.redis.smembers(error_key) | |
397 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
397 | slow_group_ids = Datastores.redis.smembers(slow_key) | |
398 | Datastores.redis.delete(error_key) |
|
398 | Datastores.redis.delete(error_key) | |
399 | Datastores.redis.delete(slow_key) |
|
399 | Datastores.redis.delete(slow_key) | |
400 | err_gids = [int(g_id) for g_id in error_group_ids] |
|
400 | err_gids = [int(g_id) for g_id in error_group_ids] | |
401 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] |
|
401 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] | |
402 | group_ids = err_gids + slow_gids |
|
402 | group_ids = err_gids + slow_gids | |
403 | occurence_dict = {} |
|
403 | occurence_dict = {} | |
404 | for g_id in group_ids: |
|
404 | for g_id in group_ids: | |
405 | key = REDIS_KEYS['counters']['report_group_occurences'].format( |
|
405 | key = REDIS_KEYS['counters']['report_group_occurences'].format( | |
406 | g_id) |
|
406 | g_id) | |
407 | val = Datastores.redis.get(key) |
|
407 | val = Datastores.redis.get(key) | |
408 | Datastores.redis.delete(key) |
|
408 | Datastores.redis.delete(key) | |
409 | if val: |
|
409 | if val: | |
410 | occurence_dict[g_id] = int(val) |
|
410 | occurence_dict[g_id] = int(val) | |
411 | else: |
|
411 | else: | |
412 | occurence_dict[g_id] = 1 |
|
412 | occurence_dict[g_id] = 1 | |
413 | report_groups = ReportGroupService.by_ids(group_ids) |
|
413 | report_groups = ReportGroupService.by_ids(group_ids) | |
414 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
414 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) | |
415 |
|
415 | |||
416 | ApplicationService.check_for_groups_alert( |
|
416 | ApplicationService.check_for_groups_alert( | |
417 | application, 'alert', report_groups=report_groups, |
|
417 | application, 'alert', report_groups=report_groups, | |
418 | occurence_dict=occurence_dict) |
|
418 | occurence_dict=occurence_dict) | |
419 | users = set([p.user for p in application.users_for_perm('view')]) |
|
419 | users = set([p.user for p in application.users_for_perm('view')]) | |
420 | report_groups = report_groups.all() |
|
420 | report_groups = report_groups.all() | |
421 | for user in users: |
|
421 | for user in users: | |
422 | UserService.report_notify(user, request, application, |
|
422 | UserService.report_notify(user, request, application, | |
423 | report_groups=report_groups, |
|
423 | report_groups=report_groups, | |
424 | occurence_dict=occurence_dict) |
|
424 | occurence_dict=occurence_dict) | |
425 | for group in report_groups: |
|
425 | for group in report_groups: | |
426 | # marks report_groups as notified |
|
426 | # marks report_groups as notified | |
427 | if not group.notified: |
|
427 | if not group.notified: | |
428 | group.notified = True |
|
428 | group.notified = True | |
429 | except Exception as exc: |
|
429 | except Exception as exc: | |
430 | print_traceback(log) |
|
430 | print_traceback(log) | |
431 | raise |
|
431 | raise | |
432 |
|
432 | |||
433 |
|
433 | |||
434 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
434 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) | |
435 | def check_alerts(resource_id): |
|
435 | def check_alerts(resource_id): | |
436 | since_when = datetime.utcnow() |
|
436 | since_when = datetime.utcnow() | |
437 | try: |
|
437 | try: | |
438 | request = get_current_request() |
|
438 | request = get_current_request() | |
439 | application = ApplicationService.by_id(resource_id) |
|
439 | application = ApplicationService.by_id(resource_id) | |
440 | if not application: |
|
440 | if not application: | |
441 | return |
|
441 | return | |
442 | error_key = REDIS_KEYS[ |
|
442 | error_key = REDIS_KEYS[ | |
443 | 'reports_to_notify_per_type_per_app_alerting'].format( |
|
443 | 'reports_to_notify_per_type_per_app_alerting'].format( | |
444 | ReportType.error, resource_id) |
|
444 | ReportType.error, resource_id) | |
445 | slow_key = REDIS_KEYS[ |
|
445 | slow_key = REDIS_KEYS[ | |
446 | 'reports_to_notify_per_type_per_app_alerting'].format( |
|
446 | 'reports_to_notify_per_type_per_app_alerting'].format( | |
447 | ReportType.slow, resource_id) |
|
447 | ReportType.slow, resource_id) | |
448 | error_group_ids = Datastores.redis.smembers(error_key) |
|
448 | error_group_ids = Datastores.redis.smembers(error_key) | |
449 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
449 | slow_group_ids = Datastores.redis.smembers(slow_key) | |
450 | Datastores.redis.delete(error_key) |
|
450 | Datastores.redis.delete(error_key) | |
451 | Datastores.redis.delete(slow_key) |
|
451 | Datastores.redis.delete(slow_key) | |
452 | err_gids = [int(g_id) for g_id in error_group_ids] |
|
452 | err_gids = [int(g_id) for g_id in error_group_ids] | |
453 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] |
|
453 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] | |
454 | group_ids = err_gids + slow_gids |
|
454 | group_ids = err_gids + slow_gids | |
455 | occurence_dict = {} |
|
455 | occurence_dict = {} | |
456 | for g_id in group_ids: |
|
456 | for g_id in group_ids: | |
457 | key = REDIS_KEYS['counters'][ |
|
457 | key = REDIS_KEYS['counters'][ | |
458 | 'report_group_occurences_alerting'].format( |
|
458 | 'report_group_occurences_alerting'].format( | |
459 | g_id) |
|
459 | g_id) | |
460 | val = Datastores.redis.get(key) |
|
460 | val = Datastores.redis.get(key) | |
461 | Datastores.redis.delete(key) |
|
461 | Datastores.redis.delete(key) | |
462 | if val: |
|
462 | if val: | |
463 | occurence_dict[g_id] = int(val) |
|
463 | occurence_dict[g_id] = int(val) | |
464 | else: |
|
464 | else: | |
465 | occurence_dict[g_id] = 1 |
|
465 | occurence_dict[g_id] = 1 | |
466 | report_groups = ReportGroupService.by_ids(group_ids) |
|
466 | report_groups = ReportGroupService.by_ids(group_ids) | |
467 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
467 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) | |
468 |
|
468 | |||
469 | ApplicationService.check_for_groups_alert( |
|
469 | ApplicationService.check_for_groups_alert( | |
470 | application, 'alert', report_groups=report_groups, |
|
470 | application, 'alert', report_groups=report_groups, | |
471 | occurence_dict=occurence_dict, since_when=since_when) |
|
471 | occurence_dict=occurence_dict, since_when=since_when) | |
472 | except Exception as exc: |
|
472 | except Exception as exc: | |
473 | print_traceback(log) |
|
473 | print_traceback(log) | |
474 | raise |
|
474 | raise | |
475 |
|
475 | |||
476 |
|
476 | |||
477 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
477 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) | |
478 | def close_alerts(): |
|
478 | def close_alerts(): | |
479 | log.warning('Checking alerts') |
|
479 | log.warning('Checking alerts') | |
480 | since_when = datetime.utcnow() |
|
480 | since_when = datetime.utcnow() | |
481 | try: |
|
481 | try: | |
482 | event_types = [Event.types['error_report_alert'], |
|
482 | event_types = [Event.types['error_report_alert'], | |
483 | Event.types['slow_report_alert'], ] |
|
483 | Event.types['slow_report_alert'], ] | |
484 | statuses = [Event.statuses['active']] |
|
484 | statuses = [Event.statuses['active']] | |
485 | # get events older than 5 min |
|
485 | # get events older than 5 min | |
486 | events = EventService.by_type_and_status( |
|
486 | events = EventService.by_type_and_status( | |
487 | event_types, |
|
487 | event_types, | |
488 | statuses, |
|
488 | statuses, | |
489 | older_than=(since_when - timedelta(minutes=5))) |
|
489 | older_than=(since_when - timedelta(minutes=5))) | |
490 | for event in events: |
|
490 | for event in events: | |
491 | # see if we can close them |
|
491 | # see if we can close them | |
492 | event.validate_or_close( |
|
492 | event.validate_or_close( | |
493 | since_when=(since_when - timedelta(minutes=1))) |
|
493 | since_when=(since_when - timedelta(minutes=1))) | |
494 | except Exception as exc: |
|
494 | except Exception as exc: | |
495 | print_traceback(log) |
|
495 | print_traceback(log) | |
496 | raise |
|
496 | raise | |
497 |
|
497 | |||
498 |
|
498 | |||
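# Tag values are stored as JSON; casting both sides to TEXT lets the database
# compare values of any type with plain string equality.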
499 | @celery.task(queue="default", default_retry_delay=600, max_retries=144) |
|
499 | @celery.task(queue="default", default_retry_delay=600, max_retries=144) | |
500 | def update_tag_counter(tag_name, tag_value, count): |
|
500 | def update_tag_counter(tag_name, tag_value, count): | |
501 | try: |
|
501 | try: | |
502 | query = DBSession.query(Tag).filter(Tag.name == tag_name).filter( |
|
502 | query = DBSession.query(Tag).filter(Tag.name == tag_name).filter( | |
503 | sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value), |
|
503 | sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value), | |
504 | sa.types.TEXT)) |
|
504 | sa.types.TEXT)) | |
505 | query.update({'times_seen': Tag.times_seen + count, |
|
505 | query.update({'times_seen': Tag.times_seen + count, | |
506 | 'last_timestamp': datetime.utcnow()}, |
|
506 | 'last_timestamp': datetime.utcnow()}, | |
507 | synchronize_session=False) |
|
507 | synchronize_session=False) | |
508 | session = DBSession() |
|
508 | session = DBSession() | |
509 | mark_changed(session) |
|
509 | mark_changed(session) | |
510 | return True |
|
510 | return True | |
511 | except Exception as exc: |
|
511 | except Exception as exc: | |
512 | print_traceback(log) |
|
512 | print_traceback(log) | |
513 | update_tag_counter.retry(exc=exc) |
|
513 | update_tag_counter.retry(exc=exc) | |
514 |
|
514 | |||
515 |
|
515 | |||
516 | @celery.task(queue="default") |
|
516 | @celery.task(queue="default") | |
517 | def update_tag_counters(): |
|
517 | def update_tag_counters(): | |
518 | """ |
|
518 | """ | |
519 | Sets task to update counters for application tags |
|
519 | Sets task to update counters for application tags | |
520 | """ |
|
520 | """ | |
521 | tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1) |
|
521 | tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1) | |
522 | Datastores.redis.delete(REDIS_KEYS['seen_tag_list']) |
|
522 | Datastores.redis.delete(REDIS_KEYS['seen_tag_list']) | |
523 | c = collections.Counter(tags) |
|
523 | c = collections.Counter(tags) | |
524 | for t_json, count in c.items(): |
|
524 | for t_json, count in c.items(): | |
525 | tag_info = json.loads(t_json) |
|
525 | tag_info = json.loads(t_json) | |
526 | update_tag_counter.delay(tag_info[0], tag_info[1], count) |
|
526 | update_tag_counter.delay(tag_info[0], tag_info[1], count) | |
527 |
|
527 | |||
528 |
|
528 | |||
529 | @celery.task(queue="default") |
|
529 | @celery.task(queue="default") | |
530 | def daily_digest(): |
|
530 | def daily_digest(): | |
531 | """ |
|
531 | """ | |
532 | Sends daily digest with top 50 error reports |
|
532 | Sends daily digest with top 50 error reports | |
533 | """ |
|
533 | """ | |
534 | request = get_current_request() |
|
534 | request = get_current_request() | |
535 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) |
|
535 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) | |
536 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) |
|
536 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) | |
537 | since_when = datetime.utcnow() - timedelta(hours=8) |
|
537 | since_when = datetime.utcnow() - timedelta(hours=8) | |
538 | log.warning('Generating daily digests') |
|
538 | log.warning('Generating daily digests') | |
539 | for resource_id in apps: |
|
539 | for resource_id in apps: | |
540 | resource_id = resource_id.decode('utf8') |
|
540 | resource_id = resource_id.decode('utf8') | |
541 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
541 | end_date = datetime.utcnow().replace(microsecond=0, second=0) | |
542 | filter_settings = {'resource': [resource_id], |
|
542 | filter_settings = {'resource': [resource_id], | |
543 | 'tags': [{'name': 'type', |
|
543 | 'tags': [{'name': 'type', | |
544 | 'value': ['error'], 'op': None}], |
|
544 | 'value': ['error'], 'op': None}], | |
545 | 'type': 'error', 'start_date': since_when, |
|
545 | 'type': 'error', 'start_date': since_when, | |
546 | 'end_date': end_date} |
|
546 | 'end_date': end_date} | |
547 |
|
547 | |||
548 | reports = ReportGroupService.get_trending( |
|
548 | reports = ReportGroupService.get_trending( | |
549 | request, filter_settings=filter_settings, limit=50) |
|
549 | request, filter_settings=filter_settings, limit=50) | |
550 |
|
550 | |||
551 | application = ApplicationService.by_id(resource_id) |
|
551 | application = ApplicationService.by_id(resource_id) | |
552 | if application: |
|
552 | if application: | |
553 | users = set([p.user for p in application.users_for_perm('view')]) |
|
553 | users = set([p.user for p in application.users_for_perm('view')]) | |
554 | for user in users: |
|
554 | for user in users: | |
555 | user.send_digest(request, application, reports=reports, |
|
555 | user.send_digest(request, application, reports=reports, | |
556 | since_when=since_when) |
|
556 | since_when=since_when) | |
557 |
|
557 | |||
558 |
|
558 | |||
559 | @celery.task(queue="default") |
|
559 | @celery.task(queue="default") | |
560 | def notifications_reports(): |
|
560 | def notifications_reports(): | |
561 | """ |
|
561 | """ | |
562 | Loop that checks redis for info and then issues new tasks to celery to |
|
562 | Loop that checks redis for info and then issues new tasks to celery to | |
563 | issue notifications |
|
563 | issue notifications | |
564 | """ |
|
564 | """ | |
565 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) |
|
565 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) | |
566 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) |
|
566 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) | |
567 | for app in apps: |
|
567 | for app in apps: | |
568 | log.warning('Notify for app: %s' % app) |
|
568 | log.warning('Notify for app: %s' % app) | |
569 | check_user_report_notifications.delay(app.decode('utf8')) |
|
569 | check_user_report_notifications.delay(app.decode('utf8')) | |
570 |
|
570 | |||
571 | @celery.task(queue="default") |
|
571 | @celery.task(queue="default") | |
572 | def alerting_reports(): |
|
572 | def alerting_reports(): | |
573 | """ |
|
573 | """ | |
574 | Loop that checks redis for info and then issues new tasks to celery to |
|
574 | Loop that checks redis for info and then issues new tasks to celery to | |
575 | perform the following: |
|
575 | perform the following: | |
576 | - which applications should have new alerts opened |
|
576 | - which applications should have new alerts opened | |
577 | """ |
|
577 | """ | |
578 |
|
578 | |||
579 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting']) |
|
579 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting']) | |
580 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting']) |
|
580 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting']) | |
581 | for app in apps: |
|
581 | for app in apps: | |
582 | log.warning('Notify for app: %s' % app) |
|
582 | log.warning('Notify for app: %s' % app) | |
583 | check_alerts.delay(app.decode('utf8')) |
|
583 | check_alerts.delay(app.decode('utf8')) | |
584 |
|
584 | |||
585 |
|
585 | |||
586 | @celery.task(queue="default", soft_time_limit=3600 * 4, |
|
586 | @celery.task(queue="default", soft_time_limit=3600 * 4, | |
587 | hard_time_limit=3600 * 4, max_retries=144) |
|
587 | hard_time_limit=3600 * 4, max_retries=144) | |
588 | def logs_cleanup(resource_id, filter_settings): |
|
588 | def logs_cleanup(resource_id, filter_settings): | |
589 | request = get_current_request() |
|
589 | request = get_current_request() | |
590 | request.tm.begin() |
|
590 | request.tm.begin() | |
591 | es_query = { |
|
591 | es_query = { | |
592 | "_source": False, |
|
592 | "_source": False, | |
593 | "size": 5000, |
|
593 | "size": 5000, | |
594 | "query": { |
|
594 | "query": { | |
595 | "filtered": { |
|
595 | "filtered": { | |
596 | "filter": { |
|
596 | "filter": { | |
597 | "and": [{"term": {"resource_id": resource_id}}] |
|
597 | "and": [{"term": {"resource_id": resource_id}}] | |
598 | } |
|
598 | } | |
599 | } |
|
599 | } | |
600 | } |
|
600 | } | |
601 | } |
|
601 | } | |
602 |
|
602 | |||
603 | query = DBSession.query(Log).filter(Log.resource_id == resource_id) |
|
603 | query = DBSession.query(Log).filter(Log.resource_id == resource_id) | |
604 | if filter_settings['namespace']: |
|
604 | if filter_settings['namespace']: | |
605 | query = query.filter(Log.namespace == filter_settings['namespace'][0]) |
|
605 | query = query.filter(Log.namespace == filter_settings['namespace'][0]) | |
606 | es_query['query']['filtered']['filter']['and'].append( |
|
606 | es_query['query']['filtered']['filter']['and'].append( | |
607 | {"term": {"namespace": filter_settings['namespace'][0]}} |
|
607 | {"term": {"namespace": filter_settings['namespace'][0]}} | |
608 | ) |
|
608 | ) | |
609 | query.delete(synchronize_session=False) |
|
609 | query.delete(synchronize_session=False) | |
610 | request.tm.commit() |
|
610 | request.tm.commit() | |
611 | result = request.es_conn.search(es_query, index='rcae_l_*', |
|
611 | result = request.es_conn.search(es_query, index='rcae_l_*', | |
612 | doc_type='log', es_scroll='1m', |
|
612 | doc_type='log', es_scroll='1m', | |
613 | es_search_type='scan') |
|
613 | es_search_type='scan') | |
614 | scroll_id = result['_scroll_id'] |
|
614 | scroll_id = result['_scroll_id'] | |
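    # ES rows are removed via scan/scroll: each scroll page returns up to 5000
    # matching doc ids, which are deleted in small bulk batches until the
    # scroll is exhausted.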
    while True:
        log.warning('log_cleanup, app:{} ns:{} batch'.format(
            resource_id,
            filter_settings['namespace']
        ))
        es_docs_to_delete = []
        result = request.es_conn.send_request(
            'POST', ['_search', 'scroll'],
            body=scroll_id, query_params={"scroll": '1m'})
        scroll_id = result['_scroll_id']
        if not result['hits']['hits']:
            break
        for doc in result['hits']['hits']:
            es_docs_to_delete.append({"id": doc['_id'],
                                      "index": doc['_index']})

        for batch in in_batches(es_docs_to_delete, 10):
            Datastores.es.bulk([Datastores.es.delete_op(doc_type='log',
                                                        **to_del)
                                for to_del in batch])
@@ -1,135 +1,135 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# AppEnlight Enterprise Edition, including its added features, Support
# services, and proprietary license terms, please see
# https://rhodecode.com/licenses/

import logging

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import MetaData
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension
import ziggurat_foundations
from ziggurat_foundations.models.base import get_db_session

log = logging.getLogger(__name__)

DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))

36 | NAMING_CONVENTION = { |
|
36 | NAMING_CONVENTION = { | |
37 | "ix": 'ix_%(column_0_label)s', |
|
37 | "ix": 'ix_%(column_0_label)s', | |
38 | "uq": "uq_%(table_name)s_%(column_0_name)s", |
|
38 | "uq": "uq_%(table_name)s_%(column_0_name)s", | |
39 | "ck": "ck_%(table_name)s_%(constraint_name)s", |
|
39 | "ck": "ck_%(table_name)s_%(constraint_name)s", | |
40 | "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", |
|
40 | "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", | |
41 | "pk": "pk_%(table_name)s" |
|
41 | "pk": "pk_%(table_name)s" | |
42 | } |
|
42 | } | |
43 |
|
43 | |||
44 | metadata = MetaData(naming_convention=NAMING_CONVENTION) |
|
44 | metadata = MetaData(naming_convention=NAMING_CONVENTION) | |
45 | Base = declarative_base(metadata=metadata) |
|
45 | Base = declarative_base(metadata=metadata) | |
46 |
|
46 | |||
47 | # optional for request.db approach |
|
47 | # optional for request.db approach | |
48 | ziggurat_foundations.models.DBSession = DBSession |
|
48 | ziggurat_foundations.models.DBSession = DBSession | |
49 |
|
49 | |||
50 |
|
50 | |||
51 | class Datastores(object): |
|
51 | class Datastores(object): | |
52 | redis = None |
|
52 | redis = None | |
53 | es = None |
|
53 | es = None | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | def register_datastores(es_conn, redis_conn, redis_lockmgr): |
|
56 | def register_datastores(es_conn, redis_conn, redis_lockmgr): | |
57 | Datastores.es = es_conn |
|
57 | Datastores.es = es_conn | |
58 | Datastores.redis = redis_conn |
|
58 | Datastores.redis = redis_conn | |
59 | Datastores.lockmgr = redis_lockmgr |
|
59 | Datastores.lockmgr = redis_lockmgr | |
60 |
|
60 | |||
61 |
|
61 | |||
62 | class SliceableESQuery(object): |
|
62 | class SliceableESQuery(object): | |
63 | def __init__(self, query, sort_query=None, aggregations=False, **kwconfig): |
|
63 | def __init__(self, query, sort_query=None, aggregations=False, **kwconfig): | |
64 | self.query = query |
|
64 | self.query = query | |
65 | self.sort_query = sort_query |
|
65 | self.sort_query = sort_query | |
66 | self.aggregations = aggregations |
|
66 | self.aggregations = aggregations | |
67 | self.items_per_page = kwconfig.pop('items_per_page', 10) |
|
67 | self.items_per_page = kwconfig.pop('items_per_page', 10) | |
68 | self.page = kwconfig.pop('page', 1) |
|
68 | self.page = kwconfig.pop('page', 1) | |
69 | self.kwconfig = kwconfig |
|
69 | self.kwconfig = kwconfig | |
70 | self.result = None |
|
70 | self.result = None | |
71 |
|
71 | |||
72 | def __getitem__(self, index): |
|
72 | def __getitem__(self, index): | |
73 | config = self.kwconfig.copy() |
|
73 | config = self.kwconfig.copy() | |
74 | config['es_from'] = index.start |
|
74 | config['es_from'] = index.start | |
75 | query = self.query.copy() |
|
75 | query = self.query.copy() | |
76 | if self.sort_query: |
|
76 | if self.sort_query: | |
77 | query.update(self.sort_query) |
|
77 | query.update(self.sort_query) | |
78 | self.result = Datastores.es.search(query, size=self.items_per_page, |
|
78 | self.result = Datastores.es.search(query, size=self.items_per_page, | |
79 | **config) |
|
79 | **config) | |
80 | if self.aggregations: |
|
80 | if self.aggregations: | |
81 | self.items = self.result.get('aggregations') |
|
81 | self.items = self.result.get('aggregations') | |
82 | else: |
|
82 | else: | |
83 | self.items = self.result['hits']['hits'] |
|
83 | self.items = self.result['hits']['hits'] | |
84 |
|
84 | |||
85 | return self.items |
|
85 | return self.items | |
86 |
|
86 | |||
87 | def __iter__(self): |
|
87 | def __iter__(self): | |
88 | return self.result |
|
88 | return self.result | |
89 |
|
89 | |||
90 | def __len__(self): |
|
90 | def __len__(self): | |
91 | config = self.kwconfig.copy() |
|
91 | config = self.kwconfig.copy() | |
92 | query = self.query.copy() |
|
92 | query = self.query.copy() | |
93 | self.result = Datastores.es.search(query, size=self.items_per_page, |
|
93 | self.result = Datastores.es.search(query, size=self.items_per_page, | |
94 | **config) |
|
94 | **config) | |
95 | if self.aggregations: |
|
95 | if self.aggregations: | |
96 | self.items = self.result.get('aggregations') |
|
96 | self.items = self.result.get('aggregations') | |
97 | else: |
|
97 | else: | |
98 | self.items = self.result['hits']['hits'] |
|
98 | self.items = self.result['hits']['hits'] | |
99 |
|
99 | |||
100 | count = int(self.result['hits']['total']) |
|
100 | count = int(self.result['hits']['total']) | |
101 | return count if count < 5000 else 5000 |
|
101 | return count if count < 5000 else 5000 | |
102 |
|
102 | |||
103 |
|
103 | |||
104 | from appenlight.models.resource import Resource |
|
104 | from appenlight.models.resource import Resource | |
105 | from appenlight.models.application import Application |
|
105 | from appenlight.models.application import Application | |
106 | from appenlight.models.user import User |
|
106 | from appenlight.models.user import User | |
107 | from appenlight.models.alert_channel import AlertChannel |
|
107 | from appenlight.models.alert_channel import AlertChannel | |
108 | from appenlight.models.alert_channel_action import AlertChannelAction |
|
108 | from appenlight.models.alert_channel_action import AlertChannelAction | |
109 |
from appenlight.models. |
|
109 | from appenlight.models.metric import Metric | |
110 | from appenlight.models.application_postprocess_conf import \ |
|
110 | from appenlight.models.application_postprocess_conf import \ | |
111 | ApplicationPostprocessConf |
|
111 | ApplicationPostprocessConf | |
112 | from appenlight.models.auth_token import AuthToken |
|
112 | from appenlight.models.auth_token import AuthToken | |
113 | from appenlight.models.event import Event |
|
113 | from appenlight.models.event import Event | |
114 | from appenlight.models.external_identity import ExternalIdentity |
|
114 | from appenlight.models.external_identity import ExternalIdentity | |
115 | from appenlight.models.group import Group |
|
115 | from appenlight.models.group import Group | |
116 | from appenlight.models.group_permission import GroupPermission |
|
116 | from appenlight.models.group_permission import GroupPermission | |
117 | from appenlight.models.group_resource_permission import GroupResourcePermission |
|
117 | from appenlight.models.group_resource_permission import GroupResourcePermission | |
118 | from appenlight.models.log import Log |
|
118 | from appenlight.models.log import Log | |
119 | from appenlight.models.plugin_config import PluginConfig |
|
119 | from appenlight.models.plugin_config import PluginConfig | |
120 | from appenlight.models.report import Report |
|
120 | from appenlight.models.report import Report | |
121 | from appenlight.models.report_group import ReportGroup |
|
121 | from appenlight.models.report_group import ReportGroup | |
122 | from appenlight.models.report_comment import ReportComment |
|
122 | from appenlight.models.report_comment import ReportComment | |
123 | from appenlight.models.report_assignment import ReportAssignment |
|
123 | from appenlight.models.report_assignment import ReportAssignment | |
124 | from appenlight.models.report_stat import ReportStat |
|
124 | from appenlight.models.report_stat import ReportStat | |
125 | from appenlight.models.slow_call import SlowCall |
|
125 | from appenlight.models.slow_call import SlowCall | |
126 | from appenlight.models.tag import Tag |
|
126 | from appenlight.models.tag import Tag | |
127 | from appenlight.models.user_group import UserGroup |
|
127 | from appenlight.models.user_group import UserGroup | |
128 | from appenlight.models.user_permission import UserPermission |
|
128 | from appenlight.models.user_permission import UserPermission | |
129 | from appenlight.models.user_resource_permission import UserResourcePermission |
|
129 | from appenlight.models.user_resource_permission import UserResourcePermission | |
130 | from ziggurat_foundations import ziggurat_model_init |
|
130 | from ziggurat_foundations import ziggurat_model_init | |
131 |
|
131 | |||
132 | ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission, |
|
132 | ziggurat_model_init(User, Group, UserGroup, GroupPermission, UserPermission, | |
133 | UserResourcePermission, GroupResourcePermission, |
|
133 | UserResourcePermission, GroupResourcePermission, | |
134 | Resource, |
|
134 | Resource, | |
135 | ExternalIdentity, passwordmanager=None) |
|
135 | ExternalIdentity, passwordmanager=None) |
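SliceableESQuery above adapts an Elasticsearch query to the `__getitem__`/`__len__` protocol that slice-based paginators expect, translating the slice start into `es_from` and capping the reported total at 5000 to stay clear of deep paging. A sketch of how such an object could be consumed; the query and the extra keyword arguments are illustrative assumptions:

    es_query = {"query": {"term": {"resource_id": 42}}}
    sliceable = SliceableESQuery(es_query, items_per_page=25, page=2,
                                 index='rcae_r_*', doc_type='report')

    total = len(sliceable)  # runs the query once, capped at 5000
    start = (sliceable.page - 1) * sliceable.items_per_page
    hits = sliceable[start:start + sliceable.items_per_page]  # sets es_from
    for hit in hits:
        print(hit['_id'])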
NO CONTENT: file renamed from backend/src/appenlight/models/request_metric.py to backend/src/appenlight/models/metric.py
@@ -1,435 +1,435 @@
  1 | # -*- coding: utf-8 -*-
  2 |
  3 | # Copyright (C) 2010-2016 RhodeCode GmbH
  4 | #
  5 | # This program is free software: you can redistribute it and/or modify
  6 | # it under the terms of the GNU Affero General Public License, version 3
  7 | # (only), as published by the Free Software Foundation.
  8 | #
  9 | # This program is distributed in the hope that it will be useful,
 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 12 | # GNU General Public License for more details.
 13 | #
 14 | # You should have received a copy of the GNU Affero General Public License
 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
 16 | #
 17 | # This program is dual-licensed. If you wish to learn more about the
 18 | # AppEnlight Enterprise Edition, including its added features, Support
 19 | # services, and proprietary license terms, please see
 20 | # https://rhodecode.com/licenses/
 21 |
 22 | import argparse
 23 | import datetime
 24 | import logging
 25 |
 26 | import sqlalchemy as sa
 27 | from collections import defaultdict
 28 | from pyramid.paster import setup_logging
 29 | from pyramid.paster import bootstrap
 30 | from appenlight.models import (
 31 |     DBSession,
 32 |     Datastores,
 33 |     metadata
 34 | )
 35 | from appenlight.lib import get_callable
 36 | from appenlight.models.report_group import ReportGroup
 37 | from appenlight.models.report import Report
 38 | from appenlight.models.report_stat import ReportStat
 39 | from appenlight.models.log import Log
 40 | from appenlight.models.slow_call import SlowCall
 41 | -from appenlight.models.request_metric import Metric
 41 | +from appenlight.models.metric import Metric
 42 |
 43 |
 44 | log = logging.getLogger(__name__)
 45 |
 46 | tables = {
 47 |     'slow_calls_p_': [],
 48 |     'reports_stats_p_': [],
 49 |     'reports_p_': [],
 50 |     'reports_groups_p_': [],
 51 |     'logs_p_': [],
 52 |     'metrics_p_': [],
 53 | }
 54 |
 55 | def detect_tables(table_prefix):
 56 |     found_tables = []
 57 |     db_tables_query = '''
 58 |     SELECT tablename FROM pg_tables WHERE tablename NOT LIKE 'pg_%' AND
 59 |     tablename NOT LIKE 'sql_%' ORDER BY tablename ASC;'''
 60 |
 61 |     for table in DBSession.execute(db_tables_query).fetchall():
 62 |         tablename = table.tablename
 63 |         if tablename.startswith(table_prefix):
 64 |             t = sa.Table(tablename, metadata, autoload=True,
 65 |                          autoload_with=DBSession.bind.engine)
 66 |             found_tables.append(t)
 67 |     return found_tables
 68 |
 69 |
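detect_tables() reflects every date-partitioned table whose name matches a prefix, which lets the reindex functions below stream one physical partition at a time. A quick usage sketch; the partition suffix shown is a hypothetical example of the naming, not taken from the source:

    # assumes a bootstrapped app with a bound DBSession
    for t in detect_tables('logs_p_'):
        # e.g. 'logs_p_2016_01' (hypothetical suffix) -> reflected sa.Table
        print(t.name, [c.name for c in t.columns])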
|
69 | |||
70 | def main(): |
|
70 | def main(): | |
71 | """ |
|
71 | """ | |
72 | Recreates Elasticsearch indexes |
|
72 | Recreates Elasticsearch indexes | |
73 | Performs reindex of whole db to Elasticsearch |
|
73 | Performs reindex of whole db to Elasticsearch | |
74 |
|
74 | |||
75 | """ |
|
75 | """ | |
76 |
|
76 | |||
77 | # need parser twice because we first need to load ini file |
|
77 | # need parser twice because we first need to load ini file | |
78 | # bootstrap pyramid and then load plugins |
|
78 | # bootstrap pyramid and then load plugins | |
79 | pre_parser = argparse.ArgumentParser( |
|
79 | pre_parser = argparse.ArgumentParser( | |
80 | description='Reindex AppEnlight data', |
|
80 | description='Reindex AppEnlight data', | |
81 | add_help=False) |
|
81 | add_help=False) | |
82 | pre_parser.add_argument('-c', '--config', required=True, |
|
82 | pre_parser.add_argument('-c', '--config', required=True, | |
83 | help='Configuration ini file of application') |
|
83 | help='Configuration ini file of application') | |
84 | pre_parser.add_argument('-h', '--help', help='Show help', nargs='?') |
|
84 | pre_parser.add_argument('-h', '--help', help='Show help', nargs='?') | |
85 | pre_parser.add_argument('-t', '--types', nargs='+', |
|
85 | pre_parser.add_argument('-t', '--types', nargs='+', | |
86 | help='Which parts of database should get reindexed') |
|
86 | help='Which parts of database should get reindexed') | |
87 | args = pre_parser.parse_args() |
|
87 | args = pre_parser.parse_args() | |
88 |
|
88 | |||
89 | config_uri = args.config |
|
89 | config_uri = args.config | |
90 | setup_logging(config_uri) |
|
90 | setup_logging(config_uri) | |
91 | log.setLevel(logging.INFO) |
|
91 | log.setLevel(logging.INFO) | |
92 | env = bootstrap(config_uri) |
|
92 | env = bootstrap(config_uri) | |
93 | parser = argparse.ArgumentParser(description='Reindex AppEnlight data') |
|
93 | parser = argparse.ArgumentParser(description='Reindex AppEnlight data') | |
94 | choices = { |
|
94 | choices = { | |
95 | 'reports': 'appenlight.scripts.reindex_elasticsearch:reindex_reports', |
|
95 | 'reports': 'appenlight.scripts.reindex_elasticsearch:reindex_reports', | |
96 | 'logs': 'appenlight.scripts.reindex_elasticsearch:reindex_logs', |
|
96 | 'logs': 'appenlight.scripts.reindex_elasticsearch:reindex_logs', | |
97 | 'metrics': 'appenlight.scripts.reindex_elasticsearch:reindex_metrics', |
|
97 | 'metrics': 'appenlight.scripts.reindex_elasticsearch:reindex_metrics', | |
98 | 'slow_calls': 'appenlight.scripts.reindex_elasticsearch:reindex_slow_calls', |
|
98 | 'slow_calls': 'appenlight.scripts.reindex_elasticsearch:reindex_slow_calls', | |
99 | 'template': 'appenlight.scripts.reindex_elasticsearch:update_template' |
|
99 | 'template': 'appenlight.scripts.reindex_elasticsearch:update_template' | |
100 | } |
|
100 | } | |
101 | for k, v in env['registry'].appenlight_plugins.items(): |
|
101 | for k, v in env['registry'].appenlight_plugins.items(): | |
102 | if v.get('fulltext_indexer'): |
|
102 | if v.get('fulltext_indexer'): | |
103 | choices[k] = v['fulltext_indexer'] |
|
103 | choices[k] = v['fulltext_indexer'] | |
104 | parser.add_argument('-t', '--types', nargs='*', |
|
104 | parser.add_argument('-t', '--types', nargs='*', | |
105 | choices=['all'] + list(choices.keys()), default=['all'], |
|
105 | choices=['all'] + list(choices.keys()), default=['all'], | |
106 | help='Which parts of database should get reindexed') |
|
106 | help='Which parts of database should get reindexed') | |
107 | parser.add_argument('-c', '--config', required=True, |
|
107 | parser.add_argument('-c', '--config', required=True, | |
108 | help='Configuration ini file of application') |
|
108 | help='Configuration ini file of application') | |
109 | args = parser.parse_args() |
|
109 | args = parser.parse_args() | |
110 |
|
110 | |||
111 |
|
111 | |||
112 | if 'all' in args.types: |
|
112 | if 'all' in args.types: | |
113 | args.types = list(choices.keys()) |
|
113 | args.types = list(choices.keys()) | |
114 |
|
114 | |||
115 | log.info('settings {}'.format(args.types)) |
|
115 | log.info('settings {}'.format(args.types)) | |
116 |
|
116 | |||
117 | if 'template' in args.types: |
|
117 | if 'template' in args.types: | |
118 | get_callable(choices['template'])() |
|
118 | get_callable(choices['template'])() | |
119 | args.types.remove('template') |
|
119 | args.types.remove('template') | |
120 | for selected in args.types: |
|
120 | for selected in args.types: | |
121 | get_callable(choices[selected])() |
|
121 | get_callable(choices[selected])() | |
122 |
|
122 | |||
123 |
|
123 | |||
124 | def update_template(): |
|
124 | def update_template(): | |
125 | try: |
|
125 | try: | |
126 | Datastores.es.send_request("delete", ['_template', 'rcae'], |
|
126 | Datastores.es.send_request("delete", ['_template', 'rcae'], | |
127 | query_params={}) |
|
127 | query_params={}) | |
128 | except Exception as e: |
|
128 | except Exception as e: | |
129 | print(e) |
|
129 | print(e) | |
130 | log.info('updating elasticsearch template') |
|
130 | log.info('updating elasticsearch template') | |
131 | tag_templates = [ |
|
131 | tag_templates = [ | |
132 | {"values": { |
|
132 | {"values": { | |
133 | "path_match": "tags.*", |
|
133 | "path_match": "tags.*", | |
134 | "mapping": { |
|
134 | "mapping": { | |
135 | "type": "object", |
|
135 | "type": "object", | |
136 | "properties": { |
|
136 | "properties": { | |
137 | "values": {"type": "string", "analyzer": "tag_value"}, |
|
137 | "values": {"type": "string", "analyzer": "tag_value"}, | |
138 | "numeric_values": {"type": "float"} |
|
138 | "numeric_values": {"type": "float"} | |
139 | } |
|
139 | } | |
140 | } |
|
140 | } | |
141 | }} |
|
141 | }} | |
142 | ] |
|
142 | ] | |
143 |
|
143 | |||
144 | template_schema = { |
|
144 | template_schema = { | |
145 | "template": "rcae_*", |
|
145 | "template": "rcae_*", | |
146 | "settings": { |
|
146 | "settings": { | |
147 | "index": { |
|
147 | "index": { | |
148 | "refresh_interval": "5s", |
|
148 | "refresh_interval": "5s", | |
149 | "translog": {"interval": "5s", |
|
149 | "translog": {"interval": "5s", | |
150 | "durability": "async"} |
|
150 | "durability": "async"} | |
151 | }, |
|
151 | }, | |
152 | "number_of_shards": 5, |
|
152 | "number_of_shards": 5, | |
153 | "analysis": { |
|
153 | "analysis": { | |
154 | "analyzer": { |
|
154 | "analyzer": { | |
155 | "url_path": { |
|
155 | "url_path": { | |
156 | "type": "custom", |
|
156 | "type": "custom", | |
157 | "char_filter": [], |
|
157 | "char_filter": [], | |
158 | "tokenizer": "path_hierarchy", |
|
158 | "tokenizer": "path_hierarchy", | |
159 | "filter": [] |
|
159 | "filter": [] | |
160 | }, |
|
160 | }, | |
161 | "tag_value": { |
|
161 | "tag_value": { | |
162 | "type": "custom", |
|
162 | "type": "custom", | |
163 | "char_filter": [], |
|
163 | "char_filter": [], | |
164 | "tokenizer": "keyword", |
|
164 | "tokenizer": "keyword", | |
165 | "filter": ["lowercase"] |
|
165 | "filter": ["lowercase"] | |
166 | }, |
|
166 | }, | |
167 | } |
|
167 | } | |
168 | }, |
|
168 | }, | |
169 | }, |
|
169 | }, | |
170 | "mappings": { |
|
170 | "mappings": { | |
171 | "report_group": { |
|
171 | "report_group": { | |
172 | "_all": {"enabled": False}, |
|
172 | "_all": {"enabled": False}, | |
173 | "dynamic_templates": tag_templates, |
|
173 | "dynamic_templates": tag_templates, | |
174 | "properties": { |
|
174 | "properties": { | |
175 | "pg_id": {"type": "string", "index": "not_analyzed"}, |
|
175 | "pg_id": {"type": "string", "index": "not_analyzed"}, | |
176 | "resource_id": {"type": "integer"}, |
|
176 | "resource_id": {"type": "integer"}, | |
177 | "priority": {"type": "integer"}, |
|
177 | "priority": {"type": "integer"}, | |
178 | "error": {"type": "string", "analyzer": "simple"}, |
|
178 | "error": {"type": "string", "analyzer": "simple"}, | |
179 | "read": {"type": "boolean"}, |
|
179 | "read": {"type": "boolean"}, | |
180 | "occurences": {"type": "integer"}, |
|
180 | "occurences": {"type": "integer"}, | |
181 | "fixed": {"type": "boolean"}, |
|
181 | "fixed": {"type": "boolean"}, | |
182 | "first_timestamp": {"type": "date"}, |
|
182 | "first_timestamp": {"type": "date"}, | |
183 | "last_timestamp": {"type": "date"}, |
|
183 | "last_timestamp": {"type": "date"}, | |
184 | "average_duration": {"type": "float"}, |
|
184 | "average_duration": {"type": "float"}, | |
185 | "summed_duration": {"type": "float"}, |
|
185 | "summed_duration": {"type": "float"}, | |
186 | "public": {"type": "boolean"} |
|
186 | "public": {"type": "boolean"} | |
187 | } |
|
187 | } | |
188 | }, |
|
188 | }, | |
189 | "report": { |
|
189 | "report": { | |
190 | "_all": {"enabled": False}, |
|
190 | "_all": {"enabled": False}, | |
191 | "dynamic_templates": tag_templates, |
|
191 | "dynamic_templates": tag_templates, | |
192 | "properties": { |
|
192 | "properties": { | |
193 | "pg_id": {"type": "string", "index": "not_analyzed"}, |
|
193 | "pg_id": {"type": "string", "index": "not_analyzed"}, | |
194 | "resource_id": {"type": "integer"}, |
|
194 | "resource_id": {"type": "integer"}, | |
195 | "group_id": {"type": "string"}, |
|
195 | "group_id": {"type": "string"}, | |
196 | "http_status": {"type": "integer"}, |
|
196 | "http_status": {"type": "integer"}, | |
197 | "ip": {"type": "string", "index": "not_analyzed"}, |
|
197 | "ip": {"type": "string", "index": "not_analyzed"}, | |
198 | "url_domain": {"type": "string", "analyzer": "simple"}, |
|
198 | "url_domain": {"type": "string", "analyzer": "simple"}, | |
199 | "url_path": {"type": "string", "analyzer": "url_path"}, |
|
199 | "url_path": {"type": "string", "analyzer": "url_path"}, | |
200 | "error": {"type": "string", "analyzer": "simple"}, |
|
200 | "error": {"type": "string", "analyzer": "simple"}, | |
201 | "report_type": {"type": "integer"}, |
|
201 | "report_type": {"type": "integer"}, | |
202 | "start_time": {"type": "date"}, |
|
202 | "start_time": {"type": "date"}, | |
203 | "request_id": {"type": "string", "index": "not_analyzed"}, |
|
203 | "request_id": {"type": "string", "index": "not_analyzed"}, | |
204 | "end_time": {"type": "date"}, |
|
204 | "end_time": {"type": "date"}, | |
205 | "duration": {"type": "float"}, |
|
205 | "duration": {"type": "float"}, | |
206 | "tags": { |
|
206 | "tags": { | |
207 | "type": "object" |
|
207 | "type": "object" | |
208 | }, |
|
208 | }, | |
209 | "tag_list": {"type": "string", "analyzer": "tag_value"}, |
|
209 | "tag_list": {"type": "string", "analyzer": "tag_value"}, | |
210 | "extra": { |
|
210 | "extra": { | |
211 | "type": "object" |
|
211 | "type": "object" | |
212 | }, |
|
212 | }, | |
213 | }, |
|
213 | }, | |
214 | "_parent": {"type": "report_group"} |
|
214 | "_parent": {"type": "report_group"} | |
215 | }, |
|
215 | }, | |
216 | "log": { |
|
216 | "log": { | |
217 | "_all": {"enabled": False}, |
|
217 | "_all": {"enabled": False}, | |
218 | "dynamic_templates": tag_templates, |
|
218 | "dynamic_templates": tag_templates, | |
219 | "properties": { |
|
219 | "properties": { | |
220 | "pg_id": {"type": "string", "index": "not_analyzed"}, |
|
220 | "pg_id": {"type": "string", "index": "not_analyzed"}, | |
221 | "delete_hash": {"type": "string", "index": "not_analyzed"}, |
|
221 | "delete_hash": {"type": "string", "index": "not_analyzed"}, | |
222 | "resource_id": {"type": "integer"}, |
|
222 | "resource_id": {"type": "integer"}, | |
223 | "timestamp": {"type": "date"}, |
|
223 | "timestamp": {"type": "date"}, | |
224 | "permanent": {"type": "boolean"}, |
|
224 | "permanent": {"type": "boolean"}, | |
225 | "request_id": {"type": "string", "index": "not_analyzed"}, |
|
225 | "request_id": {"type": "string", "index": "not_analyzed"}, | |
226 | "log_level": {"type": "string", "analyzer": "simple"}, |
|
226 | "log_level": {"type": "string", "analyzer": "simple"}, | |
227 | "message": {"type": "string", "analyzer": "simple"}, |
|
227 | "message": {"type": "string", "analyzer": "simple"}, | |
228 | "namespace": {"type": "string", "index": "not_analyzed"}, |
|
228 | "namespace": {"type": "string", "index": "not_analyzed"}, | |
229 | "tags": { |
|
229 | "tags": { | |
230 | "type": "object" |
|
230 | "type": "object" | |
231 | }, |
|
231 | }, | |
232 | "tag_list": {"type": "string", "analyzer": "tag_value"} |
|
232 | "tag_list": {"type": "string", "analyzer": "tag_value"} | |
233 | } |
|
233 | } | |
234 | } |
|
234 | } | |
235 | } |
|
235 | } | |
236 | } |
|
236 | } | |
237 |
|
237 | |||
238 | Datastores.es.send_request('PUT', ['_template', 'rcae'], |
|
238 | Datastores.es.send_request('PUT', ['_template', 'rcae'], | |
239 | body=template_schema, query_params={}) |
|
239 | body=template_schema, query_params={}) | |
240 |
|
240 | |||
241 |
|
241 | |||
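update_template() drops and re-PUTs the `rcae` index template so every `rcae_*` index created afterwards inherits these settings and mappings. A sketch of verifying the result; issuing a GET through the same wrapper is an assumption mirroring the PUT/DELETE calls above, and the response shape follows Elasticsearch's template API:

    # illustrative check, assuming the wrapper mirrors ES's template API
    current = Datastores.es.send_request('GET', ['_template', 'rcae'],
                                         query_params={})
    print(current.get('rcae', {}).get('template'))  # expected: 'rcae_*'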
242 | def reindex_reports():
243 |     reports_groups_tables = detect_tables('reports_groups_p_')
244 |     try:
245 |         Datastores.es.delete_index('rcae_r*')
246 |     except Exception as e:
247 |         log.error(e)
248 |
249 |     log.info('reindexing report groups')
250 |     i = 0
251 |     task_start = datetime.datetime.now()
252 |     for partition_table in reports_groups_tables:
253 |         conn = DBSession.connection().execution_options(stream_results=True)
254 |         result = conn.execute(partition_table.select())
255 |         while True:
256 |             chunk = result.fetchmany(2000)
257 |             if not chunk:
258 |                 break
259 |             es_docs = defaultdict(list)
260 |             for row in chunk:
261 |                 i += 1
262 |                 item = ReportGroup(**dict(list(row.items())))
263 |                 d_range = item.partition_id
264 |                 es_docs[d_range].append(item.es_doc())
265 |             if es_docs:
266 |                 name = partition_table.name
267 |                 log.info('round {}, {}'.format(i, name))
268 |                 for k, v in es_docs.items():
269 |                     Datastores.es.bulk_index(k, 'report_group', v,
270 |                                              id_field="_id")
271 |
272 |     log.info(
273 |         'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
274 |
275 |     i = 0
276 |     log.info('reindexing reports')
277 |     task_start = datetime.datetime.now()
278 |     reports_tables = detect_tables('reports_p_')
279 |     for partition_table in reports_tables:
280 |         conn = DBSession.connection().execution_options(stream_results=True)
281 |         result = conn.execute(partition_table.select())
282 |         while True:
283 |             chunk = result.fetchmany(2000)
284 |             if not chunk:
285 |                 break
286 |             es_docs = defaultdict(list)
287 |             for row in chunk:
288 |                 i += 1
289 |                 item = Report(**dict(list(row.items())))
290 |                 d_range = item.partition_id
291 |                 es_docs[d_range].append(item.es_doc())
292 |             if es_docs:
293 |                 name = partition_table.name
294 |                 log.info('round {}, {}'.format(i, name))
295 |                 for k, v in es_docs.items():
296 |                     Datastores.es.bulk_index(k, 'report', v, id_field="_id",
297 |                                              parent_field='_parent')
298 |
299 |     log.info(
300 |         'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
301 |
302 |     log.info('reindexing reports stats')
303 |     i = 0
304 |     task_start = datetime.datetime.now()
305 |     reports_stats_tables = detect_tables('reports_stats_p_')
306 |     for partition_table in reports_stats_tables:
307 |         conn = DBSession.connection().execution_options(stream_results=True)
308 |         result = conn.execute(partition_table.select())
309 |         while True:
310 |             chunk = result.fetchmany(2000)
311 |             if not chunk:
312 |                 break
313 |             es_docs = defaultdict(list)
314 |             for row in chunk:
315 |                 rd = dict(list(row.items()))
316 |                 # remove legacy columns
317 |                 # TODO: remove the column later
318 |                 rd.pop('size', None)
319 |                 item = ReportStat(**rd)
320 |                 i += 1
321 |                 d_range = item.partition_id
322 |                 es_docs[d_range].append(item.es_doc())
323 |             if es_docs:
324 |                 name = partition_table.name
325 |                 log.info('round {}, {}'.format(i, name))
326 |                 for k, v in es_docs.items():
327 |                     Datastores.es.bulk_index(k, 'log', v)
328 |
329 |     log.info(
330 |         'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
331 |
332 |
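Each reindex_* function below repeats the same skeleton as reindex_reports() above: a streaming server-side cursor, fetchmany() chunks, documents grouped by partition id, and one bulk_index call per target index. A condensed sketch of that shared pattern; the helper name and parameters are hypothetical, distilled from the functions in this file:

    def reindex_partitions(table_prefix, model_cls, doc_type, chunk_size=2000):
        for partition_table in detect_tables(table_prefix):
            # stream_results=True keeps a server-side cursor open instead of
            # loading a whole partition into memory
            conn = DBSession.connection().execution_options(stream_results=True)
            result = conn.execute(partition_table.select())
            while True:
                chunk = result.fetchmany(chunk_size)
                if not chunk:
                    break
                es_docs = defaultdict(list)
                for row in chunk:
                    item = model_cls(**dict(list(row.items())))
                    # partition_id routes each row to its time-based index
                    es_docs[item.partition_id].append(item.es_doc())
                for index_name, docs in es_docs.items():
                    Datastores.es.bulk_index(index_name, doc_type, docs)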
333 | def reindex_logs():
334 |     try:
335 |         Datastores.es.delete_index('rcae_l*')
336 |     except Exception as e:
337 |         log.error(e)
338 |
339 |     # logs
340 |     log.info('reindexing logs')
341 |     i = 0
342 |     task_start = datetime.datetime.now()
343 |     log_tables = detect_tables('logs_p_')
344 |     for partition_table in log_tables:
345 |         conn = DBSession.connection().execution_options(stream_results=True)
346 |         result = conn.execute(partition_table.select())
347 |         while True:
348 |             chunk = result.fetchmany(2000)
349 |             if not chunk:
350 |                 break
351 |             es_docs = defaultdict(list)
352 |
353 |             for row in chunk:
354 |                 i += 1
355 |                 item = Log(**dict(list(row.items())))
356 |                 d_range = item.partition_id
357 |                 es_docs[d_range].append(item.es_doc())
358 |             if es_docs:
359 |                 name = partition_table.name
360 |                 log.info('round {}, {}'.format(i, name))
361 |                 for k, v in es_docs.items():
362 |                     Datastores.es.bulk_index(k, 'log', v)
363 |
364 |     log.info(
365 |         'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
366 |
367 |
368 | def reindex_metrics():
369 |     try:
370 |         Datastores.es.delete_index('rcae_m*')
371 |     except Exception as e:
372 |         print(e)
373 |
374 |     log.info('reindexing applications metrics')
375 |     i = 0
376 |     task_start = datetime.datetime.now()
377 |     metric_tables = detect_tables('metrics_p_')
378 |     for partition_table in metric_tables:
379 |         conn = DBSession.connection().execution_options(stream_results=True)
380 |         result = conn.execute(partition_table.select())
381 |         while True:
382 |             chunk = result.fetchmany(2000)
383 |             if not chunk:
384 |                 break
385 |             es_docs = defaultdict(list)
386 |             for row in chunk:
387 |                 i += 1
388 |                 item = Metric(**dict(list(row.items())))
389 |                 d_range = item.partition_id
390 |                 es_docs[d_range].append(item.es_doc())
391 |             if es_docs:
392 |                 name = partition_table.name
393 |                 log.info('round {}, {}'.format(i, name))
394 |                 for k, v in es_docs.items():
395 |                     Datastores.es.bulk_index(k, 'log', v)
396 |
397 |     log.info(
398 |         'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
399 |
400 |
401 | def reindex_slow_calls():
402 |     try:
403 |         Datastores.es.delete_index('rcae_sc*')
404 |     except Exception as e:
405 |         print(e)
406 |
407 |     log.info('reindexing slow calls')
408 |     i = 0
409 |     task_start = datetime.datetime.now()
410 |     slow_calls_tables = detect_tables('slow_calls_p_')
411 |     for partition_table in slow_calls_tables:
412 |         conn = DBSession.connection().execution_options(stream_results=True)
413 |         result = conn.execute(partition_table.select())
414 |         while True:
415 |             chunk = result.fetchmany(2000)
416 |             if not chunk:
417 |                 break
418 |             es_docs = defaultdict(list)
419 |             for row in chunk:
420 |                 i += 1
421 |                 item = SlowCall(**dict(list(row.items())))
422 |                 d_range = item.partition_id
423 |                 es_docs[d_range].append(item.es_doc())
424 |             if es_docs:
425 |                 name = partition_table.name
426 |                 log.info('round {}, {}'.format(i, name))
427 |                 for k, v in es_docs.items():
428 |                     Datastores.es.bulk_index(k, 'log', v)
429 |
430 |     log.info(
431 |         'total docs {} {}'.format(i, datetime.datetime.now() - task_start))
432 |
433 |
434 | if __name__ == '__main__':
435 |     main()
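main() resolves each selected type to a callable through a 'module:function' dotted name, and plugins can hook in by advertising a `fulltext_indexer` entry in the registry. How the dotted names are resolved, sketched with one of the entries defined above:

    # get_callable() imports 'module:function' dotted names, so a plugin's
    # indexer only needs to be importable under such a name
    indexer = get_callable(
        'appenlight.scripts.reindex_elasticsearch:reindex_logs')
    indexer()  # the same call main() makes for each selected type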
@@ -1,1703 +1,1703 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2016 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2016 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # AppEnlight Enterprise Edition, including its added features, Support |
|
18 | # AppEnlight Enterprise Edition, including its added features, Support | |
19 | # services, and proprietary license terms, please see |
|
19 | # services, and proprietary license terms, please see | |
20 | # https://rhodecode.com/licenses/ |
|
20 | # https://rhodecode.com/licenses/ | |
21 |
|
21 | |||
22 | import copy |
|
22 | import copy | |
23 | import logging |
|
23 | import logging | |
24 | import mock |
|
24 | import mock | |
25 | import pyramid |
|
25 | import pyramid | |
26 | import pytest |
|
26 | import pytest | |
27 | import sqlalchemy as sa |
|
27 | import sqlalchemy as sa | |
28 | import webob |
|
28 | import webob | |
29 |
|
29 | |||
30 | from datetime import datetime |
|
30 | from datetime import datetime | |
31 | from pyramid import testing |
|
31 | from pyramid import testing | |
32 |
|
32 | |||
33 |
|
33 | |||
34 | from appenlight.models import DBSession |
|
34 | from appenlight.models import DBSession | |
35 | from appenlight.lib.ext_json import json |
|
35 | from appenlight.lib.ext_json import json | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | log = logging.getLogger(__name__) |
|
38 | log = logging.getLogger(__name__) | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | class DummyContext(object): |
|
41 | class DummyContext(object): | |
42 | pass |
|
42 | pass | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | @pytest.mark.usefixtures('base_app') |
|
45 | @pytest.mark.usefixtures('base_app') | |
46 | class BasicTest(object): |
|
46 | class BasicTest(object): | |
47 | pass |
|
47 | pass | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | @pytest.mark.usefixtures('base_app') |
|
50 | @pytest.mark.usefixtures('base_app') | |
51 | class TestMigration(object): |
|
51 | class TestMigration(object): | |
52 | def test_migration(self): |
|
52 | def test_migration(self): | |
53 | assert 1 == 1 |
|
53 | assert 1 == 1 | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | class TestAPIReports_0_4_Validation(object): |
|
56 | class TestAPIReports_0_4_Validation(object): | |
57 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
57 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) | |
58 | def test_no_payload(self, dummy_json): |
|
58 | def test_no_payload(self, dummy_json): | |
59 | import colander |
|
59 | import colander | |
60 | from appenlight.validators import ReportListSchema_0_4 |
|
60 | from appenlight.validators import ReportListSchema_0_4 | |
61 | utcnow = datetime.utcnow() |
|
61 | utcnow = datetime.utcnow() | |
62 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) |
|
62 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) | |
63 | with pytest.raises(colander.Invalid): |
|
63 | with pytest.raises(colander.Invalid): | |
64 | schema.deserialize(dummy_json) |
|
64 | schema.deserialize(dummy_json) | |
65 |
|
65 | |||
66 | def test_minimal_payload(self, report_04_schema): |
|
66 | def test_minimal_payload(self, report_04_schema): | |
67 | dummy_json = [{}] |
|
67 | dummy_json = [{}] | |
68 | import colander |
|
68 | import colander | |
69 | from appenlight.validators import ReportListSchema_0_4 |
|
69 | from appenlight.validators import ReportListSchema_0_4 | |
70 | utcnow = datetime.utcnow() |
|
70 | utcnow = datetime.utcnow() | |
71 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) |
|
71 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) | |
72 | with pytest.raises(colander.Invalid): |
|
72 | with pytest.raises(colander.Invalid): | |
73 | schema.deserialize(dummy_json) |
|
73 | schema.deserialize(dummy_json) | |
74 |
|
74 | |||
75 | def test_minimal_payload(self): |
|
75 | def test_minimal_payload(self): | |
76 | from appenlight.validators import ReportListSchema_0_4 |
|
76 | from appenlight.validators import ReportListSchema_0_4 | |
77 | dummy_json = [{'report_details': [{}]}] |
|
77 | dummy_json = [{'report_details': [{}]}] | |
78 | utcnow = datetime.utcnow() |
|
78 | utcnow = datetime.utcnow() | |
79 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) |
|
79 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) | |
80 | deserialized = schema.deserialize(dummy_json) |
|
80 | deserialized = schema.deserialize(dummy_json) | |
81 |
|
81 | |||
82 | expected_deserialization = [ |
|
82 | expected_deserialization = [ | |
83 | {'error_type': '', |
|
83 | {'error_type': '', | |
84 | 'language': 'unknown', |
|
84 | 'language': 'unknown', | |
85 | 'report_details': [ |
|
85 | 'report_details': [ | |
86 | {'username': '', |
|
86 | {'username': '', | |
87 | 'traceback': None, |
|
87 | 'traceback': None, | |
88 | 'extra': None, |
|
88 | 'extra': None, | |
89 | 'frameinfo': None, |
|
89 | 'frameinfo': None, | |
90 | 'url': '', |
|
90 | 'url': '', | |
91 | 'ip': None, |
|
91 | 'ip': None, | |
92 | 'start_time': utcnow, |
|
92 | 'start_time': utcnow, | |
93 | 'group_string': None, |
|
93 | 'group_string': None, | |
94 | 'request': {}, |
|
94 | 'request': {}, | |
95 | 'request_stats': None, |
|
95 | 'request_stats': None, | |
96 | 'end_time': None, |
|
96 | 'end_time': None, | |
97 | 'request_id': '', |
|
97 | 'request_id': '', | |
98 | 'message': '', |
|
98 | 'message': '', | |
99 | 'slow_calls': [], |
|
99 | 'slow_calls': [], | |
100 | 'user_agent': ''}], |
|
100 | 'user_agent': ''}], | |
101 | 'server': 'unknown', |
|
101 | 'server': 'unknown', | |
102 | 'occurences': 1, |
|
102 | 'occurences': 1, | |
103 | 'priority': 5, |
|
103 | 'priority': 5, | |
104 | 'view_name': '', |
|
104 | 'view_name': '', | |
105 | 'client': 'unknown', |
|
105 | 'client': 'unknown', | |
106 | 'http_status': 200, |
|
106 | 'http_status': 200, | |
107 | 'error': '', |
|
107 | 'error': '', | |
108 | 'tags': None} |
|
108 | 'tags': None} | |
109 | ] |
|
109 | ] | |
110 | assert deserialized == expected_deserialization |
|
110 | assert deserialized == expected_deserialization | |
111 |
|
111 | |||
112 | def test_full_payload(self): |
|
112 | def test_full_payload(self): | |
113 | import appenlight.tests.payload_examples as payload_examples |
|
113 | import appenlight.tests.payload_examples as payload_examples | |
114 | from appenlight.validators import ReportListSchema_0_4 |
|
114 | from appenlight.validators import ReportListSchema_0_4 | |
115 | utcnow = datetime.utcnow() |
|
115 | utcnow = datetime.utcnow() | |
116 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) |
|
116 | schema = ReportListSchema_0_4().bind(utcnow=utcnow) | |
117 | PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_4) |
|
117 | PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_4) | |
118 | utcnow = datetime.utcnow() |
|
118 | utcnow = datetime.utcnow() | |
119 | PYTHON_PAYLOAD["tags"] = [("foo", 1), ("action", "test"), ("baz", 1.1), |
|
119 | PYTHON_PAYLOAD["tags"] = [("foo", 1), ("action", "test"), ("baz", 1.1), | |
120 | ("date", |
|
120 | ("date", | |
121 | utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))] |
|
121 | utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))] | |
122 | dummy_json = [PYTHON_PAYLOAD] |
|
122 | dummy_json = [PYTHON_PAYLOAD] | |
123 |
|
123 | |||
124 | deserialized = schema.deserialize(dummy_json) |
|
124 | deserialized = schema.deserialize(dummy_json) | |
125 | assert deserialized[0]['error'] == PYTHON_PAYLOAD['error'] |
|
125 | assert deserialized[0]['error'] == PYTHON_PAYLOAD['error'] | |
126 | assert deserialized[0]['language'] == PYTHON_PAYLOAD['language'] |
|
126 | assert deserialized[0]['language'] == PYTHON_PAYLOAD['language'] | |
127 | assert deserialized[0]['server'] == PYTHON_PAYLOAD['server'] |
|
127 | assert deserialized[0]['server'] == PYTHON_PAYLOAD['server'] | |
128 | assert deserialized[0]['priority'] == PYTHON_PAYLOAD['priority'] |
|
128 | assert deserialized[0]['priority'] == PYTHON_PAYLOAD['priority'] | |
129 | assert deserialized[0]['view_name'] == PYTHON_PAYLOAD['view_name'] |
|
129 | assert deserialized[0]['view_name'] == PYTHON_PAYLOAD['view_name'] | |
130 | assert deserialized[0]['client'] == PYTHON_PAYLOAD['client'] |
|
130 | assert deserialized[0]['client'] == PYTHON_PAYLOAD['client'] | |
        assert deserialized[0]['http_status'] == PYTHON_PAYLOAD['http_status']
        assert deserialized[0]['error'] == PYTHON_PAYLOAD['error']
        assert deserialized[0]['occurences'] == PYTHON_PAYLOAD['occurences']
        first_detail = deserialized[0]['report_details'][0]
        payload_detail = PYTHON_PAYLOAD['report_details'][0]
        assert first_detail['username'] == payload_detail['username']
        assert first_detail['traceback'] == payload_detail['traceback']
        assert first_detail['url'] == payload_detail['url']
        assert first_detail['ip'] == payload_detail['ip']
        assert first_detail['start_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
            payload_detail['start_time']
        assert first_detail['ip'] == payload_detail['ip']
        assert first_detail['group_string'] is None
        assert first_detail['request_stats'] == payload_detail['request_stats']
        assert first_detail['end_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
            payload_detail['end_time']
        assert first_detail['request_id'] == payload_detail['request_id']
        assert first_detail['message'] == payload_detail['message']
        assert first_detail['user_agent'] == payload_detail['user_agent']
        slow_call = first_detail['slow_calls'][0]
        expected_slow_call = payload_detail['slow_calls'][0]
        assert slow_call['start'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
            expected_slow_call['start']
        assert slow_call['end'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
            expected_slow_call['end']
        assert slow_call['statement'] == expected_slow_call['statement']
        assert slow_call['parameters'] == expected_slow_call['parameters']
        assert slow_call['type'] == expected_slow_call['type']
        assert slow_call['subtype'] == expected_slow_call['subtype']
        assert slow_call['location'] == ''
        assert deserialized[0]['tags'] == [
            ('foo', 1), ('action', 'test'),
            ('baz', 1.1), ('date', utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))]


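# Sentry protocol v7 compatibility: these tests feed captured Sentry
# payloads through parse_sentry_event() and check that log events and
# error reports are classified and translated into AppEnlight's fields.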
class TestSentryProto_7(object):
    def test_log_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.lib.enums import ParsedSentryEventType
        from appenlight.lib.utils.sentry import parse_sentry_event
        event_dict, event_type = parse_sentry_event(
            payload_examples.SENTRY_LOG_PAYLOAD_7)
        assert ParsedSentryEventType.LOG == event_type
        assert event_dict['log_level'] == 'CRITICAL'
        assert event_dict['message'] == 'TEST from django logging'
        assert event_dict['namespace'] == 'testlogger'
        assert event_dict['request_id'] == '9a6172f2e6d2444582f83a6c333d9cfb'
        assert event_dict['server'] == 'ergo-virtual-machine'
        assert event_dict['date'] == datetime.utcnow().date().strftime(
            '%Y-%m-%dT%H:%M:%SZ')
        tags = [('site', 'example.com'),
                ('sys.argv', ["'manage.py'", "'runserver'"]),
                ('price', 6),
                ('tag', "'extra'"),
                ('dupa', True),
                ('project', 'sentry'),
                ('sentry_culprit', 'testlogger in index'),
                ('sentry_language', 'python'),
                ('sentry_release', 'test')]
        assert sorted(event_dict['tags']) == sorted(tags)

    def test_report_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.lib.enums import ParsedSentryEventType
        from appenlight.lib.utils.sentry import parse_sentry_event
        utcnow = datetime.utcnow().date().strftime('%Y-%m-%dT%H:%M:%SZ')
        event_dict, event_type = parse_sentry_event(
            payload_examples.SENTRY_PYTHON_PAYLOAD_7)
        assert ParsedSentryEventType.ERROR_REPORT == event_type
        assert event_dict['client'] == 'sentry'
        assert event_dict[
            'error'] == 'Exception: test 500 ' \
                        '\u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105'
        assert event_dict['language'] == 'python'
        assert event_dict['ip'] == '127.0.0.1'
        assert event_dict['request_id'] == '9fae652c8c1c4d6a8eee09260f613a98'
        assert event_dict['server'] == 'ergo-virtual-machine'
        assert event_dict['start_time'] == utcnow
        assert event_dict['url'] == 'http://127.0.0.1:8000/error'
        assert event_dict['user_agent'] == 'Mozilla/5.0 (X11; Linux x86_64) ' \
                                           'AppleWebKit/537.36 (KHTML, ' \
                                           'like Gecko) Chrome/47.0.2526.106 ' \
                                           'Safari/537.36'
        assert event_dict['view_name'] == 'djangoapp.views in error'
        tags = [('site', 'example.com'), ('sentry_release', 'test')]
        assert sorted(event_dict['tags']) == sorted(tags)
        extra = [('sys.argv', ["'manage.py'", "'runserver'"]),
                 ('project', 'sentry')]
        assert sorted(event_dict['extra']) == sorted(extra)
        request = event_dict['request']
        assert request['url'] == 'http://127.0.0.1:8000/error'
        assert request['cookies'] == {'appenlight': 'X'}
        assert request['data'] is None
        assert request['method'] == 'GET'
        assert request['query_string'] == ''
        assert request['env'] == {'REMOTE_ADDR': '127.0.0.1',
                                  'SERVER_NAME': 'localhost',
                                  'SERVER_PORT': '8000'}
        assert request['headers'] == {
            'Accept': 'text/html,application/xhtml+xml,'
                      'application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Accept-Encoding': 'gzip, deflate, sdch',
            'Accept-Language': 'en-US,en;q=0.8,pl;q=0.6',
            'Connection': 'keep-alive',
            'Content-Length': '',
            'Content-Type': 'text/plain',
            'Cookie': 'appenlight=X',
            'Dnt': '1',
            'Host': '127.0.0.1:8000',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) '
                          'AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/47.0.2526.106 Safari/537.36'}
        traceback = event_dict['traceback']
        assert traceback[0]['cline'] == 'response = wrapped_callback(request, ' \
                                        '*callback_args, **callback_kwargs)'
        assert traceback[0]['file'] == 'django/core/handlers/base.py'
        assert traceback[0]['fn'] == 'get_response'
        assert traceback[0]['line'] == 111
        assert traceback[0]['module'] == 'django.core.handlers.base'

        assert traceback[1]['cline'] == "raise Exception(u'test 500 " \
                                        "\u0142\xf3\u201c\u0107\u201c\u0107" \
                                        "\u017c\u0105')"
        assert traceback[1]['file'] == 'djangoapp/views.py'
        assert traceback[1]['fn'] == 'error'
        assert traceback[1]['line'] == 84
        assert traceback[1]['module'] == 'djangoapp.views'
        assert sorted(traceback[1]['vars']) == sorted([
            ('c',
             '<sqlite3.Cursor object at 0x7fe7c82af8f0>'),
            ('request',
             '<WSGIRequest at 0x140633490316304>'),
            ('conn',
             '<sqlite3.Connection object at 0x7fe7c8b23bf8>')])


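# Schema-level validation of the v0.5 report API: empty or malformed
# payloads must raise colander.Invalid, minimal payloads get defaults
# filled in, and full payloads should round-trip unchanged.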
class TestAPIReports_0_5_Validation(object):
    @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
    def test_no_payload(self, dummy_json):
        import colander
        from appenlight.validators import ReportListSchema_0_5
        utcnow = datetime.utcnow()
        schema = ReportListSchema_0_5().bind(utcnow=utcnow)
        with pytest.raises(colander.Invalid):
            schema.deserialize(dummy_json)

    def test_minimal_payload(self):
        dummy_json = [{}]
        import colander
        from appenlight.validators import ReportListSchema_0_5
        utcnow = datetime.utcnow()
        schema = ReportListSchema_0_5().bind(utcnow=utcnow)
        with pytest.raises(colander.Invalid):
            schema.deserialize(dummy_json)

    def test_minimal_payload_with_report_details(self):
        dummy_json = [{'report_details': [{}]}]
        from appenlight.validators import ReportListSchema_0_5
        utcnow = datetime.utcnow()
        schema = ReportListSchema_0_5().bind(utcnow=utcnow)

        deserialized = schema.deserialize(dummy_json)

        expected_deserialization = [
            {'language': 'unknown',
             'server': 'unknown',
             'occurences': 1,
             'priority': 5,
             'view_name': '',
             'client': 'unknown',
             'http_status': 200,
             'error': '',
             'tags': None,
             'username': '',
             'traceback': None,
             'extra': None,
             'url': '',
             'ip': None,
             'start_time': utcnow,
             'group_string': None,
             'request': {},
             'request_stats': None,
             'end_time': None,
             'request_id': '',
             'message': '',
             'slow_calls': [],
             'user_agent': ''
             }
        ]
        assert deserialized == expected_deserialization

    def test_full_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.validators import ReportListSchema_0_5
        PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_5)
        utcnow = datetime.utcnow()
        schema = ReportListSchema_0_5().bind(utcnow=utcnow)
        PYTHON_PAYLOAD["tags"] = [("foo", 1), ("action", "test"), ("baz", 1.1),
                                  ("date",
                                   utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))]
        dummy_json = [PYTHON_PAYLOAD]
        deserialized = schema.deserialize(dummy_json)[0]
        assert deserialized['error'] == PYTHON_PAYLOAD['error']
        assert deserialized['language'] == PYTHON_PAYLOAD['language']
        assert deserialized['server'] == PYTHON_PAYLOAD['server']
        assert deserialized['priority'] == PYTHON_PAYLOAD['priority']
        assert deserialized['view_name'] == PYTHON_PAYLOAD['view_name']
        assert deserialized['client'] == PYTHON_PAYLOAD['client']
        assert deserialized['http_status'] == PYTHON_PAYLOAD['http_status']
        assert deserialized['error'] == PYTHON_PAYLOAD['error']
        assert deserialized['occurences'] == PYTHON_PAYLOAD['occurences']
        assert deserialized['username'] == PYTHON_PAYLOAD['username']
        assert deserialized['traceback'] == PYTHON_PAYLOAD['traceback']
        assert deserialized['url'] == PYTHON_PAYLOAD['url']
        assert deserialized['ip'] == PYTHON_PAYLOAD['ip']
        assert deserialized['start_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
            PYTHON_PAYLOAD['start_time']
        assert deserialized['ip'] == PYTHON_PAYLOAD['ip']
        assert deserialized['group_string'] is None
        assert deserialized['request_stats'] == PYTHON_PAYLOAD['request_stats']
        assert deserialized['end_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \
            PYTHON_PAYLOAD['end_time']
        assert deserialized['request_id'] == PYTHON_PAYLOAD['request_id']
        assert deserialized['message'] == PYTHON_PAYLOAD['message']
        assert deserialized['user_agent'] == PYTHON_PAYLOAD['user_agent']
        assert deserialized['slow_calls'][0]['start'].strftime(
            '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][
            'start']
        assert deserialized['slow_calls'][0]['end'].strftime(
            '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][
            'end']
        assert deserialized['slow_calls'][0]['statement'] == \
            PYTHON_PAYLOAD['slow_calls'][0]['statement']
        assert deserialized['slow_calls'][0]['parameters'] == \
            PYTHON_PAYLOAD['slow_calls'][0]['parameters']
        assert deserialized['slow_calls'][0]['type'] == \
            PYTHON_PAYLOAD['slow_calls'][0]['type']
        assert deserialized['slow_calls'][0]['subtype'] == \
            PYTHON_PAYLOAD['slow_calls'][0]['subtype']
        assert deserialized['slow_calls'][0]['location'] == ''
        assert deserialized['tags'] == [
            ('foo', 1), ('action', 'test'),
            ('baz', 1.1), ('date', utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))]


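# Log API validation: the bound log_schema fixture is exercised with
# missing, minimal and complete payloads, including date strings of
# decreasing precision.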
@pytest.mark.usefixtures('log_schema')
class TestAPILogsValidation(object):
    @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
    def test_no_payload(self, dummy_json, log_schema):
        import colander

        with pytest.raises(colander.Invalid):
            log_schema.deserialize(dummy_json)

    def test_minimal_payload(self, log_schema):
        dummy_json = [{}]
        deserialized = log_schema.deserialize(dummy_json)[0]
        expected = {'log_level': 'UNKNOWN',
                    'namespace': '',
                    'server': 'unknown',
                    'request_id': '',
                    'primary_key': None,
                    'date': datetime.utcnow(),
                    'message': '',
                    'tags': None}
        assert deserialized['log_level'] == expected['log_level']
        assert deserialized['message'] == expected['message']
        assert deserialized['namespace'] == expected['namespace']
        assert deserialized['request_id'] == expected['request_id']
        assert deserialized['server'] == expected['server']
        assert deserialized['tags'] == expected['tags']
        assert deserialized['primary_key'] == expected['primary_key']

    def test_normal_payload(self, log_schema):
        import appenlight.tests.payload_examples as payload_examples
        deserialized = log_schema.deserialize(payload_examples.LOG_EXAMPLES)[0]
        expected = payload_examples.LOG_EXAMPLES[0]
        assert deserialized['log_level'] == expected['log_level']
        assert deserialized['message'] == expected['message']
        assert deserialized['namespace'] == expected['namespace']
        assert deserialized['request_id'] == expected['request_id']
        assert deserialized['server'] == expected['server']
        assert deserialized['date'].strftime('%Y-%m-%dT%H:%M:%S.%f') == \
            expected['date']
        assert deserialized['tags'][0][0] == "tag_name"
        assert deserialized['tags'][0][1] == "tag_value"
        assert deserialized['tags'][1][0] == "tag_name2"
        assert deserialized['tags'][1][1] == 2

    def test_normal_payload_date_without_microseconds(self, log_schema):
        import appenlight.tests.payload_examples as payload_examples
        LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
        LOG_EXAMPLE[0]['date'] = datetime.utcnow().strftime(
            '%Y-%m-%dT%H:%M:%S')
        deserialized = log_schema.deserialize(LOG_EXAMPLE)
        assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M:%S') == \
            LOG_EXAMPLE[0]['date']

    def test_normal_payload_date_without_seconds(self, log_schema):
        import appenlight.tests.payload_examples as payload_examples
        LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
        # use the full current timestamp so the hour/minute part of the
        # round-trip is non-trivial (a .date() call here would pin it to
        # midnight)
        LOG_EXAMPLE[0]['date'] = datetime.utcnow().strftime(
            '%Y-%m-%dT%H:%M')
        deserialized = log_schema.deserialize(LOG_EXAMPLE)
        assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') == \
            LOG_EXAMPLE[0]['date']

    def test_payload_empty_date(self, log_schema):
        import appenlight.tests.payload_examples as payload_examples
        LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
        LOG_EXAMPLE[0]['date'] = None
        deserialized = log_schema.deserialize(LOG_EXAMPLE)
        # the schema should backfill a missing date with the current time
        assert deserialized[0]['date'] is not None

    def test_payload_no_date(self, log_schema):
        import appenlight.tests.payload_examples as payload_examples
        LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES)
        LOG_EXAMPLE[0].pop('date', None)
        deserialized = log_schema.deserialize(LOG_EXAMPLE)
        # as above, the schema should backfill the date when it is absent
        assert deserialized[0]['date'] is not None


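# General (custom) metrics validation: namespace, server_name and tags
# should survive deserialization, with sane defaults for empty input.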
@pytest.mark.usefixtures('general_metrics_schema')
class TestAPIGeneralMetricsValidation(object):
    @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
    def test_no_payload(self, dummy_json, general_metrics_schema):
        import colander

        with pytest.raises(colander.Invalid):
            general_metrics_schema.deserialize(dummy_json)

    def test_minimal_payload(self, general_metrics_schema):
        dummy_json = [{}]
        deserialized = general_metrics_schema.deserialize(dummy_json)[0]
        expected = {'namespace': '',
                    'server_name': 'unknown',
                    'tags': None,
                    'timestamp': datetime.utcnow()}
        assert deserialized['namespace'] == expected['namespace']
        assert deserialized['server_name'] == expected['server_name']
        assert deserialized['tags'] == expected['tags']

    def test_normal_payload(self, general_metrics_schema):
        import appenlight.tests.payload_examples as payload_examples
        dummy_json = [payload_examples.METRICS_PAYLOAD]
        deserialized = general_metrics_schema.deserialize(dummy_json)[0]
        expected = {'namespace': 'some.monitor',
                    'server_name': 'server.name',
                    'tags': [('usage_foo', 15.5), ('usage_bar', 63)],
                    'timestamp': datetime.utcnow()}
        assert deserialized['namespace'] == expected['namespace']
        assert deserialized['server_name'] == expected['server_name']
        assert deserialized['tags'] == expected['tags']


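# Request metrics validation: per-view timing breakdowns (sql, nosql,
# remote, tmpl, custom) are deserialized into (view_name, stats) tuples.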
@pytest.mark.usefixtures('request_metrics_schema')
class TestAPIRequestMetricsValidation(object):
    @pytest.mark.parametrize('dummy_json', ['', {}, [], None])
    def test_no_payload(self, dummy_json, request_metrics_schema):
        import colander

        with pytest.raises(colander.Invalid):
            request_metrics_schema.deserialize(dummy_json)

    def test_normal_payload(self, request_metrics_schema):
        import appenlight.tests.payload_examples as payload_examples
        dummy_json = payload_examples.REQUEST_METRICS_EXAMPLES
        deserialized = request_metrics_schema.deserialize(dummy_json)[0]
        expected = {'metrics': [('dir/module:func',
                                 {'custom': 0.0,
                                  'custom_calls': 0.0,
                                  'main': 0.01664,
                                  'nosql': 0.00061,
                                  'nosql_calls': 23.0,
                                  'remote': 0.0,
                                  'remote_calls': 0.0,
                                  'requests': 1,
                                  'sql': 0.00105,
                                  'sql_calls': 2.0,
                                  'tmpl': 0.0,
                                  'tmpl_calls': 0.0}),
                                ('SomeView.function',
                                 {'custom': 0.0,
                                  'custom_calls': 0.0,
                                  'main': 0.647261,
                                  'nosql': 0.306554,
                                  'nosql_calls': 140.0,
                                  'remote': 0.0,
                                  'remote_calls': 0.0,
                                  'requests': 28,
                                  'sql': 0.0,
                                  'sql_calls': 0.0,
                                  'tmpl': 0.0,
                                  'tmpl_calls': 0.0})],
                    'server': 'some.server.hostname',
                    'timestamp': datetime.utcnow()}
        assert deserialized['server'] == expected['server']
        metric = deserialized['metrics'][0]
        expected_metric = expected['metrics'][0]
        assert metric[0] == expected_metric[0]
        assert sorted(metric[1].items()) == sorted(expected_metric[1].items())


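# View-level report API tests: these go through reports_create() against
# a real database (see the with_migrations/clean_tables fixtures) and
# verify how incoming reports are persisted and grouped.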
@pytest.mark.usefixtures('default_application')
@pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
class TestAPIReportsView(object):
    def test_no_json_payload(self, default_application):
        import colander
        from appenlight.models.services.application import ApplicationService
        from appenlight.views.api import reports_create

        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request = testing.DummyRequest(
            headers={'Content-Type': 'application/json'})
        request.unsafe_json_body = ''
        request.context = context
        route = mock.Mock()
        route.name = 'api_reports'
        request.matched_route = route
        with pytest.raises(colander.Invalid):
            reports_create(request)

    def test_single_proper_json_0_5_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.views.api import reports_create
        from appenlight.models.services.application import ApplicationService
        from appenlight.models.report_group import ReportGroup
        route = mock.Mock()
        route.name = 'api_reports'
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.matched_route = route
        PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
        request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD)]
        reports_create(request)
        query = DBSession.query(ReportGroup)
        report = query.first()
        assert query.count() == 1
        assert report.total_reports == 1

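    # two identical payloads should collapse into a single ReportGroup
    # with total_reports incremented, not into a second group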
    def test_grouping_0_5(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.views.api import reports_create
        from appenlight.models.services.application import ApplicationService
        from appenlight.models.report_group import ReportGroup
        route = mock.Mock()
        route.name = 'api_reports'
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.matched_route = route
        PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
        request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD),
                                    copy.deepcopy(PYTHON_PAYLOAD)]
        reports_create(request)
        query = DBSession.query(ReportGroup)
        report = query.first()
        assert query.count() == 1
        assert report.total_reports == 2

    def test_grouping_different_reports_0_5(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.views.api import reports_create
        from appenlight.models.services.application import ApplicationService
        from appenlight.models.report_group import ReportGroup
        route = mock.Mock()
        route.name = 'api_reports'
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.matched_route = route
        PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5
        PARSED_REPORT_404 = payload_examples.PARSED_REPORT_404
        request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD),
                                    copy.deepcopy(PARSED_REPORT_404)]
        reports_create(request)
        query = DBSession.query(ReportGroup)
        report = query.first()
        assert query.count() == 2
        assert report.total_reports == 1


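# Airbrake XML compatibility: a Ruby notifier payload is parsed into the
# generic report dict and must pass the same 0.5 report schema.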
@pytest.mark.usefixtures('default_application')
@pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
class TestAirbrakeXMLView(object):

    def test_normal_payload_parsing(self):
        import datetime
        import defusedxml.ElementTree as ElementTree
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.lib.utils.airbrake import parse_airbrake_xml
        from appenlight.validators import ReportListSchema_0_5

        context = DummyContext()
        request = testing.DummyRequest(
            headers={'Content-Type': 'application/xml'})
        request.context = context
        request.context.possibly_public = False
        root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE)
        request.context.airbrake_xml_etree = root
        error_dict = parse_airbrake_xml(request)
        schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow())
        deserialized_report = schema.deserialize([error_dict])[0]
        assert deserialized_report['client'] == 'Airbrake Notifier'
        assert deserialized_report['error'] == 'NameError: undefined local variable or method `sdfdfdf\' for #<#<Class:0x000000039a8b90>:0x00000002c53df0>'
        assert deserialized_report['http_status'] == 500
        assert deserialized_report['language'] == 'unknown'
        assert deserialized_report['message'] == ''
        assert deserialized_report['occurences'] == 1
        assert deserialized_report['priority'] == 5
        d_request = deserialized_report['request']
        assert d_request['GET'] == {'test': '1234'}
        assert d_request['action_dispatch.request.parameters'] == {
            'action': 'index',
            'controller': 'welcome',
            'test': '1234'}
        assert deserialized_report['request_id'] == 'c11b2267f3ad8b00a1768cae35559fa1'
        assert deserialized_report['server'] == 'ergo-desktop'
        assert deserialized_report['traceback'][0] == {
            'cline': 'block in start_thread',
            'file': '/home/ergo/.rbenv/versions/1.9.3-p327/lib/ruby/1.9.1/webrick/server.rb',
            'fn': 'block in start_thread',
            'line': '191',
            'module': '',
            'vars': {}}
        assert deserialized_report['traceback'][-1] == {
            'cline': '_app_views_welcome_index_html_erb___2570061166873166679_31748940',
            'file': '[PROJECT_ROOT]/app/views/welcome/index.html.erb',
            'fn': '_app_views_welcome_index_html_erb___2570061166873166679_31748940',
            'line': '3',
            'module': '',
            'vars': {}}
        assert deserialized_report['url'] == 'http://0.0.0.0:3000/welcome/index?test=1234'
        assert deserialized_report['view_name'] == 'welcome:index'

    def test_normal_payload_view(self):
        import defusedxml.ElementTree as ElementTree
        import appenlight.tests.payload_examples as payload_examples

        from appenlight.models.services.application import ApplicationService
        from appenlight.views.api import airbrake_xml_compat

        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request = testing.DummyRequest(
            headers={'Content-Type': 'application/xml'})
        request.context = context
        request.context.possibly_public = False
        root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE)
        request.context.airbrake_xml_etree = root
        route = mock.Mock()
        route.name = 'api_airbrake'
        request.matched_route = route
        result = airbrake_xml_compat(request)
        assert '<notice><id>' in result


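# View-level log API tests: logs_create() should validate payloads,
# persist single and multiple entries, and honor primary_key rewriting.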
694 | @pytest.mark.usefixtures('default_application') |
|
694 | @pytest.mark.usefixtures('default_application') | |
695 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
695 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') | |
696 | class TestAPILogView(object): |
|
696 | class TestAPILogView(object): | |
697 | def test_no_json_payload(self, base_app): |
|
697 | def test_no_json_payload(self, base_app): | |
698 | import colander |
|
698 | import colander | |
699 | from appenlight.models.services.application import ApplicationService |
|
699 | from appenlight.models.services.application import ApplicationService | |
700 | from appenlight.views.api import logs_create |
|
700 | from appenlight.views.api import logs_create | |
701 |
|
701 | |||
702 | context = DummyContext() |
|
702 | context = DummyContext() | |
703 | context.resource = ApplicationService.by_id(1) |
|
703 | context.resource = ApplicationService.by_id(1) | |
704 | request = testing.DummyRequest( |
|
704 | request = testing.DummyRequest( | |
705 | headers={'Content-Type': 'application/json'}) |
|
705 | headers={'Content-Type': 'application/json'}) | |
706 | request.context = context |
|
706 | request.context = context | |
707 | request.registry = base_app.registry |
|
707 | request.registry = base_app.registry | |
708 | request.unsafe_json_body = '' |
|
708 | request.unsafe_json_body = '' | |
709 | route = mock.Mock() |
|
709 | route = mock.Mock() | |
710 | route.name = 'api_logs' |
|
710 | route.name = 'api_logs' | |
711 | request.matched_route = route |
|
711 | request.matched_route = route | |
712 | with pytest.raises(colander.Invalid): |
|
712 | with pytest.raises(colander.Invalid): | |
713 | response = logs_create(request) |
|
713 | response = logs_create(request) | |
714 |
|
714 | |||
715 | def test_single_json_payload(self): |
|
715 | def test_single_json_payload(self): | |
716 | import appenlight.tests.payload_examples as payload_examples |
|
716 | import appenlight.tests.payload_examples as payload_examples | |
717 | from appenlight.models.log import Log |
|
717 | from appenlight.models.log import Log | |
718 | from appenlight.views.api import logs_create |
|
718 | from appenlight.views.api import logs_create | |
719 | from appenlight.models.services.application import ApplicationService |
|
719 | from appenlight.models.services.application import ApplicationService | |
720 | route = mock.Mock() |
|
720 | route = mock.Mock() | |
721 | route.name = 'api_logs' |
|
721 | route.name = 'api_logs' | |
722 | request = pyramid.threadlocal.get_current_request() |
|
722 | request = pyramid.threadlocal.get_current_request() | |
723 | context = DummyContext() |
|
723 | context = DummyContext() | |
724 | context.resource = ApplicationService.by_id(1) |
|
724 | context.resource = ApplicationService.by_id(1) | |
725 | request.context = context |
|
725 | request.context = context | |
726 | request.matched_route = route |
|
726 | request.matched_route = route | |
727 | request.unsafe_json_body = [copy.deepcopy( |
|
727 | request.unsafe_json_body = [copy.deepcopy( | |
728 | payload_examples.LOG_EXAMPLES[0])] |
|
728 | payload_examples.LOG_EXAMPLES[0])] | |
729 | logs_create(request) |
|
729 | logs_create(request) | |
730 | query = DBSession.query(Log) |
|
730 | query = DBSession.query(Log) | |
731 | log = query.first() |
|
731 | log = query.first() | |
732 | assert query.count() == 1 |
|
732 | assert query.count() == 1 | |
733 | assert log.message == "OMG ValueError happened" |
|
733 | assert log.message == "OMG ValueError happened" | |
734 |
|
734 | |||
735 | def test_multiple_json_payload(self): |
|
735 | def test_multiple_json_payload(self): | |
736 | import appenlight.tests.payload_examples as payload_examples |
|
736 | import appenlight.tests.payload_examples as payload_examples | |
737 | from appenlight.models.log import Log |
|
737 | from appenlight.models.log import Log | |
738 | from appenlight.views.api import logs_create |
|
738 | from appenlight.views.api import logs_create | |
739 | from appenlight.models.services.application import ApplicationService |
|
739 | from appenlight.models.services.application import ApplicationService | |
740 | route = mock.Mock() |
|
740 | route = mock.Mock() | |
741 | route.name = 'api_logs' |
|
741 | route.name = 'api_logs' | |
742 | request = pyramid.threadlocal.get_current_request() |
|
742 | request = pyramid.threadlocal.get_current_request() | |
743 | context = DummyContext() |
|
743 | context = DummyContext() | |
744 | context.resource = ApplicationService.by_id(1) |
|
744 | context.resource = ApplicationService.by_id(1) | |
745 | request.context = context |
|
745 | request.context = context | |
746 | request.matched_route = route |
|
746 | request.matched_route = route | |
747 | LOG_PAYLOAD = payload_examples.LOG_EXAMPLES[0] |
|
747 | LOG_PAYLOAD = payload_examples.LOG_EXAMPLES[0] | |
748 | LOG_PAYLOAD2 = payload_examples.LOG_EXAMPLES[1] |
|
748 | LOG_PAYLOAD2 = payload_examples.LOG_EXAMPLES[1] | |
749 | request.unsafe_json_body = copy.deepcopy([LOG_PAYLOAD, LOG_PAYLOAD2]) |
|
749 | request.unsafe_json_body = copy.deepcopy([LOG_PAYLOAD, LOG_PAYLOAD2]) | |
750 | logs_create(request) |
|
750 | logs_create(request) | |
751 | query = DBSession.query(Log).order_by(sa.asc(Log.log_id)) |
|
751 | query = DBSession.query(Log).order_by(sa.asc(Log.log_id)) | |
752 | assert query.count() == 2 |
|
752 | assert query.count() == 2 | |
753 | assert query[0].message == "OMG ValueError happened" |
|
753 | assert query[0].message == "OMG ValueError happened" | |
754 | assert query[1].message == "OMG ValueError happened2" |
|
754 | assert query[1].message == "OMG ValueError happened2" | |
755 |
|
755 | |||
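
    # Log entries that share a primary_key overwrite each other, so two
    # submitted payloads must collapse into a single row holding the last one.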
    def test_public_key_rewriting(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.models.log import Log
        from appenlight.views.api import logs_create
        from appenlight.models.services.application import ApplicationService
        route = mock.Mock()
        route.name = 'api_logs'
        request = pyramid.threadlocal.get_current_request()
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.matched_route = route

        LOG_PAYLOAD = copy.deepcopy(payload_examples.LOG_EXAMPLES[0])
        LOG_PAYLOAD2 = copy.deepcopy(payload_examples.LOG_EXAMPLES[1])
        LOG_PAYLOAD['primary_key'] = 'X2'
        LOG_PAYLOAD2['primary_key'] = 'X2'
        request.unsafe_json_body = [LOG_PAYLOAD, LOG_PAYLOAD2]
        logs_create(request)

        query = DBSession.query(Log).order_by(sa.asc(Log.log_id))
        assert query.count() == 1
        assert query[0].message == "OMG ValueError happened2"
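

# The metrics API mirrors the log API: an empty body must fail colander
# validation, while each METRICS_PAYLOAD document becomes a Metric row.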
@pytest.mark.usefixtures('default_application')
@pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
class TestAPIGeneralMetricsView(object):
    def test_no_json_payload(self, base_app):
        import colander
        from appenlight.models.services.application import ApplicationService
        from appenlight.views.api import general_metrics_create
        route = mock.Mock()
        route.name = 'api_general_metrics'
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request = testing.DummyRequest(
            headers={'Content-Type': 'application/json'})
        request.context = context
        request.registry = base_app.registry
        request.unsafe_json_body = ''
        request.matched_route = route
        with pytest.raises(colander.Invalid):
            general_metrics_create(request)

    def test_single_json_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.models.metric import Metric
        from appenlight.views.api import general_metrics_create
        from appenlight.models.services.application import ApplicationService
        route = mock.Mock()
        route.name = 'api_general_metric'
        request = pyramid.threadlocal.get_current_request()
        request.matched_route = route
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.unsafe_json_body = payload_examples.METRICS_PAYLOAD
        general_metrics_create(request)
        query = DBSession.query(Metric)
        metric = query.first()
        assert query.count() == 1
        assert metric.namespace == 'some.monitor'

    def test_multiple_json_payload(self):
        import appenlight.tests.payload_examples as payload_examples
        from appenlight.models.metric import Metric
        from appenlight.views.api import general_metrics_create
        from appenlight.models.services.application import ApplicationService
        route = mock.Mock()
        route.name = 'api_general_metrics'
        request = pyramid.threadlocal.get_current_request()
        request.matched_route = route
        context = DummyContext()
        context.resource = ApplicationService.by_id(1)
        request.context = context
        request.unsafe_json_body = [
            copy.deepcopy(payload_examples.METRICS_PAYLOAD),
            copy.deepcopy(payload_examples.METRICS_PAYLOAD),
        ]
        general_metrics_create(request)
        query = DBSession.query(Metric)
        metric = query.first()
        assert query.count() == 2
        assert metric.namespace == 'some.monitor'
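

# The regex below targets default object reprs such as
# '<Foo object at 0x...>' so that otherwise-identical error messages can be
# grouped together despite volatile memory addresses.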
class TestGroupingMessageReplacements(object):
    def replace_default_repr_python(self):
        import re
        test_str = '''
ConnectionError: ConnectionError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) caused by: ConnectTimeoutError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)'))
'''
        regex = r'<(.*?) object at (.*?)>'
        # The original body ended here without an assertion; a minimal
        # completion (an assumption, not the author's code) that strips the
        # volatile memory addresses so recurring errors can be grouped:
        normalized = re.sub(regex, r'<\1 object at ADDR>', test_str)
        assert '0x7f87a0ba9fd0' not in normalized
|
850 | class TestRulesKeyGetter(object): | |
851 | def test_default_dict_getter_top_key(self): |
|
851 | def test_default_dict_getter_top_key(self): | |
852 | from appenlight.lib.rule import Rule |
|
852 | from appenlight.lib.rule import Rule | |
853 | struct = { |
|
853 | struct = { | |
854 | "a": { |
|
854 | "a": { | |
855 | "b": 'b', |
|
855 | "b": 'b', | |
856 | "c": { |
|
856 | "c": { | |
857 | "d": 'd', |
|
857 | "d": 'd', | |
858 | "g": { |
|
858 | "g": { | |
859 | "h": 'h' |
|
859 | "h": 'h' | |
860 | } |
|
860 | } | |
861 | }, |
|
861 | }, | |
862 | "e": 'e' |
|
862 | "e": 'e' | |
863 | }, |
|
863 | }, | |
864 | "f": 'f' |
|
864 | "f": 'f' | |
865 | } |
|
865 | } | |
866 | result = Rule.default_dict_struct_getter(struct, "a") |
|
866 | result = Rule.default_dict_struct_getter(struct, "a") | |
867 | assert result == struct['a'] |
|
867 | assert result == struct['a'] | |
868 |
|
868 | |||
869 | def test_default_dict_getter_sub_key(self): |
|
869 | def test_default_dict_getter_sub_key(self): | |
870 | from appenlight.lib.rule import Rule |
|
870 | from appenlight.lib.rule import Rule | |
871 | struct = { |
|
871 | struct = { | |
872 | "a": { |
|
872 | "a": { | |
873 | "b": 'b', |
|
873 | "b": 'b', | |
874 | "c": { |
|
874 | "c": { | |
875 | "d": 'd', |
|
875 | "d": 'd', | |
876 | "g": { |
|
876 | "g": { | |
877 | "h": 'h' |
|
877 | "h": 'h' | |
878 | } |
|
878 | } | |
879 | }, |
|
879 | }, | |
880 | "e": 'e' |
|
880 | "e": 'e' | |
881 | }, |
|
881 | }, | |
882 | "f": 'f' |
|
882 | "f": 'f' | |
883 | } |
|
883 | } | |
884 | result = Rule.default_dict_struct_getter(struct, 'a:b') |
|
884 | result = Rule.default_dict_struct_getter(struct, 'a:b') | |
885 | assert result == struct['a']['b'] |
|
885 | assert result == struct['a']['b'] | |
886 | result = Rule.default_dict_struct_getter(struct, 'a:c:d') |
|
886 | result = Rule.default_dict_struct_getter(struct, 'a:c:d') | |
887 | assert result == struct['a']['c']['d'] |
|
887 | assert result == struct['a']['c']['d'] | |
888 |
|
888 | |||
889 | def test_default_obj_getter_top_key(self): |
|
889 | def test_default_obj_getter_top_key(self): | |
890 | from appenlight.lib.rule import Rule |
|
890 | from appenlight.lib.rule import Rule | |
891 | class TestStruct(object): |
|
891 | class TestStruct(object): | |
892 | def __init__(self, a, b): |
|
892 | def __init__(self, a, b): | |
893 | self.a = a |
|
893 | self.a = a | |
894 | self.b = b |
|
894 | self.b = b | |
895 |
|
895 | |||
896 | struct = TestStruct(a='a', |
|
896 | struct = TestStruct(a='a', | |
897 | b=TestStruct(a='x', b='y')) |
|
897 | b=TestStruct(a='x', b='y')) | |
898 | result = Rule.default_obj_struct_getter(struct, "a") |
|
898 | result = Rule.default_obj_struct_getter(struct, "a") | |
899 | assert result == struct.a |
|
899 | assert result == struct.a | |
900 |
|
900 | |||
901 | def test_default_obj_getter_sub_key(self): |
|
901 | def test_default_obj_getter_sub_key(self): | |
902 | from appenlight.lib.rule import Rule |
|
902 | from appenlight.lib.rule import Rule | |
903 | class TestStruct(object): |
|
903 | class TestStruct(object): | |
904 | def __init__(self, name, a, b): |
|
904 | def __init__(self, name, a, b): | |
905 | self.name = name |
|
905 | self.name = name | |
906 | self.a = a |
|
906 | self.a = a | |
907 | self.b = b |
|
907 | self.b = b | |
908 |
|
908 | |||
909 | def __repr__(self): |
|
909 | def __repr__(self): | |
910 | return '<obj {}>'.format(self.name) |
|
910 | return '<obj {}>'.format(self.name) | |
911 |
|
911 | |||
912 | c = TestStruct('c', a=5, b='z') |
|
912 | c = TestStruct('c', a=5, b='z') | |
913 | b = TestStruct('b', a=c, b='y') |
|
913 | b = TestStruct('b', a=c, b='y') | |
914 | struct = TestStruct('a', a='a', b=b) |
|
914 | struct = TestStruct('a', a='a', b=b) | |
915 | result = Rule.default_obj_struct_getter(struct, 'b:b') |
|
915 | result = Rule.default_obj_struct_getter(struct, 'b:b') | |
916 | assert result == struct.b.b |
|
916 | assert result == struct.b.b | |
917 | result = Rule.default_obj_struct_getter(struct, 'b:a:b') |
|
917 | result = Rule.default_obj_struct_getter(struct, 'b:a:b') | |
918 | assert result == struct.b.a.b |
|
918 | assert result == struct.b.a.b | |
919 |
|
919 | |||
920 |
|
920 | |||
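

# A leaf rule is a single {op, field, value} dict; Rule.match() normalizes
# both sides to the type declared for the field in report_type_matrix, so
# "500" and 500 compare equal for an int field.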
@pytest.mark.usefixtures('report_type_matrix')
class TestRulesParsing(object):
    @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
        ('eq', 500, 500, True),
        ('eq', 600, 500, False),
        ('eq', 300, 500, False),
        ('eq', "300", 500, False),
        ('eq', "600", 500, False),
        ('eq', "500", 500, True),
        ('ne', 500, 500, False),
        ('ne', 600, 500, True),
        ('ne', 300, 500, True),
        ('ne', "300", 500, True),
        ('ne', "600", 500, True),
        ('ne', "500", 500, False),
        ('ge', 500, 500, True),
        ('ge', 600, 500, True),
        ('ge', 499, 500, False),
        ('gt', 499, 500, False),
        ('gt', 500, 500, False),
        ('gt', 501, 500, True),
        ('le', 499, 500, True),
        ('le', 500, 500, True),
        ('le', 501, 500, False),
        ('lt', 499, 500, True),
        ('lt', 500, 500, False),
        ('lt', 501, 500, False),
    ])
    def test_single_op_int(self, op, struct_value, test_value, match_result,
                           report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "op": op,
            "field": "http_status",
            "value": test_value
        }
        rule = Rule(rule_config, report_type_matrix)

        data = {
            "http_status": struct_value
        }
        assert rule.match(data) is match_result

    @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
        ('ge', "500.01", 500, True),
        ('ge', "500.01", 500.02, False),
        ('le', "500.01", 500.02, True),
    ])
    def test_single_op_float(self, op, struct_value, test_value, match_result,
                             report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "op": op,
            "field": "duration",
            "value": test_value
        }
        rule = Rule(rule_config, report_type_matrix)

        data = {
            "duration": struct_value
        }
        assert rule.match(data) is match_result

    @pytest.mark.parametrize("op, struct_value, test_value, match_result", [
        ('contains', 'foo bar baz', 'foo', True),
        ('contains', 'foo bar baz', 'bar', True),
        ('contains', 'foo bar baz', 'dupa', False),
        ('startswith', 'foo bar baz', 'foo', True),
        ('startswith', 'foo bar baz', 'bar', False),
        ('endswith', 'foo bar baz', 'baz', True),
        ('endswith', 'foo bar baz', 'bar', False),
    ])
    def test_single_op_string(self, op, struct_value, test_value,
                              match_result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "op": op,
            "field": "error",
            "value": test_value
        }
        rule = Rule(rule_config, report_type_matrix)

        data = {
            "error": struct_value
        }
        assert rule.match(data) is match_result

    @pytest.mark.parametrize("field, value, s_type", [
        ('field_unicode', 500, str),
        ('field_unicode', 500.0, str),
        ('field_unicode', "500", str),
        ('field_int', "500", int),
        ('field_int', 500, int),
        ('field_int', 500.0, int),
        ('field_float', "500", float),
        ('field_float', 500, float),
        ('field_float', 500.0, float),
    ])
    def test_type_normalization(self, field, value, s_type):
        from appenlight.lib.rule import Rule
        type_matrix = {
            'field_unicode': {"type": 'unicode'},
            'field_float': {"type": 'float'},
            'field_int': {"type": 'int'},
        }

        rule = Rule({}, type_matrix)
        n_value = rule.normalized_type(field, value)
        assert isinstance(n_value, s_type) is True
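

# Composite rules nest through the pseudo-fields __AND__, __OR__ and
# __NOT__, each carrying a "rules" list that is evaluated recursively, e.g.
# {"field": "__OR__", "rules": [<leaf rule>, {"field": "__AND__", ...}]}.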
@pytest.mark.usefixtures('report_type_matrix')
class TestNestedRuleParsing(object):

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
         False),
        ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
         False),
        ({"http_status": 500, "group": {"priority": 1, "occurences": 11}},
         False),
        ({"http_status": 101, "group": {"priority": 3, "occurences": 5}},
         True),
    ])
    def test_NOT_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__NOT__",
            "rules": [
                {
                    "op": "ge",
                    "field": "group:occurences",
                    "value": "10"
                },
                {
                    "op": "ge",
                    "field": "group:priority",
                    "value": "4"
                }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
         True),
        ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
         True),
        ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
         True),
        ({"http_status": 101, "group": {"priority": 3, "occurences": 11}},
         False),
    ])
    def test_nested_OR_AND_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__OR__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "group:occurences",
                            "value": "10"
                        },
                        {
                            "op": "ge",
                            "field": "group:priority",
                            "value": "4"
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "http_status",
                    "value": "500"
                }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 501, "group": {"priority": 7, "occurences": 11}},
         True),
        ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
         True),
        ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
         True),
        ({"http_status": 101, "group": {"priority": 3, "occurences": 1}},
         False),
    ])
    def test_nested_OR_OR_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__OR__",
            "rules": [
                {
                    "field": "__OR__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "group:occurences",
                            "value": "10"
                        },
                        {
                            "op": "ge",
                            "field": "group:priority",
                            "value": "4"
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "http_status",
                    "value": "500"
                }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 500, "group": {"priority": 7, "occurences": 11}},
         True),
        ({"http_status": 101, "group": {"priority": 7, "occurences": 11}},
         False),
        ({"http_status": 500, "group": {"priority": 1, "occurences": 1}},
         False),
        ({"http_status": 101, "group": {"priority": 3, "occurences": 1}},
         False),
    ])
    def test_nested_AND_AND_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__AND__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "group:occurences",
                            "value": "10"
                        },
                        {
                            "op": "ge",
                            "field": "group:priority",
                            "value": "4"
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "http_status",
                    "value": "500"
                }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
          "url_path": '/test/register', "error": "foo test bar"}, True),
        ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
          "url_path": '/test/register', "error": "foo INVALID bar"}, False),
    ])
    def test_nested_AND_AND_AND_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__AND__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "group:occurences",
                            "value": "10"
                        },
                        {
                            "field": "__AND__",
                            "rules": [
                                {
                                    "op": "endswith",
                                    "field": "url_path",
                                    "value": "register"
                                },
                                {
                                    "op": "contains",
                                    "field": "error",
                                    "value": "test"
                                }
                            ]
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "http_status",
                    "value": "500"
                }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("data, result", [
        ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
          "url_path": 6, "error": 3}, False),
        ({"http_status": 500, "group": {"priority": 7, "occurences": 11},
          "url_path": '/test/register', "error": "foo INVALID bar"}, True),
    ])
    def test_nested_AND_AND_OR_rule(self, data, result, report_type_matrix):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__AND__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "group:occurences",
                            "value": "10"
                        },
                        {
                            "field": "__OR__",
                            "rules": [
                                {
                                    "op": "endswith",
                                    "field": "url_path",
                                    "value": "register"
                                },
                                {
                                    "op": "contains",
                                    "field": "error",
                                    "value": "test"
                                }
                            ]
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "http_status",
                    "value": "500"
                }
            ]
        }

        rule = Rule(rule_config, report_type_matrix)
        assert rule.match(data) is result

    @pytest.mark.parametrize("op, field, value, should_fail", [
        ('eq', 'http_status', "1", False),
        ('ne', 'http_status', "1", False),
        ('ne', 'http_status', "foo", True),
        ('startswith', 'http_status', "1", True),
        ('eq', 'group:priority', "1", False),
        ('ne', 'group:priority', "1", False),
        ('ge', 'group:priority', "1", False),
        ('le', 'group:priority', "1", False),
        ('startswith', 'group:priority', "1", True),
        ('eq', 'url_domain', "1", False),
        ('ne', 'url_domain', "1", False),
        ('startswith', 'url_domain', "1", False),
        ('endswith', 'url_domain', "1", False),
        ('contains', 'url_domain', "1", False),
        ('ge', 'url_domain', "1", True),
        ('eq', 'url_path', "1", False),
        ('ne', 'url_path', "1", False),
        ('startswith', 'url_path', "1", False),
        ('endswith', 'url_path', "1", False),
        ('contains', 'url_path', "1", False),
        ('ge', 'url_path', "1", True),
        ('eq', 'error', "1", False),
        ('ne', 'error', "1", False),
        ('startswith', 'error', "1", False),
        ('endswith', 'error', "1", False),
        ('contains', 'error', "1", False),
        ('ge', 'error', "1", True),
        ('ge', 'url_path', "1", True),
        ('eq', 'tags:server_name', "1", False),
        ('ne', 'tags:server_name', "1", False),
        ('startswith', 'tags:server_name', "1", False),
        ('endswith', 'tags:server_name', "1", False),
        ('contains', 'tags:server_name', "1", False),
        ('ge', 'tags:server_name', "1", True),
        ('contains', 'traceback', "1", False),
        ('ge', 'traceback', "1", True),
        ('eq', 'group:occurences', "1", False),
        ('ne', 'group:occurences', "1", False),
        ('ge', 'group:occurences', "1", False),
        ('le', 'group:occurences', "1", False),
        ('contains', 'group:occurences', "1", True),
    ])
    def test_rule_validation(self, op, field, value, should_fail,
                             report_type_matrix):
        import colander
        from appenlight.validators import build_rule_schema
        rule_config = {
            "op": op,
            "field": field,
            "value": value
        }

        schema = build_rule_schema(rule_config, report_type_matrix)
        if should_fail:
            with pytest.raises(colander.Invalid):
                schema.deserialize(rule_config)
        else:
            schema.deserialize(rule_config)

    def test_nested_proper_rule_validation(self, report_type_matrix):
        from appenlight.validators import build_rule_schema
        rule_config = {
            "field": "__AND__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "group:occurences",
                            "value": "10"
                        },
                        {
                            "field": "__OR__",
                            "rules": [
                                {
                                    "op": "endswith",
                                    "field": "url_path",
                                    "value": "register"
                                },
                                {
                                    "op": "contains",
                                    "field": "error",
                                    "value": "test"
                                }
                            ]
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "http_status",
                    "value": "500"
                }
            ]
        }

        schema = build_rule_schema(rule_config, report_type_matrix)
        # deserializing without raising colander.Invalid is the assertion here
        schema.deserialize(rule_config)

    def test_nested_bad_rule_validation(self, report_type_matrix):
        import colander
        from appenlight.validators import build_rule_schema
        rule_config = {
            "field": "__AND__",
            "rules": [
                {
                    "field": "__AND__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "group:occurences",
                            "value": "10"
                        },
                        {
                            "field": "__OR__",
                            "rules": [
                                {
                                    # 'gt' is not a permitted operator for a
                                    # string field like url_path, so the
                                    # schema must reject this config
                                    "op": "gt",
                                    "field": "url_path",
                                    "value": "register"
                                },
                                {
                                    "op": "contains",
                                    "field": "error",
                                    "value": "test"
                                }
                            ]
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "http_status",
                    "value": "500"
                }
            ]
        }

        schema = build_rule_schema(rule_config, report_type_matrix)
        with pytest.raises(colander.Invalid):
            schema.deserialize(rule_config)

    def test_config_manipulator(self):
        from appenlight.lib.rule import Rule
        type_matrix = {
            'a': {"type": 'int',
                  "ops": ('eq', 'ne', 'ge', 'le',)},
            'b': {"type": 'int',
                  "ops": ('eq', 'ne', 'ge', 'le',)},
        }
        rule_config = {
            "field": "__OR__",
            "rules": [
                {
                    "field": "__OR__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "a",
                            "value": "10"
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "b",
                    "value": "500"
                }
            ]
        }

        def rule_manipulator(rule):
            if 'value' in rule.config:
                rule.config['value'] = "1"

        rule = Rule(rule_config, type_matrix,
                    config_manipulator=rule_manipulator)
        rule.match({"a": 1,
                    "b": "2"})
        assert rule.config['rules'][0]['rules'][0]['value'] == "1"
        assert rule.config['rules'][1]['value'] == "1"
        assert rule.type_matrix["b"]['type'] == "int"

    def test_dynamic_config_manipulator(self):
        from appenlight.lib.rule import Rule
        rule_config = {
            "field": "__OR__",
            "rules": [
                {
                    "field": "__OR__",
                    "rules": [
                        {
                            "op": "ge",
                            "field": "a",
                            "value": "10"
                        }
                    ]
                },
                {
                    "op": "eq",
                    "field": "b",
                    "value": "500"
                }
            ]
        }

        def rule_manipulator(rule):
            rule.type_matrix = {
                'a': {"type": 'int',
                      "ops": ('eq', 'ne', 'ge', 'le',)},
                'b': {"type": 'unicode',
                      "ops": ('eq', 'ne', 'ge', 'le',)},
            }

            if 'value' in rule.config:
                if rule.config['field'] == 'a':
                    rule.config['value'] = "1"
                elif rule.config['field'] == 'b':
                    rule.config['value'] = "2"

        rule = Rule(rule_config, {},
                    config_manipulator=rule_manipulator)
        rule.match({"a": 11,
                    "b": "55"})
        assert rule.config['rules'][0]['rules'][0]['value'] == "1"
        assert rule.config['rules'][1]['value'] == "2"
        assert rule.type_matrix["b"]['type'] == "unicode"
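

# Form views require a valid CSRF token: a POST without one must raise
# CSRFException, while a tokened POST proceeds to normal field validation.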
@pytest.mark.usefixtures('base_app', 'with_migrations')
class TestViewsWithForms(object):
    def test_bad_csrf(self):
        from appenlight.forms import CSRFException
        from appenlight.views.index import register
        post_data = {'dupa': 'dupa'}
        request = testing.DummyRequest(post=post_data)
        request.POST = webob.multidict.MultiDict(request.POST)
        with pytest.raises(CSRFException):
            register(request)

    def test_proper_csrf(self):
        from appenlight.views.index import register
        request = pyramid.threadlocal.get_current_request()
        post_data = {'dupa': 'dupa',
                     'csrf_token': request.session.get_csrf_token()}
        request = testing.DummyRequest(post=post_data)
        request.POST = webob.multidict.MultiDict(request.POST)
        result = register(request)
        assert result['form'].errors['email'][0] == 'This field is required.'
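

# Registration must reject an empty form with per-field errors and, on valid
# input, persist the user with a hashed password (the 60-character length
# checked below matches a bcrypt-style hash).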
@pytest.mark.usefixtures('base_app', 'with_migrations', 'default_data')
class TestRegistration(object):
    def test_invalid_form(self):
        from appenlight.views.index import register
        request = pyramid.threadlocal.get_current_request()
        post_data = {'user_name': '',
                     'user_password': '',
                     'email': '',
                     'csrf_token': request.session.get_csrf_token()}
        request = testing.DummyRequest(post=post_data)
        request.POST = webob.multidict.MultiDict(request.POST)
        result = register(request)
        assert result['form'].errors['user_name'][0] == \
            'This field is required.'

    def test_valid_form(self):
        from appenlight.views.index import register
        from ziggurat_foundations.models.services.user import UserService
        request = pyramid.threadlocal.get_current_request()
        post_data = {'user_name': 'foo',
                     'user_password': 'barr',
                     'email': 'test@test.foo',
                     'csrf_token': request.session.get_csrf_token()}
        request = testing.DummyRequest(post=post_data)
        request.add_flash_to_headers = mock.Mock()
        request.POST = webob.multidict.MultiDict(request.POST)
        assert UserService.by_user_name('foo') is None
        register(request)
        user = UserService.by_user_name('foo')
        assert user.user_name == 'foo'
        assert len(user.user_password) == 60
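

# Application creation goes through the JSON API: missing data must yield a
# 422 response, while a proper payload returns the full resource dict,
# including generated keys and default thresholds.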
    @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables',
                             'default_user')
    class TestApplicationCreation(object):
        def test_wrong_data(self):
            import appenlight.views.applications as applications
            from ziggurat_foundations.models.services.user import UserService
            request = pyramid.threadlocal.get_current_request()
            request.user = UserService.by_user_name('testuser')
            request.unsafe_json_body = {}
            request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token()
            response = applications.application_create(request)
            assert response.code == 422

        def test_proper_data(self):
            import appenlight.views.applications as applications
            from ziggurat_foundations.models.services.user import UserService

            request = pyramid.threadlocal.get_current_request()
            request.user = UserService.by_user_name('testuser')
            request.unsafe_json_body = {"resource_name": "app name",
                                        "domains": "foo"}
            request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token()
            app_dict = applications.application_create(request)
            assert app_dict['public_key'] is not None
            assert app_dict['api_key'] is not None
            assert app_dict['resource_name'] == 'app name'
            assert app_dict['owner_group_id'] is None
            assert app_dict['resource_id'] is not None
            assert app_dict['default_grouping'] == 'url_traceback'
            assert app_dict['possible_permissions'] == ('view', 'update_reports')
            assert app_dict['slow_report_threshold'] == 10
            assert app_dict['owner_user_name'] == 'testuser'
            assert app_dict['owner_user_id'] == request.user.id
            # compare strings by value; `is 'foo'` only passed by relying on
            # CPython's string interning
            assert app_dict['domains'] == 'foo'
            assert app_dict['postprocessing_rules'] == []
            assert app_dict['error_report_threshold'] == 10
            assert app_dict['allow_permanent_storage'] is False
            assert app_dict['resource_type'] == 'application'
            assert app_dict['current_permissions'] == []
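The tests below exercise `sentry_compat`'s handling of the wire formats used by different raven clients: a plain JSON body, a base64-encoded zlib payload (the Java body's `eJy` prefix is the base64 encoding of a zlib stream header), and a zlib-compressed body announced with `Content-Encoding: deflate`. A minimal sketch of producing the compressed variant, using a hypothetical helper name that is not part of the appenlight codebase:

    import json
    import zlib

    def encode_sentry_payload(event_dict):
        # zlib.compress emits a zlib-framed stream, which is what clients
        # send when they set the Content-Encoding: deflate header
        return zlib.compress(json.dumps(event_dict).encode('utf8'))

Clients that omit the Content-Encoding header instead base64-encode the compressed bytes, which matches the hardcoded body in the Java test below.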
    @pytest.mark.usefixtures('default_application')
    @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables')
    class TestAPISentryView(object):
        def test_no_payload(self, default_application):
            import colander
            from appenlight.models.services.application import ApplicationService
            from appenlight.views.api import sentry_compat
            from appenlight.lib.request import JSONException

            context = DummyContext()
            context.resource = ApplicationService.by_id(1)
            request = testing.DummyRequest(
                headers={'Content-Type': 'application/json'})
            request.unsafe_json_body = ''
            request.context = context
            route = mock.Mock()
            route.name = 'api_sentry'
            request.matched_route = route
            with pytest.raises(JSONException):
                sentry_compat(request)

        def test_java_client_payload(self):
            from appenlight.views.api import sentry_compat
            from appenlight.models.services.application import ApplicationService
            from appenlight.models.report_group import ReportGroup
            route = mock.Mock()
            route.name = 'api_sentry'
            request = pyramid.threadlocal.get_current_request()
            context = DummyContext()
            context.resource = ApplicationService.by_id(1)
            request.context = context
            request.matched_route = route
            request.body = b'eJy1UmFr2zAQ/S0T+7BCLOzYThp/C6xjG6SDLd/GCBf57Ki' \
                           b'RJSHJJiXkv+/UlC7p2kAZA33Ru6f33t1pz3BAHVayZhWr87' \
                           b'JMs+I6q3MsrifFep2vc1iXM1HMpgBTNmIdeg8tEvlmJ9AGa' \
                           b'fQ7goOkQoDOUmGcZpMkLZO0WGZFRadMiaHIR1EVnTMu3k3b' \
                           b'oiMgqJrXpgOpOVjLLTiPkWAVhMa4jih3MAAholfWyUDAksz' \
                           b'm1iopICbg8fWH52B8VWXZVYwHrWfV/jBipD2gW2no8CFMa5' \
                           b'JButCDSjoQG6mR6LgLDojPPn/7sbydL25ep34HGl+y3DiE+' \
                           b'lH0xXBXjMzFBsXW99SS7pWKYXRw91zqgK4BgZ4/DZVVP/cs' \
                           b'3NuzSZPfAKqP2Cdj4tw7U/cKH0fEFeiWQFqE2FIHAmMPjaN' \
                           b'Y/kHvbzY/JqdHUq9o/KxqQHkcsabX4piDuT4aK+pXG1ZNi/' \
                           b'IwOpEyruXC1LiB3vPO3BmOOxTUCIqv5LIg5H12oh9cf0l+P' \
                           b'MvP5P8kddgoFIEvMGzM5cRSD2aLJ6qTdHKm6nv9pPcRFba0' \
                           b'Kd0eleeCFuGN+9JZ9TaXIn/V5JYMBvxXg3L6PwzSE4dkfOb' \
                           b'w7CtfWmP85SdCs8OvA53fUV19cg=='
            sentry_compat(request)
            query = DBSession.query(ReportGroup)
            report = query.first()
            assert query.count() == 1
            assert report.total_reports == 1

        def test_ruby_client_payload(self):
            from appenlight.views.api import sentry_compat
            from appenlight.models.services.application import ApplicationService
            from appenlight.models.report_group import ReportGroup
            from appenlight.tests.payload_examples import SENTRY_RUBY_ENCODED
            route = mock.Mock()
            route.name = 'api_sentry'
            request = testing.DummyRequest(
                headers={'Content-Type': 'application/octet-stream',
                         'User-Agent': 'sentry-ruby/1.0.0',
                         'X-Sentry-Auth': 'Sentry sentry_version=5, '
                                          'sentry_client=raven-ruby/1.0.0, '
                                          'sentry_timestamp=1462378483, '
                                          'sentry_key=xxx, sentry_secret=xxx'
                         })
            context = DummyContext()
            context.resource = ApplicationService.by_id(1)
            request.context = context
            request.matched_route = route
            request.body = SENTRY_RUBY_ENCODED
            sentry_compat(request)
            query = DBSession.query(ReportGroup)
            report = query.first()
            assert query.count() == 1
            assert report.total_reports == 1

        def test_python_client_decoded_payload(self):
            from appenlight.views.api import sentry_compat
            from appenlight.models.services.application import ApplicationService
            from appenlight.models.report_group import ReportGroup
            from appenlight.tests.payload_examples import SENTRY_PYTHON_PAYLOAD_7
            route = mock.Mock()
            route.name = 'api_sentry'
            request = pyramid.threadlocal.get_current_request()
            context = DummyContext()
            context.resource = ApplicationService.by_id(1)
            request.context = context
            request.matched_route = route
            request.body = json.dumps(SENTRY_PYTHON_PAYLOAD_7).encode('utf8')
            sentry_compat(request)
            query = DBSession.query(ReportGroup)
            report = query.first()
            assert query.count() == 1
            assert report.total_reports == 1

        def test_python_client_encoded_payload(self):
            from appenlight.views.api import sentry_compat
            from appenlight.models.services.application import ApplicationService
            from appenlight.models.report_group import ReportGroup
            from appenlight.tests.payload_examples import SENTRY_PYTHON_ENCODED
            route = mock.Mock()
            route.name = 'api_sentry'
            request = testing.DummyRequest(
                headers={'Content-Type': 'application/octet-stream',
                         'Content-Encoding': 'deflate',
                         'User-Agent': 'sentry-ruby/1.0.0',
                         'X-Sentry-Auth': 'Sentry sentry_version=5, '
                                          'sentry_client=raven-ruby/1.0.0, '
                                          'sentry_timestamp=1462378483, '
                                          'sentry_key=xxx, sentry_secret=xxx'
                         })
            context = DummyContext()
            context.resource = ApplicationService.by_id(1)
            request.context = context
            request.matched_route = route
            request.body = SENTRY_PYTHON_ENCODED
            sentry_compat(request)
            query = DBSession.query(ReportGroup)
            report = query.first()
            assert query.count() == 1
            assert report.total_reports == 1
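When iterating on a single class from this module, pytest's `-k` keyword filter narrows the run without executing the whole suite (the class selector here is just an example):

    py.test appenlight/tests/tests.py -k TestAPISentryView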