Auto status change to "Under Review"
@@ -1,220 +1,220 @@
|
1 | 1 | # Created by .ignore support plugin (hsz.mobi) |
|
2 | 2 | ### Node template |
|
3 | 3 | # Logs |
|
4 | 4 | logs |
|
5 | 5 | *.log |
|
6 | 6 | npm-debug.log* |
|
7 | 7 | yarn-debug.log* |
|
8 | 8 | yarn-error.log* |
|
9 | 9 | |
|
10 | 10 | # Runtime data |
|
11 | 11 | pids |
|
12 | 12 | *.pid |
|
13 | 13 | *.seed |
|
14 | 14 | *.pid.lock |
|
15 | 15 | |
|
16 | 16 | # Directory for instrumented libs generated by jscoverage/JSCover |
|
17 | 17 | lib-cov |
|
18 | 18 | |
|
19 | 19 | # Coverage directory used by tools like istanbul |
|
20 | 20 | coverage |
|
21 | 21 | |
|
22 | 22 | # nyc test coverage |
|
23 | 23 | .nyc_output |
|
24 | 24 | |
|
25 | 25 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) |
|
26 | 26 | .grunt |
|
27 | 27 | |
|
28 | 28 | # Bower dependency directory (https://bower.io/) |
|
29 | 29 | bower_components |
|
30 | 30 | |
|
31 | 31 | # node-waf configuration |
|
32 | 32 | .lock-wscript |
|
33 | 33 | |
|
34 | 34 | # Compiled binary addons (https://nodejs.org/api/addons.html) |
|
35 | 35 | build/Release |
|
36 | 36 | |
|
37 | 37 | # Dependency directories |
|
38 | 38 | node_modules/ |
|
39 | 39 | jspm_packages/ |
|
40 | 40 | |
|
41 | 41 | # Typescript v1 declaration files |
|
42 | 42 | typings/ |
|
43 | 43 | |
|
44 | 44 | # Optional npm cache directory |
|
45 | 45 | .npm |
|
46 | 46 | |
|
47 | 47 | # Optional eslint cache |
|
48 | 48 | .eslintcache |
|
49 | 49 | |
|
50 | 50 | # Optional REPL history |
|
51 | 51 | .node_repl_history |
|
52 | 52 | |
|
53 | 53 | # Output of 'npm pack' |
|
54 | 54 | *.tgz |
|
55 | 55 | |
|
56 | 56 | # Yarn Integrity file |
|
57 | 57 | .yarn-integrity |
|
58 | 58 | |
|
59 | 59 | # dotenv environment variables file |
|
60 | 60 | .env |
|
61 | 61 | |
|
62 | 62 | ### JetBrains template |
|
63 | 63 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm |
|
64 | 64 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 |
|
65 | 65 | |
|
66 | 66 | # User-specific stuff: |
|
67 | 67 | .idea/**/workspace.xml |
|
68 | 68 | .idea/**/tasks.xml |
|
69 | 69 | .idea/dictionaries |
|
70 | 70 | |
|
71 | 71 | # Sensitive or high-churn files: |
|
72 | 72 | .idea/**/dataSources/ |
|
73 | 73 | .idea/**/dataSources.ids |
|
74 | 74 | .idea/**/dataSources.xml |
|
75 | 75 | .idea/**/dataSources.local.xml |
|
76 | 76 | .idea/**/sqlDataSources.xml |
|
77 | 77 | .idea/**/dynamic.xml |
|
78 | 78 | .idea/**/uiDesigner.xml |
|
79 | 79 | |
|
80 | 80 | # Gradle: |
|
81 | 81 | .idea/**/gradle.xml |
|
82 | 82 | .idea/**/libraries |
|
83 | 83 | |
|
84 | 84 | # CMake |
|
85 | 85 | cmake-build-debug/ |
|
86 | 86 | |
|
87 | 87 | # Mongo Explorer plugin: |
|
88 | 88 | .idea/**/mongoSettings.xml |
|
89 | 89 | |
|
90 | 90 | ## File-based project format: |
|
91 | 91 | *.iws |
|
92 | 92 | |
|
93 | 93 | ## Plugin-specific files: |
|
94 | 94 | |
|
95 | 95 | # IntelliJ |
|
96 | 96 | out/ |
|
97 | 97 | |
|
98 | 98 | # mpeltonen/sbt-idea plugin |
|
99 | 99 | .idea_modules/ |
|
100 | 100 | |
|
101 | 101 | # JIRA plugin |
|
102 | 102 | atlassian-ide-plugin.xml |
|
103 | 103 | |
|
104 | 104 | # Cursive Clojure plugin |
|
105 | 105 | .idea/replstate.xml |
|
106 | 106 | |
|
107 | 107 | # Crashlytics plugin (for Android Studio and IntelliJ) |
|
108 | 108 | com_crashlytics_export_strings.xml |
|
109 | 109 | crashlytics.properties |
|
110 | 110 | crashlytics-build.properties |
|
111 | 111 | fabric.properties |
|
112 | 112 | ### Python template |
|
113 | 113 | # Byte-compiled / optimized / DLL files |
|
114 | 114 | __pycache__/ |
|
115 | 115 | *.py[cod] |
|
116 | 116 | *$py.class |
|
117 | 117 | |
|
118 | 118 | # C extensions |
|
119 | 119 | *.so |
|
120 | 120 | |
|
121 | 121 | # Distribution / packaging |
|
122 | 122 | .Python |
|
123 | 123 | build/ |
|
124 | 124 | develop-eggs/ |
|
125 | 125 | dist/ |
|
126 | 126 | downloads/ |
|
127 | 127 | eggs/ |
|
128 | 128 | .eggs/ |
|
129 |     | lib/ |
|
130 |     | lib64/ |
|
    | 129 | /lib/ |
|
    | 130 | /lib64/ |
|
131 | 131 | parts/ |
|
132 | 132 | sdist/ |
|
133 | 133 | var/ |
|
134 | 134 | wheels/ |
|
135 | 135 | *.egg-info/ |
|
136 | 136 | .installed.cfg |
|
137 | 137 | *.egg |
|
138 | 138 | MANIFEST |
|
139 | 139 | |
|
140 | 140 | # PyInstaller |
|
141 | 141 | # Usually these files are written by a python script from a template |
|
142 | 142 | # before PyInstaller builds the exe, so as to inject date/other infos into it. |
|
143 | 143 | *.manifest |
|
144 | 144 | *.spec |
|
145 | 145 | |
|
146 | 146 | # Installer logs |
|
147 | 147 | pip-log.txt |
|
148 | 148 | pip-delete-this-directory.txt |
|
149 | 149 | |
|
150 | 150 | # Unit test / coverage reports |
|
151 | 151 | htmlcov/ |
|
152 | 152 | .tox/ |
|
153 | 153 | .coverage |
|
154 | 154 | .coverage.* |
|
155 | 155 | .cache |
|
156 | 156 | nosetests.xml |
|
157 | 157 | coverage.xml |
|
158 | 158 | *.cover |
|
159 | 159 | .hypothesis/ |
|
160 | 160 | |
|
161 | 161 | # Translations |
|
162 | 162 | *.mo |
|
163 | 163 | *.pot |
|
164 | 164 | |
|
165 | 165 | # Django stuff: |
|
166 | 166 | local_settings.py |
|
167 | 167 | |
|
168 | 168 | # Flask stuff: |
|
169 | 169 | instance/ |
|
170 | 170 | .webassets-cache |
|
171 | 171 | |
|
172 | 172 | # Scrapy stuff: |
|
173 | 173 | .scrapy |
|
174 | 174 | |
|
175 | 175 | # Sphinx documentation |
|
176 | 176 | docs/_build/ |
|
177 | 177 | |
|
178 | 178 | # PyBuilder |
|
179 | 179 | target/ |
|
180 | 180 | |
|
181 | 181 | # Jupyter Notebook |
|
182 | 182 | .ipynb_checkpoints |
|
183 | 183 | |
|
184 | 184 | # pyenv |
|
185 | 185 | .python-version |
|
186 | 186 | |
|
187 | 187 | # celery beat schedule file |
|
188 | 188 | celerybeat-schedule |
|
189 | 189 | |
|
190 | 190 | # SageMath parsed files |
|
191 | 191 | *.sage.py |
|
192 | 192 | |
|
193 | 193 | # Environments |
|
194 | 194 | .venv |
|
195 | 195 | env/ |
|
196 | 196 | venv/ |
|
197 | 197 | ENV/ |
|
198 | 198 | env.bak/ |
|
199 | 199 | venv.bak/ |
|
200 | 200 | |
|
201 | 201 | # Spyder project settings |
|
202 | 202 | .spyderproject |
|
203 | 203 | .spyproject |
|
204 | 204 | |
|
205 | 205 | # Rope project settings |
|
206 | 206 | .ropeproject |
|
207 | 207 | |
|
208 | 208 | # mkdocs documentation |
|
209 | 209 | /site |
|
210 | 210 | |
|
211 | 211 | # mypy |
|
212 | 212 | .mypy_cache/ |
|
213 | 213 | ### Example user template template |
|
214 | 214 | ### Example user template |
|
215 | 215 | |
|
216 | 216 | # IntelliJ project files |
|
217 | 217 | .idea |
|
218 | 218 | *.iml |
|
219 | 219 | out |
|
220 | 220 | gen |
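Review note: the only change in this hunk anchors the Python build patterns to the repository root (lib/ becomes /lib/, lib64/ becomes /lib64/). Unanchored, those patterns match a lib directory at any depth, which would also ignore tracked package code such as appenlight/lib/; with the leading slash only top-level virtualenv-style directories are ignored.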
@@ -1,49 +1,49 @@
|
1 | 1 | repoze.sendmail==4.1 |
|
2 | 2 | pyramid==1.7.3 |
|
3 | 3 | pyramid_tm==0.12 |
|
4 | 4 | pyramid_debugtoolbar |
|
5 | 5 | pyramid_authstack==1.0.1 |
|
6 | 6 | SQLAlchemy==1.0.12 |
|
7 | 7 | alembic==1.0.8 |
|
8 | 8 | webhelpers2==2.0 |
|
9 | 9 | transaction==1.4.3 |
|
10 | 10 | zope.sqlalchemy==0.7.6 |
|
11 | 11 | pyramid_mailer==0.14.1 |
|
12 | 12 | redis==2.10.5 |
|
13 | 13 | redlock-py==1.0.8 |
|
14 | 14 | pyramid_jinja2==2.6.2 |
|
15 | 15 | psycopg2==2.7.7 |
|
16 | 16 | wtforms==2.1 |
|
17 | 17 | celery==3.1.23 |
|
18 | 18 | formencode==1.3.0 |
|
19 | 19 | psutil==2.1.2 |
|
20 |    | ziggurat_foundations==0.… |
|
   | 20 | ziggurat_foundations==0.8.3 |
|
21 | 21 | bcrypt==3.1.6 |
|
22 | 22 | appenlight_client |
|
23 | 23 | markdown==2.5 |
|
24 | 24 | colander==1.7 |
|
25 | 25 | defusedxml==0.5.0 |
|
26 | 26 | dogpile.cache==0.5.7 |
|
27 | 27 | pyramid_redis_sessions==1.0.1 |
|
28 | 28 | simplejson==3.8.2 |
|
29 | 29 | waitress==1.0 |
|
30 | 30 | gunicorn==19.9.0 |
|
31 | 31 | requests==2.20.0 |
|
32 | 32 | requests_oauthlib==0.6.1 |
|
33 | 33 | gevent==1.1.1 |
|
34 | 34 | gevent-websocket==0.9.5 |
|
35 | 35 | pygments==2.1.3 |
|
36 | 36 | lxml==4.3.2 |
|
37 | 37 | paginate==0.5.4 |
|
38 | 38 | paginate-sqlalchemy==0.2.0 |
|
39 | 39 | pyelasticsearch==1.4 |
|
40 | 40 | six==1.9.0 |
|
41 | 41 | mock==1.0.1 |
|
42 | 42 | itsdangerous==1.1.0 |
|
43 | 43 | camplight==0.9.6 |
|
44 | 44 | jira==1.0.7 |
|
45 | 45 | python-dateutil==2.5.3 |
|
46 | 46 | authomatic==0.1.0.post1 |
|
47 | 47 | cryptography==2.6.1 |
|
48 | 48 | webassets==0.11.1 |
|
49 | 49 |
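Review note: the only requirements change pins ziggurat_foundations to 0.8.3. The 0.8 line moves query helpers off the model classes onto *Service classes, which is what drives the ResourceService/UserService/GroupService changes in the code hunks below. A small sketch of the call-style migration (the pre-0.8 call is assumed from the removed lines, not visible in full here):

    from ziggurat_foundations.models.services.user import UserService

    def find_user(user_name, db_session=None):
        # pre-0.8 style (assumed): return User.by_user_name(user_name)
        return UserService.by_user_name(user_name, db_session=db_session)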
@@ -1,660 +1,662 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import bisect |
|
18 | 18 | import collections |
|
19 | 19 | import math |
|
20 | 20 | from datetime import datetime, timedelta |
|
21 | 21 | |
|
22 | 22 | import sqlalchemy as sa |
|
23 | 23 | import pyelasticsearch |
|
24 | 24 | |
|
25 | 25 | from celery.utils.log import get_task_logger |
|
26 | 26 | from zope.sqlalchemy import mark_changed |
|
27 | 27 | from pyramid.threadlocal import get_current_request, get_current_registry |
|
   | 28 | from ziggurat_foundations.models.services.resource import ResourceService |
|
   | 29 | |
|
28 | 30 | from appenlight.celery import celery |
|
29 | 31 | from appenlight.models.report_group import ReportGroup |
|
30 | 32 | from appenlight.models import DBSession, Datastores |
|
31 | 33 | from appenlight.models.report import Report |
|
32 | 34 | from appenlight.models.log import Log |
|
33 | 35 | from appenlight.models.metric import Metric |
|
34 | 36 | from appenlight.models.event import Event |
|
35 | 37 | |
|
36 | 38 | from appenlight.models.services.application import ApplicationService |
|
37 | 39 | from appenlight.models.services.event import EventService |
|
38 | 40 | from appenlight.models.services.log import LogService |
|
39 | 41 | from appenlight.models.services.report import ReportService |
|
40 | 42 | from appenlight.models.services.report_group import ReportGroupService |
|
41 | 43 | from appenlight.models.services.user import UserService |
|
42 | 44 | from appenlight.models.tag import Tag |
|
43 | 45 | from appenlight.lib import print_traceback |
|
44 | 46 | from appenlight.lib.utils import parse_proto, in_batches |
|
45 | 47 | from appenlight.lib.ext_json import json |
|
46 | 48 | from appenlight.lib.redis_keys import REDIS_KEYS |
|
47 | 49 | from appenlight.lib.enums import ReportType |
|
48 | 50 | |
|
49 | 51 | log = get_task_logger(__name__) |
|
50 | 52 | |
|
51 | 53 | sample_boundries = list(range(100, 1000, 100)) + \ |
|
52 | 54 | list(range(1000, 10000, 1000)) + \ |
|
53 | 55 | list(range(10000, 100000, 5000)) |
|
54 | 56 | |
|
55 | 57 | |
|
56 | 58 | def pick_sample(total_occurences, report_type=None): |
|
57 | 59 | every = 1.0 |
|
58 | 60 | position = bisect.bisect_left(sample_boundries, total_occurences) |
|
59 | 61 | if position > 0: |
|
60 | 62 | if report_type == ReportType.not_found: |
|
61 | 63 | divide = 10.0 |
|
62 | 64 | else: |
|
63 | 65 | divide = 100.0 |
|
64 | 66 | every = sample_boundries[position - 1] / divide |
|
65 | 67 | return total_occurences % every == 0 |
|
66 | 68 | |
|
67 | 69 | |
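Review note: with the sample_boundries above, pick_sample stores every report until a group reaches 100 occurrences, then keeps only every (previous boundary / 100)-th report (or / 10 for not_found reports), so hot groups are sampled progressively harder. A quick sanity check against the function as written:

    # runnable against pick_sample/sample_boundries defined above
    assert pick_sample(50) is True      # below first boundary: every == 1.0
    assert pick_sample(5000) is True    # boundary below is 4000 -> every == 40.0
    assert pick_sample(5001) is False   # boundary below is 5000 -> every == 50.0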
|
68 | 70 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
69 | 71 | def test_exception_task(): |
|
70 | 72 | log.error('test celery log', extra={'location': 'celery'}) |
|
71 | 73 | log.warning('test celery log', extra={'location': 'celery'}) |
|
72 | 74 | raise Exception('Celery exception test') |
|
73 | 75 | |
|
74 | 76 | |
|
75 | 77 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
76 | 78 | def test_retry_exception_task(): |
|
77 | 79 | try: |
|
78 | 80 | import time |
|
79 | 81 | |
|
80 | 82 | time.sleep(1.3) |
|
81 | 83 | log.error('test retry celery log', extra={'location': 'celery'}) |
|
82 | 84 | log.warning('test retry celery log', extra={'location': 'celery'}) |
|
83 | 85 | raise Exception('Celery exception test') |
|
84 | 86 | except Exception as exc: |
|
85 | 87 | test_retry_exception_task.retry(exc=exc) |
|
86 | 88 | |
|
87 | 89 | |
|
88 | 90 | @celery.task(queue="reports", default_retry_delay=600, max_retries=144) |
|
89 | 91 | def add_reports(resource_id, request_params, dataset, **kwargs): |
|
90 | 92 | proto_version = parse_proto(request_params.get('protocol_version', '')) |
|
91 | 93 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
92 | 94 | try: |
|
93 | 95 | # we will store ES docs here for single insert |
|
94 | 96 | es_report_docs = {} |
|
95 | 97 | es_report_group_docs = {} |
|
96 | 98 | resource = ApplicationService.by_id(resource_id) |
|
97 | 99 | |
|
98 | 100 | tags = [] |
|
99 | 101 | es_slow_calls_docs = {} |
|
100 | 102 | es_reports_stats_rows = {} |
|
101 | 103 | for report_data in dataset: |
|
102 | 104 | # build report details for later |
|
103 | 105 | added_details = 0 |
|
104 | 106 | report = Report() |
|
105 | 107 | report.set_data(report_data, resource, proto_version) |
|
106 | 108 | report._skip_ft_index = True |
|
107 | 109 | |
|
108 | 110 | # find latest group in this months partition |
|
109 | 111 | report_group = ReportGroupService.by_hash_and_resource( |
|
110 | 112 | report.resource_id, |
|
111 | 113 | report.grouping_hash, |
|
112 | 114 | since_when=datetime.utcnow().date().replace(day=1) |
|
113 | 115 | ) |
|
114 | 116 | occurences = report_data.get('occurences', 1) |
|
115 | 117 | if not report_group: |
|
116 | 118 | # total reports will be +1 moment later |
|
117 | 119 | report_group = ReportGroup(grouping_hash=report.grouping_hash, |
|
118 | 120 | occurences=0, total_reports=0, |
|
119 | 121 | last_report=0, |
|
120 | 122 | priority=report.priority, |
|
121 | 123 | error=report.error, |
|
122 | 124 | first_timestamp=report.start_time) |
|
123 | 125 | report_group._skip_ft_index = True |
|
124 | 126 | report_group.report_type = report.report_type |
|
125 | 127 | report.report_group_time = report_group.first_timestamp |
|
126 | 128 | add_sample = pick_sample(report_group.occurences, |
|
127 | 129 | report_type=report_group.report_type) |
|
128 | 130 | if add_sample: |
|
129 | 131 | resource.report_groups.append(report_group) |
|
130 | 132 | report_group.reports.append(report) |
|
131 | 133 | added_details += 1 |
|
132 | 134 | DBSession.flush() |
|
133 | 135 | if report.partition_id not in es_report_docs: |
|
134 | 136 | es_report_docs[report.partition_id] = [] |
|
135 | 137 | es_report_docs[report.partition_id].append(report.es_doc()) |
|
136 | 138 | tags.extend(list(report.tags.items())) |
|
137 | 139 | slow_calls = report.add_slow_calls(report_data, report_group) |
|
138 | 140 | DBSession.flush() |
|
139 | 141 | for s_call in slow_calls: |
|
140 | 142 | if s_call.partition_id not in es_slow_calls_docs: |
|
141 | 143 | es_slow_calls_docs[s_call.partition_id] = [] |
|
142 | 144 | es_slow_calls_docs[s_call.partition_id].append( |
|
143 | 145 | s_call.es_doc()) |
|
144 | 146 | # try generating new stat rows if needed |
|
145 | 147 | else: |
|
146 | 148 | # required for postprocessing to not fail later |
|
147 | 149 | report.report_group = report_group |
|
148 | 150 | |
|
149 | 151 | stat_row = ReportService.generate_stat_rows( |
|
150 | 152 | report, resource, report_group) |
|
151 | 153 | if stat_row.partition_id not in es_reports_stats_rows: |
|
152 | 154 | es_reports_stats_rows[stat_row.partition_id] = [] |
|
153 | 155 | es_reports_stats_rows[stat_row.partition_id].append( |
|
154 | 156 | stat_row.es_doc()) |
|
155 | 157 | |
|
156 | 158 | # see if we should mark 10th occurrence of report |
|
157 | 159 | last_occurences_10 = int(math.floor(report_group.occurences / 10)) |
|
158 | 160 | curr_occurences_10 = int(math.floor( |
|
159 | 161 | (report_group.occurences + report.occurences) / 10)) |
|
160 | 162 | last_occurences_100 = int( |
|
161 | 163 | math.floor(report_group.occurences / 100)) |
|
162 | 164 | curr_occurences_100 = int(math.floor( |
|
163 | 165 | (report_group.occurences + report.occurences) / 100)) |
|
164 | 166 | notify_occurences_10 = last_occurences_10 != curr_occurences_10 |
|
165 | 167 | notify_occurences_100 = last_occurences_100 != curr_occurences_100 |
|
166 | 168 | report_group.occurences = ReportGroup.occurences + occurences |
|
167 | 169 | report_group.last_timestamp = report.start_time |
|
168 | 170 | report_group.summed_duration = ReportGroup.summed_duration + report.duration |
|
169 | 171 | summed_duration = ReportGroup.summed_duration + report.duration |
|
170 | 172 | summed_occurences = ReportGroup.occurences + occurences |
|
171 | 173 | report_group.average_duration = summed_duration / summed_occurences |
|
172 | 174 | report_group.run_postprocessing(report) |
|
173 | 175 | if added_details: |
|
174 | 176 | report_group.total_reports = ReportGroup.total_reports + 1 |
|
175 | 177 | report_group.last_report = report.id |
|
176 | 178 | report_group.set_notification_info(notify_10=notify_occurences_10, |
|
177 | 179 | notify_100=notify_occurences_100) |
|
178 | 180 | DBSession.flush() |
|
179 | 181 | report_group.get_report().notify_channel(report_group) |
|
180 | 182 | if report_group.partition_id not in es_report_group_docs: |
|
181 | 183 | es_report_group_docs[report_group.partition_id] = [] |
|
182 | 184 | es_report_group_docs[report_group.partition_id].append( |
|
183 | 185 | report_group.es_doc()) |
|
184 | 186 | |
|
185 | 187 | action = 'REPORT' |
|
186 | 188 | log_msg = '%s: %s %s, client: %s, proto: %s' % ( |
|
187 | 189 | action, |
|
188 | 190 | report_data.get('http_status', 'unknown'), |
|
189 | 191 | str(resource), |
|
190 | 192 | report_data.get('client'), |
|
191 | 193 | proto_version) |
|
192 | 194 | log.info(log_msg) |
|
193 | 195 | total_reports = len(dataset) |
|
194 | 196 | redis_pipeline = Datastores.redis.pipeline(transaction=False) |
|
195 | 197 | key = REDIS_KEYS['counters']['reports_per_minute'].format(current_time) |
|
196 | 198 | redis_pipeline.incr(key, total_reports) |
|
197 | 199 | redis_pipeline.expire(key, 3600 * 24) |
|
198 | 200 | key = REDIS_KEYS['counters']['events_per_minute_per_user'].format( |
|
199 | 201 | resource.owner_user_id, current_time) |
|
200 | 202 | redis_pipeline.incr(key, total_reports) |
|
201 | 203 | redis_pipeline.expire(key, 3600) |
|
202 | 204 | key = REDIS_KEYS['counters']['reports_per_hour_per_app'].format( |
|
203 | 205 | resource_id, current_time.replace(minute=0)) |
|
204 | 206 | redis_pipeline.incr(key, total_reports) |
|
205 | 207 | redis_pipeline.expire(key, 3600 * 24 * 7) |
|
206 | 208 | redis_pipeline.sadd( |
|
207 | 209 | REDIS_KEYS['apps_that_got_new_data_per_hour'].format( |
|
208 | 210 | current_time.replace(minute=0)), resource_id) |
|
209 | 211 | redis_pipeline.execute() |
|
210 | 212 | |
|
211 | 213 | add_reports_es(es_report_group_docs, es_report_docs) |
|
212 | 214 | add_reports_slow_calls_es(es_slow_calls_docs) |
|
213 | 215 | add_reports_stats_rows_es(es_reports_stats_rows) |
|
214 | 216 | return True |
|
215 | 217 | except Exception as exc: |
|
216 | 218 | print_traceback(log) |
|
217 | 219 | add_reports.retry(exc=exc) |
|
218 | 220 | |
|
219 | 221 | |
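Review note on the counter updates above (report_group.occurences = ReportGroup.occurences + occurences, and similarly total_reports and summed_duration): assigning a column expression makes SQLAlchemy emit a server-side UPDATE ... SET occurences = occurences + N, so concurrent workers on the reports queue don't lose increments to read-modify-write races. A self-contained sketch of the pattern, written against the SQLAlchemy 1.0 API pinned in requirements (table/model names are illustrative only):

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class Counter(Base):
        __tablename__ = 'counters'
        id = sa.Column(sa.Integer, primary_key=True)
        occurences = sa.Column(sa.Integer, default=0)

    engine = sa.create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    row = Counter(occurences=0)
    session.add(row)
    session.flush()
    # column-expression assignment -> the database executes
    # UPDATE counters SET occurences = occurences + 5
    row.occurences = Counter.occurences + 5
    session.flush()
    assert row.occurences == 5  # attribute expired at flush, reloaded on access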
|
220 | 222 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
221 | 223 | def add_reports_es(report_group_docs, report_docs): |
|
222 | 224 | for k, v in report_group_docs.items(): |
|
223 | 225 | Datastores.es.bulk_index(k, 'report_group', v, id_field="_id") |
|
224 | 226 | for k, v in report_docs.items(): |
|
225 | 227 | Datastores.es.bulk_index(k, 'report', v, id_field="_id", |
|
226 | 228 | parent_field='_parent') |
|
227 | 229 | |
|
228 | 230 | |
|
229 | 231 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
230 | 232 | def add_reports_slow_calls_es(es_docs): |
|
231 | 233 | for k, v in es_docs.items(): |
|
232 | 234 | Datastores.es.bulk_index(k, 'log', v) |
|
233 | 235 | |
|
234 | 236 | |
|
235 | 237 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
236 | 238 | def add_reports_stats_rows_es(es_docs): |
|
237 | 239 | for k, v in es_docs.items(): |
|
238 | 240 | Datastores.es.bulk_index(k, 'log', v) |
|
239 | 241 | |
|
240 | 242 | |
|
241 | 243 | @celery.task(queue="logs", default_retry_delay=600, max_retries=144) |
|
242 | 244 | def add_logs(resource_id, request_params, dataset, **kwargs): |
|
243 | 245 | proto_version = request_params.get('protocol_version') |
|
244 | 246 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
245 | 247 | |
|
246 | 248 | try: |
|
247 | 249 | es_docs = collections.defaultdict(list) |
|
248 | 250 | resource = ApplicationService.by_id_cached()(resource_id) |
|
249 | 251 | resource = DBSession.merge(resource, load=False) |
|
250 | 252 | ns_pairs = [] |
|
251 | 253 | for entry in dataset: |
|
252 | 254 | # gather pk and ns so we can remove older versions of row later |
|
253 | 255 | if entry['primary_key'] is not None: |
|
254 | 256 | ns_pairs.append({"pk": entry['primary_key'], |
|
255 | 257 | "ns": entry['namespace']}) |
|
256 | 258 | log_entry = Log() |
|
257 | 259 | log_entry.set_data(entry, resource=resource) |
|
258 | 260 | log_entry._skip_ft_index = True |
|
259 | 261 | resource.logs.append(log_entry) |
|
260 | 262 | DBSession.flush() |
|
261 | 263 | # insert non pk rows first |
|
262 | 264 | if entry['primary_key'] is None: |
|
263 | 265 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) |
|
264 | 266 | |
|
265 | 267 | # 2nd pass to delete all log entries from db for the same pk/ns pair |
|
266 | 268 | if ns_pairs: |
|
267 | 269 | ids_to_delete = [] |
|
268 | 270 | es_docs = collections.defaultdict(list) |
|
269 | 271 | es_docs_to_delete = collections.defaultdict(list) |
|
270 | 272 | found_pkey_logs = LogService.query_by_primary_key_and_namespace( |
|
271 | 273 | list_of_pairs=ns_pairs) |
|
272 | 274 | log_dict = {} |
|
273 | 275 | for log_entry in found_pkey_logs: |
|
274 | 276 | log_key = (log_entry.primary_key, log_entry.namespace) |
|
275 | 277 | if log_key not in log_dict: |
|
276 | 278 | log_dict[log_key] = [] |
|
277 | 279 | log_dict[log_key].append(log_entry) |
|
278 | 280 | |
|
279 | 281 | for ns, entry_list in log_dict.items(): |
|
280 | 282 | entry_list = sorted(entry_list, key=lambda x: x.timestamp) |
|
281 | 283 | # newest row needs to be indexed in es |
|
282 | 284 | log_entry = entry_list[-1] |
|
283 | 285 | # delete everything from pg and ES, leave the last row in pg |
|
284 | 286 | for e in entry_list[:-1]: |
|
285 | 287 | ids_to_delete.append(e.log_id) |
|
286 | 288 | es_docs_to_delete[e.partition_id].append(e.delete_hash) |
|
287 | 289 | |
|
288 | 290 | es_docs_to_delete[log_entry.partition_id].append( |
|
289 | 291 | log_entry.delete_hash) |
|
290 | 292 | |
|
291 | 293 | es_docs[log_entry.partition_id].append(log_entry.es_doc()) |
|
292 | 294 | |
|
293 | 295 | if ids_to_delete: |
|
294 | 296 | query = DBSession.query(Log).filter( |
|
295 | 297 | Log.log_id.in_(ids_to_delete)) |
|
296 | 298 | query.delete(synchronize_session=False) |
|
297 | 299 | if es_docs_to_delete: |
|
298 | 300 | # batch this to avoid problems with default ES bulk limits |
|
299 | 301 | for es_index in es_docs_to_delete.keys(): |
|
300 | 302 | for batch in in_batches(es_docs_to_delete[es_index], 20): |
|
301 | 303 | query = {'terms': {'delete_hash': batch}} |
|
302 | 304 | |
|
303 | 305 | try: |
|
304 | 306 | Datastores.es.delete_by_query( |
|
305 | 307 | es_index, 'log', query) |
|
306 | 308 | except pyelasticsearch.ElasticHttpNotFoundError as exc: |
|
307 | 309 | msg = 'skipping index {}'.format(es_index) |
|
308 | 310 | log.info(msg) |
|
309 | 311 | |
|
310 | 312 | total_logs = len(dataset) |
|
311 | 313 | |
|
312 | 314 | log_msg = 'LOG_NEW: %s, entries: %s, proto:%s' % ( |
|
313 | 315 | str(resource), |
|
314 | 316 | total_logs, |
|
315 | 317 | proto_version) |
|
316 | 318 | log.info(log_msg) |
|
317 | 319 | # mark_changed(session) |
|
318 | 320 | redis_pipeline = Datastores.redis.pipeline(transaction=False) |
|
319 | 321 | key = REDIS_KEYS['counters']['logs_per_minute'].format(current_time) |
|
320 | 322 | redis_pipeline.incr(key, total_logs) |
|
321 | 323 | redis_pipeline.expire(key, 3600 * 24) |
|
322 | 324 | key = REDIS_KEYS['counters']['events_per_minute_per_user'].format( |
|
323 | 325 | resource.owner_user_id, current_time) |
|
324 | 326 | redis_pipeline.incr(key, total_logs) |
|
325 | 327 | redis_pipeline.expire(key, 3600) |
|
326 | 328 | key = REDIS_KEYS['counters']['logs_per_hour_per_app'].format( |
|
327 | 329 | resource_id, current_time.replace(minute=0)) |
|
328 | 330 | redis_pipeline.incr(key, total_logs) |
|
329 | 331 | redis_pipeline.expire(key, 3600 * 24 * 7) |
|
330 | 332 | redis_pipeline.sadd( |
|
331 | 333 | REDIS_KEYS['apps_that_got_new_data_per_hour'].format( |
|
332 | 334 | current_time.replace(minute=0)), resource_id) |
|
333 | 335 | redis_pipeline.execute() |
|
334 | 336 | add_logs_es(es_docs) |
|
335 | 337 | return True |
|
336 | 338 | except Exception as exc: |
|
337 | 339 | print_traceback(log) |
|
338 | 340 | add_logs.retry(exc=exc) |
|
339 | 341 | |
|
340 | 342 | |
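Review note: the second pass in add_logs keeps only the newest row per (primary_key, namespace) pair and deletes older versions from both PG and ES. A pure-Python analogue of the grouping logic (sample data assumed):

    import collections

    entries = [
        {'pk': 'row-1', 'ns': 'jobs', 'timestamp': 1},
        {'pk': 'row-1', 'ns': 'jobs', 'timestamp': 2},  # newer version of the same row
    ]
    log_dict = collections.defaultdict(list)
    for e in entries:
        log_dict[(e['pk'], e['ns'])].append(e)

    for key, entry_list in log_dict.items():
        entry_list = sorted(entry_list, key=lambda x: x['timestamp'])
        newest, older = entry_list[-1], entry_list[:-1]
        # older rows get deleted from PG and ES; only `newest` is (re)indexed
        assert newest['timestamp'] == 2 and len(older) == 1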
|
341 | 343 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
342 | 344 | def add_logs_es(es_docs): |
|
343 | 345 | for k, v in es_docs.items(): |
|
344 | 346 | Datastores.es.bulk_index(k, 'log', v) |
|
345 | 347 | |
|
346 | 348 | |
|
347 | 349 | @celery.task(queue="metrics", default_retry_delay=600, max_retries=144) |
|
348 | 350 | def add_metrics(resource_id, request_params, dataset, proto_version): |
|
349 | 351 | current_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
350 | 352 | try: |
|
351 | 353 | resource = ApplicationService.by_id_cached()(resource_id) |
|
352 | 354 | resource = DBSession.merge(resource, load=False) |
|
353 | 355 | es_docs = [] |
|
354 | 356 | rows = [] |
|
355 | 357 | for metric in dataset: |
|
356 | 358 | tags = dict(metric['tags']) |
|
357 | 359 | server_n = tags.get('server_name', metric['server_name']).lower() |
|
358 | 360 | tags['server_name'] = server_n or 'unknown' |
|
359 | 361 | new_metric = Metric( |
|
360 | 362 | timestamp=metric['timestamp'], |
|
361 | 363 | resource_id=resource.resource_id, |
|
362 | 364 | namespace=metric['namespace'], |
|
363 | 365 | tags=tags) |
|
364 | 366 | rows.append(new_metric) |
|
365 | 367 | es_docs.append(new_metric.es_doc()) |
|
366 | 368 | session = DBSession() |
|
367 | 369 | session.bulk_save_objects(rows) |
|
368 | 370 | session.flush() |
|
369 | 371 | |
|
370 | 372 | action = 'METRICS' |
|
371 | 373 | metrics_msg = '%s: %s, metrics: %s, proto:%s' % ( |
|
372 | 374 | action, |
|
373 | 375 | str(resource), |
|
374 | 376 | len(dataset), |
|
375 | 377 | proto_version |
|
376 | 378 | ) |
|
377 | 379 | log.info(metrics_msg) |
|
378 | 380 | |
|
379 | 381 | mark_changed(session) |
|
380 | 382 | redis_pipeline = Datastores.redis.pipeline(transaction=False) |
|
381 | 383 | key = REDIS_KEYS['counters']['metrics_per_minute'].format(current_time) |
|
382 | 384 | redis_pipeline.incr(key, len(rows)) |
|
383 | 385 | redis_pipeline.expire(key, 3600 * 24) |
|
384 | 386 | key = REDIS_KEYS['counters']['events_per_minute_per_user'].format( |
|
385 | 387 | resource.owner_user_id, current_time) |
|
386 | 388 | redis_pipeline.incr(key, len(rows)) |
|
387 | 389 | redis_pipeline.expire(key, 3600) |
|
388 | 390 | key = REDIS_KEYS['counters']['metrics_per_hour_per_app'].format( |
|
389 | 391 | resource_id, current_time.replace(minute=0)) |
|
390 | 392 | redis_pipeline.incr(key, len(rows)) |
|
391 | 393 | redis_pipeline.expire(key, 3600 * 24 * 7) |
|
392 | 394 | redis_pipeline.sadd( |
|
393 | 395 | REDIS_KEYS['apps_that_got_new_data_per_hour'].format( |
|
394 | 396 | current_time.replace(minute=0)), resource_id) |
|
395 | 397 | redis_pipeline.execute() |
|
396 | 398 | add_metrics_es(es_docs) |
|
397 | 399 | return True |
|
398 | 400 | except Exception as exc: |
|
399 | 401 | print_traceback(log) |
|
400 | 402 | add_metrics.retry(exc=exc) |
|
401 | 403 | |
|
402 | 404 | |
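Review note: add_reports, add_logs and add_metrics all repeat the same Redis accounting pattern: a non-transactional pipeline, incr plus expire per counter key, then one execute. The pattern in isolation (assumes a reachable Redis on localhost; key name shortened for the sketch):

    import redis

    r = redis.StrictRedis()
    pipe = r.pipeline(transaction=False)  # plain pipelining, no MULTI/EXEC
    key = 'metrics_per_minute:2017-01-01T00:00'
    pipe.incr(key, 10)                    # bump the per-minute counter
    pipe.expire(key, 3600 * 24)           # counters self-expire after a day
    pipe.execute()                        # single round-trip for all commands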
|
403 | 405 | @celery.task(queue="es", default_retry_delay=600, max_retries=144) |
|
404 | 406 | def add_metrics_es(es_docs): |
|
405 | 407 | for doc in es_docs: |
|
406 | 408 | partition = 'rcae_m_%s' % doc['timestamp'].strftime('%Y_%m_%d') |
|
407 | 409 | Datastores.es.index(partition, 'log', doc) |
|
408 | 410 | |
|
409 | 411 | |
|
410 | 412 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
411 | 413 | def check_user_report_notifications(resource_id): |
|
412 | 414 | since_when = datetime.utcnow() |
|
413 | 415 | try: |
|
414 | 416 | request = get_current_request() |
|
415 | 417 | application = ApplicationService.by_id(resource_id) |
|
416 | 418 | if not application: |
|
417 | 419 | return |
|
418 | 420 | error_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
419 | 421 | ReportType.error, resource_id) |
|
420 | 422 | slow_key = REDIS_KEYS['reports_to_notify_per_type_per_app'].format( |
|
421 | 423 | ReportType.slow, resource_id) |
|
422 | 424 | error_group_ids = Datastores.redis.smembers(error_key) |
|
423 | 425 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
424 | 426 | Datastores.redis.delete(error_key) |
|
425 | 427 | Datastores.redis.delete(slow_key) |
|
426 | 428 | err_gids = [int(g_id) for g_id in error_group_ids] |
|
427 | 429 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] |
|
428 | 430 | group_ids = err_gids + slow_gids |
|
429 | 431 | occurence_dict = {} |
|
430 | 432 | for g_id in group_ids: |
|
431 | 433 | key = REDIS_KEYS['counters']['report_group_occurences'].format( |
|
432 | 434 | g_id) |
|
433 | 435 | val = Datastores.redis.get(key) |
|
434 | 436 | Datastores.redis.delete(key) |
|
435 | 437 | if val: |
|
436 | 438 | occurence_dict[g_id] = int(val) |
|
437 | 439 | else: |
|
438 | 440 | occurence_dict[g_id] = 1 |
|
439 | 441 | report_groups = ReportGroupService.by_ids(group_ids) |
|
440 | 442 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
441 | 443 | |
|
442 | 444 | ApplicationService.check_for_groups_alert( |
|
443 | 445 | application, 'alert', report_groups=report_groups, |
|
444 | 446 | occurence_dict=occurence_dict) |
|
445 |     | users = set([p.user for p in … |
|
    | 447 | users = set([p.user for p in ResourceService.users_for_perm(application, 'view')]) |
|
446 | 448 | report_groups = report_groups.all() |
|
447 | 449 | for user in users: |
|
448 | 450 | UserService.report_notify(user, request, application, |
|
449 | 451 | report_groups=report_groups, |
|
450 | 452 | occurence_dict=occurence_dict) |
|
451 | 453 | for group in report_groups: |
|
452 | 454 | # marks report_groups as notified |
|
453 | 455 | if not group.notified: |
|
454 | 456 | group.notified = True |
|
455 | 457 | except Exception as exc: |
|
456 | 458 | print_traceback(log) |
|
457 | 459 | raise |
|
458 | 460 | |
|
459 | 461 | |
|
460 | 462 | @celery.task(queue="default", default_retry_delay=5, max_retries=2) |
|
461 | 463 | def check_alerts(resource_id): |
|
462 | 464 | since_when = datetime.utcnow() |
|
463 | 465 | try: |
|
464 | 466 | request = get_current_request() |
|
465 | 467 | application = ApplicationService.by_id(resource_id) |
|
466 | 468 | if not application: |
|
467 | 469 | return |
|
468 | 470 | error_key = REDIS_KEYS[ |
|
469 | 471 | 'reports_to_notify_per_type_per_app_alerting'].format( |
|
470 | 472 | ReportType.error, resource_id) |
|
471 | 473 | slow_key = REDIS_KEYS[ |
|
472 | 474 | 'reports_to_notify_per_type_per_app_alerting'].format( |
|
473 | 475 | ReportType.slow, resource_id) |
|
474 | 476 | error_group_ids = Datastores.redis.smembers(error_key) |
|
475 | 477 | slow_group_ids = Datastores.redis.smembers(slow_key) |
|
476 | 478 | Datastores.redis.delete(error_key) |
|
477 | 479 | Datastores.redis.delete(slow_key) |
|
478 | 480 | err_gids = [int(g_id) for g_id in error_group_ids] |
|
479 | 481 | slow_gids = [int(g_id) for g_id in list(slow_group_ids)] |
|
480 | 482 | group_ids = err_gids + slow_gids |
|
481 | 483 | occurence_dict = {} |
|
482 | 484 | for g_id in group_ids: |
|
483 | 485 | key = REDIS_KEYS['counters'][ |
|
484 | 486 | 'report_group_occurences_alerting'].format( |
|
485 | 487 | g_id) |
|
486 | 488 | val = Datastores.redis.get(key) |
|
487 | 489 | Datastores.redis.delete(key) |
|
488 | 490 | if val: |
|
489 | 491 | occurence_dict[g_id] = int(val) |
|
490 | 492 | else: |
|
491 | 493 | occurence_dict[g_id] = 1 |
|
492 | 494 | report_groups = ReportGroupService.by_ids(group_ids) |
|
493 | 495 | report_groups.options(sa.orm.joinedload(ReportGroup.last_report_ref)) |
|
494 | 496 | |
|
495 | 497 | ApplicationService.check_for_groups_alert( |
|
496 | 498 | application, 'alert', report_groups=report_groups, |
|
497 | 499 | occurence_dict=occurence_dict, since_when=since_when) |
|
498 | 500 | except Exception as exc: |
|
499 | 501 | print_traceback(log) |
|
500 | 502 | raise |
|
501 | 503 | |
|
502 | 504 | |
|
503 | 505 | @celery.task(queue="default", default_retry_delay=1, max_retries=2) |
|
504 | 506 | def close_alerts(): |
|
505 | 507 | log.warning('Checking alerts') |
|
506 | 508 | since_when = datetime.utcnow() |
|
507 | 509 | try: |
|
508 | 510 | event_types = [Event.types['error_report_alert'], |
|
509 | 511 | Event.types['slow_report_alert'], ] |
|
510 | 512 | statuses = [Event.statuses['active']] |
|
511 | 513 | # get events older than 5 min |
|
512 | 514 | events = EventService.by_type_and_status( |
|
513 | 515 | event_types, |
|
514 | 516 | statuses, |
|
515 | 517 | older_than=(since_when - timedelta(minutes=5))) |
|
516 | 518 | for event in events: |
|
517 | 519 | # see if we can close them |
|
518 | 520 | event.validate_or_close( |
|
519 | 521 | since_when=(since_when - timedelta(minutes=1))) |
|
520 | 522 | except Exception as exc: |
|
521 | 523 | print_traceback(log) |
|
522 | 524 | raise |
|
523 | 525 | |
|
524 | 526 | |
|
525 | 527 | @celery.task(queue="default", default_retry_delay=600, max_retries=144) |
|
526 | 528 | def update_tag_counter(tag_name, tag_value, count): |
|
527 | 529 | try: |
|
528 | 530 | query = DBSession.query(Tag).filter(Tag.name == tag_name).filter( |
|
529 | 531 | sa.cast(Tag.value, sa.types.TEXT) == sa.cast(json.dumps(tag_value), |
|
530 | 532 | sa.types.TEXT)) |
|
531 | 533 | query.update({'times_seen': Tag.times_seen + count, |
|
532 | 534 | 'last_timestamp': datetime.utcnow()}, |
|
533 | 535 | synchronize_session=False) |
|
534 | 536 | session = DBSession() |
|
535 | 537 | mark_changed(session) |
|
536 | 538 | return True |
|
537 | 539 | except Exception as exc: |
|
538 | 540 | print_traceback(log) |
|
539 | 541 | update_tag_counter.retry(exc=exc) |
|
540 | 542 | |
|
541 | 543 | |
|
542 | 544 | @celery.task(queue="default") |
|
543 | 545 | def update_tag_counters(): |
|
544 | 546 | """ |
|
545 | 547 | Sets task to update counters for application tags |
|
546 | 548 | """ |
|
547 | 549 | tags = Datastores.redis.lrange(REDIS_KEYS['seen_tag_list'], 0, -1) |
|
548 | 550 | Datastores.redis.delete(REDIS_KEYS['seen_tag_list']) |
|
549 | 551 | c = collections.Counter(tags) |
|
550 | 552 | for t_json, count in c.items(): |
|
551 | 553 | tag_info = json.loads(t_json) |
|
552 | 554 | update_tag_counter.delay(tag_info[0], tag_info[1], count) |
|
553 | 555 | |
|
554 | 556 | |
|
555 | 557 | @celery.task(queue="default") |
|
556 | 558 | def daily_digest(): |
|
557 | 559 | """ |
|
558 | 560 | Sends daily digest with top 50 error reports |
|
559 | 561 | """ |
|
560 | 562 | request = get_current_request() |
|
561 | 563 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) |
|
562 | 564 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) |
|
563 | 565 | since_when = datetime.utcnow() - timedelta(hours=8) |
|
564 | 566 | log.warning('Generating daily digests') |
|
565 | 567 | for resource_id in apps: |
|
566 | 568 | resource_id = resource_id.decode('utf8') |
|
567 | 569 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
568 | 570 | filter_settings = {'resource': [resource_id], |
|
569 | 571 | 'tags': [{'name': 'type', |
|
570 | 572 | 'value': ['error'], 'op': None}], |
|
571 | 573 | 'type': 'error', 'start_date': since_when, |
|
572 | 574 | 'end_date': end_date} |
|
573 | 575 | |
|
574 | 576 | reports = ReportGroupService.get_trending( |
|
575 | 577 | request, filter_settings=filter_settings, limit=50) |
|
576 | 578 | |
|
577 | 579 | application = ApplicationService.by_id(resource_id) |
|
578 | 580 | if application: |
|
579 |     | users = set([p.user for p in … |
|
    | 581 | users = set([p.user for p in ResourceService.users_for_perm(application, 'view')]) |
|
580 | 582 | for user in users: |
|
581 | 583 | user.send_digest(request, application, reports=reports, |
|
582 | 584 | since_when=since_when) |
|
583 | 585 | |
|
584 | 586 | |
|
585 | 587 | @celery.task(queue="default") |
|
586 | 588 | def notifications_reports(): |
|
587 | 589 | """ |
|
588 | 590 | Loop that checks redis for info and then issues new tasks to celery to |
|
589 | 591 | issue notifications |
|
590 | 592 | """ |
|
591 | 593 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports']) |
|
592 | 594 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports']) |
|
593 | 595 | for app in apps: |
|
594 | 596 | log.warning('Notify for app: %s' % app) |
|
595 | 597 | check_user_report_notifications.delay(app.decode('utf8')) |
|
596 | 598 | |
|
597 | 599 | @celery.task(queue="default") |
|
598 | 600 | def alerting_reports(): |
|
599 | 601 | """ |
|
600 | 602 | Loop that checks redis for info and then issues new tasks to celery to |
|
601 | 603 | perform the following: |
|
602 | 604 | - which applications should have new alerts opened |
|
603 | 605 | """ |
|
604 | 606 | |
|
605 | 607 | apps = Datastores.redis.smembers(REDIS_KEYS['apps_that_had_reports_alerting']) |
|
606 | 608 | Datastores.redis.delete(REDIS_KEYS['apps_that_had_reports_alerting']) |
|
607 | 609 | for app in apps: |
|
608 | 610 | log.warning('Notify for app: %s' % app) |
|
609 | 611 | check_alerts.delay(app.decode('utf8')) |
|
610 | 612 | |
|
611 | 613 | |
|
612 | 614 | @celery.task(queue="default", soft_time_limit=3600 * 4, |
|
613 | 615 | hard_time_limit=3600 * 4, max_retries=144) |
|
614 | 616 | def logs_cleanup(resource_id, filter_settings): |
|
615 | 617 | request = get_current_request() |
|
616 | 618 | request.tm.begin() |
|
617 | 619 | es_query = { |
|
618 | 620 | "_source": False, |
|
619 | 621 | "size": 5000, |
|
620 | 622 | "query": { |
|
621 | 623 | "filtered": { |
|
622 | 624 | "filter": { |
|
623 | 625 | "and": [{"term": {"resource_id": resource_id}}] |
|
624 | 626 | } |
|
625 | 627 | } |
|
626 | 628 | } |
|
627 | 629 | } |
|
628 | 630 | |
|
629 | 631 | query = DBSession.query(Log).filter(Log.resource_id == resource_id) |
|
630 | 632 | if filter_settings['namespace']: |
|
631 | 633 | query = query.filter(Log.namespace == filter_settings['namespace'][0]) |
|
632 | 634 | es_query['query']['filtered']['filter']['and'].append( |
|
633 | 635 | {"term": {"namespace": filter_settings['namespace'][0]}} |
|
634 | 636 | ) |
|
635 | 637 | query.delete(synchronize_session=False) |
|
636 | 638 | request.tm.commit() |
|
637 | 639 | result = request.es_conn.search(es_query, index='rcae_l_*', |
|
638 | 640 | doc_type='log', es_scroll='1m', |
|
639 | 641 | es_search_type='scan') |
|
640 | 642 | scroll_id = result['_scroll_id'] |
|
641 | 643 | while True: |
|
642 | 644 | log.warning('log_cleanup, app:{} ns:{} batch'.format( |
|
643 | 645 | resource_id, |
|
644 | 646 | filter_settings['namespace'] |
|
645 | 647 | )) |
|
646 | 648 | es_docs_to_delete = [] |
|
647 | 649 | result = request.es_conn.send_request( |
|
648 | 650 | 'POST', ['_search', 'scroll'], |
|
649 | 651 | body=scroll_id, query_params={"scroll": '1m'}) |
|
650 | 652 | scroll_id = result['_scroll_id'] |
|
651 | 653 | if not result['hits']['hits']: |
|
652 | 654 | break |
|
653 | 655 | for doc in result['hits']['hits']: |
|
654 | 656 | es_docs_to_delete.append({"id": doc['_id'], |
|
655 | 657 | "index": doc['_index']}) |
|
656 | 658 | |
|
657 | 659 | for batch in in_batches(es_docs_to_delete, 10): |
|
658 | 660 | Datastores.es.bulk([Datastores.es.delete_op(doc_type='log', |
|
659 | 661 | **to_del) |
|
660 | 662 | for to_del in batch]) |
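Review note: logs_cleanup pages through ES with scan/scroll and deletes in chunks of 10 via in_batches. The helper lives in appenlight.lib.utils; presumably it just chunks a sequence, along these lines (a minimal stand-in, not the actual implementation):

    def in_batches(seq, size):
        # yield consecutive slices of at most `size` items
        for i in range(0, len(seq), size):
            yield seq[i:i + size]

    assert list(in_batches([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]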
@@ -1,896 +1,896 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import wtforms |
|
18 | 18 | import formencode |
|
19 | 19 | import re |
|
20 | 20 | import pyramid.threadlocal |
|
21 | 21 | import datetime |
|
22 | 22 | import appenlight.lib.helpers as h |
|
23 | 23 | |
|
24 |    | from … |
|
25 |    | from … |
|
   | 24 | from ziggurat_foundations.models.services.user import UserService |
|
   | 25 | from ziggurat_foundations.models.services.group import GroupService |
|
26 | 26 | from appenlight.models import DBSession |
|
27 | 27 | from appenlight.models.alert_channel import AlertChannel |
|
28 | 28 | from appenlight.models.integrations import IntegrationException |
|
29 | 29 | from appenlight.models.integrations.campfire import CampfireIntegration |
|
30 | 30 | from appenlight.models.integrations.bitbucket import BitbucketIntegration |
|
31 | 31 | from appenlight.models.integrations.github import GithubIntegration |
|
32 | 32 | from appenlight.models.integrations.flowdock import FlowdockIntegration |
|
33 | 33 | from appenlight.models.integrations.hipchat import HipchatIntegration |
|
34 | 34 | from appenlight.models.integrations.jira import JiraClient |
|
35 | 35 | from appenlight.models.integrations.slack import SlackIntegration |
|
36 | 36 | from appenlight.lib.ext_json import json |
|
37 | 37 | from wtforms.ext.csrf.form import SecureForm |
|
38 | 38 | from wtforms.compat import iteritems |
|
39 | 39 | from collections import defaultdict |
|
40 | 40 | |
|
41 | 41 | _ = str |
|
42 | 42 | |
|
43 | 43 | strip_filter = lambda x: x.strip() if x else None |
|
44 | 44 | uppercase_filter = lambda x: x.upper() if x else None |
|
45 | 45 | |
|
46 | 46 | FALSE_VALUES = ('false', '', False, None) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class CSRFException(Exception): |
|
50 | 50 | pass |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class ReactorForm(SecureForm): |
|
54 | 54 | def __init__(self, formdata=None, obj=None, prefix='', csrf_context=None, |
|
55 | 55 | **kwargs): |
|
56 | 56 | super(ReactorForm, self).__init__(formdata=formdata, obj=obj, |
|
57 | 57 | prefix=prefix, |
|
58 | 58 | csrf_context=csrf_context, **kwargs) |
|
59 | 59 | self._csrf_context = csrf_context |
|
60 | 60 | |
|
61 | 61 | def generate_csrf_token(self, csrf_context): |
|
62 | 62 | return csrf_context.session.get_csrf_token() |
|
63 | 63 | |
|
64 | 64 | def validate_csrf_token(self, field): |
|
65 | 65 | request = self._csrf_context or pyramid.threadlocal.get_current_request() |
|
66 | 66 | is_from_auth_token = 'auth:auth_token' in request.effective_principals |
|
67 | 67 | if is_from_auth_token: |
|
68 | 68 | return True |
|
69 | 69 | |
|
70 | 70 | if field.data != field.current_token: |
|
71 | 71 | # try to save the day by using token from angular |
|
72 | 72 | if request.headers.get('X-XSRF-TOKEN') != field.current_token: |
|
73 | 73 | raise CSRFException('Invalid CSRF token') |
|
74 | 74 | |
|
75 | 75 | @property |
|
76 | 76 | def errors_dict(self): |
|
77 | 77 | r_dict = defaultdict(list) |
|
78 | 78 | for k, errors in self.errors.items(): |
|
79 | 79 | r_dict[k].extend([str(e) for e in errors]) |
|
80 | 80 | return r_dict |
|
81 | 81 | |
|
82 | 82 | @property |
|
83 | 83 | def errors_json(self): |
|
84 | 84 | return json.dumps(self.errors_dict) |
|
85 | 85 | |
|
86 | 86 | def populate_obj(self, obj, ignore_none=False): |
|
87 | 87 | """ |
|
88 | 88 | Populates the attributes of the passed `obj` with data from the form's |
|
89 | 89 | fields. |
|
90 | 90 | |
|
91 | 91 | :note: This is a destructive operation; Any attribute with the same name |
|
92 | 92 | as a field will be overridden. Use with caution. |
|
93 | 93 | """ |
|
94 | 94 | if ignore_none: |
|
95 | 95 | for name, field in iteritems(self._fields): |
|
96 | 96 | if field.data is not None: |
|
97 | 97 | field.populate_obj(obj, name) |
|
98 | 98 | else: |
|
99 | 99 | for name, field in iteritems(self._fields): |
|
100 | 100 | field.populate_obj(obj, name) |
|
101 | 101 | |
|
102 | 102 | css_classes = {} |
|
103 | 103 | ignore_labels = {} |
|
104 | 104 | |
|
105 | 105 | |
|
106 | 106 | class SignInForm(ReactorForm): |
|
107 | 107 | came_from = wtforms.HiddenField() |
|
108 | 108 | sign_in_user_name = wtforms.StringField(_('User Name')) |
|
109 | 109 | sign_in_user_password = wtforms.PasswordField(_('Password')) |
|
110 | 110 | |
|
111 | 111 | ignore_labels = ['submit'] |
|
112 | 112 | css_classes = {'submit': 'btn btn-primary'} |
|
113 | 113 | |
|
114 | 114 | html_attrs = {'sign_in_user_name': {'placeholder': 'Your login'}, |
|
115 | 115 | 'sign_in_user_password': { |
|
116 | 116 | 'placeholder': 'Your password'}} |
|
117 | 117 | |
|
118 | 118 | |
|
119 | 119 | from wtforms.widgets import html_params, HTMLString |
|
120 | 120 | |
|
121 | 121 | |
|
122 | 122 | def select_multi_checkbox(field, ul_class='set', **kwargs): |
|
123 | 123 | """Render a multi-checkbox widget""" |
|
124 | 124 | kwargs.setdefault('type', 'checkbox') |
|
125 | 125 | field_id = kwargs.pop('id', field.id) |
|
126 | 126 | html = ['<ul %s>' % html_params(id=field_id, class_=ul_class)] |
|
127 | 127 | for value, label, checked in field.iter_choices(): |
|
128 | 128 | choice_id = '%s-%s' % (field_id, value) |
|
129 | 129 | options = dict(kwargs, name=field.name, value=value, id=choice_id) |
|
130 | 130 | if checked: |
|
131 | 131 | options['checked'] = 'checked' |
|
132 | 132 | html.append('<li><input %s /> ' % html_params(**options)) |
|
133 | 133 | html.append('<label for="%s">%s</label></li>' % (choice_id, label)) |
|
134 | 134 | html.append('</ul>') |
|
135 | 135 | return HTMLString(''.join(html)) |
|
136 | 136 | |
|
137 | 137 | |
|
138 | 138 | def button_widget(field, button_cls='ButtonField btn btn-default', **kwargs): |
|
139 | 139 | """Render a button widget""" |
|
140 | 140 | kwargs.setdefault('type', 'button') |
|
141 | 141 | field_id = kwargs.pop('id', field.id) |
|
142 | 142 | kwargs.setdefault('value', field.label.text) |
|
143 | 143 | html = ['<button %s>%s</button>' % (html_params(id=field_id, |
|
144 | 144 | class_=button_cls), |
|
145 | 145 | kwargs['value'],)] |
|
146 | 146 | return HTMLString(''.join(html)) |
|
147 | 147 | |
|
148 | 148 | |
|
149 | 149 | def clean_whitespace(value): |
|
150 | 150 | if value: |
|
151 | 151 | return value.strip() |
|
152 | 152 | return value |
|
153 | 153 | |
|
154 | 154 | |
|
155 | 155 | def found_username_validator(form, field): |
|
156 |     | user = User.by_user_name(field.data) |
|
    | 156 | user = UserService.by_user_name(field.data) |
|
157 | 157 | # sets user to recover in email validator |
|
158 | 158 | form.field_user = user |
|
159 | 159 | if not user: |
|
160 | 160 | raise wtforms.ValidationError('This username does not exist') |
|
161 | 161 | |
|
162 | 162 | |
|
163 | 163 | def found_username_email_validator(form, field): |
|
164 |     | user = User.by_email(field.data) |
|
    | 164 | user = UserService.by_email(field.data) |
|
165 | 165 | if not user: |
|
166 | 166 | raise wtforms.ValidationError('Email is incorrect') |
|
167 | 167 | |
|
168 | 168 | |
|
169 | 169 | def unique_username_validator(form, field): |
|
170 |     | user = User.by_user_name(field.data) |
|
    | 170 | user = UserService.by_user_name(field.data) |
|
171 | 171 | if user: |
|
172 | 172 | raise wtforms.ValidationError('This username already exists in system') |
|
173 | 173 | |
|
174 | 174 | |
|
175 | 175 | def unique_groupname_validator(form, field): |
|
176 |     | group = Group.by_group_name(field.data) |
|
    | 176 | group = GroupService.by_group_name(field.data) |
|
177 | 177 | mod_group = getattr(form, '_modified_group', None) |
|
178 | 178 | if group and (not mod_group or mod_group.id != group.id): |
|
179 | 179 | raise wtforms.ValidationError( |
|
180 | 180 | 'This group name already exists in system') |
|
181 | 181 | |
|
182 | 182 | |
|
183 | 183 | def unique_email_validator(form, field): |
|
184 |     | user = User.by_email(field.data) |
|
    | 184 | user = UserService.by_email(field.data) |
|
185 | 185 | if user: |
|
186 | 186 | raise wtforms.ValidationError('This email already exists in system') |
|
187 | 187 | |
|
188 | 188 | |
|
189 | 189 | def email_validator(form, field): |
|
190 | 190 | validator = formencode.validators.Email() |
|
191 | 191 | try: |
|
192 | 192 | validator.to_python(field.data) |
|
193 | 193 | except formencode.Invalid as e: |
|
194 | 194 | raise wtforms.ValidationError(e) |
|
195 | 195 | |
|
196 | 196 | |
|
197 | 197 | def unique_alert_email_validator(form, field): |
|
198 | 198 | q = DBSession.query(AlertChannel) |
|
199 | 199 | q = q.filter(AlertChannel.channel_name == 'email') |
|
200 | 200 | q = q.filter(AlertChannel.channel_value == field.data) |
|
201 | 201 | email = q.first() |
|
202 | 202 | if email: |
|
203 | 203 | raise wtforms.ValidationError( |
|
204 | 204 | 'This email already exists in alert system') |
|
205 | 205 | |
|
206 | 206 | |
|
207 | 207 | def blocked_email_validator(form, field): |
|
208 | 208 | blocked_emails = [ |
|
209 | 209 | 'goood-mail.org', |
|
210 | 210 | 'shoeonlineblog.com', |
|
211 | 211 | 'louboutinemart.com', |
|
212 | 212 | 'guccibagshere.com', |
|
213 | 213 | 'nikeshoesoutletforsale.com' |
|
214 | 214 | ] |
|
215 | 215 | data = field.data or '' |
|
216 | 216 | domain = data.split('@')[-1] |
|
217 | 217 | if domain in blocked_emails: |
|
218 | 218 | raise wtforms.ValidationError('Don\'t spam') |
|
219 | 219 | |
|
220 | 220 | |
|
221 | 221 | def old_password_validator(form, field): |
|
222 |     | if not field.user… |
|
    | 222 | if not UserService.check_password(field.user, field.data or ''): |
|
223 | 223 | raise wtforms.ValidationError('You need to enter correct password') |
|
224 | 224 | |
|
225 | 225 | |
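Review note: the same ziggurat_foundations 0.8 service split as the validators above — the password check moves off the user instance onto UserService.check_password(user, raw). Stand-alone sketch (the pre-0.8 call is assumed, the removed line being truncated in this view):

    from ziggurat_foundations.models.services.user import UserService

    def password_ok(user, raw_password):
        # pre-0.8 style (assumed): return user.check_password(raw_password)
        return UserService.check_password(user, raw_password or '')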
|
226 | 226 | class UserRegisterForm(ReactorForm): |
|
227 | 227 | user_name = wtforms.StringField( |
|
228 | 228 | _('User Name'), |
|
229 | 229 | filters=[strip_filter], |
|
230 | 230 | validators=[ |
|
231 | 231 | wtforms.validators.Length(min=2, max=30), |
|
232 | 232 | wtforms.validators.Regexp( |
|
233 | 233 | re.compile(r'^[\.\w-]+$', re.UNICODE), |
|
234 | 234 | message="Invalid characters used"), |
|
235 | 235 | unique_username_validator, |
|
236 | 236 | wtforms.validators.DataRequired() |
|
237 | 237 | ]) |
|
238 | 238 | |
|
239 | 239 | user_password = wtforms.PasswordField(_('User Password'), |
|
240 | 240 | filters=[strip_filter], |
|
241 | 241 | validators=[ |
|
242 | 242 | wtforms.validators.Length(min=4), |
|
243 | 243 | wtforms.validators.DataRequired() |
|
244 | 244 | ]) |
|
245 | 245 | |
|
246 | 246 | email = wtforms.StringField(_('Email Address'), |
|
247 | 247 | filters=[strip_filter], |
|
248 | 248 | validators=[email_validator, |
|
249 | 249 | unique_email_validator, |
|
250 | 250 | blocked_email_validator, |
|
251 | 251 | wtforms.validators.DataRequired()]) |
|
252 | 252 | first_name = wtforms.HiddenField(_('First Name')) |
|
253 | 253 | last_name = wtforms.HiddenField(_('Last Name')) |
|
254 | 254 | |
|
255 | 255 | ignore_labels = ['submit'] |
|
256 | 256 | css_classes = {'submit': 'btn btn-primary'} |
|
257 | 257 | |
|
258 | 258 | html_attrs = {'user_name': {'placeholder': 'Your login'}, |
|
259 | 259 | 'user_password': {'placeholder': 'Your password'}, |
|
260 | 260 | 'email': {'placeholder': 'Your email'}} |
|
261 | 261 | |
|
262 | 262 | |
|
263 | 263 | class UserCreateForm(UserRegisterForm): |
|
264 | 264 | status = wtforms.BooleanField('User status', |
|
265 | 265 | false_values=FALSE_VALUES) |
|
266 | 266 | |
|
267 | 267 | |
|
268 | 268 | class UserUpdateForm(UserCreateForm): |
|
269 | 269 | user_name = None |
|
270 | 270 | user_password = wtforms.PasswordField(_('User Password'), |
|
271 | 271 | filters=[strip_filter], |
|
272 | 272 | validators=[ |
|
273 | 273 | wtforms.validators.Length(min=4), |
|
274 | 274 | wtforms.validators.Optional() |
|
275 | 275 | ]) |
|
276 | 276 | email = wtforms.StringField(_('Email Address'), |
|
277 | 277 | filters=[strip_filter], |
|
278 | 278 | validators=[email_validator, |
|
279 | 279 | wtforms.validators.DataRequired()]) |
|
280 | 280 | |
|
281 | 281 | |
|
282 | 282 | class LostPasswordForm(ReactorForm): |
|
283 | 283 | email = wtforms.StringField(_('Email Address'), |
|
284 | 284 | filters=[strip_filter], |
|
285 | 285 | validators=[email_validator, |
|
286 | 286 | found_username_email_validator, |
|
287 | 287 | wtforms.validators.DataRequired()]) |
|
288 | 288 | |
|
289 | 289 | submit = wtforms.SubmitField(_('Reset password')) |
|
290 | 290 | ignore_labels = ['submit'] |
|
291 | 291 | css_classes = {'submit': 'btn btn-primary'} |
|
292 | 292 | |
|
293 | 293 | |
|
294 | 294 | class ChangePasswordForm(ReactorForm): |
|
295 | 295 | old_password = wtforms.PasswordField( |
|
296 | 296 | 'Old Password', |
|
297 | 297 | filters=[strip_filter], |
|
298 | 298 | validators=[old_password_validator, |
|
299 | 299 | wtforms.validators.DataRequired()]) |
|
300 | 300 | |
|
301 | 301 | new_password = wtforms.PasswordField( |
|
302 | 302 | 'New Password', |
|
303 | 303 | filters=[strip_filter], |
|
304 | 304 | validators=[wtforms.validators.Length(min=4), |
|
305 | 305 | wtforms.validators.DataRequired()]) |
|
306 | 306 | new_password_confirm = wtforms.PasswordField( |
|
307 | 307 | 'Confirm Password', |
|
308 | 308 | filters=[strip_filter], |
|
309 | 309 | validators=[wtforms.validators.EqualTo('new_password'), |
|
310 | 310 | wtforms.validators.DataRequired()]) |
|
311 | 311 | submit = wtforms.SubmitField('Change Password') |
|
312 | 312 | ignore_labels = ['submit'] |
|
313 | 313 | css_classes = {'submit': 'btn btn-primary'} |
|
314 | 314 | |
|
315 | 315 | |
|
316 | 316 | class CheckPasswordForm(ReactorForm): |
|
317 | 317 | password = wtforms.PasswordField( |
|
318 | 318 | 'Password', |
|
319 | 319 | filters=[strip_filter], |
|
320 | 320 | validators=[old_password_validator, |
|
321 | 321 | wtforms.validators.DataRequired()]) |
|
322 | 322 | |
|
323 | 323 | |
|
324 | 324 | class NewPasswordForm(ReactorForm): |
|
325 | 325 | new_password = wtforms.PasswordField( |
|
326 | 326 | 'New Password', |
|
327 | 327 | filters=[strip_filter], |
|
328 | 328 | validators=[wtforms.validators.Length(min=4), |
|
329 | 329 | wtforms.validators.DataRequired()]) |
|
330 | 330 | new_password_confirm = wtforms.PasswordField( |
|
331 | 331 | 'Confirm Password', |
|
332 | 332 | filters=[strip_filter], |
|
333 | 333 | validators=[wtforms.validators.EqualTo('new_password'), |
|
334 | 334 | wtforms.validators.DataRequired()]) |
|
335 | 335 | submit = wtforms.SubmitField('Set Password') |
|
336 | 336 | ignore_labels = ['submit'] |
|
337 | 337 | css_classes = {'submit': 'btn btn-primary'} |
|
338 | 338 | |
|
339 | 339 | |
|
340 | 340 | class CORSTextAreaField(wtforms.StringField): |
|
341 | 341 | """ |
|
342 | 342 | This field represents an HTML ``<textarea>`` and can be used to take |
|
343 | 343 | multi-line input. |
|
344 | 344 | """ |
|
345 | 345 | widget = wtforms.widgets.TextArea() |
|
346 | 346 | |
|
347 | 347 | def process_formdata(self, valuelist): |
|
348 | 348 | self.data = [] |
|
349 | 349 | if valuelist: |
|
350 | 350 | data = [x.strip() for x in valuelist[0].split('\n')] |
|
351 | 351 | for d in data: |
|
352 | 352 | if not d: |
|
353 | 353 | continue |
|
354 | 354 | if d.startswith('www.'): |
|
355 | 355 | d = d[4:] |
|
356 | 356 | if data: |
|
357 | 357 | self.data.append(d) |
|
358 | 358 | else: |
|
359 | 359 | self.data = [] |
|
360 | 360 | self.data = '\n'.join(self.data) |
|
361 | 361 | |
|
362 | 362 | |
|
363 | 363 | class ApplicationCreateForm(ReactorForm): |
|
364 | 364 | resource_name = wtforms.StringField( |
|
365 | 365 | _('Application name'), |
|
366 | 366 | filters=[strip_filter], |
|
367 | 367 | validators=[wtforms.validators.Length(min=1), |
|
368 | 368 | wtforms.validators.DataRequired()]) |
|
369 | 369 | |
|
370 | 370 | domains = CORSTextAreaField( |
|
371 | 371 | _('Domain names for CORS headers'),
|
372 | 372 | validators=[wtforms.validators.Length(min=1), |
|
373 | 373 | wtforms.validators.Optional()], |
|
374 | 374 | description='Required for JavaScript error '

375 | 375 | 'tracking (one domain per line, skip the http:// part)')
|
376 | 376 | |
|
377 | 377 | submit = wtforms.SubmitField(_('Create Application')) |
|
378 | 378 | |
|
379 | 379 | ignore_labels = ['submit'] |
|
380 | 380 | css_classes = {'submit': 'btn btn-primary'} |
|
381 | 381 | html_attrs = {'resource_name': {'placeholder': 'Application Name'}, |
|
382 | 382 | 'uptime_url': {'placeholder': 'http://somedomain.com'}} |
|
383 | 383 | |
|
384 | 384 | |
|
385 | 385 | class ApplicationUpdateForm(ApplicationCreateForm): |
|
386 | 386 | default_grouping = wtforms.SelectField( |
|
387 | 387 | _('Default grouping for errors'), |
|
388 | 388 | choices=[('url_type', 'Error Type + location',), |
|
389 | 389 | ('url_traceback', 'Traceback + location',), |
|
390 | 390 | ('traceback_server', 'Traceback + Server',)], |
|
391 | 391 | default='url_traceback') |
|
392 | 392 | |
|
393 | 393 | error_report_threshold = wtforms.IntegerField( |
|
394 | 394 | _('Alert on error reports'), |
|
395 | 395 | validators=[ |
|
396 | 396 | wtforms.validators.NumberRange(min=1), |
|
397 | 397 | wtforms.validators.DataRequired() |
|
398 | 398 | ], |
|
399 | 399 | description='Requires the application to send at least this many '

400 | 400 | 'error reports per minute to open an alert'
|
401 | 401 | ) |
|
402 | 402 | |
|
403 | 403 | slow_report_threshold = wtforms.IntegerField( |
|
404 | 404 | _('Alert on slow reports'), |
|
405 | 405 | validators=[wtforms.validators.NumberRange(min=1), |
|
406 | 406 | wtforms.validators.DataRequired()], |
|
407 | 407 | description='Requires the application to send at least this many '

408 | 408 | 'slow reports per minute to open an alert')
|
409 | 409 | |
|
410 | 410 | allow_permanent_storage = wtforms.BooleanField( |
|
411 | 411 | _('Permanent logs'), |
|
412 | 412 | false_values=FALSE_VALUES, |
|
413 | 413 | description=_( |
|
414 | 414 | 'Allow permanent storage of logs in separate DB partitions')) |
|
415 | 415 | |
|
416 | 416 | submit = wtforms.SubmitField(_('Update Application'))
|
417 | 417 | |
|
418 | 418 | |
|
419 | 419 | class UserSearchSchemaForm(ReactorForm): |
|
420 | 420 | user_name = wtforms.StringField('User Name', |
|
421 | 421 | filters=[strip_filter], ) |
|
422 | 422 | |
|
423 | 423 | submit = wtforms.SubmitField(_('Search User')) |
|
424 | 424 | ignore_labels = ['submit'] |
|
425 | 425 | css_classes = {'submit': 'btn btn-primary'} |
|
426 | 426 | |
|
427 | 427 |
|
428 | 428 | |
|
429 | 429 | |
|
430 | 430 | class YesNoForm(ReactorForm): |
|
431 | 431 | no = wtforms.SubmitField('No', default='') |
|
432 | 432 | yes = wtforms.SubmitField('Yes', default='') |
|
433 | 433 | ignore_labels = ['submit'] |
|
434 | 434 | css_classes = {'submit': 'btn btn-primary'} |
|
435 | 435 | |
|
436 | 436 | |
|
437 | 437 | status_codes = [('', 'All',), ('500', '500',), ('404', '404',)] |
|
438 | 438 | |
|
439 | 439 | priorities = [('', 'All',)] |
|
440 | 440 | for i in range(1, 11): |
|
441 | 441 | priorities.append((str(i), str(i),)) |
|
442 | 442 | |
|
443 | 443 | report_status_choices = [('', 'All',), |
|
444 | 444 | ('never_reviewed', 'Never reviewed',),

445 | 445 | ('reviewed', 'Reviewed',),
|
446 | 446 | ('public', 'Public',), |
|
447 | 447 | ('fixed', 'Fixed',), ] |
|
448 | 448 | |
|
449 | 449 | |
|
450 | 450 | class ReportBrowserForm(ReactorForm): |
|
451 | 451 | applications = wtforms.SelectMultipleField('Applications', |
|
452 | 452 | widget=select_multi_checkbox) |
|
453 | 453 | http_status = wtforms.SelectField('HTTP Status', choices=status_codes) |
|
454 | 454 | priority = wtforms.SelectField('Priority', choices=priorities, default='') |
|
455 | 455 | start_date = wtforms.DateField('Start Date') |
|
456 | 456 | end_date = wtforms.DateField('End Date') |
|
457 | 457 | error = wtforms.StringField('Error') |
|
458 | 458 | url_path = wtforms.StringField('URL Path') |
|
459 | 459 | url_domain = wtforms.StringField('URL Domain') |
|
460 | 460 | report_status = wtforms.SelectField('Report status', |
|
461 | 461 | choices=report_status_choices, |
|
462 | 462 | default='') |
|
463 | 463 | submit = wtforms.SubmitField('<span class="glyphicon glyphicon-search">' |
|
464 | 464 | '</span> Filter results', |
|
465 | 465 | widget=button_widget) |
|
466 | 466 | |
|
467 | 467 | ignore_labels = ['submit'] |
|
468 | 468 | css_classes = {'submit': 'btn btn-primary'} |
|
469 | 469 | |
|
470 | 470 | |
|
471 | 471 | slow_report_status_choices = [('', 'All',), |
|
472 | 472 | ('never_reviewed', 'Never reviewed',),

473 | 473 | ('reviewed', 'Reviewed',),
|
474 | 474 | ('public', 'Public',), ] |
|
475 | 475 | |
|
476 | 476 | |
|
477 | 477 | class BulkOperationForm(ReactorForm): |
|
478 | 478 | applications = wtforms.SelectField('Applications') |
|
479 | 479 | start_date = wtforms.DateField( |
|
480 | 480 | 'Start Date', |
|
481 | 481 | default=lambda: datetime.datetime.utcnow() - datetime.timedelta( |
|
482 | 482 | days=90)) |
|
483 | 483 | end_date = wtforms.DateField('End Date') |
|
484 | 484 | confirm = wtforms.BooleanField( |
|
485 | 485 | 'Confirm operation', |
|
486 | 486 | validators=[wtforms.validators.DataRequired()]) |
|
487 | 487 | |
|
488 | 488 | |
|
489 | 489 | class LogBrowserForm(ReactorForm): |
|
490 | 490 | applications = wtforms.SelectMultipleField('Applications', |
|
491 | 491 | widget=select_multi_checkbox) |
|
492 | 492 | start_date = wtforms.DateField('Start Date') |
|
493 | 493 | log_level = wtforms.StringField('Log level') |
|
494 | 494 | message = wtforms.StringField('Message') |
|
495 | 495 | namespace = wtforms.StringField('Namespace') |
|
496 | 496 | submit = wtforms.SubmitField( |
|
497 | 497 | '<span class="glyphicon glyphicon-search"></span> Filter results', |
|
498 | 498 | widget=button_widget) |
|
499 | 499 | ignore_labels = ['submit'] |
|
500 | 500 | css_classes = {'submit': 'btn btn-primary'} |
|
501 | 501 | |
|
502 | 502 | |
|
503 | 503 | class CommentForm(ReactorForm): |
|
504 | 504 | body = wtforms.TextAreaField('Comment', validators=[ |
|
505 | 505 | wtforms.validators.Length(min=1), |
|
506 | 506 | wtforms.validators.DataRequired() |
|
507 | 507 | ]) |
|
508 | 508 | submit = wtforms.SubmitField('Comment', ) |
|
509 | 509 | ignore_labels = ['submit'] |
|
510 | 510 | css_classes = {'submit': 'btn btn-primary'} |
|
511 | 511 | |
|
512 | 512 | |
|
513 | 513 | class EmailChannelCreateForm(ReactorForm): |
|
514 | 514 | email = wtforms.StringField(_('Email Address'), |
|
515 | 515 | filters=[strip_filter], |
|
516 | 516 | validators=[email_validator, |
|
517 | 517 | unique_alert_email_validator, |
|
518 | 518 | wtforms.validators.DataRequired()]) |
|
519 | 519 | submit = wtforms.SubmitField('Add email channel', ) |
|
520 | 520 | ignore_labels = ['submit'] |
|
521 | 521 | css_classes = {'submit': 'btn btn-primary'} |
|
522 | 522 | |
|
523 | 523 | |
|
524 | 524 | def gen_user_profile_form(): |
|
525 | 525 | class UserProfileForm(ReactorForm): |
|
526 | 526 | email = wtforms.StringField( |
|
527 | 527 | _('Email Address'), |
|
528 | 528 | validators=[email_validator, wtforms.validators.DataRequired()]) |
|
529 | 529 | first_name = wtforms.StringField(_('First Name')) |
|
530 | 530 | last_name = wtforms.StringField(_('Last Name')) |
|
531 | 531 | company_name = wtforms.StringField(_('Company Name')) |
|
532 | 532 | company_address = wtforms.TextAreaField(_('Company Address')) |
|
533 | 533 | zip_code = wtforms.StringField(_('ZIP code')) |
|
534 | 534 | city = wtforms.StringField(_('City')) |
|
535 | 535 | notifications = wtforms.BooleanField('Account notifications', |
|
536 | 536 | false_values=FALSE_VALUES) |
|
537 | 537 | submit = wtforms.SubmitField(_('Update Account')) |
|
538 | 538 | ignore_labels = ['submit'] |
|
539 | 539 | css_classes = {'submit': 'btn btn-primary'} |
|
540 | 540 | |
|
541 | 541 | return UserProfileForm |
|
542 | 542 | |
|
543 | 543 | |
|
544 | 544 | class PurgeAppForm(ReactorForm): |
|
545 | 545 | resource_id = wtforms.HiddenField( |
|
546 | 546 | 'App Id', |
|
547 | 547 | validators=[wtforms.validators.DataRequired()]) |
|
548 | 548 | days = wtforms.IntegerField( |
|
549 | 549 | 'Days', |
|
550 | 550 | validators=[wtforms.validators.DataRequired()]) |
|
551 | 551 | password = wtforms.PasswordField( |
|
552 | 552 | 'Admin Password', |
|
553 | 553 | validators=[old_password_validator, wtforms.validators.DataRequired()]) |
|
554 | 554 | submit = wtforms.SubmitField(_('Purge Data')) |
|
555 | 555 | ignore_labels = ['submit'] |
|
556 | 556 | css_classes = {'submit': 'btn btn-primary'} |
|
557 | 557 | |
|
558 | 558 | |
|
559 | 559 | class IntegrationRepoForm(ReactorForm): |
|
560 | 560 | host_name = wtforms.StringField("Service Host", default='') |
|
561 | 561 | user_name = wtforms.StringField( |
|
562 | 562 | "User Name", |
|
563 | 563 | filters=[strip_filter], |
|
564 | 564 | validators=[wtforms.validators.DataRequired(), |
|
565 | 565 | wtforms.validators.Length(min=1)]) |
|
566 | 566 | repo_name = wtforms.StringField( |
|
567 | 567 | "Repo Name", |
|
568 | 568 | filters=[strip_filter], |
|
569 | 569 | validators=[wtforms.validators.DataRequired(), |
|
570 | 570 | wtforms.validators.Length(min=1)]) |
|
571 | 571 | |
|
572 | 572 | |
|
573 | 573 | class IntegrationBitbucketForm(IntegrationRepoForm): |
|
574 | 574 | host_name = wtforms.StringField("Service Host", |
|
575 | 575 | default='https://bitbucket.org') |
|
576 | 576 | |
|
577 | 577 | def validate_user_name(self, field): |
|
578 | 578 | try: |
|
579 | 579 | request = pyramid.threadlocal.get_current_request() |
|
580 | 580 | client = BitbucketIntegration.create_client( |
|
581 | 581 | request, |
|
582 | 582 | self.user_name.data, |
|
583 | 583 | self.repo_name.data) |
|
584 | 584 | client.get_assignees() |
|
585 | 585 | except IntegrationException as e: |
|
586 | 586 | raise wtforms.validators.ValidationError(str(e)) |
|
587 | 587 | |
|
588 | 588 | |
|
589 | 589 | class IntegrationGithubForm(IntegrationRepoForm): |
|
590 | 590 | host_name = wtforms.StringField("Service Host", |
|
591 | 591 | default='https://github.com') |
|
592 | 592 | |
|
593 | 593 | def validate_user_name(self, field): |
|
594 | 594 | try: |
|
595 | 595 | request = pyramid.threadlocal.get_current_request() |
|
596 | 596 | client = GithubIntegration.create_client( |
|
597 | 597 | request, |
|
598 | 598 | self.user_name.data, |
|
599 | 599 | self.repo_name.data) |
|
600 | 600 | client.get_assignees() |
|
601 | 601 | except IntegrationException as e: |
|
602 | 602 | raise wtforms.validators.ValidationError(str(e)) |
|
603 | 603 |
|
604 | 604 | |
|
605 | 605 | |
|
606 | 606 | def filter_rooms(data): |
|
607 | 607 | if data is not None: |
|
608 | 608 | rooms = data.split(',') |
|
609 | 609 | return ','.join([r.strip() for r in rooms]) |
|
610 | 610 | |
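
A quick sketch of the whitespace normalization this filter applies (illustrative values):

    filter_rooms(' 101, 102 ,103')  # -> '101,102,103'
    filter_rooms(None)              # -> None (missing data passes through)
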
|
611 | 611 | |
|
612 | 612 | class IntegrationCampfireForm(ReactorForm): |
|
613 | 613 | account = wtforms.StringField( |
|
614 | 614 | 'Account', |
|
615 | 615 | filters=[strip_filter], |
|
616 | 616 | validators=[wtforms.validators.DataRequired()]) |
|
617 | 617 | api_token = wtforms.StringField( |
|
618 | 618 | 'Api Token', |
|
619 | 619 | filters=[strip_filter], |
|
620 | 620 | validators=[wtforms.validators.DataRequired()]) |
|
621 | 621 | rooms = wtforms.StringField('Room ID list', filters=[filter_rooms]) |
|
622 | 622 | |
|
623 | 623 | def validate_api_token(self, field): |
|
624 | 624 | try: |
|
625 | 625 | client = CampfireIntegration.create_client(self.api_token.data, |
|
626 | 626 | self.account.data) |
|
627 | 627 | client.get_account() |
|
628 | 628 | except IntegrationException as e: |
|
629 | 629 | raise wtforms.validators.ValidationError(str(e)) |
|
630 | 630 | |
|
631 | 631 | def validate_rooms(self, field): |
|
632 | 632 | if not field.data: |
|
633 | 633 | return |
|
634 | 634 | client = CampfireIntegration.create_client(self.api_token.data, |
|
635 | 635 | self.account.data) |
|
636 | 636 | |
|
637 | 637 | try: |
|
638 | 638 | room_list = [r['id'] for r in client.get_rooms()] |
|
639 | 639 | except IntegrationException as e: |
|
640 | 640 | raise wtforms.validators.ValidationError(str(e)) |
|
641 | 641 | |
|
642 | 642 | rooms = field.data.split(',') |
|
643 | 643 | if len(rooms) > 3: |
|
644 | 644 | msg = 'You can use up to 3 room ids' |
|
645 | 645 | raise wtforms.validators.ValidationError(msg) |
|
646 | 646 | if rooms: |
|
647 | 647 | for room_id in rooms: |
|
648 | 648 | if not room_id.strip().isdigit():

649 | 649 | msg = 'You must use only integers for room ids'

650 | 650 | raise wtforms.validators.ValidationError(msg)

651 | 651 | if int(room_id) not in room_list:

652 | 652 | msg = "Room %s doesn't exist"

653 | 653 | raise wtforms.validators.ValidationError(msg % room_id)
|
654 | 654 | |
|
655 | 655 | submit = wtforms.SubmitField(_('Connect to Campfire')) |
|
656 | 656 | ignore_labels = ['submit'] |
|
657 | 657 | css_classes = {'submit': 'btn btn-primary'} |
|
658 | 658 | |
|
659 | 659 | |
|
664 | 664 | |
|
665 | 665 | |
|
666 | 666 | class IntegrationHipchatForm(ReactorForm): |
|
667 | 667 | api_token = wtforms.StringField( |
|
668 | 668 | 'Api Token', |
|
669 | 669 | filters=[strip_filter], |
|
670 | 670 | validators=[wtforms.validators.DataRequired()]) |
|
671 | 671 | rooms = wtforms.StringField( |
|
672 | 672 | 'Room ID list', |
|
673 | 673 | filters=[filter_rooms], |
|
674 | 674 | validators=[wtforms.validators.DataRequired()]) |
|
675 | 675 | |
|
676 | 676 | def validate_rooms(self, field): |
|
677 | 677 | if not field.data: |
|
678 | 678 | return |
|
679 | 679 | client = HipchatIntegration.create_client(self.api_token.data) |
|
680 | 680 | rooms = field.data.split(',') |
|
681 | 681 | if len(rooms) > 3: |
|
682 | 682 | msg = 'You can use up to 3 room ids' |
|
683 | 683 | raise wtforms.validators.ValidationError(msg) |
|
684 | 684 | if rooms: |
|
685 | 685 | for room_id in rooms: |
|
686 | 686 | if not room_id.strip().isdigit(): |
|
687 | 687 | msg = 'You must use only integers for room ids' |
|
688 | 688 | raise wtforms.validators.ValidationError(msg) |
|
689 | 689 | try: |
|
690 | 690 | client.send({ |
|
691 | 691 | "message_format": 'text', |
|
692 | 692 | "message": "testing for room existence", |
|
693 | 693 | "from": "AppEnlight", |
|
694 | 694 | "room_id": room_id, |
|
695 | 695 | "color": "green" |
|
696 | 696 | }) |
|
697 | 697 | except IntegrationException as exc: |
|
698 | 698 | msg = 'Room id: %s exception: %s' |
|
699 | 699 | raise wtforms.validators.ValidationError(msg % (room_id, |
|
700 | 700 | exc)) |
|
701 | 701 | |
|
702 | 702 | |
|
703 | 703 | class IntegrationFlowdockForm(ReactorForm): |
|
704 | 704 | api_token = wtforms.StringField('API Token', |
|
705 | 705 | filters=[strip_filter], |
|
706 | 706 | validators=[ |
|
707 | 707 | wtforms.validators.DataRequired() |
|
708 | 708 | ], ) |
|
709 | 709 | |
|
710 | 710 | def validate_api_token(self, field): |
|
711 | 711 | try: |
|
712 | 712 | client = FlowdockIntegration.create_client(self.api_token.data) |
|
713 | 713 | registry = pyramid.threadlocal.get_current_registry() |
|
714 | 714 | payload = { |
|
715 | 715 | "source": registry.settings['mailing.from_name'], |
|
716 | 716 | "from_address": registry.settings['mailing.from_email'], |
|
717 | 717 | "subject": "Integration test", |
|
718 | 718 | "content": "If you can see this it was successful", |
|
719 | 719 | "tags": ["appenlight"], |
|
720 | 720 | "link": registry.settings['mailing.app_url'] |
|
721 | 721 | } |
|
722 | 722 | client.send_to_inbox(payload) |
|
723 | 723 | except IntegrationException as e: |
|
724 | 724 | raise wtforms.validators.ValidationError(str(e)) |
|
725 | 725 | |
|
726 | 726 | |
|
727 | 727 | class IntegrationSlackForm(ReactorForm): |
|
728 | 728 | webhook_url = wtforms.StringField( |
|
729 | 729 | 'Reports webhook', |
|
730 | 730 | filters=[strip_filter], |
|
731 | 731 | validators=[wtforms.validators.DataRequired()]) |
|
732 | 732 | |
|
733 | 733 | def validate_webhook_url(self, field): |
|
734 | 734 | registry = pyramid.threadlocal.get_current_registry() |
|
735 | 735 | client = SlackIntegration.create_client(field.data) |
|
736 | 736 | link = "<%s|%s>" % (registry.settings['mailing.app_url'], |
|
737 | 737 | registry.settings['mailing.from_name']) |
|
738 | 738 | test_data = { |
|
739 | 739 | "username": "AppEnlight", |
|
740 | 740 | "icon_emoji": ":fire:", |
|
741 | 741 | "attachments": [ |
|
742 | 742 | {"fallback": "Testing integration channel: %s" % link, |
|
743 | 743 | "pretext": "Testing integration channel: %s" % link, |
|
744 | 744 | "color": "good", |
|
745 | 745 | "fields": [ |
|
746 | 746 | { |
|
747 | 747 | "title": "Status", |
|
748 | 748 | "value": "Integration is working fine", |
|
749 | 749 | "short": False |
|
750 | 750 | } |
|
751 | 751 | ]} |
|
752 | 752 | ] |
|
753 | 753 | } |
|
754 | 754 | try: |
|
755 | 755 | client.make_request(data=test_data) |
|
756 | 756 | except IntegrationException as exc: |
|
757 | 757 | raise wtforms.validators.ValidationError(str(exc)) |
|
758 | 758 | |
|
759 | 759 | |
|
760 | 760 | class IntegrationWebhooksForm(ReactorForm): |
|
761 | 761 | reports_webhook = wtforms.StringField( |
|
762 | 762 | 'Reports webhook', |
|
763 | 763 | filters=[strip_filter], |
|
764 | 764 | validators=[wtforms.validators.DataRequired()]) |
|
765 | 765 | alerts_webhook = wtforms.StringField( |
|
766 | 766 | 'Alerts webhook', |
|
767 | 767 | filters=[strip_filter], |
|
768 | 768 | validators=[wtforms.validators.DataRequired()]) |
|
769 | 769 | submit = wtforms.SubmitField(_('Setup webhooks')) |
|
770 | 770 | ignore_labels = ['submit'] |
|
771 | 771 | css_classes = {'submit': 'btn btn-primary'} |
|
772 | 772 | |
|
773 | 773 | |
|
774 | 774 | class IntegrationJiraForm(ReactorForm): |
|
775 | 775 | host_name = wtforms.StringField( |
|
776 | 776 | 'Server URL', |
|
777 | 777 | filters=[strip_filter], |
|
778 | 778 | validators=[wtforms.validators.DataRequired()]) |
|
779 | 779 | user_name = wtforms.StringField( |
|
780 | 780 | 'Username', |
|
781 | 781 | filters=[strip_filter], |
|
782 | 782 | validators=[wtforms.validators.DataRequired()]) |
|
783 | 783 | password = wtforms.PasswordField( |
|
784 | 784 | 'Password', |
|
785 | 785 | filters=[strip_filter], |
|
786 | 786 | validators=[wtforms.validators.DataRequired()]) |
|
787 | 787 | project = wtforms.StringField( |
|
788 | 788 | 'Project key', |
|
789 | 789 | filters=[uppercase_filter, strip_filter], |
|
790 | 790 | validators=[wtforms.validators.DataRequired()]) |
|
791 | 791 | |
|
792 | 792 | def validate_project(self, field): |
|
793 | 793 | if not field.data: |
|
794 | 794 | return |
|
795 | 795 | try: |
|
796 | 796 | client = JiraClient(self.user_name.data, |
|
797 | 797 | self.password.data, |
|
798 | 798 | self.host_name.data, |
|
799 | 799 | self.project.data) |
|
800 | 800 | except Exception as exc: |
|
801 | 801 | raise wtforms.validators.ValidationError(str(exc)) |
|
802 | 802 | |
|
803 | 803 | project_keys = [r.key.upper() for r in client.get_projects()]

804 | 804 | if field.data.upper() not in project_keys:

805 | 805 | msg = "Project %s doesn't exist in your Jira instance"
|
806 | 806 | raise wtforms.validators.ValidationError(msg % field.data) |
|
807 | 807 | |
|
808 | 808 | |
|
809 | 809 | def get_deletion_form(resource): |
|
810 | 810 | class F(ReactorForm): |
|
811 | 811 | application_name = wtforms.StringField( |
|
812 | 812 | 'Application Name', |
|
813 | 813 | filters=[strip_filter], |
|
814 | 814 | validators=[wtforms.validators.AnyOf([resource.resource_name])]) |
|
815 | 815 | resource_id = wtforms.HiddenField(default=resource.resource_id) |
|
816 | 816 | submit = wtforms.SubmitField(_('Delete my application')) |
|
817 | 817 | ignore_labels = ['submit'] |
|
818 | 818 | css_classes = {'submit': 'btn btn-danger'} |
|
819 | 819 | |
|
820 | 820 | return F |
|
821 | 821 | |
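
The factory here closes over the resource so the AnyOf validator is bound per application; a minimal usage sketch, with the view-side names assumed for illustration:

    form_cls = get_deletion_form(resource)
    form = form_cls(request.POST)
    if form.validate():
        # the typed application_name matched resource.resource_name
        delete_application(resource)  # hypothetical helper
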
|
822 | 822 | |
|
823 | 823 | class ChangeApplicationOwnerForm(ReactorForm): |
|
824 | 824 | password = wtforms.PasswordField( |
|
825 | 825 | 'Password', |
|
826 | 826 | filters=[strip_filter], |
|
827 | 827 | validators=[old_password_validator, |
|
828 | 828 | wtforms.validators.DataRequired()]) |
|
829 | 829 | |
|
830 | 830 | user_name = wtforms.StringField( |
|
831 | 831 | "New owner's username",
|
832 | 832 | filters=[strip_filter], |
|
833 | 833 | validators=[found_username_validator, |
|
834 | 834 | wtforms.validators.DataRequired()]) |
|
835 | 835 | submit = wtforms.SubmitField(_('Transfer ownership of application')) |
|
836 | 836 | ignore_labels = ['submit'] |
|
837 | 837 | css_classes = {'submit': 'btn btn-danger'} |
|
838 | 838 | |
|
839 | 839 | |
|
840 | 840 | def default_filename(): |
|
841 | 841 | return 'Invoice %s' % datetime.datetime.utcnow().strftime('%Y/%m') |
|
842 | 842 | |
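
Illustrative output, assuming a May 2017 clock:

    default_filename()  # -> 'Invoice 2017/05'
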
|
843 | 843 | |
|
844 | 844 | class FileUploadForm(ReactorForm): |
|
845 | 845 | title = wtforms.StringField('File Title', |
|
846 | 846 | default=default_filename, |
|
847 | 847 | validators=[wtforms.validators.DataRequired()]) |
|
848 | 848 | file = wtforms.FileField('File') |
|
849 | 849 | |
|
850 | 850 | def validate_file(self, field): |
|
851 | 851 | if not hasattr(field.data, 'file'): |
|
852 | 852 | raise wtforms.ValidationError('File is missing') |
|
853 | 853 | |
|
854 | 854 | submit = wtforms.SubmitField(_('Upload')) |
|
855 | 855 | |
|
856 | 856 | |
|
857 | 857 | def get_partition_deletion_form(es_indices, pg_indices): |
|
858 | 858 | class F(ReactorForm): |
|
859 | 859 | es_index = wtforms.SelectMultipleField('Elasticsearch', |
|
860 | 860 | choices=[(ix, '') for ix in |
|
861 | 861 | es_indices]) |
|
862 | 862 | pg_index = wtforms.SelectMultipleField('pg', |
|
863 | 863 | choices=[(ix, '') for ix in |
|
864 | 864 | pg_indices]) |
|
865 | 865 | confirm = wtforms.StringField('Confirm',
|
866 | 866 | filters=[uppercase_filter, strip_filter], |
|
867 | 867 | validators=[ |
|
868 | 868 | wtforms.validators.AnyOf(['CONFIRM']), |
|
869 | 869 | wtforms.validators.DataRequired()]) |
|
870 | 870 | ignore_labels = ['submit'] |
|
871 | 871 | css_classes = {'submit': 'btn btn-danger'} |
|
872 | 872 | |
|
873 | 873 | return F |
|
874 | 874 | |
|
875 | 875 | |
|
876 | 876 | class GroupCreateForm(ReactorForm): |
|
877 | 877 | group_name = wtforms.StringField( |
|
878 | 878 | _('Group Name'), |
|
879 | 879 | filters=[strip_filter], |
|
880 | 880 | validators=[ |
|
881 | 881 | wtforms.validators.Length(min=2, max=50), |
|
882 | 882 | unique_groupname_validator, |
|
883 | 883 | wtforms.validators.DataRequired() |
|
884 | 884 | ]) |
|
885 | 885 | description = wtforms.StringField(_('Group description')) |
|
886 | 886 | |
|
887 | 887 | |
|
888 | 888 | time_choices = [(k, v['label'],) for k, v in h.time_deltas.items()] |
|
889 | 889 | |
|
890 | 890 | |
|
891 | 891 | class AuthTokenCreateForm(ReactorForm): |
|
892 | 892 | description = wtforms.StringField(_('Token description')) |
|
893 | 893 | expires = wtforms.SelectField('Expires', |
|
894 | 894 | coerce=lambda x: x, |
|
895 | 895 | choices=time_choices, |
|
896 | 896 | validators=[wtforms.validators.Optional()]) |
@@ -1,135 +1,135 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import json |
|
18 | 18 | |
|
19 | 19 | from pyramid.security import unauthenticated_userid |
|
20 | 20 | |
|
21 | 21 | import appenlight.lib.helpers as helpers |
|
22 | 22 | |
|
23 | 23 | from authomatic.providers import oauth2, oauth1 |
|
24 | 24 | from authomatic import Authomatic |
|
25 | from appenlight.models.user import User | |

25 | from ziggurat_foundations.models.services.user import UserService | |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | class CSRFException(Exception): |
|
29 | 29 | pass |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | class JSONException(Exception): |
|
33 | 33 | pass |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | def get_csrf_token(request): |
|
37 | 37 | return request.session.get_csrf_token() |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | def safe_json_body(request): |
|
41 | 41 | """ |
|
42 | 42 | Returns None if json body is missing or erroneous |
|
43 | 43 | """ |
|
44 | 44 | try: |
|
45 | 45 | return request.json_body |
|
46 | 46 | except ValueError: |
|
47 | 47 | return None |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | def unsafe_json_body(request): |
|
51 | 51 | """ |
|
52 | 52 | Throws JSONException if json can't deserialize |
|
53 | 53 | """ |
|
54 | 54 | try: |
|
55 | 55 | return request.json_body |
|
56 | 56 | except ValueError: |
|
57 | 57 | raise JSONException('Incorrect JSON') |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | def get_user(request): |
|
61 | 61 | if not request.path_info.startswith('/static'): |
|
62 | 62 | user_id = unauthenticated_userid(request) |
|
63 | 63 | try: |
|
64 | 64 | user_id = int(user_id) |
|
65 | 65 | except Exception: |
|
66 | 66 | return None |
|
67 | 67 | |
|
68 | 68 | if user_id: |
|
69 | user = User.by_id(user_id) | |
|
69 | user = UserService.by_id(user_id) | |
|
70 | 70 | if user: |
|
71 | 71 | request.environ['appenlight.username'] = '%d:%s' % ( |
|
72 | 72 | user_id, user.user_name) |
|
73 | 73 | return user |
|
74 | 74 | else: |
|
75 | 75 | return None |
|
76 | 76 | |
|
77 | 77 | |
|
78 | 78 | def es_conn(request): |
|
79 | 79 | return request.registry.es_conn |
|
80 | 80 | |
|
81 | 81 | |
|
82 | 82 | def add_flash_to_headers(request, clear=True): |
|
83 | 83 | """ |
|
84 | 84 | Adds pending flash messages to response, if clear is true clears out the |
|
85 | 85 | flash queue |
|
86 | 86 | """ |
|
87 | 87 | flash_msgs = helpers.get_type_formatted_flash(request) |
|
88 | 88 | request.response.headers['x-flash-messages'] = json.dumps(flash_msgs) |
|
89 | 89 | helpers.clear_flash(request) |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | def get_authomatic(request): |
|
93 | 93 | settings = request.registry.settings |
|
94 | 94 | # authomatic social auth |
|
95 | 95 | authomatic_conf = { |
|
96 | 96 | # callback http://yourapp.com/social_auth/twitter |
|
97 | 97 | 'twitter': { |
|
98 | 98 | 'class_': oauth1.Twitter, |
|
99 | 99 | 'consumer_key': settings.get('authomatic.pr.twitter.key', ''), |
|
100 | 100 | 'consumer_secret': settings.get('authomatic.pr.twitter.secret', |
|
101 | 101 | ''), |
|
102 | 102 | }, |
|
103 | 103 | # callback http://yourapp.com/social_auth/facebook |
|
104 | 104 | 'facebook': { |
|
105 | 105 | 'class_': oauth2.Facebook, |
|
106 | 106 | 'consumer_key': settings.get('authomatic.pr.facebook.app_id', ''), |
|
107 | 107 | 'consumer_secret': settings.get('authomatic.pr.facebook.secret', |
|
108 | 108 | ''), |
|
109 | 109 | 'scope': ['email'], |
|
110 | 110 | }, |
|
111 | 111 | # callback http://yourapp.com/social_auth/google |
|
112 | 112 | 'google': { |
|
113 | 113 | 'class_': oauth2.Google, |
|
114 | 114 | 'consumer_key': settings.get('authomatic.pr.google.key', ''), |
|
115 | 115 | 'consumer_secret': settings.get( |
|
116 | 116 | 'authomatic.pr.google.secret', ''), |
|
117 | 117 | 'scope': ['profile', 'email'], |
|
118 | 118 | }, |
|
119 | 119 | 'github': { |
|
120 | 120 | 'class_': oauth2.GitHub, |
|
121 | 121 | 'consumer_key': settings.get('authomatic.pr.github.key', ''), |
|
122 | 122 | 'consumer_secret': settings.get( |
|
123 | 123 | 'authomatic.pr.github.secret', ''), |
|
124 | 124 | 'scope': ['repo', 'public_repo', 'user:email'], |
|
125 | 125 | 'access_headers': {'User-Agent': 'AppEnlight'}, |
|
126 | 126 | }, |
|
127 | 127 | 'bitbucket': { |
|
128 | 128 | 'class_': oauth1.Bitbucket, |
|
129 | 129 | 'consumer_key': settings.get('authomatic.pr.bitbucket.key', ''), |
|
130 | 130 | 'consumer_secret': settings.get( |
|
131 | 131 | 'authomatic.pr.bitbucket.secret', '') |
|
132 | 132 | } |
|
133 | 133 | } |
|
134 | 134 | return Authomatic( |
|
135 | 135 | config=authomatic_conf, secret=settings['authomatic.secret']) |
@@ -1,490 +1,491 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | """ |
|
18 | 18 | Utility functions. |
|
19 | 19 | """ |
|
20 | 20 | import logging |
|
21 | 21 | import requests |
|
22 | 22 | import hashlib |
|
23 | 23 | import json |
|
24 | 24 | import copy |
|
25 | 25 | import uuid |
|
26 | 26 | import appenlight.lib.helpers as h |
|
27 | 27 | from collections import namedtuple |
|
28 | 28 | from datetime import timedelta, datetime, date |
|
29 | 29 | from dogpile.cache.api import NO_VALUE |
|
30 | 30 | from appenlight.models import Datastores |
|
31 | 31 | from appenlight.validators import (LogSearchSchema, |
|
32 | 32 | TagListSchema, |
|
33 | 33 | accepted_search_params) |
|
34 | 34 | from itsdangerous import TimestampSigner |
|
35 | 35 | from ziggurat_foundations.permissions import ALL_PERMISSIONS |
|
36 | from ziggurat_foundations.models.services.user import UserService | |
|
36 | 37 | from dateutil.relativedelta import relativedelta |
|
37 | 38 | from dateutil.rrule import rrule, MONTHLY, DAILY |
|
38 | 39 | |
|
39 | 40 | log = logging.getLogger(__name__) |
|
40 | 41 | |
|
41 | 42 | |
|
42 | 43 | Stat = namedtuple('Stat', 'start_interval value') |
|
43 | 44 | |
|
44 | 45 | |
|
45 | 46 | def default_extractor(item): |
|
46 | 47 | """ |
|
47 | 48 | :param item - item to extract date from |
|
48 | 49 | """ |
|
49 | 50 | if hasattr(item, 'start_interval'): |
|
50 | 51 | return item.start_interval |
|
51 | 52 | return item['start_interval'] |
|
52 | 53 | |
|
53 | 54 | |
|
54 | 55 | # fast gap generator |
|
55 | 56 | def gap_gen_default(start, step, itemiterator, end_time=None, |
|
56 | 57 | iv_extractor=None): |
|
57 | 58 | """ generates a list of time/value items based on step and itemiterator |
|
58 | 59 | if entries are missing from the iterator, time/None pairs will be returned
|
59 | 60 | instead |
|
60 | 61 | :param start - datetime - what time should we start generating our values |
|
61 | 62 | :param step - timedelta - stepsize |
|
62 | 63 | :param itemiterator - iterable - we will check this iterable for values |
|
63 | 64 | corresponding to generated steps |
|
64 | 65 | :param end_time - datetime - when last step is >= end_time stop iterating |
|
65 | 66 | :param iv_extractor - extracts current step from iterable items |
|
66 | 67 | """ |
|
67 | 68 | |
|
68 | 69 | if not iv_extractor: |
|
69 | 70 | iv_extractor = default_extractor |
|
70 | 71 | |
|
71 | 72 | next_step = start |
|
72 | 73 | minutes = step.total_seconds() / 60.0 |
|
73 | 74 | while next_step.minute % minutes != 0: |
|
74 | 75 | next_step = next_step.replace(minute=next_step.minute - 1) |
|
75 | 76 | for item in itemiterator: |
|
76 | 77 | item_start_interval = iv_extractor(item) |
|
77 | 78 | # do we have a match for current time step in our data? |
|
78 | 79 | # if not, generate a placeholder tuple with a None value
|
79 | 80 | while next_step < item_start_interval: |
|
80 | 81 | yield Stat(next_step, None) |
|
81 | 82 | next_step = next_step + step |
|
82 | 83 | if next_step == item_start_interval: |
|
83 | 84 | yield Stat(item_start_interval, item) |
|
84 | 85 | next_step = next_step + step |
|
85 | 86 | if end_time: |
|
86 | 87 | while next_step < end_time: |
|
87 | 88 | yield Stat(next_step, None) |
|
88 | 89 | next_step = next_step + step |
|
89 | 90 | |
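
A minimal sketch of the gap filling, assuming 5-minute steps and one missing interval (illustrative values):

    from datetime import datetime, timedelta
    items = [{'start_interval': datetime(2017, 5, 4, 10, 0)},
             {'start_interval': datetime(2017, 5, 4, 10, 10)}]
    list(gap_gen_default(datetime(2017, 5, 4, 10, 0), timedelta(minutes=5),
                         items, end_time=datetime(2017, 5, 4, 10, 15)))
    # -> [Stat(10:00, item0), Stat(10:05, None), Stat(10:10, item1)]
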
|
90 | 91 | |
|
91 | 92 | class DateTimeEncoder(json.JSONEncoder): |
|
92 | 93 | """ Simple datetime to ISO encoder for json serialization""" |
|
93 | 94 | |
|
94 | 95 | def default(self, obj): |
|
95 | 96 | if isinstance(obj, date): |
|
96 | 97 | return obj.isoformat() |
|
97 | 98 | if isinstance(obj, datetime): |
|
98 | 99 | return obj.isoformat() |
|
99 | 100 | return json.JSONEncoder.default(self, obj) |
|
100 | 101 | |
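
Usage is the same as for any JSONEncoder subclass, e.g.:

    json.dumps({'ts': datetime.utcnow()}, cls=DateTimeEncoder)
    # -> '{"ts": "2017-05-04T10:00:00.000000"}' (illustrative timestamp)
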
|
101 | 102 | |
|
102 | 103 | def channelstream_request(secret, endpoint, payload, throw_exceptions=False, |
|
103 | 104 | servers=None): |
|
104 | 105 | responses = [] |
|
105 | 106 | if not servers: |
|
106 | 107 | servers = [] |
|
107 | 108 | |
|
108 | 109 | signer = TimestampSigner(secret) |
|
109 | 110 | sig_for_server = signer.sign(endpoint) |
|
110 | 111 | for secret, server in [(s['secret'], s['server']) for s in servers]: |
|
111 | 112 | response = {} |
|
112 | 113 | secret_headers = {'x-channelstream-secret': sig_for_server, |
|
113 | 114 | 'x-channelstream-endpoint': endpoint, |
|
114 | 115 | 'Content-Type': 'application/json'} |
|
115 | 116 | url = '%s%s' % (server, endpoint) |
|
116 | 117 | try: |
|
117 | 118 | response = requests.post(url, |
|
118 | 119 | data=json.dumps(payload, |
|
119 | 120 | cls=DateTimeEncoder), |
|
120 | 121 | headers=secret_headers, |
|
121 | 122 | verify=False, |
|
122 | 123 | timeout=2).json() |
|
123 | 124 | except requests.exceptions.RequestException as e: |
|
124 | 125 | if throw_exceptions: |
|
125 | 126 | raise |
|
126 | 127 | responses.append(response) |
|
127 | 128 | return responses |
|
128 | 129 | |
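
A minimal call sketch (hypothetical secret and server list); the endpoint path is signed with the shared secret and passed along in the x-channelstream-secret header:

    channelstream_request(
        'secret', '/message', {'type': 'message'},
        servers=[{'secret': 'secret', 'server': 'http://127.0.0.1:8000'}])
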
|
129 | 130 | |
|
130 | 131 | def add_cors_headers(response): |
|
131 | 132 | # allow CORS |
|
132 | 133 | response.headers.add('Access-Control-Allow-Origin', '*') |
|
133 | 134 | response.headers.add('XDomainRequestAllowed', '1') |
|
134 | 135 | response.headers.add('Access-Control-Allow-Methods', 'GET, POST, OPTIONS') |
|
135 | 136 | # response.headers.add('Access-Control-Allow-Credentials', 'true') |
|
136 | 137 | response.headers.add('Access-Control-Allow-Headers', |
|
137 | 138 | 'Content-Type, Depth, User-Agent, X-File-Size, X-Requested-With, If-Modified-Since, X-File-Name, Cache-Control, Pragma, Origin, Connection, Referer, Cookie') |
|
138 | 139 | response.headers.add('Access-Control-Max-Age', '86400') |
|
139 | 140 | |
|
140 | 141 | |
|
141 | 142 | from sqlalchemy.sql import compiler |
|
142 | 143 | from psycopg2.extensions import adapt as sqlescape |
|
143 | 144 | |
|
144 | 145 | |
|
145 | 146 | # or use the appropriate escape function from your db driver
|
146 | 147 | |
|
147 | 148 | def compile_query(query): |
|
148 | 149 | dialect = query.session.bind.dialect |
|
149 | 150 | statement = query.statement |
|
150 | 151 | comp = compiler.SQLCompiler(dialect, statement) |
|
151 | 152 | comp.compile() |
|
152 | 153 | enc = dialect.encoding |
|
153 | 154 | params = {} |
|
154 | 155 | for k, v in comp.params.items(): |
|
155 | 156 | if isinstance(v, str): |
|
156 | 157 | v = v.encode(enc) |
|
157 | 158 | params[k] = sqlescape(v) |
|
158 | 159 | return (comp.string.encode(enc) % params).decode(enc) |
|
159 | 160 | |
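
Note that this inlines bound parameters into the SQL string, so it is only suitable for logging and debugging, never for execution; a sketch of the intended use (hypothetical query):

    log.debug(compile_query(db_session.query(Report).filter(Report.id == 5)))
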
|
160 | 161 | |
|
161 | 162 | def convert_es_type(input_data): |
|
162 | 163 | """ |
|
163 | 164 | This might need to convert some text or other types to corresponding ES types |
|
164 | 165 | """ |
|
165 | 166 | return str(input_data) |
|
166 | 167 | |
|
167 | 168 | |
|
168 | 169 | ProtoVersion = namedtuple('ProtoVersion', ['major', 'minor', 'patch']) |
|
169 | 170 | |
|
170 | 171 | |
|
171 | 172 | def parse_proto(input_data): |
|
172 | 173 | try: |
|
173 | 174 | parts = [int(x) for x in input_data.split('.')] |
|
174 | 175 | while len(parts) < 3: |
|
175 | 176 | parts.append(0) |
|
176 | 177 | return ProtoVersion(*parts) |
|
177 | 178 | except Exception as e: |
|
178 | 179 | log.info('Unknown protocol version: %s' % e) |
|
179 | 180 | return ProtoVersion(99, 99, 99) |
|
180 | 181 | |
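
Missing components default to zero, and unparsable input maps to a sentinel version:

    parse_proto('0.5')   # -> ProtoVersion(major=0, minor=5, patch=0)
    parse_proto('junk')  # -> ProtoVersion(major=99, minor=99, patch=99)
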
|
181 | 182 | |
|
182 | 183 | def es_index_name_limiter(start_date=None, end_date=None, months_in_past=6, |
|
183 | 184 | ixtypes=None): |
|
184 | 185 | """ |
|
185 | 186 | This function limits the search to 6 months by default so we don't have to |
|
186 | 187 | query 300 elasticsearch indices for 20 years of historical data for example |
|
187 | 188 | """ |
|
188 | 189 | |
|
189 | 190 | # should be cached later |
|
190 | 191 | def get_possible_names(): |
|
191 | 192 | return list(Datastores.es.aliases().keys()) |
|
192 | 193 | |
|
193 | 194 | possible_names = get_possible_names() |
|
194 | 195 | es_index_types = [] |
|
195 | 196 | if not ixtypes: |
|
196 | 197 | ixtypes = ['reports', 'metrics', 'logs'] |
|
197 | 198 | for t in ixtypes: |
|
198 | 199 | if t == 'reports': |
|
199 | 200 | es_index_types.append('rcae_r_%s') |
|
200 | 201 | elif t == 'logs': |
|
201 | 202 | es_index_types.append('rcae_l_%s') |
|
202 | 203 | elif t == 'metrics': |
|
203 | 204 | es_index_types.append('rcae_m_%s') |
|
204 | 205 | elif t == 'uptime': |
|
205 | 206 | es_index_types.append('rcae_u_%s') |
|
206 | 207 | elif t == 'slow_calls': |
|
207 | 208 | es_index_types.append('rcae_sc_%s') |
|
208 | 209 | |
|
209 | 210 | if start_date: |
|
210 | 211 | start_date = copy.copy(start_date) |
|
211 | 212 | else: |
|
212 | 213 | if not end_date: |
|
213 | 214 | end_date = datetime.utcnow() |
|
214 | 215 | start_date = end_date + relativedelta(months=months_in_past * -1) |
|
215 | 216 | |
|
216 | 217 | if not end_date: |
|
217 | 218 | end_date = start_date + relativedelta(months=months_in_past) |
|
218 | 219 | |
|
219 | 220 | index_dates = list(rrule(MONTHLY, |
|
220 | 221 | dtstart=start_date.date().replace(day=1), |
|
221 | 222 | until=end_date.date(), |
|
222 | 223 | count=36)) |
|
223 | 224 | index_names = [] |
|
224 | 225 | for ix_type in es_index_types: |
|
225 | 226 | to_extend = [ix_type % d.strftime('%Y_%m') for d in index_dates |
|
226 | 227 | if ix_type % d.strftime('%Y_%m') in possible_names] |
|
227 | 228 | index_names.extend(to_extend) |
|
228 | 229 | for day in list(rrule(DAILY, dtstart=start_date.date(), |
|
229 | 230 | until=end_date.date(), count=366)): |
|
230 | 231 | ix_name = ix_type % day.strftime('%Y_%m_%d') |
|
231 | 232 | if ix_name in possible_names: |
|
232 | 233 | index_names.append(ix_name) |
|
233 | 234 | return index_names |
|
234 | 235 | |
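
For a January 2017 query over reports and logs, the generated candidates look like 'rcae_r_2017_01' for monthly indices plus daily ones such as 'rcae_l_2017_01_15', filtered against the aliases that actually exist in ES (illustrative call):

    es_index_name_limiter(start_date=datetime(2017, 1, 1),
                          end_date=datetime(2017, 1, 31),
                          ixtypes=['reports', 'logs'])
    # -> ['rcae_r_2017_01', 'rcae_l_2017_01', 'rcae_l_2017_01_15', ...]
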
|
235 | 236 | |
|
236 | 237 | def build_filter_settings_from_query_dict( |
|
237 | 238 | request, params=None, override_app_ids=None, |
|
238 | 239 | resource_permissions=None): |
|
239 | 240 | """ |
|
240 | 241 | Builds list of normalized search terms for ES from query params |
|
241 | 242 | ensuring application list is restricted to only applications user |
|
242 | 243 | has access to |
|
243 | 244 | |
|
244 | 245 | :param params (dictionary) |
|
245 | 246 | :param override_app_ids - list of application id's to use instead of |
|
246 | 247 | applications user normally has access to |
|
247 | 248 | """ |
|
248 | 249 | params = copy.deepcopy(params) |
|
249 | 250 | applications = [] |
|
250 | 251 | if not resource_permissions: |
|
251 | 252 | resource_permissions = ['view'] |
|
252 | 253 | |
|
253 | 254 | if request.user: |
|
254 | applications = request.user.resources_with_perms( | |

255 | resource_permissions, resource_types=['application']) | |
|
255 | applications = UserService.resources_with_perms( | |
|
256 | request.user, resource_permissions, resource_types=['application']) | |
|
256 | 257 | |
|
257 | 258 | # CRITICAL - this ensures our resultset is limited to only the ones |
|
258 | 259 | # user has view permissions |
|
259 | 260 | all_possible_app_ids = set([app.resource_id for app in applications]) |
|
260 | 261 | |
|
261 | 262 | # if override is preset we force permission for app to be present |
|
262 | 263 | # this allows users to see dashboards and applications they would |
|
263 | 264 | # normally not be able to |
|
264 | 265 | |
|
265 | 266 | if override_app_ids: |
|
266 | 267 | all_possible_app_ids = set(override_app_ids) |
|
267 | 268 | |
|
268 | 269 | schema = LogSearchSchema().bind(resources=all_possible_app_ids) |
|
269 | 270 | tag_schema = TagListSchema() |
|
270 | 271 | filter_settings = schema.deserialize(params) |
|
271 | 272 | tag_list = [] |
|
272 | 273 | for k, v in list(filter_settings.items()): |
|
273 | 274 | if k in accepted_search_params: |
|
274 | 275 | continue |
|
275 | 276 | tag_list.append({"name": k, "value": v, "op": 'eq'}) |
|
276 | 277 | # remove the key from filter_settings |
|
277 | 278 | filter_settings.pop(k, None) |
|
278 | 279 | tags = tag_schema.deserialize(tag_list) |
|
279 | 280 | filter_settings['tags'] = tags |
|
280 | 281 | return filter_settings |
|
281 | 282 | |
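
Any query parameter not listed in accepted_search_params is peeled off into an equality tag before deserialization; e.g. an http_status=500 query param is appended as (the value shape depends on how the params dict was built):

    {"name": "http_status", "value": "500", "op": "eq"}
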
|
282 | 283 | |
|
283 | 284 | def gen_uuid(): |
|
284 | 285 | return str(uuid.uuid4()) |
|
285 | 286 | |
|
286 | 287 | |
|
287 | 288 | def gen_uuid4_sha_hex(): |
|
288 | 289 | return hashlib.sha1(uuid.uuid4().bytes).hexdigest() |
|
289 | 290 | |
|
290 | 291 | |
|
291 | 292 | def permission_tuple_to_dict(data): |
|
292 | 293 | out = { |
|
293 | 294 | "user_name": None, |
|
294 | 295 | "perm_name": data.perm_name, |
|
295 | 296 | "owner": data.owner, |
|
296 | 297 | "type": data.type, |
|
297 | 298 | "resource_name": None, |
|
298 | 299 | "resource_type": None, |
|
299 | 300 | "resource_id": None, |
|
300 | 301 | "group_name": None, |
|
301 | 302 | "group_id": None |
|
302 | 303 | } |
|
303 | 304 | if data.user: |
|
304 | 305 | out["user_name"] = data.user.user_name |
|
305 | 306 | if data.perm_name == ALL_PERMISSIONS: |
|
306 | 307 | out['perm_name'] = '__all_permissions__' |
|
307 | 308 | if data.resource: |
|
308 | 309 | out['resource_name'] = data.resource.resource_name |
|
309 | 310 | out['resource_type'] = data.resource.resource_type |
|
310 | 311 | out['resource_id'] = data.resource.resource_id |
|
311 | 312 | if data.group: |
|
312 | 313 | out['group_name'] = data.group.group_name |
|
313 | 314 | out['group_id'] = data.group.id |
|
314 | 315 | return out |
|
315 | 316 | |
|
316 | 317 | |
|
317 | 318 | def get_cached_buckets(request, stats_since, end_time, fn, cache_key, |
|
318 | 319 | gap_gen=None, db_session=None, step_interval=None, |
|
319 | 320 | iv_extractor=None, |
|
320 | 321 | rerange=False, *args, **kwargs): |
|
321 | 322 | """ Takes "fn" that should return some data and tries to load the data |
|
322 | 323 | dividing it into daily buckets - if the stats_since and end time give a |
|
323 | 324 | delta bigger than 24hours, then only "todays" data is computed on the fly |
|
324 | 325 | |
|
325 | 326 | :param request: (request) request object |
|
326 | 327 | :param stats_since: (datetime) start date of buckets range |
|
327 | 328 | :param end_time: (datetime) end date of buckets range - utcnow() if None |
|
328 | 329 | :param fn: (callable) callable to use to populate buckets should have |
|
329 | 330 | following signature: |
|
330 | 331 | def get_data(request, since_when, until, *args, **kwargs): |
|
331 | 332 | |
|
332 | 333 | :param cache_key: (string) cache key that will be used to build bucket |
|
333 | 334 | caches |
|
334 | 335 | :param gap_gen: (callable) gap generator - should return step intervals |
|
335 | 336 | to use with out `fn` callable |
|
336 | 337 | :param db_session: (Session) sqlalchemy session |
|
337 | 338 | :param step_interval: (timedelta) optional step interval if we want to |
|
338 | 339 | override the default determined from total start/end time delta |
|
339 | 340 | :param iv_extractor: (callable) used to get step intervals from data |
|
340 | 341 | returned by `fn` callable |
|
341 | 342 | :param rerange: (bool) handy if we want to change ranges from hours to |
|
342 | 343 | days when cached data is missing - will shorten execution time if `fn` |
|
343 | 344 | callable supports that and we are working with multiple rows - like metrics |
|
344 | 345 | :param args: |
|
345 | 346 | :param kwargs: |
|
346 | 347 | |
|
347 | 348 | :return: iterable |
|
348 | 349 | """ |
|
349 | 350 | if not end_time: |
|
350 | 351 | end_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
351 | 352 | delta = end_time - stats_since |
|
352 | 353 | # if smaller than 3 days we want to group by 5min else by 1h, |
|
353 | 354 | # for 60 min group by min |
|
354 | 355 | if not gap_gen: |
|
355 | 356 | gap_gen = gap_gen_default |
|
356 | 357 | if not iv_extractor: |
|
357 | 358 | iv_extractor = default_extractor |
|
358 | 359 | |
|
359 | 360 | # do not use custom interval if total time range with new iv would exceed |
|
360 | 361 | # end time |
|
361 | 362 | if not step_interval or stats_since + step_interval >= end_time: |
|
362 | 363 | if delta < h.time_deltas.get('12h')['delta']: |
|
363 | 364 | step_interval = timedelta(seconds=60) |
|
364 | 365 | elif delta < h.time_deltas.get('3d')['delta']: |
|
365 | 366 | step_interval = timedelta(seconds=60 * 5) |
|
366 | 367 | elif delta > h.time_deltas.get('2w')['delta']: |
|
367 | 368 | step_interval = timedelta(days=1) |
|
368 | 369 | else: |
|
369 | 370 | step_interval = timedelta(minutes=60) |
|
370 | 371 | |
|
371 | 372 | if step_interval >= timedelta(minutes=60): |
|
372 | 373 | log.info('cached_buckets:{}: adjusting start time ' |
|
373 | 374 | 'for hourly or daily intervals'.format(cache_key)) |
|
374 | 375 | stats_since = stats_since.replace(hour=0, minute=0) |
|
375 | 376 | |
|
376 | 377 | ranges = [i.start_interval for i in list(gap_gen(stats_since, |
|
377 | 378 | step_interval, [], |
|
378 | 379 | end_time=end_time))] |
|
379 | 380 | buckets = {} |
|
380 | 381 | storage_key = 'buckets:' + cache_key + '{}|{}' |
|
381 | 382 | # this means we basically cache per hour in 3-14 day intervals, but I think

382 | 383 | # it's fine at this point - it will be faster than db access anyway
|
383 | 384 | |
|
384 | 385 | if len(ranges) >= 1: |
|
385 | 386 | last_ranges = [ranges[-1]] |
|
386 | 387 | else: |
|
387 | 388 | last_ranges = [] |
|
388 | 389 | if step_interval >= timedelta(minutes=60): |
|
389 | 390 | for r in ranges: |
|
390 | 391 | k = storage_key.format(step_interval.total_seconds(), r) |
|
391 | 392 | value = request.registry.cache_regions.redis_day_30.get(k) |
|
392 | 393 | # last buckets are never loaded from cache |
|
393 | 394 | is_last_result = ( |
|
394 | 395 | r >= end_time - timedelta(hours=6) or r in last_ranges) |
|
395 | 396 | if value is not NO_VALUE and not is_last_result: |
|
396 | 397 | log.info("cached_buckets:{}: " |
|
397 | 398 | "loading range {} from cache".format(cache_key, r)) |
|
398 | 399 | buckets[r] = value |
|
399 | 400 | else: |
|
400 | 401 | log.info("cached_buckets:{}: " |
|
401 | 402 | "loading range {} from storage".format(cache_key, r)) |
|
402 | 403 | range_size = step_interval |
|
403 | 404 | if (step_interval == timedelta(minutes=60) and |
|
404 | 405 | not is_last_result and rerange): |
|
405 | 406 | range_size = timedelta(days=1) |
|
406 | 407 | r = r.replace(hour=0, minute=0) |
|
407 | 408 | log.info("cached_buckets:{}: " |
|
408 | 409 | "loading collapsed " |
|
409 | 410 | "range {} {}".format(cache_key, r, |
|
410 | 411 | r + range_size)) |
|
411 | 412 | bucket_data = fn( |
|
412 | 413 | request, r, r + range_size, step_interval, |
|
413 | 414 | gap_gen, bucket_count=len(ranges), *args, **kwargs) |
|
414 | 415 | for b in bucket_data: |
|
415 | 416 | b_iv = iv_extractor(b) |
|
416 | 417 | buckets[b_iv] = b |
|
417 | 418 | k2 = storage_key.format( |
|
418 | 419 | step_interval.total_seconds(), b_iv) |
|
419 | 420 | request.registry.cache_regions.redis_day_30.set(k2, b) |
|
420 | 421 | log.info("cached_buckets:{}: saving cache".format(cache_key)) |
|
421 | 422 | else: |
|
422 | 423 | # bucket count is 1 for short time ranges <= 24h from now |
|
423 | 424 | bucket_data = fn(request, stats_since, end_time, step_interval, |
|
424 | 425 | gap_gen, bucket_count=1, *args, **kwargs) |
|
425 | 426 | for b in bucket_data: |
|
426 | 427 | buckets[iv_extractor(b)] = b |
|
427 | 428 | return buckets |
|
428 | 429 | |
|
429 | 430 | |
|
430 | 431 | def get_cached_split_data(request, stats_since, end_time, fn, cache_key, |
|
431 | 432 | db_session=None, *args, **kwargs): |
|
432 | 433 | """ Takes "fn" that should return some data and tries to load the data |
|
433 | 434 | dividing it into 2 buckets - cached "since_from" bucket and "today" |
|
434 | 435 | bucket - then the data can be reduced into single value |
|
435 | 436 | |
|
436 | 437 | Data is cached if the stats_since and end time give a delta bigger |
|
437 | 438 | than 24hours - then only 24h is computed on the fly |
|
438 | 439 | """ |
|
439 | 440 | if not end_time: |
|
440 | 441 | end_time = datetime.utcnow().replace(second=0, microsecond=0) |
|
441 | 442 | delta = end_time - stats_since |
|
442 | 443 | |
|
443 | 444 | if delta >= timedelta(minutes=60): |
|
444 | 445 | log.info('cached_split_data:{}: adjusting start time ' |
|
445 | 446 | 'for hourly or daily intervals'.format(cache_key)) |
|
446 | 447 | stats_since = stats_since.replace(hour=0, minute=0) |
|
447 | 448 | |
|
448 | 449 | storage_key = 'buckets_split_data:' + cache_key + ':{}|{}' |
|
449 | 450 | old_end_time = end_time.replace(hour=0, minute=0) |
|
450 | 451 | |
|
451 | 452 | final_storage_key = storage_key.format(delta.total_seconds(), |
|
452 | 453 | old_end_time) |
|
453 | 454 | older_data = None |
|
454 | 455 | |
|
455 | 456 | cdata = request.registry.cache_regions.redis_day_7.get( |
|
456 | 457 | final_storage_key) |
|
457 | 458 | |
|
458 | 459 | if cdata: |
|
459 | 460 | log.info("cached_split_data:{}: found old " |
|
460 | 461 | "bucket data".format(cache_key)) |
|
461 | 462 | older_data = cdata |
|
462 | 463 | |
|
463 | 464 | if (stats_since < end_time - h.time_deltas.get('24h')['delta'] and |
|
464 | 465 | not cdata): |
|
465 | 466 | log.info("cached_split_data:{}: didn't find the " |
|
466 | 467 | "start bucket in cache so load older data".format(cache_key)) |
|
467 | 468 | recent_stats_since = old_end_time |
|
468 | 469 | older_data = fn(request, stats_since, recent_stats_since, |
|
469 | 470 | db_session=db_session, *args, **kwargs) |
|
470 | 471 | request.registry.cache_regions.redis_day_7.set(final_storage_key, |
|
471 | 472 | older_data) |
|
472 | 473 | elif stats_since < end_time - h.time_deltas.get('24h')['delta']: |
|
473 | 474 | recent_stats_since = old_end_time |
|
474 | 475 | else: |
|
475 | 476 | recent_stats_since = stats_since |
|
476 | 477 | |
|
477 | 478 | log.info("cached_split_data:{}: loading fresh " |
|
478 | 479 | "data buckets from last 24h".format(cache_key))
|
479 | 480 | todays_data = fn(request, recent_stats_since, end_time, |
|
480 | 481 | db_session=db_session, *args, **kwargs) |
|
481 | 482 | return older_data, todays_data |
|
482 | 483 | |
|
483 | 484 | |
|
484 | 485 | def in_batches(seq, size): |
|
485 | 486 | """ |
|
486 | 487 | Splits an iterable into batches of the specified size
|
487 | 488 | :param seq (iterable) |
|
488 | 489 | :param size integer |
|
489 | 490 | """ |
|
490 | 491 | return (seq[pos:pos + size] for pos in range(0, len(seq), size)) |
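
A quick sketch; note that seq must support slicing, so lists and tuples work but plain generators do not:

    list(in_batches([1, 2, 3, 4, 5], 2))  # -> [[1, 2], [3, 4], [5]]
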
@@ -1,79 +1,79 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | |
|
19 | from appenlight.models.resource import Resource | |

19 | from ziggurat_foundations.models.services.resource import ResourceService | |
|
20 | 20 | from appenlight.models import Base, get_db_session |
|
21 | 21 | from sqlalchemy.orm import validates |
|
22 | 22 | from ziggurat_foundations.models.base import BaseModel |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class AlertChannelAction(Base, BaseModel): |
|
26 | 26 | """ |
|
27 | 27 | Stores notifications conditions for user's alert channels |
|
28 | 28 | This is later used for rule parsing like "alert if http_status == 500" |
|
29 | 29 | """ |
|
30 | 30 | __tablename__ = 'alert_channels_actions' |
|
31 | 31 | |
|
32 | 32 | types = ['report', 'chart'] |
|
33 | 33 | |
|
34 | 34 | owner_id = sa.Column(sa.Integer, |
|
35 | 35 | sa.ForeignKey('users.id', onupdate='CASCADE', |
|
36 | 36 | ondelete='CASCADE')) |
|
37 | 37 | resource_id = sa.Column(sa.Integer()) |
|
38 | 38 | action = sa.Column(sa.Unicode(10), nullable=False, default='always') |
|
39 | 39 | type = sa.Column(sa.Unicode(10), nullable=False) |
|
40 | 40 | other_id = sa.Column(sa.Unicode(40)) |
|
41 | 41 | pkey = sa.Column(sa.Integer(), nullable=False, primary_key=True) |
|
42 | 42 | rule = sa.Column(sa.dialects.postgresql.JSON, |
|
43 | 43 | nullable=False, default={'field': 'http_status', |
|
44 | 44 | "op": "ge", "value": "500"}) |
|
45 | 45 | config = sa.Column(sa.dialects.postgresql.JSON) |
|
46 | 46 | name = sa.Column(sa.Unicode(255)) |
|
47 | 47 | |
|
48 | 48 | @validates('notify_type') |
|
49 | 49 | def validate_email(self, key, notify_type): |
|
50 | 50 | assert notify_type in ['always', 'only_first'] |
|
51 | 51 | return notify_type |
|
52 | 52 | |
|
53 | 53 | def resource_name(self, db_session=None): |
|
54 | 54 | db_session = get_db_session(db_session) |
|
55 | 55 | if self.resource_id: |
|
56 | return Resource.by_resource_id( | |

57 | self.resource_id, db_session=db_session).resource_name | |
|
56 | return ResourceService.by_resource_id( | |
|
57 | self.resource_id, db_session=db_session).resource_name | |
|
58 | 58 | else: |
|
59 | 59 | return 'any resource' |
|
60 | 60 | |
|
61 | 61 | def get_dict(self, exclude_keys=None, include_keys=None, |
|
62 | 62 | extended_info=False): |
|
63 | 63 | """ |
|
64 | 64 | Returns dictionary with required information that will be consumed by |
|
65 | 65 | angular |
|
66 | 66 | """ |
|
67 | 67 | instance_dict = super(AlertChannelAction, self).get_dict() |
|
68 | 68 | exclude_keys_list = exclude_keys or [] |
|
69 | 69 | include_keys_list = include_keys or [] |
|
70 | 70 | if extended_info: |
|
71 | 71 | instance_dict['channels'] = [ |
|
72 | 72 | c.get_dict(extended_info=False) for c in self.channels] |
|
73 | 73 | |
|
74 | 74 | d = {} |
|
75 | 75 | for k in instance_dict.keys(): |
|
76 | 76 | if (k not in exclude_keys_list and |
|
77 | 77 | (k in include_keys_list or not include_keys)): |
|
78 | 78 | d[k] = instance_dict[k] |
|
79 | 79 | return d |
@@ -1,165 +1,165 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | import logging |
|
19 | 19 | |
|
20 | 20 | from datetime import datetime |
|
21 | 21 | from appenlight.models import Base, get_db_session |
|
22 | 22 | from appenlight.models.services.report_stat import ReportStatService |
|
23 | from appenlight.models.resource import Resource | |
|
24 | 23 | from appenlight.models.integrations import IntegrationException |
|
25 | 24 | from pyramid.threadlocal import get_current_request |
|
26 | 25 | from sqlalchemy.dialects.postgresql import JSON |
|
27 | 26 | from ziggurat_foundations.models.base import BaseModel |
|
27 | from ziggurat_foundations.models.services.resource import ResourceService | |
|
28 | 28 | |
|
29 | 29 | log = logging.getLogger(__name__) |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | class Event(Base, BaseModel): |
|
33 | 33 | __tablename__ = 'events' |
|
34 | 34 | |
|
35 | 35 | types = {'error_report_alert': 1, |
|
36 | 36 | 'slow_report_alert': 3, |
|
37 | 37 | 'comment': 5, |
|
38 | 38 | 'assignment': 6, |
|
39 | 39 | 'uptime_alert': 7, |
|
40 | 40 | 'chart_alert': 9} |
|
41 | 41 | |
|
42 | 42 | statuses = {'active': 1, |
|
43 | 43 | 'closed': 0} |
|
44 | 44 | |
|
45 | 45 | id = sa.Column(sa.Integer, primary_key=True) |
|
46 | 46 | start_date = sa.Column(sa.DateTime, default=datetime.utcnow) |
|
47 | 47 | end_date = sa.Column(sa.DateTime) |
|
48 | 48 | status = sa.Column(sa.Integer, default=1) |
|
49 | 49 | event_type = sa.Column(sa.Integer, default=1) |
|
50 | 50 | origin_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'), |
|
51 | 51 | nullable=True) |
|
52 | 52 | target_user_id = sa.Column(sa.Integer(), sa.ForeignKey('users.id'), |
|
53 | 53 | nullable=True) |
|
54 | 54 | resource_id = sa.Column(sa.Integer(), |
|
55 | 55 | sa.ForeignKey('resources.resource_id'), |
|
56 | 56 | nullable=True) |
|
57 | 57 | target_id = sa.Column(sa.Integer) |
|
58 | 58 | target_uuid = sa.Column(sa.Unicode(40)) |
|
59 | 59 | text = sa.Column(sa.UnicodeText()) |
|
60 | 60 | values = sa.Column(JSON(), nullable=False, default=None) |
|
61 | 61 | |
|
62 | 62 | def __repr__(self): |
|
63 | 63 | return '<Event %s, app:%s, %s>' % (self.unified_alert_name(), |
|
64 | 64 | self.resource_id, |
|
65 | 65 | self.unified_alert_action()) |
|
66 | 66 | |
|
67 | 67 | @property |
|
68 | 68 | def reverse_types(self): |
|
69 | 69 | return dict([(v, k) for k, v in self.types.items()]) |
|
70 | 70 | |
|
71 | 71 | def unified_alert_name(self): |
|
72 | 72 | return self.reverse_types[self.event_type] |
|
73 | 73 | |
|
74 | 74 | def unified_alert_action(self): |
|
75 | 75 | event_name = self.reverse_types[self.event_type] |
|
76 | 76 | if self.status == Event.statuses['closed']: |
|
77 | 77 | return "CLOSE" |
|
78 | 78 | if self.status != Event.statuses['closed']: |
|
79 | 79 | return "OPEN" |
|
80 | 80 | return event_name |
|
81 | 81 | |
|
82 | 82 | def send_alerts(self, request=None, resource=None, db_session=None): |
|
83 | 83 | """ Sends alerts to applicable channels """ |
|
84 | 84 | db_session = get_db_session(db_session) |
|
85 | 85 | db_session.flush() |
|
86 | 86 | if not resource: |
|
87 | resource = Resource.by_resource_id(self.resource_id) | |
|
87 | resource = ResourceService.by_resource_id(self.resource_id) | |
|
88 | 88 | if not request: |
|
89 | 89 | request = get_current_request() |
|
90 | 90 | if not resource: |
|
91 | 91 | return |
|
92 | users = set([p.user for p in resource.users_for_perm('view')]) | |
|
92 | users = set([p.user for p in ResourceService.users_for_perm(resource, 'view')]) | |
|
93 | 93 | for user in users: |
|
94 | 94 | for channel in user.alert_channels: |
|
95 | 95 | matches_resource = not channel.resources or resource.resource_id in [r.resource_id for r in channel.resources] |
|
96 | 96 | if ( |
|
97 | 97 | not channel.channel_validated or |
|
98 | 98 | not channel.send_alerts or |
|
99 | 99 | not matches_resource |
|
100 | 100 | ): |
|
101 | 101 | continue |
|
102 | 102 | else: |
|
103 | 103 | try: |
|
104 | 104 | channel.notify_alert(resource=resource, |
|
105 | 105 | event=self, |
|
106 | 106 | user=user, |
|
107 | 107 | request=request) |
|
108 | 108 | except IntegrationException as e: |
|
109 | 109 | log.warning('%s' % e) |
|
110 | 110 | |
|
111 | 111 | def validate_or_close(self, since_when, db_session=None): |
|
112 | 112 | """ Checks if alerts should stay open or it's time to close them. |
|
113 | 113 | Generates a close alert event if alerts get closed """ |
|
114 | 114 | event_types = [Event.types['error_report_alert'], |
|
115 | 115 | Event.types['slow_report_alert']] |
|
116 | app = Resource.by_resource_id(self.resource_id) | |
|
116 | app = ResourceService.by_resource_id(self.resource_id) | |
|
117 | 117 | # if app was deleted close instantly |
|
118 | 118 | if not app: |
|
119 | 119 | self.close() |
|
120 | 120 | return |
|
121 | 121 | |
|
122 | 122 | if self.event_type in event_types: |
|
123 | 123 | total = ReportStatService.count_by_type( |
|
124 | 124 | self.event_type, self.resource_id, since_when) |
|
125 | 125 | if Event.types['error_report_alert'] == self.event_type: |
|
126 | 126 | threshold = app.error_report_threshold |
|
127 | 127 | if Event.types['slow_report_alert'] == self.event_type: |
|
128 | 128 | threshold = app.slow_report_threshold |
|
129 | 129 | |
|
130 | 130 | if total < threshold: |
|
131 | 131 | self.close() |
|
132 | 132 | |
|
133 | 133 | def close(self, db_session=None): |
|
134 | 134 | """ |
|
135 | 135 | Closes an event and sends notification to affected users |
|
136 | 136 | """ |
|
137 | 137 | self.end_date = datetime.utcnow() |
|
138 | 138 | self.status = Event.statuses['closed'] |
|
139 | 139 | log.warning('ALERT: CLOSE: %s' % self) |
|
140 | 140 | self.send_alerts() |
|
141 | 141 | |
|
142 | 142 | def text_representation(self): |
|
143 | 143 | alert_type = self.unified_alert_name() |
|
144 | 144 | text = '' |
|
145 | 145 | if 'slow_report' in alert_type: |
|
146 | 146 | text += 'Slow report alert' |
|
147 | 147 | if 'error_report' in alert_type: |
|
148 | 148 | text += 'Exception report alert' |
|
149 | 149 | if 'uptime_alert' in alert_type: |
|
150 | 150 | text += 'Uptime alert' |
|
151 | 151 | if 'chart_alert' in alert_type: |
|
152 | 152 | text += 'Metrics value alert' |
|
153 | 153 | |
|
154 | 154 | alert_action = self.unified_alert_action() |
|
155 | 155 | if alert_action == 'OPEN': |
|
156 | 156 | text += ' got opened.' |
|
157 | 157 | if alert_action == 'CLOSE': |
|
158 | 158 | text += ' got closed.' |
|
159 | 159 | return text |
|
160 | 160 | |
|
161 | 161 | def get_dict(self, request=None): |
|
162 | 162 | dict_data = super(Event, self).get_dict() |
|
163 | 163 | dict_data['text'] = self.text_representation() |
|
164 | 164 | dict_data['resource_name'] = self.resource.resource_name |
|
165 | 165 | return dict_data |
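
The recurring edit in this file, and across the whole changeset, is the move
from ziggurat mixin instance methods to stateless service classmethods that
take the model instance as their first argument. Side by side (a sketch of the
pattern, not an excerpt from the diff):

    # before: behaviour lived on the model via the ziggurat mixin
    users = set(p.user for p in resource.users_for_perm('view'))

    # after: behaviour lives on a service class; the instance is passed in
    users = set(p.user for p in
                ResourceService.users_for_perm(resource, 'view'))
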
@@ -1,83 +1,84 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | from appenlight.models import Base |
|
19 | 19 | from appenlight.lib.utils import permission_tuple_to_dict |
|
20 | 20 | from pyramid.security import Allow, ALL_PERMISSIONS |
|
21 | 21 | from ziggurat_foundations.models.resource import ResourceMixin |
|
22 | from ziggurat_foundations.models.services.resource import ResourceService | |
|
22 | 23 | |
|
23 | 24 | |
|
24 | 25 | class Resource(ResourceMixin, Base): |
|
25 | 26 | events = sa.orm.relationship('Event', |
|
26 | 27 | lazy='dynamic', |
|
27 | 28 | backref='resource', |
|
28 | 29 | passive_deletes=True, |
|
29 | 30 | passive_updates=True) |
|
30 | 31 | |
|
31 | 32 | @property |
|
32 | 33 | def owner_user_name(self): |
|
33 | 34 | if self.owner: |
|
34 | 35 | return self.owner.user_name |
|
35 | 36 | |
|
36 | 37 | @property |
|
37 | 38 | def owner_group_name(self): |
|
38 | 39 | if self.owner_group: |
|
39 | 40 | return self.owner_group.group_name |
|
40 | 41 | |
|
41 | 42 | def get_dict(self, exclude_keys=None, include_keys=None, |
|
42 | 43 | include_perms=False, include_processing_rules=False): |
|
43 | 44 | result = super(Resource, self).get_dict(exclude_keys, include_keys) |
|
44 | 45 | result['possible_permissions'] = self.__possible_permissions__ |
|
45 | 46 | if include_perms: |
|
46 | 47 | result['current_permissions'] = self.user_permissions_list |
|
47 | 48 | else: |
|
48 | 49 | result['current_permissions'] = [] |
|
49 | 50 | if include_processing_rules: |
|
50 | 51 | result["postprocessing_rules"] = [rule.get_dict() for rule |
|
51 | 52 | in self.postprocess_conf] |
|
52 | 53 | else: |
|
53 | 54 | result["postprocessing_rules"] = [] |
|
54 | 55 | exclude_keys_list = exclude_keys or [] |
|
55 | 56 | include_keys_list = include_keys or [] |
|
56 | 57 | d = {} |
|
57 | 58 | for k in result.keys(): |
|
58 | 59 | if (k not in exclude_keys_list and |
|
59 | 60 | (k in include_keys_list or not include_keys)): |
|
60 | 61 | d[k] = result[k] |
|
61 | 62 | for k in ['owner_user_name', 'owner_group_name']: |
|
62 | 63 | if (k not in exclude_keys_list and |
|
63 | 64 | (k in include_keys_list or not include_keys)): |
|
64 | 65 | d[k] = getattr(self, k) |
|
65 | 66 | return d |
|
66 | 67 | |
|
67 | 68 | @property |
|
68 | 69 | def user_permissions_list(self): |
|
69 | 70 | return [permission_tuple_to_dict(perm) for perm in |
|
70 | self.users_for_perm( | |

71 | '__any_permission__', limit_group_permissions=True)] | |
|
71 | ResourceService.users_for_perm( | |
|
72 | self, '__any_permission__', limit_group_permissions=True)] | |
|
72 | 73 | |
|
73 | 74 | @property |
|
74 | 75 | def __acl__(self): |
|
75 | 76 | acls = [] |
|
76 | 77 | |
|
77 | 78 | if self.owner_user_id: |
|
78 | 79 | acls.extend([(Allow, self.owner_user_id, ALL_PERMISSIONS,), ]) |
|
79 | 80 | |
|
80 | 81 | if self.owner_group_id: |
|
81 | 82 | acls.extend([(Allow, "group:%s" % self.owner_group_id, |
|
82 | 83 | ALL_PERMISSIONS,), ]) |
|
83 | 84 | return acls |
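
The __acl__ property above emits standard Pyramid ACE tuples; for a resource
owned by user 1 and group 2 the returned list would look roughly like this
(the ids are illustrative):

    from pyramid.security import Allow, ALL_PERMISSIONS

    # resource.owner_user_id == 1, resource.owner_group_id == 2
    assert resource.__acl__ == [
        (Allow, 1, ALL_PERMISSIONS),
        (Allow, 'group:2', ALL_PERMISSIONS),
    ]
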
@@ -1,109 +1,109 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import sqlalchemy as sa |
|
18 | 18 | from pyramid.threadlocal import get_current_registry |
|
19 | 19 | from paginate_sqlalchemy import SqlalchemyOrmPage |
|
20 | from ziggurat_foundations.models.services.user import UserService | |
|
21 | ||
|
20 | 22 | from appenlight.models import get_db_session |
|
21 | 23 | from appenlight.models.event import Event |
|
22 | 24 | from appenlight.models.services.base import BaseService |
|
23 | 25 | |
|
24 | 26 | |
|
25 | 27 | class EventService(BaseService): |
|
26 | 28 | @classmethod |
|
27 | 29 | def for_resource(cls, resource_ids, event_type=None, status=None, |
|
28 | 30 | since_when=None, limit=20, event_id=None, |
|
29 | 31 | target_uuid=None, order_by=None, or_target_user_id=None, |
|
30 | 32 | db_session=None): |
|
31 | 33 | """ |
|
32 | 34 | Fetches events based on the passed params; if or_target_user_id |

33 | 35 | is present, also includes events that target that user |
|
34 | 36 | """ |
|
35 | 37 | db_session = get_db_session(db_session) |
|
36 | 38 | query = db_session.query(Event) |
|
37 | 39 | query = query.options(sa.orm.joinedload(Event.resource)) |
|
38 | 40 | and_cond = [Event.resource_id.in_(resource_ids)] |
|
39 | 41 | if not resource_ids: |
|
40 | 42 | and_cond = [Event.resource_id == -999] |
|
41 | 43 | |
|
42 | 44 | if event_type: |
|
43 | 45 | and_cond.append(Event.event_type == event_type) |
|
44 | 46 | if status: |
|
45 | 47 | and_cond.append(Event.status == status) |
|
46 | 48 | if since_when: |
|
47 | 49 | and_cond.append(Event.start_date >= since_when) |
|
48 | 50 | if event_id: |
|
49 | 51 | and_cond.append(Event.id == event_id) |
|
50 | 52 | if target_uuid: |
|
51 | 53 | and_cond.append(Event.target_uuid == target_uuid) |
|
52 | 54 | |
|
53 | 55 | or_cond = [] |
|
54 | 56 | |
|
55 | 57 | if or_target_user_id: |
|
56 | 58 | or_cond.append(sa.or_(Event.target_user_id == or_target_user_id)) |
|
57 | 59 | |
|
58 | 60 | query = query.filter(sa.or_(sa.and_(*and_cond), |
|
59 | 61 | *or_cond)) |
|
60 | 62 | if not order_by: |
|
61 | 63 | query = query.order_by(sa.desc(Event.start_date)) |
|
62 | 64 | if limit: |
|
63 | 65 | query = query.limit(limit) |
|
64 | 66 | |
|
65 | 67 | return query |
|
66 | 68 | |
|
67 | 69 | @classmethod |
|
68 | 70 | def by_type_and_status(cls, event_types, status_types, since_when=None, |
|
69 | 71 | older_than=None, db_session=None, app_ids=None): |
|
70 | 72 | db_session = get_db_session(db_session) |
|
71 | 73 | query = db_session.query(Event) |
|
72 | 74 | query = query.filter(Event.event_type.in_(event_types)) |
|
73 | 75 | query = query.filter(Event.status.in_(status_types)) |
|
74 | 76 | if since_when: |
|
75 | 77 | query = query.filter(Event.start_date >= since_when) |
|
76 | 78 | if older_than: |
|
77 | 79 | query = query.filter(Event.start_date <= older_than) |
|
78 | 80 | if app_ids: |
|
79 | 81 | query = query.filter(Event.resource_id.in_(app_ids)) |
|
80 | 82 | return query |
|
81 | 83 | |
|
82 | 84 | @classmethod |
|
83 | 85 | def latest_for_user(cls, user, db_session=None): |
|
84 | 86 | registry = get_current_registry() |
|
85 | resources = user.resources_with_perms( | |
|
86 | ['view'], resource_types=registry.resource_types) | |
|
87 | resources = UserService.resources_with_perms(user, ['view'], resource_types=registry.resource_types) | |
|
87 | 88 | resource_ids = [r.resource_id for r in resources] |
|
88 | 89 | db_session = get_db_session(db_session) |
|
89 | 90 | return EventService.for_resource( |
|
90 | 91 | resource_ids, or_target_user_id=user.id, limit=10, |
|
91 | 92 | db_session=db_session) |
|
92 | 93 | |
|
93 | 94 | @classmethod |
|
94 | 95 | def get_paginator(cls, user, page=1, item_count=None, items_per_page=50, |
|
95 | 96 | order_by=None, filter_settings=None, db_session=None): |
|
96 | 97 | if not filter_settings: |
|
97 | 98 | filter_settings = {} |
|
98 | 99 | registry = get_current_registry() |
|
99 | resources = user.resources_with_perms( | |
|
100 | ['view'], resource_types=registry.resource_types) | |
|
100 | resources = UserService.resources_with_perms(user, ['view'], resource_types=registry.resource_types) | |
|
101 | 101 | resource_ids = [r.resource_id for r in resources] |
|
102 | 102 | query = EventService.for_resource( |
|
103 | 103 | resource_ids, or_target_user_id=user.id, limit=100, |
|
104 | 104 | db_session=db_session) |
|
105 | 105 | |
|
106 | 106 | paginator = SqlalchemyOrmPage(query, page=page, |
|
107 | 107 | items_per_page=items_per_page, |
|
108 | 108 | **filter_settings) |
|
109 | 109 | return paginator |
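
A hedged usage sketch for for_resource -- fetching recent open error-report
alerts for two applications (the ids and the time window are made up):

    from datetime import datetime, timedelta

    events = EventService.for_resource(
        [1, 2],
        event_type=Event.types['error_report_alert'],
        status=Event.statuses['active'],
        since_when=datetime.utcnow() - timedelta(hours=1)).all()
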
@@ -1,27 +1,27 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from appenlight.models import get_db_session |
|
18 | 18 | from appenlight.models.group import Group |
|
19 | from appenlight.models.services.base import BaseService | |
|
19 | from ziggurat_foundations.models.services.group import GroupService | |
|
20 | 20 | |
|
21 | 21 | |
|
22 | class GroupService(BaseService): | |
|
22 | class GroupService(GroupService): | |
|
23 | 23 | @classmethod |
|
24 | 24 | def by_id(cls, group_id, db_session=None): |
|
25 | 25 | db_session = get_db_session(db_session) |
|
26 | 26 | query = db_session.query(Group).filter(Group.id == group_id) |
|
27 | 27 | return query.first() |
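
Note that class GroupService(GroupService) works because the base-class
expression is evaluated before the class statement rebinds the name, so it
still resolves to the freshly imported ziggurat service, which the local
subclass then shadows. An import alias would make that intent explicit (a
readability suggestion, not what the diff does):

    from ziggurat_foundations.models.services.group import \
        GroupService as ZigguratGroupService

    class GroupService(ZigguratGroupService):
        ...
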
@@ -1,33 +1,33 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from appenlight.models.group_resource_permission import GroupResourcePermission |
|
18 | 18 | from appenlight.models import get_db_session |
|
19 | from appenlight.models.services.base import BaseService | |
|
19 | from ziggurat_foundations.models.services.group_resource_permission import GroupResourcePermissionService | |
|
20 | 20 | |
|
21 | 21 | |
|
22 | class GroupResourcePermissionService(BaseService): | |
|
22 | class GroupResourcePermissionService(GroupResourcePermissionService): | |
|
23 | 23 | @classmethod |
|
24 | 24 | def by_resource_group_and_perm(cls, group_id, perm_name, resource_id, |
|
25 | 25 | db_session=None): |
|
26 | 26 | """ return all instances by group id, perm name and resource id """ |
|
27 | 27 | db_session = get_db_session(db_session) |
|
28 | 28 | query = db_session.query(GroupResourcePermission) |
|
29 | 29 | query = query.filter(GroupResourcePermission.group_id == group_id) |
|
30 | 30 | query = query.filter( |
|
31 | 31 | GroupResourcePermission.resource_id == resource_id) |
|
32 | 32 | query = query.filter(GroupResourcePermission.perm_name == perm_name) |
|
33 | 33 | return query.first() |
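
A usage sketch for by_resource_group_and_perm -- checking whether group 1
holds 'view' on resource 42 (all identifiers are illustrative):

    perm = GroupResourcePermissionService.by_resource_group_and_perm(
        group_id=1, perm_name='view', resource_id=42)
    if perm is None:
        # the group has no such permission on this resource
        ...
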
@@ -1,151 +1,152 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | import pyramid_mailer |
|
19 | 19 | import pyramid.renderers |
|
20 | 20 | import sqlalchemy as sa |
|
21 | 21 | |
|
22 | 22 | from collections import namedtuple |
|
23 | 23 | from datetime import datetime |
|
24 | 24 | |
|
25 | from ziggurat_foundations.models.services.user import UserService | |
|
26 | ||
|
25 | 27 | from appenlight.lib.rule import Rule |
|
26 | 28 | from appenlight.models import get_db_session |
|
27 | 29 | from appenlight.models.integrations import IntegrationException |
|
28 | 30 | from appenlight.models.report import REPORT_TYPE_MATRIX |
|
29 | 31 | from appenlight.models.user import User |
|
30 | from appenlight.models.services.base import BaseService | |
|
31 | 32 | from paginate_sqlalchemy import SqlalchemyOrmPage |
|
32 | 33 | from pyramid.threadlocal import get_current_registry |
|
33 | 34 | |
|
34 | 35 | log = logging.getLogger(__name__) |
|
35 | 36 | |
|
36 | 37 | GroupOccurence = namedtuple('GroupOccurence', ['occurences', 'group']) |
|
37 | 38 | |
|
38 | 39 | |
|
39 | class UserService(BaseService): | |
|
40 | class UserService(UserService): | |
|
40 | 41 | @classmethod |
|
41 | 42 | def all(cls, db_session=None): |
|
42 | 43 | return get_db_session(db_session).query(User).order_by(User.user_name) |
|
43 | 44 | |
|
44 | 45 | @classmethod |
|
45 | 46 | def send_email(cls, request, recipients, variables, template, |
|
46 | 47 | immediately=False, silent=False): |
|
47 | 48 | html = pyramid.renderers.render(template, variables, request) |
|
48 | 49 | title = variables.get('email_title', |
|
49 | 50 | variables.get('title', "No Title")) |
|
50 | 51 | title = title.replace('\r', '').replace('\n', '') |
|
51 | 52 | sender = "{} <{}>".format( |
|
52 | 53 | request.registry.settings['mailing.from_name'], |
|
53 | 54 | request.registry.settings['mailing.from_email']) |
|
54 | 55 | message = pyramid_mailer.message.Message( |
|
55 | 56 | subject=title, sender=sender, recipients=recipients, html=html) |
|
56 | 57 | if immediately: |
|
57 | 58 | try: |
|
58 | 59 | request.registry.mailer.send_immediately(message) |
|
59 | 60 | except Exception as e: |
|
60 | 61 | log.warning('Exception %s' % e) |
|
61 | 62 | if not silent: |
|
62 | 63 | raise |
|
63 | 64 | else: |
|
64 | 65 | request.registry.mailer.send(message) |
|
65 | 66 | |
|
66 | 67 | @classmethod |
|
67 | 68 | def get_paginator(cls, page=1, item_count=None, items_per_page=50, |
|
68 | 69 | order_by=None, filter_settings=None, |
|
69 | 70 | exclude_columns=None, db_session=None): |
|
70 | 71 | registry = get_current_registry() |
|
71 | 72 | if not exclude_columns: |
|
72 | 73 | exclude_columns = [] |
|
73 | 74 | if not filter_settings: |
|
74 | 75 | filter_settings = {} |
|
75 | 76 | db_session = get_db_session(db_session) |
|
76 | 77 | q = db_session.query(User) |
|
77 | 78 | if filter_settings.get('order_col'): |
|
78 | 79 | order_col = filter_settings.get('order_col') |
|
79 | 80 | if filter_settings.get('order_dir') == 'dsc': |
|
80 | 81 | sort_on = 'desc' |
|
81 | 82 | else: |
|
82 | 83 | sort_on = 'asc' |
|
83 | 84 | q = q.order_by(getattr(sa, sort_on)(getattr(User, order_col))) |
|
84 | 85 | else: |
|
85 | 86 | q = q.order_by(sa.desc(User.registered_date)) |
|
86 | 87 | # remove urlgen params or the count never gets cached |
|
87 | 88 | cache_params = dict(filter_settings) |
|
88 | 89 | cache_params.pop('url', None) |
|
89 | 90 | cache_params.pop('url_maker', None) |
|
90 | 91 | |
|
91 | 92 | @registry.cache_regions.redis_min_5.cache_on_arguments() |
|
92 | 93 | def estimate_users(cache_key): |
|
93 | 94 | o_q = q.order_by(False) |
|
94 | 95 | return o_q.count() |
|
95 | 96 | |
|
96 | 97 | item_count = estimate_users(cache_params) |
|
97 | 98 | # if the number of pages is low we may want to invalidate the count to |
|
98 | 99 | # provide 'real time' update - use case - |
|
99 | 100 | # errors just started to flow in |
|
100 | 101 | if item_count < 1000: |
|
101 | 102 | item_count = estimate_users.refresh(cache_params) |
|
102 | 103 | paginator = SqlalchemyOrmPage(q, page=page, |
|
103 | 104 | item_count=item_count, |
|
104 | 105 | items_per_page=items_per_page, |
|
105 | 106 | **filter_settings) |
|
106 | 107 | return paginator |
|
107 | 108 | |
|
108 | 109 | @classmethod |
|
109 | 110 | def get_valid_channels(cls, user): |
|
110 | 111 | return [channel for channel in user.alert_channels |
|
111 | 112 | if channel.channel_validated] |
|
112 | 113 | |
|
113 | 114 | @classmethod |
|
114 | 115 | def report_notify(cls, user, request, application, report_groups, |
|
115 | 116 | occurence_dict, db_session=None): |
|
116 | 117 | db_session = get_db_session(db_session) |
|
117 | 118 | if not report_groups: |
|
118 | 119 | return True |
|
119 | 120 | since_when = datetime.utcnow() |
|
120 | 121 | for channel in cls.get_valid_channels(user): |
|
121 | 122 | confirmed_groups = [] |
|
122 | 123 | |
|
123 | 124 | for group in report_groups: |
|
124 | 125 | occurences = occurence_dict.get(group.id, 1) |
|
125 | 126 | for action in channel.channel_actions: |
|
126 | 127 | not_matched = ( |
|
127 | 128 | action.resource_id and action.resource_id != |
|
128 | 129 | application.resource_id) |
|
129 | 130 | if action.type != 'report' or not_matched: |
|
130 | 131 | continue |
|
131 | 132 | should_notify = (action.action == 'always' or |
|
132 | 133 | not group.notified) |
|
133 | 134 | rule_obj = Rule(action.rule, REPORT_TYPE_MATRIX) |
|
134 | 135 | report_dict = group.get_report().get_dict(request) |
|
135 | 136 | if rule_obj.match(report_dict) and should_notify: |
|
136 | 137 | item = GroupOccurence(occurences, group) |
|
137 | 138 | if item not in confirmed_groups: |
|
138 | 139 | confirmed_groups.append(item) |
|
139 | 140 | |
|
140 | 141 | # send individual reports |
|
141 | 142 | total_confirmed = len(confirmed_groups) |
|
142 | 143 | if not total_confirmed: |
|
143 | 144 | continue |
|
144 | 145 | try: |
|
145 | 146 | channel.notify_reports(resource=application, |
|
146 | 147 | user=user, |
|
147 | 148 | request=request, |
|
148 | 149 | since_when=since_when, |
|
149 | 150 | reports=confirmed_groups) |
|
150 | 151 | except IntegrationException as e: |
|
151 | 152 | log.warning('%s' % e) |
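
report_notify decides per channel action via Rule(action.rule,
REPORT_TYPE_MATRIX). Assuming the default rule shipped with
AlertChannelAction and the Rule.match(report_dict) semantics used above, the
check reduces to roughly this (a sketch with a simplified report dict):

    rule = Rule({'field': 'http_status', 'op': 'ge', 'value': '500'},
                REPORT_TYPE_MATRIX)
    rule.match({'http_status': 502})  # truthy: 502 >= 500
    rule.match({'http_status': 404})  # falsy: below the threshold
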
@@ -1,133 +1,133 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | import sqlalchemy as sa |
|
19 | 19 | from datetime import datetime |
|
20 | 20 | from appenlight.models import Base, get_db_session |
|
21 | 21 | from appenlight.models.services.event import EventService |
|
22 | 22 | from appenlight.models.integrations import IntegrationException |
|
23 | 23 | from pyramid.threadlocal import get_current_request |
|
24 | 24 | from ziggurat_foundations.models.user import UserMixin |
|
25 | from ziggurat_foundations.models.services.user import UserService | |
|
25 | 26 | |
|
26 | 27 | log = logging.getLogger(__name__) |
|
27 | 28 | |
|
28 | 29 | |
|
29 | 30 | class User(UserMixin, Base): |
|
30 | 31 | __possible_permissions__ = [] |
|
31 | 32 | |
|
32 | 33 | first_name = sa.Column(sa.Unicode(25)) |
|
33 | 34 | last_name = sa.Column(sa.Unicode(25)) |
|
34 | 35 | company_name = sa.Column(sa.Unicode(255), default='') |
|
35 | 36 | company_address = sa.Column(sa.Unicode(255), default='') |
|
36 | 37 | zip_code = sa.Column(sa.Unicode(25), default='') |
|
37 | 38 | city = sa.Column(sa.Unicode(50), default='') |
|
38 | 39 | default_report_sort = sa.Column(sa.Unicode(25), default='newest') |
|
39 | 40 | notes = sa.Column(sa.UnicodeText, default='') |
|
40 | 41 | notifications = sa.Column(sa.Boolean(), default=True) |
|
41 | 42 | registration_ip = sa.Column(sa.UnicodeText(), default='') |
|
42 | 43 | alert_channels = sa.orm.relationship('AlertChannel', |
|
43 | 44 | cascade="all,delete-orphan", |
|
44 | 45 | passive_deletes=True, |
|
45 | 46 | passive_updates=True, |
|
46 | 47 | backref='owner', |
|
47 | 48 | order_by='AlertChannel.channel_name, ' |
|
48 | 49 | 'AlertChannel.channel_value') |
|
49 | 50 | |
|
50 | 51 | alert_actions = sa.orm.relationship('AlertChannelAction', |
|
51 | 52 | cascade="all,delete-orphan", |
|
52 | 53 | passive_deletes=True, |
|
53 | 54 | passive_updates=True, |
|
54 | 55 | backref='owner', |
|
55 | 56 | order_by='AlertChannelAction.pkey') |
|
56 | 57 | |
|
57 | 58 | auth_tokens = sa.orm.relationship('AuthToken', |
|
58 | 59 | cascade="all,delete-orphan", |
|
59 | 60 | passive_deletes=True, |
|
60 | 61 | passive_updates=True, |
|
61 | 62 | backref='owner', |
|
62 | 63 | order_by='AuthToken.creation_date') |
|
63 | 64 | |
|
64 | 65 | def get_dict(self, exclude_keys=None, include_keys=None, |
|
65 | 66 | extended_info=False): |
|
66 | 67 | result = super(User, self).get_dict(exclude_keys, include_keys) |
|
67 | 68 | if extended_info: |
|
68 | 69 | result['groups'] = [g.group_name for g in self.groups] |
|
69 | result['permissions'] = [p.perm_name for p in self.permissions] | |
|
70 | result['permissions'] = [p.perm_name for p in UserService.permissions(self)] | |
|
70 | 71 | request = get_current_request() |
|
71 | apps = self.resources_with_perms( | |
|
72 | apps = UserService.resources_with_perms(self, | |
|
72 | 73 | ['view'], resource_types=['application']) |
|
73 | 74 | result['applications'] = sorted( |
|
74 | 75 | [{'resource_id': a.resource_id, |
|
75 | 76 | 'resource_name': a.resource_name} |
|
76 | 77 | for a in apps.all()], |
|
77 | 78 | key=lambda x: x['resource_name'].lower()) |
|
78 | 79 | result['assigned_reports'] = [r.get_dict(request) for r |
|
79 | 80 | in self.assigned_report_groups] |
|
80 | 81 | result['latest_events'] = [ev.get_dict(request) for ev |
|
81 | 82 | in self.latest_events()] |
|
82 | 83 | |
|
83 | 84 | exclude_keys_list = exclude_keys or [] |
|
84 | 85 | include_keys_list = include_keys or [] |
|
85 | 86 | d = {} |
|
86 | 87 | for k in result.keys(): |
|
87 | 88 | if (k not in exclude_keys_list and |
|
88 | 89 | (k in include_keys_list or not include_keys)): |
|
89 | 90 | d[k] = result[k] |
|
90 | 91 | return d |
|
91 | 92 | |
|
92 | 93 | def __repr__(self): |
|
93 | 94 | return '<User: %s, id: %s>' % (self.user_name, self.id) |
|
94 | 95 | |
|
95 | 96 | @property |
|
96 | 97 | def assigned_report_groups(self): |
|
97 | 98 | from appenlight.models.report_group import ReportGroup |
|
98 | 99 | |
|
99 | resources = self.resources_with_perms( | |
|
100 | ['view'], resource_types=['application']) | |
|
100 | resources = UserService.resources_with_perms(self, ['view'], resource_types=['application']) | |
|
101 | 101 | query = self.assigned_reports_relation |
|
102 | 102 | rid_list = [r.resource_id for r in resources] |
|
103 | 103 | query = query.filter(ReportGroup.resource_id.in_(rid_list)) |
|
104 | 104 | query = query.limit(50) |
|
105 | 105 | return query |
|
106 | 106 | |
|
107 | 107 | def feed_report(self, report): |
|
108 | 108 | """ """ |
|
109 | 109 | if not hasattr(self, 'current_reports'): |
|
110 | 110 | self.current_reports = [] |
|
111 | 111 | self.current_reports.append(report) |
|
112 | 112 | |
|
113 | 113 | def send_digest(self, request, application, reports, since_when=None, |
|
114 | 114 | db_session=None): |
|
115 | 115 | db_session = get_db_session(db_session) |
|
116 | 116 | if not reports: |
|
117 | 117 | return True |
|
118 | 118 | if not since_when: |
|
119 | 119 | since_when = datetime.utcnow() |
|
120 | 120 | for channel in self.alert_channels: |
|
121 | 121 | if not channel.channel_validated or not channel.daily_digest: |
|
122 | 122 | continue |
|
123 | 123 | try: |
|
124 | 124 | channel.send_digest(resource=application, |
|
125 | 125 | user=self, |
|
126 | 126 | request=request, |
|
127 | 127 | since_when=since_when, |
|
128 | 128 | reports=reports) |
|
129 | 129 | except IntegrationException as e: |
|
130 | 130 | log.warning('%s' % e) |
|
131 | 131 | |
|
132 | 132 | def latest_events(self): |
|
133 | 133 | return EventService.latest_for_user(self) |
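
With extended_info=True, get_dict layers the service-backed lookups on top of
the base serialization; the result shape is roughly the following (only the
keys added in this method are shown, values elided):

    user.get_dict(extended_info=True)
    # {'user_name': ...,        # base fields from the ziggurat mixin
    #  'groups': [...],         # group names
    #  'permissions': [...],    # via UserService.permissions(self)
    #  'applications': [...],   # sorted by lowercased resource_name
    #  'assigned_reports': [...],
    #  'latest_events': [...]}
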
@@ -1,153 +1,155 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import argparse |
|
18 | 18 | import getpass |
|
19 | 19 | import logging |
|
20 | 20 | |
|
21 | 21 | from pyramid.paster import setup_logging, bootstrap |
|
22 | 22 | from pyramid.threadlocal import get_current_request |
|
23 | from ziggurat_foundations.models.services.user import UserService | |
|
24 | ||
|
23 | 25 | |
|
24 | 26 | from appenlight.forms import UserRegisterForm |
|
25 | 27 | from appenlight.lib.ext_json import json |
|
26 | 28 | from appenlight.models import ( |
|
27 | 29 | DBSession, |
|
28 | 30 | Group, |
|
29 | 31 | GroupPermission, |
|
30 | 32 | User, |
|
31 | 33 | AuthToken |
|
32 | 34 | ) |
|
33 | 35 | from appenlight.models.services.group import GroupService |
|
34 | 36 | |
|
35 | 37 | log = logging.getLogger(__name__) |
|
36 | 38 | |
|
37 | 39 | _ = str |
|
38 | 40 | |
|
39 | 41 | |
|
40 | 42 | def is_yes(input_data): |
|
41 | 43 | return input_data in ['y', 'yes'] |
|
42 | 44 | |
|
43 | 45 | |
|
44 | 46 | def is_no(input_data): |
|
45 | 47 | return input_data in ['n', 'no'] |
|
46 | 48 | |
|
47 | 49 | |
|
48 | 50 | def main(): |
|
49 | 51 | parser = argparse.ArgumentParser( |
|
50 | 52 | description='Populate AppEnlight database', |
|
51 | 53 | add_help=False) |
|
52 | 54 | parser.add_argument('-c', '--config', required=True, |
|
53 | 55 | help='Configuration ini file of application') |
|
54 | 56 | parser.add_argument('--username', default=None, |
|
55 | 57 | help='User to create') |
|
56 | 58 | parser.add_argument('--password', default=None, |
|
57 | 59 | help='Password for created user') |
|
58 | 60 | parser.add_argument('--email', default=None, |
|
59 | 61 | help='Email for created user') |
|
60 | 62 | parser.add_argument('--auth-token', default=None, |
|
61 | 63 | help='Auth token for created user') |
|
62 | 64 | args = parser.parse_args() |
|
63 | 65 | config_uri = args.config |
|
64 | 66 | |
|
65 | 67 | setup_logging(config_uri) |
|
66 | 68 | env = bootstrap(config_uri) |
|
67 | 69 | request = env['request'] |
|
68 | 70 | with get_current_request().tm: |
|
69 | 71 | group = GroupService.by_id(1) |
|
70 | 72 | if not group: |
|
71 | 73 | group = Group(id=1, group_name='Administrators', |
|
72 | 74 | description="Top level permission owners") |
|
73 | 75 | DBSession.add(group) |
|
74 | 76 | permission = GroupPermission(perm_name='root_administration') |
|
75 | 77 | group.permissions.append(permission) |
|
76 | 78 | |
|
77 | 79 | create_user = True if args.username else None |
|
78 | 80 | while create_user is None: |
|
79 | 81 | response = input( |
|
80 | 82 | 'Do you want to create a new admin? (n)\n').lower() |
|
81 | 83 | |
|
82 | 84 | if is_yes(response or 'n'): |
|
83 | 85 | create_user = True |
|
84 | 86 | elif is_no(response or 'n'): |
|
85 | 87 | create_user = False |
|
86 | 88 | |
|
87 | 89 | if create_user: |
|
88 | 90 | csrf_token = request.session.get_csrf_token() |
|
89 | 91 | user_name = args.username |
|
90 | 92 | print('*********************************************************') |
|
91 | 93 | while user_name is None: |
|
92 | 94 | response = input('What is the username of new admin?\n') |
|
93 | 95 | form = UserRegisterForm( |
|
94 | 96 | user_name=response, csrf_token=csrf_token, |
|
95 | 97 | csrf_context=request) |
|
96 | 98 | form.validate() |
|
97 | 99 | if form.user_name.errors: |
|
98 | 100 | print(form.user_name.errors[0]) |
|
99 | 101 | else: |
|
100 | 102 | user_name = response |
|
101 | 103 | print('The admin username is "{}"\n'.format(user_name)) |
|
102 | 104 | print('*********************************************************') |
|
103 | 105 | email = args.email |
|
104 | 106 | while email is None: |
|
105 | 107 | response = input('What is the email of admin account?\n') |
|
106 | 108 | form = UserRegisterForm( |
|
107 | 109 | email=response, csrf_token=csrf_token, |
|
108 | 110 | csrf_context=request) |
|
109 | 111 | form.validate() |
|
110 | 112 | if form.email.errors: |
|
111 | 113 | print(form.email.errors[0]) |
|
112 | 114 | else: |
|
113 | 115 | email = response |
|
114 | 116 | print('The admin email is "{}"\n'.format(email)) |
|
115 | 117 | print('*********************************************************') |
|
116 | 118 | user_password = args.password |
|
117 | 119 | confirmed_password = args.password |
|
118 | 120 | while user_password is None or confirmed_password is None: |
|
119 | 121 | response = getpass.getpass( |
|
120 | 122 | 'What is the password for admin account?\n') |
|
121 | 123 | form = UserRegisterForm( |
|
122 | 124 | user_password=response, csrf_token=csrf_token, |
|
123 | 125 | csrf_context=request) |
|
124 | 126 | form.validate() |
|
125 | 127 | if form.user_password.errors: |
|
126 | 128 | print(form.user_password.errors[0]) |
|
127 | 129 | else: |
|
128 | 130 | user_password = response |
|
129 | 131 | |
|
130 | 132 | response = getpass.getpass('Please confirm the password.\n') |
|
131 | 133 | if user_password == response: |
|
132 | 134 | confirmed_password = response |
|
133 | 135 | else: |
|
134 | 136 | print('Passwords do not match. Please try again') |
|
135 | 137 | print('*********************************************************') |
|
136 | 138 | |
|
137 | 139 | with get_current_request().tm: |
|
138 | 140 | if create_user: |
|
139 | 141 | group = GroupService.by_id(1) |
|
140 | 142 | user = User(user_name=user_name, email=email, status=1) |
|
141 | user.regenerate_security_code() | |

142 | user.set_password(user_password) | |
|
143 | UserService.regenerate_security_code(user) | |
|
144 | UserService.set_password(user, user_password) | |
|
143 | 145 | DBSession.add(user) |
|
144 | 146 | token = AuthToken(description="Uptime monitoring token") |
|
145 | 147 | if args.auth_token: |
|
146 | 148 | token.token = args.auth_token |
|
147 | 149 | user.auth_tokens.append(token) |
|
148 | 150 | group.users.append(user) |
|
149 | 151 | print('USER CREATED') |
|
150 | 152 | print(json.dumps(user.get_dict())) |
|
151 | 153 | print('*********************************************************') |
|
152 | 154 | print('AUTH TOKEN') |
|
153 | 155 | print(json.dumps(user.auth_tokens[0].get_dict())) |
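
Every prompt in this script can be skipped through the argparse flags defined
above, so it also runs unattended; an illustrative invocation (the script
name is a placeholder, the flags are taken from the parser):

    <initialize-db-script> -c appenlight.ini --username admin \
        --email admin@example.com --password s3cret --auth-token SOMESEED
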
@@ -1,346 +1,348 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from pyramid.security import Allow, Everyone, Authenticated, ALL_PERMISSIONS |
|
18 | 18 | from pyramid.authentication import CallbackAuthenticationPolicy |
|
19 | 19 | import appenlight.models.resource |
|
20 | 20 | from appenlight.models.services.auth_token import AuthTokenService |
|
21 | 21 | from appenlight.models.services.application import ApplicationService |
|
22 | 22 | from appenlight.models.services.report_group import ReportGroupService |
|
23 | 23 | from appenlight.models.services.plugin_config import PluginConfigService |
|
24 | 24 | from appenlight.lib import to_integer_safe |
|
25 | 25 | from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest |
|
26 | 26 | from ziggurat_foundations.permissions import permission_to_04_acls |
|
27 | from ziggurat_foundations.models.services.user import UserService | |
|
28 | from ziggurat_foundations.models.services.resource import ResourceService | |
|
27 | 29 | import defusedxml.ElementTree as ElementTree |
|
28 | 30 | import urllib.request, urllib.error, urllib.parse |
|
29 | 31 | import logging |
|
30 | 32 | import re |
|
31 | 33 | from xml.sax.saxutils import quoteattr |
|
32 | 34 | |
|
33 | 35 | log = logging.getLogger(__name__) |
|
34 | 36 | |
|
35 | 37 | |
|
36 | 38 | def groupfinder(userid, request): |
|
37 | 39 | if userid and hasattr(request, 'user') and request.user: |
|
38 | 40 | groups = ['group:%s' % g.id for g in request.user.groups] |
|
39 | 41 | return groups |
|
40 | 42 | return [] |
|
41 | 43 | |
|
42 | 44 | |
|
43 | 45 | class AuthTokenAuthenticationPolicy(CallbackAuthenticationPolicy): |
|
44 | 46 | def __init__(self, callback=None): |
|
45 | 47 | self.callback = callback |
|
46 | 48 | |
|
47 | 49 | def remember(self, request, principal, **kw): |
|
48 | 50 | return [] |
|
49 | 51 | |
|
50 | 52 | def forget(self, request): |
|
51 | 53 | return [] |
|
52 | 54 | |
|
53 | 55 | def unauthenticated_userid(self, request): |
|
54 | 56 | token = request.headers.get('x-appenlight-auth-token') |
|
55 | 57 | if token: |
|
56 | 58 | auth_token = AuthTokenService.by_token(token) |
|
57 | 59 | if auth_token and not auth_token.is_expired: |
|
58 | 60 | log.info('%s is valid' % auth_token) |
|
59 | 61 | return auth_token.owner_id |
|
60 | 62 | elif auth_token: |
|
61 | 63 | log.warning('%s is expired' % auth_token) |
|
62 | 64 | else: |
|
63 | 65 | log.warning('token: %s is not found' % token) |
|
64 | 66 | |
|
65 | 67 | def authenticated_userid(self, request): |
|
66 | 68 | return self.unauthenticated_userid(request) |
|
67 | 69 | |
|
68 | 70 | |
|
69 | 71 | def rewrite_root_perm(perm_user, perm_name): |
|
70 | 72 | """ |
|
71 | 73 | Translates root_administration into ALL_PERMISSIONS object |
|
72 | 74 | """ |
|
73 | 75 | if perm_name == 'root_administration': |
|
74 | 76 | return (Allow, perm_user, ALL_PERMISSIONS,) |
|
75 | 77 | else: |
|
76 | 78 | return (Allow, perm_user, perm_name,) |
|
77 | 79 | |
|
78 | 80 | |
|
79 | 81 | def add_root_superperm(request, context): |
|
80 | 82 | """ |
|
81 | 83 | Adds ALL_PERMISSIONS to every resource if user somehow has 'root_permission' |
|
82 | 84 | non-resource permission |
|
83 | 85 | """ |
|
84 | 86 | if hasattr(request, 'user') and request.user: |
|
85 | acls = permission_to_04_acls(request.user.permissions) | |
|
87 | acls = permission_to_04_acls(UserService.permissions(request.user)) | |
|
86 | 88 | for perm_user, perm_name in acls: |
|
87 | 89 | if perm_name == 'root_administration': |
|
88 | 90 | context.__acl__.append(rewrite_root_perm(perm_user, perm_name)) |
|
89 | 91 | |
|
90 | 92 | |
|
91 | 93 | class RootFactory(object): |
|
92 | 94 | """ |
|
93 | 95 | General factory for non-resource/report specific pages |
|
94 | 96 | """ |
|
95 | 97 | |
|
96 | 98 | def __init__(self, request): |
|
97 | 99 | self.__acl__ = [(Allow, Authenticated, 'authenticated'), |
|
98 | 100 | (Allow, Authenticated, 'create_resources')] |
|
99 | 101 | # general page factory - append custom non resource permissions |
|
100 | 102 | if hasattr(request, 'user') and request.user: |
|
101 | acls = permission_to_04_acls(request.user.permissions) | |
|
103 | acls = permission_to_04_acls(UserService.permissions(request.user)) | |
|
102 | 104 | for perm_user, perm_name in acls: |
|
103 | 105 | self.__acl__.append(rewrite_root_perm(perm_user, perm_name)) |
|
104 | 106 | |
|
105 | 107 | class ResourceFactory(object): |
|
106 | 108 | """ |
|
107 | 109 | Checks permissions to specific resource based on user permissions or |
|
108 | 110 | API key headers |
|
109 | 111 | """ |
|
110 | 112 | |
|
111 | 113 | def __init__(self, request): |
|
112 | 114 | Resource = appenlight.models.resource.Resource |
|
113 | 115 | |
|
114 | 116 | self.__acl__ = [] |
|
115 | 117 | resource_id = request.matchdict.get("resource_id", |
|
116 | 118 | request.GET.get("resource_id")) |
|
117 | 119 | resource_id = to_integer_safe(resource_id) |
|
118 | self.resource = Resource.by_resource_id(resource_id) \ | |
|
120 | self.resource = ResourceService.by_resource_id(resource_id) \ | |
|
119 | 121 | if resource_id else None |
|
120 | 122 | if self.resource and request.user: |
|
121 | 123 | self.__acl__ = self.resource.__acl__ |
|
122 | permissions = self.resource.perms_for_user(request.user) | |
|
124 | permissions = ResourceService.perms_for_user(self.resource, request.user) | |
|
123 | 125 | for perm_user, perm_name in permission_to_04_acls(permissions): |
|
124 | 126 | self.__acl__.append(rewrite_root_perm(perm_user, perm_name)) |
|
125 | 127 | add_root_superperm(request, self) |
|
126 | 128 | |
|
127 | 129 | |
|
128 | 130 | class ResourceReportFactory(object): |
|
129 | 131 | """ |
|
130 | 132 | Checks permissions to specific resource based on user permissions or |
|
131 | 133 | API key headers |
|
132 | 134 | Resource is fetched based on report group information |
|
133 | 135 | """ |
|
134 | 136 | |
|
135 | 137 | def __init__(self, request): |
|
136 | 138 | Resource = appenlight.models.resource.Resource |
|
137 | 139 | |
|
138 | 140 | self.__acl__ = [] |
|
139 | 141 | group_id = request.matchdict.get("group_id", |
|
140 | 142 | request.params.get("group_id")) |
|
141 | 143 | group_id = to_integer_safe(group_id) |
|
142 | 144 | self.report_group = ReportGroupService.by_id( |
|
143 | 145 | group_id) if group_id else None |
|
144 | 146 | if not self.report_group: |
|
145 | 147 | raise HTTPNotFound() |
|
146 | 148 | |
|
147 | 149 | self.public = self.report_group.public |
|
148 | self.resource = Resource.by_resource_id(self.report_group.resource_id) \ | |
|
150 | self.resource = ResourceService.by_resource_id(self.report_group.resource_id) \ | |
|
149 | 151 | if self.report_group else None |
|
150 | 152 | |
|
151 | 153 | if self.resource: |
|
152 | 154 | self.__acl__ = self.resource.__acl__ |
|
153 | 155 | if request.user: |
|
154 | permissions = self.resource.perms_for_user(request.user) | |
|
156 | permissions = ResourceService.perms_for_user(self.resource, request.user) | |
|
155 | 157 | for perm_user, perm_name in permission_to_04_acls(permissions): |
|
156 | 158 | self.__acl__.append(rewrite_root_perm(perm_user, perm_name)) |
|
157 | 159 | if self.public: |
|
158 | 160 | self.__acl__.append((Allow, Everyone, 'view',)) |
|
159 | 161 | if not request.user: |
|
160 | 162 | # unauthed users need to visit using both group and report pair |
|
161 | 163 | report_id = request.params.get('reportId', |
|
162 | 164 | request.params.get('report_id', -1)) |
|
163 | 165 | report = self.report_group.get_report(report_id, public=True) |
|
164 | 166 | if not report: |
|
165 | 167 | raise HTTPNotFound() |
|
166 | 168 | add_root_superperm(request, self) |
|
167 | 169 | |
|
168 | 170 | |
|
169 | 171 | class APIFactory(object): |
|
170 | 172 | """ |
|
171 | 173 | Checks permissions to perform client API actions based on keys |
|
172 | 174 | """ |
|
173 | 175 | |
|
174 | 176 | def __init__(self, request): |
|
175 | 177 | self.__acl__ = [] |
|
176 | 178 | self.possibly_public = False |
|
177 | 179 | private_api_key = request.headers.get( |
|
178 | 180 | 'x-appenlight-api-key', |
|
179 | 181 | request.params.get('api_key') |
|
180 | 182 | ) |
|
181 | 183 | log.debug("private key: %s" % private_api_key) |
|
182 | 184 | if private_api_key: |
|
183 | 185 | self.resource = ApplicationService.by_api_key_cached()( |
|
184 | 186 | private_api_key) |
|
185 | 187 | # then try public key |
|
186 | 188 | else: |
|
187 | 189 | public_api_key = request.headers.get( |
|
188 | 190 | 'x-appenlight-public-api-key', |
|
189 | 191 | request.GET.get('public_api_key')) |
|
190 | 192 | log.debug("public key: %s" % public_api_key) |
|
191 | 193 | self.resource = ApplicationService.by_public_api_key( |
|
192 | 194 | public_api_key, from_cache=True, request=request) |
|
193 | 195 | self.possibly_public = True |
|
194 | 196 | if self.resource: |
|
195 | 197 | self.__acl__.append((Allow, Everyone, 'create',)) |
|
196 | 198 | |
|
197 | 199 | |
|
198 | 200 | class AirbrakeV2APIFactory(object): |
|
199 | 201 | """ |
|
200 | 202 | Check permission based on Airbrake XML report |
|
201 | 203 | """ |
|
202 | 204 | |
|
203 | 205 | def __init__(self, request): |
|
204 | 206 | self.__acl__ = [] |
|
205 | 207 | self.possibly_public = False |
|
206 | 208 | fixed_xml_data = '' |
|
207 | 209 | try: |
|
208 | 210 | data = request.GET.get('data') |
|
209 | 211 | if data: |
|
210 | 212 | self.possibly_public = True |
|
211 | 213 | except (UnicodeDecodeError, UnicodeEncodeError) as exc: |
|
212 | 214 | log.warning( |
|
213 | 215 | 'Problem parsing Airbrake data: %s, failed decoding' % exc) |
|
214 | 216 | raise HTTPBadRequest() |
|
215 | 217 | try: |
|
216 | 218 | if not data: |
|
217 | 219 | data = request.body |
|
218 | 220 | # fix Airbrake JS client not escaping the line method attribute |
|
219 | 221 | |
|
220 | 222 | def repl(input): |
|
221 | 223 | return 'line method=%s file' % quoteattr(input.group(1)) |
|
222 | 224 | |
|
223 | 225 | fixed_xml_data = re.sub('line method="(.*?)" file', repl, data) |
|
224 | 226 | root = ElementTree.fromstring(fixed_xml_data) |
|
225 | 227 | except Exception as exc: |
|
226 | 228 | log.info( |
|
227 | 229 | 'Problem parsing Airbrake ' |
|
228 | 230 | 'data: %s, trying unquoting' % exc) |
|
229 | 231 | self.possibly_public = True |
|
230 | 232 | try: |
|
231 | 233 | root = ElementTree.fromstring(urllib.parse.unquote(fixed_xml_data)) |
|
232 | 234 | except Exception as exc: |
|
233 | 235 | log.warning('Problem parsing Airbrake ' |
|
234 | 236 | 'data: %s, failed completely' % exc) |
|
235 | 237 | raise HTTPBadRequest() |
|
236 | 238 | self.airbrake_xml_etree = root |
|
237 | 239 | api_key = root.findtext('api-key', '') |
|
238 | 240 | |
|
239 | 241 | self.resource = ApplicationService.by_api_key_cached()(api_key) |
|
240 | 242 | if not self.resource: |
|
241 | 243 | self.resource = ApplicationService.by_public_api_key(api_key, |
|
242 | 244 | from_cache=True, |
|
243 | 245 | request=request) |
|
244 | 246 | if self.resource: |
|
245 | 247 | self.possibly_public = True |
|
246 | 248 | |
|
247 | 249 | if self.resource: |
|
248 | 250 | self.__acl__.append((Allow, Everyone, 'create',)) |
|
249 | 251 | |
|
250 | 252 | |
|
251 | 253 | def parse_sentry_header(header): |
|
252 | 254 | parsed = header.split(' ', 1)[1].split(',') or [] |
|
253 | 255 | return dict([x.strip().split('=') for x in parsed]) |
|
254 | 256 | |
|
255 | 257 | |
|
256 | 258 | class SentryAPIFactory(object): |
|
257 | 259 | """ |
|
258 | 260 | Check permission based on Sentry payload |
|
259 | 261 | """ |
|
260 | 262 | |
|
261 | 263 | def __init__(self, request): |
|
262 | 264 | self.__acl__ = [] |
|
263 | 265 | self.possibly_public = False |
|
264 | 266 | if request.headers.get('X-Sentry-Auth', '').startswith('Sentry'): |
|
265 | 267 | header_string = request.headers['X-Sentry-Auth'] |
|
266 | 268 | result = parse_sentry_header(header_string) |
|
267 | 269 | elif request.headers.get('Authorization', '').startswith('Sentry'): |
|
268 | 270 | header_string = request.headers['Authorization'] |
|
269 | 271 | result = parse_sentry_header(header_string) |
|
270 | 272 | else: |
|
271 | 273 | result = dict((k, v) for k, v in list(request.GET.items()) |
|
272 | 274 | if k.startswith('sentry_')) |
|
273 | 275 | key = result.get('sentry_key') |
|
274 | 276 | log.info('sentry request {}'.format(result)) |
|
275 | 277 | |
|
276 | 278 | self.resource = ApplicationService.by_api_key_cached()(key) |
|
277 | 279 | if not self.resource or \ |
|
278 | 280 | result.get('sentry_client', '').startswith('raven-js'): |
|
279 | 281 | self.resource = ApplicationService.by_public_api_key( |
|
280 | 282 | key, from_cache=True, request=request) |
|
281 | 283 | if self.resource: |
|
282 | 284 | self.__acl__.append((Allow, Everyone, 'create',)) |
|
283 | 285 | |
|
284 | 286 | |
|
285 | 287 | class ResourcePluginConfigFactory(object): |
|
286 | 288 | |
|
287 | 289 | def __init__(self, request): |
|
288 | 290 | Resource = appenlight.models.resource.Resource |
|
289 | 291 | self.__acl__ = [] |
|
290 | 292 | self.resource = None |
|
291 | 293 | plugin_id = to_integer_safe(request.matchdict.get('id')) |
|
292 | 294 | self.plugin = PluginConfigService.by_id(plugin_id) |
|
293 | 295 | if not self.plugin: |
|
294 | 296 | raise HTTPNotFound() |
|
295 | 297 | if self.plugin.resource_id: |
|
296 | self.resource = Resource.by_resource_id(self.plugin.resource_id) | |
|
298 | self.resource = ResourceService.by_resource_id(self.plugin.resource_id) | |
|
297 | 299 | if self.resource: |
|
298 | 300 | self.__acl__ = self.resource.__acl__ |
|
299 | 301 | if request.user and self.resource: |
|
300 |
permissions = self.resource |
|
|
302 | permissions = ResourceService.perms_for_user(self.resource, request.user) | |
|
301 | 303 | for perm_user, perm_name in permission_to_04_acls(permissions): |
|
302 | 304 | self.__acl__.append(rewrite_root_perm(perm_user, perm_name)) |
|
303 | 305 | |
|
304 | 306 | add_root_superperm(request, self) |
|
305 | 307 | |
|
306 | 308 | |
|
307 | 309 | class ResourceJSONBodyFactory(object): |
|
308 | 310 | """ |
|
309 | 311 | Checks permissions to specific resource based on user permissions or |
|
310 | 312 | API key headers from json body |
|
311 | 313 | """ |
|
312 | 314 | |
|
313 | 315 | def __init__(self, request): |
|
314 | 316 | Resource = appenlight.models.resource.Resource |
|
315 | 317 | |
|
316 | 318 | self.__acl__ = [] |
|
317 | 319 | resource_id = request.unsafe_json_body().get('resource_id') |
|
318 | 320 | resource_id = to_integer_safe(resource_id) |
|
319 | self.resource = Resource.by_resource_id(resource_id) | |
|
321 | self.resource = ResourceService.by_resource_id(resource_id) | |
|
320 | 322 | if self.resource and request.user: |
|
321 | 323 | self.__acl__ = self.resource.__acl__ |
|
322 |
permissions = self.resource |
|
|
324 | permissions = ResourceService.perms_for_user(self.resource, request.user) | |
|
323 | 325 | for perm_user, perm_name in permission_to_04_acls(permissions): |
|
324 | 326 | self.__acl__.append(rewrite_root_perm(perm_user, perm_name)) |
|
325 | 327 | add_root_superperm(request, self) |
|
326 | 328 | |
|
327 | 329 | |
|
328 | 330 | class ResourcePluginMixedFactory(object): |
|
329 | 331 | def __init__(self, request): |
|
330 | 332 | Resource = appenlight.models.resource.Resource |
|
331 | 333 | self.__acl__ = [] |
|
332 | 334 | json_body = request.safe_json_body |
|
333 | 335 | self.resource = None |
|
334 | 336 | if json_body: |
|
335 | 337 | resource_id = json_body.get('resource_id') |
|
336 | 338 | else: |
|
337 | 339 | resource_id = request.GET.get('resource_id') |
|
338 | 340 | if resource_id: |
|
339 | 341 | resource_id = to_integer_safe(resource_id) |
|
340 | self.resource = Resource.by_resource_id(resource_id) | |
|
342 | self.resource = ResourceService.by_resource_id(resource_id) | |
|
341 | 343 | if self.resource and request.user: |
|
342 | 344 | self.__acl__ = self.resource.__acl__ |
|
343 |
permissions = self.resource |
|
|
345 | permissions = ResourceService.perms_for_user(self.resource, request.user) | |
|
344 | 346 | for perm_user, perm_name in permission_to_04_acls(permissions): |
|
345 | 347 | self.__acl__.append(rewrite_root_perm(perm_user, perm_name)) |
|
346 | 348 | add_root_superperm(request, self) |
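
parse_sentry_header splits the scheme prefix off the header and folds the
comma-separated key=value pairs into a dict; a quick worked example:

    parse_sentry_header('Sentry sentry_key=abc123, sentry_version=7')
    # -> {'sentry_key': 'abc123', 'sentry_version': '7'}
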
@@ -1,1592 +1,1592 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import copy |
|
18 | 18 | import logging |
|
19 | 19 | import mock |
|
20 | 20 | import pyramid |
|
21 | 21 | import pytest |
|
22 | 22 | import sqlalchemy as sa |
|
23 | 23 | import webob |
|
24 | 24 | |
|
25 | 25 | from datetime import datetime |
|
26 | 26 | from pyramid import testing |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | from appenlight.models import DBSession |
|
30 | 30 | from appenlight.lib.ext_json import json |
|
31 | 31 | |
|
32 | 32 | |
|
33 | 33 | log = logging.getLogger(__name__) |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | class DummyContext(object): |
|
37 | 37 | pass |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | @pytest.mark.usefixtures('base_app') |
|
41 | 41 | class BasicTest(object): |
|
42 | 42 | pass |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | @pytest.mark.usefixtures('base_app') |
|
46 | 46 | class TestMigration(object): |
|
47 | 47 | def test_migration(self): |
|
48 | 48 | assert 1 == 1 |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | class TestSentryProto_7(object): |
|
52 | 52 | def test_log_payload(self): |
|
53 | 53 | import appenlight.tests.payload_examples as payload_examples |
|
54 | 54 | from appenlight.lib.enums import ParsedSentryEventType |
|
55 | 55 | from appenlight.lib.utils.sentry import parse_sentry_event |
|
56 | 56 | event_dict, event_type = parse_sentry_event( |
|
57 | 57 | payload_examples.SENTRY_LOG_PAYLOAD_7) |
|
58 | 58 | assert ParsedSentryEventType.LOG == event_type |
|
59 | 59 | assert event_dict['log_level'] == 'CRITICAL' |
|
60 | 60 | assert event_dict['message'] == 'TEST from django logging' |
|
61 | 61 | assert event_dict['namespace'] == 'testlogger' |
|
62 | 62 | assert event_dict['request_id'] == '9a6172f2e6d2444582f83a6c333d9cfb' |
|
63 | 63 | assert event_dict['server'] == 'ergo-virtual-machine' |
|
64 | 64 | assert event_dict['date'] == datetime.utcnow().date().strftime( |
|
65 | 65 | '%Y-%m-%dT%H:%M:%SZ') |
|
66 | 66 | tags = [('site', 'example.com'), |
|
67 | 67 | ('sys.argv', ["'manage.py'", "'runserver'"]), |
|
68 | 68 | ('price', 6), |
|
69 | 69 | ('tag', "'extra'"), |
|
70 | 70 | ('dupa', True), |
|
71 | 71 | ('project', 'sentry'), |
|
72 | 72 | ('sentry_culprit', 'testlogger in index'), |
|
73 | 73 | ('sentry_language', 'python'), |
|
74 | 74 | ('sentry_release', 'test')] |
|
75 | 75 | assert sorted(event_dict['tags']) == sorted(tags) |
|
76 | 76 | |
|
77 | 77 | def test_report_payload(self): |
|
78 | 78 | import appenlight.tests.payload_examples as payload_examples |
|
79 | 79 | from appenlight.lib.enums import ParsedSentryEventType |
|
80 | 80 | from appenlight.lib.utils.sentry import parse_sentry_event |
|
81 | 81 | utcnow = datetime.utcnow().date().strftime('%Y-%m-%dT%H:%M:%SZ') |
|
82 | 82 | event_dict, event_type = parse_sentry_event( |
|
83 | 83 | payload_examples.SENTRY_PYTHON_PAYLOAD_7) |
|
84 | 84 | assert ParsedSentryEventType.ERROR_REPORT == event_type |
|
85 | 85 | assert event_dict['client'] == 'sentry' |
|
86 | 86 | assert event_dict[ |
|
87 | 87 | 'error'] == 'Exception: test 500 ' \ |
|
88 | 88 | '\u0142\xf3\u201c\u0107\u201c\u0107\u017c\u0105' |
|
89 | 89 | assert event_dict['language'] == 'python' |
|
90 | 90 | assert event_dict['ip'] == '127.0.0.1' |
|
91 | 91 | assert event_dict['request_id'] == '9fae652c8c1c4d6a8eee09260f613a98' |
|
92 | 92 | assert event_dict['server'] == 'ergo-virtual-machine' |
|
93 | 93 | assert event_dict['start_time'] == utcnow |
|
94 | 94 | assert event_dict['url'] == 'http://127.0.0.1:8000/error' |
|
95 | 95 | assert event_dict['user_agent'] == 'Mozilla/5.0 (X11; Linux x86_64) ' \ |
|
96 | 96 | 'AppleWebKit/537.36 (KHTML, ' \ |
|
97 | 97 | 'like Gecko) Chrome/47.0.2526.106 ' \ |
|
98 | 98 | 'Safari/537.36' |
|
99 | 99 | assert event_dict['view_name'] == 'djangoapp.views in error' |
|
100 | 100 | tags = [('site', 'example.com'), ('sentry_release', 'test')] |
|
101 | 101 | assert sorted(event_dict['tags']) == sorted(tags) |
|
102 | 102 | extra = [('sys.argv', ["'manage.py'", "'runserver'"]), |
|
103 | 103 | ('project', 'sentry')] |
|
104 | 104 | assert sorted(event_dict['extra']) == sorted(extra) |
|
105 | 105 | request = event_dict['request'] |
|
106 | 106 | assert request['url'] == 'http://127.0.0.1:8000/error' |
|
107 | 107 | assert request['cookies'] == {'appenlight': 'X'} |
|
108 | 108 | assert request['data'] is None |
|
109 | 109 | assert request['method'] == 'GET' |
|
110 | 110 | assert request['query_string'] == '' |
|
111 | 111 | assert request['env'] == {'REMOTE_ADDR': '127.0.0.1', |
|
112 | 112 | 'SERVER_NAME': 'localhost', |
|
113 | 113 | 'SERVER_PORT': '8000'} |
|
114 | 114 | assert request['headers'] == { |
|
115 | 115 | 'Accept': 'text/html,application/xhtml+xml,' |
|
116 | 116 | 'application/xml;q=0.9,image/webp,*/*;q=0.8', |
|
117 | 117 | 'Accept-Encoding': 'gzip, deflate, sdch', |
|
118 | 118 | 'Accept-Language': 'en-US,en;q=0.8,pl;q=0.6', |
|
119 | 119 | 'Connection': 'keep-alive', |
|
120 | 120 | 'Content-Length': '', |
|
121 | 121 | 'Content-Type': 'text/plain', |
|
122 | 122 | 'Cookie': 'appenlight=X', |
|
123 | 123 | 'Dnt': '1', |
|
124 | 124 | 'Host': '127.0.0.1:8000', |
|
125 | 125 | 'Upgrade-Insecure-Requests': '1', |
|
126 | 126 | 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) ' |
|
127 | 127 | 'AppleWebKit/537.36 (KHTML, like Gecko) ' |
|
128 | 128 | 'Chrome/47.0.2526.106 Safari/537.36'} |
|
129 | 129 | traceback = event_dict['traceback'] |
|
130 | 130 | assert traceback[0]['cline'] == 'response = wrapped_callback(request, ' \ |
|
131 | 131 | '*callback_args, **callback_kwargs)' |
|
132 | 132 | assert traceback[0]['file'] == 'django/core/handlers/base.py' |
|
133 | 133 | assert traceback[0]['fn'] == 'get_response' |
|
134 | 134 | assert traceback[0]['line'] == 111 |
|
135 | 135 | assert traceback[0]['module'] == 'django.core.handlers.base' |
|
136 | 136 | |
|
137 | 137 | assert traceback[1]['cline'] == "raise Exception(u'test 500 " \ |
|
138 | 138 | "\u0142\xf3\u201c\u0107\u201c\u0107" \ |
|
139 | 139 | "\u017c\u0105')" |
|
140 | 140 | assert traceback[1]['file'] == 'djangoapp/views.py' |
|
141 | 141 | assert traceback[1]['fn'] == 'error' |
|
142 | 142 | assert traceback[1]['line'] == 84 |
|
143 | 143 | assert traceback[1]['module'] == 'djangoapp.views' |
|
144 | 144 | assert sorted(traceback[1]['vars']) == sorted([ |
|
145 | 145 | ('c', |
|
146 | 146 | '<sqlite3.Cursor object at 0x7fe7c82af8f0>'), |
|
147 | 147 | ('request', |
|
148 | 148 | '<WSGIRequest at 0x140633490316304>'), |
|
149 | 149 | ('conn', |
|
150 | 150 | '<sqlite3.Connection object at 0x7fe7c8b23bf8>')]) |
|
151 | 151 | |
|
152 | 152 | |
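Both tests above rely on parse_sentry_event returning an (event_dict, event_type) pair whose type separates plain log records from error reports. The real parser is not shown in this diff; a hypothetical classifier illustrating the split (the key names are assumptions about protocol-7 payloads, not confirmed by the source) might look like:

    # hypothetical sketch only -- not appenlight.lib.utils.sentry.parse_sentry_event
    def classify_sentry_event(payload):
        # error reports carry exception/stacktrace interfaces, log records do not
        if 'exception' in payload or 'sentry.interfaces.Exception' in payload:
            return 'ERROR_REPORT'
        return 'LOG'

    assert classify_sentry_event({'message': 'TEST from django logging'}) == 'LOG'
    assert classify_sentry_event({'exception': {'values': []}}) == 'ERROR_REPORT'
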
|
153 | 153 | class TestAPIReports_0_5_Validation(object): |
|
154 | 154 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
155 | 155 | def test_no_payload(self, dummy_json): |
|
156 | 156 | import colander |
|
157 | 157 | from appenlight.validators import ReportListSchema_0_5 |
|
158 | 158 | utcnow = datetime.utcnow() |
|
159 | 159 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
160 | 160 | with pytest.raises(colander.Invalid): |
|
161 | 161 | schema.deserialize(dummy_json) |
|
162 | 162 | |
|
163 | 163 | def test_minimal_payload(self): |
|
164 | 164 | dummy_json = [{}] |
|
165 | 165 | import colander |
|
166 | 166 | from appenlight.validators import ReportListSchema_0_5 |
|
167 | 167 | utcnow = datetime.utcnow() |
|
168 | 168 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
169 | 169 | with pytest.raises(colander.Invalid): |
|
170 | 170 | schema.deserialize(dummy_json) |
|
171 | 171 | |
|
172 | 172 | def test_minimal_payload_report_details(self): |
|
173 | 173 | dummy_json = [{'report_details': [{}]}] |
|
174 | 174 | from appenlight.validators import ReportListSchema_0_5 |
|
175 | 175 | utcnow = datetime.utcnow() |
|
176 | 176 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
177 | 177 | |
|
178 | 178 | deserialized = schema.deserialize(dummy_json) |
|
179 | 179 | |
|
180 | 180 | expected_deserialization = [ |
|
181 | 181 | {'language': 'unknown', |
|
182 | 182 | 'server': 'unknown', |
|
183 | 183 | 'occurences': 1, |
|
184 | 184 | 'priority': 5, |
|
185 | 185 | 'view_name': '', |
|
186 | 186 | 'client': 'unknown', |
|
187 | 187 | 'http_status': 200, |
|
188 | 188 | 'error': '', |
|
189 | 189 | 'tags': None, |
|
190 | 190 | 'username': '', |
|
191 | 191 | 'traceback': None, |
|
192 | 192 | 'extra': None, |
|
193 | 193 | 'url': '', |
|
194 | 194 | 'ip': None, |
|
195 | 195 | 'start_time': utcnow, |
|
196 | 196 | 'group_string': None, |
|
197 | 197 | 'request': {}, |
|
198 | 198 | 'request_stats': None, |
|
199 | 199 | 'end_time': None, |
|
200 | 200 | 'request_id': '', |
|
201 | 201 | 'message': '', |
|
202 | 202 | 'slow_calls': [], |
|
203 | 203 | 'user_agent': '' |
|
204 | 204 | } |
|
205 | 205 | ] |
|
206 | 206 | assert deserialized == expected_deserialization |
|
207 | 207 | |
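The expected dictionary above is produced entirely by schema defaults: every field of the near-empty payload is back-filled during deserialization. A self-contained colander sketch with a few hypothetical fields shows the mechanism (missing= supplies the value when the key is absent):

    import colander

    class MiniReportSchema(colander.MappingSchema):
        language = colander.SchemaNode(colander.String(), missing='unknown')
        priority = colander.SchemaNode(colander.Int(), missing=5)
        http_status = colander.SchemaNode(colander.Int(), missing=200)

    assert MiniReportSchema().deserialize({}) == {
        'language': 'unknown', 'priority': 5, 'http_status': 200}
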
|
208 | 208 | def test_full_payload(self): |
|
209 | 209 | import appenlight.tests.payload_examples as payload_examples |
|
210 | 210 | from appenlight.validators import ReportListSchema_0_5 |
|
211 | 211 | PYTHON_PAYLOAD = copy.deepcopy(payload_examples.PYTHON_PAYLOAD_0_5) |
|
212 | 212 | utcnow = datetime.utcnow() |
|
213 | 213 | schema = ReportListSchema_0_5().bind(utcnow=utcnow) |
|
214 | 214 | PYTHON_PAYLOAD["tags"] = [("foo", 1), ("action", "test"), ("baz", 1.1), |
|
215 | 215 | ("date", |
|
216 | 216 | utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))] |
|
217 | 217 | dummy_json = [PYTHON_PAYLOAD] |
|
218 | 218 | deserialized = schema.deserialize(dummy_json)[0] |
|
219 | 219 | assert deserialized['error'] == PYTHON_PAYLOAD['error'] |
|
220 | 220 | assert deserialized['language'] == PYTHON_PAYLOAD['language'] |
|
221 | 221 | assert deserialized['server'] == PYTHON_PAYLOAD['server'] |
|
222 | 222 | assert deserialized['priority'] == PYTHON_PAYLOAD['priority'] |
|
223 | 223 | assert deserialized['view_name'] == PYTHON_PAYLOAD['view_name'] |
|
224 | 224 | assert deserialized['client'] == PYTHON_PAYLOAD['client'] |
|
225 | 225 | assert deserialized['http_status'] == PYTHON_PAYLOAD['http_status'] |
|
226 | 226 | assert deserialized['error'] == PYTHON_PAYLOAD['error'] |
|
227 | 227 | assert deserialized['occurences'] == PYTHON_PAYLOAD['occurences'] |
|
228 | 228 | assert deserialized['username'] == PYTHON_PAYLOAD['username'] |
|
229 | 229 | assert deserialized['traceback'] == PYTHON_PAYLOAD['traceback'] |
|
230 | 230 | assert deserialized['url'] == PYTHON_PAYLOAD['url'] |
|
231 | 231 | assert deserialized['ip'] == PYTHON_PAYLOAD['ip'] |
|
232 | 232 | assert deserialized['start_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \ |
|
233 | 233 | PYTHON_PAYLOAD['start_time'] |
|
234 | 234 | assert deserialized['ip'] == PYTHON_PAYLOAD['ip'] |
|
235 | 235 | assert deserialized['group_string'] is None |
|
236 | 236 | assert deserialized['request_stats'] == PYTHON_PAYLOAD['request_stats'] |
|
237 | 237 | assert deserialized['end_time'].strftime('%Y-%m-%dT%H:%M:%S.0') == \ |
|
238 | 238 | PYTHON_PAYLOAD['end_time'] |
|
239 | 239 | assert deserialized['request_id'] == PYTHON_PAYLOAD['request_id'] |
|
240 | 240 | assert deserialized['message'] == PYTHON_PAYLOAD['message'] |
|
241 | 241 | assert deserialized['user_agent'] == PYTHON_PAYLOAD['user_agent'] |
|
242 | 242 | assert deserialized['slow_calls'][0]['start'].strftime( |
|
243 | 243 | '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][ |
|
244 | 244 | 'start'] |
|
245 | 245 | assert deserialized['slow_calls'][0]['end'].strftime( |
|
246 | 246 | '%Y-%m-%dT%H:%M:%S.0') == PYTHON_PAYLOAD['slow_calls'][0][ |
|
247 | 247 | 'end'] |
|
248 | 248 | assert deserialized['slow_calls'][0]['statement'] == \ |
|
249 | 249 | PYTHON_PAYLOAD['slow_calls'][0]['statement'] |
|
250 | 250 | assert deserialized['slow_calls'][0]['parameters'] == \ |
|
251 | 251 | PYTHON_PAYLOAD['slow_calls'][0]['parameters'] |
|
252 | 252 | assert deserialized['slow_calls'][0]['type'] == \ |
|
253 | 253 | PYTHON_PAYLOAD['slow_calls'][0]['type'] |
|
254 | 254 | assert deserialized['slow_calls'][0]['subtype'] == \ |
|
255 | 255 | PYTHON_PAYLOAD['slow_calls'][0]['subtype'] |
|
256 | 256 | assert deserialized['slow_calls'][0]['location'] == '' |
|
257 | 257 | assert deserialized['tags'] == [ |
|
258 | 258 | ('foo', 1), ('action', 'test'), |
|
259 | 259 | ('baz', 1.1), ('date', utcnow.strftime('%Y-%m-%dT%H:%M:%S.0'))] |
|
260 | 260 | |
|
261 | 261 | |
|
262 | 262 | @pytest.mark.usefixtures('log_schema') |
|
263 | 263 | class TestAPILogsValidation(object): |
|
264 | 264 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
265 | 265 | def test_no_payload(self, dummy_json, log_schema): |
|
266 | 266 | import colander |
|
267 | 267 | |
|
268 | 268 | with pytest.raises(colander.Invalid): |
|
269 | 269 | log_schema.deserialize(dummy_json) |
|
270 | 270 | |
|
271 | 271 | def test_minimal_payload(self, log_schema): |
|
272 | 272 | dummy_json = [{}] |
|
273 | 273 | deserialized = log_schema.deserialize(dummy_json)[0] |
|
274 | 274 | expected = {'log_level': 'UNKNOWN', |
|
275 | 275 | 'namespace': '', |
|
276 | 276 | 'server': 'unknown', |
|
277 | 277 | 'request_id': '', |
|
278 | 278 | 'primary_key': None, |
|
279 | 279 | 'date': datetime.utcnow(), |
|
280 | 280 | 'message': '', |
|
281 | 281 | 'tags': None} |
|
282 | 282 | assert deserialized['log_level'] == expected['log_level'] |
|
283 | 283 | assert deserialized['message'] == expected['message'] |
|
284 | 284 | assert deserialized['namespace'] == expected['namespace'] |
|
285 | 285 | assert deserialized['request_id'] == expected['request_id'] |
|
286 | 286 | assert deserialized['server'] == expected['server'] |
|
287 | 287 | assert deserialized['tags'] == expected['tags'] |
|
288 | 288 | assert deserialized['primary_key'] == expected['primary_key'] |
|
289 | 289 | |
|
290 | 290 | def test_normal_payload(self, log_schema): |
|
291 | 291 | import appenlight.tests.payload_examples as payload_examples |
|
292 | 292 | deserialized = log_schema.deserialize(payload_examples.LOG_EXAMPLES)[0] |
|
293 | 293 | expected = payload_examples.LOG_EXAMPLES[0] |
|
294 | 294 | assert deserialized['log_level'] == expected['log_level'] |
|
295 | 295 | assert deserialized['message'] == expected['message'] |
|
296 | 296 | assert deserialized['namespace'] == expected['namespace'] |
|
297 | 297 | assert deserialized['request_id'] == expected['request_id'] |
|
298 | 298 | assert deserialized['server'] == expected['server'] |
|
299 | 299 | assert deserialized['date'].strftime('%Y-%m-%dT%H:%M:%S.%f') == \ |
|
300 | 300 | expected['date'] |
|
301 | 301 | assert deserialized['tags'][0][0] == "tag_name" |
|
302 | 302 | assert deserialized['tags'][0][1] == "tag_value" |
|
303 | 303 | assert deserialized['tags'][1][0] == "tag_name2" |
|
304 | 304 | assert deserialized['tags'][1][1] == 2 |
|
305 | 305 | |
|
306 | 306 | def test_normal_payload_date_without_microseconds(self, log_schema): |
|
307 | 307 | import appenlight.tests.payload_examples as payload_examples |
|
308 | 308 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
309 | 309 | LOG_EXAMPLE[0]['date'] = datetime.utcnow().strftime( |
|
310 | 310 | '%Y-%m-%dT%H:%M:%S') |
|
311 | 311 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
312 | 312 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M:%S') == \ |
|
313 | 313 | LOG_EXAMPLE[0]['date'] |
|
314 | 314 | |
|
315 | 315 | def test_normal_payload_date_without_seconds(self, log_schema): |
|
316 | 316 | import appenlight.tests.payload_examples as payload_examples |
|
317 | 317 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
318 | 318 | LOG_EXAMPLE[0]['date'] = datetime.utcnow().date().strftime( |
|
319 | 319 | '%Y-%m-%dT%H:%M') |
|
320 | 320 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
321 | 321 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') == \ |
|
322 | 322 | LOG_EXAMPLE[0]['date'] |
|
323 | 323 | |
|
324 | 324 | def test_payload_empty_date(self, log_schema): |
|
325 | 325 | import appenlight.tests.payload_examples as payload_examples |
|
326 | 326 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
327 | 327 | LOG_EXAMPLE[0]['date'] = None |
|
328 | 328 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
329 | 329 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None |
|
330 | 330 | |
|
331 | 331 | def test_payload_no_date(self, log_schema): |
|
332 | 332 | import appenlight.tests.payload_examples as payload_examples |
|
333 | 333 | LOG_EXAMPLE = copy.deepcopy(payload_examples.LOG_EXAMPLES) |
|
334 | 334 | LOG_EXAMPLE[0].pop('date', None) |
|
335 | 335 | deserialized = log_schema.deserialize(LOG_EXAMPLE) |
|
336 | 336 | assert deserialized[0]['date'].strftime('%Y-%m-%dT%H:%M') is not None |
|
337 | 337 | |
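The four date tests above pin down a tolerant parsing contract: microsecond, second, and minute resolutions are all accepted, and an absent or empty date falls back to the current time bound into the schema. A sketch of that contract, under the assumption that the fallback is utcnow:

    from datetime import datetime

    def parse_log_date(value, utcnow=None):
        utcnow = utcnow or datetime.utcnow()
        if not value:
            return utcnow  # empty or missing date falls back to "now"
        for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S', '%Y-%m-%dT%H:%M'):
            try:
                return datetime.strptime(value, fmt)
            except ValueError:
                continue
        return utcnow

    assert parse_log_date(None) is not None
    assert parse_log_date('2017-05-04T10:30').minute == 30
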
|
338 | 338 | |
|
339 | 339 | @pytest.mark.usefixtures('general_metrics_schema') |
|
340 | 340 | class TestAPIGeneralMetricsValidation(object): |
|
341 | 341 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
342 | 342 | def test_no_payload(self, dummy_json, general_metrics_schema): |
|
343 | 343 | import colander |
|
344 | 344 | |
|
345 | 345 | with pytest.raises(colander.Invalid): |
|
346 | 346 | general_metrics_schema.deserialize(dummy_json) |
|
347 | 347 | |
|
348 | 348 | def test_minimal_payload(self, general_metrics_schema): |
|
349 | 349 | dummy_json = [{'tags': [['counter_a', 15.5], ['counter_b', 63]]}] |
|
350 | 350 | deserialized = general_metrics_schema.deserialize(dummy_json)[0] |
|
351 | 351 | expected = {'namespace': '', |
|
352 | 352 | 'server_name': 'unknown', |
|
353 | 353 | 'tags': [('counter_a', 15.5), ('counter_b', 63)], |
|
354 | 354 | 'timestamp': datetime.utcnow()} |
|
355 | 355 | assert deserialized['namespace'] == expected['namespace'] |
|
356 | 356 | assert deserialized['server_name'] == expected['server_name'] |
|
357 | 357 | assert deserialized['tags'] == expected['tags'] |
|
358 | 358 | |
|
359 | 359 | def test_normal_payload(self, general_metrics_schema): |
|
360 | 360 | import appenlight.tests.payload_examples as payload_examples |
|
361 | 361 | dummy_json = [payload_examples.METRICS_PAYLOAD] |
|
362 | 362 | deserialized = general_metrics_schema.deserialize(dummy_json)[0] |
|
363 | 363 | expected = {'namespace': 'some.monitor', |
|
364 | 364 | 'server_name': 'server.name', |
|
365 | 365 | 'tags': [('usage_foo', 15.5), ('usage_bar', 63)], |
|
366 | 366 | 'timestamp': datetime.utcnow()} |
|
367 | 367 | assert deserialized['namespace'] == expected['namespace'] |
|
368 | 368 | assert deserialized['server_name'] == expected['server_name'] |
|
369 | 369 | assert deserialized['tags'] == expected['tags'] |
|
370 | 370 | |
|
371 | 371 | |
|
372 | 372 | @pytest.mark.usefixtures('request_metrics_schema') |
|
373 | 373 | class TestAPIRequestMetricsValidation(object): |
|
374 | 374 | @pytest.mark.parametrize('dummy_json', ['', {}, [], None]) |
|
375 | 375 | def test_no_payload(self, dummy_json, request_metrics_schema): |
|
376 | 376 | import colander |
|
377 | 377 | |
|
378 | 378 | with pytest.raises(colander.Invalid): |
|
379 | 379 | print(request_metrics_schema.deserialize(dummy_json)) |
|
380 | 380 | |
|
381 | 381 | def test_normal_payload(self, request_metrics_schema): |
|
382 | 382 | import appenlight.tests.payload_examples as payload_examples |
|
383 | 383 | dummy_json = payload_examples.REQUEST_METRICS_EXAMPLES |
|
384 | 384 | deserialized = request_metrics_schema.deserialize(dummy_json)[0] |
|
385 | 385 | expected = {'metrics': [('dir/module:func', |
|
386 | 386 | {'custom': 0.0, |
|
387 | 387 | 'custom_calls': 0.0, |
|
388 | 388 | 'main': 0.01664, |
|
389 | 389 | 'nosql': 0.00061, |
|
390 | 390 | 'nosql_calls': 23.0, |
|
391 | 391 | 'remote': 0.0, |
|
392 | 392 | 'remote_calls': 0.0, |
|
393 | 393 | 'requests': 1, |
|
394 | 394 | 'sql': 0.00105, |
|
395 | 395 | 'sql_calls': 2.0, |
|
396 | 396 | 'tmpl': 0.0, |
|
397 | 397 | 'tmpl_calls': 0.0}), |
|
398 | 398 | ('SomeView.function', |
|
399 | 399 | {'custom': 0.0, |
|
400 | 400 | 'custom_calls': 0.0, |
|
401 | 401 | 'main': 0.647261, |
|
402 | 402 | 'nosql': 0.306554, |
|
403 | 403 | 'nosql_calls': 140.0, |
|
404 | 404 | 'remote': 0.0, |
|
405 | 405 | 'remote_calls': 0.0, |
|
406 | 406 | 'requests': 28, |
|
407 | 407 | 'sql': 0.0, |
|
408 | 408 | 'sql_calls': 0.0, |
|
409 | 409 | 'tmpl': 0.0, |
|
410 | 410 | 'tmpl_calls': 0.0})], |
|
411 | 411 | 'server': 'some.server.hostname', |
|
412 | 412 | 'timestamp': datetime.utcnow()} |
|
413 | 413 | assert deserialized['server'] == expected['server'] |
|
414 | 414 | metric = deserialized['metrics'][0] |
|
415 | 415 | expected_metric = expected['metrics'][0] |
|
416 | 416 | assert metric[0] == expected_metric[0] |
|
417 | 417 | assert sorted(metric[1].items()) == sorted(expected_metric[1].items()) |
|
418 | 418 | |
|
419 | 419 | |
|
420 | 420 | @pytest.mark.usefixtures('default_application') |
|
421 | 421 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
422 | 422 | class TestAPIReportsView(object): |
|
423 | 423 | def test_no_json_payload(self, default_application): |
|
424 | 424 | import colander |
|
425 | 425 | from appenlight.models.services.application import ApplicationService |
|
426 | 426 | from appenlight.views.api import reports_create |
|
427 | 427 | |
|
428 | 428 | context = DummyContext() |
|
429 | 429 | context.resource = ApplicationService.by_id(1) |
|
430 | 430 | request = testing.DummyRequest( |
|
431 | 431 | headers={'Content-Type': 'application/json'}) |
|
432 | 432 | request.unsafe_json_body = '' |
|
433 | 433 | request.context = context |
|
434 | 434 | route = mock.Mock() |
|
435 | 435 | route.name = 'api_reports' |
|
436 | 436 | request.matched_route = route |
|
437 | 437 | with pytest.raises(colander.Invalid): |
|
438 | 438 | response = reports_create(request) |
|
439 | 439 | |
|
440 | 440 | def test_single_proper_json_0_5_payload(self): |
|
441 | 441 | import appenlight.tests.payload_examples as payload_examples |
|
442 | 442 | from appenlight.views.api import reports_create |
|
443 | 443 | from appenlight.models.services.application import ApplicationService |
|
444 | 444 | from appenlight.models.report_group import ReportGroup |
|
445 | 445 | route = mock.Mock() |
|
446 | 446 | route.name = 'api_reports' |
|
447 | 447 | request = pyramid.threadlocal.get_current_request() |
|
448 | 448 | context = DummyContext() |
|
449 | 449 | context.resource = ApplicationService.by_id(1) |
|
450 | 450 | request.context = context |
|
451 | 451 | request.matched_route = route |
|
452 | 452 | PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5 |
|
453 | 453 | request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD)] |
|
454 | 454 | reports_create(request) |
|
455 | 455 | query = DBSession.query(ReportGroup) |
|
456 | 456 | report = query.first() |
|
457 | 457 | assert query.count() == 1 |
|
458 | 458 | assert report.total_reports == 1 |
|
459 | 459 | |
|
460 | 460 | def test_grouping_0_5(self): |
|
461 | 461 | import appenlight.tests.payload_examples as payload_examples |
|
462 | 462 | from appenlight.views.api import reports_create |
|
463 | 463 | from appenlight.models.services.application import ApplicationService |
|
464 | 464 | from appenlight.models.report_group import ReportGroup |
|
465 | 465 | route = mock.Mock() |
|
466 | 466 | route.name = 'api_reports' |
|
467 | 467 | request = pyramid.threadlocal.get_current_request() |
|
468 | 468 | context = DummyContext() |
|
469 | 469 | context.resource = ApplicationService.by_id(1) |
|
470 | 470 | request.context = context |
|
471 | 471 | request.matched_route = route |
|
472 | 472 | PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5 |
|
473 | 473 | request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD), |
|
474 | 474 | copy.deepcopy(PYTHON_PAYLOAD)] |
|
475 | 475 | reports_create(request) |
|
476 | 476 | query = DBSession.query(ReportGroup) |
|
477 | 477 | report = query.first() |
|
478 | 478 | assert query.count() == 1 |
|
479 | 479 | assert report.total_reports == 2 |
|
480 | 480 | |
|
481 | 481 | def test_grouping_different_reports_0_5(self): |
|
482 | 482 | import appenlight.tests.payload_examples as payload_examples |
|
483 | 483 | from appenlight.views.api import reports_create |
|
484 | 484 | from appenlight.models.services.application import ApplicationService |
|
485 | 485 | from appenlight.models.report_group import ReportGroup |
|
486 | 486 | route = mock.Mock() |
|
487 | 487 | route.name = 'api_reports' |
|
488 | 488 | request = pyramid.threadlocal.get_current_request() |
|
489 | 489 | context = DummyContext() |
|
490 | 490 | context.resource = ApplicationService.by_id(1) |
|
491 | 491 | request.context = context |
|
492 | 492 | request.matched_route = route |
|
493 | 493 | PYTHON_PAYLOAD = payload_examples.PYTHON_PAYLOAD_0_5 |
|
494 | 494 | PARSED_REPORT_404 = payload_examples.PARSED_REPORT_404 |
|
495 | 495 | request.unsafe_json_body = [copy.deepcopy(PYTHON_PAYLOAD), |
|
496 | 496 | copy.deepcopy(PARSED_REPORT_404)] |
|
497 | 497 | reports_create(request) |
|
498 | 498 | query = DBSession.query(ReportGroup) |
|
499 | 499 | report = query.first() |
|
500 | 500 | assert query.count() == 2 |
|
501 | 501 | assert report.total_reports == 1 |
|
502 | 502 | |
|
503 | 503 | |
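The three tests above fix the grouping contract: two identical payloads fold into a single ReportGroup with total_reports == 2, while differing payloads open separate groups. The server-side grouping key is not visible in this diff; a stand-in hash over error and view_name illustrates the idea:

    import hashlib

    def group_reports(payloads):
        groups = {}
        for payload in payloads:
            # stand-in key; the real implementation derives its own group hash
            key = hashlib.sha1(('%s|%s' % (payload.get('error', ''),
                                           payload.get('view_name', '')))
                               .encode('utf8')).hexdigest()
            groups.setdefault(key, []).append(payload)
        return groups

    grouped = group_reports([
        {'error': 'Exception: test 500', 'view_name': 'app:error'},
        {'error': 'Exception: test 500', 'view_name': 'app:error'},
        {'error': '404 Not Found', 'view_name': 'app:missing'}])
    assert sorted(len(reports) for reports in grouped.values()) == [1, 2]
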
|
504 | 504 | @pytest.mark.usefixtures('default_application') |
|
505 | 505 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
506 | 506 | class TestAirbrakeXMLView(object): |
|
507 | 507 | |
|
508 | 508 | def test_normal_payload_parsing(self): |
|
509 | 509 | import datetime |
|
510 | 510 | import defusedxml.ElementTree as ElementTree |
|
511 | 511 | import appenlight.tests.payload_examples as payload_examples |
|
512 | 512 | from appenlight.lib.utils.airbrake import parse_airbrake_xml |
|
513 | 513 | from appenlight.validators import ReportListSchema_0_5 |
|
514 | 514 | |
|
515 | 515 | context = DummyContext() |
|
516 | 516 | request = testing.DummyRequest( |
|
517 | 517 | headers={'Content-Type': 'application/xml'}) |
|
518 | 518 | request.context = context |
|
519 | 519 | request.context.possibly_public = False |
|
520 | 520 | root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE) |
|
521 | 521 | request.context.airbrake_xml_etree = root |
|
522 | 522 | error_dict = parse_airbrake_xml(request) |
|
523 | 523 | schema = ReportListSchema_0_5().bind(utcnow=datetime.datetime.utcnow()) |
|
524 | 524 | deserialized_report = schema.deserialize([error_dict])[0] |
|
525 | 525 | assert deserialized_report['client'] == 'Airbrake Notifier' |
|
526 | 526 | assert deserialized_report['error'] == 'NameError: undefined local variable or method `sdfdfdf\' for #<#<Class:0x000000039a8b90>:0x00000002c53df0>' |
|
527 | 527 | assert deserialized_report['http_status'] == 500 |
|
528 | 528 | assert deserialized_report['language'] == 'unknown' |
|
529 | 529 | assert deserialized_report['message'] == '' |
|
530 | 530 | assert deserialized_report['occurences'] == 1 |
|
531 | 531 | assert deserialized_report['priority'] == 5 |
|
532 | 532 | d_request = deserialized_report['request'] |
|
533 | 533 | assert d_request['GET'] == {'test': '1234'} |
|
534 | 534 | assert d_request['action_dispatch.request.parameters'] == { |
|
535 | 535 | 'action': 'index', |
|
536 | 536 | 'controller': 'welcome', |
|
537 | 537 | 'test': '1234'} |
|
538 | 538 | assert deserialized_report['request_id'] == 'c11b2267f3ad8b00a1768cae35559fa1' |
|
539 | 539 | assert deserialized_report['server'] == 'ergo-desktop' |
|
540 | 540 | assert deserialized_report['traceback'][0] == { |
|
541 | 541 | 'cline': 'block in start_thread', |
|
542 | 542 | 'file': '/home/ergo/.rbenv/versions/1.9.3-p327/lib/ruby/1.9.1/webrick/server.rb', |
|
543 | 543 | 'fn': 'block in start_thread', |
|
544 | 544 | 'line': '191', |
|
545 | 545 | 'module': '', |
|
546 | 546 | 'vars': {}} |
|
547 | 547 | assert deserialized_report['traceback'][-1] == { |
|
548 | 548 | 'cline': '_app_views_welcome_index_html_erb___2570061166873166679_31748940', |
|
549 | 549 | 'file': '[PROJECT_ROOT]/app/views/welcome/index.html.erb', |
|
550 | 550 | 'fn': '_app_views_welcome_index_html_erb___2570061166873166679_31748940', |
|
551 | 551 | 'line': '3', |
|
552 | 552 | 'module': '', |
|
553 | 553 | 'vars': {}} |
|
554 | 554 | assert deserialized_report['url'] == 'http://0.0.0.0:3000/welcome/index?test=1234' |
|
555 | 555 | assert deserialized_report['view_name'] == 'welcome:index' |
|
556 | 556 | |
|
557 | 557 | def test_normal_payload_view(self): |
|
558 | 558 | import defusedxml.ElementTree as ElementTree |
|
559 | 559 | import appenlight.tests.payload_examples as payload_examples |
|
560 | 560 | |
|
561 | 561 | from appenlight.models.services.application import ApplicationService |
|
562 | 562 | from appenlight.views.api import airbrake_xml_compat |
|
563 | 563 | |
|
564 | 564 | context = DummyContext() |
|
565 | 565 | context.resource = ApplicationService.by_id(1) |
|
566 | 566 | request = testing.DummyRequest( |
|
567 | 567 | headers={'Content-Type': 'application/xml'}) |
|
568 | 568 | request.context = context |
|
569 | 569 | request.context.possibly_public = False |
|
570 | 570 | root = ElementTree.fromstring(payload_examples.AIRBRAKE_RUBY_EXAMPLE) |
|
571 | 571 | request.context.airbrake_xml_etree = root |
|
572 | 572 | route = mock.Mock() |
|
573 | 573 | route.name = 'api_airbrake' |
|
574 | 574 | request.matched_route = route |
|
575 | 575 | result = airbrake_xml_compat(request) |
|
576 | 576 | assert '<notice><id>' in result |
|
577 | 577 | |
|
578 | 578 | |
|
579 | 579 | @pytest.mark.usefixtures('default_application') |
|
580 | 580 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
581 | 581 | class TestAPILogView(object): |
|
582 | 582 | def test_no_json_payload(self, base_app): |
|
583 | 583 | import colander |
|
584 | 584 | from appenlight.models.services.application import ApplicationService |
|
585 | 585 | from appenlight.views.api import logs_create |
|
586 | 586 | |
|
587 | 587 | context = DummyContext() |
|
588 | 588 | context.resource = ApplicationService.by_id(1) |
|
589 | 589 | request = testing.DummyRequest( |
|
590 | 590 | headers={'Content-Type': 'application/json'}) |
|
591 | 591 | request.context = context |
|
592 | 592 | request.registry = base_app.registry |
|
593 | 593 | request.unsafe_json_body = '' |
|
594 | 594 | route = mock.Mock() |
|
595 | 595 | route.name = 'api_logs' |
|
596 | 596 | request.matched_route = route |
|
597 | 597 | with pytest.raises(colander.Invalid): |
|
598 | 598 | response = logs_create(request) |
|
599 | 599 | |
|
600 | 600 | def test_single_json_payload(self): |
|
601 | 601 | import appenlight.tests.payload_examples as payload_examples |
|
602 | 602 | from appenlight.models.log import Log |
|
603 | 603 | from appenlight.views.api import logs_create |
|
604 | 604 | from appenlight.models.services.application import ApplicationService |
|
605 | 605 | route = mock.Mock() |
|
606 | 606 | route.name = 'api_logs' |
|
607 | 607 | request = pyramid.threadlocal.get_current_request() |
|
608 | 608 | context = DummyContext() |
|
609 | 609 | context.resource = ApplicationService.by_id(1) |
|
610 | 610 | request.context = context |
|
611 | 611 | request.matched_route = route |
|
612 | 612 | request.unsafe_json_body = [copy.deepcopy( |
|
613 | 613 | payload_examples.LOG_EXAMPLES[0])] |
|
614 | 614 | logs_create(request) |
|
615 | 615 | query = DBSession.query(Log) |
|
616 | 616 | log = query.first() |
|
617 | 617 | assert query.count() == 1 |
|
618 | 618 | assert log.message == "OMG ValueError happened" |
|
619 | 619 | |
|
620 | 620 | def test_multiple_json_payload(self): |
|
621 | 621 | import appenlight.tests.payload_examples as payload_examples |
|
622 | 622 | from appenlight.models.log import Log |
|
623 | 623 | from appenlight.views.api import logs_create |
|
624 | 624 | from appenlight.models.services.application import ApplicationService |
|
625 | 625 | route = mock.Mock() |
|
626 | 626 | route.name = 'api_logs' |
|
627 | 627 | request = pyramid.threadlocal.get_current_request() |
|
628 | 628 | context = DummyContext() |
|
629 | 629 | context.resource = ApplicationService.by_id(1) |
|
630 | 630 | request.context = context |
|
631 | 631 | request.matched_route = route |
|
632 | 632 | LOG_PAYLOAD = payload_examples.LOG_EXAMPLES[0] |
|
633 | 633 | LOG_PAYLOAD2 = payload_examples.LOG_EXAMPLES[1] |
|
634 | 634 | request.unsafe_json_body = copy.deepcopy([LOG_PAYLOAD, LOG_PAYLOAD2]) |
|
635 | 635 | logs_create(request) |
|
636 | 636 | query = DBSession.query(Log).order_by(sa.asc(Log.log_id)) |
|
637 | 637 | assert query.count() == 2 |
|
638 | 638 | assert query[0].message == "OMG ValueError happened" |
|
639 | 639 | assert query[1].message == "OMG ValueError happened2" |
|
640 | 640 | |
|
641 | 641 | def test_public_key_rewriting(self): |
|
642 | 642 | import appenlight.tests.payload_examples as payload_examples |
|
643 | 643 | from appenlight.models.log import Log |
|
644 | 644 | from appenlight.views.api import logs_create |
|
645 | 645 | from appenlight.models.services.application import ApplicationService |
|
646 | 646 | route = mock.Mock() |
|
647 | 647 | route.name = 'api_logs' |
|
648 | 648 | request = pyramid.threadlocal.get_current_request() |
|
649 | 649 | context = DummyContext() |
|
650 | 650 | context.resource = ApplicationService.by_id(1) |
|
651 | 651 | request.context = context |
|
652 | 652 | request.matched_route = route |
|
653 | 653 | |
|
654 | 654 | LOG_PAYLOAD = copy.deepcopy(payload_examples.LOG_EXAMPLES[0]) |
|
655 | 655 | LOG_PAYLOAD2 = copy.deepcopy(payload_examples.LOG_EXAMPLES[1]) |
|
656 | 656 | LOG_PAYLOAD['primary_key'] = 'X2' |
|
657 | 657 | LOG_PAYLOAD2['primary_key'] = 'X2' |
|
658 | 658 | request.unsafe_json_body = [LOG_PAYLOAD, LOG_PAYLOAD2] |
|
659 | 659 | logs_create(request) |
|
660 | 660 | |
|
661 | 661 | query = DBSession.query(Log).order_by(sa.asc(Log.log_id)) |
|
662 | 662 | assert query.count() == 1 |
|
663 | 663 | assert query[0].message == "OMG ValueError happened2" |
|
664 | 664 | |
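test_public_key_rewriting encodes the deduplication contract for logs: two payloads sharing primary_key 'X2' end up as a single stored row carrying the later message. A minimal sketch of that behaviour (assumed semantics; the real rewrite happens at storage time):

    def apply_primary_keys(log_payloads):
        stored, by_key = [], {}
        for entry in log_payloads:
            key = entry.get('primary_key')
            if key is None:
                stored.append(dict(entry))
            elif key in by_key:
                by_key[key].update(entry)  # later payload overwrites earlier
            else:
                by_key[key] = dict(entry)
                stored.append(by_key[key])
        return stored

    logs = [{'primary_key': 'X2', 'message': 'OMG ValueError happened'},
            {'primary_key': 'X2', 'message': 'OMG ValueError happened2'}]
    assert [entry['message'] for entry in apply_primary_keys(logs)] == \
        ['OMG ValueError happened2']
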
|
665 | 665 | @pytest.mark.usefixtures('default_application') |
|
666 | 666 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
667 | 667 | class TestAPIGeneralMetricsView(object): |
|
668 | 668 | def test_no_json_payload(self, base_app): |
|
669 | 669 | import colander |
|
670 | 670 | from appenlight.models.services.application import ApplicationService |
|
671 | 671 | from appenlight.views.api import general_metrics_create |
|
672 | 672 | route = mock.Mock() |
|
673 | 673 | route.name = 'api_general_metrics' |
|
674 | 674 | context = DummyContext() |
|
675 | 675 | context.resource = ApplicationService.by_id(1) |
|
676 | 676 | request = testing.DummyRequest( |
|
677 | 677 | headers={'Content-Type': 'application/json'}) |
|
678 | 678 | request.context = context |
|
679 | 679 | request.registry = base_app.registry |
|
680 | 680 | request.unsafe_json_body = '' |
|
681 | 681 | request.matched_route = route |
|
682 | 682 | with pytest.raises(colander.Invalid): |
|
683 | 683 | general_metrics_create(request) |
|
684 | 684 | |
|
685 | 685 | def test_single_json_payload(self): |
|
686 | 686 | import appenlight.tests.payload_examples as payload_examples |
|
687 | 687 | from appenlight.models.metric import Metric |
|
688 | 688 | from appenlight.views.api import general_metrics_create |
|
689 | 689 | from appenlight.models.services.application import ApplicationService |
|
690 | 690 | route = mock.Mock() |
|
691 | 691 | route.name = 'api_general_metric' |
|
692 | 692 | request = pyramid.threadlocal.get_current_request() |
|
693 | 693 | request.matched_route = route |
|
694 | 694 | context = DummyContext() |
|
695 | 695 | context.resource = ApplicationService.by_id(1) |
|
696 | 696 | request.context = context |
|
697 | 697 | request.unsafe_json_body = payload_examples.METRICS_PAYLOAD |
|
698 | 698 | general_metrics_create(request) |
|
699 | 699 | query = DBSession.query(Metric) |
|
700 | 700 | metric = query.first() |
|
701 | 701 | assert query.count() == 1 |
|
702 | 702 | assert metric.namespace == 'some.monitor' |
|
703 | 703 | |
|
704 | 704 | def test_multiple_json_payload(self): |
|
705 | 705 | import appenlight.tests.payload_examples as payload_examples |
|
706 | 706 | from appenlight.models.metric import Metric |
|
707 | 707 | from appenlight.views.api import general_metrics_create |
|
708 | 708 | from appenlight.models.services.application import ApplicationService |
|
709 | 709 | route = mock.Mock() |
|
710 | 710 | route.name = 'api_general_metrics' |
|
711 | 711 | request = pyramid.threadlocal.get_current_request() |
|
712 | 712 | request.matched_route = route |
|
713 | 713 | context = DummyContext() |
|
714 | 714 | context.resource = ApplicationService.by_id(1) |
|
715 | 715 | request.context = context |
|
716 | 716 | request.unsafe_json_body = [ |
|
717 | 717 | copy.deepcopy(payload_examples.METRICS_PAYLOAD), |
|
718 | 718 | copy.deepcopy(payload_examples.METRICS_PAYLOAD), |
|
719 | 719 | ] |
|
720 | 720 | general_metrics_create(request) |
|
721 | 721 | query = DBSession.query(Metric) |
|
722 | 722 | metric = query.first() |
|
723 | 723 | assert query.count() == 2 |
|
724 | 724 | assert metric.namespace == 'some.monitor' |
|
725 | 725 | |
|
726 | 726 | |
|
727 | 727 | class TestGroupingMessageReplacements(object): |
|
728 | 728 | def test_replace_default_repr_python(self): |

729 | 729 | test_str = ''' |

730 | 730 | ConnectionError: ConnectionError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) caused by: ConnectTimeoutError((<urllib3.connection.HTTPConnection object at 0x7f87a0ba9fd0>, 'Connection to domain.gr timed out. (connect timeout=10)')) |

731 | 731 | ''' |

732 | 732 | regex = r'<(.*?) object at (.*?)>' |

import re

# assumed intent of this stub (it had no test_ prefix and no assertion):

normalized = re.sub(regex, r'<\1 object>', test_str)

assert 'object at 0x' not in normalized
|
733 | 733 | |
|
734 | 734 | |
|
735 | 735 | class TestRulesKeyGetter(object): |
|
736 | 736 | def test_default_dict_getter_top_key(self): |
|
737 | 737 | from appenlight.lib.rule import Rule |
|
738 | 738 | struct = { |
|
739 | 739 | "a": { |
|
740 | 740 | "b": 'b', |
|
741 | 741 | "c": { |
|
742 | 742 | "d": 'd', |
|
743 | 743 | "g": { |
|
744 | 744 | "h": 'h' |
|
745 | 745 | } |
|
746 | 746 | }, |
|
747 | 747 | "e": 'e' |
|
748 | 748 | }, |
|
749 | 749 | "f": 'f' |
|
750 | 750 | } |
|
751 | 751 | result = Rule.default_dict_struct_getter(struct, "a") |
|
752 | 752 | assert result == struct['a'] |
|
753 | 753 | |
|
754 | 754 | def test_default_dict_getter_sub_key(self): |
|
755 | 755 | from appenlight.lib.rule import Rule |
|
756 | 756 | struct = { |
|
757 | 757 | "a": { |
|
758 | 758 | "b": 'b', |
|
759 | 759 | "c": { |
|
760 | 760 | "d": 'd', |
|
761 | 761 | "g": { |
|
762 | 762 | "h": 'h' |
|
763 | 763 | } |
|
764 | 764 | }, |
|
765 | 765 | "e": 'e' |
|
766 | 766 | }, |
|
767 | 767 | "f": 'f' |
|
768 | 768 | } |
|
769 | 769 | result = Rule.default_dict_struct_getter(struct, 'a:b') |
|
770 | 770 | assert result == struct['a']['b'] |
|
771 | 771 | result = Rule.default_dict_struct_getter(struct, 'a:c:d') |
|
772 | 772 | assert result == struct['a']['c']['d'] |
|
773 | 773 | |
|
774 | 774 | def test_default_obj_getter_top_key(self): |
|
775 | 775 | from appenlight.lib.rule import Rule |
|
776 | 776 | class TestStruct(object): |
|
777 | 777 | def __init__(self, a, b): |
|
778 | 778 | self.a = a |
|
779 | 779 | self.b = b |
|
780 | 780 | |
|
781 | 781 | struct = TestStruct(a='a', |
|
782 | 782 | b=TestStruct(a='x', b='y')) |
|
783 | 783 | result = Rule.default_obj_struct_getter(struct, "a") |
|
784 | 784 | assert result == struct.a |
|
785 | 785 | |
|
786 | 786 | def test_default_obj_getter_sub_key(self): |
|
787 | 787 | from appenlight.lib.rule import Rule |
|
788 | 788 | class TestStruct(object): |
|
789 | 789 | def __init__(self, name, a, b): |
|
790 | 790 | self.name = name |
|
791 | 791 | self.a = a |
|
792 | 792 | self.b = b |
|
793 | 793 | |
|
794 | 794 | def __repr__(self): |
|
795 | 795 | return '<obj {}>'.format(self.name) |
|
796 | 796 | |
|
797 | 797 | c = TestStruct('c', a=5, b='z') |
|
798 | 798 | b = TestStruct('b', a=c, b='y') |
|
799 | 799 | struct = TestStruct('a', a='a', b=b) |
|
800 | 800 | result = Rule.default_obj_struct_getter(struct, 'b:b') |
|
801 | 801 | assert result == struct.b.b |
|
802 | 802 | result = Rule.default_obj_struct_getter(struct, 'b:a:b') |
|
803 | 803 | assert result == struct.b.a.b |
|
804 | 804 | |
|
805 | 805 | |
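TestRulesKeyGetter pins down colon-delimited path traversal for both dicts and objects. A sketch of the two getters, matching the semantics the assertions above require (assumed implementation; the real ones live on Rule):

    def dict_struct_getter(struct, field_path, delimiter=':'):
        value = struct
        for key in field_path.split(delimiter):
            value = value[key]  # descend one mapping level per path segment
        return value

    def obj_struct_getter(struct, field_path, delimiter=':'):
        value = struct
        for key in field_path.split(delimiter):
            value = getattr(value, key)  # attribute access instead of indexing
        return value

    struct = {'a': {'b': 'b', 'c': {'d': 'd'}}}
    assert dict_struct_getter(struct, 'a:c:d') == 'd'
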
|
806 | 806 | @pytest.mark.usefixtures('report_type_matrix') |
|
807 | 807 | class TestRulesParsing(): |
|
808 | 808 | @pytest.mark.parametrize("op, struct_value, test_value, match_result", [ |
|
809 | 809 | ('eq', 500, 500, True), |
|
810 | 810 | ('eq', 600, 500, False), |
|
811 | 811 | ('eq', 300, 500, False), |
|
812 | 812 | ('eq', "300", 500, False), |
|
813 | 813 | ('eq', "600", 500, False), |
|
814 | 814 | ('eq', "500", 500, True), |
|
815 | 815 | ('ne', 500, 500, False), |
|
816 | 816 | ('ne', 600, 500, True), |
|
817 | 817 | ('ne', 300, 500, True), |
|
818 | 818 | ('ne', "300", 500, True), |
|
819 | 819 | ('ne', "600", 500, True), |
|
820 | 820 | ('ne', "500", 500, False), |
|
821 | 821 | ('ge', 500, 500, True), |
|
822 | 822 | ('ge', 600, 500, True), |
|
823 | 823 | ('ge', 499, 500, False), |
|
824 | 824 | ('gt', 499, 500, False), |
|
825 | 825 | ('gt', 500, 500, False), |
|
826 | 826 | ('gt', 501, 500, True), |
|
827 | 827 | ('le', 499, 500, True), |
|
828 | 828 | ('le', 500, 500, True), |
|
829 | 829 | ('le', 501, 500, False), |
|
830 | 830 | ('lt', 499, 500, True), |
|
831 | 831 | ('lt', 500, 500, False), |
|
832 | 832 | ('lt', 501, 500, False), |
|
833 | 833 | ]) |
|
834 | 834 | def test_single_op_int(self, op, struct_value, test_value, match_result, |
|
835 | 835 | report_type_matrix): |
|
836 | 836 | from appenlight.lib.rule import Rule |
|
837 | 837 | rule_config = { |
|
838 | 838 | "op": op, |
|
839 | 839 | "field": "http_status", |
|
840 | 840 | "value": test_value |
|
841 | 841 | } |
|
842 | 842 | rule = Rule(rule_config, report_type_matrix) |
|
843 | 843 | |
|
844 | 844 | data = { |
|
845 | 845 | "http_status": struct_value |
|
846 | 846 | } |
|
847 | 847 | assert rule.match(data) is match_result |
|
848 | 848 | |
|
849 | 849 | @pytest.mark.parametrize("op, struct_value, test_value, match_result", [ |
|
850 | 850 | ('ge', "500.01", 500, True), |
|
851 | 851 | ('ge', "500.01", 500.02, False), |
|
852 | 852 | ('le', "500.01", 500.02, True) |
|
853 | 853 | ]) |
|
854 | 854 | def test_single_op_float(self, op, struct_value, test_value, match_result, |
|
855 | 855 | report_type_matrix): |
|
856 | 856 | from appenlight.lib.rule import Rule |
|
857 | 857 | rule_config = { |
|
858 | 858 | "op": op, |
|
859 | 859 | "field": "duration", |
|
860 | 860 | "value": test_value |
|
861 | 861 | } |
|
862 | 862 | rule = Rule(rule_config, report_type_matrix) |
|
863 | 863 | |
|
864 | 864 | data = { |
|
865 | 865 | "duration": struct_value |
|
866 | 866 | } |
|
867 | 867 | assert rule.match(data) is match_result |
|
868 | 868 | |
|
869 | 869 | @pytest.mark.parametrize("op, struct_value, test_value, match_result", [ |
|
870 | 870 | ('contains', 'foo bar baz', 'foo', True), |
|
871 | 871 | ('contains', 'foo bar baz', 'bar', True), |
|
872 | 872 | ('contains', 'foo bar baz', 'dupa', False), |
|
873 | 873 | ('startswith', 'foo bar baz', 'foo', True), |
|
874 | 874 | ('startswith', 'foo bar baz', 'bar', False), |
|
875 | 875 | ('endswith', 'foo bar baz', 'baz', True), |
|
876 | 876 | ('endswith', 'foo bar baz', 'bar', False), |
|
877 | 877 | ]) |
|
878 | 878 | def test_single_op_string(self, op, struct_value, test_value, |
|
879 | 879 | match_result, report_type_matrix): |
|
880 | 880 | from appenlight.lib.rule import Rule |
|
881 | 881 | rule_config = { |
|
882 | 882 | "op": op, |
|
883 | 883 | "field": "error", |
|
884 | 884 | "value": test_value |
|
885 | 885 | } |
|
886 | 886 | rule = Rule(rule_config, report_type_matrix) |
|
887 | 887 | |
|
888 | 888 | data = { |
|
889 | 889 | "error": struct_value |
|
890 | 890 | } |
|
891 | 891 | assert rule.match(data) is match_result |
|
892 | 892 | |
|
893 | 893 | @pytest.mark.parametrize("field, value, s_type", [ |
|
894 | 894 | ('field_unicode', 500, str), |
|
895 | 895 | ('field_unicode', 500.0, str), |
|
896 | 896 | ('field_unicode', "500", str), |
|
897 | 897 | ('field_int', "500", int), |
|
898 | 898 | ('field_int', 500, int), |
|
899 | 899 | ('field_int', 500.0, int), |
|
900 | 900 | ('field_float', "500", float), |
|
901 | 901 | ('field_float', 500, float), |
|
902 | 902 | ('field_float', 500.0, float), |
|
903 | 903 | ]) |
|
904 | 904 | def test_type_normalization(self, field, value, s_type): |
|
905 | 905 | from appenlight.lib.rule import Rule |
|
906 | 906 | type_matrix = { |
|
907 | 907 | 'field_unicode': {"type": 'unicode'}, |
|
908 | 908 | 'field_float': {"type": 'float'}, |
|
909 | 909 | 'field_int': {"type": 'int'}, |
|
910 | 910 | } |
|
911 | 911 | |
|
912 | 912 | rule = Rule({}, type_matrix) |
|
913 | 913 | n_value = rule.normalized_type(field, value) |
|
914 | 914 | assert isinstance(n_value, s_type) is True |
|
915 | 915 | |
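test_type_normalization shows that rule values are coerced to the type declared in the matrix before comparison. A sketch consistent with the parametrized cases above (int accepts numeric strings and floats, so the cast goes through float first; that detail is an assumption):

    CASTS = {
        'unicode': str,
        'float': float,
        'int': lambda value: int(float(value)),  # "500" and 500.0 both become 500
    }

    def normalized_type(type_matrix, field, value):
        return CASTS[type_matrix[field]['type']](value)

    matrix = {'field_int': {'type': 'int'}, 'field_unicode': {'type': 'unicode'}}
    assert normalized_type(matrix, 'field_int', '500') == 500
    assert normalized_type(matrix, 'field_unicode', 500.0) == '500.0'
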
|
916 | 916 | |
|
917 | 917 | @pytest.mark.usefixtures('report_type_matrix') |
|
918 | 918 | class TestNestedRuleParsing(): |
|
919 | 919 | |
|
920 | 920 | @pytest.mark.parametrize("data, result", [ |
|
921 | 921 | ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, |
|
922 | 922 | False), |
|
923 | 923 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
924 | 924 | False), |
|
925 | 925 | ({"http_status": 500, "group": {"priority": 1, "occurences": 11}}, |
|
926 | 926 | False), |
|
927 | 927 | ({"http_status": 101, "group": {"priority": 3, "occurences": 5}}, |
|
928 | 928 | True), |
|
929 | 929 | ]) |
|
930 | 930 | def test_NOT_rule(self, data, result, report_type_matrix): |
|
931 | 931 | from appenlight.lib.rule import Rule |
|
932 | 932 | rule_config = { |
|
933 | 933 | "field": "__NOT__", |
|
934 | 934 | "rules": [ |
|
935 | 935 | { |
|
936 | 936 | "op": "ge", |
|
937 | 937 | "field": "group:occurences", |
|
938 | 938 | "value": "10" |
|
939 | 939 | }, |
|
940 | 940 | { |
|
941 | 941 | "op": "ge", |
|
942 | 942 | "field": "group:priority", |
|
943 | 943 | "value": "4" |
|
944 | 944 | } |
|
945 | 945 | ] |
|
946 | 946 | } |
|
947 | 947 | |
|
948 | 948 | rule = Rule(rule_config, report_type_matrix) |
|
949 | 949 | assert rule.match(data) is result |
|
950 | 950 | |
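The NOT case above, together with the nested AND/OR tests that follow, defines a small recursive evaluator: composite nodes (__AND__, __OR__, __NOT__) combine their children, and leaf nodes compare a field against a value. Note that the expected results require __NOT__ to mean "none of the subrules match". A hedged sketch, not AppEnlight's actual implementation, with leaf values coerced to int for these examples:

    import operator

    OPS = {'eq': operator.eq, 'ne': operator.ne, 'ge': operator.ge,
           'gt': operator.gt, 'le': operator.le, 'lt': operator.lt}

    def match(rule, data):
        field = rule['field']
        if field == '__AND__':
            return all(match(subrule, data) for subrule in rule['rules'])
        if field == '__OR__':
            return any(match(subrule, data) for subrule in rule['rules'])
        if field == '__NOT__':
            # "none of the subrules match", per the expected results above
            return not any(match(subrule, data) for subrule in rule['rules'])
        value = data
        for key in field.split(':'):  # colon paths reach into nested dicts
            value = value[key]
        return OPS[rule['op']](int(value), int(rule['value']))

    not_rule = {'field': '__NOT__', 'rules': [
        {'op': 'ge', 'field': 'group:occurences', 'value': '10'},
        {'op': 'ge', 'field': 'group:priority', 'value': '4'}]}
    assert match(not_rule, {'group': {'priority': 3, 'occurences': 5}}) is True
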
|
951 | 951 | @pytest.mark.parametrize("data, result", [ |
|
952 | 952 | ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, |
|
953 | 953 | True), |
|
954 | 954 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
955 | 955 | True), |
|
956 | 956 | ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, |
|
957 | 957 | True), |
|
958 | 958 | ({"http_status": 101, "group": {"priority": 3, "occurences": 11}}, |
|
959 | 959 | False), |
|
960 | 960 | ]) |
|
961 | 961 | def test_nested_OR_AND_rule(self, data, result, report_type_matrix): |
|
962 | 962 | from appenlight.lib.rule import Rule |
|
963 | 963 | rule_config = { |
|
964 | 964 | "field": "__OR__", |
|
965 | 965 | "rules": [ |
|
966 | 966 | { |
|
967 | 967 | "field": "__AND__", |
|
968 | 968 | "rules": [ |
|
969 | 969 | { |
|
970 | 970 | "op": "ge", |
|
971 | 971 | "field": "group:occurences", |
|
972 | 972 | "value": "10" |
|
973 | 973 | }, |
|
974 | 974 | { |
|
975 | 975 | "op": "ge", |
|
976 | 976 | "field": "group:priority", |
|
977 | 977 | "value": "4" |
|
978 | 978 | } |
|
979 | 979 | ] |
|
980 | 980 | }, |
|
981 | 981 | { |
|
982 | 982 | "op": "eq", |
|
983 | 983 | "field": "http_status", |
|
984 | 984 | "value": "500" |
|
985 | 985 | } |
|
986 | 986 | ] |
|
987 | 987 | } |
|
988 | 988 | |
|
989 | 989 | rule = Rule(rule_config, report_type_matrix) |
|
990 | 990 | assert rule.match(data) is result |
|
991 | 991 | |
|
992 | 992 | @pytest.mark.parametrize("data, result", [ |
|
993 | 993 | ({"http_status": 501, "group": {"priority": 7, "occurences": 11}}, |
|
994 | 994 | True), |
|
995 | 995 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
996 | 996 | True), |
|
997 | 997 | ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, |
|
998 | 998 | True), |
|
999 | 999 | ({"http_status": 101, "group": {"priority": 3, "occurences": 1}}, |
|
1000 | 1000 | False), |
|
1001 | 1001 | ]) |
|
1002 | 1002 | def test_nested_OR_OR_rule(self, data, result, report_type_matrix): |
|
1003 | 1003 | from appenlight.lib.rule import Rule |
|
1004 | 1004 | rule_config = { |
|
1005 | 1005 | "field": "__OR__", |
|
1006 | 1006 | "rules": [ |
|
1007 | 1007 | {"field": "__OR__", |
|
1008 | 1008 | "rules": [ |
|
1009 | 1009 | {"op": "ge", |
|
1010 | 1010 | "field": "group:occurences", |
|
1011 | 1011 | "value": "10" |
|
1012 | 1012 | }, |
|
1013 | 1013 | {"op": "ge", |
|
1014 | 1014 | "field": "group:priority", |
|
1015 | 1015 | "value": "4" |
|
1016 | 1016 | } |
|
1017 | 1017 | ] |
|
1018 | 1018 | }, |
|
1019 | 1019 | {"op": "eq", |
|
1020 | 1020 | "field": "http_status", |
|
1021 | 1021 | "value": "500" |
|
1022 | 1022 | } |
|
1023 | 1023 | ] |
|
1024 | 1024 | } |
|
1025 | 1025 | |
|
1026 | 1026 | rule = Rule(rule_config, report_type_matrix) |
|
1027 | 1027 | assert rule.match(data) is result |
|
1028 | 1028 | |
|
1029 | 1029 | @pytest.mark.parametrize("data, result", [ |
|
1030 | 1030 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}}, |
|
1031 | 1031 | True), |
|
1032 | 1032 | ({"http_status": 101, "group": {"priority": 7, "occurences": 11}}, |
|
1033 | 1033 | False), |
|
1034 | 1034 | ({"http_status": 500, "group": {"priority": 1, "occurences": 1}}, |
|
1035 | 1035 | False), |
|
1036 | 1036 | ({"http_status": 101, "group": {"priority": 3, "occurences": 1}}, |
|
1037 | 1037 | False), |
|
1038 | 1038 | ]) |
|
1039 | 1039 | def test_nested_AND_AND_rule(self, data, result, report_type_matrix): |
|
1040 | 1040 | from appenlight.lib.rule import Rule |
|
1041 | 1041 | rule_config = { |
|
1042 | 1042 | "field": "__AND__", |
|
1043 | 1043 | "rules": [ |
|
1044 | 1044 | {"field": "__AND__", |
|
1045 | 1045 | "rules": [ |
|
1046 | 1046 | {"op": "ge", |
|
1047 | 1047 | "field": "group:occurences", |
|
1048 | 1048 | "value": "10" |
|
1049 | 1049 | }, |
|
1050 | 1050 | {"op": "ge", |
|
1051 | 1051 | "field": "group:priority", |
|
1052 | 1052 | "value": "4" |
|
1053 | 1053 | }] |
|
1054 | 1054 | }, |
|
1055 | 1055 | {"op": "eq", |
|
1056 | 1056 | "field": "http_status", |
|
1057 | 1057 | "value": "500" |
|
1058 | 1058 | } |
|
1059 | 1059 | ] |
|
1060 | 1060 | } |
|
1061 | 1061 | |
|
1062 | 1062 | rule = Rule(rule_config, report_type_matrix) |
|
1063 | 1063 | assert rule.match(data) is result |
|
1064 | 1064 | |
|
1065 | 1065 | @pytest.mark.parametrize("data, result", [ |
|
1066 | 1066 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1067 | 1067 | "url_path": '/test/register', "error": "foo test bar"}, True), |
|
1068 | 1068 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1069 | 1069 | "url_path": '/test/register', "error": "foo INVALID bar"}, False), |
|
1070 | 1070 | ]) |
|
1071 | 1071 | def test_nested_AND_AND_AND_rule(self, data, result, report_type_matrix): |
|
1072 | 1072 | from appenlight.lib.rule import Rule |
|
1073 | 1073 | rule_config = { |
|
1074 | 1074 | "field": "__AND__", |
|
1075 | 1075 | "rules": [ |
|
1076 | 1076 | {"field": "__AND__", |
|
1077 | 1077 | "rules": [ |
|
1078 | 1078 | {"op": "ge", |
|
1079 | 1079 | "field": "group:occurences", |
|
1080 | 1080 | "value": "10" |
|
1081 | 1081 | }, |
|
1082 | 1082 | {"field": "__AND__", |
|
1083 | 1083 | "rules": [ |
|
1084 | 1084 | {"op": "endswith", |
|
1085 | 1085 | "field": "url_path", |
|
1086 | 1086 | "value": "register"}, |
|
1087 | 1087 | {"op": "contains", |
|
1088 | 1088 | "field": "error", |
|
1089 | 1089 | "value": "test"}]}] |
|
1090 | 1090 | }, |
|
1091 | 1091 | {"op": "eq", |
|
1092 | 1092 | "field": "http_status", |
|
1093 | 1093 | "value": "500" |
|
1094 | 1094 | } |
|
1095 | 1095 | ] |
|
1096 | 1096 | } |
|
1097 | 1097 | |
|
1098 | 1098 | rule = Rule(rule_config, report_type_matrix) |
|
1099 | 1099 | assert rule.match(data) is result |
|
1100 | 1100 | |
|
1101 | 1101 | @pytest.mark.parametrize("data, result", [ |
|
1102 | 1102 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1103 | 1103 | "url_path": 6, "error": 3}, False), |
|
1104 | 1104 | ({"http_status": 500, "group": {"priority": 7, "occurences": 11}, |
|
1105 | 1105 | "url_path": '/test/register', "error": "foo INVALID bar"}, True), |
|
1106 | 1106 | ]) |
|
1107 | 1107 | def test_nested_AND_AND_OR_rule(self, data, result, report_type_matrix): |
|
1108 | 1108 | from appenlight.lib.rule import Rule |
|
1109 | 1109 | rule_config = { |
|
1110 | 1110 | "field": "__AND__", |
|
1111 | 1111 | "rules": [ |
|
1112 | 1112 | {"field": "__AND__", |
|
1113 | 1113 | "rules": [ |
|
1114 | 1114 | {"op": "ge", |
|
1115 | 1115 | "field": "group:occurences", |
|
1116 | 1116 | "value": "10" |
|
1117 | 1117 | }, |
|
1118 | 1118 | {"field": "__OR__", |
|
1119 | 1119 | "rules": [ |
|
1120 | 1120 | {"op": "endswith", |
|
1121 | 1121 | "field": "url_path", |
|
1122 | 1122 | "value": "register" |
|
1123 | 1123 | }, |
|
1124 | 1124 | {"op": "contains", |
|
1125 | 1125 | "field": "error", |
|
1126 | 1126 | "value": "test" |
|
1127 | 1127 | }]}] |
|
1128 | 1128 | }, |
|
1129 | 1129 | {"op": "eq", |
|
1130 | 1130 | "field": "http_status", |
|
1131 | 1131 | "value": "500" |
|
1132 | 1132 | } |
|
1133 | 1133 | ] |
|
1134 | 1134 | } |
|
1135 | 1135 | |
|
1136 | 1136 | rule = Rule(rule_config, report_type_matrix) |
|
1137 | 1137 | assert rule.match(data) is result |
|
1138 | 1138 | |
|
1139 | 1139 | @pytest.mark.parametrize("op, field, value, should_fail", [ |
|
1140 | 1140 | ('eq', 'http_status', "1", False), |
|
1141 | 1141 | ('ne', 'http_status', "1", False), |
|
1142 | 1142 | ('ne', 'http_status', "foo", True), |
|
1143 | 1143 | ('startswith', 'http_status', "1", True), |
|
1144 | 1144 | ('eq', 'group:priority', "1", False), |
|
1145 | 1145 | ('ne', 'group:priority', "1", False), |
|
1146 | 1146 | ('ge', 'group:priority', "1", False), |
|
1147 | 1147 | ('le', 'group:priority', "1", False), |
|
1148 | 1148 | ('startswith', 'group:priority', "1", True), |
|
1149 | 1149 | ('eq', 'url_domain', "1", False), |
|
1150 | 1150 | ('ne', 'url_domain', "1", False), |
|
1151 | 1151 | ('startswith', 'url_domain', "1", False), |
|
1152 | 1152 | ('endswith', 'url_domain', "1", False), |
|
1153 | 1153 | ('contains', 'url_domain', "1", False), |
|
1154 | 1154 | ('ge', 'url_domain', "1", True), |
|
1155 | 1155 | ('eq', 'url_path', "1", False), |
|
1156 | 1156 | ('ne', 'url_path', "1", False), |
|
1157 | 1157 | ('startswith', 'url_path', "1", False), |
|
1158 | 1158 | ('endswith', 'url_path', "1", False), |
|
1159 | 1159 | ('contains', 'url_path', "1", False), |
|
1160 | 1160 | ('ge', 'url_path', "1", True), |
|
1161 | 1161 | ('eq', 'error', "1", False), |
|
1162 | 1162 | ('ne', 'error', "1", False), |
|
1163 | 1163 | ('startswith', 'error', "1", False), |
|
1164 | 1164 | ('endswith', 'error', "1", False), |
|
1165 | 1165 | ('contains', 'error', "1", False), |
|
1166 | 1166 | ('ge', 'error', "1", True), |
|
1167 | 1167 | ('ge', 'url_path', "1", True), |
|
1168 | 1168 | ('eq', 'tags:server_name', "1", False), |
|
1169 | 1169 | ('ne', 'tags:server_name', "1", False), |
|
1170 | 1170 | ('startswith', 'tags:server_name', "1", False), |
|
1171 | 1171 | ('endswith', 'tags:server_name', "1", False), |
|
1172 | 1172 | ('contains', 'tags:server_name', "1", False), |
|
1173 | 1173 | ('ge', 'tags:server_name', "1", True), |
|
1174 | 1174 | ('contains', 'traceback', "1", False), |
|
1175 | 1175 | ('ge', 'traceback', "1", True), |
|
1176 | 1176 | ('eq', 'group:occurences', "1", False), |
|
1177 | 1177 | ('ne', 'group:occurences', "1", False), |
|
1178 | 1178 | ('ge', 'group:occurences', "1", False), |
|
1179 | 1179 | ('le', 'group:occurences', "1", False), |
|
1180 | 1180 | ('contains', 'group:occurences', "1", True), |
|
1181 | 1181 | ]) |
|
1182 | 1182 | def test_rule_validation(self, op, field, value, should_fail, |
|
1183 | 1183 | report_type_matrix): |
|
1184 | 1184 | import colander |
|
1185 | 1185 | from appenlight.validators import build_rule_schema |
|
1186 | 1186 | rule_config = { |
|
1187 | 1187 | "op": op, |
|
1188 | 1188 | "field": field, |
|
1189 | 1189 | "value": value |
|
1190 | 1190 | } |
|
1191 | 1191 | |
|
1192 | 1192 | schema = build_rule_schema(rule_config, report_type_matrix) |
|
1193 | 1193 | if should_fail: |
|
1194 | 1194 | with pytest.raises(colander.Invalid): |
|
1195 | 1195 | schema.deserialize(rule_config) |
|
1196 | 1196 | else: |
|
1197 | 1197 | schema.deserialize(rule_config) |
|
1198 | 1198 | |
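The table above pairs each field with the operators its type accepts: string fields reject ordering operators like ge, while numeric tag fields reject substring operators. A usage sketch of the same round-trip outside pytest (imports grounded in this changeset):

    import colander
    from appenlight.validators import build_rule_schema
    from appenlight.models.report import REPORT_TYPE_MATRIX

    rule_config = {'op': 'ge', 'field': 'group:occurences', 'value': '10'}
    schema = build_rule_schema(rule_config, REPORT_TYPE_MATRIX)
    try:
        schema.deserialize(rule_config)      # raises on an op/field mismatch
    except colander.Invalid as exc:
        print(exc.asdict())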
|
1199 | 1199 | def test_nested_proper_rule_validation(self, report_type_matrix): |
|
1200 | 1200 | from appenlight.validators import build_rule_schema |
|
1201 | 1201 | rule_config = { |
|
1202 | 1202 | "field": "__AND__", |
|
1203 | 1203 | "rules": [ |
|
1204 | 1204 | { |
|
1205 | 1205 | "field": "__AND__", |
|
1206 | 1206 | "rules": [ |
|
1207 | 1207 | { |
|
1208 | 1208 | "op": "ge", |
|
1209 | 1209 | "field": "group:occurences", |
|
1210 | 1210 | "value": "10" |
|
1211 | 1211 | }, |
|
1212 | 1212 | { |
|
1213 | 1213 | "field": "__OR__", |
|
1214 | 1214 | "rules": [ |
|
1215 | 1215 | { |
|
1216 | 1216 | "op": "endswith", |
|
1217 | 1217 | "field": "url_path", |
|
1218 | 1218 | "value": "register" |
|
1219 | 1219 | }, |
|
1220 | 1220 | { |
|
1221 | 1221 | "op": "contains", |
|
1222 | 1222 | "field": "error", |
|
1223 | 1223 | "value": "test" |
|
1224 | 1224 | } |
|
1225 | 1225 | ] |
|
1226 | 1226 | } |
|
1227 | 1227 | ] |
|
1228 | 1228 | }, |
|
1229 | 1229 | { |
|
1230 | 1230 | "op": "eq", |
|
1231 | 1231 | "field": "http_status", |
|
1232 | 1232 | "value": "500" |
|
1233 | 1233 | } |
|
1234 | 1234 | ] |
|
1235 | 1235 | } |
|
1236 | 1236 | |
|
1237 | 1237 | schema = build_rule_schema(rule_config, report_type_matrix) |
|
1238 | 1238 | deserialized = schema.deserialize(rule_config) |
|
1239 | 1239 | |
|
1240 | 1240 | def test_nested_bad_rule_validation(self, report_type_matrix): |
|
1241 | 1241 | import colander |
|
1242 | 1242 | from appenlight.validators import build_rule_schema |
|
1243 | 1243 | rule_config = { |
|
1244 | 1244 | "field": "__AND__", |
|
1245 | 1245 | "rules": [ |
|
1246 | 1246 | { |
|
1247 | 1247 | "field": "__AND__", |
|
1248 | 1248 | "rules": [ |
|
1249 | 1249 | { |
|
1250 | 1250 | "op": "ge", |
|
1251 | 1251 | "field": "group:occurences", |
|
1252 | 1252 | "value": "10" |
|
1253 | 1253 | }, |
|
1254 | 1254 | { |
|
1255 | 1255 | "field": "__OR__", |
|
1256 | 1256 | "rules": [ |
|
1257 | 1257 | { |
|
1258 | 1258 | "op": "gt", |
|
1259 | 1259 | "field": "url_path", |
|
1260 | 1260 | "value": "register" |
|
1261 | 1261 | }, |
|
1262 | 1262 | { |
|
1263 | 1263 | "op": "contains", |
|
1264 | 1264 | "field": "error", |
|
1265 | 1265 | "value": "test" |
|
1266 | 1266 | } |
|
1267 | 1267 | ] |
|
1268 | 1268 | } |
|
1269 | 1269 | ] |
|
1270 | 1270 | }, |
|
1271 | 1271 | { |
|
1272 | 1272 | "op": "eq", |
|
1273 | 1273 | "field": "http_status", |
|
1274 | 1274 | "value": "500" |
|
1275 | 1275 | } |
|
1276 | 1276 | ] |
|
1277 | 1277 | } |
|
1278 | 1278 | |
|
1279 | 1279 | schema = build_rule_schema(rule_config, report_type_matrix) |
|
1280 | 1280 | with pytest.raises(colander.Invalid): |
|
1281 | 1281 | deserialized = schema.deserialize(rule_config) |
|
1282 | 1282 | |
|
1283 | 1283 | def test_config_manipulator(self): |
|
1284 | 1284 | from appenlight.lib.rule import Rule |
|
1285 | 1285 | type_matrix = { |
|
1286 | 1286 | 'a': {"type": 'int', |
|
1287 | 1287 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1288 | 1288 | 'b': {"type": 'int', |
|
1289 | 1289 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1290 | 1290 | } |
|
1291 | 1291 | rule_config = { |
|
1292 | 1292 | "field": "__OR__", |
|
1293 | 1293 | "rules": [ |
|
1294 | 1294 | { |
|
1295 | 1295 | "field": "__OR__", |
|
1296 | 1296 | "rules": [ |
|
1297 | 1297 | { |
|
1298 | 1298 | "op": "ge", |
|
1299 | 1299 | "field": "a", |
|
1300 | 1300 | "value": "10" |
|
1301 | 1301 | } |
|
1302 | 1302 | ] |
|
1303 | 1303 | }, |
|
1304 | 1304 | { |
|
1305 | 1305 | "op": "eq", |
|
1306 | 1306 | "field": "b", |
|
1307 | 1307 | "value": "500" |
|
1308 | 1308 | } |
|
1309 | 1309 | ] |
|
1310 | 1310 | } |
|
1311 | 1311 | |
|
1312 | 1312 | def rule_manipulator(rule): |
|
1313 | 1313 | if 'value' in rule.config: |
|
1314 | 1314 | rule.config['value'] = "1" |
|
1315 | 1315 | |
|
1316 | 1316 | rule = Rule(rule_config, type_matrix, |
|
1317 | 1317 | config_manipulator=rule_manipulator) |
|
1318 | 1318 | rule.match({"a": 1, |
|
1319 | 1319 | "b": "2"}) |
|
1320 | 1320 | assert rule.config['rules'][0]['rules'][0]['value'] == "1" |
|
1321 | 1321 | assert rule.config['rules'][1]['value'] == "1" |
|
1322 | 1322 | assert rule.type_matrix["b"]['type'] == "int" |
|
1323 | 1323 | |
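A config_manipulator, as the test exercises it, is simply a callable invoked with every Rule node (composite and leaf alike) before matching; it may rewrite both rule.config and rule.type_matrix in place. A hedged sketch of a hook that clamps a numeric threshold (the field name 'a' mirrors the test fixture):

    def clamping_manipulator(rule):
        # illustrative: cap the configured threshold for field 'a' at 100
        if rule.config.get('field') == 'a' and 'value' in rule.config:
            rule.config['value'] = str(min(int(rule.config['value']), 100))

    # rule = Rule(rule_config, type_matrix, config_manipulator=clamping_manipulator)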
|
1324 | 1324 | def test_dynamic_config_manipulator(self): |
|
1325 | 1325 | from appenlight.lib.rule import Rule |
|
1326 | 1326 | rule_config = { |
|
1327 | 1327 | "field": "__OR__", |
|
1328 | 1328 | "rules": [ |
|
1329 | 1329 | { |
|
1330 | 1330 | "field": "__OR__", |
|
1331 | 1331 | "rules": [ |
|
1332 | 1332 | { |
|
1333 | 1333 | "op": "ge", |
|
1334 | 1334 | "field": "a", |
|
1335 | 1335 | "value": "10" |
|
1336 | 1336 | } |
|
1337 | 1337 | ] |
|
1338 | 1338 | }, |
|
1339 | 1339 | { |
|
1340 | 1340 | "op": "eq", |
|
1341 | 1341 | "field": "b", |
|
1342 | 1342 | "value": "500" |
|
1343 | 1343 | } |
|
1344 | 1344 | ] |
|
1345 | 1345 | } |
|
1346 | 1346 | |
|
1347 | 1347 | def rule_manipulator(rule): |
|
1348 | 1348 | rule.type_matrix = { |
|
1349 | 1349 | 'a': {"type": 'int', |
|
1350 | 1350 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1351 | 1351 | 'b': {"type": 'unicode', |
|
1352 | 1352 | "ops": ('eq', 'ne', 'ge', 'le',)}, |
|
1353 | 1353 | } |
|
1354 | 1354 | |
|
1355 | 1355 | if 'value' in rule.config: |
|
1356 | 1356 | if rule.config['field'] == 'a': |
|
1357 | 1357 | rule.config['value'] = "1" |
|
1358 | 1358 | elif rule.config['field'] == 'b': |
|
1359 | 1359 | rule.config['value'] = "2" |
|
1360 | 1360 | |
|
1361 | 1361 | rule = Rule(rule_config, {}, |
|
1362 | 1362 | config_manipulator=rule_manipulator) |
|
1363 | 1363 | rule.match({"a": 11, |
|
1364 | 1364 | "b": "55"}) |
|
1365 | 1365 | assert rule.config['rules'][0]['rules'][0]['value'] == "1" |
|
1366 | 1366 | assert rule.config['rules'][1]['value'] == "2" |
|
1367 | 1367 | assert rule.type_matrix["b"]['type'] == "unicode" |
|
1368 | 1368 | |
|
1369 | 1369 | |
|
1370 | 1370 | @pytest.mark.usefixtures('base_app', 'with_migrations') |
|
1371 | 1371 | class TestViewsWithForms(object): |
|
1372 | 1372 | def test_bad_csrf(self): |
|
1373 | 1373 | from appenlight.forms import CSRFException |
|
1374 | 1374 | from appenlight.views.index import register |
|
1375 | 1375 | post_data = {'dupa': 'dupa'} |
|
1376 | 1376 | request = testing.DummyRequest(post=post_data) |
|
1377 | 1377 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1378 | 1378 | with pytest.raises(CSRFException): |
|
1379 | 1379 | register(request) |
|
1380 | 1380 | |
|
1381 | 1381 | def test_proper_csrf(self): |
|
1382 | 1382 | from appenlight.views.index import register |
|
1383 | 1383 | request = pyramid.threadlocal.get_current_request() |
|
1384 | 1384 | post_data = {'dupa': 'dupa', |
|
1385 | 1385 | 'csrf_token': request.session.get_csrf_token()} |
|
1386 | 1386 | request = testing.DummyRequest(post=post_data) |
|
1387 | 1387 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1388 | 1388 | result = register(request) |
|
1389 | 1389 | assert result['form'].errors['email'][0] == 'This field is required.' |
|
1390 | 1390 | |
|
1391 | 1391 | |
|
1392 | 1392 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'default_data') |
|
1393 | 1393 | class TestRegistration(object): |
|
1394 | 1394 | def test_invalid_form(self): |
|
1395 | 1395 | from appenlight.views.index import register |
|
1396 | 1396 | request = pyramid.threadlocal.get_current_request() |
|
1397 | 1397 | post_data = {'user_name': '', |
|
1398 | 1398 | 'user_password': '', |
|
1399 | 1399 | 'email': '', |
|
1400 | 1400 | 'csrf_token': request.session.get_csrf_token()} |
|
1401 | 1401 | request = testing.DummyRequest(post=post_data) |
|
1402 | 1402 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1403 | 1403 | result = register(request) |
|
1404 | 1404 | assert result['form'].errors['user_name'][0] == \ |
|
1405 | 1405 | 'This field is required.' |
|
1406 | 1406 | |
|
1407 | 1407 | def test_valid_form(self): |
|
1408 | 1408 | from appenlight.views.index import register |
|
1409 | 1409 | from ziggurat_foundations.models.services.user import UserService |
|
1410 | 1410 | request = pyramid.threadlocal.get_current_request() |
|
1411 | 1411 | post_data = {'user_name': 'foo', |
|
1412 | 1412 | 'user_password': 'barr', |
|
1413 | 1413 | 'email': 'test@test.foo', |
|
1414 | 1414 | 'csrf_token': request.session.get_csrf_token()} |
|
1415 | 1415 | request = testing.DummyRequest(post=post_data) |
|
1416 | 1416 | request.add_flash_to_headers = mock.Mock() |
|
1417 | 1417 | request.POST = webob.multidict.MultiDict(request.POST) |
|
1418 | 1418 | assert UserService.by_user_name('foo') is None |
|
1419 | 1419 | register(request) |
|
1420 | 1420 | user = UserService.by_user_name('foo') |
|
1421 | 1421 | assert user.user_name == 'foo' |
|
1422 | assert len(user.user_password) | |
|
|
1422 | assert len(user.user_password) >= 60 | |
|
1423 | 1423 | |
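The strengthened assertion verifies the stored password is a hash, not the 4-character plaintext. Assuming bcrypt-style hashing (ziggurat_foundations' default scheme, an assumption here), digests are exactly 60 characters:

    # Sketch: a bcrypt digest looks like $2b$12$<22-char salt><31-char hash>
    import bcrypt
    hashed = bcrypt.hashpw(b'barr', bcrypt.gensalt())
    assert len(hashed) == 60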
|
1424 | 1424 | |
|
1425 | 1425 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables', |
|
1426 | 1426 | 'default_user') |
|
1427 | 1427 | class TestApplicationCreation(object): |
|
1428 | 1428 | def test_wrong_data(self): |
|
1429 | 1429 | import appenlight.views.applications as applications |
|
1430 | 1430 | from ziggurat_foundations.models.services.user import UserService |
|
1431 | 1431 | request = pyramid.threadlocal.get_current_request() |
|
1432 | 1432 | request.user = UserService.by_user_name('testuser') |
|
1433 | 1433 | request.unsafe_json_body = {} |
|
1434 | 1434 | request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token() |
|
1435 | 1435 | response = applications.application_create(request) |
|
1436 | 1436 | assert response.code == 422 |
|
1437 | 1437 | |
|
1438 | 1438 | def test_proper_data(self): |
|
1439 | 1439 | import appenlight.views.applications as applications |
|
1440 | 1440 | from ziggurat_foundations.models.services.user import UserService |
|
1441 | 1441 | |
|
1442 | 1442 | request = pyramid.threadlocal.get_current_request() |
|
1443 | 1443 | request.user = UserService.by_user_name('testuser') |
|
1444 | 1444 | request.unsafe_json_body = {"resource_name": "app name", |
|
1445 | 1445 | "domains": "foo"} |
|
1446 | 1446 | request.headers['X-XSRF-TOKEN'] = request.session.get_csrf_token() |
|
1447 | 1447 | app_dict = applications.application_create(request) |
|
1448 | 1448 | assert app_dict['public_key'] is not None |
|
1449 | 1449 | assert app_dict['api_key'] is not None |
|
1450 | 1450 | assert app_dict['resource_name'] == 'app name' |
|
1451 | 1451 | assert app_dict['owner_group_id'] is None |
|
1452 | 1452 | assert app_dict['resource_id'] is not None |
|
1453 | 1453 | assert app_dict['default_grouping'] == 'url_traceback' |
|
1454 | 1454 | assert app_dict['possible_permissions'] == ('view', 'update_reports') |
|
1455 | 1455 | assert app_dict['slow_report_threshold'] == 10 |
|
1456 | 1456 | assert app_dict['owner_user_name'] == 'testuser' |
|
1457 | 1457 | assert app_dict['owner_user_id'] == request.user.id |
|
1458 | 1458 | assert app_dict['domains'] == 'foo' |
|
1459 | 1459 | assert app_dict['postprocessing_rules'] == [] |
|
1460 | 1460 | assert app_dict['error_report_threshold'] == 10 |
|
1461 | 1461 | assert app_dict['allow_permanent_storage'] is False |
|
1462 | 1462 | assert app_dict['resource_type'] == 'application' |
|
1463 | 1463 | assert app_dict['current_permissions'] == [] |
|
1464 | 1464 | |
|
1465 | 1465 | |
|
1466 | 1466 | @pytest.mark.usefixtures('default_application') |
|
1467 | 1467 | @pytest.mark.usefixtures('base_app', 'with_migrations', 'clean_tables') |
|
1468 | 1468 | class TestAPISentryView(object): |
|
1469 | 1469 | def test_no_payload(self, default_application): |
|
1470 | 1470 | import colander |
|
1471 | 1471 | from appenlight.models.services.application import ApplicationService |
|
1472 | 1472 | from appenlight.views.api import sentry_compat |
|
1473 | 1473 | from appenlight.lib.request import JSONException |
|
1474 | 1474 | |
|
1475 | 1475 | context = DummyContext() |
|
1476 | 1476 | context.resource = ApplicationService.by_id(1) |
|
1477 | 1477 | request = testing.DummyRequest( |
|
1478 | 1478 | headers={'Content-Type': 'application/json'}) |
|
1479 | 1479 | request.unsafe_json_body = '' |
|
1480 | 1480 | request.context = context |
|
1481 | 1481 | route = mock.Mock() |
|
1482 | 1482 | route.name = 'api_sentry' |
|
1483 | 1483 | request.matched_route = route |
|
1484 | 1484 | with pytest.raises(JSONException): |
|
1485 | 1485 | sentry_compat(request) |
|
1486 | 1486 | |
|
1487 | 1487 | def test_java_client_payload(self): |
|
1488 | 1488 | from appenlight.views.api import sentry_compat |
|
1489 | 1489 | from appenlight.models.services.application import ApplicationService |
|
1490 | 1490 | from appenlight.models.report_group import ReportGroup |
|
1491 | 1491 | route = mock.Mock() |
|
1492 | 1492 | route.name = 'api_sentry' |
|
1493 | 1493 | request = pyramid.threadlocal.get_current_request() |
|
1494 | 1494 | context = DummyContext() |
|
1495 | 1495 | context.resource = ApplicationService.by_id(1) |
|
1496 | 1496 | context.resource.allow_permanent_storage = True |
|
1497 | 1497 | request.context = context |
|
1498 | 1498 | request.matched_route = route |
|
1499 | 1499 | request.body = b'eJy1UmFr2zAQ/S0T+7BCLOzYThp/C6xjG6SDLd/GCBf57Ki' \ |
|
1500 | 1500 | b'RJSHJJiXkv+/UlC7p2kAZA33Ru6f33t1pz3BAHVayZhWr87' \ |
|
1501 | 1501 | b'JMs+I6q3MsrifFep2vc1iXM1HMpgBTNmIdeg8tEvlmJ9AGa' \ |
|
1502 | 1502 | b'fQ7goOkQoDOUmGcZpMkLZO0WGZFRadMiaHIR1EVnTMu3k3b' \ |
|
1503 | 1503 | b'oiMgqJrXpgOpOVjLLTiPkWAVhMa4jih3MAAholfWyUDAksz' \ |
|
1504 | 1504 | b'm1iopICbg8fWH52B8VWXZVYwHrWfV/jBipD2gW2no8CFMa5' \ |
|
1505 | 1505 | b'JButCDSjoQG6mR6LgLDojPPn/7sbydL25ep34HGl+y3DiE+' \ |
|
1506 | 1506 | b'lH0xXBXjMzFBsXW99SS7pWKYXRw91zqgK4BgZ4/DZVVP/cs' \ |
|
1507 | 1507 | b'3NuzSZPfAKqP2Cdj4tw7U/cKH0fEFeiWQFqE2FIHAmMPjaN' \ |
|
1508 | 1508 | b'Y/kHvbzY/JqdHUq9o/KxqQHkcsabX4piDuT4aK+pXG1ZNi/' \ |
|
1509 | 1509 | b'IwOpEyruXC1LiB3vPO3BmOOxTUCIqv5LIg5H12oh9cf0l+P' \ |
|
1510 | 1510 | b'MvP5P8kddgoFIEvMGzM5cRSD2aLJ6qTdHKm6nv9pPcRFba0' \ |
|
1511 | 1511 | b'Kd0eleeCFuGN+9JZ9TaXIn/V5JYMBvxXg3L6PwzSE4dkfOb' \ |
|
1512 | 1512 | b'w7CtfWmP85SdCs8OvA53fUV19cg==' |
|
1513 | 1513 | sentry_compat(request) |
|
1514 | 1514 | query = DBSession.query(ReportGroup) |
|
1515 | 1515 | report = query.first() |
|
1516 | 1516 | assert query.count() == 1 |
|
1517 | 1517 | assert report.total_reports == 1 |
|
1518 | 1518 | |
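The 'eJy...' body above is base64 text wrapping zlib-compressed JSON (the standard Sentry wire encoding); a sketch for inspecting such a payload outside the view, using only the standard library:

    import base64
    import json
    import zlib

    def decode_sentry_payload(raw: bytes) -> dict:
        # base64 -> deflate -> JSON document
        return json.loads(zlib.decompress(base64.b64decode(raw)))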
|
1519 | 1519 | def test_ruby_client_payload(self): |
|
1520 | 1520 | from appenlight.views.api import sentry_compat |
|
1521 | 1521 | from appenlight.models.services.application import ApplicationService |
|
1522 | 1522 | from appenlight.models.report_group import ReportGroup |
|
1523 | 1523 | from appenlight.tests.payload_examples import SENTRY_RUBY_ENCODED |
|
1524 | 1524 | route = mock.Mock() |
|
1525 | 1525 | route.name = 'api_sentry' |
|
1526 | 1526 | request = testing.DummyRequest( |
|
1527 | 1527 | headers={'Content-Type': 'application/octet-stream', |
|
1528 | 1528 | 'User-Agent': 'sentry-ruby/1.0.0', |
|
1529 | 1529 | 'X-Sentry-Auth': 'Sentry sentry_version=5, ' |
|
1530 | 1530 | 'sentry_client=raven-ruby/1.0.0, ' |
|
1531 | 1531 | 'sentry_timestamp=1462378483, ' |
|
1532 | 1532 | 'sentry_key=xxx, sentry_secret=xxx' |
|
1533 | 1533 | }) |
|
1534 | 1534 | context = DummyContext() |
|
1535 | 1535 | context.resource = ApplicationService.by_id(1) |
|
1536 | 1536 | context.resource.allow_permanent_storage = True |
|
1537 | 1537 | request.context = context |
|
1538 | 1538 | request.matched_route = route |
|
1539 | 1539 | request.body = SENTRY_RUBY_ENCODED |
|
1540 | 1540 | sentry_compat(request) |
|
1541 | 1541 | query = DBSession.query(ReportGroup) |
|
1542 | 1542 | report = query.first() |
|
1543 | 1543 | assert query.count() == 1 |
|
1544 | 1544 | assert report.total_reports == 1 |
|
1545 | 1545 | |
|
1546 | 1546 | def test_python_client_decoded_payload(self): |
|
1547 | 1547 | from appenlight.views.api import sentry_compat |
|
1548 | 1548 | from appenlight.models.services.application import ApplicationService |
|
1549 | 1549 | from appenlight.models.report_group import ReportGroup |
|
1550 | 1550 | from appenlight.tests.payload_examples import SENTRY_PYTHON_PAYLOAD_7 |
|
1551 | 1551 | route = mock.Mock() |
|
1552 | 1552 | route.name = 'api_sentry' |
|
1553 | 1553 | request = pyramid.threadlocal.get_current_request() |
|
1554 | 1554 | context = DummyContext() |
|
1555 | 1555 | context.resource = ApplicationService.by_id(1) |
|
1556 | 1556 | context.resource.allow_permanent_storage = True |
|
1557 | 1557 | request.context = context |
|
1558 | 1558 | request.matched_route = route |
|
1559 | 1559 | request.body = json.dumps(SENTRY_PYTHON_PAYLOAD_7).encode('utf8') |
|
1560 | 1560 | sentry_compat(request) |
|
1561 | 1561 | query = DBSession.query(ReportGroup) |
|
1562 | 1562 | report = query.first() |
|
1563 | 1563 | assert query.count() == 1 |
|
1564 | 1564 | assert report.total_reports == 1 |
|
1565 | 1565 | |
|
1566 | 1566 | def test_python_client_encoded_payload(self): |
|
1567 | 1567 | from appenlight.views.api import sentry_compat |
|
1568 | 1568 | from appenlight.models.services.application import ApplicationService |
|
1569 | 1569 | from appenlight.models.report_group import ReportGroup |
|
1570 | 1570 | from appenlight.tests.payload_examples import SENTRY_PYTHON_ENCODED |
|
1571 | 1571 | route = mock.Mock() |
|
1572 | 1572 | route.name = 'api_sentry' |
|
1573 | 1573 | request = testing.DummyRequest( |
|
1574 | 1574 | headers={'Content-Type': 'application/octet-stream', |
|
1575 | 1575 | 'Content-Encoding': 'deflate', |
|
1576 | 1576 | 'User-Agent': 'sentry-ruby/1.0.0', |
|
1577 | 1577 | 'X-Sentry-Auth': 'Sentry sentry_version=5, ' |
|
1578 | 1578 | 'sentry_client=raven-ruby/1.0.0, ' |
|
1579 | 1579 | 'sentry_timestamp=1462378483, ' |
|
1580 | 1580 | 'sentry_key=xxx, sentry_secret=xxx' |
|
1581 | 1581 | }) |
|
1582 | 1582 | context = DummyContext() |
|
1583 | 1583 | context.resource = ApplicationService.by_id(1) |
|
1584 | 1584 | context.resource.allow_permanent_storage = True |
|
1585 | 1585 | request.context = context |
|
1586 | 1586 | request.matched_route = route |
|
1587 | 1587 | request.body = SENTRY_PYTHON_ENCODED |
|
1588 | 1588 | sentry_compat(request) |
|
1589 | 1589 | query = DBSession.query(ReportGroup) |
|
1590 | 1590 | report = query.first() |
|
1591 | 1591 | assert query.count() == 1 |
|
1592 | 1592 | assert report.total_reports == 1 |
@@ -1,36 +1,36 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from pyramid.view import view_config |
|
18 | 18 | from pyramid.httpexceptions import HTTPFound, HTTPNotFound |
|
19 | 19 | from pyramid import security |
|
20 | from appenlight.models.user import User | |
|
|
20 | from ziggurat_foundations.models.services.user import UserService | |
|
21 | 21 | |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | log = logging.getLogger(__name__) |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | @view_config(route_name='section_view', permission='root_administration', |
|
28 | 28 | match_param=['section=admin_section', 'view=relogin_user'], |
|
29 | 29 | renderer='json', request_method='GET') |
|
30 | 30 | def relogin_to_user(request): |
|
31 | user = User.by_id(request.GET.get('user_id')) | |
|
31 | user = UserService.by_id(request.GET.get('user_id')) | |
|
32 | 32 | if not user: |
|
33 | 33 | return HTTPNotFound() |
|
34 | 34 | headers = security.remember(request, user.id) |
|
35 | 35 | return HTTPFound(location=request.route_url('/'), |
|
36 | 36 | headers=headers) |
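This small hunk shows the pattern the whole changeset applies: classmethod-style helpers on the appenlight models move to ziggurat_foundations service classes that receive the model (or its id) explicitly. Schematically:

    # before (model classmethods, removed)   after (service calls, added)
    # User.by_id(user_id)               ->   UserService.by_id(user_id)
    # User.by_user_name(name)           ->   UserService.by_user_name(name)
    # resource.perms_for_user(user)     ->   ResourceService.perms_for_user(resource, user)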
@@ -1,755 +1,760 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import copy |
|
18 | 18 | import json |
|
19 | 19 | import logging |
|
20 | 20 | import six |
|
21 | 21 | |
|
22 | 22 | from datetime import datetime, timedelta |
|
23 | 23 | |
|
24 | 24 | import colander |
|
25 | 25 | from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity |
|
26 | 26 | from pyramid.view import view_config |
|
27 | 27 | from webob.multidict import MultiDict |
|
28 | 28 | from zope.sqlalchemy import mark_changed |
|
29 | 29 | from ziggurat_foundations.permissions import ANY_PERMISSION |
|
30 | 30 | |
|
31 | 31 | import appenlight.forms as forms |
|
32 | 32 | from appenlight.models import DBSession |
|
33 | 33 | from appenlight.models.resource import Resource |
|
34 | 34 | from appenlight.models.application import Application |
|
35 | 35 | from appenlight.models.application_postprocess_conf import \ |
|
36 | 36 | ApplicationPostprocessConf |
|
37 | from appenlight.models.user import User | |
|
|
37 | from ziggurat_foundations.models.services.user import UserService | |
|
38 | from ziggurat_foundations.models.services.resource import ResourceService | |
|
39 | from ziggurat_foundations.models.services.user_resource_permission import UserResourcePermissionService | |
|
38 | 40 | from appenlight.models.user_resource_permission import UserResourcePermission |
|
39 | 41 | from appenlight.models.group_resource_permission import GroupResourcePermission |
|
40 | 42 | from appenlight.models.services.application import ApplicationService |
|
41 | 43 | from appenlight.models.services.application_postprocess_conf import \ |
|
42 | 44 | ApplicationPostprocessConfService |
|
43 | 45 | from appenlight.models.services.group import GroupService |
|
44 | 46 | from appenlight.models.services.group_resource_permission import \ |
|
45 | 47 | GroupResourcePermissionService |
|
46 | 48 | from appenlight.models.services.request_metric import RequestMetricService |
|
47 | 49 | from appenlight.models.services.report_group import ReportGroupService |
|
48 | 50 | from appenlight.models.services.slow_call import SlowCallService |
|
49 | 51 | from appenlight.lib import helpers as h |
|
50 | 52 | from appenlight.lib.utils import build_filter_settings_from_query_dict |
|
51 | 53 | from appenlight.security import RootFactory |
|
52 | 54 | from appenlight.models.report import REPORT_TYPE_MATRIX |
|
53 | 55 | from appenlight.validators import build_rule_schema |
|
54 | 56 | |
|
55 | 57 | _ = str |
|
56 | 58 | |
|
57 | 59 | log = logging.getLogger(__name__) |
|
58 | 60 | |
|
59 | 61 | |
|
60 | 62 | def app_not_found(request, id=None): |
|
61 | 63 | """ |
|
62 | 64 | Redirects to the applications index when the app is not found and sets a flash message |
|
63 | 65 | """ |
|
64 | 66 | request.session.flash(_('Application not found'), 'warning') |
|
65 | 67 | return HTTPFound( |
|
66 | 68 | location=request.route_url('applications', action='index')) |
|
67 | 69 | |
|
68 | 70 | |
|
69 | 71 | @view_config(route_name='applications_no_id', |
|
70 | 72 | renderer='json', request_method="GET", permission='authenticated') |
|
71 | 73 | def applications_list(request): |
|
72 | 74 | """ |
|
73 | 75 | Applications list |
|
74 | 76 | |
|
75 | 77 | if query params contain ?permission=foo, it will list applications |

76 | 78 | the user holds one of those permissions for, |

77 | 79 | otherwise only the list of owned applications will |

78 | 80 | be returned |
|
79 | 81 | |
|
80 | 82 | appending ?root_list as an administrator will list all |

81 | 83 | applications in the system |
|
82 | 84 | |
|
83 | 85 | """ |
|
84 | 86 | is_root = request.has_permission('root_administration', |
|
85 | 87 | RootFactory(request)) |
|
86 | 88 | if is_root and request.GET.get('root_list'): |
|
87 | 89 | resources = Resource.all().order_by(Resource.resource_name) |
|
88 | 90 | resource_type = request.GET.get('resource_type', 'application') |
|
89 | 91 | if resource_type: |
|
90 | 92 | resources = resources.filter( |
|
91 | 93 | Resource.resource_type == resource_type) |
|
92 | 94 | else: |
|
93 | 95 | permissions = request.params.getall('permission') |
|
94 | 96 | if permissions: |
|
95 | resources = request.user.resources_with_perms( | |
|
|
97 | resources = UserService.resources_with_perms( | |
|
98 | request.user, | |
|
96 | 99 | permissions, |
|
97 | 100 | resource_types=[request.GET.get('resource_type', |
|
98 | 101 | 'application')]) |
|
99 | 102 | else: |
|
100 | 103 | resources = request.user.resources.filter( |
|
101 | 104 | Application.resource_type == request.GET.get( |
|
102 | 105 | 'resource_type', |
|
103 | 106 | 'application')) |
|
104 | 107 | return [r.get_dict(include_keys=['resource_id', 'resource_name', 'domains', |
|
105 | 108 | 'owner_user_name', 'owner_group_name']) |
|
106 | 109 | for |
|
107 | 110 | r in resources] |
|
108 | 111 | |
|
109 | 112 | |
|
110 | 113 | @view_config(route_name='applications', renderer='json', |
|
111 | 114 | request_method="GET", permission='view') |
|
112 | 115 | def application_GET(request): |
|
113 | 116 | resource = request.context.resource |
|
114 | 117 | include_sensitive_info = False |
|
115 | 118 | if request.has_permission('edit'): |
|
116 | 119 | include_sensitive_info = True |
|
117 | 120 | resource_dict = resource.get_dict( |
|
118 | 121 | include_perms=include_sensitive_info, |
|
119 | 122 | include_processing_rules=include_sensitive_info) |
|
120 | 123 | return resource_dict |
|
121 | 124 | |
|
122 | 125 | |
|
123 | 126 | @view_config(route_name='applications_no_id', request_method="POST", |
|
124 | 127 | renderer='json', permission='create_resources') |
|
125 | 128 | def application_create(request): |
|
126 | 129 | """ |
|
127 | 130 | Creates new application instances |
|
128 | 131 | """ |
|
129 | 132 | user = request.user |
|
130 | 133 | form = forms.ApplicationCreateForm(MultiDict(request.unsafe_json_body), |
|
131 | 134 | csrf_context=request) |
|
132 | 135 | if form.validate(): |
|
133 | 136 | session = DBSession() |
|
134 | 137 | resource = Application() |
|
135 | 138 | DBSession.add(resource) |
|
136 | 139 | form.populate_obj(resource) |
|
137 | 140 | resource.api_key = resource.generate_api_key() |
|
138 | 141 | user.resources.append(resource) |
|
139 | 142 | request.session.flash(_('Application created')) |
|
140 | 143 | DBSession.flush() |
|
141 | 144 | mark_changed(session) |
|
142 | 145 | else: |
|
143 | 146 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
144 | 147 | |
|
145 | 148 | return resource.get_dict() |
|
146 | 149 | |
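A sketch of the client-side call this view serves; the field names come from the tests above, while the host and route path are placeholders:

    import requests  # hypothetical external client

    payload = {'resource_name': 'app name', 'domains': 'example.com'}
    resp = requests.post('https://appenlight.example/applications',   # placeholder URL
                         json=payload,
                         headers={'X-XSRF-TOKEN': '<session csrf token>'})
    app_dict = resp.json()   # carries api_key, public_key, resource_id, ...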
|
147 | 150 | |
|
148 | 151 | @view_config(route_name='applications', request_method="PATCH", |
|
149 | 152 | renderer='json', permission='edit') |
|
150 | 153 | def application_update(request): |
|
151 | 154 | """ |
|
152 | 155 | Updates main application configuration |
|
153 | 156 | """ |
|
154 | 157 | resource = request.context.resource |
|
155 | 158 | if not resource: |
|
156 | 159 | return app_not_found(request) |
|
157 | 160 | |
|
158 | 161 | # disallow setting permanent storage by non-admins |
|
159 | 162 | # use default/non-resource based context for this check |
|
160 | 163 | req_dict = copy.copy(request.unsafe_json_body) |
|
161 | 164 | if not request.has_permission('root_administration', RootFactory(request)): |
|
162 | 165 | req_dict['allow_permanent_storage'] = '' |
|
163 | 166 | if not req_dict.get('uptime_url'): |
|
164 | 167 | # needed because the validator is still triggered by default |
|
165 | 168 | req_dict.pop('uptime_url', '') |
|
166 | 169 | application_form = forms.ApplicationUpdateForm(MultiDict(req_dict), |
|
167 | 170 | csrf_context=request) |
|
168 | 171 | if application_form.validate(): |
|
169 | 172 | application_form.populate_obj(resource) |
|
170 | 173 | request.session.flash(_('Application updated')) |
|
171 | 174 | else: |
|
172 | 175 | return HTTPUnprocessableEntity(body=application_form.errors_json) |
|
173 | 176 | |
|
174 | 177 | include_sensitive_info = False |
|
175 | 178 | if request.has_permission('edit'): |
|
176 | 179 | include_sensitive_info = True |
|
177 | 180 | resource_dict = resource.get_dict( |
|
178 | 181 | include_perms=include_sensitive_info, |
|
179 | 182 | include_processing_rules=include_sensitive_info) |
|
180 | 183 | return resource_dict |
|
181 | 184 | |
|
182 | 185 | |
|
183 | 186 | @view_config(route_name='applications_property', match_param='key=api_key', |
|
184 | 187 | request_method="POST", renderer='json', |
|
185 | 188 | permission='delete') |
|
186 | 189 | def application_regenerate_key(request): |
|
187 | 190 | """ |
|
188 | 191 | Regenerates API keys for application |
|
189 | 192 | """ |
|
190 | 193 | resource = request.context.resource |
|
191 | 194 | |
|
192 | 195 | form = forms.CheckPasswordForm(MultiDict(request.unsafe_json_body), |
|
193 | 196 | csrf_context=request) |
|
194 | 197 | form.password.user = request.user |
|
195 | 198 | |
|
196 | 199 | if form.validate(): |
|
197 | 200 | resource.api_key = resource.generate_api_key() |
|
198 | 201 | resource.public_key = resource.generate_api_key() |
|
199 | 202 | msg = 'API keys regenerated - please update your application config.' |
|
200 | 203 | request.session.flash(_(msg)) |
|
201 | 204 | else: |
|
202 | 205 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
203 | 206 | include_sensitive_info = False |
|
204 | 207 | if request.has_permission('edit'): |
|
205 | 208 | include_sensitive_info = True |
|
206 | 209 | resource_dict = resource.get_dict( |
|
207 | 210 | include_perms=include_sensitive_info, |
|
208 | 211 | include_processing_rules=include_sensitive_info) |
|
209 | 212 | return resource_dict |
|
210 | 213 | |
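generate_api_key itself is not part of this hunk; a hedged sketch of what such a helper typically looks like (assumed, not the Application model's actual implementation):

    import secrets

    def generate_api_key(length: int = 32) -> str:
        # random urlsafe token trimmed to the requested length
        return secrets.token_urlsafe(length)[:length]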
|
211 | 214 | |
|
212 | 215 | @view_config(route_name='applications_property', |
|
213 | 216 | match_param='key=delete_resource', |
|
214 | 217 | request_method="PATCH", renderer='json', permission='delete') |
|
215 | 218 | def application_remove(request): |
|
216 | 219 | """ |
|
217 | 220 | Removes application resources |
|
218 | 221 | """ |
|
219 | 222 | resource = request.context.resource |
|
220 | 223 | # we need polymorphic object here, to properly launch sqlalchemy events |
|
221 | 224 | resource = ApplicationService.by_id(resource.resource_id) |
|
222 | 225 | form = forms.CheckPasswordForm(MultiDict(request.safe_json_body or {}), |
|
223 | 226 | csrf_context=request) |
|
224 | 227 | form.password.user = request.user |
|
225 | 228 | if form.validate(): |
|
226 | 229 | DBSession.delete(resource) |
|
227 | 230 | request.session.flash(_('Application removed')) |
|
228 | 231 | else: |
|
229 | 232 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
230 | 233 | |
|
231 | 234 | return True |
|
232 | 235 | |
|
233 | 236 | |
|
234 | 237 | @view_config(route_name='applications_property', match_param='key=owner', |
|
235 | 238 | request_method="PATCH", renderer='json', permission='delete') |
|
236 | 239 | def application_ownership_transfer(request): |
|
237 | 240 | """ |
|
238 | 241 | Allows the application owner to transfer ownership to another user |
|
239 | 242 | """ |
|
240 | 243 | resource = request.context.resource |
|
241 | 244 | form = forms.ChangeApplicationOwnerForm( |
|
242 | 245 | MultiDict(request.safe_json_body or {}), csrf_context=request) |
|
243 | 246 | form.password.user = request.user |
|
244 | 247 | if form.validate(): |
|
245 | user = User.by_user_name(form.user_name.data) | |
|
248 | user = UserService.by_user_name(form.user_name.data) | |
|
246 | 249 | user.resources.append(resource) |
|
247 | 250 | # remove integrations to not leak security data of external applications |
|
248 | 251 | for integration in resource.integrations[:]: |
|
249 | 252 | resource.integrations.remove(integration) |
|
250 | 253 | request.session.flash(_('Application transferred')) |
|
251 | 254 | else: |
|
252 | 255 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
253 | 256 | return True |
|
254 | 257 | |
|
255 | 258 | |
|
256 | 259 | @view_config(route_name='applications_property', |
|
257 | 260 | match_param='key=postprocessing_rules', renderer='json', |
|
258 | 261 | request_method='POST', permission='edit') |
|
259 | 262 | def applications_postprocess_POST(request): |
|
260 | 263 | """ |
|
261 | 264 | Creates new postprocessing rules for applications |
|
262 | 265 | """ |
|
263 | 266 | resource = request.context.resource |
|
264 | 267 | conf = ApplicationPostprocessConf() |
|
265 | 268 | conf.do = 'postprocess' |
|
266 | 269 | conf.new_value = '1' |
|
267 | 270 | resource.postprocess_conf.append(conf) |
|
268 | 271 | DBSession.flush() |
|
269 | 272 | return conf.get_dict() |
|
270 | 273 | |
|
271 | 274 | |
|
272 | 275 | @view_config(route_name='applications_property', |
|
273 | 276 | match_param='key=postprocessing_rules', renderer='json', |
|
274 | 277 | request_method='PATCH', permission='edit') |
|
275 | 278 | def applications_postprocess_PATCH(request): |
|
276 | 279 | """ |
|
277 | 280 | Updates existing postprocessing rules for applications |
|
278 | 281 | """ |
|
279 | 282 | json_body = request.unsafe_json_body |
|
280 | 283 | |
|
281 | 284 | schema = build_rule_schema(json_body['rule'], REPORT_TYPE_MATRIX) |
|
282 | 285 | try: |
|
283 | 286 | schema.deserialize(json_body['rule']) |
|
284 | 287 | except colander.Invalid as exc: |
|
285 | 288 | return HTTPUnprocessableEntity(body=json.dumps(exc.asdict())) |
|
286 | 289 | |
|
287 | 290 | resource = request.context.resource |
|
288 | 291 | conf = ApplicationPostprocessConfService.by_pkey_and_resource_id( |
|
289 | 292 | json_body['pkey'], resource.resource_id) |
|
290 | 293 | conf.rule = request.unsafe_json_body['rule'] |
|
291 | 294 | # for now hardcode int since we don't support anything else so far |
|
292 | 295 | conf.new_value = int(request.unsafe_json_body['new_value']) |
|
293 | 296 | return conf.get_dict() |
|
294 | 297 | |
|
295 | 298 | |
|
296 | 299 | @view_config(route_name='applications_property', |
|
297 | 300 | match_param='key=postprocessing_rules', renderer='json', |
|
298 | 301 | request_method='DELETE', permission='edit') |
|
299 | 302 | def applications_postprocess_DELETE(request): |
|
300 | 303 | """ |
|
301 | 304 | Removes application postprocessing rules |
|
302 | 305 | """ |
|
303 | 306 | form = forms.ReactorForm(request.POST, csrf_context=request) |
|
304 | 307 | resource = request.context.resource |
|
305 | 308 | if form.validate(): |
|
306 | 309 | for postprocess_conf in resource.postprocess_conf: |
|
307 | 310 | if postprocess_conf.pkey == int(request.GET['pkey']): |
|
308 | 311 | # remove rule |
|
309 | 312 | DBSession.delete(postprocess_conf) |
|
310 | 313 | return True |
|
311 | 314 | |
|
312 | 315 | |
|
313 | 316 | @view_config(route_name='applications_property', |
|
314 | 317 | match_param='key=report_graphs', renderer='json', |
|
315 | 318 | permission='view') |
|
316 | 319 | @view_config(route_name='applications_property', |
|
317 | 320 | match_param='key=slow_report_graphs', renderer='json', |
|
318 | 321 | permission='view') |
|
319 | 322 | def get_application_report_stats(request): |
|
320 | 323 | query_params = request.GET.mixed() |
|
321 | 324 | query_params['resource'] = (request.context.resource.resource_id,) |
|
322 | 325 | |
|
323 | 326 | filter_settings = build_filter_settings_from_query_dict(request, |
|
324 | 327 | query_params) |
|
325 | 328 | if not filter_settings.get('end_date'): |
|
326 | 329 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
327 | 330 | filter_settings['end_date'] = end_date |
|
328 | 331 | |
|
329 | 332 | if not filter_settings.get('start_date'): |
|
330 | 333 | delta = timedelta(hours=1) |
|
331 | 334 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
332 | 335 | |
|
333 | 336 | result = ReportGroupService.get_report_stats(request, filter_settings) |
|
334 | 337 | return result |
|
335 | 338 | |
|
336 | 339 | |
|
337 | 340 | @view_config(route_name='applications_property', |
|
338 | 341 | match_param='key=metrics_graphs', renderer='json', |
|
339 | 342 | permission='view') |
|
340 | 343 | def metrics_graphs(request): |
|
341 | 344 | """ |
|
342 | 345 | Handles metric dashboard graphs |
|
343 | 346 | Returns information for time/tier breakdown |
|
344 | 347 | """ |
|
345 | 348 | query_params = request.GET.mixed() |
|
346 | 349 | query_params['resource'] = (request.context.resource.resource_id,) |
|
347 | 350 | |
|
348 | 351 | filter_settings = build_filter_settings_from_query_dict(request, |
|
349 | 352 | query_params) |
|
350 | 353 | |
|
351 | 354 | if not filter_settings.get('end_date'): |
|
352 | 355 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
353 | 356 | filter_settings['end_date'] = end_date |
|
354 | 357 | |
|
355 | 358 | delta = timedelta(hours=1) |
|
356 | 359 | if not filter_settings.get('start_date'): |
|
357 | 360 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
358 | 361 | if filter_settings['end_date'] <= filter_settings['start_date']: |
|
359 | 362 | filter_settings['end_date'] = filter_settings['start_date'] |
|
360 | 363 | |
|
361 | 364 | delta = filter_settings['end_date'] - filter_settings['start_date'] |
|
362 | 365 | if delta < h.time_deltas.get('12h')['delta']: |
|
363 | 366 | divide_by_min = 1 |
|
364 | 367 | elif delta <= h.time_deltas.get('3d')['delta']: |
|
365 | 368 | divide_by_min = 5.0 |
|
366 | 369 | elif delta >= h.time_deltas.get('2w')['delta']: |
|
367 | 370 | divide_by_min = 60.0 * 24 |
|
368 | 371 | else: |
|
369 | 372 | divide_by_min = 60.0 |
|
370 | 373 | |
|
371 | 374 | results = RequestMetricService.get_metrics_stats( |
|
372 | 375 | request, filter_settings) |
|
373 | 376 | # because requests are PER SECOND / we divide 1 min stats by 60 |
|
374 | 377 | # requests are normalized to 1 min average |
|
375 | 378 | # results are average seconds time spent per request in specific area |
|
376 | 379 | for point in results: |
|
377 | 380 | if point['requests']: |
|
378 | 381 | point['main'] = (point['main'] - point['sql'] - |
|
379 | 382 | point['nosql'] - point['remote'] - |
|
380 | 383 | point['tmpl'] - |
|
381 | 384 | point['custom']) / point['requests'] |
|
382 | 385 | point['sql'] = point['sql'] / point['requests'] |
|
383 | 386 | point['nosql'] = point['nosql'] / point['requests'] |
|
384 | 387 | point['remote'] = point['remote'] / point['requests'] |
|
385 | 388 | point['tmpl'] = point['tmpl'] / point['requests'] |
|
386 | 389 | point['custom'] = point['custom'] / point['requests'] |
|
387 | 390 | point['requests_2'] = point['requests'] / 60.0 / divide_by_min |
|
388 | 391 | |
|
389 | 392 | selected_types = ['main', 'sql', 'nosql', 'remote', 'tmpl', 'custom'] |
|
390 | 393 | |
|
391 | 394 | for point in results: |
|
392 | 395 | for stat_type in selected_types: |
|
393 | 396 | point[stat_type] = round(point.get(stat_type, 0), 3) |
|
394 | 397 | |
|
395 | 398 | return results |
|
396 | 399 | |
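A worked instance of the arithmetic above: 120 requests in one point, 60 s total in main and 12 s of that in SQL, on a range bucketed in 5-minute steps:

    point = {'requests': 120, 'main': 60.0, 'sql': 12.0, 'nosql': 0.0,
             'remote': 0.0, 'tmpl': 0.0, 'custom': 0.0}
    divide_by_min = 5.0   # bucket width for a <= 3 day range

    own_main = (point['main'] - point['sql'] - point['nosql'] -
                point['remote'] - point['tmpl'] - point['custom'])
    print(own_main / point['requests'])                  # 0.4 s/request outside subsystems
    print(point['sql'] / point['requests'])              # 0.1 s/request in SQL
    print(point['requests'] / 60.0 / divide_by_min)      # 0.4 requests per second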
|
397 | 400 | |
|
398 | 401 | @view_config(route_name='applications_property', |
|
399 | 402 | match_param='key=response_graphs', renderer='json', |
|
400 | 403 | permission='view') |
|
401 | 404 | def response_graphs(request): |
|
402 | 405 | """ |
|
403 | 406 | Handles dashboard information for avg. response time split by today, |

404 | 407 | 2 days ago and a week ago |
|
405 | 408 | """ |
|
406 | 409 | query_params = request.GET.mixed() |
|
407 | 410 | query_params['resource'] = (request.context.resource.resource_id,) |
|
408 | 411 | |
|
409 | 412 | filter_settings = build_filter_settings_from_query_dict(request, |
|
410 | 413 | query_params) |
|
411 | 414 | |
|
412 | 415 | if not filter_settings.get('end_date'): |
|
413 | 416 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
414 | 417 | filter_settings['end_date'] = end_date |
|
415 | 418 | |
|
416 | 419 | delta = timedelta(hours=1) |
|
417 | 420 | if not filter_settings.get('start_date'): |
|
418 | 421 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
419 | 422 | |
|
420 | 423 | result_now = RequestMetricService.get_metrics_stats( |
|
421 | 424 | request, filter_settings) |
|
422 | 425 | |
|
423 | 426 | filter_settings_2d = filter_settings.copy() |
|
424 | 427 | filter_settings_2d['start_date'] = filter_settings['start_date'] - \ |
|
425 | 428 | timedelta(days=2) |
|
426 | 429 | filter_settings_2d['end_date'] = filter_settings['end_date'] - \ |
|
427 | 430 | timedelta(days=2) |
|
428 | 431 | result_2d = RequestMetricService.get_metrics_stats( |
|
429 | 432 | request, filter_settings_2d) |
|
430 | 433 | |
|
431 | 434 | filter_settings_7d = filter_settings.copy() |
|
432 | 435 | filter_settings_7d['start_date'] = filter_settings['start_date'] - \ |
|
433 | 436 | timedelta(days=7) |
|
434 | 437 | filter_settings_7d['end_date'] = filter_settings['end_date'] - \ |
|
435 | 438 | timedelta(days=7) |
|
436 | 439 | result_7d = RequestMetricService.get_metrics_stats( |
|
437 | 440 | request, filter_settings_7d) |
|
438 | 441 | |
|
439 | 442 | plot_data = [] |
|
440 | 443 | |
|
441 | 444 | for item in result_now: |
|
442 | 445 | point = {'x': item['x'], 'today': 0, 'days_ago_2': 0, |
|
443 | 446 | 'days_ago_7': 0} |
|
444 | 447 | if item['requests']: |
|
445 | 448 | point['today'] = round(item['main'] / item['requests'], 3) |
|
446 | 449 | plot_data.append(point) |
|
447 | 450 | |
|
448 | 451 | for i, item in enumerate(result_2d[:len(plot_data)]): |
|
449 | 452 | plot_data[i]['days_ago_2'] = 0 |
|
450 | 453 | point = result_2d[i] |
|
451 | 454 | if point['requests']: |
|
452 | 455 | plot_data[i]['days_ago_2'] = round(point['main'] / |
|
453 | 456 | point['requests'], 3) |
|
454 | 457 | |
|
455 | 458 | for i, item in enumerate(result_7d[:len(plot_data)]): |
|
456 | 459 | plot_data[i]['days_ago_7'] = 0 |
|
457 | 460 | point = result_7d[i] |
|
458 | 461 | if point['requests']: |
|
459 | 462 | plot_data[i]['days_ago_7'] = round(point['main'] / |
|
460 | 463 | point['requests'], 3) |
|
461 | 464 | |
|
462 | 465 | return plot_data |
|
463 | 466 | |
|
464 | 467 | |
|
465 | 468 | @view_config(route_name='applications_property', |
|
466 | 469 | match_param='key=requests_graphs', renderer='json', |
|
467 | 470 | permission='view') |
|
468 | 471 | def requests_graphs(request): |
|
469 | 472 | """ |
|
470 | 473 | Handles dashboard information for the requests-per-second graph |

471 | 474 | over the selected time interval |
|
472 | 475 | """ |
|
473 | 476 | query_params = request.GET.mixed() |
|
474 | 477 | query_params['resource'] = (request.context.resource.resource_id,) |
|
475 | 478 | |
|
476 | 479 | filter_settings = build_filter_settings_from_query_dict(request, |
|
477 | 480 | query_params) |
|
478 | 481 | |
|
479 | 482 | if not filter_settings.get('end_date'): |
|
480 | 483 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
481 | 484 | filter_settings['end_date'] = end_date |
|
482 | 485 | |
|
483 | 486 | delta = timedelta(hours=1) |
|
484 | 487 | if not filter_settings.get('start_date'): |
|
485 | 488 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
486 | 489 | |
|
487 | 490 | result_now = RequestMetricService.get_metrics_stats( |
|
488 | 491 | request, filter_settings) |
|
489 | 492 | |
|
490 | 493 | delta = filter_settings['end_date'] - filter_settings['start_date'] |
|
491 | 494 | if delta < h.time_deltas.get('12h')['delta']: |
|
492 | 495 | seconds = h.time_deltas['1m']['minutes'] * 60.0 |
|
493 | 496 | elif delta <= h.time_deltas.get('3d')['delta']: |
|
494 | 497 | seconds = h.time_deltas['5m']['minutes'] * 60.0 |
|
495 | 498 | elif delta >= h.time_deltas.get('2w')['delta']: |
|
496 | 499 | seconds = h.time_deltas['24h']['minutes'] * 60.0 |
|
497 | 500 | else: |
|
498 | 501 | seconds = h.time_deltas['1h']['minutes'] * 60.0 |
|
499 | 502 | |
|
500 | 503 | for item in result_now: |
|
501 | 504 | if item['requests']: |
|
502 | 505 | item['requests'] = round(item['requests'] / seconds, 3) |
|
503 | 506 | return result_now |
|
504 | 507 | |
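The normalization in isolation: seconds is the bucket width, so a per-bucket count divided by it yields requests per second:

    seconds = 5 * 60.0        # a 5-minute bucket
    requests_in_bucket = 1500
    print(round(requests_in_bucket / seconds, 3))   # 5.0 req/s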
|
505 | 508 | |
|
506 | 509 | @view_config(route_name='applications_property', |
|
507 | 510 | match_param='key=apdex_stats', renderer='json', |
|
508 | 511 | permission='view') |
|
509 | 512 | def get_apdex_stats(request): |
|
510 | 513 | """ |
|
511 | 514 | Calculates the APDEX score per server for the dashboard |

512 | 515 | server stats boxes (upper right) |
|
513 | 516 | """ |
|
514 | 517 | query_params = request.GET.mixed() |
|
515 | 518 | query_params['resource'] = (request.context.resource.resource_id,) |
|
516 | 519 | |
|
517 | 520 | filter_settings = build_filter_settings_from_query_dict(request, |
|
518 | 521 | query_params) |
|
519 | 522 | # make sure we have only one resource here so we don't produce |

520 | 523 | # weird results when the wrong app is picked in the app selector |
|
521 | 524 | filter_settings['resource'] = [filter_settings['resource'][0]] |
|
522 | 525 | |
|
523 | 526 | if not filter_settings.get('end_date'): |
|
524 | 527 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
525 | 528 | filter_settings['end_date'] = end_date |
|
526 | 529 | |
|
527 | 530 | delta = timedelta(hours=1) |
|
528 | 531 | if not filter_settings.get('start_date'): |
|
529 | 532 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
530 | 533 | |
|
531 | 534 | return RequestMetricService.get_apdex_stats(request, filter_settings) |
|
532 | 535 | |
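The scoring itself lives in RequestMetricService; for reference, the classic Apdex formula it is based on (threshold classification is service-side and assumed here), expressed as a percentage:

    def apdex(satisfied: int, tolerating: int, total: int) -> float:
        # satisfied responses count fully, tolerating ones count half
        if not total:
            return 100.0
        return round((satisfied + tolerating / 2.0) / total * 100, 2)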
|
533 | 536 | |
|
534 | 537 | @view_config(route_name='applications_property', match_param='key=slow_calls', |
|
535 | 538 | renderer='json', permission='view') |
|
536 | 539 | def get_slow_calls(request): |
|
537 | 540 | """ |
|
538 | 541 | Returns information for time consuming calls in specific time interval |
|
539 | 542 | """ |
|
540 | 543 | query_params = request.GET.mixed() |
|
541 | 544 | query_params['resource'] = (request.context.resource.resource_id,) |
|
542 | 545 | |
|
543 | 546 | filter_settings = build_filter_settings_from_query_dict(request, |
|
544 | 547 | query_params) |
|
545 | 548 | |
|
546 | 549 | if not filter_settings.get('end_date'): |
|
547 | 550 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
548 | 551 | filter_settings['end_date'] = end_date |
|
549 | 552 | |
|
550 | 553 | delta = timedelta(hours=1) |
|
551 | 554 | if not filter_settings.get('start_date'): |
|
552 | 555 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
553 | 556 | |
|
554 | 557 | return SlowCallService.get_time_consuming_calls(request, filter_settings) |
|
555 | 558 | |
|
556 | 559 | |
|
557 | 560 | @view_config(route_name='applications_property', |
|
558 | 561 | match_param='key=requests_breakdown', |
|
559 | 562 | renderer='json', permission='view') |
|
560 | 563 | def get_requests_breakdown(request): |
|
561 | 564 | """ |
|
562 | 565 | Used on the dashboard to show which views are used most within |

563 | 566 | a time interval |
|
564 | 567 | """ |
|
565 | 568 | query_params = request.GET.mixed() |
|
566 | 569 | query_params['resource'] = (request.context.resource.resource_id,) |
|
567 | 570 | |
|
568 | 571 | filter_settings = build_filter_settings_from_query_dict(request, |
|
569 | 572 | query_params) |
|
570 | 573 | if not filter_settings.get('end_date'): |
|
571 | 574 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
572 | 575 | filter_settings['end_date'] = end_date |
|
573 | 576 | |
|
574 | 577 | if not filter_settings.get('start_date'): |
|
575 | 578 | delta = timedelta(hours=1) |
|
576 | 579 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
577 | 580 | |
|
578 | 581 | series = RequestMetricService.get_requests_breakdown( |
|
579 | 582 | request, filter_settings) |
|
580 | 583 | |
|
581 | 584 | results = [] |
|
582 | 585 | for row in series: |
|
583 | 586 | d_row = {'avg_response': round(row['main'] / row['requests'], 3), |
|
584 | 587 | 'requests': row['requests'], |
|
585 | 588 | 'main': row['main'], |
|
586 | 589 | 'view_name': row['key'], |
|
587 | 590 | 'latest_details': row['latest_details'], |
|
588 | 591 | 'percentage': round(row['percentage'] * 100, 1)} |
|
589 | 592 | |
|
590 | 593 | results.append(d_row) |
|
591 | 594 | |
|
592 | 595 | return results |
|
593 | 596 | |
|
594 | 597 | |
|
595 | 598 | @view_config(route_name='applications_property', |
|
596 | 599 | match_param='key=trending_reports', renderer='json', |
|
597 | 600 | permission='view') |
|
598 | 601 | def trending_reports(request): |
|
599 | 602 | """ |
|
600 | 603 | Returns exception/slow report trends for a specific time interval |
|
601 | 604 | """ |
|
602 | 605 | query_params = request.GET.mixed().copy() |
|
603 | 606 | # pop report type to rewrite it to tag later |
|
604 | 607 | report_type = query_params.pop('report_type', None) |
|
605 | 608 | if report_type: |
|
606 | 609 | query_params['type'] = report_type |
|
607 | 610 | |
|
608 | 611 | query_params['resource'] = (request.context.resource.resource_id,) |
|
609 | 612 | |
|
610 | 613 | filter_settings = build_filter_settings_from_query_dict(request, |
|
611 | 614 | query_params) |
|
612 | 615 | |
|
613 | 616 | if not filter_settings.get('end_date'): |
|
614 | 617 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
615 | 618 | filter_settings['end_date'] = end_date |
|
616 | 619 | |
|
617 | 620 | if not filter_settings.get('start_date'): |
|
618 | 621 | delta = timedelta(hours=1) |
|
619 | 622 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
620 | 623 | |
|
621 | 624 | results = ReportGroupService.get_trending(request, filter_settings) |
|
622 | 625 | |
|
623 | 626 | trending = [] |
|
624 | 627 | for occurences, group in results: |
|
625 | 628 | report_group = group.get_dict(request) |
|
626 | 629 | # show the occurences in the time range instead of the global ones |
|
627 | 630 | report_group['occurences'] = occurences |
|
628 | 631 | trending.append(report_group) |
|
629 | 632 | |
|
630 | 633 | return trending |
|
631 | 634 | |
|
632 | 635 | |
|
633 | 636 | @view_config(route_name='applications_property', |
|
634 | 637 | match_param='key=integrations', |
|
635 | 638 | renderer='json', permission='view') |
|
636 | 639 | def integrations(request): |
|
637 | 640 | """ |
|
638 | 641 | Integration list for a given application |
|
639 | 642 | """ |
|
640 | 643 | application = request.context.resource |
|
641 | 644 | return {'resource': application} |
|
642 | 645 | |
|
643 | 646 | |
|
644 | 647 | @view_config(route_name='applications_property', |
|
645 | 648 | match_param='key=user_permissions', renderer='json', |
|
646 | 649 | permission='owner', request_method='POST') |
|
647 | 650 | def user_resource_permission_create(request): |
|
648 | 651 | """ |
|
649 | 652 | Sets new permissions on a resource for a user |
|
650 | 653 | """ |
|
651 | 654 | resource = request.context.resource |
|
652 | 655 | user_name = request.unsafe_json_body.get('user_name') |
|
653 | user = User.by_user_name(user_name) | |
|
656 | user = UserService.by_user_name(user_name) | |
|
654 | 657 | if not user: |
|
655 | user = User.by_email(user_name) | |
|
658 | user = UserService.by_email(user_name) | |
|
656 | 659 | if not user: |
|
657 | 660 | return False |
|
658 | 661 | |
|
659 | 662 | for perm_name in request.unsafe_json_body.get('permissions', []): |
|
660 | permission = UserResourcePermission.by_resource_user_and_perm( | |
|
663 | permission = UserResourcePermissionService.by_resource_user_and_perm( | |
|
661 | 664 | user.id, perm_name, resource.resource_id) |
|
662 | 665 | if not permission: |
|
663 | 666 | permission = UserResourcePermission(perm_name=perm_name, |
|
664 | 667 | user_id=user.id) |
|
665 | 668 | resource.user_permissions.append(permission) |
|
666 | 669 | DBSession.flush() |
|
667 | perms = [p.perm_name for p in resource.perms_for_user(user) | |
|
|
670 | perms = [p.perm_name for p in ResourceService.perms_for_user(resource, user) | |
|
668 | 671 | if p.type == 'user'] |
|
669 | 672 | result = {'user_name': user.user_name, |
|
670 | 673 | 'permissions': list(set(perms))} |
|
671 | 674 | return result |
|
672 | 675 | |
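A sketch of the JSON body this endpoint consumes, matching the keys it reads from unsafe_json_body; the view resolves user_name first and falls back to an email lookup:

    payload = {'user_name': 'some_user@example.com',
               'permissions': ['view', 'update_reports']}
    # response echoes: {'user_name': ..., 'permissions': ['view', 'update_reports']}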
|
673 | 676 | |
|
674 | 677 | @view_config(route_name='applications_property', |
|
675 | 678 | match_param='key=user_permissions', renderer='json', |
|
676 | 679 | permission='owner', request_method='DELETE') |
|
677 | 680 | def user_resource_permission_delete(request): |
|
678 | 681 | """ |
|
679 | 682 | Removes a user's permission from a specific resource |
|
680 | 683 | """ |
|
681 | 684 | resource = request.context.resource |
|
682 | 685 | |
|
683 | user = User.by_user_name(request.GET.get('user_name')) | |
|
686 | user = UserService.by_user_name(request.GET.get('user_name')) | |
|
684 | 687 | if not user: |
|
685 | 688 | return False |
|
686 | 689 | |
|
687 | 690 | for perm_name in request.GET.getall('permissions'): |
|
688 | permission = UserResourcePermission.by_resource_user_and_perm( | |
|
691 | permission = UserResourcePermissionService.by_resource_user_and_perm( | |
|
689 | 692 | user.id, perm_name, resource.resource_id) |
|
690 | 693 | resource.user_permissions.remove(permission) |
|
691 | 694 | DBSession.flush() |
|
692 | perms = [p.perm_name for p in resource.perms_for_user(user) | |
|
|
695 | perms = [p.perm_name for p in ResourceService.perms_for_user(resource, user) | |
|
693 | 696 | if p.type == 'user'] |
|
694 | 697 | result = {'user_name': user.user_name, |
|
695 | 698 | 'permissions': list(set(perms))} |
|
696 | 699 | return result |
|
697 | 700 | |
|
698 | 701 | |
|
699 | 702 | @view_config(route_name='applications_property', |
|
700 | 703 | match_param='key=group_permissions', renderer='json', |
|
701 | 704 | permission='owner', request_method='POST') |
|
702 | 705 | def group_resource_permission_create(request): |
|
703 | 706 | """ |
|
704 | 707 | Sets new permissions on a resource for a group |
|
705 | 708 | """ |
|
706 | 709 | resource = request.context.resource |
|
707 | 710 | group = GroupService.by_id(request.unsafe_json_body.get('group_id')) |
|
708 | 711 | if not group: |
|
709 | 712 | return False |
|
710 | 713 | |
|
711 | 714 | for perm_name in request.unsafe_json_body.get('permissions', []): |
|
712 | 715 | permission = GroupResourcePermissionService.by_resource_group_and_perm( |
|
713 | 716 | group.id, perm_name, resource.resource_id) |
|
714 | 717 | if not permission: |
|
715 | 718 | permission = GroupResourcePermission(perm_name=perm_name, |
|
716 | 719 | group_id=group.id) |
|
717 | 720 | resource.group_permissions.append(permission) |
|
718 | 721 | DBSession.flush() |
|
719 | perm_tuples = resource.groups_for_perm( | |
|
|
722 | perm_tuples = ResourceService.groups_for_perm( | |
|
723 | resource, | |
|
720 | 724 | ANY_PERMISSION, |
|
721 | 725 | limit_group_permissions=True, |
|
722 | 726 | group_ids=[group.id]) |
|
723 | 727 | perms = [p.perm_name for p in perm_tuples if p.type == 'group'] |
|
724 | 728 | result = {'group': group.get_dict(), |
|
725 | 729 | 'permissions': list(set(perms))} |
|
726 | 730 | return result |
|
727 | 731 | |
|
728 | 732 | |
|
729 | 733 | @view_config(route_name='applications_property', |
|
730 | 734 | match_param='key=group_permissions', renderer='json', |
|
731 | 735 | permission='owner', request_method='DELETE') |
|
732 | 736 | def group_resource_permission_delete(request): |
|
733 | 737 | """ |
|
734 | 738 | Removes a group's permission from a specific resource |
|
735 | 739 | """ |
|
736 | 740 | form = forms.ReactorForm(request.POST, csrf_context=request) |
|
737 | 741 | form.validate() |
|
738 | 742 | resource = request.context.resource |
|
739 | 743 | group = GroupService.by_id(request.GET.get('group_id')) |
|
740 | 744 | if not group: |
|
741 | 745 | return False |
|
742 | 746 | |
|
743 | 747 | for perm_name in request.GET.getall('permissions'): |
|
744 | 748 | permission = GroupResourcePermissionService.by_resource_group_and_perm( |
|
745 | 749 | group.id, perm_name, resource.resource_id) |
|
746 | 750 | resource.group_permissions.remove(permission) |
|
747 | 751 | DBSession.flush() |
|
748 | perm_tuples = resource.groups_for_perm( | |

752 | perm_tuples = ResourceService.groups_for_perm( | |
|
753 | resource, | |
|
749 | 754 | ANY_PERMISSION, |
|
750 | 755 | limit_group_permissions=True, |
|
751 | 756 | group_ids=[group.id]) |
|
752 | 757 | perms = [p.perm_name for p in perm_tuples if p.type == 'group'] |
|
753 | 758 | result = {'group': group.get_dict(), |
|
754 | 759 | 'permissions': list(set(perms))} |
|
755 | 760 | return result |
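
Review note: every hunk above follows the same ziggurat_foundations migration pattern - instance methods (resource.perms_for_user(user), User.by_user_name(...)) become static service calls that take the model instance or lookup key as the first argument. A minimal sketch of the permission-listing step the POST/DELETE views share (the standalone helper name is illustrative, not part of the diff):

    from ziggurat_foundations.models.services.resource import ResourceService

    def user_permission_names(resource, user):
        # Same query the views above run after mutating
        # resource.user_permissions; the service call takes the
        # model instance as its first argument.
        perms = ResourceService.perms_for_user(resource, user)
        return sorted({p.perm_name for p in perms if p.type == 'user'})

The views return list(set(perms)); the sorted set here only makes the output deterministic.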
@@ -1,59 +1,60 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | from appenlight.lib.helpers import gen_pagination_headers |
|
18 | 18 | from appenlight.models.services.event import EventService |
|
19 | 19 | from pyramid.view import view_config |
|
20 | 20 | from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound |
|
21 | from ziggurat_foundations.models.services.user import UserService | |
|
21 | 22 | |
|
22 | 23 | |
|
23 | 24 | @view_config(route_name='events_no_id', |
|
24 | 25 | renderer='json', permission='authenticated') |
|
25 | 26 | def fetch_events(request): |
|
26 | 27 | """ |
|
27 | 28 | Returns a paginated list of events
|
28 | 29 | """ |
|
29 | 30 | event_paginator = EventService.get_paginator( |
|
30 | 31 | user=request.user, |
|
31 | 32 | page=1, |
|
32 | 33 | items_per_page=100 |
|
33 | 34 | ) |
|
34 | 35 | headers = gen_pagination_headers(request, event_paginator) |
|
35 | 36 | request.response.headers.update(headers) |
|
36 | 37 | |
|
37 | 38 | return [ev.get_dict() for ev in event_paginator.items] |
|
38 | 39 | |
|
39 | 40 | |
|
40 | 41 | @view_config(route_name='events', renderer='json', request_method='PATCH', |
|
41 | 42 | permission='authenticated') |
|
42 | 43 | def event_PATCH(request): |
|
43 | resources = request.user.resources_with_perms( | |

44 | ['view'], resource_types=request.registry.resource_types) | |
|
44 | resources = UserService.resources_with_perms( | |
|
45 | request.user, ['view'], resource_types=request.registry.resource_types) | |
|
45 | 46 | event = EventService.for_resource( |
|
46 | 47 | [r.resource_id for r in resources], |
|
47 | 48 | event_id=request.matchdict['event_id']).first() |
|
48 | 49 | if not event: |
|
49 | 50 | return HTTPNotFound() |
|
50 | 51 | allowed_keys = ['status'] |
|
51 | 52 | for k, v in request.unsafe_json_body.items(): |
|
52 | 53 | if k in allowed_keys: |
|
53 | 54 | if k == 'status': |
|
54 | 55 | event.close() |
|
55 | 56 | else: |
|
56 | 57 | setattr(event, k, v) |
|
57 | 58 | else: |
|
58 | 59 | return HTTPBadRequest() |
|
59 | 60 | return event.get_dict() |
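
Review note: the only behavioural surface touched in this file is event_PATCH, where request.user.resources_with_perms(...) becomes UserService.resources_with_perms(request.user, ...). A hedged sketch of the lookup it performs (helper name assumed for illustration):

    from ziggurat_foundations.models.services.user import UserService

    def viewable_resource_ids(user, resource_types):
        # Resources on which `user` holds 'view'; event_PATCH narrows
        # the event query to these ids before matching event_id.
        resources = UserService.resources_with_perms(
            user, ['view'], resource_types=resource_types)
        return [r.resource_id for r in resources]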
@@ -1,191 +1,192 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | |
|
19 | 19 | from pyramid.view import view_config |
|
20 | 20 | from pyramid.httpexceptions import HTTPUnprocessableEntity, HTTPNotFound |
|
21 | 21 | |
|
22 | from ziggurat_foundations.models.services.user import UserService | |
|
22 | 23 | from appenlight.lib.utils import permission_tuple_to_dict |
|
23 | 24 | from appenlight.models.services.config import ConfigService |
|
24 | 25 | from appenlight.models.group import Group |
|
25 | 26 | from appenlight.models.services.group import GroupService |
|
26 | 27 | from appenlight.models.user import User |
|
27 | 28 | from appenlight.models import DBSession |
|
28 | 29 | from appenlight import forms |
|
29 | 30 | from webob.multidict import MultiDict |
|
30 | 31 | |
|
31 | 32 | log = logging.getLogger(__name__) |
|
32 | 33 | |
|
33 | 34 | _ = str |
|
34 | 35 | |
|
35 | 36 | |
|
36 | 37 | @view_config(route_name='groups_no_id', renderer='json', |
|
37 | 38 | request_method="GET", permission='authenticated') |
|
38 | 39 | def groups_list(request): |
|
39 | 40 | """ |
|
40 | 41 | Returns groups list |
|
41 | 42 | """ |
|
42 | 43 | groups = Group.all().order_by(Group.group_name) |
|
43 | 44 | list_groups = ConfigService.by_key_and_section( |
|
44 | 45 | 'list_groups_to_non_admins', 'global') |
|
45 | 46 | if list_groups.value or request.has_permission('root_administration'): |
|
46 | 47 | return [g.get_dict() for g in groups] |
|
47 | 48 | else: |
|
48 | 49 | return [] |
|
49 | 50 | |
|
50 | 51 | |
|
51 | 52 | @view_config(route_name='groups_no_id', renderer='json', |
|
52 | 53 | request_method="POST", permission='root_administration') |
|
53 | 54 | def groups_create(request): |
|
54 | 55 | """ |
|
55 | 56 | Creates a new group
|
56 | 57 | """ |
|
57 | 58 | form = forms.GroupCreateForm( |
|
58 | 59 | MultiDict(request.safe_json_body or {}), csrf_context=request) |
|
59 | 60 | if form.validate(): |
|
60 | 61 | log.info('registering group') |
|
61 | 62 | group = Group() |
|
62 | 63 | # insert new group here |
|
63 | 64 | DBSession.add(group) |
|
64 | 65 | form.populate_obj(group) |
|
65 | 66 | request.session.flash(_('Group created')) |
|
66 | 67 | DBSession.flush() |
|
67 | 68 | return group.get_dict(include_perms=True) |
|
68 | 69 | else: |
|
69 | 70 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
70 | 71 | |
|
71 | 72 | |
|
72 | 73 | @view_config(route_name='groups', renderer='json', |
|
73 | 74 | request_method="DELETE", permission='root_administration') |
|
74 | 75 | def groups_DELETE(request): |
|
75 | 76 | """ |
|
76 | 77 | Removes a group permanently from the db
|
77 | 78 | """ |
|
78 | 79 | msg = _('You cannot remove administrator group from the system') |
|
79 | 80 | group = GroupService.by_id(request.matchdict.get('group_id')) |
|
80 | 81 | if group: |
|
81 | 82 | if group.id == 1: |
|
82 | 83 | request.session.flash(msg, 'warning') |
|
83 | 84 | else: |
|
84 | 85 | DBSession.delete(group) |
|
85 | 86 | request.session.flash(_('Group removed')) |
|
86 | 87 | return True |
|
87 | 88 | request.response.status = 422 |
|
88 | 89 | return False |
|
89 | 90 | |
|
90 | 91 | |
|
91 | 92 | @view_config(route_name='groups', renderer='json', |
|
92 | 93 | request_method="GET", permission='root_administration') |
|
93 | 94 | @view_config(route_name='groups', renderer='json', |
|
94 | 95 | request_method="PATCH", permission='root_administration') |
|
95 | 96 | def group_update(request): |
|
96 | 97 | """ |
|
97 | 98 | Updates group object |
|
98 | 99 | """ |
|
99 | 100 | group = GroupService.by_id(request.matchdict.get('group_id')) |
|
100 | 101 | if not group: |
|
101 | 102 | return HTTPNotFound() |
|
102 | 103 | |
|
103 | 104 | if request.method == 'PATCH': |
|
104 | 105 | form = forms.GroupCreateForm( |
|
105 | 106 | MultiDict(request.unsafe_json_body), csrf_context=request) |
|
106 | 107 | form._modified_group = group |
|
107 | 108 | if form.validate(): |
|
108 | 109 | form.populate_obj(group) |
|
109 | 110 | else: |
|
110 | 111 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
111 | 112 | return group.get_dict(include_perms=True) |
|
112 | 113 | |
|
113 | 114 | |
|
114 | 115 | @view_config(route_name='groups_property', |
|
115 | 116 | match_param='key=resource_permissions', |
|
116 | 117 | renderer='json', permission='root_administration') |
|
117 | 118 | def groups_resource_permissions_list(request): |
|
118 | 119 | """ |
|
119 | 120 | Get list of permissions assigned to specific resources |
|
120 | 121 | """ |
|
121 | 122 | group = GroupService.by_id(request.matchdict.get('group_id')) |
|
122 | 123 | if not group: |
|
123 | 124 | return HTTPNotFound() |
|
124 | 125 | return [permission_tuple_to_dict(perm) for perm in |
|
125 | group.resources_with_possible_perms()] | |

126 | GroupService.resources_with_possible_perms(group)] | |
|
126 | 127 | |
|
127 | 128 | |
|
128 | 129 | @view_config(route_name='groups_property', |
|
129 | 130 | match_param='key=users', request_method="GET", |
|
130 | 131 | renderer='json', permission='root_administration') |
|
131 | 132 | def groups_users_list(request): |
|
132 | 133 | """ |
|
133 | 134 | Returns the list of users belonging to a specific group
|
134 | 135 | """ |
|
135 | 136 | group = GroupService.by_id(request.matchdict.get('group_id')) |
|
136 | 137 | if not group: |
|
137 | 138 | return HTTPNotFound() |
|
138 | 139 | props = ['user_name', 'id', 'first_name', 'last_name', 'email', |
|
139 | 140 | 'last_login_date', 'status'] |
|
140 | 141 | users_dicts = [] |
|
141 | 142 | for user in group.users: |
|
142 | 143 | u_dict = user.get_dict(include_keys=props) |
|
143 | u_dict['gravatar_url'] = user.gravatar_url(s=20) | |

144 | u_dict['gravatar_url'] = UserService.gravatar_url(user, s=20) | |
|
144 | 145 | users_dicts.append(u_dict) |
|
145 | 146 | return users_dicts |
|
146 | 147 | |
|
147 | 148 | |
|
148 | 149 | @view_config(route_name='groups_property', |
|
149 | 150 | match_param='key=users', request_method="DELETE", |
|
150 | 151 | renderer='json', permission='root_administration') |
|
151 | 152 | def groups_users_remove(request): |
|
152 | 153 | """ |
|
153 | 154 | Removes a user from a specific group
|
154 | 155 | """ |
|
155 | 156 | group = GroupService.by_id(request.matchdict.get('group_id')) |
|
156 | user = User.by_user_name(request.GET.get('user_name')) | |
|
157 | user = UserService.by_user_name(request.GET.get('user_name')) | |
|
157 | 158 | if not group or not user: |
|
158 | 159 | return HTTPNotFound() |
|
159 | 160 | if len(group.users) > 1: |
|
160 | 161 | group.users.remove(user) |
|
161 | 162 | msg = "User removed from group" |
|
162 | 163 | request.session.flash(msg) |
|
163 | 164 | group.member_count = group.users_dynamic.count() |
|
164 | 165 | return True |
|
165 | 166 | msg = "Administrator group needs to contain at least one user" |
|
166 | 167 | request.session.flash(msg, 'warning') |
|
167 | 168 | return False |
|
168 | 169 | |
|
169 | 170 | |
|
170 | 171 | @view_config(route_name='groups_property', |
|
171 | 172 | match_param='key=users', request_method="POST", |
|
172 | 173 | renderer='json', permission='root_administration') |
|
173 | 174 | def groups_users_add(request): |
|
174 | 175 | """ |
|
175 | 176 | Adds a user to a specific group
|
176 | 177 | """ |
|
177 | 178 | group = GroupService.by_id(request.matchdict.get('group_id')) |
|
178 | user = User.by_user_name(request.unsafe_json_body.get('user_name')) | |
|
179 | user = UserService.by_user_name(request.unsafe_json_body.get('user_name')) | |
|
179 | 180 | if not user: |
|
180 | user = User.by_email(request.unsafe_json_body.get('user_name')) | |
|
181 | user = UserService.by_email(request.unsafe_json_body.get('user_name')) | |
|
181 | 182 | |
|
182 | 183 | if not group or not user: |
|
183 | 184 | return HTTPNotFound() |
|
184 | 185 | if user not in group.users: |
|
185 | 186 | group.users.append(user) |
|
186 | 187 | group.member_count = group.users_dynamic.count() |
|
187 | 188 | props = ['user_name', 'id', 'first_name', 'last_name', 'email', |
|
188 | 189 | 'last_login_date', 'status'] |
|
189 | 190 | u_dict = user.get_dict(include_keys=props) |
|
190 | u_dict['gravatar_url'] = user.gravatar_url(s=20) | |

191 | u_dict['gravatar_url'] = UserService.gravatar_url(user, s=20) | |
|
191 | 192 | return u_dict |
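
Review note: groups_users_list and groups_users_add now serialize members identically; only the gravatar line changed. A sketch of that shared shape (standalone helper assumed, not in the diff):

    from ziggurat_foundations.models.services.user import UserService

    PROPS = ['user_name', 'id', 'first_name', 'last_name', 'email',
             'last_login_date', 'status']

    def member_dict(user):
        # get_dict copies plain columns; the avatar URL now comes from
        # the UserService helper instead of a method on the User model.
        u_dict = user.get_dict(include_keys=PROPS)
        u_dict['gravatar_url'] = UserService.gravatar_url(user, s=20)
        return u_dict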
@@ -1,241 +1,242 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import datetime |
|
18 | 18 | import logging |
|
19 | 19 | import uuid |
|
20 | 20 | |
|
21 | 21 | import pyramid.security as security |
|
22 | 22 | |
|
23 | 23 | from pyramid.view import view_config |
|
24 | 24 | from pyramid.httpexceptions import HTTPFound |
|
25 | 25 | from pyramid.response import Response |
|
26 | 26 | from pyramid.security import NO_PERMISSION_REQUIRED |
|
27 | 27 | from ziggurat_foundations.ext.pyramid.sign_in import ZigguratSignInSuccess |
|
28 | 28 | from ziggurat_foundations.ext.pyramid.sign_in import ZigguratSignInBadAuth |
|
29 | 29 | from ziggurat_foundations.ext.pyramid.sign_in import ZigguratSignOut |
|
30 | from ziggurat_foundations.models.services.user import UserService | |
|
30 | 31 | |
|
31 | 32 | from appenlight.lib.social import handle_social_data |
|
32 | 33 | from appenlight.models import DBSession |
|
33 | 34 | from appenlight.models.user import User |
|
34 | 35 | from appenlight.models.services.user import UserService |
|
35 | 36 | from appenlight.subscribers import _ |
|
36 | 37 | from appenlight import forms |
|
37 | 38 | from webob.multidict import MultiDict |
|
38 | 39 | |
|
39 | 40 | log = logging.getLogger(__name__) |
|
40 | 41 | |
|
41 | 42 | |
|
42 | 43 | @view_config(context=ZigguratSignInSuccess, permission=NO_PERMISSION_REQUIRED) |
|
43 | 44 | def sign_in(request): |
|
44 | 45 | """ |
|
45 | 46 | Performs sign in by sending proper user identification headers |
|
46 | 47 | Regenerates CSRF token |
|
47 | 48 | """ |
|
48 | 49 | user = request.context.user |
|
49 | 50 | if user.status == 1: |
|
50 | 51 | request.session.new_csrf_token() |
|
51 | 52 | user.last_login_date = datetime.datetime.utcnow() |
|
52 | 53 | social_data = request.session.get('zigg.social_auth') |
|
53 | 54 | if social_data: |
|
54 | 55 | handle_social_data(request, user, social_data) |
|
55 | 56 | else: |
|
56 | 57 | request.session.flash(_('Account got disabled')) |
|
57 | 58 | |
|
58 | 59 | if request.context.came_from != '/': |
|
59 | 60 | return HTTPFound(location=request.context.came_from, |
|
60 | 61 | headers=request.context.headers) |
|
61 | 62 | else: |
|
62 | 63 | return HTTPFound(location=request.route_url('/'), |
|
63 | 64 | headers=request.context.headers) |
|
64 | 65 | |
|
65 | 66 | |
|
66 | 67 | @view_config(context=ZigguratSignInBadAuth, permission=NO_PERMISSION_REQUIRED) |
|
67 | 68 | def bad_auth(request): |
|
68 | 69 | """ |
|
69 | 70 | Handles incorrect login flow |
|
70 | 71 | """ |
|
71 | 72 | request.session.flash(_('Incorrect username or password'), 'warning') |
|
72 | 73 | return HTTPFound(location=request.route_url('register'), |
|
73 | 74 | headers=request.context.headers) |
|
74 | 75 | |
|
75 | 76 | |
|
76 | 77 | @view_config(context=ZigguratSignOut, permission=NO_PERMISSION_REQUIRED) |
|
77 | 78 | def sign_out(request): |
|
78 | 79 | """ |
|
79 | 80 | Removes user identification cookie |
|
80 | 81 | """ |
|
81 | 82 | return HTTPFound(location=request.route_url('register'), |
|
82 | 83 | headers=request.context.headers) |
|
83 | 84 | |
|
84 | 85 | |
|
85 | 86 | @view_config(route_name='lost_password', |
|
86 | 87 | renderer='appenlight:templates/user/lost_password.jinja2', |
|
87 | 88 | permission=NO_PERMISSION_REQUIRED) |
|
88 | 89 | def lost_password(request): |
|
89 | 90 | """ |
|
90 | 91 | Presents lost password page - sends password reset link to |
|
91 | 92 | specified email address. |
|
92 | 93 | This link is valid only for 10 minutes |
|
93 | 94 | """ |
|
94 | 95 | form = forms.LostPasswordForm(request.POST, csrf_context=request) |
|
95 | 96 | if request.method == 'POST' and form.validate(): |
|
96 | user = User.by_email(form.email.data) | |
|
97 | user = UserService.by_email(form.email.data) | |
|
97 | 98 | if user: |
|
98 | user.regenerate_security_code() | |

99 | UserService.regenerate_security_code(user) | |
|
99 | 100 | user.security_code_date = datetime.datetime.utcnow() |
|
100 | 101 | email_vars = { |
|
101 | 102 | 'user': user, |
|
102 | 103 | 'request': request, |
|
103 | 104 | 'email_title': "AppEnlight :: New password request" |
|
104 | 105 | } |
|
105 | 106 | UserService.send_email( |
|
106 | 107 | request, recipients=[user.email], |
|
107 | 108 | variables=email_vars, |
|
108 | 109 | template='/email_templates/lost_password.jinja2') |
|
109 | 110 | msg = 'Password reset email had been sent. ' \ |
|
110 | 111 | 'Please check your mailbox for further instructions.' |
|
111 | 112 | request.session.flash(_(msg)) |
|
112 | 113 | return HTTPFound(location=request.route_url('lost_password')) |
|
113 | 114 | return {"form": form} |
|
114 | 115 | |
|
115 | 116 | |
|
116 | 117 | @view_config(route_name='lost_password_generate', |
|
117 | 118 | permission=NO_PERMISSION_REQUIRED, |
|
118 | 119 | renderer='appenlight:templates/user/lost_password_generate.jinja2') |
|
119 | 120 | def lost_password_generate(request): |
|
120 | 121 | """ |
|
121 | 122 | Shows new password form - perform time check and set new password for user |
|
122 | 123 | """ |
|
123 | user = User.by_user_name_and_security_code( | |
|
124 | user = UserService.by_user_name_and_security_code( | |
|
124 | 125 | request.GET.get('user_name'), request.GET.get('security_code')) |
|
125 | 126 | if user: |
|
126 | 127 | delta = datetime.datetime.utcnow() - user.security_code_date |
|
127 | 128 | |
|
128 | 129 | if user and delta.total_seconds() < 600: |
|
129 | 130 | form = forms.NewPasswordForm(request.POST, csrf_context=request) |
|
130 | 131 | if request.method == "POST" and form.validate(): |
|
131 | user.set_password(form.new_password.data) | |

132 | UserService.set_password(user, form.new_password.data) | |
|
132 | 133 | request.session.flash(_('You can sign in with your new password.')) |
|
133 | 134 | return HTTPFound(location=request.route_url('register')) |
|
134 | 135 | else: |
|
135 | 136 | return {"form": form} |
|
136 | 137 | else: |
|
137 | 138 | return Response('Security code expired') |
|
138 | 139 | |
|
139 | 140 | |
|
140 | 141 | @view_config(route_name='register', |
|
141 | 142 | renderer='appenlight:templates/user/register.jinja2', |
|
142 | 143 | permission=NO_PERMISSION_REQUIRED) |
|
143 | 144 | def register(request): |
|
144 | 145 | """ |
|
145 | 146 | Render register page with form |
|
146 | 147 | Also handles oAuth flow for registration |
|
147 | 148 | """ |
|
148 | 149 | login_url = request.route_url('ziggurat.routes.sign_in') |
|
149 | 150 | if request.query_string: |
|
150 | 151 | query_string = '?%s' % request.query_string |
|
151 | 152 | else: |
|
152 | 153 | query_string = '' |
|
153 | 154 | referrer = '%s%s' % (request.path, query_string) |
|
154 | 155 | |
|
155 | 156 | if referrer in [login_url, '/register', '/register?sign_in=1']: |
|
156 | 157 | referrer = '/' # never use the login form itself as came_from |
|
157 | 158 | sign_in_form = forms.SignInForm( |
|
158 | 159 | came_from=request.params.get('came_from', referrer), |
|
159 | 160 | csrf_context=request) |
|
160 | 161 | |
|
161 | 162 | # populate form from oAuth session data returned by authomatic |
|
162 | 163 | social_data = request.session.get('zigg.social_auth') |
|
163 | 164 | if request.method != 'POST' and social_data: |
|
164 | 165 | log.debug(social_data) |
|
165 | 166 | user_name = social_data['user'].get('user_name', '').split('@')[0] |
|
166 | 167 | form_data = { |
|
167 | 168 | 'user_name': user_name, |
|
168 | 169 | 'email': social_data['user'].get('email') |
|
169 | 170 | } |
|
170 | 171 | form_data['user_password'] = str(uuid.uuid4()) |
|
171 | 172 | form = forms.UserRegisterForm(MultiDict(form_data), |
|
172 | 173 | csrf_context=request) |
|
173 | 174 | form.user_password.widget.hide_value = False |
|
174 | 175 | else: |
|
175 | 176 | form = forms.UserRegisterForm(request.POST, csrf_context=request) |
|
176 | 177 | if request.method == 'POST' and form.validate(): |
|
177 | 178 | log.info('registering user') |
|
178 | 179 | # insert new user here |
|
179 | 180 | if request.registry.settings['appenlight.disable_registration']: |
|
180 | 181 | request.session.flash(_('Registration is currently disabled.')) |
|
181 | 182 | return HTTPFound(location=request.route_url('/')) |
|
182 | 183 | |
|
183 | 184 | new_user = User() |
|
184 | 185 | DBSession.add(new_user) |
|
185 | 186 | form.populate_obj(new_user) |
|
186 | new_user.regenerate_security_code() | |

187 | UserService.regenerate_security_code(new_user) | |
|
187 | 188 | new_user.status = 1 |
|
188 | new_user.set_password(new_user.user_password) | |

189 | UserService.set_password(new_user, new_user.user_password) | |
|
189 | 190 | new_user.registration_ip = request.environ.get('REMOTE_ADDR') |
|
190 | 191 | |
|
191 | 192 | if social_data: |
|
192 | 193 | handle_social_data(request, new_user, social_data) |
|
193 | 194 | |
|
194 | 195 | email_vars = {'user': new_user, |
|
195 | 196 | 'request': request, |
|
196 | 197 | 'email_title': "AppEnlight :: Start information"} |
|
197 | 198 | UserService.send_email( |
|
198 | 199 | request, recipients=[new_user.email], variables=email_vars, |
|
199 | 200 | template='/email_templates/registered.jinja2') |
|
200 | 201 | request.session.flash(_('You have successfully registered.')) |
|
201 | 202 | DBSession.flush() |
|
202 | 203 | headers = security.remember(request, new_user.id) |
|
203 | 204 | return HTTPFound(location=request.route_url('/'), |
|
204 | 205 | headers=headers) |
|
205 | 206 | settings = request.registry.settings |
|
206 | 207 | social_plugins = {} |
|
207 | 208 | if settings.get('authomatic.pr.twitter.key', ''): |
|
208 | 209 | social_plugins['twitter'] = True |
|
209 | 210 | if settings.get('authomatic.pr.google.key', ''): |
|
210 | 211 | social_plugins['google'] = True |
|
211 | 212 | if settings.get('authomatic.pr.github.key', ''): |
|
212 | 213 | social_plugins['github'] = True |
|
213 | 214 | if settings.get('authomatic.pr.bitbucket.key', ''): |
|
214 | 215 | social_plugins['bitbucket'] = True |
|
215 | 216 | |
|
216 | 217 | return { |
|
217 | 218 | "form": form, |
|
218 | 219 | "sign_in_form": sign_in_form, |
|
219 | 220 | "social_plugins": social_plugins |
|
220 | 221 | } |
|
221 | 222 | |
|
222 | 223 | |
|
223 | 224 | @view_config(route_name='/', |
|
224 | 225 | renderer='appenlight:templates/app.jinja2', |
|
225 | 226 | permission=NO_PERMISSION_REQUIRED) |
|
226 | 227 | @view_config(route_name='angular_app_ui', |
|
227 | 228 | renderer='appenlight:templates/app.jinja2', |
|
228 | 229 | permission=NO_PERMISSION_REQUIRED) |
|
229 | 230 | @view_config(route_name='angular_app_ui_ix', |
|
230 | 231 | renderer='appenlight:templates/app.jinja2', |
|
231 | 232 | permission=NO_PERMISSION_REQUIRED) |
|
232 | 233 | def app_main_index(request): |
|
233 | 234 | """ |
|
234 | 235 | Render dashboard/report browser page along with:
|
235 | 236 | - flash messages |
|
236 | 237 | - application list |
|
237 | 238 | - assigned reports |
|
238 | 239 | - latest events |
|
239 | 240 | (those last two come from subscribers.py that sets global renderer variables) |
|
240 | 241 | """ |
|
241 | 242 | return {} |
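
Review note: lost_password, lost_password_generate and register all swap user.regenerate_security_code() / user.set_password() for the service equivalents. A sketch of the reset step lost_password performs, assuming (as the code above suggests) that security_code_date is what lost_password_generate later checks against its 600-second window:

    import datetime

    from ziggurat_foundations.models.services.user import UserService

    def issue_reset_code(user):
        # A fresh code invalidates the previous link; the timestamp is
        # compared against utcnow() in lost_password_generate.
        UserService.regenerate_security_code(user)
        user.security_code_date = datetime.datetime.utcnow()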
@@ -1,294 +1,297 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import logging |
|
18 | 18 | |
|
19 | 19 | from datetime import datetime, timedelta |
|
20 | 20 | from pyramid.view import view_config |
|
21 | 21 | from pyramid.httpexceptions import HTTPUnprocessableEntity |
|
22 | 22 | |
|
23 | from ziggurat_foundations.models.services.resource import ResourceService | |
|
24 | from ziggurat_foundations.models.services.user import UserService | |
|
25 | ||
|
23 | 26 | from appenlight.models import DBSession |
|
24 | 27 | from appenlight.models.user import User |
|
25 | 28 | from appenlight.models.report_comment import ReportComment |
|
26 | 29 | from appenlight.models.report_assignment import ReportAssignment |
|
27 | 30 | from appenlight.models.services.user import UserService |
|
28 | 31 | from appenlight.models.services.report_group import ReportGroupService |
|
29 | 32 | from appenlight import forms |
|
30 | 33 | from appenlight.lib.enums import ReportType |
|
31 | 34 | from appenlight.lib.helpers import gen_pagination_headers |
|
32 | 35 | from appenlight.lib.utils import build_filter_settings_from_query_dict |
|
33 | 36 | from appenlight.validators import ReportSearchSchema, TagListSchema, \ |
|
34 | 37 | accepted_search_params |
|
35 | 38 | from webob.multidict import MultiDict |
|
36 | 39 | |
|
37 | 40 | _ = str |
|
38 | 41 | |
|
39 | 42 | log = logging.getLogger(__name__) |
|
40 | 43 | |
|
41 | 44 | section_filters_key = 'appenlight:reports:filter:%s' |
|
42 | 45 | |
|
43 | 46 | |
|
44 | 47 | @view_config(route_name='reports', renderer='json', permission='authenticated') |
|
45 | 48 | @view_config(route_name='slow_reports', renderer='json', |
|
46 | 49 | permission='authenticated') |
|
47 | 50 | def index(request): |
|
48 | 51 | """ |
|
49 | 52 | Returns list of report groups based on user search query |
|
50 | 53 | """ |
|
51 | 54 | if request.user: |
|
52 | 55 | request.user.last_login_date = datetime.utcnow() |
|
53 | 56 | |
|
54 | applications = request.user.resources_with_perms( | |

55 | ['view'], resource_types=['application']) | |
|
57 | applications = UserService.resources_with_perms( | |
|
58 | request.user, ['view'], resource_types=['application']) | |
|
56 | 59 | |
|
57 | 60 | search_params = request.GET.mixed() |
|
58 | 61 | |
|
59 | 62 | all_possible_app_ids = set([app.resource_id for app in applications]) |
|
60 | 63 | schema = ReportSearchSchema().bind(resources=all_possible_app_ids) |
|
61 | 64 | tag_schema = TagListSchema() |
|
62 | 65 | filter_settings = schema.deserialize(search_params) |
|
63 | 66 | tag_list = [{"name": k, "value": v} for k, v in filter_settings.items() |
|
64 | 67 | if k not in accepted_search_params] |
|
65 | 68 | tags = tag_schema.deserialize(tag_list) |
|
66 | 69 | filter_settings['tags'] = tags |
|
67 | 70 | if request.matched_route.name == 'slow_reports': |
|
68 | 71 | filter_settings['report_type'] = [ReportType.slow] |
|
69 | 72 | else: |
|
70 | 73 | filter_settings['report_type'] = [ReportType.error] |
|
71 | 74 | |
|
72 | 75 | reports_paginator = ReportGroupService.get_paginator_by_app_ids( |
|
73 | 76 | app_ids=filter_settings['resource'], |
|
74 | 77 | page=filter_settings['page'], |
|
75 | 78 | filter_settings=filter_settings |
|
76 | 79 | ) |
|
77 | 80 | reports = [] |
|
78 | 81 | include_keys = ('id', 'http_status', 'report_type', 'resource_name', |
|
79 | 82 | 'front_url', 'resource_id', 'error', 'url_path', 'tags', |
|
80 | 83 | 'duration') |
|
81 | 84 | for report in reports_paginator.sa_items: |
|
82 | 85 | reports.append(report.get_dict(request, include_keys=include_keys)) |
|
83 | 86 | headers = gen_pagination_headers(request, reports_paginator) |
|
84 | 87 | request.response.headers.update(headers) |
|
85 | 88 | return reports |
|
86 | 89 | |
|
87 | 90 | |
|
88 | 91 | @view_config(route_name='report_groups', renderer='json', permission='view', |
|
89 | 92 | request_method="GET") |
|
90 | 93 | def view_report(request): |
|
91 | 94 | """ |
|
92 | 95 | Show individual detailed report group along with latest report |
|
93 | 96 | """ |
|
94 | 97 | report_group = request.context.report_group |
|
95 | 98 | if not report_group.read: |
|
96 | 99 | report_group.read = True |
|
97 | 100 | |
|
98 | 101 | report_id = request.params.get('reportId', request.params.get('report_id')) |
|
99 | 102 | report_dict = report_group.get_report(report_id).get_dict(request, |
|
100 | 103 | details=True) |
|
101 | 104 | # disallow browsing other occurrences by anonymous users
|
102 | 105 | if not request.user: |
|
103 | 106 | report_dict.pop('group_next_report', None) |
|
104 | 107 | report_dict.pop('group_previous_report', None) |
|
105 | 108 | return report_dict |
|
106 | 109 | |
|
107 | 110 | |
|
108 | 111 | @view_config(route_name='report_groups', renderer='json', |
|
109 | 112 | permission='update_reports', request_method='DELETE') |
|
110 | 113 | def remove(request): |
|
111 | 114 | """ |
|
112 | 115 | Used to remove report groups from the database
|
113 | 116 | """ |
|
114 | 117 | report = request.context.report_group |
|
115 | 118 | form = forms.ReactorForm(request.POST, csrf_context=request) |
|
116 | 119 | form.validate() |
|
117 | 120 | DBSession.delete(report) |
|
118 | 121 | return True |
|
119 | 122 | |
|
120 | 123 | |
|
121 | 124 | @view_config(route_name='report_groups_property', match_param='key=comments', |
|
122 | 125 | renderer='json', permission='view', request_method="POST") |
|
123 | 126 | def comment_create(request): |
|
124 | 127 | """ |
|
125 | 128 | Creates user comments for report group, sends email notifications |
|
126 | 129 | of said comments |
|
127 | 130 | """ |
|
128 | 131 | report_group = request.context.report_group |
|
129 | 132 | application = request.context.resource |
|
130 | 133 | form = forms.CommentForm(MultiDict(request.unsafe_json_body), |
|
131 | 134 | csrf_context=request) |
|
132 | 135 | if request.method == 'POST' and form.validate(): |
|
133 | 136 | comment = ReportComment(owner_id=request.user.id, |
|
134 | 137 | report_time=report_group.first_timestamp) |
|
135 | 138 | form.populate_obj(comment) |
|
136 | 139 | report_group.comments.append(comment) |
|
137 | perm_list = application.users_for_perm('view') | |

140 | perm_list = ResourceService.users_for_perm(application, 'view') | |
|
138 | 141 | uids_to_notify = [] |
|
139 | 142 | users_to_notify = [] |
|
140 | 143 | for perm in perm_list: |
|
141 | 144 | user = perm.user |
|
142 | 145 | if ('@{}'.format(user.user_name) in comment.body and |
|
143 | 146 | user.id not in uids_to_notify): |
|
144 | 147 | uids_to_notify.append(user.id) |
|
145 | 148 | users_to_notify.append(user) |
|
146 | 149 | |
|
147 | 150 | commenters = ReportGroupService.users_commenting( |
|
148 | 151 | report_group, exclude_user_id=request.user.id) |
|
149 | 152 | for user in commenters: |
|
150 | 153 | if user.id not in uids_to_notify: |
|
151 | 154 | uids_to_notify.append(user.id) |
|
152 | 155 | users_to_notify.append(user) |
|
153 | 156 | |
|
154 | 157 | for user in users_to_notify: |
|
155 | 158 | email_vars = {'user': user, |
|
156 | 159 | 'commenting_user': request.user, |
|
157 | 160 | 'request': request, |
|
158 | 161 | 'application': application, |
|
159 | 162 | 'report_group': report_group, |
|
160 | 163 | 'comment': comment, |
|
161 | 164 | 'email_title': "AppEnlight :: New comment"} |
|
162 | 165 | UserService.send_email( |
|
163 | 166 | request, |
|
164 | 167 | recipients=[user.email], |
|
165 | 168 | variables=email_vars, |
|
166 | 169 | template='/email_templates/new_comment_report.jinja2') |
|
167 | 170 | request.session.flash(_('Your comment was created')) |
|
168 | 171 | return comment.get_dict() |
|
169 | 172 | else: |
|
170 | 173 | return form.errors |
|
171 | 174 | |
|
172 | 175 | |
|
173 | 176 | @view_config(route_name='report_groups_property', |
|
174 | 177 | match_param='key=assigned_users', renderer='json', |
|
175 | 178 | permission='update_reports', request_method="GET") |
|
176 | 179 | def assigned_users(request): |
|
177 | 180 | """ |
|
178 | 181 | Returns the list of users a specific report group is assigned to for review
|
179 | 182 | """ |
|
180 | 183 | report_group = request.context.report_group |
|
181 | 184 | application = request.context.resource |
|
182 | users = set([p.user for p in application.users_for_perm('view')]) | |

185 | users = set([p.user for p in ResourceService.users_for_perm(application, 'view')]) | |
|
183 | 186 | currently_assigned = [u.user_name for u in report_group.assigned_users] |
|
184 | 187 | user_status = {'assigned': [], 'unassigned': []} |
|
185 | 188 | # handle users |
|
186 | 189 | for user in users: |
|
187 | 190 | user_dict = {'user_name': user.user_name, |
|
188 | 'gravatar_url': user.gravatar_url(), | |

191 | 'gravatar_url': UserService.gravatar_url(user), | |
|
189 | 192 | 'name': '%s %s' % (user.first_name, user.last_name,)} |
|
190 | 193 | if user.user_name in currently_assigned: |
|
191 | 194 | user_status['assigned'].append(user_dict) |
|
192 | 195 | elif user_dict not in user_status['unassigned']: |
|
193 | 196 | user_status['unassigned'].append(user_dict) |
|
194 | 197 | return user_status |
|
195 | 198 | |
|
196 | 199 | |
|
197 | 200 | @view_config(route_name='report_groups_property', |
|
198 | 201 | match_param='key=assigned_users', renderer='json', |
|
199 | 202 | permission='update_reports', request_method="PATCH") |
|
200 | 203 | def assign_users(request): |
|
201 | 204 | """ |
|
202 | 205 | Assigns specific report group to user for review - send email notification |
|
203 | 206 | """ |
|
204 | 207 | report_group = request.context.report_group |
|
205 | 208 | application = request.context.resource |
|
206 | 209 | currently_assigned = [u.user_name for u in report_group.assigned_users] |
|
207 | 210 | new_assigns = request.unsafe_json_body |
|
208 | 211 | |
|
209 | 212 | # first unassign old users |
|
210 | 213 | for user_name in new_assigns['unassigned']: |
|
211 | 214 | if user_name in currently_assigned: |
|
212 | user = User.by_user_name(user_name) | |
|
215 | user = UserService.by_user_name(user_name) | |
|
213 | 216 | report_group.assigned_users.remove(user) |
|
214 | 217 | comment = ReportComment(owner_id=request.user.id, |
|
215 | 218 | report_time=report_group.first_timestamp) |
|
216 | 219 | comment.body = 'Unassigned group from @%s' % user_name |
|
217 | 220 | report_group.comments.append(comment) |
|
218 | 221 | |
|
219 | 222 | # assign new users |
|
220 | 223 | for user_name in new_assigns['assigned']: |
|
221 | 224 | if user_name not in currently_assigned: |
|
222 | user = User.by_user_name(user_name) | |
|
225 | user = UserService.by_user_name(user_name) | |
|
223 | 226 | if user in report_group.assigned_users: |
|
224 | 227 | report_group.assigned_users.remove(user) |
|
225 | 228 | DBSession.flush() |
|
226 | 229 | assignment = ReportAssignment( |
|
227 | 230 | owner_id=user.id, |
|
228 | 231 | report_time=report_group.first_timestamp, |
|
229 | 232 | group_id=report_group.id) |
|
230 | 233 | DBSession.add(assignment) |
|
231 | 234 | |
|
232 | 235 | comment = ReportComment(owner_id=request.user.id, |
|
233 | 236 | report_time=report_group.first_timestamp) |
|
234 | 237 | comment.body = 'Assigned report_group to @%s' % user_name |
|
235 | 238 | report_group.comments.append(comment) |
|
236 | 239 | |
|
237 | 240 | email_vars = {'user': user, |
|
238 | 241 | 'request': request, |
|
239 | 242 | 'application': application, |
|
240 | 243 | 'report_group': report_group, |
|
241 | 244 | 'email_title': "AppEnlight :: Assigned Report"} |
|
242 | 245 | UserService.send_email(request, recipients=[user.email], |
|
243 | 246 | variables=email_vars, |
|
244 | 247 | template='/email_templates/assigned_report.jinja2') |
|
245 | 248 | |
|
246 | 249 | return True |
|
247 | 250 | |
|
248 | 251 | |
|
249 | 252 | @view_config(route_name='report_groups_property', match_param='key=history', |
|
250 | 253 | renderer='json', permission='view') |
|
251 | 254 | def history(request): |
|
252 | 255 | """ Separate error graph or similar graph""" |
|
253 | 256 | report_group = request.context.report_group |
|
254 | 257 | query_params = request.GET.mixed() |
|
255 | 258 | query_params['resource'] = (report_group.resource_id,) |
|
256 | 259 | |
|
257 | 260 | filter_settings = build_filter_settings_from_query_dict(request, |
|
258 | 261 | query_params) |
|
259 | 262 | if not filter_settings.get('end_date'): |
|
260 | 263 | end_date = datetime.utcnow().replace(microsecond=0, second=0) |
|
261 | 264 | filter_settings['end_date'] = end_date |
|
262 | 265 | |
|
263 | 266 | if not filter_settings.get('start_date'): |
|
264 | 267 | delta = timedelta(days=30) |
|
265 | 268 | filter_settings['start_date'] = filter_settings['end_date'] - delta |
|
266 | 269 | |
|
267 | 270 | filter_settings['group_id'] = report_group.id |
|
268 | 271 | |
|
269 | 272 | result = ReportGroupService.get_report_stats(request, filter_settings) |
|
270 | 273 | |
|
271 | 274 | plot_data = [] |
|
272 | 275 | for row in result: |
|
273 | 276 | point = { |
|
274 | 277 | 'x': row['x'], |
|
275 | 278 | 'reports': row['report'] + row['slow_report'] + row['not_found']} |
|
276 | 279 | plot_data.append(point) |
|
277 | 280 | |
|
278 | 281 | return plot_data |
|
279 | 282 | |
|
280 | 283 | |
|
281 | 284 | @view_config(route_name='report_groups', renderer='json', |
|
282 | 285 | permission='update_reports', request_method="PATCH") |
|
283 | 286 | def report_groups_PATCH(request): |
|
284 | 287 | """ |
|
285 | 288 | Used to update the report group fixed status |
|
286 | 289 | """ |
|
287 | 290 | report_group = request.context.report_group |
|
288 | 291 | allowed_keys = ['public', 'fixed'] |
|
289 | 292 | for k, v in request.unsafe_json_body.items(): |
|
290 | 293 | if k in allowed_keys: |
|
291 | 294 | setattr(report_group, k, v) |
|
292 | 295 | else: |
|
293 | 296 | return HTTPUnprocessableEntity() |
|
294 | 297 | return report_group.get_dict(request) |
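
Review note: comment_create and assigned_users both enumerate viewers through ResourceService.users_for_perm, which yields permission tuples carrying a .user attribute. A hedged sketch of that enumeration (helper name illustrative):

    from ziggurat_foundations.models.services.resource import ResourceService

    def users_with_view(application):
        # One user can appear in several tuples (direct and group
        # permissions), hence the set - mirroring the dedup the views
        # do with uids_to_notify and user_status['unassigned'].
        return {perm.user for perm in
                ResourceService.users_for_perm(application, 'view')}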
@@ -1,444 +1,446 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import copy |
|
18 | 18 | import logging |
|
19 | 19 | import datetime |
|
20 | 20 | import time |
|
21 | 21 | import random |
|
22 | 22 | import redis |
|
23 | 23 | import six |
|
24 | 24 | import pyramid.renderers |
|
25 | 25 | import requests |
|
26 | ||
|
27 | from ziggurat_foundations.models.services.user import UserService | |
|
28 | ||
|
26 | 29 | import appenlight.celery.tasks |
|
27 | 30 | from pyramid.view import view_config |
|
28 | 31 | from pyramid_mailer.message import Message |
|
29 | 32 | from appenlight_client.timing import time_trace |
|
30 | 33 | from appenlight.models import DBSession, Datastores |
|
31 | 34 | from appenlight.models.user import User |
|
32 | 35 | from appenlight.models.report_group import ReportGroup |
|
33 | 36 | from appenlight.models.event import Event |
|
34 | 37 | from appenlight.models.services.report_group import ReportGroupService |
|
35 | 38 | from appenlight.models.services.event import EventService |
|
36 | 39 | from appenlight.lib.enums import ReportType |
|
37 | 40 | |
|
38 | 41 | log = logging.getLogger(__name__) |
|
39 | 42 | |
|
40 | 43 | GLOBAL_REQ = None |
|
41 | 44 | |
|
42 | 45 | |
|
43 | 46 | @view_config(route_name='test', match_param='action=mail', |
|
44 | 47 | renderer='string', permission='root_administration') |
|
45 | 48 | def mail(request): |
|
46 | 49 | """ |
|
47 | 50 | Test email communication |
|
48 | 51 | """ |
|
49 | 52 | request.environ['HTTP_HOST'] = 'appenlight.com' |
|
50 | 53 | request.environ['wsgi.url_scheme'] = 'https' |
|
51 | 54 | renderer_vars = {"title": "You have just registered on AppEnlight", |
|
52 | 55 | "username": "test", |
|
53 | 56 | "email": "grzegżółka", |
|
54 | 57 | 'firstname': 'dupa'} |
|
55 | 58 | # return vars |
|
56 | 59 | html = pyramid.renderers.render('/email_templates/registered.jinja2', |
|
57 | 60 | renderer_vars, |
|
58 | 61 | request=request) |
|
59 | 62 | message = Message(subject="hello world %s" % random.randint(1, 9999), |
|
60 | 63 | sender="info@appenlight.com", |
|
61 | 64 | recipients=["ergo14@gmail.com"], |
|
62 | 65 | html=html) |
|
63 | 66 | request.registry.mailer.send(message) |
|
64 | 67 | return html |
|
65 | 68 | return vars |
|
66 | 69 | |
|
67 | 70 | |
|
68 | 71 | @view_config(route_name='test', match_param='action=alerting', |
|
69 | 72 | renderer='appenlight:templates/tests/alerting.jinja2', |
|
70 | 73 | permission='root_administration') |
|
71 | 74 | def alerting_test(request): |
|
72 | 75 | """ |
|
73 | 76 | Allows testing sending data on the various registered alerting channels
|
74 | 77 | """ |
|
75 | applications = request.user.resources_with_perms( | |
|
76 | ['view'], resource_types=['application']) | |
|
78 | applications = UserService.resources_with_perms(request.user, ['view'], resource_types=['application']) | |
|
77 | 79 | # what we can select in total |
|
78 | 80 | all_possible_app_ids = [app.resource_id for app in applications] |
|
79 | 81 | resource = applications[0] |
|
80 | 82 | |
|
81 | 83 | alert_channels = [] |
|
82 | 84 | for channel in request.user.alert_channels: |
|
83 | 85 | alert_channels.append(channel.get_dict()) |
|
84 | 86 | |
|
85 | 87 | cname = request.params.get('channel_name') |
|
86 | 88 | cvalue = request.params.get('channel_value') |
|
87 | 89 | event_name = request.params.get('event_name') |
|
88 | 90 | if cname and cvalue: |
|
89 | 91 | for channel in request.user.alert_channels: |
|
90 | 92 | if (channel.channel_value == cvalue and |
|
91 | 93 | channel.channel_name == cname): |
|
92 | 94 | break |
|
93 | 95 | if event_name in ['error_report_alert', 'slow_report_alert']: |
|
94 | 96 | # opened |
|
95 | 97 | new_event = Event(resource_id=resource.resource_id, |
|
96 | 98 | event_type=Event.types[event_name], |
|
97 | 99 | start_date=datetime.datetime.utcnow(), |
|
98 | 100 | status=Event.statuses['active'], |
|
99 | 101 | values={'reports': 5, |
|
100 | 102 | 'threshold': 10} |
|
101 | 103 | ) |
|
102 | 104 | channel.notify_alert(resource=resource, |
|
103 | 105 | event=new_event, |
|
104 | 106 | user=request.user, |
|
105 | 107 | request=request) |
|
106 | 108 | |
|
107 | 109 | # closed |
|
108 | 110 | ev_type = Event.types[event_name.replace('open', 'close')] |
|
109 | 111 | new_event = Event(resource_id=resource.resource_id, |
|
110 | 112 | event_type=ev_type, |
|
111 | 113 | start_date=datetime.datetime.utcnow(), |
|
112 | 114 | status=Event.statuses['closed'], |
|
113 | 115 | values={'reports': 5, |
|
114 | 116 | 'threshold': 10}) |
|
115 | 117 | channel.notify_alert(resource=resource, |
|
116 | 118 | event=new_event, |
|
117 | 119 | user=request.user, |
|
118 | 120 | request=request) |
|
119 | 121 | elif event_name == 'notify_reports': |
|
120 | 122 | report = ReportGroupService.by_app_ids(all_possible_app_ids) \ |
|
121 | 123 | .filter(ReportGroup.report_type == ReportType.error).first() |
|
122 | 124 | confirmed_reports = [(5, report), (1, report)] |
|
123 | 125 | channel.notify_reports(resource=resource, |
|
124 | 126 | user=request.user, |
|
125 | 127 | request=request, |
|
126 | 128 | since_when=datetime.datetime.utcnow(), |
|
127 | 129 | reports=confirmed_reports) |
|
128 | 130 | confirmed_reports = [(5, report)] |
|
129 | 131 | channel.notify_reports(resource=resource, |
|
130 | 132 | user=request.user, |
|
131 | 133 | request=request, |
|
132 | 134 | since_when=datetime.datetime.utcnow(), |
|
133 | 135 | reports=confirmed_reports) |
|
134 | 136 | elif event_name == 'notify_uptime': |
|
135 | 137 | new_event = Event(resource_id=resource.resource_id, |
|
136 | 138 | event_type=Event.types['uptime_alert'], |
|
137 | 139 | start_date=datetime.datetime.utcnow(), |
|
138 | 140 | status=Event.statuses['active'], |
|
139 | 141 | values={"status_code": 500, |
|
140 | 142 | "tries": 2, |
|
141 | 143 | "response_time": 0}) |
|
142 | 144 | channel.notify_uptime_alert(resource=resource, |
|
143 | 145 | event=new_event, |
|
144 | 146 | user=request.user, |
|
145 | 147 | request=request) |
|
146 | 148 | elif event_name == 'chart_alert': |
|
147 | 149 | event = EventService.by_type_and_status( |
|
148 | 150 | event_types=(Event.types['chart_alert'],), |
|
149 | 151 | status_types=(Event.statuses['active'],)).first() |
|
150 | 152 | channel.notify_chart_alert(resource=event.resource, |
|
151 | 153 | event=event, |
|
152 | 154 | user=request.user, |
|
153 | 155 | request=request) |
|
154 | 156 | elif event_name == 'daily_digest': |
|
155 | 157 | since_when = datetime.datetime.utcnow() - datetime.timedelta( |
|
156 | 158 | hours=8) |
|
157 | 159 | filter_settings = {'resource': [resource.resource_id], |
|
158 | 160 | 'tags': [{'name': 'type', |
|
159 | 161 | 'value': ['error'], 'op': None}], |
|
160 | 162 | 'type': 'error', 'start_date': since_when} |
|
161 | 163 | |
|
162 | 164 | reports = ReportGroupService.get_trending( |
|
163 | 165 | request, filter_settings=filter_settings, limit=50) |
|
164 | 166 | channel.send_digest(resource=resource, |
|
165 | 167 | user=request.user, |
|
166 | 168 | request=request, |
|
167 | 169 | since_when=datetime.datetime.utcnow(), |
|
168 | 170 | reports=reports) |
|
169 | 171 | |
|
170 | 172 | return {'alert_channels': alert_channels, |
|
171 | 173 | 'applications': dict([(app.resource_id, app.resource_name) |
|
172 | 174 | for app in applications.all()])} |
|
173 | 175 | |
|
174 | 176 | |
|
175 | 177 | @view_config(route_name='test', match_param='action=error', |
|
176 | 178 | renderer='string', permission='root_administration') |
|
177 | 179 | def error(request): |
|
178 | 180 | """ |
|
179 | 181 | Raises an internal error with some test data for testing purposes |
|
180 | 182 | """ |
|
181 | 183 | request.environ['appenlight.message'] = 'test message' |
|
182 | 184 | request.environ['appenlight.extra']['dupa'] = 'dupa' |
|
183 | 185 | request.environ['appenlight.extra']['message'] = 'message' |
|
184 | 186 | request.environ['appenlight.tags']['action'] = 'test_error' |
|
185 | 187 | request.environ['appenlight.tags']['count'] = 5 |
|
186 | 188 | log.debug(chr(960)) |
|
187 | 189 | log.debug('debug') |
|
188 | 190 | log.info(chr(960)) |
|
189 | 191 | log.info('INFO') |
|
190 | 192 | log.warning('warning') |
|
191 | 193 | |
|
192 | 194 | @time_trace(name='error.foobar', min_duration=0.1) |
|
193 | 195 | def fooobar(): |
|
194 | 196 | time.sleep(0.12) |
|
195 | 197 | return 1 |
|
196 | 198 | |
|
197 | 199 | fooobar() |
|
198 | 200 | |
|
199 | 201 | def foobar(somearg): |
|
200 | 202 | raise Exception('test') |
|
201 | 203 | |
|
202 | 204 | client = redis.StrictRedis() |
|
203 | 205 | client.setex('testval', 10, 'foo') |
|
204 | 206 | request.environ['appenlight.force_send'] = 1 |
|
205 | 207 | |
|
206 | 208 | # stats, result = get_local_storage(local_timing).get_thread_stats() |
|
207 | 209 | # import pprint |
|
208 | 210 | # pprint.pprint(stats) |
|
209 | 211 | # pprint.pprint(result) |
|
210 | 212 | # print 'entries', len(result) |
|
211 | 213 | request.environ['appenlight.username'] = 'ErgO' |
|
212 | 214 | raise Exception(chr(960) + '%s' % random.randint(1, 5)) |
|
213 | 215 | return {} |
|
214 | 216 | |
|
215 | 217 | |
|
216 | 218 | @view_config(route_name='test', match_param='action=task', |
|
217 | 219 | renderer='string', permission='root_administration') |
|
218 | 220 | def test_task(request): |
|
219 | 221 | """ |
|
220 | 222 | Test erroneous celery task |
|
221 | 223 | """ |
|
222 | 224 | import appenlight.celery.tasks |
|
223 | 225 | |
|
224 | 226 | appenlight.celery.tasks.test_exception_task.delay() |
|
225 | 227 | return 'task sent' |
|
226 | 228 | |
|
227 | 229 | |
|
228 | 230 | @view_config(route_name='test', match_param='action=task_retry', |
|
229 | 231 | renderer='string', permission='root_administration') |
|
230 | 232 | def test_task_retry(request): |
|
231 | 233 | """ |
|
232 | 234 | Test erroneous celery task |
|
233 | 235 | """ |
|
234 | 236 | import appenlight.celery.tasks |
|
235 | 237 | |
|
236 | 238 | appenlight.celery.tasks.test_retry_exception_task.delay() |
|
237 | 239 | return 'task sent' |
|
238 | 240 | |
|
239 | 241 | |
|
240 | 242 | @view_config(route_name='test', match_param='action=celery_emails', |
|
241 | 243 | renderer='string', permission='root_administration') |
|
242 | 244 | def test_celery_emails(request): |
|
243 | 245 | import appenlight.celery.tasks |
|
244 | 246 | appenlight.celery.tasks.alerting.delay() |
|
245 | 247 | return 'task sent' |
|
246 | 248 | |
|
247 | 249 | |
|
248 | 250 | @view_config(route_name='test', match_param='action=daily_digest', |
|
249 | 251 | renderer='string', permission='root_administration') |
|
250 | 252 | def test_celery_daily_digest(request): |
|
251 | 253 | import appenlight.celery.tasks |
|
252 | 254 | appenlight.celery.tasks.daily_digest.delay() |
|
253 | 255 | return 'task sent' |
|
254 | 256 | |
|
255 | 257 | |
|
256 | 258 | @view_config(route_name='test', match_param='action=celery_alerting', |
|
257 | 259 | renderer='string', permission='root_administration') |
|
258 | 260 | def test_celery_alerting(request): |
|
259 | 261 | import appenlight.celery.tasks |
|
260 | 262 | appenlight.celery.tasks.alerting() |
|
261 | 263 | return 'task sent' |
|
262 | 264 | |
|
263 | 265 | |
|
264 | 266 | @view_config(route_name='test', match_param='action=logging', |
|
265 | 267 | renderer='string', permission='root_administration') |
|
266 | 268 | def logs(request): |
|
267 | 269 | """ |
|
268 | 270 | Test some in-app logging |
|
269 | 271 | """ |
|
270 | 272 | log.debug(chr(960)) |
|
271 | 273 | log.debug('debug') |
|
272 | 274 | log.info(chr(960)) |
|
273 | 275 | log.info('INFO') |
|
274 | 276 | log.warning('Matched GET /\xc4\x85\xc5\xbc\xc4\x87' |
|
275 | 277 | '\xc4\x99\xc4\x99\xc4\x85/summary') |
|
276 | 278 | log.warning('XXXXMatched GET /\xc4\x85\xc5\xbc\xc4' |
|
277 | 279 | '\x87\xc4\x99\xc4\x99\xc4\x85/summary') |
|
278 | 280 | log.warning('DUPA /ążćęęą') |
|
279 | 281 | log.warning("g\u017ceg\u017c\u00f3\u0142ka") |
|
280 | 282 | log.error('TEST Lorem ipsum2', |
|
281 | 283 | extra={'user': 'ergo', 'commit': 'sog8ds0g7sdih12hh1j512h5k'}) |
|
282 | 284 | log.fatal('TEST Lorem ipsum3') |
|
283 | 285 | log.warning('TEST Lorem ipsum', |
|
284 | 286 | extra={"action": 'purchase', |
|
285 | 287 | "price": random.random() * 100, |
|
286 | 288 | "quantity": random.randint(1, 99)}) |
|
287 | 289 | log.warning('test_pkey', |
|
288 | 290 | extra={"action": 'test_pkey', "price": random.random() * 100, |
|
289 | 291 | 'ae_primary_key': 1, |
|
290 | 292 | "quantity": random.randint(1, 99)}) |
|
291 | 293 | log.warning('test_pkey2', |
|
292 | 294 | extra={"action": 'test_pkey', "price": random.random() * 100, |
|
293 | 295 | 'ae_primary_key': 'b', |
|
294 | 296 | 'ae_permanent': 't', |
|
295 | 297 | "quantity": random.randint(1, 99)}) |
|
296 | 298 | log.warning('test_pkey3', |
|
297 | 299 | extra={"action": 'test_pkey', "price": random.random() * 100, |
|
298 | 300 | 'ae_primary_key': 1, |
|
299 | 301 | "quantity": random.randint(1, 99)}) |
|
300 | 302 | log.warning('test_pkey4', |
|
301 | 303 | extra={"action": 'test_pkey', "price": random.random() * 100, |
|
302 | 304 | 'ae_primary_key': 'b', |
|
303 | 305 | 'ae_permanent': True, |
|
304 | 306 | "quantity": random.randint(1, 99)}) |
|
305 | 307 | request.environ['appenlight.force_send'] = 1 |
|
306 | 308 | return {} |
|
307 | 309 | |
|
308 | 310 | |
|
309 | 311 | @view_config(route_name='test', match_param='action=transaction', |
|
310 | 312 | renderer='string', permission='root_administration') |
|
311 | 313 | def transaction_test(request): |
|
312 | 314 | """ |
|
313 | 315 | Test transactions |
|
314 | 316 | """ |
|
315 | 317 | try: |
|
316 | 318 | result = DBSession.execute("SELECT 1/0") |
|
317 | 319 | except: |
|
318 | 320 | request.tm.abort() |
|
319 | 321 | result = DBSession.execute("SELECT 1") |
|
320 | 322 | return 'OK' |
|
321 | 323 | |
|
322 | 324 | |
|
323 | 325 | @view_config(route_name='test', match_param='action=slow_request', |
|
324 | 326 | renderer='string', permission='root_administration') |
|
325 | 327 | def slow_request(request): |
|
326 | 328 | """ |
|
327 | 329 | Test a request that has some slow entries - including nested calls |
|
328 | 330 | """ |
|
329 | 331 | users = DBSession.query(User).all() |
|
330 | 332 | import random |
|
331 | 333 | some_val = random.random() |
|
332 | 334 | import threading |
|
333 | 335 | t_id = id(threading.currentThread()) |
|
334 | 336 | log.warning('slow_log %s %s ' % (some_val, t_id)) |
|
335 | 337 | log.critical('tid %s' % t_id) |
|
336 | 338 | |
|
337 | 339 | @time_trace(name='baz_func %s' % some_val, min_duration=0.1) |
|
338 | 340 | def baz(arg): |
|
339 | 341 | time.sleep(0.32) |
|
340 | 342 | return arg |
|
341 | 343 | |
|
342 | 344 | requests.get('http://ubuntu.com') |
|
343 | 345 | |
|
344 | 346 | @time_trace(name='foo_func %s %s' % (some_val, t_id), min_duration=0.1) |
|
345 | 347 | def foo(arg): |
|
346 | 348 | time.sleep(0.52) |
|
347 | 349 | log.warning('foo_func %s %s' % (some_val, t_id)) |
|
348 | 350 | requests.get('http://ubuntu.com?test=%s' % some_val) |
|
349 | 351 | return bar(arg) |
|
350 | 352 | |
|
351 | 353 | @time_trace(name='bar_func %s %s' % (some_val, t_id), min_duration=0.1) |
|
352 | 354 | def bar(arg): |
|
353 | 355 | log.warning('bar_func %s %s' % (some_val, t_id)) |
|
354 | 356 | time.sleep(1.52) |
|
355 | 357 | baz(arg) |
|
356 | 358 | baz(arg) |
|
357 | 359 | return baz(arg) |
|
358 | 360 | |
|
359 | 361 | foo('a') |
|
360 | 362 | return {} |
|
361 | 363 | |
|
362 | 364 | |
|
363 | 365 | @view_config(route_name='test', match_param='action=styling', |
|
364 | 366 | renderer='appenlight:templates/tests/styling.jinja2', |
|
365 | 367 | permission='__no_permission_required__') |
|
366 | 368 | def styling(request): |
|
367 | 369 | """ |
|
368 | 370 | Some styling test page |
|
369 | 371 | """ |
|
370 | 372 | _ = str |
|
371 | 373 | request.session.flash(_( |
|
372 | 374 | 'Your password got updated. ' |
|
373 | 375 | 'Next time log in with your new credentials.')) |
|
374 | 376 | request.session.flash(_( |
|
375 | 377 | 'Something went wrong when we ' |
|
376 | 378 | 'tried to authorize you via external provider'), |
|
377 | 379 | 'warning') |
|
378 | 380 | request.session.flash(_( |
|
379 | 381 | 'Unfortunately there was a problem ' |
|
380 | 382 | 'processing your payment, please try again later.'), |
|
381 | 383 | 'error') |
|
382 | 384 | return {} |
|
383 | 385 | |
|
384 | 386 | |
|
385 | 387 | @view_config(route_name='test', match_param='action=js_error', |
|
386 | 388 | renderer='appenlight:templates/tests/js_error.jinja2', |
|
387 | 389 | permission='__no_permission_required__') |
|
388 | 390 | def js(request): |
|
389 | 391 | """ |
|
390 | 392 | Used for testing the JavaScript client for error catching |
|
391 | 393 | """ |
|
392 | 394 | return {} |
|
393 | 395 | |
|
394 | 396 | |
|
395 | 397 | @view_config(route_name='test', match_param='action=js_log', |
|
396 | 398 | renderer='appenlight:templates/tests/js_log.jinja2', |
|
397 | 399 | permission='__no_permission_required__') |
|
398 | 400 | def js_log(request): |
|
399 | 401 | """ |
|
400 | 402 | Used for testing the JavaScript client for logging |
|
401 | 403 | """ |
|
402 | 404 | return {} |
|
403 | 405 | |
|
404 | 406 | |
|
405 | 407 | @view_config(route_name='test', match_param='action=log_requests', |
|
406 | 408 | renderer='string', |
|
407 | 409 | permission='__no_permission_required__') |
|
408 | 410 | def log_requests(request): |
|
409 | 411 | """ |
|
410 | 412 | Utility view for printing JSON requests |
|
411 | 413 | """ |
|
412 | 414 | return {} |
|
413 | 415 | |
|
414 | 416 | |
|
415 | 417 | @view_config(route_name='test', match_param='action=url', renderer='string', |
|
416 | 418 | permission='__no_permission_required__') |
|
417 | 419 | def route_url_test(request): |
|
418 | 420 | """ |
|
419 | 421 | Test view returning a route URL generated with a custom _app_url |
|
420 | 422 | """ |
|
421 | 423 | return request.route_url('reports', _app_url='https://appenlight.com') |
|
422 | 424 | |
|
423 | 425 | |
|
424 | 426 | class TestClass(object): |
|
425 | 427 | """ |
|
426 | 428 | Used to test if class-based view name resolution works correctly |
|
427 | 429 | """ |
|
428 | 430 | |
|
429 | 431 | def __init__(self, request): |
|
430 | 432 | self.request = request |
|
431 | 433 | |
|
432 | 434 | @view_config(route_name='test', match_param='action=test_a', |
|
433 | 435 | renderer='string', permission='root_administration') |
|
434 | 436 | @view_config(route_name='test', match_param='action=test_c', |
|
435 | 437 | renderer='string', permission='root_administration') |
|
436 | 438 | @view_config(route_name='test', match_param='action=test_d', |
|
437 | 439 | renderer='string', permission='root_administration') |
|
438 | 440 | def test_a(self): |
|
439 | 441 | return 'ok' |
|
440 | 442 | |
|
441 | 443 | @view_config(route_name='test', match_param='action=test_b', |
|
442 | 444 | renderer='string', permission='root_administration') |
|
443 | 445 | def test_b(self): |
|
444 | 446 | return 'ok' |
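
The slow_request view earlier in this file relies on appenlight's time_trace decorator to record calls slower than min_duration. A minimal sketch of what such a decorator might look like, inferred only from the name/min_duration usage shown above (the real client implementation is not reproduced here):

    import functools
    import time

    def time_trace(name, min_duration=0.1):
        def decorator(fn):
            @functools.wraps(fn)
            def wrapper(*args, **kwargs):
                start = time.time()
                try:
                    return fn(*args, **kwargs)
                finally:
                    elapsed = time.time() - start
                    if elapsed >= min_duration:
                        # a real client would record a slow-call trace entry here
                        print('slow call %s took %.2fs' % (name, elapsed))
            return wrapper
        return decorator
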
@@ -1,678 +1,678 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright 2010 - 2017 RhodeCode GmbH and the AppEnlight project authors |
|
4 | 4 | # |
|
5 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
|
6 | 6 | # you may not use this file except in compliance with the License. |
|
7 | 7 | # You may obtain a copy of the License at |
|
8 | 8 | # |
|
9 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
|
10 | 10 | # |
|
11 | 11 | # Unless required by applicable law or agreed to in writing, software |
|
12 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
|
13 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
14 | 14 | # See the License for the specific language governing permissions and |
|
15 | 15 | # limitations under the License. |
|
16 | 16 | |
|
17 | 17 | import colander |
|
18 | 18 | import datetime |
|
19 | 19 | import json |
|
20 | 20 | import logging |
|
21 | 21 | import uuid |
|
22 | 22 | import pyramid.security as security |
|
23 | 23 | import appenlight.lib.helpers as h |
|
24 | 24 | |
|
25 | 25 | from authomatic.adapters import WebObAdapter |
|
26 | 26 | from pyramid.view import view_config |
|
27 | 27 | from pyramid.httpexceptions import HTTPFound, HTTPUnprocessableEntity |
|
28 | 28 | from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest |
|
29 | 29 | from pyramid.security import NO_PERMISSION_REQUIRED |
|
30 | 30 | from ziggurat_foundations.models.services.external_identity import \ |
|
31 | 31 | ExternalIdentityService |
|
32 | from ziggurat_foundations.models.services.user import UserService | |
|
32 | 33 | |
|
33 | 34 | from appenlight.lib import generate_random_string |
|
34 | 35 | from appenlight.lib.social import handle_social_data |
|
35 | 36 | from appenlight.lib.utils import channelstream_request, add_cors_headers, \ |
|
36 | 37 | permission_tuple_to_dict |
|
37 | 38 | from appenlight.models import DBSession |
|
38 | 39 | from appenlight.models.alert_channels.email import EmailAlertChannel |
|
39 | 40 | from appenlight.models.alert_channel_action import AlertChannelAction |
|
40 | 41 | from appenlight.models.services.alert_channel import AlertChannelService |
|
41 | 42 | from appenlight.models.services.alert_channel_action import \ |
|
42 | 43 | AlertChannelActionService |
|
43 | 44 | from appenlight.models.auth_token import AuthToken |
|
44 | 45 | from appenlight.models.report import REPORT_TYPE_MATRIX |
|
45 | 46 | from appenlight.models.user import User |
|
46 | 47 | from appenlight.models.services.user import UserService |
|
47 | 48 | from appenlight.subscribers import _ |
|
48 | 49 | from appenlight.validators import build_rule_schema |
|
49 | 50 | from appenlight import forms |
|
50 | 51 | from webob.multidict import MultiDict |
|
51 | 52 | |
|
52 | 53 | log = logging.getLogger(__name__) |
|
53 | 54 | |
|
54 | 55 | |
|
55 | 56 | @view_config(route_name='users_no_id', renderer='json', |
|
56 | 57 | request_method="GET", permission='root_administration') |
|
57 | 58 | def users_list(request): |
|
58 | 59 | """ |
|
59 | 60 | Returns users list |
|
60 | 61 | """ |
|
61 | 62 | props = ['user_name', 'id', 'first_name', 'last_name', 'email', |
|
62 | 63 | 'last_login_date', 'status'] |
|
63 | 64 | users = UserService.all() |
|
64 | 65 | users_dicts = [] |
|
65 | 66 | for user in users: |
|
66 | 67 | u_dict = user.get_dict(include_keys=props) |
|
67 | u_dict['gravatar_url'] = | |
|
68 | u_dict['gravatar_url'] = UserService.gravatar_url(user, s=20) | |
|
68 | 69 | users_dicts.append(u_dict) |
|
69 | 70 | return users_dicts |
|
70 | 71 | |
|
71 | 72 | |
|
72 | 73 | @view_config(route_name='users_no_id', renderer='json', |
|
73 | 74 | request_method="POST", permission='root_administration') |
|
74 | 75 | def users_create(request): |
|
75 | 76 | """ |
|
76 | 77 | Creates a new user |
|
77 | 78 | """ |
|
78 | 79 | form = forms.UserCreateForm(MultiDict(request.safe_json_body or {}), |
|
79 | 80 | csrf_context=request) |
|
80 | 81 | if form.validate(): |
|
81 | 82 | log.info('registering user') |
|
82 | 83 | # probably not needed in the future since this requires root anyway |

83 | 84 | # let's keep this here in case we lower the view permission in the future |
|
84 | 85 | # if request.registry.settings['appenlight.disable_registration']: |
|
85 | 86 | # return HTTPUnprocessableEntity(body={'error': 'Registration is currently disabled.'}) |
|
86 | 87 | user = User() |
|
87 | 88 | # insert new user here |
|
88 | 89 | DBSession.add(user) |
|
89 | 90 | form.populate_obj(user) |
|
90 | | |

91 | | |
|
91 | UserService.regenerate_security_code(user) | |
|
92 | UserService.set_password(user, user.user_password) | |
|
92 | 93 | user.status = 1 if form.status.data else 0 |
|
93 | 94 | request.session.flash(_('User created')) |
|
94 | 95 | DBSession.flush() |
|
95 | 96 | return user.get_dict(exclude_keys=['security_code_date', 'notes', |
|
96 | 97 | 'security_code', 'user_password']) |
|
97 | 98 | else: |
|
98 | 99 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
99 | 100 | |
|
100 | 101 | |
|
101 | 102 | @view_config(route_name='users', renderer='json', |
|
102 | 103 | request_method="GET", permission='root_administration') |
|
103 | 104 | @view_config(route_name='users', renderer='json', |
|
104 | 105 | request_method="PATCH", permission='root_administration') |
|
105 | 106 | def users_update(request): |
|
106 | 107 | """ |
|
107 | 108 | Returns or updates a user object |
|
108 | 109 | """ |
|
109 | user = User.by_id(request.matchdict.get('user_id')) | |
|
110 | user = UserService.by_id(request.matchdict.get('user_id')) | |
|
110 | 111 | if not user: |
|
111 | 112 | return HTTPNotFound() |
|
112 | 113 | post_data = request.safe_json_body or {} |
|
113 | 114 | if request.method == 'PATCH': |
|
114 | 115 | form = forms.UserUpdateForm(MultiDict(post_data), |
|
115 | 116 | csrf_context=request) |
|
116 | 117 | if form.validate(): |
|
117 | 118 | form.populate_obj(user, ignore_none=True) |
|
118 | 119 | if form.user_password.data: |
|
119 | | |
|
120 | UserService.set_password(user, user.user_password) | |
|
120 | 121 | if form.status.data: |
|
121 | 122 | user.status = 1 |
|
122 | 123 | else: |
|
123 | 124 | user.status = 0 |
|
124 | 125 | else: |
|
125 | 126 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
126 | 127 | return user.get_dict(exclude_keys=['security_code_date', 'notes', |
|
127 | 128 | 'security_code', 'user_password']) |
|
128 | 129 | |
|
129 | 130 | |
|
130 | 131 | @view_config(route_name='users_property', |
|
131 | 132 | match_param='key=resource_permissions', |
|
132 | 133 | renderer='json', permission='authenticated') |
|
133 | 134 | def users_resource_permissions_list(request): |
|
134 | 135 | """ |
|
135 | 136 | Get list of permissions assigned to specific resources |
|
136 | 137 | """ |
|
137 | user = User.by_id(request.matchdict.get('user_id')) | |
|
138 | user = UserService.by_id(request.matchdict.get('user_id')) | |
|
138 | 139 | if not user: |
|
139 | 140 | return HTTPNotFound() |
|
140 | 141 | return [permission_tuple_to_dict(perm) for perm in |
|
141 | | |
|
142 | UserService.resources_with_possible_perms(user)] | |
|
142 | 143 | |
|
143 | 144 | |
|
144 | 145 | @view_config(route_name='users', renderer='json', |
|
145 | 146 | request_method="DELETE", permission='root_administration') |
|
146 | 147 | def users_DELETE(request): |
|
147 | 148 | """ |
|
148 | 149 | Removes a user permanently from the db, after checking that at least |

149 | 150 | one administrator will remain after the operation |
|
150 | 151 | """ |
|
151 | 152 | msg = _('There needs to be at least one administrator in the system') |
|
152 | user = User.by_id(request.matchdict.get('user_id')) | |
|
153 | user = UserService.by_id(request.matchdict.get('user_id')) | |
|
153 | 154 | if user: |
|
154 | users = User.users_for_perms(['root_administration']).all() | |
|
155 | users = UserService.users_for_perms(['root_administration']).all() | |
|
155 | 156 | if len(users) < 2 and user.id == users[0].id: |
|
156 | 157 | request.session.flash(msg, 'warning') |
|
157 | 158 | else: |
|
158 | 159 | DBSession.delete(user) |
|
159 | 160 | request.session.flash(_('User removed')) |
|
160 | 161 | return True |
|
161 | 162 | request.response.status = 422 |
|
162 | 163 | return False |
|
163 | 164 | |
|
164 | 165 | |
|
165 | 166 | @view_config(route_name='users_self', renderer='json', |
|
166 | 167 | request_method="GET", permission='authenticated') |
|
167 | 168 | @view_config(route_name='users_self', renderer='json', |
|
168 | 169 | request_method="PATCH", permission='authenticated') |
|
169 | 170 | def users_self(request): |
|
170 | 171 | """ |
|
171 | 172 | Returns or updates the user's personal information |
|
172 | 173 | """ |
|
173 | 174 | |
|
174 | 175 | if request.method == 'PATCH': |
|
175 | 176 | form = forms.gen_user_profile_form()( |
|
176 | 177 | MultiDict(request.unsafe_json_body), |
|
177 | 178 | csrf_context=request) |
|
178 | 179 | if form.validate(): |
|
179 | 180 | form.populate_obj(request.user) |
|
180 | 181 | request.session.flash(_('Your profile got updated.')) |
|
181 | 182 | else: |
|
182 | 183 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
183 | 184 | return request.user.get_dict( |
|
184 | 185 | exclude_keys=['security_code_date', 'notes', 'security_code', |
|
185 | 186 | 'user_password'], |
|
186 | 187 | extended_info=True) |
|
187 | 188 | |
|
188 | 189 | |
|
189 | 190 | @view_config(route_name='users_self_property', |
|
190 | 191 | match_param='key=external_identities', renderer='json', |
|
191 | 192 | request_method='GET', permission='authenticated') |
|
192 | 193 | def users_external_identities(request): |
|
193 | 194 | user = request.user |
|
194 | 195 | identities = [{'provider': ident.provider_name, |
|
195 | 196 | 'id': ident.external_user_name} for ident |
|
196 | 197 | in user.external_identities.all()] |
|
197 | 198 | return identities |
|
198 | 199 | |
|
199 | 200 | |
|
200 | 201 | @view_config(route_name='users_self_property', |
|
201 | 202 | match_param='key=external_identities', renderer='json', |
|
202 | 203 | request_method='DELETE', permission='authenticated') |
|
203 | 204 | def users_external_identities_DELETE(request): |
|
204 | 205 | """ |
|
205 | 206 | Unbinds external identities (Google, Twitter, etc.) from the user account |
|
206 | 207 | """ |
|
207 | 208 | user = request.user |
|
208 | 209 | for identity in user.external_identities.all(): |
|
209 | 210 | log.info('found identity %s' % identity) |
|
210 | 211 | if (identity.provider_name == request.params.get('provider') and |
|
211 | 212 | identity.external_user_name == request.params.get('id')): |
|
212 | 213 | log.info('remove identity %s' % identity) |
|
213 | 214 | DBSession.delete(identity) |
|
214 | 215 | return True |
|
215 | 216 | return False |
|
216 | 217 | |
|
217 | 218 | |
|
218 | 219 | @view_config(route_name='users_self_property', |
|
219 | 220 | match_param='key=password', renderer='json', |
|
220 | 221 | request_method='PATCH', permission='authenticated') |
|
221 | 222 | def users_password(request): |
|
222 | 223 | """ |
|
223 | 224 | Sets new password for user account |
|
224 | 225 | """ |
|
225 | 226 | user = request.user |
|
226 | 227 | form = forms.ChangePasswordForm(MultiDict(request.unsafe_json_body), |
|
227 | 228 | csrf_context=request) |
|
228 | 229 | form.old_password.user = user |
|
229 | 230 | if form.validate(): |
|
230 | | |

231 | | |
|
231 | UserService.regenerate_security_code(user) | |
|
232 | UserService.set_password(user, form.new_password.data) | |
|
232 | 233 | msg = 'Your password got updated. ' \ |
|
233 | 234 | 'Next time log in with your new credentials.' |
|
234 | 235 | request.session.flash(_(msg)) |
|
235 | 236 | return True |
|
236 | 237 | else: |
|
237 | 238 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
238 | 239 | return False |
|
239 | 240 | |
|
240 | 241 | |
|
241 | 242 | @view_config(route_name='users_self_property', match_param='key=websocket', |
|
242 | 243 | renderer='json', permission='authenticated') |
|
243 | 244 | def users_websocket(request): |
|
244 | 245 | """ |
|
245 | 246 | Handle authorization of users trying to connect |
|
246 | 247 | """ |
|
247 | 248 | # handle preflight request |
|
248 | 249 | user = request.user |
|
249 | 250 | if request.method == 'OPTIONS': |
|
250 | 251 | res = request.response; res.text = 'OK' |
|
251 | 252 | add_cors_headers(res) |
|
252 | 253 | return res |
|
253 | applications = | |

254 | ['view'], resource_types=['application']) | |
|
254 | applications = UserService.resources_with_perms(user, ['view'], resource_types=['application']) | |
|
255 | 255 | channels = ['app_%s' % app.resource_id for app in applications] |
|
256 | 256 | payload = {"username": user.user_name, |
|
257 | 257 | "conn_id": str(uuid.uuid4()), |
|
258 | 258 | "channels": channels |
|
259 | 259 | } |
|
260 | 260 | settings = request.registry.settings |
|
261 | 261 | response = channelstream_request( |
|
262 | 262 | settings['cometd.secret'], '/connect', payload, |
|
263 | 263 | servers=[request.registry.settings['cometd_servers']], |
|
264 | 264 | throw_exceptions=True) |
|
265 | 265 | return payload |
|
266 | 266 | |
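
A client is expected to take the payload returned by users_websocket above and open a websocket connection against the channelstream server. A hypothetical client-side sketch (the payload values and the websocket host below are illustrative assumptions; the real server address comes from deployment settings such as cometd_servers):

    # shape of the payload returned by users_websocket above
    payload = {'username': 'some_user',
               'conn_id': 'a-uuid4-string',
               'channels': ['app_1', 'app_2']}
    # a websocket client would now connect using the issued conn_id
    ws_url = 'wss://channelstream.local/ws?conn_id=%s' % payload['conn_id']
    print('connect to %s and listen on %s' % (ws_url, payload['channels']))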
|
267 | 267 | |
|
268 | 268 | @view_config(route_name='users_self_property', request_method="GET", |
|
269 | 269 | match_param='key=alert_channels', renderer='json', |
|
270 | 270 | permission='authenticated') |
|
271 | 271 | def alert_channels(request): |
|
272 | 272 | """ |
|
273 | 273 | Lists all available alert channels |
|
274 | 274 | """ |
|
275 | 275 | user = request.user |
|
276 | 276 | return [c.get_dict(extended_info=True) for c in user.alert_channels] |
|
277 | 277 | |
|
278 | 278 | |
|
279 | 279 | @view_config(route_name='users_self_property', match_param='key=alert_actions', |
|
280 | 280 | request_method="GET", renderer='json', permission='authenticated') |
|
281 | 281 | def alert_actions(request): |
|
282 | 282 | """ |
|
283 | 283 | Lists all available alert actions |
|
284 | 284 | """ |
|
285 | 285 | user = request.user |
|
286 | 286 | return [r.get_dict(extended_info=True) for r in user.alert_actions] |
|
287 | 287 | |
|
288 | 288 | |
|
289 | 289 | @view_config(route_name='users_self_property', renderer='json', |
|
290 | 290 | match_param='key=alert_channels_rules', request_method='POST', |
|
291 | 291 | permission='authenticated') |
|
292 | 292 | def alert_channels_rule_POST(request): |
|
293 | 293 | """ |
|
294 | 294 | Creates new notification rule for specific alert channel |
|
295 | 295 | """ |
|
296 | 296 | user = request.user |
|
297 | 297 | alert_action = AlertChannelAction(owner_id=request.user.id, |
|
298 | 298 | type='report') |
|
299 | 299 | DBSession.add(alert_action) |
|
300 | 300 | DBSession.flush() |
|
301 | 301 | return alert_action.get_dict() |
|
302 | 302 | |
|
303 | 303 | |
|
304 | 304 | @view_config(route_name='users_self_property', permission='authenticated', |
|
305 | 305 | match_param='key=alert_channels_rules', |
|
306 | 306 | renderer='json', request_method='DELETE') |
|
307 | 307 | def alert_channels_rule_DELETE(request): |
|
308 | 308 | """ |
|
309 | 309 | Removes specific alert channel rule |
|
310 | 310 | """ |
|
311 | 311 | user = request.user |
|
312 | 312 | rule_action = AlertChannelActionService.by_owner_id_and_pkey( |
|
313 | 313 | user.id, |
|
314 | 314 | request.GET.get('pkey')) |
|
315 | 315 | if rule_action: |
|
316 | 316 | DBSession.delete(rule_action) |
|
317 | 317 | return True |
|
318 | 318 | return HTTPNotFound() |
|
319 | 319 | |
|
320 | 320 | |
|
321 | 321 | @view_config(route_name='users_self_property', permission='authenticated', |
|
322 | 322 | match_param='key=alert_channels_rules', |
|
323 | 323 | renderer='json', request_method='PATCH') |
|
324 | 324 | def alert_channels_rule_PATCH(request): |
|
325 | 325 | """ |
|
326 | 326 | Updates specific alert channel rule |
|
327 | 327 | """ |
|
328 | 328 | user = request.user |
|
329 | 329 | json_body = request.unsafe_json_body |
|
330 | 330 | |
|
331 | 331 | schema = build_rule_schema(json_body['rule'], REPORT_TYPE_MATRIX) |
|
332 | 332 | try: |
|
333 | 333 | schema.deserialize(json_body['rule']) |
|
334 | 334 | except colander.Invalid as exc: |
|
335 | 335 | return HTTPUnprocessableEntity(body=json.dumps(exc.asdict())) |
|
336 | 336 | |
|
337 | 337 | rule_action = AlertChannelActionService.by_owner_id_and_pkey( |
|
338 | 338 | user.id, |
|
339 | 339 | request.GET.get('pkey')) |
|
340 | 340 | |
|
341 | 341 | if rule_action: |
|
342 | 342 | rule_action.rule = json_body['rule'] |
|
343 | 343 | rule_action.resource_id = json_body['resource_id'] |
|
344 | 344 | rule_action.action = json_body['action'] |
|
345 | 345 | return rule_action.get_dict() |
|
346 | 346 | return HTTPNotFound() |
|
347 | 347 | |
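
alert_channels_rule_PATCH above follows the usual colander pattern: build a schema, deserialize the incoming payload, and turn colander.Invalid into a 422 response via exc.asdict(). A minimal self-contained sketch of that pattern (the schema fields here are illustrative, not what build_rule_schema actually produces):

    import colander

    class RuleSchema(colander.MappingSchema):
        field = colander.SchemaNode(colander.String())
        op = colander.SchemaNode(colander.String())

    try:
        RuleSchema().deserialize({'field': 'duration'})  # missing 'op'
    except colander.Invalid as exc:
        print(exc.asdict())  # e.g. {'op': 'Required'}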
|
348 | 348 | |
|
349 | 349 | @view_config(route_name='users_self_property', permission='authenticated', |
|
350 | 350 | match_param='key=alert_channels', |
|
351 | 351 | renderer='json', request_method='PATCH') |
|
352 | 352 | def alert_channels_PATCH(request): |
|
353 | 353 | user = request.user |
|
354 | 354 | channel_name = request.GET.get('channel_name') |
|
355 | 355 | channel_value = request.GET.get('channel_value') |
|
356 | 356 | # iterate over channels |
|
357 | 357 | channel = None |
|
358 | 358 | for chan in user.alert_channels: |

359 | 359 | if (chan.channel_name == channel_name and |

360 | 360 | chan.channel_value == channel_value): |

361 | 361 | channel = chan |
|
362 | 362 | if not channel: |
|
363 | 363 | return HTTPNotFound() |
|
364 | 364 | |
|
365 | 365 | allowed_keys = ['daily_digest', 'send_alerts'] |
|
366 | 366 | for k, v in request.unsafe_json_body.items(): |
|
367 | 367 | if k in allowed_keys: |
|
368 | 368 | setattr(channel, k, v) |
|
369 | 369 | else: |
|
370 | 370 | return HTTPBadRequest() |
|
371 | 371 | return channel.get_dict() |
|
372 | 372 | |
|
373 | 373 | |
|
374 | 374 | @view_config(route_name='users_self_property', permission='authenticated', |
|
375 | 375 | match_param='key=alert_channels', |
|
376 | 376 | request_method="POST", renderer='json') |
|
377 | 377 | def alert_channels_POST(request): |
|
378 | 378 | """ |
|
379 | 379 | Creates a new email alert channel for user, sends a validation email |
|
380 | 380 | """ |
|
381 | 381 | user = request.user |
|
382 | 382 | form = forms.EmailChannelCreateForm(MultiDict(request.unsafe_json_body), |
|
383 | 383 | csrf_context=request) |
|
384 | 384 | if not form.validate(): |
|
385 | 385 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
386 | 386 | |
|
387 | 387 | email = form.email.data.strip() |
|
388 | 388 | channel = EmailAlertChannel() |
|
389 | 389 | channel.channel_name = 'email' |
|
390 | 390 | channel.channel_value = email |
|
391 | 391 | security_code = generate_random_string(10) |
|
392 | 392 | channel.channel_json_conf = {'security_code': security_code} |
|
393 | 393 | user.alert_channels.append(channel) |
|
394 | 394 | |
|
395 | 395 | email_vars = {'user': user, |
|
396 | 396 | 'email': email, |
|
397 | 397 | 'request': request, |
|
398 | 398 | 'security_code': security_code, |
|
399 | 399 | 'email_title': "AppEnlight :: " |
|
400 | 400 | "Please authorize your email"} |
|
401 | 401 | |
|
402 | 402 | UserService.send_email(request, recipients=[email], |
|
403 | 403 | variables=email_vars, |
|
404 | 404 | template='/email_templates/authorize_email.jinja2') |
|
405 | 405 | request.session.flash(_('Your alert channel was ' |
|
406 | 406 | 'added to the system.')) |
|
407 | 407 | request.session.flash( |
|
408 | 408 | _('You need to authorize your email channel, a message was ' |
|
409 | 409 | 'sent containing necessary information.'), |
|
410 | 410 | 'warning') |
|
411 | 411 | DBSession.flush() |
|
412 | 412 | return channel.get_dict() |
|
413 | 413 | |
|
414 | 414 | |
|
415 | 415 | @view_config(route_name='section_view', |
|
416 | 416 | match_param=['section=user_section', |
|
417 | 417 | 'view=alert_channels_authorize'], |
|
418 | 418 | renderer='string', permission='authenticated') |
|
419 | 419 | def alert_channels_authorize(request): |
|
420 | 420 | """ |
|
421 | 421 | Performs alert channel authorization based on auth code sent in email |
|
422 | 422 | """ |
|
423 | 423 | user = request.user |
|
424 | 424 | for channel in user.alert_channels: |
|
425 | 425 | security_code = request.params.get('security_code', '') |
|
426 | 426 | if channel.channel_json_conf['security_code'] == security_code: |
|
427 | 427 | channel.channel_validated = True |
|
428 | 428 | request.session.flash(_('Your email was authorized.')) |
|
429 | 429 | return HTTPFound(location=request.route_url('/')) |
|
430 | 430 | |
|
431 | 431 | |
|
432 | 432 | @view_config(route_name='users_self_property', request_method="DELETE", |
|
433 | 433 | match_param='key=alert_channels', renderer='json', |
|
434 | 434 | permission='authenticated') |
|
435 | 435 | def alert_channel_DELETE(request): |
|
436 | 436 | """ |
|
437 | 437 | Removes an alert channel from the user's channels |
|
438 | 438 | """ |
|
439 | 439 | user = request.user |
|
440 | 440 | channel = None |
|
441 | 441 | for chan in user.alert_channels: |
|
442 | 442 | if (chan.channel_name == request.params.get('channel_name') and |
|
443 | 443 | chan.channel_value == request.params.get('channel_value')): |
|
444 | 444 | channel = chan |
|
445 | 445 | break |
|
446 | 446 | if channel: |
|
447 | 447 | user.alert_channels.remove(channel) |
|
448 | 448 | request.session.flash(_('Your channel was removed.')) |
|
449 | 449 | return True |
|
450 | 450 | return False |
|
451 | 451 | |
|
452 | 452 | |
|
453 | 453 | @view_config(route_name='users_self_property', permission='authenticated', |
|
454 | 454 | match_param='key=alert_channels_actions_binds', |
|
455 | 455 | renderer='json', request_method="POST") |
|
456 | 456 | def alert_channels_actions_binds_POST(request): |
|
457 | 457 | """ |
|
458 | 458 | Binds an alert action to one of the user's channels |
|
459 | 459 | """ |
|
460 | 460 | user = request.user |
|
461 | 461 | json_body = request.unsafe_json_body |
|
462 | 462 | channel = AlertChannelService.by_owner_id_and_pkey( |
|
463 | 463 | user.id, |
|
464 | 464 | json_body.get('channel_pkey')) |
|
465 | 465 | |
|
466 | 466 | rule_action = AlertChannelActionService.by_owner_id_and_pkey( |
|
467 | 467 | user.id, |
|
468 | 468 | json_body.get('action_pkey')) |
|
469 | 469 | |
|
470 | 470 | if channel and rule_action: |
|
471 | 471 | if channel.pkey not in [c.pkey for c in rule_action.channels]: |
|
472 | 472 | rule_action.channels.append(channel) |
|
473 | 473 | return rule_action.get_dict(extended_info=True) |
|
474 | 474 | return HTTPUnprocessableEntity() |
|
475 | 475 | |
|
476 | 476 | |
|
477 | 477 | @view_config(route_name='users_self_property', request_method="DELETE", |
|
478 | 478 | match_param='key=alert_channels_actions_binds', |
|
479 | 479 | renderer='json', permission='authenticated') |
|
480 | 480 | def alert_channels_actions_binds_DELETE(request): |
|
481 | 481 | """ |
|
482 | 482 | Unbinds an alert action from one of the user's channels |
|
483 | 483 | """ |
|
484 | 484 | user = request.user |
|
485 | 485 | channel = AlertChannelService.by_owner_id_and_pkey( |
|
486 | 486 | user.id, |
|
487 | 487 | request.GET.get('channel_pkey')) |
|
488 | 488 | |
|
489 | 489 | rule_action = AlertChannelActionService.by_owner_id_and_pkey( |
|
490 | 490 | user.id, |
|
491 | 491 | request.GET.get('action_pkey')) |
|
492 | 492 | |
|
493 | 493 | if channel and rule_action: |
|
494 | 494 | if channel.pkey in [c.pkey for c in rule_action.channels]: |
|
495 | 495 | rule_action.channels.remove(channel) |
|
496 | 496 | return rule_action.get_dict(extended_info=True) |
|
497 | 497 | return HTTPUnprocessableEntity() |
|
498 | 498 | |
|
499 | 499 | |
|
500 | 500 | @view_config(route_name='social_auth_abort', |
|
501 | 501 | renderer='string', permission=NO_PERMISSION_REQUIRED) |
|
502 | 502 | def oauth_abort(request): |
|
503 | 503 | """ |
|
504 | 504 | Handles problems with authorization via velruse |
|
505 | 505 | """ |
|
506 | 506 | |
|
507 | 507 | |
|
508 | 508 | @view_config(route_name='social_auth', permission=NO_PERMISSION_REQUIRED) |
|
509 | 509 | def social_auth(request): |
|
510 | 510 | # Get the internal provider name URL variable. |
|
511 | 511 | provider_name = request.matchdict.get('provider') |
|
512 | 512 | |
|
513 | 513 | # Start the login procedure. |
|
514 | 514 | adapter = WebObAdapter(request, request.response) |
|
515 | 515 | result = request.authomatic.login(adapter, provider_name) |
|
516 | 516 | if result: |
|
517 | 517 | if result.error: |
|
518 | 518 | return handle_auth_error(request, result) |
|
519 | 519 | elif result.user: |
|
520 | 520 | return handle_auth_success(request, result) |
|
521 | 521 | return request.response |
|
522 | 522 | |
|
523 | 523 | |
|
524 | 524 | def handle_auth_error(request, result): |
|
525 | 525 | # Login procedure finished with an error. |
|
526 | 526 | request.session.pop('zigg.social_auth', None) |
|
527 | 527 | request.session.flash(_('Something went wrong when we tried to ' |
|
528 | 528 | 'authorize you via external provider. ' |
|
529 | 529 | 'Please try again.'), 'warning') |
|
530 | 530 | |
|
531 | 531 | return HTTPFound(location=request.route_url('/')) |
|
532 | 532 | |
|
533 | 533 | |
|
534 | 534 | def handle_auth_success(request, result): |
|
535 | 535 | # Hooray, we have the user! |
|
536 | 536 | # OAuth 2.0 and OAuth 1.0a provide only limited user data on login, |
|
537 | 537 | # so we need to update the user to get more info. |
|
538 | 538 | if result.user: |
|
539 | 539 | result.user.update() |
|
540 | 540 | |
|
541 | 541 | social_data = { |
|
542 | 542 | 'user': {'data': result.user.data}, |
|
543 | 543 | 'credentials': result.user.credentials |
|
544 | 544 | } |
|
545 | 545 | # normalize data |
|
546 | 546 | social_data['user']['id'] = result.user.id |
|
547 | 547 | user_name = result.user.username or '' |
|
548 | 548 | # use email name as username for google |
|
549 | 549 | if (social_data['credentials'].provider_name == 'google' and |
|
550 | 550 | result.user.email): |
|
551 | 551 | user_name = result.user.email |
|
552 | 552 | social_data['user']['user_name'] = user_name |
|
553 | 553 | social_data['user']['email'] = result.user.email or '' |
|
554 | 554 | |
|
555 | 555 | request.session['zigg.social_auth'] = social_data |
|
556 | 556 | # user is logged in, so bind their external identity to the account |
|
557 | 557 | if request.user: |
|
558 | 558 | handle_social_data(request, request.user, social_data) |
|
559 | 559 | request.session.pop('zigg.social_auth', None) |
|
560 | 560 | return HTTPFound(location=request.route_url('/')) |
|
561 | 561 | else: |
|
562 | 562 | user = ExternalIdentityService.user_by_external_id_and_provider( |
|
563 | 563 | social_data['user']['id'], |
|
564 | 564 | social_data['credentials'].provider_name |
|
565 | 565 | ) |
|
566 | 566 | # fix legacy accounts with wrong google ID |
|
567 | 567 | if not user and social_data['credentials'].provider_name == 'google': |
|
568 | 568 | user = ExternalIdentityService.user_by_external_id_and_provider( |
|
569 | 569 | social_data['user']['email'], |
|
570 | 570 | social_data['credentials'].provider_name) |
|
571 | 571 | |
|
572 | 572 | # user tokens are already found in our db |
|
573 | 573 | if user: |
|
574 | 574 | handle_social_data(request, user, social_data) |
|
575 | 575 | headers = security.remember(request, user.id) |
|
576 | 576 | request.session.pop('zigg.social_auth', None) |
|
577 | 577 | return HTTPFound(location=request.route_url('/'), headers=headers) |
|
578 | 578 | else: |
|
579 | 579 | msg = 'You need to finish the registration ' \ |

580 | 580 | 'process to bind your external identity to your account ' \ |

581 | 581 | 'or sign in to an existing account' |
|
582 | 582 | request.session.flash(msg) |
|
583 | 583 | return HTTPFound(location=request.route_url('register')) |
|
584 | 584 | |
|
585 | 585 | |
|
586 | 586 | @view_config(route_name='section_view', permission='authenticated', |
|
587 | 587 | match_param=['section=users_section', 'view=search_users'], |
|
588 | 588 | renderer='json') |
|
589 | 589 | def search_users(request): |
|
590 | 590 | """ |
|
591 | 591 | Returns a list of users for autocomplete |
|
592 | 592 | """ |
|
593 | 593 | user = request.user |
|
594 | 594 | items_returned = [] |
|
595 | 595 | like_condition = request.params.get('user_name', '') + '%' |
|
596 | 596 | # first, append the user if an email was passed |
|
597 | found_user = User.by_email(request.params.get('user_name', '')) | |
|
597 | found_user = UserService.by_email(request.params.get('user_name', '')) | |
|
598 | 598 | if found_user: |
|
599 | 599 | name = '{} {}'.format(found_user.first_name, found_user.last_name) |
|
600 | 600 | items_returned.append({'user': found_user.user_name, 'name': name}) |
|
601 | for found_user in User.user_names_like(like_condition).limit(20): | |
|
601 | for found_user in UserService.user_names_like(like_condition).limit(20): | |
|
602 | 602 | name = '{} {}'.format(found_user.first_name, found_user.last_name) |
|
603 | 603 | items_returned.append({'user': found_user.user_name, 'name': name}) |
|
604 | 604 | return items_returned |
|
605 | 605 | |
|
606 | 606 | |
|
607 | 607 | @view_config(route_name='users_self_property', match_param='key=auth_tokens', |
|
608 | 608 | request_method="GET", renderer='json', permission='authenticated') |
|
609 | 609 | @view_config(route_name='users_property', match_param='key=auth_tokens', |
|
610 | 610 | request_method="GET", renderer='json', permission='authenticated') |
|
611 | 611 | def auth_tokens_list(request): |
|
612 | 612 | """ |
|
613 | 613 | Lists user's auth tokens |
|
614 | 614 | """ |
|
615 | 615 | if request.matched_route.name == 'users_self_property': |
|
616 | 616 | user = request.user |
|
617 | 617 | else: |
|
618 | user = User.by_id(request.matchdict.get('user_id')) | |
|
618 | user = UserService.by_id(request.matchdict.get('user_id')) | |
|
619 | 619 | if not user: |
|
620 | 620 | return HTTPNotFound() |
|
621 | 621 | return [c.get_dict() for c in user.auth_tokens] |
|
622 | 622 | |
|
623 | 623 | |
|
624 | 624 | @view_config(route_name='users_self_property', match_param='key=auth_tokens', |
|
625 | 625 | request_method="POST", renderer='json', |
|
626 | 626 | permission='authenticated') |
|
627 | 627 | @view_config(route_name='users_property', match_param='key=auth_tokens', |
|
628 | 628 | request_method="POST", renderer='json', |
|
629 | 629 | permission='authenticated') |
|
630 | 630 | def auth_tokens_POST(request): |
|
631 | 631 | """ |
|
632 | 632 | Creates a new auth token |
|
633 | 633 | """ |
|
634 | 634 | if request.matched_route.name == 'users_self_property': |
|
635 | 635 | user = request.user |
|
636 | 636 | else: |
|
637 | user = User.by_id(request.matchdict.get('user_id')) | |
|
637 | user = UserService.by_id(request.matchdict.get('user_id')) | |
|
638 | 638 | if not user: |
|
639 | 639 | return HTTPNotFound() |
|
640 | 640 | |
|
641 | 641 | req_data = request.safe_json_body or {} |
|
642 | 642 | if not req_data.get('expires'): |
|
643 | 643 | req_data.pop('expires', None) |
|
644 | 644 | form = forms.AuthTokenCreateForm(MultiDict(req_data), csrf_context=request) |
|
645 | 645 | if not form.validate(): |
|
646 | 646 | return HTTPUnprocessableEntity(body=form.errors_json) |
|
647 | 647 | token = AuthToken() |
|
648 | 648 | form.populate_obj(token) |
|
649 | 649 | if token.expires: |
|
650 | 650 | interval = h.time_deltas.get(token.expires)['delta'] |
|
651 | 651 | token.expires = datetime.datetime.utcnow() + interval |
|
652 | 652 | user.auth_tokens.append(token) |
|
653 | 653 | DBSession.flush() |
|
654 | 654 | return token.get_dict() |
|
655 | 655 | |
|
656 | 656 | |
|
657 | 657 | @view_config(route_name='users_self_property', match_param='key=auth_tokens', |
|
658 | 658 | request_method="DELETE", renderer='json', |
|
659 | 659 | permission='authenticated') |
|
660 | 660 | @view_config(route_name='users_property', match_param='key=auth_tokens', |
|
661 | 661 | request_method="DELETE", renderer='json', |
|
662 | 662 | permission='authenticated') |
|
663 | 663 | def auth_tokens_DELETE(request): |
|
664 | 664 | """ |
|
665 | 665 | Removes an auth token |
|
666 | 666 | """ |
|
667 | 667 | if request.matched_route.name == 'users_self_property': |
|
668 | 668 | user = request.user |
|
669 | 669 | else: |
|
670 | user = User.by_id(request.matchdict.get('user_id')) | |
|
670 | user = UserService.by_id(request.matchdict.get('user_id')) | |
|
671 | 671 | if not user: |
|
672 | 672 | return HTTPNotFound() |
|
673 | 673 | |
|
674 | 674 | for token in user.auth_tokens: |
|
675 | 675 | if token.token == request.params.get('token'): |
|
676 | 676 | user.auth_tokens.remove(token) |
|
677 | 677 | return True |
|
678 | 678 | return False |
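
auth_tokens_POST above turns a symbolic 'expires' value into a concrete timestamp via h.time_deltas.get(token.expires)['delta']. A small sketch of that computation (the mapping keys and structure here are assumptions for illustration; appenlight's real time_deltas helper is not reproduced):

    import datetime

    # assumed shape of the helper: key -> {'delta': timedelta, ...}
    time_deltas = {
        '1d': {'delta': datetime.timedelta(days=1)},
        '30d': {'delta': datetime.timedelta(days=30)},
    }

    interval = time_deltas['30d']['delta']
    expires_at = datetime.datetime.utcnow() + interval
    print(expires_at)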
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |