@@ -1,414 +1,415 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2012-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 """
 RhodeCode task modules, containing all task that suppose to be run
 by celery daemon
 """

 import os
 import time

 from pyramid import compat
 from pyramid_mailer.mailer import Mailer
 from pyramid_mailer.message import Message
 from email.utils import formatdate

 import rhodecode
 from rhodecode.lib import audit_logger
 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
 from rhodecode.lib import hooks_base
 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
 from rhodecode.lib.statsd_client import StatsdClient
 from rhodecode.model.db import (
     Session, IntegrityError, true, Repository, RepoGroup, User)
 from rhodecode.model.permission import PermissionModel


 @async_task(ignore_result=True, base=RequestContextTask)
 def send_email(recipients, subject, body='', html_body='', email_config=None,
                extra_headers=None):
     """
     Sends an email with defined parameters from the .ini files.

     :param recipients: list of recipients, it this is empty the defined email
         address from field 'email_to' is used instead
     :param subject: subject of the mail
     :param body: body of the mail
     :param html_body: html version of body
     :param email_config: specify custom configuration for mailer
     :param extra_headers: specify custom headers
     """
     log = get_logger(send_email)

     email_config = email_config or rhodecode.CONFIG

     mail_server = email_config.get('smtp_server') or None
     if mail_server is None:
         log.error("SMTP server information missing. Sending email failed. "
                   "Make sure that `smtp_server` variable is configured "
                   "inside the .ini file")
         return False

     subject = "%s %s" % (email_config.get('email_prefix', ''), subject)

     if recipients:
         if isinstance(recipients, compat.string_types):
             recipients = recipients.split(',')
     else:
         # if recipients are not defined we send to email_config + all admins
         admins = []
         for u in User.query().filter(User.admin == true()).all():
             if u.email:
                 admins.append(u.email)
         recipients = []
         config_email = email_config.get('email_to')
         if config_email:
             recipients += [config_email]
         recipients += admins

     # translate our LEGACY config into the one that pyramid_mailer supports
     email_conf = dict(
         host=mail_server,
         port=email_config.get('smtp_port', 25),
         username=email_config.get('smtp_username'),
         password=email_config.get('smtp_password'),

         tls=str2bool(email_config.get('smtp_use_tls')),
         ssl=str2bool(email_config.get('smtp_use_ssl')),

         # SSL key file
         # keyfile='',

         # SSL certificate file
         # certfile='',

         # Location of maildir
         # queue_path='',

         default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'),

         debug=str2bool(email_config.get('smtp_debug')),
         # /usr/sbin/sendmail Sendmail executable
         # sendmail_app='',

         # {sendmail_app} -t -i -f {sender} Template for sendmail execution
         # sendmail_template='',
     )

     if extra_headers is None:
         extra_headers = {}

     extra_headers.setdefault('Date', formatdate(time.time()))

     if 'thread_ids' in extra_headers:
         thread_ids = extra_headers.pop('thread_ids')
         extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids)

     try:
         mailer = Mailer(**email_conf)

         message = Message(subject=subject,
                           sender=email_conf['default_sender'],
                           recipients=recipients,
                           body=body, html=html_body,
                           extra_headers=extra_headers)
         mailer.send_immediately(message)
         statsd = StatsdClient.statsd
         if statsd:
             statsd.incr('rhodecode_email_sent_total')

     except Exception:
         log.exception('Mail sending failed')
         return False
     return True


 @async_task(ignore_result=True, base=RequestContextTask)
 def create_repo(form_data, cur_user):
     from rhodecode.model.repo import RepoModel
     from rhodecode.model.user import UserModel
     from rhodecode.model.scm import ScmModel
     from rhodecode.model.settings import SettingsModel

     log = get_logger(create_repo)

     cur_user = UserModel()._get_user(cur_user)
     owner = cur_user

     repo_name = form_data['repo_name']
     repo_name_full = form_data['repo_name_full']
     repo_type = form_data['repo_type']
     description = form_data['repo_description']
     private = form_data['repo_private']
     clone_uri = form_data.get('clone_uri')
     repo_group = safe_int(form_data['repo_group'])
     copy_fork_permissions = form_data.get('copy_permissions')
     copy_group_permissions = form_data.get('repo_copy_permissions')
     fork_of = form_data.get('fork_parent_id')
     state = form_data.get('repo_state', Repository.STATE_PENDING)

     # repo creation defaults, private and repo_type are filled in form
     defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
     enable_statistics = form_data.get(
         'enable_statistics', defs.get('repo_enable_statistics'))
     enable_locking = form_data.get(
         'enable_locking', defs.get('repo_enable_locking'))
     enable_downloads = form_data.get(
         'enable_downloads', defs.get('repo_enable_downloads'))

     # set landing rev based on default branches for SCM
     landing_ref, _label = ScmModel.backend_landing_ref(repo_type)

     try:
         RepoModel()._create_repo(
             repo_name=repo_name_full,
             repo_type=repo_type,
             description=description,
             owner=owner,
             private=private,
             clone_uri=clone_uri,
             repo_group=repo_group,
             landing_rev=landing_ref,
             fork_of=fork_of,
             copy_fork_permissions=copy_fork_permissions,
             copy_group_permissions=copy_group_permissions,
             enable_statistics=enable_statistics,
             enable_locking=enable_locking,
             enable_downloads=enable_downloads,
             state=state
         )
         Session().commit()

         # now create this repo on Filesystem
         RepoModel()._create_filesystem_repo(
             repo_name=repo_name,
             repo_type=repo_type,
             repo_group=RepoModel()._get_repo_group(repo_group),
             clone_uri=clone_uri,
         )
         repo = Repository.get_by_repo_name(repo_name_full)
         hooks_base.create_repository(created_by=owner.username, **repo.get_dict())

         # update repo commit caches initially
         repo.update_commit_cache()

         # set new created state
         repo.set_state(Repository.STATE_CREATED)
         repo_id = repo.repo_id
         repo_data = repo.get_api_data()

         audit_logger.store(
             'repo.create', action_data={'data': repo_data},
             user=cur_user,
             repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))

         Session().commit()

         PermissionModel().trigger_permission_flush()

     except Exception as e:
         log.warning('Exception occurred when creating repository, '
                     'doing cleanup...', exc_info=True)
         if isinstance(e, IntegrityError):
             Session().rollback()

         # rollback things manually !
         repo = Repository.get_by_repo_name(repo_name_full)
         if repo:
             Repository.delete(repo.repo_id)
             Session().commit()
             RepoModel()._delete_filesystem_repo(repo)
         log.info('Cleanup of repo %s finished', repo_name_full)
         raise

     return True


 @async_task(ignore_result=True, base=RequestContextTask)
 def create_repo_fork(form_data, cur_user):
     """
     Creates a fork of repository using internal VCS methods
     """
     from rhodecode.model.repo import RepoModel
     from rhodecode.model.user import UserModel

     log = get_logger(create_repo_fork)

     cur_user = UserModel()._get_user(cur_user)
     owner = cur_user

     repo_name = form_data['repo_name']  # fork in this case
     repo_name_full = form_data['repo_name_full']
     repo_type = form_data['repo_type']
     description = form_data['description']
     private = form_data['private']
     clone_uri = form_data.get('clone_uri')
     repo_group = safe_int(form_data['repo_group'])
     landing_ref = form_data['landing_rev']
     copy_fork_permissions = form_data.get('copy_permissions')
     fork_id = safe_int(form_data.get('fork_parent_id'))

     try:
         fork_of = RepoModel()._get_repo(fork_id)
         RepoModel()._create_repo(
             repo_name=repo_name_full,
             repo_type=repo_type,
             description=description,
             owner=owner,
             private=private,
             clone_uri=clone_uri,
             repo_group=repo_group,
             landing_rev=landing_ref,
             fork_of=fork_of,
             copy_fork_permissions=copy_fork_permissions
         )

         Session().commit()

         base_path = Repository.base_path()
         source_repo_path = os.path.join(base_path, fork_of.repo_name)

         # now create this repo on Filesystem
         RepoModel()._create_filesystem_repo(
             repo_name=repo_name,
             repo_type=repo_type,
             repo_group=RepoModel()._get_repo_group(repo_group),
             clone_uri=source_repo_path,
         )
         repo = Repository.get_by_repo_name(repo_name_full)
         hooks_base.create_repository(created_by=owner.username, **repo.get_dict())

         # update repo commit caches initially
         config = repo._config
         config.set('extensions', 'largefiles', '')
         repo.update_commit_cache(config=config)

         # set new created state
         repo.set_state(Repository.STATE_CREATED)

         repo_id = repo.repo_id
         repo_data = repo.get_api_data()
         audit_logger.store(
             'repo.fork', action_data={'data': repo_data},
             user=cur_user,
             repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))

         Session().commit()
     except Exception as e:
         log.warning('Exception occurred when forking repository, '
                     'doing cleanup...', exc_info=True)
         if isinstance(e, IntegrityError):
             Session().rollback()

         # rollback things manually !
         repo = Repository.get_by_repo_name(repo_name_full)
         if repo:
             Repository.delete(repo.repo_id)
             Session().commit()
             RepoModel()._delete_filesystem_repo(repo)
         log.info('Cleanup of repo %s finished', repo_name_full)
         raise

     return True


-@async_task(ignore_result=True)
+@async_task(ignore_result=True, base=RequestContextTask)
 def repo_maintenance(repoid):
     from rhodecode.lib import repo_maintenance as repo_maintenance_lib
     log = get_logger(repo_maintenance)
     repo = Repository.get_by_id_or_repo_name(repoid)
     if repo:
         maintenance = repo_maintenance_lib.RepoMaintenance()
         tasks = maintenance.get_tasks_for_repo(repo)
         log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
         executed_types = maintenance.execute(repo)
         log.debug('Got execution results %s', executed_types)
     else:
         log.debug('Repo `%s` not found or without a clone_url', repoid)


-@async_task(ignore_result=True)
+@async_task(ignore_result=True, base=RequestContextTask)
 def check_for_update(send_email_notification=True, email_recipients=None):
     from rhodecode.model.update import UpdateModel
     from rhodecode.model.notification import EmailNotificationModel

     log = get_logger(check_for_update)
     update_url = UpdateModel().get_update_url()
     cur_ver = rhodecode.__version__

     try:
         data = UpdateModel().get_update_data(update_url)

         current_ver = UpdateModel().get_stored_version(fallback=cur_ver)
         latest_ver = data['versions'][0]['version']
         UpdateModel().store_version(latest_ver)

         if send_email_notification:
             log.debug('Send email notification is enabled. '
                       'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver)
             if UpdateModel().is_outdated(current_ver, latest_ver):

                 email_kwargs = {
                     'current_ver': current_ver,
                     'latest_ver': latest_ver,
                 }

                 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
                     EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs)

                 email_recipients = aslist(email_recipients, sep=',') or \
                     [user.email for user in User.get_all_super_admins()]
                 run_task(send_email, email_recipients, subject,
                          email_body_plaintext, email_body)

     except Exception:
-        pass
-
-
-@async_task(ignore_result=False)
-def beat_check(*args, **kwargs):
-    log = get_logger(beat_check)
-    log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
-    return time.time()
+        log.exception('Failed to check for update')
+        raise


 def sync_last_update_for_objects(*args, **kwargs):
     skip_repos = kwargs.get('skip_repos')
     if not skip_repos:
         repos = Repository.query() \
             .order_by(Repository.group_id.asc())

         for repo in repos:
             repo.update_commit_cache()

     skip_groups = kwargs.get('skip_groups')
     if not skip_groups:
         repo_groups = RepoGroup.query() \
             .filter(RepoGroup.group_parent_id == None)

         for root_gr in repo_groups:
             for repo_gr in reversed(root_gr.recursive_groups()):
                 repo_gr.update_commit_cache()


-@async_task(ignore_result=True)
+@async_task(ignore_result=True, base=RequestContextTask)
 def sync_last_update(*args, **kwargs):
     sync_last_update_for_objects(*args, **kwargs)
+
+
+@async_task(ignore_result=False)
+def beat_check(*args, **kwargs):
+    log = get_logger(beat_check)
+    log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
+    return time.time()
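The hunk above keeps routing the update notification through `run_task(send_email, ...)`. A minimal sketch of dispatching the same task by hand, assuming a RhodeCode instance with `smtp_server` configured in the .ini file; the recipient address and subject below are hypothetical:

    from rhodecode.lib.celerylib import run_task, tasks

    # Hypothetical values, for illustration only.
    recipients = ['admin@example.com']
    subject = 'Test notification'

    # send_email() logs an error and returns False when `smtp_server` is
    # missing; otherwise it builds a pyramid_mailer Message and sends it
    # immediately, as the function body above shows.
    run_task(tasks.send_email, recipients, subject,
             body='plain text body', html_body='<b>html body</b>')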
@@ -1,38 +1,50 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2017-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 from uuid import uuid4
 from pyramid.decorator import reify
 from pyramid.request import Request as _Request
+from rhodecode.translation import _ as tsf


 class Request(_Request):
     _req_id_bucket = list()

     @reify
     def req_id(self):
         return str(uuid4())

     @property
     def req_id_bucket(self):
         return self._req_id_bucket

     def req_id_records_init(self):
         self._req_id_bucket = list()
+
+    def plularize(self, *args, **kwargs):
+        return self.localizer.pluralize(*args, **kwargs)
+
+    def translate(self, *args, **kwargs):
+        localizer = self.localizer
+
+        def auto_translate(*_args, **_kwargs):
+            return localizer.translate(tsf(*_args, **_kwargs))
+
+        return auto_translate(*args, **kwargs)
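The new `translate` and `plularize` helpers wrap Pyramid's `request.localizer`. A minimal usage sketch, assuming this `Request` class is registered as the application's request factory; the view function and strings are hypothetical:

    # Hypothetical view code, for illustration only.
    def repo_summary(request):
        # translate() wraps the string with rhodecode.translation._ (tsf)
        # and resolves it through request.localizer.
        title = request.translate('Repository summary')

        # plularize() delegates to pyramid.i18n.Localizer.pluralize(), which
        # picks the singular or plural form based on the count.
        num = 3
        commits_label = request.plularize(
            '${num} commit', '${num} commits', num, mapping={'num': num})

        return {'title': title, 'commits_label': commits_label}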
@@ -1,453 +1,453 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2011-2020 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/


 """
 Model for notifications
 """

 import logging
 import traceback

 import premailer
 from pyramid.threadlocal import get_current_request
 from sqlalchemy.sql.expression import false, true

 import rhodecode
 from rhodecode.lib import helpers as h
 from rhodecode.model import BaseModel
 from rhodecode.model.db import Notification, User, UserNotification
 from rhodecode.model.meta import Session
 from rhodecode.translation import TranslationString

 log = logging.getLogger(__name__)


 class NotificationModel(BaseModel):

     cls = Notification

     def __get_notification(self, notification):
         if isinstance(notification, Notification):
             return notification
         elif isinstance(notification, (int, long)):
             return Notification.get(notification)
         else:
             if notification:
                 raise Exception('notification must be int, long or Instance'
                                 ' of Notification got %s' % type(notification))

     def create(
             self, created_by, notification_subject='', notification_body='',
             notification_type=Notification.TYPE_MESSAGE, recipients=None,
             mention_recipients=None, with_email=True, email_kwargs=None):
         """

         Creates notification of given type

         :param created_by: int, str or User instance. User who created this
             notification
         :param notification_subject: subject of notification itself,
             it will be generated automatically from notification_type if not specified
         :param notification_body: body of notification text
             it will be generated automatically from notification_type if not specified
         :param notification_type: type of notification, based on that we
             pick templates
         :param recipients: list of int, str or User objects, when None
             is given send to all admins
         :param mention_recipients: list of int, str or User objects,
             that were mentioned
         :param with_email: send email with this notification
         :param email_kwargs: dict with arguments to generate email
         """

         from rhodecode.lib.celerylib import tasks, run_task

         if recipients and not getattr(recipients, '__iter__', False):
             raise Exception('recipients must be an iterable object')

         if not (notification_subject and notification_body) and not notification_type:
             raise ValueError('notification_subject, and notification_body '
                              'cannot be empty when notification_type is not specified')

         created_by_obj = self._get_user(created_by)

         if not created_by_obj:
             raise Exception('unknown user %s' % created_by)

         # default MAIN body if not given
         email_kwargs = email_kwargs or {'body': notification_body}
         mention_recipients = mention_recipients or set()

         if recipients is None:
             # recipients is None means to all admins
             recipients_objs = User.query().filter(User.admin == true()).all()
             log.debug('sending notifications %s to admins: %s',
                       notification_type, recipients_objs)
         else:
             recipients_objs = set()
             for u in recipients:
                 obj = self._get_user(u)
                 if obj:
                     recipients_objs.add(obj)
                 else:  # we didn't find this user, log the error and carry on
                     log.error('cannot notify unknown user %r', u)

             if not recipients_objs:
                 raise Exception('no valid recipients specified')

         log.debug('sending notifications %s to %s',
                   notification_type, recipients_objs)

         # add mentioned users into recipients
         final_recipients = set(recipients_objs).union(mention_recipients)

         (subject, email_body, email_body_plaintext) = \
             EmailNotificationModel().render_email(notification_type, **email_kwargs)

         if not notification_subject:
             notification_subject = subject

         if not notification_body:
             notification_body = email_body_plaintext

         notification = Notification.create(
             created_by=created_by_obj, subject=notification_subject,
             body=notification_body, recipients=final_recipients,
             type_=notification_type
         )

         if not with_email:  # skip sending email, and just create notification
             return notification

         # don't send email to person who created this comment
         rec_objs = set(recipients_objs).difference({created_by_obj})

         # now notify all recipients in question

         for recipient in rec_objs.union(mention_recipients):
             # inject current recipient
             email_kwargs['recipient'] = recipient
             email_kwargs['mention'] = recipient in mention_recipients
             (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
                 notification_type, **email_kwargs)

             extra_headers = None
             if 'thread_ids' in email_kwargs:
                 extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')}

             log.debug('Creating notification email task for user:`%s`', recipient)
             task = run_task(
                 tasks.send_email, recipient.email, subject,
                 email_body_plaintext, email_body, extra_headers=extra_headers)
             log.debug('Created email task: %s', task)

         return notification

     def delete(self, user, notification):
         # we don't want to remove actual notification just the assignment
         try:
             notification = self.__get_notification(notification)
             user = self._get_user(user)
             if notification and user:
                 obj = UserNotification.query()\
                     .filter(UserNotification.user == user)\
                     .filter(UserNotification.notification == notification)\
                     .one()
                 Session().delete(obj)
                 return True
         except Exception:
             log.error(traceback.format_exc())
             raise

     def get_for_user(self, user, filter_=None):
         """
         Get mentions for given user, filter them if filter dict is given
         """
         user = self._get_user(user)

         q = UserNotification.query()\
             .filter(UserNotification.user == user)\
             .join((
                 Notification, UserNotification.notification_id ==
                 Notification.notification_id))
         if filter_ == ['all']:
             q = q  # no filter
         elif filter_ == ['unread']:
             q = q.filter(UserNotification.read == false())
         elif filter_:
             q = q.filter(Notification.type_.in_(filter_))

         return q

     def mark_read(self, user, notification):
         try:
             notification = self.__get_notification(notification)
             user = self._get_user(user)
             if notification and user:
                 obj = UserNotification.query()\
                     .filter(UserNotification.user == user)\
                     .filter(UserNotification.notification == notification)\
                     .one()
                 obj.read = True
                 Session().add(obj)
                 return True
         except Exception:
             log.error(traceback.format_exc())
             raise

     def mark_all_read_for_user(self, user, filter_=None):
         user = self._get_user(user)
         q = UserNotification.query()\
             .filter(UserNotification.user == user)\
             .filter(UserNotification.read == false())\
             .join((
                 Notification, UserNotification.notification_id ==
                 Notification.notification_id))
         if filter_ == ['unread']:
             q = q.filter(UserNotification.read == false())
         elif filter_:
             q = q.filter(Notification.type_.in_(filter_))

         # this is a little inefficient but sqlalchemy doesn't support
         # update on joined tables :(
         for obj in q.all():
             obj.read = True
             Session().add(obj)

     def get_unread_cnt_for_user(self, user):
         user = self._get_user(user)
         return UserNotification.query()\
             .filter(UserNotification.read == false())\
             .filter(UserNotification.user == user).count()

     def get_unread_for_user(self, user):
         user = self._get_user(user)
         return [x.notification for x in UserNotification.query()
                 .filter(UserNotification.read == false())
                 .filter(UserNotification.user == user).all()]

     def get_user_notification(self, user, notification):
         user = self._get_user(user)
         notification = self.__get_notification(notification)

         return UserNotification.query()\
             .filter(UserNotification.notification == notification)\
             .filter(UserNotification.user == user).scalar()

     def make_description(self, notification, translate, show_age=True):
         """
         Creates a human readable description based on properties
         of notification object
         """
         _ = translate
         _map = {
             notification.TYPE_CHANGESET_COMMENT: [
                 _('%(user)s commented on commit %(date_or_age)s'),
                 _('%(user)s commented on commit at %(date_or_age)s'),
             ],
             notification.TYPE_MESSAGE: [
                 _('%(user)s sent message %(date_or_age)s'),
                 _('%(user)s sent message at %(date_or_age)s'),
             ],
             notification.TYPE_MENTION: [
                 _('%(user)s mentioned you %(date_or_age)s'),
                 _('%(user)s mentioned you at %(date_or_age)s'),
             ],
             notification.TYPE_REGISTRATION: [
                 _('%(user)s registered in RhodeCode %(date_or_age)s'),
                 _('%(user)s registered in RhodeCode at %(date_or_age)s'),
             ],
             notification.TYPE_PULL_REQUEST: [
                 _('%(user)s opened new pull request %(date_or_age)s'),
                 _('%(user)s opened new pull request at %(date_or_age)s'),
             ],
             notification.TYPE_PULL_REQUEST_UPDATE: [
                 _('%(user)s updated pull request %(date_or_age)s'),
                 _('%(user)s updated pull request at %(date_or_age)s'),
             ],
             notification.TYPE_PULL_REQUEST_COMMENT: [
                 _('%(user)s commented on pull request %(date_or_age)s'),
                 _('%(user)s commented on pull request at %(date_or_age)s'),
             ],
         }

         templates = _map[notification.type_]

         if show_age:
             template = templates[0]
             date_or_age = h.age(notification.created_on)
             if translate:
                 date_or_age = translate(date_or_age)

             if isinstance(date_or_age, TranslationString):
                 date_or_age = date_or_age.interpolate()

         else:
304 | template = templates[1] |
|
304 | template = templates[1] | |
305 | date_or_age = h.format_date(notification.created_on) |
|
305 | date_or_age = h.format_date(notification.created_on) | |
306 |
|
306 | |||
307 | return template % { |
|
307 | return template % { | |
308 | 'user': notification.created_by_user.username, |
|
308 | 'user': notification.created_by_user.username, | |
309 | 'date_or_age': date_or_age, |
|
309 | 'date_or_age': date_or_age, | |
310 | } |
|
310 | } | |
311 |
|
311 | |||
312 |
|
312 | |||
313 | # Templates for Titles, that could be overwritten by rcextensions |
|
313 | # Templates for Titles, that could be overwritten by rcextensions | |
314 | # Title of email for pull-request update |
|
314 | # Title of email for pull-request update | |
315 | EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = '' |
|
315 | EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = '' | |
316 | # Title of email for request for pull request review |
|
316 | # Title of email for request for pull request review | |
317 | EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = '' |
|
317 | EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = '' | |
318 |
|
318 | |||
319 | # Title of email for general comment on pull request |
|
319 | # Title of email for general comment on pull request | |
320 | EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = '' |
|
320 | EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = '' | |
321 | # Title of email for general comment which includes status change on pull request |
|
321 | # Title of email for general comment which includes status change on pull request | |
322 | EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' |
|
322 | EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' | |
323 | # Title of email for inline comment on a file in pull request |
|
323 | # Title of email for inline comment on a file in pull request | |
324 | EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = '' |
|
324 | EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = '' | |
325 |
|
325 | |||
326 | # Title of email for general comment on commit |
|
326 | # Title of email for general comment on commit | |
327 | EMAIL_COMMENT_SUBJECT_TEMPLATE = '' |
|
327 | EMAIL_COMMENT_SUBJECT_TEMPLATE = '' | |
328 | # Title of email for general comment which includes status change on commit |
|
328 | # Title of email for general comment which includes status change on commit | |
329 | EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' |
|
329 | EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' | |
330 | # Title of email for inline comment on a file in commit |
|
330 | # Title of email for inline comment on a file in commit | |
331 | EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = '' |
|
331 | EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = '' | |
332 |
|
332 | |||
333 |
|
333 | |||
334 | class EmailNotificationModel(BaseModel): |
|
334 | class EmailNotificationModel(BaseModel): | |
335 | TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT |
|
335 | TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT | |
336 | TYPE_REGISTRATION = Notification.TYPE_REGISTRATION |
|
336 | TYPE_REGISTRATION = Notification.TYPE_REGISTRATION | |
337 | TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST |
|
337 | TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST | |
338 | TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT |
|
338 | TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT | |
339 | TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE |
|
339 | TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE | |
340 | TYPE_MAIN = Notification.TYPE_MESSAGE |
|
340 | TYPE_MAIN = Notification.TYPE_MESSAGE | |
341 |
|
341 | |||
342 | TYPE_PASSWORD_RESET = 'password_reset' |
|
342 | TYPE_PASSWORD_RESET = 'password_reset' | |
343 | TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation' |
|
343 | TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation' | |
344 | TYPE_EMAIL_TEST = 'email_test' |
|
344 | TYPE_EMAIL_TEST = 'email_test' | |
345 | TYPE_EMAIL_EXCEPTION = 'exception' |
|
345 | TYPE_EMAIL_EXCEPTION = 'exception' | |
346 | TYPE_UPDATE_AVAILABLE = 'update_available' |
|
346 | TYPE_UPDATE_AVAILABLE = 'update_available' | |
347 | TYPE_TEST = 'test' |
|
347 | TYPE_TEST = 'test' | |
348 |
|
348 | |||
349 | email_types = { |
|
349 | email_types = { | |
350 | TYPE_MAIN: |
|
350 | TYPE_MAIN: | |
351 | 'rhodecode:templates/email_templates/main.mako', |
|
351 | 'rhodecode:templates/email_templates/main.mako', | |
352 | TYPE_TEST: |
|
352 | TYPE_TEST: | |
353 | 'rhodecode:templates/email_templates/test.mako', |
|
353 | 'rhodecode:templates/email_templates/test.mako', | |
354 | TYPE_EMAIL_EXCEPTION: |
|
354 | TYPE_EMAIL_EXCEPTION: | |
355 | 'rhodecode:templates/email_templates/exception_tracker.mako', |
|
355 | 'rhodecode:templates/email_templates/exception_tracker.mako', | |
356 | TYPE_UPDATE_AVAILABLE: |
|
356 | TYPE_UPDATE_AVAILABLE: | |
357 | 'rhodecode:templates/email_templates/update_available.mako', |
|
357 | 'rhodecode:templates/email_templates/update_available.mako', | |
358 | TYPE_EMAIL_TEST: |
|
358 | TYPE_EMAIL_TEST: | |
359 | 'rhodecode:templates/email_templates/email_test.mako', |
|
359 | 'rhodecode:templates/email_templates/email_test.mako', | |
360 | TYPE_REGISTRATION: |
|
360 | TYPE_REGISTRATION: | |
361 | 'rhodecode:templates/email_templates/user_registration.mako', |
|
361 | 'rhodecode:templates/email_templates/user_registration.mako', | |
362 | TYPE_PASSWORD_RESET: |
|
362 | TYPE_PASSWORD_RESET: | |
363 | 'rhodecode:templates/email_templates/password_reset.mako', |
|
363 | 'rhodecode:templates/email_templates/password_reset.mako', | |
364 | TYPE_PASSWORD_RESET_CONFIRMATION: |
|
364 | TYPE_PASSWORD_RESET_CONFIRMATION: | |
365 | 'rhodecode:templates/email_templates/password_reset_confirmation.mako', |
|
365 | 'rhodecode:templates/email_templates/password_reset_confirmation.mako', | |
366 | TYPE_COMMIT_COMMENT: |
|
366 | TYPE_COMMIT_COMMENT: | |
367 | 'rhodecode:templates/email_templates/commit_comment.mako', |
|
367 | 'rhodecode:templates/email_templates/commit_comment.mako', | |
368 | TYPE_PULL_REQUEST: |
|
368 | TYPE_PULL_REQUEST: | |
369 | 'rhodecode:templates/email_templates/pull_request_review.mako', |
|
369 | 'rhodecode:templates/email_templates/pull_request_review.mako', | |
370 | TYPE_PULL_REQUEST_COMMENT: |
|
370 | TYPE_PULL_REQUEST_COMMENT: | |
371 | 'rhodecode:templates/email_templates/pull_request_comment.mako', |
|
371 | 'rhodecode:templates/email_templates/pull_request_comment.mako', | |
372 | TYPE_PULL_REQUEST_UPDATE: |
|
372 | TYPE_PULL_REQUEST_UPDATE: | |
373 | 'rhodecode:templates/email_templates/pull_request_update.mako', |
|
373 | 'rhodecode:templates/email_templates/pull_request_update.mako', | |
374 | } |
|
374 | } | |
375 |
|
375 | |||
376 | premailer_instance = premailer.Premailer( |
|
376 | premailer_instance = premailer.Premailer( | |
377 | cssutils_logging_level=logging.ERROR, |
|
377 | cssutils_logging_level=logging.ERROR, | |
378 | cssutils_logging_handler=logging.getLogger().handlers[0] |
|
378 | cssutils_logging_handler=logging.getLogger().handlers[0] | |
379 | if logging.getLogger().handlers else None, |
|
379 | if logging.getLogger().handlers else None, | |
380 | ) |
|
380 | ) | |
381 |
|
381 | |||
382 | def __init__(self): |
|
382 | def __init__(self): | |
383 | """ |
|
383 | """ | |
384 | Example usage:: |
|
384 | Example usage:: | |
385 |
|
385 | |||
386 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
386 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( | |
387 | EmailNotificationModel.TYPE_TEST, **email_kwargs) |
|
387 | EmailNotificationModel.TYPE_TEST, **email_kwargs) | |
388 |
|
388 | |||
389 | """ |
|
389 | """ | |
390 | super(EmailNotificationModel, self).__init__() |
|
390 | super(EmailNotificationModel, self).__init__() | |
391 | self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title') |
|
391 | self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title') | |
392 |
|
392 | |||
393 | def _update_kwargs_for_render(self, kwargs): |
|
393 | def _update_kwargs_for_render(self, kwargs): | |
394 | """ |
|
394 | """ | |
395 | Inject params required for Mako rendering |
|
395 | Inject params required for Mako rendering | |
396 |
|
396 | |||
397 | :param kwargs: |
|
397 | :param kwargs: | |
398 | """ |
|
398 | """ | |
399 |
|
399 | |||
400 | kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name |
|
400 | kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name | |
401 | kwargs['rhodecode_version'] = rhodecode.__version__ |
|
401 | kwargs['rhodecode_version'] = rhodecode.__version__ | |
402 | instance_url = h.route_url('home') |
|
402 | instance_url = h.route_url('home') | |
403 | _kwargs = { |
|
403 | _kwargs = { | |
404 | 'instance_url': instance_url, |
|
404 | 'instance_url': instance_url, | |
405 | 'whitespace_filter': self.whitespace_filter, |
|
405 | 'whitespace_filter': self.whitespace_filter, | |
406 | 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE, |
|
406 | 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE, | |
407 | 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE, |
|
407 | 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE, | |
408 | 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE, |
|
408 | 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE, | |
409 | 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, |
|
409 | 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, | |
410 | 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE, |
|
410 | 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE, | |
411 | 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE, |
|
411 | 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE, | |
412 | 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, |
|
412 | 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, | |
413 | 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE, |
|
413 | 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE, | |
414 | } |
|
414 | } | |
415 | _kwargs.update(kwargs) |
|
415 | _kwargs.update(kwargs) | |
416 | return _kwargs |
|
416 | return _kwargs | |
417 |
|
417 | |||
418 | def whitespace_filter(self, text): |
|
418 | def whitespace_filter(self, text): | |
419 | return text.replace('\n', '').replace('\t', '') |
|
419 | return text.replace('\n', '').replace('\t', '') | |
420 |
|
420 | |||
421 | def get_renderer(self, type_, request): |
|
421 | def get_renderer(self, type_, request): | |
422 | template_name = self.email_types[type_] |
|
422 | template_name = self.email_types[type_] | |
423 | return request.get_partial_renderer(template_name) |
|
423 | return request.get_partial_renderer(template_name) | |
424 |
|
424 | |||
425 | def render_email(self, type_, **kwargs): |
|
425 | def render_email(self, type_, **kwargs): | |
426 | """ |
|
426 | """ | |
427 | renders template for email, and returns a tuple of |
|
427 | renders template for email, and returns a tuple of | |
428 | (subject, email_headers, email_html_body, email_plaintext_body) |
|
428 | (subject, email_headers, email_html_body, email_plaintext_body) | |
429 | """ |
|
429 | """ | |
|
430 | request = get_current_request() | |||
|
431 | ||||
430 | # translator and helpers inject |
|
432 | # translator and helpers inject | |
431 | _kwargs = self._update_kwargs_for_render(kwargs) |
|
433 | _kwargs = self._update_kwargs_for_render(kwargs) | |
432 | request = get_current_request() |
|
|||
433 | email_template = self.get_renderer(type_, request=request) |
|
434 | email_template = self.get_renderer(type_, request=request) | |
434 |
|
||||
435 | subject = email_template.render('subject', **_kwargs) |
|
435 | subject = email_template.render('subject', **_kwargs) | |
436 |
|
436 | |||
437 | try: |
|
437 | try: | |
438 | body_plaintext = email_template.render('body_plaintext', **_kwargs) |
|
438 | body_plaintext = email_template.render('body_plaintext', **_kwargs) | |
439 | except AttributeError: |
|
439 | except AttributeError: | |
440 | # it's not defined in template, ok we can skip it |
|
440 | # it's not defined in template, ok we can skip it | |
441 | body_plaintext = '' |
|
441 | body_plaintext = '' | |
442 |
|
442 | |||
443 | # render WHOLE template |
|
443 | # render WHOLE template | |
444 | body = email_template.render(None, **_kwargs) |
|
444 | body = email_template.render(None, **_kwargs) | |
445 |
|
445 | |||
446 | try: |
|
446 | try: | |
447 | # Inline CSS styles and conversion |
|
447 | # Inline CSS styles and conversion | |
448 | body = self.premailer_instance.transform(body) |
|
448 | body = self.premailer_instance.transform(body) | |
449 | except Exception: |
|
449 | except Exception: | |
450 | log.exception('Failed to parse body with premailer') |
|
450 | log.exception('Failed to parse body with premailer') | |
451 | pass |
|
451 | pass | |
452 |
|
452 | |||
453 | return subject, body, body_plaintext |
|
453 | return subject, body, body_plaintext |
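
Reviewer note (not part of the change): render_email() now resolves the current request at the top of the method instead of just before the renderer lookup; callers are unchanged, they only need an active Pyramid request/threadlocal context. A minimal sketch of driving it, following the Example usage in the class docstring and assuming the model lives at rhodecode.model.notification; the email_kwargs payload below is hypothetical and depends on the .mako template in use::

    from rhodecode.model.notification import EmailNotificationModel

    # hypothetical template parameters; the real keys depend on test.mako
    email_kwargs = {'user': 'admin'}

    # must run inside an active Pyramid request, since render_email() now
    # calls get_current_request() itself
    (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
        EmailNotificationModel.TYPE_TEST, **email_kwargs)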
@@ -1,405 +1,392 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/
import io
import shlex

import math
import re
import os
import datetime
import logging
import Queue
import subprocess32


from dateutil.parser import parse
from pyramid.threadlocal import get_current_request
from pyramid.interfaces import IRoutesMapper
from pyramid.settings import asbool
from pyramid.path import AssetResolver
from threading import Thread

-from rhodecode.translation import _ as tsf
from rhodecode.config.jsroutes import generate_jsroutes_content
-from rhodecode.lib import auth
from rhodecode.lib.base import get_auth_user

import rhodecode


log = logging.getLogger(__name__)


def add_renderer_globals(event):
    from rhodecode.lib import helpers

    # TODO: When executed in pyramid view context the request is not available
    # in the event. Find a better solution to get the request.
    request = event['request'] or get_current_request()

    # Add Pyramid translation as '_' to context
    event['_'] = request.translate
    event['_ungettext'] = request.plularize
    event['h'] = helpers


-def add_localizer(event):
-    request = event.request
-    localizer = request.localizer
-
-    def auto_translate(*args, **kwargs):
-        return localizer.translate(tsf(*args, **kwargs))
-
-    request.translate = auto_translate
-    request.plularize = localizer.pluralize
-
-
def set_user_lang(event):
    request = event.request
    cur_user = getattr(request, 'user', None)

    if cur_user:
        user_lang = cur_user.get_instance().user_data.get('language')
        if user_lang:
            log.debug('lang: setting current user:%s language to: %s', cur_user, user_lang)
            event.request._LOCALE_ = user_lang


def add_request_user_context(event):
    """
    Adds auth user into request context
    """
    request = event.request
    # access req_id as soon as possible
    req_id = request.req_id

    if hasattr(request, 'vcs_call'):
        # skip vcs calls
        return

    if hasattr(request, 'rpc_method'):
        # skip api calls
        return

    auth_user, auth_token = get_auth_user(request)
    request.user = auth_user
    request.user_auth_token = auth_token
    request.environ['rc_auth_user'] = auth_user
    request.environ['rc_auth_user_id'] = auth_user.user_id
    request.environ['rc_req_id'] = req_id


def reset_log_bucket(event):
    """
    reset the log bucket on new request
    """
    request = event.request
    request.req_id_records_init()


def scan_repositories_if_enabled(event):
    """
    This is subscribed to the `pyramid.events.ApplicationCreated` event. It
    does a repository scan if enabled in the settings.
    """
    settings = event.app.registry.settings
    vcs_server_enabled = settings['vcs.server.enable']
    import_on_startup = settings['startup.import_repos']
    if vcs_server_enabled and import_on_startup:
        from rhodecode.model.scm import ScmModel
        from rhodecode.lib.utils import repo2db_mapper, get_rhodecode_base_path
        repositories = ScmModel().repo_scan(get_rhodecode_base_path())
        repo2db_mapper(repositories, remove_obsolete=False)


def write_metadata_if_needed(event):
    """
    Writes upgrade metadata
    """
    import rhodecode
    from rhodecode.lib import system_info
    from rhodecode.lib import ext_json

    fname = '.rcmetadata.json'
    ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))
    metadata_destination = os.path.join(ini_loc, fname)

    def get_update_age():
        now = datetime.datetime.utcnow()

        with open(metadata_destination, 'rb') as f:
            data = ext_json.json.loads(f.read())
            if 'created_on' in data:
                update_date = parse(data['created_on'])
                diff = now - update_date
                return diff.total_seconds() / 60.0

        return 0

    def write():
        configuration = system_info.SysInfo(
            system_info.rhodecode_config)()['value']
        license_token = configuration['config']['license_token']

        setup = dict(
            workers=configuration['config']['server:main'].get(
                'workers', '?'),
            worker_type=configuration['config']['server:main'].get(
                'worker_class', 'sync'),
        )
        dbinfo = system_info.SysInfo(system_info.database_info)()['value']
        del dbinfo['url']

        metadata = dict(
            desc='upgrade metadata info',
            license_token=license_token,
            created_on=datetime.datetime.utcnow().isoformat(),
            usage=system_info.SysInfo(system_info.usage_info)()['value'],
            platform=system_info.SysInfo(system_info.platform_type)()['value'],
            database=dbinfo,
            cpu=system_info.SysInfo(system_info.cpu)()['value'],
            memory=system_info.SysInfo(system_info.memory)()['value'],
            setup=setup
        )

        with open(metadata_destination, 'wb') as f:
            f.write(ext_json.json.dumps(metadata))

    settings = event.app.registry.settings
    if settings.get('metadata.skip'):
        return

    # only write this every 24h, workers restart caused unwanted delays
    try:
        age_in_min = get_update_age()
    except Exception:
        age_in_min = 0

    if age_in_min > 60 * 60 * 24:
        return

    try:
        write()
    except Exception:
        pass


def write_usage_data(event):
    import rhodecode
    from rhodecode.lib import system_info
    from rhodecode.lib import ext_json

    settings = event.app.registry.settings
    instance_tag = settings.get('metadata.write_usage_tag')
    if not settings.get('metadata.write_usage'):
        return

    def get_update_age(dest_file):
        now = datetime.datetime.utcnow()

        with open(dest_file, 'rb') as f:
            data = ext_json.json.loads(f.read())
            if 'created_on' in data:
                update_date = parse(data['created_on'])
                diff = now - update_date
                return math.ceil(diff.total_seconds() / 60.0)

        return 0

    utc_date = datetime.datetime.utcnow()
    hour_quarter = int(math.ceil((utc_date.hour + utc_date.minute/60.0) / 6.))
    fname = '.rc_usage_{date.year}{date.month:02d}{date.day:02d}_{hour}.json'.format(
        date=utc_date, hour=hour_quarter)
    ini_loc = os.path.dirname(rhodecode.CONFIG.get('__file__'))

    usage_dir = os.path.join(ini_loc, '.rcusage')
    if not os.path.isdir(usage_dir):
        os.makedirs(usage_dir)
    usage_metadata_destination = os.path.join(usage_dir, fname)

    try:
        age_in_min = get_update_age(usage_metadata_destination)
    except Exception:
        age_in_min = 0

    # write every 6th hour
    if age_in_min and age_in_min < 60 * 6:
        log.debug('Usage file created %s minutes ago, skipping (threshold: %s minutes)...',
                  age_in_min, 60 * 6)
        return

    def write(dest_file):
        configuration = system_info.SysInfo(system_info.rhodecode_config)()['value']
        license_token = configuration['config']['license_token']

        metadata = dict(
            desc='Usage data',
            instance_tag=instance_tag,
            license_token=license_token,
            created_on=datetime.datetime.utcnow().isoformat(),
            usage=system_info.SysInfo(system_info.usage_info)()['value'],
        )

        with open(dest_file, 'wb') as f:
            f.write(ext_json.json.dumps(metadata, indent=2, sort_keys=True))

    try:
        log.debug('Writing usage file at: %s', usage_metadata_destination)
        write(usage_metadata_destination)
    except Exception:
        pass


def write_js_routes_if_enabled(event):
    registry = event.app.registry

    mapper = registry.queryUtility(IRoutesMapper)
    _argument_prog = re.compile('\{(.*?)\}|:\((.*)\)')

    def _extract_route_information(route):
        """
        Convert a route into tuple(name, path, args), eg:
        ('show_user', '/profile/%(username)s', ['username'])
        """

        routepath = route.pattern
        pattern = route.pattern

        def replace(matchobj):
            if matchobj.group(1):
                return "%%(%s)s" % matchobj.group(1).split(':')[0]
            else:
                return "%%(%s)s" % matchobj.group(2)

        routepath = _argument_prog.sub(replace, routepath)

        if not routepath.startswith('/'):
            routepath = '/'+routepath

        return (
            route.name,
            routepath,
            [(arg[0].split(':')[0] if arg[0] != '' else arg[1])
             for arg in _argument_prog.findall(pattern)]
        )

    def get_routes():
        # pyramid routes
        for route in mapper.get_routes():
            if not route.name.startswith('__'):
                yield _extract_route_information(route)

    if asbool(registry.settings.get('generate_js_files', 'false')):
        static_path = AssetResolver().resolve('rhodecode:public').abspath()
        jsroutes = get_routes()
        jsroutes_file_content = generate_jsroutes_content(jsroutes)
        jsroutes_file_path = os.path.join(
            static_path, 'js', 'rhodecode', 'routes.js')

        try:
            with io.open(jsroutes_file_path, 'w', encoding='utf-8') as f:
                f.write(jsroutes_file_content)
        except Exception:
            log.exception('Failed to write routes.js into %s', jsroutes_file_path)


class Subscriber(object):
    """
    Base class for subscribers to the pyramid event system.
    """
    def __call__(self, event):
        self.run(event)

    def run(self, event):
        raise NotImplementedError('Subclass has to implement this.')


class AsyncSubscriber(Subscriber):
    """
    Subscriber that handles the execution of events in a separate task to not
    block the execution of the code which triggers the event. It puts the
    received events into a queue from which the worker process takes them in
    order.
    """
    def __init__(self):
        self._stop = False
        self._eventq = Queue.Queue()
        self._worker = self.create_worker()
        self._worker.start()

    def __call__(self, event):
        self._eventq.put(event)

    def create_worker(self):
        worker = Thread(target=self.do_work)
        worker.daemon = True
        return worker

    def stop_worker(self):
        self._stop = False
        self._eventq.put(None)
        self._worker.join()

    def do_work(self):
        while not self._stop:
            event = self._eventq.get()
            if event is not None:
                self.run(event)


class AsyncSubprocessSubscriber(AsyncSubscriber):
    """
    Subscriber that uses the subprocess32 module to execute a command if an
    event is received. Events are handled asynchronously::

        subscriber = AsyncSubprocessSubscriber('ls -la', timeout=10)
        subscriber(dummyEvent) # running __call__(event)

    """

    def __init__(self, cmd, timeout=None):
        if not isinstance(cmd, (list, tuple)):
            cmd = shlex.split(cmd)
        super(AsyncSubprocessSubscriber, self).__init__()
        self._cmd = cmd
        self._timeout = timeout

    def run(self, event):
        cmd = self._cmd
        timeout = self._timeout
        log.debug('Executing command %s.', cmd)

        try:
            output = subprocess32.check_output(
                cmd, timeout=timeout, stderr=subprocess32.STDOUT)
            log.debug('Command finished %s', cmd)
            if output:
                log.debug('Command output: %s', output)
        except subprocess32.TimeoutExpired as e:
            log.exception('Timeout while executing command.')
            if e.output:
                log.error('Command output: %s', e.output)
        except subprocess32.CalledProcessError as e:
            log.exception('Error while executing command.')
            if e.output:
                log.error('Command output: %s', e.output)
        except Exception:
            log.exception(
                'Exception while executing command %s.', cmd)
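
Reviewer note (not part of the change): the Subscriber/AsyncSubscriber/AsyncSubprocessSubscriber helpers above are untouched by this diff; only the translation-related imports and add_localizer() are removed. For context, a minimal sketch of how AsyncSubprocessSubscriber is meant to be wired, based on its own docstring; the import path, event name and command below are illustrative assumptions::

    from rhodecode.subscribers import AsyncSubprocessSubscriber

    # the background worker thread executes the command, so the code that
    # fired the event is not blocked while it runs
    on_app_created = AsyncSubprocessSubscriber('ls -la', timeout=10)

    def includeme(config):
        # standard Pyramid wiring; the subscriber is just a callable
        config.add_subscriber(on_app_created, 'pyramid.events.ApplicationCreated')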
@@ -1,124 +1,122 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


import logging
from pyramid.httpexceptions import HTTPException, HTTPBadRequest

from rhodecode.lib.middleware.vcs import (
    detect_vcs_request, VCS_TYPE_KEY, VCS_TYPE_SKIP)


log = logging.getLogger(__name__)


def vcs_detection_tween_factory(handler, registry):

    def vcs_detection_tween(request):
        """
        Do detection of vcs type, and save results for other layers to re-use
        this information
        """
        vcs_server_enabled = request.registry.settings.get('vcs.server.enable')
        vcs_handler = vcs_server_enabled and detect_vcs_request(
            request.environ, request.registry.settings.get('vcs.backends'))

        if vcs_handler:
            # save detected VCS type for later re-use
            request.environ[VCS_TYPE_KEY] = vcs_handler.SCM
            request.vcs_call = vcs_handler.SCM

            log.debug('Processing request with `%s` handler', handler)
            return handler(request)

        # mark that we didn't detect an VCS, and we can skip detection later on
        request.environ[VCS_TYPE_KEY] = VCS_TYPE_SKIP

        log.debug('Processing request with `%s` handler', handler)
        return handler(request)

    return vcs_detection_tween


def junk_encoding_detector(request):
    """
    Detect bad encoded GET params, and fail immediately with BadRequest
    """

    try:
        request.GET.get("", None)
    except UnicodeDecodeError:
        raise HTTPBadRequest("Invalid bytes in query string.")


def bad_url_data_detector(request):
    """
    Detect invalid bytes in a path.
    """
    try:
        request.path_info
    except UnicodeDecodeError:
        raise HTTPBadRequest("Invalid bytes in URL.")


def junk_form_data_detector(request):
    """
    Detect bad encoded POST params, and fail immediately with BadRequest
    """

    if request.method == "POST":
        try:
            request.POST.get("", None)
        except ValueError:
            raise HTTPBadRequest("Invalid bytes in form data.")


def sanity_check_factory(handler, registry):
    def sanity_check(request):
        log.debug('Checking current URL sanity for bad data')
        try:
            junk_encoding_detector(request)
            bad_url_data_detector(request)
            junk_form_data_detector(request)
        except HTTPException as exc:
            return exc

        return handler(request)

    return sanity_check


def includeme(config):
    config.add_subscriber('rhodecode.subscribers.add_renderer_globals',
                          'pyramid.events.BeforeRender')
    config.add_subscriber('rhodecode.subscribers.set_user_lang',
                          'pyramid.events.NewRequest')
-    config.add_subscriber('rhodecode.subscribers.add_localizer',
-                          'pyramid.events.NewRequest')
    config.add_subscriber('rhodecode.subscribers.reset_log_bucket',
                          'pyramid.events.NewRequest')
    config.add_subscriber('rhodecode.subscribers.add_request_user_context',
                          'pyramid.events.ContextFound')
    config.add_tween('rhodecode.tweens.vcs_detection_tween_factory')
    config.add_tween('rhodecode.tweens.sanity_check_factory')

    # This needs to be the LAST item
    config.add_tween('rhodecode.lib.middleware.request_wrapper.RequestWrapperTween')
    log.debug('configured all tweens')
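
Reviewer note (not part of the change): the add_localizer registration dropped from includeme() here matches the removal of add_localizer() in the previous hunk (the subscribers module), so no dangling dotted name is left behind. A minimal sketch of the assumed activation path; the module name is inferred from the add_tween() strings above::

    from pyramid.config import Configurator

    def main(global_config, **settings):
        config = Configurator(settings=settings)
        # runs includeme() above: registers the remaining subscribers, the
        # vcs-detection and sanity-check tweens, and RequestWrapperTween last
        config.include('rhodecode.tweens')
        return config.make_wsgi_app()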