@@ -1,411 +1,410 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.controllers.admin.settings |
|
15 | kallithea.controllers.admin.settings | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | settings controller for Kallithea admin |
|
18 | settings controller for Kallithea admin | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Jul 14, 2010 |
|
22 | :created_on: Jul 14, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import logging |
|
28 | import logging | |
29 | import traceback |
|
29 | import traceback | |
30 |
|
30 | |||
31 | import formencode |
|
31 | import formencode | |
32 | from formencode import htmlfill |
|
32 | from formencode import htmlfill | |
33 | from tg import config, request |
|
33 | from tg import config, request | |
34 | from tg import tmpl_context as c |
|
34 | from tg import tmpl_context as c | |
35 | from tg.i18n import ugettext as _ |
|
35 | from tg.i18n import ugettext as _ | |
36 | from webob.exc import HTTPFound |
|
36 | from webob.exc import HTTPFound | |
37 |
|
37 | |||
38 | import kallithea |
|
38 | import kallithea | |
39 | from kallithea.lib import webutils |
|
39 | from kallithea.lib import webutils | |
40 | from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired |
|
40 | from kallithea.lib.auth import HasPermissionAnyDecorator, LoginRequired | |
41 | from kallithea.lib.base import BaseController, render |
|
41 | from kallithea.lib.base import BaseController, render | |
42 | - from kallithea.lib.celerylib import tasks
43 | from kallithea.lib.utils import repo2db_mapper, set_app_settings |
|
42 | from kallithea.lib.utils import repo2db_mapper, set_app_settings | |
44 | from kallithea.lib.utils2 import safe_str |
|
43 | from kallithea.lib.utils2 import safe_str | |
45 | from kallithea.lib.vcs import VCSError |
|
44 | from kallithea.lib.vcs import VCSError | |
46 | from kallithea.lib.webutils import url |
|
45 | from kallithea.lib.webutils import url | |
47 | - from kallithea.model import db, meta
46 | + from kallithea.model import async_tasks, db, meta
48 | from kallithea.model.forms import ApplicationSettingsForm, ApplicationUiSettingsForm, ApplicationVisualisationForm |
|
47 | from kallithea.model.forms import ApplicationSettingsForm, ApplicationUiSettingsForm, ApplicationVisualisationForm | |
49 | from kallithea.model.notification import EmailNotificationModel |
|
48 | from kallithea.model.notification import EmailNotificationModel | |
50 | from kallithea.model.scm import ScmModel |
|
49 | from kallithea.model.scm import ScmModel | |
51 |
|
50 | |||
52 |
|
51 | |||
53 | log = logging.getLogger(__name__) |
|
52 | log = logging.getLogger(__name__) | |
54 |
|
53 | |||
55 |
|
54 | |||
56 | class SettingsController(BaseController): |
|
55 | class SettingsController(BaseController): | |
57 |
|
56 | |||
58 | @LoginRequired(allow_default_user=True) |
|
57 | @LoginRequired(allow_default_user=True) | |
59 | def _before(self, *args, **kwargs): |
|
58 | def _before(self, *args, **kwargs): | |
60 | super(SettingsController, self)._before(*args, **kwargs) |
|
59 | super(SettingsController, self)._before(*args, **kwargs) | |
61 |
|
60 | |||
62 | def _get_hg_ui_settings(self): |
|
61 | def _get_hg_ui_settings(self): | |
63 | ret = db.Ui.query().all() |
|
62 | ret = db.Ui.query().all() | |
64 |
|
63 | |||
65 | settings = {} |
|
64 | settings = {} | |
66 | for each in ret: |
|
65 | for each in ret: | |
67 | k = each.ui_section + '_' + each.ui_key |
|
66 | k = each.ui_section + '_' + each.ui_key | |
68 | v = each.ui_value |
|
67 | v = each.ui_value | |
69 | if k == 'paths_/': |
|
68 | if k == 'paths_/': | |
70 | k = 'paths_root_path' |
|
69 | k = 'paths_root_path' | |
71 |
|
70 | |||
72 | k = k.replace('.', '_') |
|
71 | k = k.replace('.', '_') | |
73 |
|
72 | |||
74 | if each.ui_section in ['hooks', 'extensions']: |
|
73 | if each.ui_section in ['hooks', 'extensions']: | |
75 | v = each.ui_active |
|
74 | v = each.ui_active | |
76 |
|
75 | |||
77 | settings[k] = v |
|
76 | settings[k] = v | |
78 | return settings |
|
77 | return settings | |
79 |
|
78 | |||
80 | @HasPermissionAnyDecorator('hg.admin') |
|
79 | @HasPermissionAnyDecorator('hg.admin') | |
81 | def settings_vcs(self): |
|
80 | def settings_vcs(self): | |
82 | c.active = 'vcs' |
|
81 | c.active = 'vcs' | |
83 | if request.POST: |
|
82 | if request.POST: | |
84 | application_form = ApplicationUiSettingsForm()() |
|
83 | application_form = ApplicationUiSettingsForm()() | |
85 | try: |
|
84 | try: | |
86 | form_result = application_form.to_python(dict(request.POST)) |
|
85 | form_result = application_form.to_python(dict(request.POST)) | |
87 | except formencode.Invalid as errors: |
|
86 | except formencode.Invalid as errors: | |
88 | return htmlfill.render( |
|
87 | return htmlfill.render( | |
89 | render('admin/settings/settings.html'), |
|
88 | render('admin/settings/settings.html'), | |
90 | defaults=errors.value, |
|
89 | defaults=errors.value, | |
91 | errors=errors.error_dict or {}, |
|
90 | errors=errors.error_dict or {}, | |
92 | prefix_error=False, |
|
91 | prefix_error=False, | |
93 | encoding="UTF-8", |
|
92 | encoding="UTF-8", | |
94 | force_defaults=False) |
|
93 | force_defaults=False) | |
95 |
|
94 | |||
96 | try: |
|
95 | try: | |
97 | if c.visual.allow_repo_location_change: |
|
96 | if c.visual.allow_repo_location_change: | |
98 | sett = db.Ui.get_by_key('paths', '/') |
|
97 | sett = db.Ui.get_by_key('paths', '/') | |
99 | sett.ui_value = form_result['paths_root_path'] |
|
98 | sett.ui_value = form_result['paths_root_path'] | |
100 |
|
99 | |||
101 | # HOOKS |
|
100 | # HOOKS | |
102 | sett = db.Ui.get_by_key('hooks', db.Ui.HOOK_UPDATE) |
|
101 | sett = db.Ui.get_by_key('hooks', db.Ui.HOOK_UPDATE) | |
103 | sett.ui_active = form_result['hooks_changegroup_update'] |
|
102 | sett.ui_active = form_result['hooks_changegroup_update'] | |
104 |
|
103 | |||
105 | sett = db.Ui.get_by_key('hooks', db.Ui.HOOK_REPO_SIZE) |
|
104 | sett = db.Ui.get_by_key('hooks', db.Ui.HOOK_REPO_SIZE) | |
106 | sett.ui_active = form_result['hooks_changegroup_repo_size'] |
|
105 | sett.ui_active = form_result['hooks_changegroup_repo_size'] | |
107 |
|
106 | |||
108 | ## EXTENSIONS |
|
107 | ## EXTENSIONS | |
109 | sett = db.Ui.get_or_create('extensions', 'largefiles') |
|
108 | sett = db.Ui.get_or_create('extensions', 'largefiles') | |
110 | sett.ui_active = form_result['extensions_largefiles'] |
|
109 | sett.ui_active = form_result['extensions_largefiles'] | |
111 |
|
110 | |||
112 | # sett = db.Ui.get_or_create('extensions', 'hggit') |
|
111 | # sett = db.Ui.get_or_create('extensions', 'hggit') | |
113 | # sett.ui_active = form_result['extensions_hggit'] |
|
112 | # sett.ui_active = form_result['extensions_hggit'] | |
114 |
|
113 | |||
115 | meta.Session().commit() |
|
114 | meta.Session().commit() | |
116 |
|
115 | |||
117 | webutils.flash(_('Updated VCS settings'), category='success') |
|
116 | webutils.flash(_('Updated VCS settings'), category='success') | |
118 |
|
117 | |||
119 | except Exception: |
|
118 | except Exception: | |
120 | log.error(traceback.format_exc()) |
|
119 | log.error(traceback.format_exc()) | |
121 | webutils.flash(_('Error occurred while updating ' |
|
120 | webutils.flash(_('Error occurred while updating ' | |
122 | 'application settings'), category='error') |
|
121 | 'application settings'), category='error') | |
123 |
|
122 | |||
124 | defaults = db.Setting.get_app_settings() |
|
123 | defaults = db.Setting.get_app_settings() | |
125 | defaults.update(self._get_hg_ui_settings()) |
|
124 | defaults.update(self._get_hg_ui_settings()) | |
126 |
|
125 | |||
127 | return htmlfill.render( |
|
126 | return htmlfill.render( | |
128 | render('admin/settings/settings.html'), |
|
127 | render('admin/settings/settings.html'), | |
129 | defaults=defaults, |
|
128 | defaults=defaults, | |
130 | encoding="UTF-8", |
|
129 | encoding="UTF-8", | |
131 | force_defaults=False) |
|
130 | force_defaults=False) | |
132 |
|
131 | |||
133 | @HasPermissionAnyDecorator('hg.admin') |
|
132 | @HasPermissionAnyDecorator('hg.admin') | |
134 | def settings_mapping(self): |
|
133 | def settings_mapping(self): | |
135 | c.active = 'mapping' |
|
134 | c.active = 'mapping' | |
136 | if request.POST: |
|
135 | if request.POST: | |
137 | rm_obsolete = request.POST.get('destroy', False) |
|
136 | rm_obsolete = request.POST.get('destroy', False) | |
138 | install_git_hooks = request.POST.get('hooks', False) |
|
137 | install_git_hooks = request.POST.get('hooks', False) | |
139 | overwrite_git_hooks = request.POST.get('hooks_overwrite', False) |
|
138 | overwrite_git_hooks = request.POST.get('hooks_overwrite', False) | |
140 | invalidate_cache = request.POST.get('invalidate', False) |
|
139 | invalidate_cache = request.POST.get('invalidate', False) | |
141 | log.debug('rescanning repo location with destroy obsolete=%s, ' |
|
140 | log.debug('rescanning repo location with destroy obsolete=%s, ' | |
142 | 'install git hooks=%s and ' |
|
141 | 'install git hooks=%s and ' | |
143 | 'overwrite git hooks=%s' % (rm_obsolete, install_git_hooks, overwrite_git_hooks)) |
|
142 | 'overwrite git hooks=%s' % (rm_obsolete, install_git_hooks, overwrite_git_hooks)) | |
144 |
|
143 | |||
145 | filesystem_repos = ScmModel().repo_scan() |
|
144 | filesystem_repos = ScmModel().repo_scan() | |
146 | added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, |
|
145 | added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, | |
147 | install_git_hooks=install_git_hooks, |
|
146 | install_git_hooks=install_git_hooks, | |
148 | user=request.authuser.username, |
|
147 | user=request.authuser.username, | |
149 | overwrite_git_hooks=overwrite_git_hooks) |
|
148 | overwrite_git_hooks=overwrite_git_hooks) | |
150 | added_msg = webutils.HTML(', ').join( |
|
149 | added_msg = webutils.HTML(', ').join( | |
151 | webutils.link_to(safe_str(repo_name), webutils.url('summary_home', repo_name=repo_name)) for repo_name in added |
|
150 | webutils.link_to(safe_str(repo_name), webutils.url('summary_home', repo_name=repo_name)) for repo_name in added | |
152 | ) or '-' |
|
151 | ) or '-' | |
153 | removed_msg = webutils.HTML(', ').join( |
|
152 | removed_msg = webutils.HTML(', ').join( | |
154 | safe_str(repo_name) for repo_name in removed |
|
153 | safe_str(repo_name) for repo_name in removed | |
155 | ) or '-' |
|
154 | ) or '-' | |
156 | webutils.flash(webutils.HTML(_('Repositories successfully rescanned. Added: %s. Removed: %s.')) % |
|
155 | webutils.flash(webutils.HTML(_('Repositories successfully rescanned. Added: %s. Removed: %s.')) % | |
157 | (added_msg, removed_msg), category='success') |
|
156 | (added_msg, removed_msg), category='success') | |
158 |
|
157 | |||
159 | if invalidate_cache: |
|
158 | if invalidate_cache: | |
160 | log.debug('invalidating all repositories cache') |
|
159 | log.debug('invalidating all repositories cache') | |
161 | i = 0 |
|
160 | i = 0 | |
162 | for repo in db.Repository.query(): |
|
161 | for repo in db.Repository.query(): | |
163 | try: |
|
162 | try: | |
164 | ScmModel().mark_for_invalidation(repo.repo_name) |
|
163 | ScmModel().mark_for_invalidation(repo.repo_name) | |
165 | i += 1 |
|
164 | i += 1 | |
166 | except VCSError as e: |
|
165 | except VCSError as e: | |
167 | log.warning('VCS error invalidating %s: %s', repo.repo_name, e) |
|
166 | log.warning('VCS error invalidating %s: %s', repo.repo_name, e) | |
168 | webutils.flash(_('Invalidated %s repositories') % i, category='success') |
|
167 | webutils.flash(_('Invalidated %s repositories') % i, category='success') | |
169 |
|
168 | |||
170 | raise HTTPFound(location=url('admin_settings_mapping')) |
|
169 | raise HTTPFound(location=url('admin_settings_mapping')) | |
171 |
|
170 | |||
172 | defaults = db.Setting.get_app_settings() |
|
171 | defaults = db.Setting.get_app_settings() | |
173 | defaults.update(self._get_hg_ui_settings()) |
|
172 | defaults.update(self._get_hg_ui_settings()) | |
174 |
|
173 | |||
175 | return htmlfill.render( |
|
174 | return htmlfill.render( | |
176 | render('admin/settings/settings.html'), |
|
175 | render('admin/settings/settings.html'), | |
177 | defaults=defaults, |
|
176 | defaults=defaults, | |
178 | encoding="UTF-8", |
|
177 | encoding="UTF-8", | |
179 | force_defaults=False) |
|
178 | force_defaults=False) | |
180 |
|
179 | |||
181 | @HasPermissionAnyDecorator('hg.admin') |
|
180 | @HasPermissionAnyDecorator('hg.admin') | |
182 | def settings_global(self): |
|
181 | def settings_global(self): | |
183 | c.active = 'global' |
|
182 | c.active = 'global' | |
184 | if request.POST: |
|
183 | if request.POST: | |
185 | application_form = ApplicationSettingsForm()() |
|
184 | application_form = ApplicationSettingsForm()() | |
186 | try: |
|
185 | try: | |
187 | form_result = application_form.to_python(dict(request.POST)) |
|
186 | form_result = application_form.to_python(dict(request.POST)) | |
188 | except formencode.Invalid as errors: |
|
187 | except formencode.Invalid as errors: | |
189 | return htmlfill.render( |
|
188 | return htmlfill.render( | |
190 | render('admin/settings/settings.html'), |
|
189 | render('admin/settings/settings.html'), | |
191 | defaults=errors.value, |
|
190 | defaults=errors.value, | |
192 | errors=errors.error_dict or {}, |
|
191 | errors=errors.error_dict or {}, | |
193 | prefix_error=False, |
|
192 | prefix_error=False, | |
194 | encoding="UTF-8", |
|
193 | encoding="UTF-8", | |
195 | force_defaults=False) |
|
194 | force_defaults=False) | |
196 |
|
195 | |||
197 | try: |
|
196 | try: | |
198 | for setting in ( |
|
197 | for setting in ( | |
199 | 'title', |
|
198 | 'title', | |
200 | 'realm', |
|
199 | 'realm', | |
201 | 'ga_code', |
|
200 | 'ga_code', | |
202 | 'captcha_public_key', |
|
201 | 'captcha_public_key', | |
203 | 'captcha_private_key', |
|
202 | 'captcha_private_key', | |
204 | ): |
|
203 | ): | |
205 | db.Setting.create_or_update(setting, form_result[setting]) |
|
204 | db.Setting.create_or_update(setting, form_result[setting]) | |
206 |
|
205 | |||
207 | meta.Session().commit() |
|
206 | meta.Session().commit() | |
208 | set_app_settings(config) |
|
207 | set_app_settings(config) | |
209 | webutils.flash(_('Updated application settings'), category='success') |
|
208 | webutils.flash(_('Updated application settings'), category='success') | |
210 |
|
209 | |||
211 | except Exception: |
|
210 | except Exception: | |
212 | log.error(traceback.format_exc()) |
|
211 | log.error(traceback.format_exc()) | |
213 | webutils.flash(_('Error occurred while updating ' |
|
212 | webutils.flash(_('Error occurred while updating ' | |
214 | 'application settings'), |
|
213 | 'application settings'), | |
215 | category='error') |
|
214 | category='error') | |
216 |
|
215 | |||
217 | raise HTTPFound(location=url('admin_settings_global')) |
|
216 | raise HTTPFound(location=url('admin_settings_global')) | |
218 |
|
217 | |||
219 | defaults = db.Setting.get_app_settings() |
|
218 | defaults = db.Setting.get_app_settings() | |
220 | defaults.update(self._get_hg_ui_settings()) |
|
219 | defaults.update(self._get_hg_ui_settings()) | |
221 |
|
220 | |||
222 | return htmlfill.render( |
|
221 | return htmlfill.render( | |
223 | render('admin/settings/settings.html'), |
|
222 | render('admin/settings/settings.html'), | |
224 | defaults=defaults, |
|
223 | defaults=defaults, | |
225 | encoding="UTF-8", |
|
224 | encoding="UTF-8", | |
226 | force_defaults=False) |
|
225 | force_defaults=False) | |
227 |
|
226 | |||
228 | @HasPermissionAnyDecorator('hg.admin') |
|
227 | @HasPermissionAnyDecorator('hg.admin') | |
229 | def settings_visual(self): |
|
228 | def settings_visual(self): | |
230 | c.active = 'visual' |
|
229 | c.active = 'visual' | |
231 | if request.POST: |
|
230 | if request.POST: | |
232 | application_form = ApplicationVisualisationForm()() |
|
231 | application_form = ApplicationVisualisationForm()() | |
233 | try: |
|
232 | try: | |
234 | form_result = application_form.to_python(dict(request.POST)) |
|
233 | form_result = application_form.to_python(dict(request.POST)) | |
235 | except formencode.Invalid as errors: |
|
234 | except formencode.Invalid as errors: | |
236 | return htmlfill.render( |
|
235 | return htmlfill.render( | |
237 | render('admin/settings/settings.html'), |
|
236 | render('admin/settings/settings.html'), | |
238 | defaults=errors.value, |
|
237 | defaults=errors.value, | |
239 | errors=errors.error_dict or {}, |
|
238 | errors=errors.error_dict or {}, | |
240 | prefix_error=False, |
|
239 | prefix_error=False, | |
241 | encoding="UTF-8", |
|
240 | encoding="UTF-8", | |
242 | force_defaults=False) |
|
241 | force_defaults=False) | |
243 |
|
242 | |||
244 | try: |
|
243 | try: | |
245 | settings = [ |
|
244 | settings = [ | |
246 | ('show_public_icon', 'show_public_icon', 'bool'), |
|
245 | ('show_public_icon', 'show_public_icon', 'bool'), | |
247 | ('show_private_icon', 'show_private_icon', 'bool'), |
|
246 | ('show_private_icon', 'show_private_icon', 'bool'), | |
248 | ('stylify_metalabels', 'stylify_metalabels', 'bool'), |
|
247 | ('stylify_metalabels', 'stylify_metalabels', 'bool'), | |
249 | ('repository_fields', 'repository_fields', 'bool'), |
|
248 | ('repository_fields', 'repository_fields', 'bool'), | |
250 | ('dashboard_items', 'dashboard_items', 'int'), |
|
249 | ('dashboard_items', 'dashboard_items', 'int'), | |
251 | ('admin_grid_items', 'admin_grid_items', 'int'), |
|
250 | ('admin_grid_items', 'admin_grid_items', 'int'), | |
252 | ('show_version', 'show_version', 'bool'), |
|
251 | ('show_version', 'show_version', 'bool'), | |
253 | ('use_gravatar', 'use_gravatar', 'bool'), |
|
252 | ('use_gravatar', 'use_gravatar', 'bool'), | |
254 | ('gravatar_url', 'gravatar_url', 'unicode'), |
|
253 | ('gravatar_url', 'gravatar_url', 'unicode'), | |
255 | ('clone_uri_tmpl', 'clone_uri_tmpl', 'unicode'), |
|
254 | ('clone_uri_tmpl', 'clone_uri_tmpl', 'unicode'), | |
256 | ('clone_ssh_tmpl', 'clone_ssh_tmpl', 'unicode'), |
|
255 | ('clone_ssh_tmpl', 'clone_ssh_tmpl', 'unicode'), | |
257 | ] |
|
256 | ] | |
258 | for setting, form_key, type_ in settings: |
|
257 | for setting, form_key, type_ in settings: | |
259 | db.Setting.create_or_update(setting, form_result[form_key], type_) |
|
258 | db.Setting.create_or_update(setting, form_result[form_key], type_) | |
260 |
|
259 | |||
261 | meta.Session().commit() |
|
260 | meta.Session().commit() | |
262 | set_app_settings(config) |
|
261 | set_app_settings(config) | |
263 | webutils.flash(_('Updated visualisation settings'), |
|
262 | webutils.flash(_('Updated visualisation settings'), | |
264 | category='success') |
|
263 | category='success') | |
265 |
|
264 | |||
266 | except Exception: |
|
265 | except Exception: | |
267 | log.error(traceback.format_exc()) |
|
266 | log.error(traceback.format_exc()) | |
268 | webutils.flash(_('Error occurred during updating ' |
|
267 | webutils.flash(_('Error occurred during updating ' | |
269 | 'visualisation settings'), |
|
268 | 'visualisation settings'), | |
270 | category='error') |
|
269 | category='error') | |
271 |
|
270 | |||
272 | raise HTTPFound(location=url('admin_settings_visual')) |
|
271 | raise HTTPFound(location=url('admin_settings_visual')) | |
273 |
|
272 | |||
274 | defaults = db.Setting.get_app_settings() |
|
273 | defaults = db.Setting.get_app_settings() | |
275 | defaults.update(self._get_hg_ui_settings()) |
|
274 | defaults.update(self._get_hg_ui_settings()) | |
276 |
|
275 | |||
277 | return htmlfill.render( |
|
276 | return htmlfill.render( | |
278 | render('admin/settings/settings.html'), |
|
277 | render('admin/settings/settings.html'), | |
279 | defaults=defaults, |
|
278 | defaults=defaults, | |
280 | encoding="UTF-8", |
|
279 | encoding="UTF-8", | |
281 | force_defaults=False) |
|
280 | force_defaults=False) | |
282 |
|
281 | |||
283 | @HasPermissionAnyDecorator('hg.admin') |
|
282 | @HasPermissionAnyDecorator('hg.admin') | |
284 | def settings_email(self): |
|
283 | def settings_email(self): | |
285 | c.active = 'email' |
|
284 | c.active = 'email' | |
286 | if request.POST: |
|
285 | if request.POST: | |
287 | test_email = request.POST.get('test_email') |
|
286 | test_email = request.POST.get('test_email') | |
288 | test_email_subj = 'Kallithea test email' |
|
287 | test_email_subj = 'Kallithea test email' | |
289 | test_body = ('Kallithea Email test, ' |
|
288 | test_body = ('Kallithea Email test, ' | |
290 | 'Kallithea version: %s' % c.kallithea_version) |
|
289 | 'Kallithea version: %s' % c.kallithea_version) | |
291 | if not test_email: |
|
290 | if not test_email: | |
292 | webutils.flash(_('Please enter email address'), category='error') |
|
291 | webutils.flash(_('Please enter email address'), category='error') | |
293 | raise HTTPFound(location=url('admin_settings_email')) |
|
292 | raise HTTPFound(location=url('admin_settings_email')) | |
294 |
|
293 | |||
295 | test_email_txt_body = EmailNotificationModel() \ |
|
294 | test_email_txt_body = EmailNotificationModel() \ | |
296 | .get_email_tmpl(EmailNotificationModel.TYPE_DEFAULT, |
|
295 | .get_email_tmpl(EmailNotificationModel.TYPE_DEFAULT, | |
297 | 'txt', body=test_body) |
|
296 | 'txt', body=test_body) | |
298 | test_email_html_body = EmailNotificationModel() \ |
|
297 | test_email_html_body = EmailNotificationModel() \ | |
299 | .get_email_tmpl(EmailNotificationModel.TYPE_DEFAULT, |
|
298 | .get_email_tmpl(EmailNotificationModel.TYPE_DEFAULT, | |
300 | 'html', body=test_body) |
|
299 | 'html', body=test_body) | |
301 |
|
300 | |||
302 | recipients = [test_email] if test_email else None |
|
301 | recipients = [test_email] if test_email else None | |
303 |
|
302 | |||
304 | - tasks.send_email(recipients, test_email_subj,
303 | + async_tasks.send_email(recipients, test_email_subj,
305 | test_email_txt_body, test_email_html_body) |
|
304 | test_email_txt_body, test_email_html_body) | |
306 |
|
305 | |||
307 | webutils.flash(_('Send email task created'), category='success') |
|
306 | webutils.flash(_('Send email task created'), category='success') | |
308 | raise HTTPFound(location=url('admin_settings_email')) |
|
307 | raise HTTPFound(location=url('admin_settings_email')) | |
309 |
|
308 | |||
310 | defaults = db.Setting.get_app_settings() |
|
309 | defaults = db.Setting.get_app_settings() | |
311 | defaults.update(self._get_hg_ui_settings()) |
|
310 | defaults.update(self._get_hg_ui_settings()) | |
312 |
|
311 | |||
313 | c.ini = kallithea.CONFIG |
|
312 | c.ini = kallithea.CONFIG | |
314 |
|
313 | |||
315 | return htmlfill.render( |
|
314 | return htmlfill.render( | |
316 | render('admin/settings/settings.html'), |
|
315 | render('admin/settings/settings.html'), | |
317 | defaults=defaults, |
|
316 | defaults=defaults, | |
318 | encoding="UTF-8", |
|
317 | encoding="UTF-8", | |
319 | force_defaults=False) |
|
318 | force_defaults=False) | |
320 |
|
319 | |||
321 | @HasPermissionAnyDecorator('hg.admin') |
|
320 | @HasPermissionAnyDecorator('hg.admin') | |
322 | def settings_hooks(self): |
|
321 | def settings_hooks(self): | |
323 | c.active = 'hooks' |
|
322 | c.active = 'hooks' | |
324 | if request.POST: |
|
323 | if request.POST: | |
325 | if c.visual.allow_custom_hooks_settings: |
|
324 | if c.visual.allow_custom_hooks_settings: | |
326 | ui_key = request.POST.get('new_hook_ui_key') |
|
325 | ui_key = request.POST.get('new_hook_ui_key') | |
327 | ui_value = request.POST.get('new_hook_ui_value') |
|
326 | ui_value = request.POST.get('new_hook_ui_value') | |
328 |
|
327 | |||
329 | hook_id = request.POST.get('hook_id') |
|
328 | hook_id = request.POST.get('hook_id') | |
330 |
|
329 | |||
331 | try: |
|
330 | try: | |
332 | ui_key = ui_key and ui_key.strip() |
|
331 | ui_key = ui_key and ui_key.strip() | |
333 | if ui_key in (x.ui_key for x in db.Ui.get_custom_hooks()): |
|
332 | if ui_key in (x.ui_key for x in db.Ui.get_custom_hooks()): | |
334 | webutils.flash(_('Hook already exists'), category='error') |
|
333 | webutils.flash(_('Hook already exists'), category='error') | |
335 | elif ui_key in (x.ui_key for x in db.Ui.get_builtin_hooks()): |
|
334 | elif ui_key in (x.ui_key for x in db.Ui.get_builtin_hooks()): | |
336 | webutils.flash(_('Builtin hooks are read-only. Please use another hook name.'), category='error') |
|
335 | webutils.flash(_('Builtin hooks are read-only. Please use another hook name.'), category='error') | |
337 | elif ui_value and ui_key: |
|
336 | elif ui_value and ui_key: | |
338 | db.Ui.create_or_update_hook(ui_key, ui_value) |
|
337 | db.Ui.create_or_update_hook(ui_key, ui_value) | |
339 | webutils.flash(_('Added new hook'), category='success') |
|
338 | webutils.flash(_('Added new hook'), category='success') | |
340 | elif hook_id: |
|
339 | elif hook_id: | |
341 | db.Ui.delete(hook_id) |
|
340 | db.Ui.delete(hook_id) | |
342 | meta.Session().commit() |
|
341 | meta.Session().commit() | |
343 |
|
342 | |||
344 | # check for edits |
|
343 | # check for edits | |
345 | update = False |
|
344 | update = False | |
346 | _d = request.POST.dict_of_lists() |
|
345 | _d = request.POST.dict_of_lists() | |
347 | for k, v, ov in zip(_d.get('hook_ui_key', []), |
|
346 | for k, v, ov in zip(_d.get('hook_ui_key', []), | |
348 | _d.get('hook_ui_value_new', []), |
|
347 | _d.get('hook_ui_value_new', []), | |
349 | _d.get('hook_ui_value', [])): |
|
348 | _d.get('hook_ui_value', [])): | |
350 | if v != ov: |
|
349 | if v != ov: | |
351 | db.Ui.create_or_update_hook(k, v) |
|
350 | db.Ui.create_or_update_hook(k, v) | |
352 | update = True |
|
351 | update = True | |
353 |
|
352 | |||
354 | if update: |
|
353 | if update: | |
355 | webutils.flash(_('Updated hooks'), category='success') |
|
354 | webutils.flash(_('Updated hooks'), category='success') | |
356 | meta.Session().commit() |
|
355 | meta.Session().commit() | |
357 | except Exception: |
|
356 | except Exception: | |
358 | log.error(traceback.format_exc()) |
|
357 | log.error(traceback.format_exc()) | |
359 | webutils.flash(_('Error occurred during hook creation'), |
|
358 | webutils.flash(_('Error occurred during hook creation'), | |
360 | category='error') |
|
359 | category='error') | |
361 |
|
360 | |||
362 | raise HTTPFound(location=url('admin_settings_hooks')) |
|
361 | raise HTTPFound(location=url('admin_settings_hooks')) | |
363 |
|
362 | |||
364 | defaults = db.Setting.get_app_settings() |
|
363 | defaults = db.Setting.get_app_settings() | |
365 | defaults.update(self._get_hg_ui_settings()) |
|
364 | defaults.update(self._get_hg_ui_settings()) | |
366 |
|
365 | |||
367 | c.hooks = db.Ui.get_builtin_hooks() |
|
366 | c.hooks = db.Ui.get_builtin_hooks() | |
368 | c.custom_hooks = db.Ui.get_custom_hooks() |
|
367 | c.custom_hooks = db.Ui.get_custom_hooks() | |
369 |
|
368 | |||
370 | return htmlfill.render( |
|
369 | return htmlfill.render( | |
371 | render('admin/settings/settings.html'), |
|
370 | render('admin/settings/settings.html'), | |
372 | defaults=defaults, |
|
371 | defaults=defaults, | |
373 | encoding="UTF-8", |
|
372 | encoding="UTF-8", | |
374 | force_defaults=False) |
|
373 | force_defaults=False) | |
375 |
|
374 | |||
376 | @HasPermissionAnyDecorator('hg.admin') |
|
375 | @HasPermissionAnyDecorator('hg.admin') | |
377 | def settings_search(self): |
|
376 | def settings_search(self): | |
378 | c.active = 'search' |
|
377 | c.active = 'search' | |
379 | if request.POST: |
|
378 | if request.POST: | |
380 | repo_location = self._get_hg_ui_settings()['paths_root_path'] |
|
379 | repo_location = self._get_hg_ui_settings()['paths_root_path'] | |
381 | full_index = request.POST.get('full_index', False) |
|
380 | full_index = request.POST.get('full_index', False) | |
382 | - tasks.whoosh_index(repo_location, full_index)
381 | + async_tasks.whoosh_index(repo_location, full_index)
383 | webutils.flash(_('Whoosh reindex task scheduled'), category='success') |
|
382 | webutils.flash(_('Whoosh reindex task scheduled'), category='success') | |
384 | raise HTTPFound(location=url('admin_settings_search')) |
|
383 | raise HTTPFound(location=url('admin_settings_search')) | |
385 |
|
384 | |||
386 | defaults = db.Setting.get_app_settings() |
|
385 | defaults = db.Setting.get_app_settings() | |
387 | defaults.update(self._get_hg_ui_settings()) |
|
386 | defaults.update(self._get_hg_ui_settings()) | |
388 |
|
387 | |||
389 | return htmlfill.render( |
|
388 | return htmlfill.render( | |
390 | render('admin/settings/settings.html'), |
|
389 | render('admin/settings/settings.html'), | |
391 | defaults=defaults, |
|
390 | defaults=defaults, | |
392 | encoding="UTF-8", |
|
391 | encoding="UTF-8", | |
393 | force_defaults=False) |
|
392 | force_defaults=False) | |
394 |
|
393 | |||
395 | @HasPermissionAnyDecorator('hg.admin') |
|
394 | @HasPermissionAnyDecorator('hg.admin') | |
396 | def settings_system(self): |
|
395 | def settings_system(self): | |
397 | c.active = 'system' |
|
396 | c.active = 'system' | |
398 |
|
397 | |||
399 | defaults = db.Setting.get_app_settings() |
|
398 | defaults = db.Setting.get_app_settings() | |
400 | defaults.update(self._get_hg_ui_settings()) |
|
399 | defaults.update(self._get_hg_ui_settings()) | |
401 |
|
400 | |||
402 | c.ini = kallithea.CONFIG |
|
401 | c.ini = kallithea.CONFIG | |
403 | server_info = db.Setting.get_server_info() |
|
402 | server_info = db.Setting.get_server_info() | |
404 | for key, val in server_info.items(): |
|
403 | for key, val in server_info.items(): | |
405 | setattr(c, key, val) |
|
404 | setattr(c, key, val) | |
406 |
|
405 | |||
407 | return htmlfill.render( |
|
406 | return htmlfill.render( | |
408 | render('admin/settings/settings.html'), |
|
407 | render('admin/settings/settings.html'), | |
409 | defaults=defaults, |
|
408 | defaults=defaults, | |
410 | encoding="UTF-8", |
|
409 | encoding="UTF-8", | |
411 | force_defaults=False) |
|
410 | force_defaults=False) |
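
In the settings controller above, the changeset is a pure call-site migration: the Celery task helpers are no longer imported from kallithea.lib.celerylib.tasks but are reached through kallithea.model.async_tasks. A minimal sketch of that before/after pattern, using only the calls visible in this hunk (the wrapper function names below are illustrative, not part of the changeset):

    # new location of the task helpers (previously: from kallithea.lib.celerylib import tasks)
    from kallithea.model import async_tasks

    def send_test_email(recipients, subject, txt_body, html_body):
        # previously: tasks.send_email(recipients, subject, txt_body, html_body)
        async_tasks.send_email(recipients, subject, txt_body, html_body)

    def schedule_whoosh_reindex(repo_location, full_index=False):
        # previously: tasks.whoosh_index(repo_location, full_index)
        async_tasks.whoosh_index(repo_location, full_index)
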
@@ -1,213 +1,212 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.controllers.summary |
|
15 | kallithea.controllers.summary | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Summary controller for Kallithea |
|
18 | Summary controller for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Apr 18, 2010 |
|
22 | :created_on: Apr 18, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import calendar |
|
28 | import calendar | |
29 | import itertools |
|
29 | import itertools | |
30 | import logging |
|
30 | import logging | |
31 | import traceback |
|
31 | import traceback | |
32 | from datetime import date, timedelta |
|
32 | from datetime import date, timedelta | |
33 | from time import mktime |
|
33 | from time import mktime | |
34 |
|
34 | |||
35 | from beaker.cache import cache_region |
|
35 | from beaker.cache import cache_region | |
36 | from tg import request |
|
36 | from tg import request | |
37 | from tg import tmpl_context as c |
|
37 | from tg import tmpl_context as c | |
38 | from tg.i18n import ugettext as _ |
|
38 | from tg.i18n import ugettext as _ | |
39 | from webob.exc import HTTPBadRequest |
|
39 | from webob.exc import HTTPBadRequest | |
40 |
|
40 | |||
41 | from kallithea.lib import ext_json, webutils |
|
41 | from kallithea.lib import ext_json, webutils | |
42 | from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired |
|
42 | from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired | |
43 | from kallithea.lib.base import BaseRepoController, jsonify, render |
|
43 | from kallithea.lib.base import BaseRepoController, jsonify, render | |
44 | - from kallithea.lib.celerylib.tasks import get_commits_stats
45 | from kallithea.lib.conf import ALL_EXTS, ALL_READMES, LANGUAGES_EXTENSIONS_MAP |
|
44 | from kallithea.lib.conf import ALL_EXTS, ALL_READMES, LANGUAGES_EXTENSIONS_MAP | |
46 | from kallithea.lib.markup_renderer import MarkupRenderer |
|
45 | from kallithea.lib.markup_renderer import MarkupRenderer | |
47 | from kallithea.lib.page import Page |
|
46 | from kallithea.lib.page import Page | |
48 | from kallithea.lib.utils2 import safe_int, safe_str |
|
47 | from kallithea.lib.utils2 import safe_int, safe_str | |
49 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
48 | from kallithea.lib.vcs.backends.base import EmptyChangeset | |
50 | from kallithea.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, NodeDoesNotExistError |
|
49 | from kallithea.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, NodeDoesNotExistError | |
51 | from kallithea.lib.vcs.nodes import FileNode |
|
50 | from kallithea.lib.vcs.nodes import FileNode | |
52 | - from kallithea.model import db
51 | + from kallithea.model import async_tasks, db
53 |
|
52 | |||
54 |
|
53 | |||
55 | log = logging.getLogger(__name__) |
|
54 | log = logging.getLogger(__name__) | |
56 |
|
55 | |||
57 | README_FILES = [''.join([x[0][0], x[1][0]]) for x in |
|
56 | README_FILES = [''.join([x[0][0], x[1][0]]) for x in | |
58 | sorted(list(itertools.product(ALL_READMES, ALL_EXTS)), |
|
57 | sorted(list(itertools.product(ALL_READMES, ALL_EXTS)), | |
59 | key=lambda y:y[0][1] + y[1][1])] |
|
58 | key=lambda y:y[0][1] + y[1][1])] | |
60 |
|
59 | |||
61 |
|
60 | |||
62 | class SummaryController(BaseRepoController): |
|
61 | class SummaryController(BaseRepoController): | |
63 |
|
62 | |||
64 | def __get_readme_data(self, db_repo): |
|
63 | def __get_readme_data(self, db_repo): | |
65 | repo_name = db_repo.repo_name |
|
64 | repo_name = db_repo.repo_name | |
66 | log.debug('Looking for README file') |
|
65 | log.debug('Looking for README file') | |
67 |
|
66 | |||
68 | @cache_region('long_term_file', '_get_readme_from_cache') |
|
67 | @cache_region('long_term_file', '_get_readme_from_cache') | |
69 | def _get_readme_from_cache(*_cache_keys): # parameters are not really used - only as caching key |
|
68 | def _get_readme_from_cache(*_cache_keys): # parameters are not really used - only as caching key | |
70 | readme_data = None |
|
69 | readme_data = None | |
71 | readme_file = None |
|
70 | readme_file = None | |
72 | try: |
|
71 | try: | |
73 | # gets the landing revision! or tip if fails |
|
72 | # gets the landing revision! or tip if fails | |
74 | cs = db_repo.get_landing_changeset() |
|
73 | cs = db_repo.get_landing_changeset() | |
75 | if isinstance(cs, EmptyChangeset): |
|
74 | if isinstance(cs, EmptyChangeset): | |
76 | raise EmptyRepositoryError() |
|
75 | raise EmptyRepositoryError() | |
77 | renderer = MarkupRenderer() |
|
76 | renderer = MarkupRenderer() | |
78 | for f in README_FILES: |
|
77 | for f in README_FILES: | |
79 | try: |
|
78 | try: | |
80 | readme = cs.get_node(f) |
|
79 | readme = cs.get_node(f) | |
81 | if not isinstance(readme, FileNode): |
|
80 | if not isinstance(readme, FileNode): | |
82 | continue |
|
81 | continue | |
83 | readme_file = f |
|
82 | readme_file = f | |
84 | log.debug('Found README file `%s` rendering...', |
|
83 | log.debug('Found README file `%s` rendering...', | |
85 | readme_file) |
|
84 | readme_file) | |
86 | readme_data = renderer.render(safe_str(readme.content), |
|
85 | readme_data = renderer.render(safe_str(readme.content), | |
87 | filename=f) |
|
86 | filename=f) | |
88 | break |
|
87 | break | |
89 | except NodeDoesNotExistError: |
|
88 | except NodeDoesNotExistError: | |
90 | continue |
|
89 | continue | |
91 | except ChangesetError: |
|
90 | except ChangesetError: | |
92 | log.error(traceback.format_exc()) |
|
91 | log.error(traceback.format_exc()) | |
93 | pass |
|
92 | pass | |
94 | except EmptyRepositoryError: |
|
93 | except EmptyRepositoryError: | |
95 | pass |
|
94 | pass | |
96 |
|
95 | |||
97 | return readme_data, readme_file |
|
96 | return readme_data, readme_file | |
98 |
|
97 | |||
99 | kind = 'README' |
|
98 | kind = 'README' | |
100 | return _get_readme_from_cache(repo_name, kind, c.db_repo.changeset_cache.get('raw_id')) |
|
99 | return _get_readme_from_cache(repo_name, kind, c.db_repo.changeset_cache.get('raw_id')) | |
101 |
|
100 | |||
102 | @LoginRequired(allow_default_user=True) |
|
101 | @LoginRequired(allow_default_user=True) | |
103 | @HasRepoPermissionLevelDecorator('read') |
|
102 | @HasRepoPermissionLevelDecorator('read') | |
104 | def index(self, repo_name): |
|
103 | def index(self, repo_name): | |
105 | p = safe_int(request.GET.get('page'), 1) |
|
104 | p = safe_int(request.GET.get('page'), 1) | |
106 | size = safe_int(request.GET.get('size'), 10) |
|
105 | size = safe_int(request.GET.get('size'), 10) | |
107 | try: |
|
106 | try: | |
108 | collection = c.db_repo_scm_instance.get_changesets(reverse=True) |
|
107 | collection = c.db_repo_scm_instance.get_changesets(reverse=True) | |
109 | except EmptyRepositoryError as e: |
|
108 | except EmptyRepositoryError as e: | |
110 | webutils.flash(e, category='warning') |
|
109 | webutils.flash(e, category='warning') | |
111 | collection = [] |
|
110 | collection = [] | |
112 | c.cs_pagination = Page(collection, page=p, items_per_page=size) |
|
111 | c.cs_pagination = Page(collection, page=p, items_per_page=size) | |
113 | page_revisions = [x.raw_id for x in list(c.cs_pagination)] |
|
112 | page_revisions = [x.raw_id for x in list(c.cs_pagination)] | |
114 | c.cs_comments = c.db_repo.get_comments(page_revisions) |
|
113 | c.cs_comments = c.db_repo.get_comments(page_revisions) | |
115 | c.cs_statuses = c.db_repo.statuses(page_revisions) |
|
114 | c.cs_statuses = c.db_repo.statuses(page_revisions) | |
116 |
|
115 | |||
117 | c.ssh_repo_url = None |
|
116 | c.ssh_repo_url = None | |
118 | if request.authuser.is_default_user: |
|
117 | if request.authuser.is_default_user: | |
119 | username = None |
|
118 | username = None | |
120 | else: |
|
119 | else: | |
121 | username = request.authuser.username |
|
120 | username = request.authuser.username | |
122 | if c.ssh_enabled: |
|
121 | if c.ssh_enabled: | |
123 | c.ssh_repo_url = c.db_repo.clone_url(clone_uri_tmpl=c.clone_ssh_tmpl) |
|
122 | c.ssh_repo_url = c.db_repo.clone_url(clone_uri_tmpl=c.clone_ssh_tmpl) | |
124 |
|
123 | |||
125 | c.clone_repo_url = c.db_repo.clone_url(clone_uri_tmpl=c.clone_uri_tmpl, with_id=False, username=username) |
|
124 | c.clone_repo_url = c.db_repo.clone_url(clone_uri_tmpl=c.clone_uri_tmpl, with_id=False, username=username) | |
126 | c.clone_repo_url_id = c.db_repo.clone_url(clone_uri_tmpl=c.clone_uri_tmpl, with_id=True, username=username) |
|
125 | c.clone_repo_url_id = c.db_repo.clone_url(clone_uri_tmpl=c.clone_uri_tmpl, with_id=True, username=username) | |
127 |
|
126 | |||
128 | if c.db_repo.enable_statistics: |
|
127 | if c.db_repo.enable_statistics: | |
129 | c.show_stats = True |
|
128 | c.show_stats = True | |
130 | else: |
|
129 | else: | |
131 | c.show_stats = False |
|
130 | c.show_stats = False | |
132 |
|
131 | |||
133 | stats = db.Statistics.query() \ |
|
132 | stats = db.Statistics.query() \ | |
134 | .filter(db.Statistics.repository == c.db_repo) \ |
|
133 | .filter(db.Statistics.repository == c.db_repo) \ | |
135 | .scalar() |
|
134 | .scalar() | |
136 |
|
135 | |||
137 | c.stats_percentage = 0 |
|
136 | c.stats_percentage = 0 | |
138 |
|
137 | |||
139 | if stats and stats.languages: |
|
138 | if stats and stats.languages: | |
140 | lang_stats_d = ext_json.loads(stats.languages) |
|
139 | lang_stats_d = ext_json.loads(stats.languages) | |
141 | lang_stats = [(x, {"count": y, |
|
140 | lang_stats = [(x, {"count": y, | |
142 | "desc": LANGUAGES_EXTENSIONS_MAP.get(x, '?')}) |
|
141 | "desc": LANGUAGES_EXTENSIONS_MAP.get(x, '?')}) | |
143 | for x, y in lang_stats_d.items()] |
|
142 | for x, y in lang_stats_d.items()] | |
144 | lang_stats.sort(key=lambda k: (-k[1]['count'], k[0])) |
|
143 | lang_stats.sort(key=lambda k: (-k[1]['count'], k[0])) | |
145 | c.trending_languages = lang_stats[:10] |
|
144 | c.trending_languages = lang_stats[:10] | |
146 | else: |
|
145 | else: | |
147 | c.trending_languages = [] |
|
146 | c.trending_languages = [] | |
148 |
|
147 | |||
149 | c.enable_downloads = c.db_repo.enable_downloads |
|
148 | c.enable_downloads = c.db_repo.enable_downloads | |
150 | c.readme_data, c.readme_file = \ |
|
149 | c.readme_data, c.readme_file = \ | |
151 | self.__get_readme_data(c.db_repo) |
|
150 | self.__get_readme_data(c.db_repo) | |
152 | return render('summary/summary.html') |
|
151 | return render('summary/summary.html') | |
153 |
|
152 | |||
154 | @LoginRequired() |
|
153 | @LoginRequired() | |
155 | @HasRepoPermissionLevelDecorator('read') |
|
154 | @HasRepoPermissionLevelDecorator('read') | |
156 | @jsonify |
|
155 | @jsonify | |
157 | def repo_size(self, repo_name): |
|
156 | def repo_size(self, repo_name): | |
158 | if request.is_xhr: |
|
157 | if request.is_xhr: | |
159 | return c.db_repo._repo_size() |
|
158 | return c.db_repo._repo_size() | |
160 | else: |
|
159 | else: | |
161 | raise HTTPBadRequest() |
|
160 | raise HTTPBadRequest() | |
162 |
|
161 | |||
163 | @LoginRequired(allow_default_user=True) |
|
162 | @LoginRequired(allow_default_user=True) | |
164 | @HasRepoPermissionLevelDecorator('read') |
|
163 | @HasRepoPermissionLevelDecorator('read') | |
165 | def statistics(self, repo_name): |
|
164 | def statistics(self, repo_name): | |
166 | if c.db_repo.enable_statistics: |
|
165 | if c.db_repo.enable_statistics: | |
167 | c.show_stats = True |
|
166 | c.show_stats = True | |
168 | c.no_data_msg = _('No data ready yet') |
|
167 | c.no_data_msg = _('No data ready yet') | |
169 | else: |
|
168 | else: | |
170 | c.show_stats = False |
|
169 | c.show_stats = False | |
171 | c.no_data_msg = _('Statistics are disabled for this repository') |
|
170 | c.no_data_msg = _('Statistics are disabled for this repository') | |
172 |
|
171 | |||
173 | td = date.today() + timedelta(days=1) |
|
172 | td = date.today() + timedelta(days=1) | |
174 | td_1m = td - timedelta(days=calendar.monthrange(td.year, td.month)[1]) |
|
173 | td_1m = td - timedelta(days=calendar.monthrange(td.year, td.month)[1]) | |
175 | td_1y = td - timedelta(days=365) |
|
174 | td_1y = td - timedelta(days=365) | |
176 |
|
175 | |||
177 | ts_min_m = mktime(td_1m.timetuple()) |
|
176 | ts_min_m = mktime(td_1m.timetuple()) | |
178 | ts_min_y = mktime(td_1y.timetuple()) |
|
177 | ts_min_y = mktime(td_1y.timetuple()) | |
179 | ts_max_y = mktime(td.timetuple()) |
|
178 | ts_max_y = mktime(td.timetuple()) | |
180 | c.ts_min = ts_min_m |
|
179 | c.ts_min = ts_min_m | |
181 | c.ts_max = ts_max_y |
|
180 | c.ts_max = ts_max_y | |
182 |
|
181 | |||
183 | stats = db.Statistics.query() \ |
|
182 | stats = db.Statistics.query() \ | |
184 | .filter(db.Statistics.repository == c.db_repo) \ |
|
183 | .filter(db.Statistics.repository == c.db_repo) \ | |
185 | .scalar() |
|
184 | .scalar() | |
186 | c.stats_percentage = 0 |
|
185 | c.stats_percentage = 0 | |
187 | if stats and stats.languages: |
|
186 | if stats and stats.languages: | |
188 | c.commit_data = ext_json.loads(stats.commit_activity) |
|
187 | c.commit_data = ext_json.loads(stats.commit_activity) | |
189 | c.overview_data = ext_json.loads(stats.commit_activity_combined) |
|
188 | c.overview_data = ext_json.loads(stats.commit_activity_combined) | |
190 |
|
189 | |||
191 | lang_stats_d = ext_json.loads(stats.languages) |
|
190 | lang_stats_d = ext_json.loads(stats.languages) | |
192 | lang_stats = [(x, {"count": y, |
|
191 | lang_stats = [(x, {"count": y, | |
193 | "desc": LANGUAGES_EXTENSIONS_MAP.get(x, '?')}) |
|
192 | "desc": LANGUAGES_EXTENSIONS_MAP.get(x, '?')}) | |
194 | for x, y in lang_stats_d.items()] |
|
193 | for x, y in lang_stats_d.items()] | |
195 | lang_stats.sort(key=lambda k: (-k[1]['count'], k[0])) |
|
194 | lang_stats.sort(key=lambda k: (-k[1]['count'], k[0])) | |
196 | c.trending_languages = lang_stats[:10] |
|
195 | c.trending_languages = lang_stats[:10] | |
197 |
|
196 | |||
198 | last_rev = stats.stat_on_revision + 1 |
|
197 | last_rev = stats.stat_on_revision + 1 | |
199 | c.repo_last_rev = c.db_repo_scm_instance.count() \ |
|
198 | c.repo_last_rev = c.db_repo_scm_instance.count() \ | |
200 | if c.db_repo_scm_instance.revisions else 0 |
|
199 | if c.db_repo_scm_instance.revisions else 0 | |
201 | if last_rev == 0 or c.repo_last_rev == 0: |
|
200 | if last_rev == 0 or c.repo_last_rev == 0: | |
202 | pass |
|
201 | pass | |
203 | else: |
|
202 | else: | |
204 | c.stats_percentage = '%.2f' % ((float((last_rev)) / |
|
203 | c.stats_percentage = '%.2f' % ((float((last_rev)) / | |
205 | c.repo_last_rev) * 100) |
|
204 | c.repo_last_rev) * 100) | |
206 | else: |
|
205 | else: | |
207 | c.commit_data = {} |
|
206 | c.commit_data = {} | |
208 | c.overview_data = ([[ts_min_y, 0], [ts_max_y, 10]]) |
|
207 | c.overview_data = ([[ts_min_y, 0], [ts_max_y, 10]]) | |
209 | c.trending_languages = [] |
|
208 | c.trending_languages = [] | |
210 |
|
209 | |||
211 | recurse_limit = 500 # don't recurse more than 500 times when parsing |
|
210 | recurse_limit = 500 # don't recurse more than 500 times when parsing | |
212 | - get_commits_stats(c.db_repo.repo_name, ts_min_y, ts_max_y, recurse_limit)
211 | + async_tasks.get_commits_stats(c.db_repo.repo_name, ts_min_y, ts_max_y, recurse_limit)
213 | return render('summary/statistics.html') |
|
212 | return render('summary/statistics.html') |
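
The summary controller follows the same pattern: get_commits_stats is no longer imported directly from kallithea.lib.celerylib.tasks but is called through the async_tasks module. A hedged sketch mirroring the statistics() action above (the wrapper function name is illustrative):

    from kallithea.model import async_tasks

    def schedule_commit_stats(repo_name, ts_min_y, ts_max_y):
        recurse_limit = 500  # don't recurse more than 500 times when parsing, as in the controller above
        async_tasks.get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit)
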
@@ -1,92 +1,92 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | """ |
|
3 | """ | |
4 | Kallithea wrapper of Celery |
|
4 | Kallithea wrapper of Celery | |
5 |
|
5 | |||
6 | The Celery configuration is in the Kallithea ini file but must be converted to an |
|
6 | The Celery configuration is in the Kallithea ini file but must be converted to an | |
7 | entirely different format before Celery can use it. |
|
7 | entirely different format before Celery can use it. | |
8 |
|
8 | |||
9 | We read the configuration from tg.config at module import time. This module can |
|
9 | We read the configuration from tg.config at module import time. This module can | |
10 | thus not be imported in global scope but must be imported on demand in function |
|
10 | thus not be imported in global scope but must be imported on demand in function | |
11 | scope after tg.config has been initialized. |
|
11 | scope after tg.config has been initialized. | |
12 |
|
12 | |||
13 | To make sure that the config really has been initialized, we check one of the |
|
13 | To make sure that the config really has been initialized, we check one of the | |
14 | mandatory settings. |
|
14 | mandatory settings. | |
15 | """ |
|
15 | """ | |
16 |
|
16 | |||
17 | import logging |
|
17 | import logging | |
18 |
|
18 | |||
19 | import celery |
|
19 | import celery | |
20 | import tg |
|
20 | import tg | |
21 |
|
21 | |||
22 | import kallithea |
|
22 | import kallithea | |
23 |
|
23 | |||
24 |
|
24 | |||
25 | class CeleryConfig(object): |
|
25 | class CeleryConfig(object): | |
26 | - imports = ['kallithea.lib.celerylib.tasks']
26 | + imports = ['kallithea.model.async_tasks']
27 | task_always_eager = False |
|
27 | task_always_eager = False | |
28 |
|
28 | |||
29 | # map from Kallithea .ini Celery 3 config names to Celery 4 config names |
|
29 | # map from Kallithea .ini Celery 3 config names to Celery 4 config names | |
30 | celery3_compat = { |
|
30 | celery3_compat = { | |
31 | 'broker.url': 'broker_url', |
|
31 | 'broker.url': 'broker_url', | |
32 | 'celery.accept.content': 'accept_content', |
|
32 | 'celery.accept.content': 'accept_content', | |
33 | 'celery.always.eager': 'task_always_eager', |
|
33 | 'celery.always.eager': 'task_always_eager', | |
34 | 'celery.amqp.task.result.expires': 'result_expires', |
|
34 | 'celery.amqp.task.result.expires': 'result_expires', | |
35 | 'celeryd.concurrency': 'worker_concurrency', |
|
35 | 'celeryd.concurrency': 'worker_concurrency', | |
36 | 'celeryd.max.tasks.per.child': 'worker_max_tasks_per_child', |
|
36 | 'celeryd.max.tasks.per.child': 'worker_max_tasks_per_child', | |
37 | #'celery.imports' ends up unchanged |
|
37 | #'celery.imports' ends up unchanged | |
38 | 'celery.result.backend': 'result_backend', |
|
38 | 'celery.result.backend': 'result_backend', | |
39 | 'celery.result.serializer': 'result_serializer', |
|
39 | 'celery.result.serializer': 'result_serializer', | |
40 | 'celery.task.serializer': 'task_serializer', |
|
40 | 'celery.task.serializer': 'task_serializer', | |
41 | } |
|
41 | } | |
42 |
|
42 | |||
43 | list_config_names = """imports accept_content""".split() |
|
43 | list_config_names = """imports accept_content""".split() | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | desupported = set([ |
|
46 | desupported = set([ | |
47 | 'celery.result.dburi', |
|
47 | 'celery.result.dburi', | |
48 | 'celery.result.serialier', |
|
48 | 'celery.result.serialier', | |
49 | 'celery.send.task.error.emails', |
|
49 | 'celery.send.task.error.emails', | |
50 | ]) |
|
50 | ]) | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | log = logging.getLogger(__name__) |
|
53 | log = logging.getLogger(__name__) | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | def make_celery_config(config): |
|
56 | def make_celery_config(config): | |
57 | """Return Celery config object populated from relevant settings in a config dict, such as tg.config""" |
|
57 | """Return Celery config object populated from relevant settings in a config dict, such as tg.config""" | |
58 |
|
58 | |||
59 | celery_config = CeleryConfig() |
|
59 | celery_config = CeleryConfig() | |
60 |
|
60 | |||
61 | for config_key, config_value in sorted(config.items()): |
|
61 | for config_key, config_value in sorted(config.items()): | |
62 | if config_key in desupported and config_value: |
|
62 | if config_key in desupported and config_value: | |
63 | log.error('Celery configuration setting %r is no longer supported', config_key) |
|
63 | log.error('Celery configuration setting %r is no longer supported', config_key) | |
64 | celery_key = celery3_compat.get(config_key) |
|
64 | celery_key = celery3_compat.get(config_key) | |
65 | parts = config_key.split('.', 1) |
|
65 | parts = config_key.split('.', 1) | |
66 | if celery_key: # explicit Celery 3 backwards compatibility |
|
66 | if celery_key: # explicit Celery 3 backwards compatibility | |
67 | pass |
|
67 | pass | |
68 | elif parts[0] == 'celery' and len(parts) == 2: # Celery 4 config key |
|
68 | elif parts[0] == 'celery' and len(parts) == 2: # Celery 4 config key | |
69 | celery_key = parts[1] |
|
69 | celery_key = parts[1] | |
70 | else: |
|
70 | else: | |
71 | continue |
|
71 | continue | |
72 | if not isinstance(config_value, str): |
|
72 | if not isinstance(config_value, str): | |
73 | continue |
|
73 | continue | |
74 | if celery_key in list_config_names: |
|
74 | if celery_key in list_config_names: | |
75 | celery_value = config_value.split() |
|
75 | celery_value = config_value.split() | |
76 | elif config_value.isdigit(): |
|
76 | elif config_value.isdigit(): | |
77 | celery_value = int(config_value) |
|
77 | celery_value = int(config_value) | |
78 | elif config_value.lower() in ['true', 'false']: |
|
78 | elif config_value.lower() in ['true', 'false']: | |
79 | celery_value = config_value.lower() == 'true' |
|
79 | celery_value = config_value.lower() == 'true' | |
80 | else: |
|
80 | else: | |
81 | celery_value = config_value |
|
81 | celery_value = config_value | |
82 | setattr(celery_config, celery_key, celery_value) |
|
82 | setattr(celery_config, celery_key, celery_value) | |
83 | return celery_config |
|
83 | return celery_config | |
84 |
|
84 | |||
85 |
|
85 | |||
86 | def make_app(): |
|
86 | def make_app(): | |
87 | """Create celery app from the TurboGears configuration file""" |
|
87 | """Create celery app from the TurboGears configuration file""" | |
88 | app = celery.Celery() |
|
88 | app = celery.Celery() | |
89 | celery_config = make_celery_config(tg.config) |
|
89 | celery_config = make_celery_config(tg.config) | |
90 | kallithea.CELERY_EAGER = celery_config.task_always_eager |
|
90 | kallithea.CELERY_EAGER = celery_config.task_always_eager | |
91 | app.config_from_object(celery_config) |
|
91 | app.config_from_object(celery_config) | |
92 | return app |
|
92 | return app |
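
The wrapper above turns flat .ini keys into Celery settings: Celery 3 style names are remapped via celery3_compat, keys with a 'celery.' prefix keep only their suffix, digit strings become ints, and 'true'/'false' strings become booleans. A small illustration of make_celery_config from the code above, with placeholder .ini values (the URLs and numbers are examples, not Kallithea defaults):

    example_ini = {
        'celery.result.backend': 'redis://localhost:6379/0',         # Celery 3 name -> result_backend
        'celery.broker_url': 'amqp://guest:guest@localhost:5672//',  # 'celery.' prefix stripped -> broker_url
        'celeryd.concurrency': '2',                                   # digit string -> int
        'celery.task_always_eager': 'false',                          # 'true'/'false' -> bool
        'unrelated.setting': 'ignored',                               # keys without a mapping are skipped
    }

    celery_config = make_celery_config(example_ini)
    assert celery_config.result_backend == 'redis://localhost:6379/0'
    assert celery_config.worker_concurrency == 2
    assert celery_config.task_always_eager is False
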
@@ -1,521 +1,520 @@
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea. |
|
15 | kallithea.model.async_tasks | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Kallithea task modules, containing all tasks that are supposed to be run |
|
18 | Kallithea task modules, containing all tasks that are supposed to be run | |
19 | by the celery daemon |
|
19 | by the celery daemon | |
20 |
|
20 | |||
21 | This file was forked by the Kallithea project in July 2014. |
|
21 | This file was forked by the Kallithea project in July 2014. | |
22 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | Original author and date, and relevant copyright and licensing information is below: | |
23 | :created_on: Oct 6, 2010 |
|
23 | :created_on: Oct 6, 2010 | |
24 | :author: marcink |
|
24 | :author: marcink | |
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
26 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | :license: GPLv3, see LICENSE.md for more details. | |
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 | import email.message |
|
29 | import email.message | |
30 | import email.utils |
|
30 | import email.utils | |
31 | import os |
|
31 | import os | |
32 | import smtplib |
|
32 | import smtplib | |
33 | import time |
|
33 | import time | |
34 | import traceback |
|
34 | import traceback | |
35 | from collections import OrderedDict |
|
35 | from collections import OrderedDict | |
36 | from operator import itemgetter |
|
36 | from operator import itemgetter | |
37 | from time import mktime |
|
37 | from time import mktime | |
38 |
|
38 | |||
39 | import celery.utils.log |
|
39 | import celery.utils.log | |
40 | from tg import config |
|
40 | from tg import config | |
41 |
|
41 | |||
42 | import kallithea |
|
42 | import kallithea | |
43 | import kallithea.lib.helpers as h |
|
43 | import kallithea.lib.helpers as h | |
44 | from kallithea.lib import celerylib, conf, ext_json, hooks |
|
44 | from kallithea.lib import celerylib, conf, ext_json, hooks | |
45 | from kallithea.lib.indexers.daemon import WhooshIndexingDaemon |
|
45 | from kallithea.lib.indexers.daemon import WhooshIndexingDaemon | |
46 | from kallithea.lib.utils2 import asbool, ascii_bytes |
|
46 | from kallithea.lib.utils2 import asbool, ascii_bytes | |
47 | from kallithea.lib.vcs.utils import author_email |
|
47 | from kallithea.lib.vcs.utils import author_email | |
48 | from kallithea.model import db, userlog |
|
48 | from kallithea.model import db, repo, userlog | |
49 | from kallithea.model.repo import RepoModel |
|
|||
50 |
|
49 | |||
51 |
|
50 | |||
52 | __all__ = ['whoosh_index', 'get_commits_stats', 'send_email'] |
|
51 | __all__ = ['whoosh_index', 'get_commits_stats', 'send_email'] | |
53 |
|
52 | |||
54 |
|
53 | |||
55 | log = celery.utils.log.get_task_logger(__name__) |
|
54 | log = celery.utils.log.get_task_logger(__name__) | |
56 |
|
55 | |||
57 |
|
56 | |||
58 | @celerylib.task |
|
57 | @celerylib.task | |
59 | @celerylib.locked_task |
|
58 | @celerylib.locked_task | |
60 | @celerylib.dbsession |
|
59 | @celerylib.dbsession | |
61 | def whoosh_index(repo_location, full_index): |
|
60 | def whoosh_index(repo_location, full_index): | |
62 | celerylib.get_session() # initialize database connection |
|
61 | celerylib.get_session() # initialize database connection | |
63 |
|
62 | |||
64 | index_location = config['index_dir'] |
|
63 | index_location = config['index_dir'] | |
65 | WhooshIndexingDaemon(index_location=index_location, |
|
64 | WhooshIndexingDaemon(index_location=index_location, | |
66 | repo_location=repo_location) \ |
|
65 | repo_location=repo_location) \ | |
67 | .run(full_index=full_index) |
|
66 | .run(full_index=full_index) | |
68 |
|
67 | |||
69 |
|
68 | |||
70 | # for JS data compatibility, strip double quotes from the person key |
|
69 | # for JS data compatibility, strip double quotes from the person key | |
71 | def akc(k): |
|
70 | def akc(k): | |
72 | return h.person(k).replace('"', '') |
|
71 | return h.person(k).replace('"', '') | |
73 |
|
72 | |||
74 |
|
73 | |||
75 | @celerylib.task |
|
74 | @celerylib.task | |
76 | @celerylib.dbsession |
|
75 | @celerylib.dbsession | |
77 | def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100): |
|
76 | def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100): | |
78 | DBS = celerylib.get_session() |
|
77 | DBS = celerylib.get_session() | |
79 | lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y, |
|
78 | lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y, | |
80 | ts_max_y) |
|
79 | ts_max_y) | |
81 | lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir'] # Backward compatibility for TurboGears < 2.4 |
|
80 | lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir'] # Backward compatibility for TurboGears < 2.4 | |
82 |
|
81 | |||
83 | log.info('running task with lockkey %s', lockkey) |
|
82 | log.info('running task with lockkey %s', lockkey) | |
84 |
|
83 | |||
85 | try: |
|
84 | try: | |
86 | lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey)) |
|
85 | lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey)) | |
87 |
|
86 | |||
88 | co_day_auth_aggr = {} |
|
87 | co_day_auth_aggr = {} | |
89 | commits_by_day_aggregate = {} |
|
88 | commits_by_day_aggregate = {} | |
90 | repo = db.Repository.get_by_repo_name(repo_name) |
|
89 | db_repo = db.Repository.get_by_repo_name(repo_name) | |
91 | if repo is None: |
|
90 | if db_repo is None: | |
92 | return True |
|
91 | return True | |
93 |
|
92 | |||
94 | repo = repo.scm_instance |
|
93 | scm_repo = db_repo.scm_instance | |
95 | repo_size = repo.count() |
|
94 | repo_size = scm_repo.count() | |
96 | # return if repo has no revisions |
|
95 | # return if repo has no revisions | |
97 | if repo_size < 1: |
|
96 | if repo_size < 1: | |
98 | lock.release() |
|
97 | lock.release() | |
99 | return True |
|
98 | return True | |
100 |
|
99 | |||
101 | skip_date_limit = True |
|
100 | skip_date_limit = True | |
102 | parse_limit = int(config.get('commit_parse_limit')) |
|
101 | parse_limit = int(config.get('commit_parse_limit')) | |
103 | last_rev = None |
|
102 | last_rev = None | |
104 | last_cs = None |
|
103 | last_cs = None | |
105 | timegetter = itemgetter('time') |
|
104 | timegetter = itemgetter('time') | |
106 |
|
105 | |||
107 | dbrepo = DBS.query(db.Repository) \ |
|
106 | dbrepo = DBS.query(db.Repository) \ | |
108 | .filter(db.Repository.repo_name == repo_name).scalar() |
|
107 | .filter(db.Repository.repo_name == repo_name).scalar() | |
109 | cur_stats = DBS.query(db.Statistics) \ |
|
108 | cur_stats = DBS.query(db.Statistics) \ | |
110 | .filter(db.Statistics.repository == dbrepo).scalar() |
|
109 | .filter(db.Statistics.repository == dbrepo).scalar() | |
111 |
|
110 | |||
112 | if cur_stats is not None: |
|
111 | if cur_stats is not None: | |
113 | last_rev = cur_stats.stat_on_revision |
|
112 | last_rev = cur_stats.stat_on_revision | |
114 |
|
113 | |||
115 | if last_rev == repo.get_changeset().revision and repo_size > 1: |
|
114 | if last_rev == scm_repo.get_changeset().revision and repo_size > 1: | |
116 | # pass silently without doing any work if we're not on the first revision or |
|
115 | # pass silently without doing any work if we're not on the first revision or | |
117 | # the current parsing state (from the db marker) is already at the |
|
116 | # the current parsing state (from the db marker) is already at the | |
118 | # last revision |
|
117 | # last revision | |
119 | lock.release() |
|
118 | lock.release() | |
120 | return True |
|
119 | return True | |
121 |
|
120 | |||
122 | if cur_stats: |
|
121 | if cur_stats: | |
123 | commits_by_day_aggregate = OrderedDict(ext_json.loads( |
|
122 | commits_by_day_aggregate = OrderedDict(ext_json.loads( | |
124 | cur_stats.commit_activity_combined)) |
|
123 | cur_stats.commit_activity_combined)) | |
125 | co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity) |
|
124 | co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity) | |
126 |
|
125 | |||
127 | log.debug('starting parsing %s', parse_limit) |
|
126 | log.debug('starting parsing %s', parse_limit) | |
128 |
|
127 | |||
129 | last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0 |
|
128 | last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0 | |
130 | log.debug('Getting revisions from %s to %s', |
|
129 | log.debug('Getting revisions from %s to %s', | |
131 | last_rev, last_rev + parse_limit |
|
130 | last_rev, last_rev + parse_limit | |
132 | ) |
|
131 | ) | |
133 | for cs in repo[last_rev:last_rev + parse_limit]: |
|
132 | for cs in scm_repo[last_rev:last_rev + parse_limit]: | |
134 | log.debug('parsing %s', cs) |
|
133 | log.debug('parsing %s', cs) | |
135 | last_cs = cs # remember last parsed changeset |
|
134 | last_cs = cs # remember last parsed changeset | |
136 | tt = cs.date.timetuple() |
|
135 | tt = cs.date.timetuple() | |
137 | k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0)) |
|
136 | k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0)) | |
138 |
|
137 | |||
139 | if akc(cs.author) in co_day_auth_aggr: |
|
138 | if akc(cs.author) in co_day_auth_aggr: | |
140 | try: |
|
139 | try: | |
141 | l = [timegetter(x) for x in |
|
140 | l = [timegetter(x) for x in | |
142 | co_day_auth_aggr[akc(cs.author)]['data']] |
|
141 | co_day_auth_aggr[akc(cs.author)]['data']] | |
143 | time_pos = l.index(k) |
|
142 | time_pos = l.index(k) | |
144 | except ValueError: |
|
143 | except ValueError: | |
145 | time_pos = None |
|
144 | time_pos = None | |
146 |
|
145 | |||
147 | if time_pos is not None and time_pos >= 0: |
|
146 | if time_pos is not None and time_pos >= 0: | |
148 | datadict = \ |
|
147 | datadict = \ | |
149 | co_day_auth_aggr[akc(cs.author)]['data'][time_pos] |
|
148 | co_day_auth_aggr[akc(cs.author)]['data'][time_pos] | |
150 |
|
149 | |||
151 | datadict["commits"] += 1 |
|
150 | datadict["commits"] += 1 | |
152 | datadict["added"] += len(cs.added) |
|
151 | datadict["added"] += len(cs.added) | |
153 | datadict["changed"] += len(cs.changed) |
|
152 | datadict["changed"] += len(cs.changed) | |
154 | datadict["removed"] += len(cs.removed) |
|
153 | datadict["removed"] += len(cs.removed) | |
155 |
|
154 | |||
156 | else: |
|
155 | else: | |
157 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: |
|
156 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: | |
158 |
|
157 | |||
159 | datadict = {"time": k, |
|
158 | datadict = {"time": k, | |
160 | "commits": 1, |
|
159 | "commits": 1, | |
161 | "added": len(cs.added), |
|
160 | "added": len(cs.added), | |
162 | "changed": len(cs.changed), |
|
161 | "changed": len(cs.changed), | |
163 | "removed": len(cs.removed), |
|
162 | "removed": len(cs.removed), | |
164 | } |
|
163 | } | |
165 | co_day_auth_aggr[akc(cs.author)]['data'] \ |
|
164 | co_day_auth_aggr[akc(cs.author)]['data'] \ | |
166 | .append(datadict) |
|
165 | .append(datadict) | |
167 |
|
166 | |||
168 | else: |
|
167 | else: | |
169 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: |
|
168 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: | |
170 | co_day_auth_aggr[akc(cs.author)] = { |
|
169 | co_day_auth_aggr[akc(cs.author)] = { | |
171 | "label": akc(cs.author), |
|
170 | "label": akc(cs.author), | |
172 | "data": [{"time": k, |
|
171 | "data": [{"time": k, | |
173 | "commits": 1, |
|
172 | "commits": 1, | |
174 | "added": len(cs.added), |
|
173 | "added": len(cs.added), | |
175 | "changed": len(cs.changed), |
|
174 | "changed": len(cs.changed), | |
176 | "removed": len(cs.removed), |
|
175 | "removed": len(cs.removed), | |
177 | }], |
|
176 | }], | |
178 | "schema": ["commits"], |
|
177 | "schema": ["commits"], | |
179 | } |
|
178 | } | |
180 |
|
179 | |||
181 | # gather all data by day |
|
180 | # gather all data by day | |
182 | if k in commits_by_day_aggregate: |
|
181 | if k in commits_by_day_aggregate: | |
183 | commits_by_day_aggregate[k] += 1 |
|
182 | commits_by_day_aggregate[k] += 1 | |
184 | else: |
|
183 | else: | |
185 | commits_by_day_aggregate[k] = 1 |
|
184 | commits_by_day_aggregate[k] = 1 | |
186 |
|
185 | |||
187 | overview_data = sorted(commits_by_day_aggregate.items(), |
|
186 | overview_data = sorted(commits_by_day_aggregate.items(), | |
188 | key=itemgetter(0)) |
|
187 | key=itemgetter(0)) | |
189 |
|
188 | |||
190 | if not co_day_auth_aggr: |
|
189 | if not co_day_auth_aggr: | |
191 | co_day_auth_aggr[akc(repo.contact)] = { |
|
190 | co_day_auth_aggr[akc(scm_repo.contact)] = { | |
192 | "label": akc(repo.contact), |
|
191 | "label": akc(scm_repo.contact), | |
193 | "data": [0, 1], |
|
192 | "data": [0, 1], | |
194 | "schema": ["commits"], |
|
193 | "schema": ["commits"], | |
195 | } |
|
194 | } | |
196 |
|
195 | |||
197 | stats = cur_stats if cur_stats else db.Statistics() |
|
196 | stats = cur_stats if cur_stats else db.Statistics() | |
198 | stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr)) |
|
197 | stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr)) | |
199 | stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data)) |
|
198 | stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data)) | |
200 |
|
199 | |||
201 | log.debug('last revision %s', last_rev) |
|
200 | log.debug('last revision %s', last_rev) | |
202 | leftovers = len(repo.revisions[last_rev:]) |
|
201 | leftovers = len(scm_repo.revisions[last_rev:]) | |
203 | log.debug('revisions to parse %s', leftovers) |
|
202 | log.debug('revisions to parse %s', leftovers) | |
204 |
|
203 | |||
205 | if last_rev == 0 or leftovers < parse_limit: |
|
204 | if last_rev == 0 or leftovers < parse_limit: | |
206 | log.debug('getting code trending stats') |
|
205 | log.debug('getting code trending stats') | |
207 | stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name))) |
|
206 | stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name))) | |
208 |
|
207 | |||
209 | try: |
|
208 | try: | |
210 | stats.repository = dbrepo |
|
209 | stats.repository = dbrepo | |
211 | stats.stat_on_revision = last_cs.revision if last_cs else 0 |
|
210 | stats.stat_on_revision = last_cs.revision if last_cs else 0 | |
212 | DBS.add(stats) |
|
211 | DBS.add(stats) | |
213 | DBS.commit() |
|
212 | DBS.commit() | |
214 | except: |
|
213 | except: | |
215 | log.error(traceback.format_exc()) |
|
214 | log.error(traceback.format_exc()) | |
216 | DBS.rollback() |
|
215 | DBS.rollback() | |
217 | lock.release() |
|
216 | lock.release() | |
218 | return False |
|
217 | return False | |
219 |
|
218 | |||
220 | # final release |
|
219 | # final release | |
221 | lock.release() |
|
220 | lock.release() | |
222 |
|
221 | |||
223 | # execute another task if celery is enabled |
|
222 | # execute another task if celery is enabled | |
224 | if len(repo.revisions) > 1 and kallithea.CELERY_APP and recurse_limit > 0: |
|
223 | if len(scm_repo.revisions) > 1 and kallithea.CELERY_APP and recurse_limit > 0: | |
225 | get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1) |
|
224 | get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1) | |
226 | elif recurse_limit <= 0: |
|
225 | elif recurse_limit <= 0: | |
227 | log.debug('Not recursing - limit has been reached') |
|
226 | log.debug('Not recursing - limit has been reached') | |
228 | else: |
|
227 | else: | |
229 | log.debug('Not recursing') |
|
228 | log.debug('Not recursing') | |
230 | except celerylib.LockHeld: |
|
229 | except celerylib.LockHeld: | |
231 | log.info('Task with key %s already running', lockkey) |
|
230 | log.info('Task with key %s already running', lockkey) | |
232 | return 'Task with key %s already running' % lockkey |
|
231 | return 'Task with key %s already running' % lockkey | |
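
Editor's note: for orientation, the per-author aggregate that get_commits_stats() builds and persists has the following shape; this is a hedged illustration mirroring the dictionaries constructed in the loop above, with an invented author name, timestamp and counts:

    # Shape of the structure JSON-encoded into db.Statistics.commit_activity.
    co_day_auth_aggr = {
        'Jane Doe': {
            'label': 'Jane Doe',                      # cleaned author name, see akc()
            'data': [
                {'time': 1388534400.0,                # midnight of the commit's day (mktime)
                 'commits': 3, 'added': 5, 'changed': 2, 'removed': 1},
            ],
            'schema': ['commits'],
        },
    }
    # commit_activity_combined holds the overview: a sorted list of
    # (day_timestamp, total_commits) pairs across all authors.
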
233 |
|
232 | |||
234 |
|
233 | |||
235 | @celerylib.task |
|
234 | @celerylib.task | |
236 | @celerylib.dbsession |
|
235 | @celerylib.dbsession | |
237 | def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None): |
|
236 | def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None): | |
238 | """ |
|
237 | """ | |
239 | Sends an email with defined parameters from the .ini files. |
|
238 | Sends an email with defined parameters from the .ini files. | |
240 |
|
239 | |||
241 | :param recipients: list of recipients, if this is None, the defined email |
|
240 | :param recipients: list of recipients, if this is None, the defined email | |
242 | address from field 'email_to' and all admins is used instead |
|
241 | address from field 'email_to' and all admins is used instead | |
243 | :param subject: subject of the mail |
|
242 | :param subject: subject of the mail | |
244 | :param body: plain text body of the mail |
|
243 | :param body: plain text body of the mail | |
245 | :param html_body: html version of body |
|
244 | :param html_body: html version of body | |
246 | :param headers: dictionary of prepopulated e-mail headers |
|
245 | :param headers: dictionary of prepopulated e-mail headers | |
247 | :param from_name: full name to be used as sender of this mail - often a |
|
246 | :param from_name: full name to be used as sender of this mail - often a | |
248 | .full_name_or_username value |
|
247 | .full_name_or_username value | |
249 | """ |
|
248 | """ | |
250 | assert isinstance(recipients, list), recipients |
|
249 | assert isinstance(recipients, list), recipients | |
251 | if headers is None: |
|
250 | if headers is None: | |
252 | headers = {} |
|
251 | headers = {} | |
253 | else: |
|
252 | else: | |
254 | # do not modify the original headers object passed by the caller |
|
253 | # do not modify the original headers object passed by the caller | |
255 | headers = headers.copy() |
|
254 | headers = headers.copy() | |
256 |
|
255 | |||
257 | email_config = config |
|
256 | email_config = config | |
258 | email_prefix = email_config.get('email_prefix', '') |
|
257 | email_prefix = email_config.get('email_prefix', '') | |
259 | if email_prefix: |
|
258 | if email_prefix: | |
260 | subject = "%s %s" % (email_prefix, subject) |
|
259 | subject = "%s %s" % (email_prefix, subject) | |
261 |
|
260 | |||
262 | if not recipients: |
|
261 | if not recipients: | |
263 | # if recipients are not defined we send to email_config + all admins |
|
262 | # if recipients are not defined we send to email_config + all admins | |
264 | recipients = [u.email for u in db.User.query() |
|
263 | recipients = [u.email for u in db.User.query() | |
265 | .filter(db.User.admin == True).all()] |
|
264 | .filter(db.User.admin == True).all()] | |
266 | if email_config.get('email_to') is not None: |
|
265 | if email_config.get('email_to') is not None: | |
267 | recipients += email_config.get('email_to').split(',') |
|
266 | recipients += email_config.get('email_to').split(',') | |
268 |
|
267 | |||
269 | # If there are still no recipients, there are no admins and no address |
|
268 | # If there are still no recipients, there are no admins and no address | |
270 | # configured in email_to, so return. |
|
269 | # configured in email_to, so return. | |
271 | if not recipients: |
|
270 | if not recipients: | |
272 | log.error("No recipients specified and no fallback available.") |
|
271 | log.error("No recipients specified and no fallback available.") | |
273 | return False |
|
272 | return False | |
274 |
|
273 | |||
275 | log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients)) |
|
274 | log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients)) | |
276 |
|
275 | |||
277 | # SMTP sender |
|
276 | # SMTP sender | |
278 | app_email_from = email_config.get('app_email_from', 'Kallithea') |
|
277 | app_email_from = email_config.get('app_email_from', 'Kallithea') | |
279 | # 'From' header |
|
278 | # 'From' header | |
280 | if from_name is not None: |
|
279 | if from_name is not None: | |
281 | # set From header based on from_name but with a generic e-mail address |
|
280 | # set From header based on from_name but with a generic e-mail address | |
282 | # In case app_email_from is in "Some Name <e-mail>" format, we first |
|
281 | # In case app_email_from is in "Some Name <e-mail>" format, we first | |
283 | # extract the e-mail address. |
|
282 | # extract the e-mail address. | |
284 | envelope_addr = author_email(app_email_from) |
|
283 | envelope_addr = author_email(app_email_from) | |
285 | headers['From'] = '"%s" <%s>' % ( |
|
284 | headers['From'] = '"%s" <%s>' % ( | |
286 | email.utils.quote('%s (no-reply)' % from_name), |
|
285 | email.utils.quote('%s (no-reply)' % from_name), | |
287 | envelope_addr) |
|
286 | envelope_addr) | |
288 |
|
287 | |||
289 | smtp_server = email_config.get('smtp_server') |
|
288 | smtp_server = email_config.get('smtp_server') | |
290 | smtp_port = email_config.get('smtp_port') |
|
289 | smtp_port = email_config.get('smtp_port') | |
291 | smtp_use_tls = asbool(email_config.get('smtp_use_tls')) |
|
290 | smtp_use_tls = asbool(email_config.get('smtp_use_tls')) | |
292 | smtp_use_ssl = asbool(email_config.get('smtp_use_ssl')) |
|
291 | smtp_use_ssl = asbool(email_config.get('smtp_use_ssl')) | |
293 | smtp_auth = email_config.get('smtp_auth') # undocumented - overrule automatic choice of auth mechanism |
|
292 | smtp_auth = email_config.get('smtp_auth') # undocumented - overrule automatic choice of auth mechanism | |
294 | smtp_username = email_config.get('smtp_username') |
|
293 | smtp_username = email_config.get('smtp_username') | |
295 | smtp_password = email_config.get('smtp_password') |
|
294 | smtp_password = email_config.get('smtp_password') | |
296 |
|
295 | |||
297 | logmsg = ("Mail details:\n" |
|
296 | logmsg = ("Mail details:\n" | |
298 | "recipients: %s\n" |
|
297 | "recipients: %s\n" | |
299 | "headers: %s\n" |
|
298 | "headers: %s\n" | |
300 | "subject: %s\n" |
|
299 | "subject: %s\n" | |
301 | "body:\n%s\n" |
|
300 | "body:\n%s\n" | |
302 | "html:\n%s\n" |
|
301 | "html:\n%s\n" | |
303 | % (' '.join(recipients), headers, subject, body, html_body)) |
|
302 | % (' '.join(recipients), headers, subject, body, html_body)) | |
304 |
|
303 | |||
305 | if smtp_server: |
|
304 | if smtp_server: | |
306 | log.debug("Sending e-mail. " + logmsg) |
|
305 | log.debug("Sending e-mail. " + logmsg) | |
307 | else: |
|
306 | else: | |
308 | log.error("SMTP mail server not configured - cannot send e-mail.") |
|
307 | log.error("SMTP mail server not configured - cannot send e-mail.") | |
309 | log.warning(logmsg) |
|
308 | log.warning(logmsg) | |
310 | return False |
|
309 | return False | |
311 |
|
310 | |||
312 | msg = email.message.EmailMessage() |
|
311 | msg = email.message.EmailMessage() | |
313 | msg['Subject'] = subject |
|
312 | msg['Subject'] = subject | |
314 | msg['From'] = app_email_from # fallback - might be overridden by a header |
|
313 | msg['From'] = app_email_from # fallback - might be overridden by a header | |
315 | msg['To'] = ', '.join(recipients) |
|
314 | msg['To'] = ', '.join(recipients) | |
316 | msg['Date'] = email.utils.formatdate(time.time()) |
|
315 | msg['Date'] = email.utils.formatdate(time.time()) | |
317 |
|
316 | |||
318 | for key, value in headers.items(): |
|
317 | for key, value in headers.items(): | |
319 | del msg[key] # Delete key first to make sure add_header will replace header (if any), no matter the casing |
|
318 | del msg[key] # Delete key first to make sure add_header will replace header (if any), no matter the casing | |
320 | msg.add_header(key, value) |
|
319 | msg.add_header(key, value) | |
321 |
|
320 | |||
322 | msg.set_content(body) |
|
321 | msg.set_content(body) | |
323 | msg.add_alternative(html_body, subtype='html') |
|
322 | msg.add_alternative(html_body, subtype='html') | |
324 |
|
323 | |||
325 | try: |
|
324 | try: | |
326 | if smtp_use_ssl: |
|
325 | if smtp_use_ssl: | |
327 | smtp_serv = smtplib.SMTP_SSL(smtp_server, smtp_port) |
|
326 | smtp_serv = smtplib.SMTP_SSL(smtp_server, smtp_port) | |
328 | else: |
|
327 | else: | |
329 | smtp_serv = smtplib.SMTP(smtp_server, smtp_port) |
|
328 | smtp_serv = smtplib.SMTP(smtp_server, smtp_port) | |
330 |
|
329 | |||
331 | if smtp_use_tls: |
|
330 | if smtp_use_tls: | |
332 | smtp_serv.starttls() |
|
331 | smtp_serv.starttls() | |
333 |
|
332 | |||
334 | if smtp_auth: |
|
333 | if smtp_auth: | |
335 | smtp_serv.ehlo() # populate esmtp_features |
|
334 | smtp_serv.ehlo() # populate esmtp_features | |
336 | smtp_serv.esmtp_features["auth"] = smtp_auth |
|
335 | smtp_serv.esmtp_features["auth"] = smtp_auth | |
337 |
|
336 | |||
338 | if smtp_username and smtp_password is not None: |
|
337 | if smtp_username and smtp_password is not None: | |
339 | smtp_serv.login(smtp_username, smtp_password) |
|
338 | smtp_serv.login(smtp_username, smtp_password) | |
340 |
|
339 | |||
341 | smtp_serv.sendmail(app_email_from, recipients, msg.as_string()) |
|
340 | smtp_serv.sendmail(app_email_from, recipients, msg.as_string()) | |
342 | smtp_serv.quit() |
|
341 | smtp_serv.quit() | |
343 |
|
342 | |||
344 | log.info('Mail was sent to: %s' % recipients) |
|
343 | log.info('Mail was sent to: %s' % recipients) | |
345 | except: |
|
344 | except: | |
346 | log.error('Mail sending failed') |
|
345 | log.error('Mail sending failed') | |
347 | log.error(traceback.format_exc()) |
|
346 | log.error(traceback.format_exc()) | |
348 | return False |
|
347 | return False | |
349 | return True |
|
348 | return True | |
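
Editor's note: a hedged usage sketch of the task above; the addresses and message texts are placeholders, and actual delivery still requires smtp_server to be configured in the .ini file:

    from kallithea.model import async_tasks  # the module's new name, per this changeset

    # Illustrative call only - recipients and bodies are placeholders.
    async_tasks.send_email(
        recipients=['admin@example.com'],
        subject='Test message',
        body='Plain text body',
        html_body='<p>HTML body</p>',
        headers={'X-Kallithea-Notification-Type': 'message'},
        from_name='Jane Doe',
    )
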
350 |
|
349 | |||
351 |
|
350 | |||
352 | @celerylib.task |
|
351 | @celerylib.task | |
353 | @celerylib.dbsession |
|
352 | @celerylib.dbsession | |
354 | def create_repo(form_data, cur_user): |
|
353 | def create_repo(form_data, cur_user): | |
355 | DBS = celerylib.get_session() |
|
354 | DBS = celerylib.get_session() | |
356 |
|
355 | |||
357 | cur_user = db.User.guess_instance(cur_user) |
|
356 | cur_user = db.User.guess_instance(cur_user) | |
358 |
|
357 | |||
359 | owner = cur_user |
|
358 | owner = cur_user | |
360 | repo_name = form_data['repo_name'] |
|
359 | repo_name = form_data['repo_name'] | |
361 | repo_name_full = form_data['repo_name_full'] |
|
360 | repo_name_full = form_data['repo_name_full'] | |
362 | repo_type = form_data['repo_type'] |
|
361 | repo_type = form_data['repo_type'] | |
363 | description = form_data['repo_description'] |
|
362 | description = form_data['repo_description'] | |
364 | private = form_data['repo_private'] |
|
363 | private = form_data['repo_private'] | |
365 | clone_uri = form_data.get('clone_uri') |
|
364 | clone_uri = form_data.get('clone_uri') | |
366 | repo_group = form_data['repo_group'] |
|
365 | repo_group = form_data['repo_group'] | |
367 | landing_rev = form_data['repo_landing_rev'] |
|
366 | landing_rev = form_data['repo_landing_rev'] | |
368 | copy_fork_permissions = form_data.get('copy_permissions') |
|
367 | copy_fork_permissions = form_data.get('copy_permissions') | |
369 | copy_group_permissions = form_data.get('repo_copy_permissions') |
|
368 | copy_group_permissions = form_data.get('repo_copy_permissions') | |
370 | fork_of = form_data.get('fork_parent_id') |
|
369 | fork_of = form_data.get('fork_parent_id') | |
371 | state = form_data.get('repo_state', db.Repository.STATE_PENDING) |
|
370 | state = form_data.get('repo_state', db.Repository.STATE_PENDING) | |
372 |
|
371 | |||
373 | # repo creation defaults, private and repo_type are filled in form |
|
372 | # repo creation defaults, private and repo_type are filled in form | |
374 | defs = db.Setting.get_default_repo_settings(strip_prefix=True) |
|
373 | defs = db.Setting.get_default_repo_settings(strip_prefix=True) | |
375 | enable_statistics = defs.get('repo_enable_statistics') |
|
374 | enable_statistics = defs.get('repo_enable_statistics') | |
376 | enable_downloads = defs.get('repo_enable_downloads') |
|
375 | enable_downloads = defs.get('repo_enable_downloads') | |
377 |
|
376 | |||
378 | try: |
|
377 | try: | |
379 | repo = RepoModel()._create_repo( |
|
378 | db_repo = repo.RepoModel()._create_repo( | |
380 | repo_name=repo_name_full, |
|
379 | repo_name=repo_name_full, | |
381 | repo_type=repo_type, |
|
380 | repo_type=repo_type, | |
382 | description=description, |
|
381 | description=description, | |
383 | owner=owner, |
|
382 | owner=owner, | |
384 | private=private, |
|
383 | private=private, | |
385 | clone_uri=clone_uri, |
|
384 | clone_uri=clone_uri, | |
386 | repo_group=repo_group, |
|
385 | repo_group=repo_group, | |
387 | landing_rev=landing_rev, |
|
386 | landing_rev=landing_rev, | |
388 | fork_of=fork_of, |
|
387 | fork_of=fork_of, | |
389 | copy_fork_permissions=copy_fork_permissions, |
|
388 | copy_fork_permissions=copy_fork_permissions, | |
390 | copy_group_permissions=copy_group_permissions, |
|
389 | copy_group_permissions=copy_group_permissions, | |
391 | enable_statistics=enable_statistics, |
|
390 | enable_statistics=enable_statistics, | |
392 | enable_downloads=enable_downloads, |
|
391 | enable_downloads=enable_downloads, | |
393 | state=state |
|
392 | state=state | |
394 | ) |
|
393 | ) | |
395 |
|
394 | |||
396 | userlog.action_logger(cur_user, 'user_created_repo', |
|
395 | userlog.action_logger(cur_user, 'user_created_repo', | |
397 | form_data['repo_name_full'], '') |
|
396 | form_data['repo_name_full'], '') | |
398 |
|
397 | |||
399 | DBS.commit() |
|
398 | DBS.commit() | |
400 | # now create this repo on Filesystem |
|
399 | # now create this repo on Filesystem | |
401 | RepoModel()._create_filesystem_repo( |
|
400 | repo.RepoModel()._create_filesystem_repo( | |
402 | repo_name=repo_name, |
|
401 | repo_name=repo_name, | |
403 | repo_type=repo_type, |
|
402 | repo_type=repo_type, | |
404 | repo_group=db.RepoGroup.guess_instance(repo_group), |
|
403 | repo_group=db.RepoGroup.guess_instance(repo_group), | |
405 | clone_uri=clone_uri, |
|
404 | clone_uri=clone_uri, | |
406 | ) |
|
405 | ) | |
407 | repo = db.Repository.get_by_repo_name(repo_name_full) |
|
406 | db_repo = db.Repository.get_by_repo_name(repo_name_full) | |
408 | hooks.log_create_repository(repo.get_dict(), created_by=owner.username) |
|
407 | hooks.log_create_repository(db_repo.get_dict(), created_by=owner.username) | |
409 |
|
408 | |||
410 | # update repo changeset caches initially |
|
409 | # update repo changeset caches initially | |
411 | repo.update_changeset_cache() |
|
410 | db_repo.update_changeset_cache() | |
412 |
|
411 | |||
413 | # set new created state |
|
412 | # set new created state | |
414 | repo.set_state(db.Repository.STATE_CREATED) |
|
413 | db_repo.set_state(db.Repository.STATE_CREATED) | |
415 | DBS.commit() |
|
414 | DBS.commit() | |
416 | except Exception as e: |
|
415 | except Exception as e: | |
417 | log.warning('Exception %s occurred when creating repository, ' |
|
416 | log.warning('Exception %s occurred when creating repository, ' | |
418 | 'doing cleanup...' % e) |
|
417 | 'doing cleanup...' % e) | |
419 | # rollback things manually ! |
|
418 | # rollback things manually ! | |
420 | repo = db.Repository.get_by_repo_name(repo_name_full) |
|
419 | db_repo = db.Repository.get_by_repo_name(repo_name_full) | |
421 | if repo: |
|
420 | if db_repo: | |
422 | db.Repository.delete(repo.repo_id) |
|
421 | db.Repository.delete(db_repo.repo_id) | |
423 | DBS.commit() |
|
422 | DBS.commit() | |
424 | RepoModel()._delete_filesystem_repo(repo) |
|
423 | repo.RepoModel()._delete_filesystem_repo(db_repo) | |
425 | raise |
|
424 | raise | |
426 |
|
425 | |||
427 | return True |
|
426 | return True | |
428 |
|
427 | |||
429 |
|
428 | |||
430 | @celerylib.task |
|
429 | @celerylib.task | |
431 | @celerylib.dbsession |
|
430 | @celerylib.dbsession | |
432 | def create_repo_fork(form_data, cur_user): |
|
431 | def create_repo_fork(form_data, cur_user): | |
433 | """ |
|
432 | """ | |
434 | Creates a fork of a repository using internal VCS methods |
|
433 | Creates a fork of a repository using internal VCS methods | |
435 |
|
434 | |||
436 | :param form_data: |
|
435 | :param form_data: | |
437 | :param cur_user: |
|
436 | :param cur_user: | |
438 | """ |
|
437 | """ | |
439 | DBS = celerylib.get_session() |
|
438 | DBS = celerylib.get_session() | |
440 |
|
439 | |||
441 | base_path = kallithea.CONFIG['base_path'] |
|
440 | base_path = kallithea.CONFIG['base_path'] | |
442 | cur_user = db.User.guess_instance(cur_user) |
|
441 | cur_user = db.User.guess_instance(cur_user) | |
443 |
|
442 | |||
444 | repo_name = form_data['repo_name'] # fork in this case |
|
443 | repo_name = form_data['repo_name'] # fork in this case | |
445 | repo_name_full = form_data['repo_name_full'] |
|
444 | repo_name_full = form_data['repo_name_full'] | |
446 |
|
445 | |||
447 | repo_type = form_data['repo_type'] |
|
446 | repo_type = form_data['repo_type'] | |
448 | owner = cur_user |
|
447 | owner = cur_user | |
449 | private = form_data['private'] |
|
448 | private = form_data['private'] | |
450 | clone_uri = form_data.get('clone_uri') |
|
449 | clone_uri = form_data.get('clone_uri') | |
451 | repo_group = form_data['repo_group'] |
|
450 | repo_group = form_data['repo_group'] | |
452 | landing_rev = form_data['landing_rev'] |
|
451 | landing_rev = form_data['landing_rev'] | |
453 | copy_fork_permissions = form_data.get('copy_permissions') |
|
452 | copy_fork_permissions = form_data.get('copy_permissions') | |
454 |
|
453 | |||
455 | try: |
|
454 | try: | |
456 | fork_of = db.Repository.guess_instance(form_data.get('fork_parent_id')) |
|
455 | fork_of = db.Repository.guess_instance(form_data.get('fork_parent_id')) | |
457 |
|
456 | |||
458 | RepoModel()._create_repo( |
|
457 | repo.RepoModel()._create_repo( | |
459 | repo_name=repo_name_full, |
|
458 | repo_name=repo_name_full, | |
460 | repo_type=repo_type, |
|
459 | repo_type=repo_type, | |
461 | description=form_data['description'], |
|
460 | description=form_data['description'], | |
462 | owner=owner, |
|
461 | owner=owner, | |
463 | private=private, |
|
462 | private=private, | |
464 | clone_uri=clone_uri, |
|
463 | clone_uri=clone_uri, | |
465 | repo_group=repo_group, |
|
464 | repo_group=repo_group, | |
466 | landing_rev=landing_rev, |
|
465 | landing_rev=landing_rev, | |
467 | fork_of=fork_of, |
|
466 | fork_of=fork_of, | |
468 | copy_fork_permissions=copy_fork_permissions |
|
467 | copy_fork_permissions=copy_fork_permissions | |
469 | ) |
|
468 | ) | |
470 | userlog.action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full, |
|
469 | userlog.action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full, | |
471 | fork_of.repo_name, '') |
|
470 | fork_of.repo_name, '') | |
472 | DBS.commit() |
|
471 | DBS.commit() | |
473 |
|
472 | |||
474 | source_repo_path = os.path.join(base_path, fork_of.repo_name) |
|
473 | source_repo_path = os.path.join(base_path, fork_of.repo_name) | |
475 |
|
474 | |||
476 | # now create this repo on Filesystem |
|
475 | # now create this repo on Filesystem | |
477 | RepoModel()._create_filesystem_repo( |
|
476 | repo.RepoModel()._create_filesystem_repo( | |
478 | repo_name=repo_name, |
|
477 | repo_name=repo_name, | |
479 | repo_type=repo_type, |
|
478 | repo_type=repo_type, | |
480 | repo_group=db.RepoGroup.guess_instance(repo_group), |
|
479 | repo_group=db.RepoGroup.guess_instance(repo_group), | |
481 | clone_uri=source_repo_path, |
|
480 | clone_uri=source_repo_path, | |
482 | ) |
|
481 | ) | |
483 | repo = db.Repository.get_by_repo_name(repo_name_full) |
|
482 | db_repo = db.Repository.get_by_repo_name(repo_name_full) | |
484 | hooks.log_create_repository(repo.get_dict(), created_by=owner.username) |
|
483 | hooks.log_create_repository(db_repo.get_dict(), created_by=owner.username) | |
485 |
|
484 | |||
486 | # update repo changeset caches initially |
|
485 | # update repo changeset caches initially | |
487 | repo.update_changeset_cache() |
|
486 | db_repo.update_changeset_cache() | |
488 |
|
487 | |||
489 | # set new created state |
|
488 | # set new created state | |
490 | repo.set_state(db.Repository.STATE_CREATED) |
|
489 | db_repo.set_state(db.Repository.STATE_CREATED) | |
491 | DBS.commit() |
|
490 | DBS.commit() | |
492 | except Exception as e: |
|
491 | except Exception as e: | |
493 | log.warning('Exception %s occurred when forking repository, ' |
|
492 | log.warning('Exception %s occurred when forking repository, ' | |
494 | 'doing cleanup...' % e) |
|
493 | 'doing cleanup...' % e) | |
495 | # rollback things manually ! |
|
494 | # rollback things manually ! | |
496 | repo = db.Repository.get_by_repo_name(repo_name_full) |
|
495 | db_repo = db.Repository.get_by_repo_name(repo_name_full) | |
497 | if repo: |
|
496 | if db_repo: | |
498 | db.Repository.delete(repo.repo_id) |
|
497 | db.Repository.delete(db_repo.repo_id) | |
499 | DBS.commit() |
|
498 | DBS.commit() | |
500 | RepoModel()._delete_filesystem_repo(repo) |
|
499 | repo.RepoModel()._delete_filesystem_repo(db_repo) | |
501 | raise |
|
500 | raise | |
502 |
|
501 | |||
503 | return True |
|
502 | return True | |
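
Editor's note: create_repo_fork() reads a fixed set of keys from form_data, as seen above. A hedged sketch of such a dict; all values are placeholders (in practice the dict is produced by the fork form), and the landing_rev format is an assumption:

    # Keys consumed by create_repo_fork() above; values are placeholders.
    form_data = {
        'repo_name': 'my-fork',               # short name of the fork
        'repo_name_full': 'group/my-fork',    # including its repo group path
        'repo_type': 'hg',
        'description': 'Fork for experiments',
        'private': False,
        'clone_uri': None,
        'repo_group': None,                   # or a RepoGroup id
        'landing_rev': 'rev:tip',             # assumed landing revision format
        'copy_permissions': False,
        'fork_parent_id': 42,                 # id of the repository being forked
    }
    create_repo_fork(form_data, cur_user='admin')
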
504 |
|
503 | |||
505 |
|
504 | |||
506 | def __get_codes_stats(repo_name): |
|
505 | def __get_codes_stats(repo_name): | |
507 | repo = db.Repository.get_by_repo_name(repo_name).scm_instance |
|
506 | scm_repo = db.Repository.get_by_repo_name(repo_name).scm_instance | |
508 |
|
507 | |||
509 | tip = repo.get_changeset() |
|
508 | tip = scm_repo.get_changeset() | |
510 | code_stats = {} |
|
509 | code_stats = {} | |
511 |
|
510 | |||
512 | for _topnode, _dirnodes, filenodes in tip.walk('/'): |
|
511 | for _topnode, _dirnodes, filenodes in tip.walk('/'): | |
513 | for filenode in filenodes: |
|
512 | for filenode in filenodes: | |
514 | ext = filenode.extension.lower() |
|
513 | ext = filenode.extension.lower() | |
515 | if ext in conf.LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary: |
|
514 | if ext in conf.LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary: | |
516 | if ext in code_stats: |
|
515 | if ext in code_stats: | |
517 | code_stats[ext] += 1 |
|
516 | code_stats[ext] += 1 | |
518 | else: |
|
517 | else: | |
519 | code_stats[ext] = 1 |
|
518 | code_stats[ext] = 1 | |
520 |
|
519 | |||
521 | return code_stats or {} |
|
520 | return code_stats or {} |
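
Editor's note: __get_codes_stats() walks the tip changeset and counts non-binary files per known extension; the resulting mapping is what ends up JSON-encoded in stats.languages. A hedged illustration with invented extensions and counts:

    # Example shape only - extensions and counts are invented.
    code_stats = {'py': 120, 'js': 35, 'css': 12}
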
@@ -1,234 +1,233 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.model.notification |
|
15 | kallithea.model.notification | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Model for notifications |
|
18 | Model for notifications | |
19 |
|
19 | |||
20 |
|
20 | |||
21 | This file was forked by the Kallithea project in July 2014. |
|
21 | This file was forked by the Kallithea project in July 2014. | |
22 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | Original author and date, and relevant copyright and licensing information is below: | |
23 | :created_on: Nov 20, 2011 |
|
23 | :created_on: Nov 20, 2011 | |
24 | :author: marcink |
|
24 | :author: marcink | |
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
26 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | :license: GPLv3, see LICENSE.md for more details. | |
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 | import datetime |
|
29 | import datetime | |
30 | import logging |
|
30 | import logging | |
31 |
|
31 | |||
32 | from tg import app_globals |
|
32 | from tg import app_globals | |
33 | from tg import tmpl_context as c |
|
33 | from tg import tmpl_context as c | |
34 | from tg.i18n import ugettext as _ |
|
34 | from tg.i18n import ugettext as _ | |
35 |
|
35 | |||
36 | from kallithea.lib.utils2 import fmt_date |
|
36 | from kallithea.lib.utils2 import fmt_date | |
37 | from kallithea.model import db |
|
37 | from kallithea.model import async_tasks, db | |
38 |
|
38 | |||
39 |
|
39 | |||
40 | log = logging.getLogger(__name__) |
|
40 | log = logging.getLogger(__name__) | |
41 |
|
41 | |||
42 |
|
42 | |||
43 | class NotificationModel(object): |
|
43 | class NotificationModel(object): | |
44 |
|
44 | |||
45 | TYPE_CHANGESET_COMMENT = 'cs_comment' |
|
45 | TYPE_CHANGESET_COMMENT = 'cs_comment' | |
46 | TYPE_MESSAGE = 'message' |
|
46 | TYPE_MESSAGE = 'message' | |
47 | TYPE_MENTION = 'mention' # not used |
|
47 | TYPE_MENTION = 'mention' # not used | |
48 | TYPE_REGISTRATION = 'registration' |
|
48 | TYPE_REGISTRATION = 'registration' | |
49 | TYPE_PULL_REQUEST = 'pull_request' |
|
49 | TYPE_PULL_REQUEST = 'pull_request' | |
50 | TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment' |
|
50 | TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment' | |
51 |
|
51 | |||
52 | def create(self, created_by, body, recipients=None, |
|
52 | def create(self, created_by, body, recipients=None, | |
53 | type_=TYPE_MESSAGE, with_email=True, |
|
53 | type_=TYPE_MESSAGE, with_email=True, | |
54 | email_kwargs=None, repo_name=None): |
|
54 | email_kwargs=None, repo_name=None): | |
55 | """ |
|
55 | """ | |
56 |
|
56 | |||
57 | Creates notification of given type |
|
57 | Creates notification of given type | |
58 |
|
58 | |||
59 | :param created_by: int, str or User instance. User who created this |
|
59 | :param created_by: int, str or User instance. User who created this | |
60 | notification |
|
60 | notification | |
61 | :param body: |
|
61 | :param body: | |
62 | :param recipients: list of int, str or User objects, when None |
|
62 | :param recipients: list of int, str or User objects, when None | |
63 | is given send to all admins |
|
63 | is given send to all admins | |
64 | :param type_: type of notification |
|
64 | :param type_: type of notification | |
65 | :param with_email: send email with this notification |
|
65 | :param with_email: send email with this notification | |
66 | :param email_kwargs: additional dict to pass as args to email template |
|
66 | :param email_kwargs: additional dict to pass as args to email template | |
67 | """ |
|
67 | """ | |
68 | import kallithea.lib.helpers as h |
|
68 | import kallithea.lib.helpers as h | |
69 | from kallithea.lib.celerylib import tasks |
|
|||
70 | email_kwargs = email_kwargs or {} |
|
69 | email_kwargs = email_kwargs or {} | |
71 | if recipients and not getattr(recipients, '__iter__', False): |
|
70 | if recipients and not getattr(recipients, '__iter__', False): | |
72 | raise Exception('recipients must be a list or iterable') |
|
71 | raise Exception('recipients must be a list or iterable') | |
73 |
|
72 | |||
74 | created_by_obj = db.User.guess_instance(created_by) |
|
73 | created_by_obj = db.User.guess_instance(created_by) | |
75 |
|
74 | |||
76 | recipients_objs = set() |
|
75 | recipients_objs = set() | |
77 | if recipients: |
|
76 | if recipients: | |
78 | for u in recipients: |
|
77 | for u in recipients: | |
79 | obj = db.User.guess_instance(u) |
|
78 | obj = db.User.guess_instance(u) | |
80 | if obj is not None: |
|
79 | if obj is not None: | |
81 | recipients_objs.add(obj) |
|
80 | recipients_objs.add(obj) | |
82 | else: |
|
81 | else: | |
83 | # TODO: inform user that requested operation couldn't be completed |
|
82 | # TODO: inform user that requested operation couldn't be completed | |
84 | log.error('cannot email unknown user %r', u) |
|
83 | log.error('cannot email unknown user %r', u) | |
85 | log.debug('sending notifications %s to %s', |
|
84 | log.debug('sending notifications %s to %s', | |
86 | type_, recipients_objs |
|
85 | type_, recipients_objs | |
87 | ) |
|
86 | ) | |
88 | elif recipients is None: |
|
87 | elif recipients is None: | |
89 | # recipients being None means send to all admins |
|
88 | # recipients being None means send to all admins | |
90 | recipients_objs = db.User.query().filter(db.User.admin == True).all() |
|
89 | recipients_objs = db.User.query().filter(db.User.admin == True).all() | |
91 | log.debug('sending notifications %s to admins: %s', |
|
90 | log.debug('sending notifications %s to admins: %s', | |
92 | type_, recipients_objs |
|
91 | type_, recipients_objs | |
93 | ) |
|
92 | ) | |
94 | #else: silently skip notification mails? |
|
93 | #else: silently skip notification mails? | |
95 |
|
94 | |||
96 | if not with_email: |
|
95 | if not with_email: | |
97 | return |
|
96 | return | |
98 |
|
97 | |||
99 | headers = {} |
|
98 | headers = {} | |
100 | headers['X-Kallithea-Notification-Type'] = type_ |
|
99 | headers['X-Kallithea-Notification-Type'] = type_ | |
101 | if 'threading' in email_kwargs: |
|
100 | if 'threading' in email_kwargs: | |
102 | headers['References'] = ' '.join('<%s>' % x for x in email_kwargs['threading']) |
|
101 | headers['References'] = ' '.join('<%s>' % x for x in email_kwargs['threading']) | |
103 |
|
102 | |||
104 | # this is passed into template |
|
103 | # this is passed into template | |
105 | created_on = fmt_date(datetime.datetime.now()) |
|
104 | created_on = fmt_date(datetime.datetime.now()) | |
106 | html_kwargs = { |
|
105 | html_kwargs = { | |
107 | 'body': h.render_w_mentions(body, repo_name), |
|
106 | 'body': h.render_w_mentions(body, repo_name), | |
108 | 'when': created_on, |
|
107 | 'when': created_on, | |
109 | 'user': created_by_obj.username, |
|
108 | 'user': created_by_obj.username, | |
110 | } |
|
109 | } | |
111 |
|
110 | |||
112 | txt_kwargs = { |
|
111 | txt_kwargs = { | |
113 | 'body': body, |
|
112 | 'body': body, | |
114 | 'when': created_on, |
|
113 | 'when': created_on, | |
115 | 'user': created_by_obj.username, |
|
114 | 'user': created_by_obj.username, | |
116 | } |
|
115 | } | |
117 |
|
116 | |||
118 | html_kwargs.update(email_kwargs) |
|
117 | html_kwargs.update(email_kwargs) | |
119 | txt_kwargs.update(email_kwargs) |
|
118 | txt_kwargs.update(email_kwargs) | |
120 | email_subject = EmailNotificationModel() \ |
|
119 | email_subject = EmailNotificationModel() \ | |
121 | .get_email_description(type_, **txt_kwargs) |
|
120 | .get_email_description(type_, **txt_kwargs) | |
122 | email_txt_body = EmailNotificationModel() \ |
|
121 | email_txt_body = EmailNotificationModel() \ | |
123 | .get_email_tmpl(type_, 'txt', **txt_kwargs) |
|
122 | .get_email_tmpl(type_, 'txt', **txt_kwargs) | |
124 | email_html_body = EmailNotificationModel() \ |
|
123 | email_html_body = EmailNotificationModel() \ | |
125 | .get_email_tmpl(type_, 'html', **html_kwargs) |
|
124 | .get_email_tmpl(type_, 'html', **html_kwargs) | |
126 |
|
125 | |||
127 | # don't send email to the person who caused the notification, except for |
|
126 | # don't send email to the person who caused the notification, except for | |
128 | # notifications about new pull requests where the author is explicitly |
|
127 | # notifications about new pull requests where the author is explicitly | |
129 | # added. |
|
128 | # added. | |
130 | rec_mails = set(obj.email for obj in recipients_objs) |
|
129 | rec_mails = set(obj.email for obj in recipients_objs) | |
131 | if type_ == NotificationModel.TYPE_PULL_REQUEST: |
|
130 | if type_ == NotificationModel.TYPE_PULL_REQUEST: | |
132 | rec_mails.add(created_by_obj.email) |
|
131 | rec_mails.add(created_by_obj.email) | |
133 | else: |
|
132 | else: | |
134 | rec_mails.discard(created_by_obj.email) |
|
133 | rec_mails.discard(created_by_obj.email) | |
135 |
|
134 | |||
136 | # send email with notification to participants |
|
135 | # send email with notification to participants | |
137 | for rec_mail in sorted(rec_mails): |
|
136 | for rec_mail in sorted(rec_mails): | |
138 | tasks.send_email([rec_mail], email_subject, email_txt_body, |
|
137 | async_tasks.send_email([rec_mail], email_subject, email_txt_body, | |
139 | email_html_body, headers, |
|
138 | email_html_body, headers, | |
140 | from_name=created_by_obj.full_name_or_username) |
|
139 | from_name=created_by_obj.full_name_or_username) | |
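
Editor's note: a hedged usage sketch of the method above; the user name, body and repository name are placeholders. Passing recipients=None notifies all admin users, and an optional 'threading' entry in email_kwargs becomes the References header:

    from kallithea.model.notification import NotificationModel

    # Illustrative call only - names and message text are placeholders.
    NotificationModel().create(
        created_by='jane',                               # int id, username or User instance
        body='Please review the proposed changes.',
        recipients=None,                                 # None -> all admin users
        type_=NotificationModel.TYPE_MESSAGE,
        with_email=True,
        email_kwargs={'threading': ['msg-id-1@example.com']},
        repo_name='group/my-repo',
    )
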
141 |
|
140 | |||
142 |
|
141 | |||
143 | class EmailNotificationModel(object): |
|
142 | class EmailNotificationModel(object): | |
144 |
|
143 | |||
145 | TYPE_CHANGESET_COMMENT = NotificationModel.TYPE_CHANGESET_COMMENT |
|
144 | TYPE_CHANGESET_COMMENT = NotificationModel.TYPE_CHANGESET_COMMENT | |
146 | TYPE_MESSAGE = NotificationModel.TYPE_MESSAGE # only used for testing |
|
145 | TYPE_MESSAGE = NotificationModel.TYPE_MESSAGE # only used for testing | |
147 | # NotificationModel.TYPE_MENTION is not used |
|
146 | # NotificationModel.TYPE_MENTION is not used | |
148 | TYPE_PASSWORD_RESET = 'password_link' |
|
147 | TYPE_PASSWORD_RESET = 'password_link' | |
149 | TYPE_REGISTRATION = NotificationModel.TYPE_REGISTRATION |
|
148 | TYPE_REGISTRATION = NotificationModel.TYPE_REGISTRATION | |
150 | TYPE_PULL_REQUEST = NotificationModel.TYPE_PULL_REQUEST |
|
149 | TYPE_PULL_REQUEST = NotificationModel.TYPE_PULL_REQUEST | |
151 | TYPE_PULL_REQUEST_COMMENT = NotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
150 | TYPE_PULL_REQUEST_COMMENT = NotificationModel.TYPE_PULL_REQUEST_COMMENT | |
152 | TYPE_DEFAULT = 'default' |
|
151 | TYPE_DEFAULT = 'default' | |
153 |
|
152 | |||
154 | def __init__(self): |
|
153 | def __init__(self): | |
155 | super(EmailNotificationModel, self).__init__() |
|
154 | super(EmailNotificationModel, self).__init__() | |
156 | self._tmpl_lookup = app_globals.mako_lookup |
|
155 | self._tmpl_lookup = app_globals.mako_lookup | |
157 | self.email_types = { |
|
156 | self.email_types = { | |
158 | self.TYPE_CHANGESET_COMMENT: 'changeset_comment', |
|
157 | self.TYPE_CHANGESET_COMMENT: 'changeset_comment', | |
159 | self.TYPE_PASSWORD_RESET: 'password_reset', |
|
158 | self.TYPE_PASSWORD_RESET: 'password_reset', | |
160 | self.TYPE_REGISTRATION: 'registration', |
|
159 | self.TYPE_REGISTRATION: 'registration', | |
161 | self.TYPE_DEFAULT: 'default', |
|
160 | self.TYPE_DEFAULT: 'default', | |
162 | self.TYPE_PULL_REQUEST: 'pull_request', |
|
161 | self.TYPE_PULL_REQUEST: 'pull_request', | |
163 | self.TYPE_PULL_REQUEST_COMMENT: 'pull_request_comment', |
|
162 | self.TYPE_PULL_REQUEST_COMMENT: 'pull_request_comment', | |
164 | } |
|
163 | } | |
165 | self._subj_map = { |
|
164 | self._subj_map = { | |
166 | self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s by %(cs_author_username)s'), |
|
165 | self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s by %(cs_author_username)s'), | |
167 | self.TYPE_MESSAGE: 'Test Message', |
|
166 | self.TYPE_MESSAGE: 'Test Message', | |
168 | # self.TYPE_PASSWORD_RESET |
|
167 | # self.TYPE_PASSWORD_RESET | |
169 | self.TYPE_REGISTRATION: _('New user %(new_username)s registered'), |
|
168 | self.TYPE_REGISTRATION: _('New user %(new_username)s registered'), | |
170 | # self.TYPE_DEFAULT |
|
169 | # self.TYPE_DEFAULT | |
171 | self.TYPE_PULL_REQUEST: _('[Review] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'), |
|
170 | self.TYPE_PULL_REQUEST: _('[Review] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'), | |
172 | self.TYPE_PULL_REQUEST_COMMENT: _('[Comment] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'), |
|
171 | self.TYPE_PULL_REQUEST_COMMENT: _('[Comment] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'), | |
173 | } |
|
172 | } | |
174 |
|
173 | |||
175 | def get_email_description(self, type_, **kwargs): |
|
174 | def get_email_description(self, type_, **kwargs): | |
176 | """ |
|
175 | """ | |
177 | return subject for email based on given type |
|
176 | return subject for email based on given type | |
178 | """ |
|
177 | """ | |
179 | tmpl = self._subj_map[type_] |
|
178 | tmpl = self._subj_map[type_] | |
180 | try: |
|
179 | try: | |
181 | subj = tmpl % kwargs |
|
180 | subj = tmpl % kwargs | |
182 | except KeyError as e: |
|
181 | except KeyError as e: | |
183 | log.error('error generating email subject for %r from %s: %s', type_, ', '.join(self._subj_map), e) |
|
182 | log.error('error generating email subject for %r from %s: %s', type_, ', '.join(self._subj_map), e) | |
184 | raise |
|
183 | raise | |
185 | # gmail doesn't do proper threading but will ignore leading square |
|
184 | # gmail doesn't do proper threading but will ignore leading square | |
186 | # bracket content ... so that is where we put status info |
|
185 | # bracket content ... so that is where we put status info | |
187 | bracket_tags = [] |
|
186 | bracket_tags = [] | |
188 | status_change = kwargs.get('status_change') |
|
187 | status_change = kwargs.get('status_change') | |
189 | if status_change: |
|
188 | if status_change: | |
190 | bracket_tags.append(str(status_change)) # apply str to evaluate LazyString before .join |
|
189 | bracket_tags.append(str(status_change)) # apply str to evaluate LazyString before .join | |
191 | if kwargs.get('closing_pr'): |
|
190 | if kwargs.get('closing_pr'): | |
192 | bracket_tags.append(_('Closing')) |
|
191 | bracket_tags.append(_('Closing')) | |
193 | if bracket_tags: |
|
192 | if bracket_tags: | |
194 | if subj.startswith('['): |
|
193 | if subj.startswith('['): | |
195 | subj = '[' + ', '.join(bracket_tags) + ': ' + subj[1:] |
|
194 | subj = '[' + ', '.join(bracket_tags) + ': ' + subj[1:] | |
196 | else: |
|
195 | else: | |
197 | subj = '[' + ', '.join(bracket_tags) + '] ' + subj |
|
196 | subj = '[' + ', '.join(bracket_tags) + '] ' + subj | |
198 | return subj |
|
197 | return subj | |
199 |
|
198 | |||
200 | def get_email_tmpl(self, type_, content_type, **kwargs): |
|
199 | def get_email_tmpl(self, type_, content_type, **kwargs): | |
201 | """ |
|
200 | """ | |
202 | return generated template for email based on given type |
|
201 | return generated template for email based on given type | |
203 | """ |
|
202 | """ | |
204 | import kallithea.lib.helpers as h |
|
203 | import kallithea.lib.helpers as h | |
205 |
|
204 | |||
206 | base = 'email/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' + content_type |
|
205 | base = 'email/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' + content_type | |
207 | email_template = self._tmpl_lookup.get_template(base) |
|
206 | email_template = self._tmpl_lookup.get_template(base) | |
208 | # translator and helpers inject |
|
207 | # translator and helpers inject | |
209 | _kwargs = {'_': _, |
|
208 | _kwargs = {'_': _, | |
210 | 'h': h, |
|
209 | 'h': h, | |
211 | 'c': c} |
|
210 | 'c': c} | |
212 | _kwargs.update(kwargs) |
|
211 | _kwargs.update(kwargs) | |
213 | if content_type == 'html': |
|
212 | if content_type == 'html': | |
214 | _kwargs.update({ |
|
213 | _kwargs.update({ | |
215 | "color_text": "#202020", |
|
214 | "color_text": "#202020", | |
216 | "color_emph": "#395fa0", |
|
215 | "color_emph": "#395fa0", | |
217 | "color_link": "#395fa0", |
|
216 | "color_link": "#395fa0", | |
218 | "color_border": "#ddd", |
|
217 | "color_border": "#ddd", | |
219 | "color_background_grey": "#f9f9f9", |
|
218 | "color_background_grey": "#f9f9f9", | |
220 | "color_button": "#395fa0", |
|
219 | "color_button": "#395fa0", | |
221 | "monospace_style": "font-family:Lucida Console,Consolas,Monaco,Inconsolata,Liberation Mono,monospace", |
|
220 | "monospace_style": "font-family:Lucida Console,Consolas,Monaco,Inconsolata,Liberation Mono,monospace", | |
222 | "sans_style": "font-family:Helvetica,Arial,sans-serif", |
|
221 | "sans_style": "font-family:Helvetica,Arial,sans-serif", | |
223 | }) |
|
222 | }) | |
224 | _kwargs.update({ |
|
223 | _kwargs.update({ | |
225 | "default_style": "%(sans_style)s;font-weight:200;font-size:14px;line-height:17px;color:%(color_text)s" % _kwargs, |
|
224 | "default_style": "%(sans_style)s;font-weight:200;font-size:14px;line-height:17px;color:%(color_text)s" % _kwargs, | |
226 | "comment_style": "%(monospace_style)s;white-space:pre-wrap" % _kwargs, |
|
225 | "comment_style": "%(monospace_style)s;white-space:pre-wrap" % _kwargs, | |
227 | "data_style": "border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs, |
|
226 | "data_style": "border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs, | |
228 | "emph_style": "font-weight:600;color:%(color_emph)s" % _kwargs, |
|
227 | "emph_style": "font-weight:600;color:%(color_emph)s" % _kwargs, | |
229 | "link_style": "color:%(color_link)s;text-decoration:none" % _kwargs, |
|
228 | "link_style": "color:%(color_link)s;text-decoration:none" % _kwargs, | |
230 | "link_text_style": "color:%(color_text)s;text-decoration:none;border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs, |
|
229 | "link_text_style": "color:%(color_text)s;text-decoration:none;border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs, | |
231 | }) |
|
230 | }) | |
232 |
|
231 | |||
233 | log.debug('rendering tmpl %s with kwargs %s', base, _kwargs) |
|
232 | log.debug('rendering tmpl %s with kwargs %s', base, _kwargs) | |
234 | return email_template.render_unicode(**_kwargs) |
|
233 | return email_template.render_unicode(**_kwargs) |
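
The subject handling in get_email_description() above folds extra status tags into any leading square bracket so that Gmail keeps threading intact. Below is a minimal standalone sketch of just that merging step, using plain strings instead of the translated LazyString values; it is an illustration, not the actual NotificationModel API.

def merge_bracket_tags(subj, status_change=None, closing_pr=False):
    # Collect the extra tags the same way get_email_description() does.
    bracket_tags = []
    if status_change:
        bracket_tags.append(str(status_change))
    if closing_pr:
        bracket_tags.append('Closing')
    if not bracket_tags:
        return subj
    if subj.startswith('['):
        # Fold the tags into the existing leading bracket.
        return '[' + ', '.join(bracket_tags) + ': ' + subj[1:]
    return '[' + ', '.join(bracket_tags) + '] ' + subj

# merge_bracket_tags('[Review] repo PR #7 "title"', status_change='Approved')
# -> '[Approved: Review] repo PR #7 "title"'
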
@@ -1,698 +1,698 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.model.repo |
|
15 | kallithea.model.repo | |
16 | ~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Repository model for kallithea |
|
18 | Repository model for kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Jun 5, 2010 |
|
22 | :created_on: Jun 5, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 |
|
26 | |||
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 | import logging |
|
29 | import logging | |
30 | import os |
|
30 | import os | |
31 | import shutil |
|
31 | import shutil | |
32 | import traceback |
|
32 | import traceback | |
33 | from datetime import datetime |
|
33 | from datetime import datetime | |
34 |
|
34 | |||
35 | import kallithea.lib.utils2 |
|
35 | import kallithea.lib.utils2 | |
36 | from kallithea.lib import hooks, webutils |
|
36 | from kallithea.lib import hooks, webutils | |
37 | from kallithea.lib.auth import HasRepoPermissionLevel, HasUserGroupPermissionLevel |
|
37 | from kallithea.lib.auth import HasRepoPermissionLevel, HasUserGroupPermissionLevel | |
38 | from kallithea.lib.exceptions import AttachedForksError |
|
38 | from kallithea.lib.exceptions import AttachedForksError | |
39 | from kallithea.lib.utils import is_valid_repo_uri, make_ui |
|
39 | from kallithea.lib.utils import is_valid_repo_uri, make_ui | |
40 | from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix |
|
40 | from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix | |
41 | from kallithea.lib.vcs.backends import get_backend |
|
41 | from kallithea.lib.vcs.backends import get_backend | |
42 | from kallithea.model import db, meta, scm |
|
42 | from kallithea.model import db, meta, scm | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | log = logging.getLogger(__name__) |
|
45 | log = logging.getLogger(__name__) | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | class RepoModel(object): |
|
48 | class RepoModel(object): | |
49 |
|
49 | |||
50 | def _create_default_perms(self, repository, private): |
|
50 | def _create_default_perms(self, repository, private): | |
51 | # create default permission |
|
51 | # create default permission | |
52 | default = 'repository.read' |
|
52 | default = 'repository.read' | |
53 | def_user = db.User.get_default_user() |
|
53 | def_user = db.User.get_default_user() | |
54 | for p in def_user.user_perms: |
|
54 | for p in def_user.user_perms: | |
55 | if p.permission.permission_name.startswith('repository.'): |
|
55 | if p.permission.permission_name.startswith('repository.'): | |
56 | default = p.permission.permission_name |
|
56 | default = p.permission.permission_name | |
57 | break |
|
57 | break | |
58 |
|
58 | |||
59 | default_perm = 'repository.none' if private else default |
|
59 | default_perm = 'repository.none' if private else default | |
60 |
|
60 | |||
61 | repo_to_perm = db.UserRepoToPerm() |
|
61 | repo_to_perm = db.UserRepoToPerm() | |
62 | repo_to_perm.permission = db.Permission.get_by_key(default_perm) |
|
62 | repo_to_perm.permission = db.Permission.get_by_key(default_perm) | |
63 |
|
63 | |||
64 | repo_to_perm.repository = repository |
|
64 | repo_to_perm.repository = repository | |
65 | repo_to_perm.user_id = def_user.user_id |
|
65 | repo_to_perm.user_id = def_user.user_id | |
66 | meta.Session().add(repo_to_perm) |
|
66 | meta.Session().add(repo_to_perm) | |
67 |
|
67 | |||
68 | return repo_to_perm |
|
68 | return repo_to_perm | |
69 |
|
69 | |||
70 | @LazyProperty |
|
70 | @LazyProperty | |
71 | def repos_path(self): |
|
71 | def repos_path(self): | |
72 | """ |
|
72 | """ | |
73 | Gets the repositories root path from database |
|
73 | Gets the repositories root path from database | |
74 | """ |
|
74 | """ | |
75 |
|
75 | |||
76 | q = db.Ui.query().filter(db.Ui.ui_key == '/').one() |
|
76 | q = db.Ui.query().filter(db.Ui.ui_key == '/').one() | |
77 | return q.ui_value |
|
77 | return q.ui_value | |
78 |
|
78 | |||
79 | def get(self, repo_id): |
|
79 | def get(self, repo_id): | |
80 | repo = db.Repository.query() \ |
|
80 | repo = db.Repository.query() \ | |
81 | .filter(db.Repository.repo_id == repo_id) |
|
81 | .filter(db.Repository.repo_id == repo_id) | |
82 | return repo.scalar() |
|
82 | return repo.scalar() | |
83 |
|
83 | |||
84 | def get_repo(self, repository): |
|
84 | def get_repo(self, repository): | |
85 | return db.Repository.guess_instance(repository) |
|
85 | return db.Repository.guess_instance(repository) | |
86 |
|
86 | |||
87 | def get_by_repo_name(self, repo_name): |
|
87 | def get_by_repo_name(self, repo_name): | |
88 | repo = db.Repository.query() \ |
|
88 | repo = db.Repository.query() \ | |
89 | .filter(db.Repository.repo_name == repo_name) |
|
89 | .filter(db.Repository.repo_name == repo_name) | |
90 | return repo.scalar() |
|
90 | return repo.scalar() | |
91 |
|
91 | |||
92 | @classmethod |
|
92 | @classmethod | |
93 | def _render_datatable(cls, tmpl, *args, **kwargs): |
|
93 | def _render_datatable(cls, tmpl, *args, **kwargs): | |
94 | from tg import app_globals, request |
|
94 | from tg import app_globals, request | |
95 | from tg import tmpl_context as c |
|
95 | from tg import tmpl_context as c | |
96 | from tg.i18n import ugettext as _ |
|
96 | from tg.i18n import ugettext as _ | |
97 |
|
97 | |||
98 | import kallithea.lib.helpers as h |
|
98 | import kallithea.lib.helpers as h | |
99 |
|
99 | |||
100 | _tmpl_lookup = app_globals.mako_lookup |
|
100 | _tmpl_lookup = app_globals.mako_lookup | |
101 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') |
|
101 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') | |
102 |
|
102 | |||
103 | tmpl = template.get_def(tmpl) |
|
103 | tmpl = template.get_def(tmpl) | |
104 | kwargs.update(dict(_=_, h=h, c=c, request=request)) |
|
104 | kwargs.update(dict(_=_, h=h, c=c, request=request)) | |
105 | return tmpl.render_unicode(*args, **kwargs) |
|
105 | return tmpl.render_unicode(*args, **kwargs) | |
106 |
|
106 | |||
107 | def get_repos_as_dict(self, repos_list, repo_groups_list=None, |
|
107 | def get_repos_as_dict(self, repos_list, repo_groups_list=None, | |
108 | admin=False, |
|
108 | admin=False, | |
109 | short_name=False): |
|
109 | short_name=False): | |
110 | """Return repository list for use by DataTable. |
|
110 | """Return repository list for use by DataTable. | |
111 | repos_list: list of repositories - but will be filtered for read permission. |
|
111 | repos_list: list of repositories - but will be filtered for read permission. | |
112 | repo_groups_list: added at top of list without permission check. |
|
112 | repo_groups_list: added at top of list without permission check. | |
113 | admin: return data for action column. |
|
113 | admin: return data for action column. | |
114 | """ |
|
114 | """ | |
115 | _render = self._render_datatable |
|
115 | _render = self._render_datatable | |
116 | from tg import request |
|
116 | from tg import request | |
117 | from tg import tmpl_context as c |
|
117 | from tg import tmpl_context as c | |
118 |
|
118 | |||
119 | import kallithea.lib.helpers as h |
|
119 | import kallithea.lib.helpers as h | |
120 |
|
120 | |||
121 | def repo_lnk(name, rtype, rstate, private, fork_of): |
|
121 | def repo_lnk(name, rtype, rstate, private, fork_of): | |
122 | return _render('repo_name', name, rtype, rstate, private, fork_of, |
|
122 | return _render('repo_name', name, rtype, rstate, private, fork_of, | |
123 | short_name=short_name) |
|
123 | short_name=short_name) | |
124 |
|
124 | |||
125 | def following(repo_id, is_following): |
|
125 | def following(repo_id, is_following): | |
126 | return _render('following', repo_id, is_following) |
|
126 | return _render('following', repo_id, is_following) | |
127 |
|
127 | |||
128 | def last_change(last_change): |
|
128 | def last_change(last_change): | |
129 | return _render("last_change", last_change) |
|
129 | return _render("last_change", last_change) | |
130 |
|
130 | |||
131 | def rss_lnk(repo_name): |
|
131 | def rss_lnk(repo_name): | |
132 | return _render("rss", repo_name) |
|
132 | return _render("rss", repo_name) | |
133 |
|
133 | |||
134 | def atom_lnk(repo_name): |
|
134 | def atom_lnk(repo_name): | |
135 | return _render("atom", repo_name) |
|
135 | return _render("atom", repo_name) | |
136 |
|
136 | |||
137 | def last_rev(repo_name, cs_cache): |
|
137 | def last_rev(repo_name, cs_cache): | |
138 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
138 | return _render('revision', repo_name, cs_cache.get('revision'), | |
139 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
139 | cs_cache.get('raw_id'), cs_cache.get('author'), | |
140 | cs_cache.get('message')) |
|
140 | cs_cache.get('message')) | |
141 |
|
141 | |||
142 | def desc(desc): |
|
142 | def desc(desc): | |
143 | import kallithea.lib.helpers as h |
|
143 | import kallithea.lib.helpers as h | |
144 | return h.urlify_text(desc, truncate=80, stylize=c.visual.stylify_metalabels) |
|
144 | return h.urlify_text(desc, truncate=80, stylize=c.visual.stylify_metalabels) | |
145 |
|
145 | |||
146 | def state(repo_state): |
|
146 | def state(repo_state): | |
147 | return _render("repo_state", repo_state) |
|
147 | return _render("repo_state", repo_state) | |
148 |
|
148 | |||
149 | def repo_actions(repo_name): |
|
149 | def repo_actions(repo_name): | |
150 | return _render('repo_actions', repo_name) |
|
150 | return _render('repo_actions', repo_name) | |
151 |
|
151 | |||
152 | def owner_actions(owner_id, username): |
|
152 | def owner_actions(owner_id, username): | |
153 | return _render('user_name', owner_id, username) |
|
153 | return _render('user_name', owner_id, username) | |
154 |
|
154 | |||
155 | repos_data = [] |
|
155 | repos_data = [] | |
156 |
|
156 | |||
157 | for gr in repo_groups_list or []: |
|
157 | for gr in repo_groups_list or []: | |
158 | repos_data.append(dict( |
|
158 | repos_data.append(dict( | |
159 | raw_name='\0' + webutils.html_escape(gr.name), # sort before repositories |
|
159 | raw_name='\0' + webutils.html_escape(gr.name), # sort before repositories | |
160 | just_name=webutils.html_escape(gr.name), |
|
160 | just_name=webutils.html_escape(gr.name), | |
161 | name=_render('group_name_html', group_name=gr.group_name, name=gr.name), |
|
161 | name=_render('group_name_html', group_name=gr.group_name, name=gr.name), | |
162 | desc=desc(gr.group_description))) |
|
162 | desc=desc(gr.group_description))) | |
163 |
|
163 | |||
164 | for repo in repos_list: |
|
164 | for repo in repos_list: | |
165 | if not HasRepoPermissionLevel('read')(repo.repo_name, 'get_repos_as_dict check'): |
|
165 | if not HasRepoPermissionLevel('read')(repo.repo_name, 'get_repos_as_dict check'): | |
166 | continue |
|
166 | continue | |
167 | cs_cache = repo.changeset_cache |
|
167 | cs_cache = repo.changeset_cache | |
168 | row = { |
|
168 | row = { | |
169 | "raw_name": webutils.html_escape(repo.repo_name), |
|
169 | "raw_name": webutils.html_escape(repo.repo_name), | |
170 | "just_name": webutils.html_escape(repo.just_name), |
|
170 | "just_name": webutils.html_escape(repo.just_name), | |
171 | "name": repo_lnk(repo.repo_name, repo.repo_type, |
|
171 | "name": repo_lnk(repo.repo_name, repo.repo_type, | |
172 | repo.repo_state, repo.private, repo.fork), |
|
172 | repo.repo_state, repo.private, repo.fork), | |
173 | "following": following( |
|
173 | "following": following( | |
174 | repo.repo_id, |
|
174 | repo.repo_id, | |
175 | scm.ScmModel().is_following_repo(repo.repo_name, request.authuser.user_id), |
|
175 | scm.ScmModel().is_following_repo(repo.repo_name, request.authuser.user_id), | |
176 | ), |
|
176 | ), | |
177 | "last_change_iso": repo.last_db_change.isoformat(), |
|
177 | "last_change_iso": repo.last_db_change.isoformat(), | |
178 | "last_change": last_change(repo.last_db_change), |
|
178 | "last_change": last_change(repo.last_db_change), | |
179 | "last_changeset": last_rev(repo.repo_name, cs_cache), |
|
179 | "last_changeset": last_rev(repo.repo_name, cs_cache), | |
180 | "last_rev_raw": cs_cache.get('revision'), |
|
180 | "last_rev_raw": cs_cache.get('revision'), | |
181 | "desc": desc(repo.description), |
|
181 | "desc": desc(repo.description), | |
182 | "owner": h.person(repo.owner), |
|
182 | "owner": h.person(repo.owner), | |
183 | "state": state(repo.repo_state), |
|
183 | "state": state(repo.repo_state), | |
184 | "rss": rss_lnk(repo.repo_name), |
|
184 | "rss": rss_lnk(repo.repo_name), | |
185 | "atom": atom_lnk(repo.repo_name), |
|
185 | "atom": atom_lnk(repo.repo_name), | |
186 | } |
|
186 | } | |
187 | if admin: |
|
187 | if admin: | |
188 | row.update({ |
|
188 | row.update({ | |
189 | "action": repo_actions(repo.repo_name), |
|
189 | "action": repo_actions(repo.repo_name), | |
190 | "owner": owner_actions(repo.owner_id, |
|
190 | "owner": owner_actions(repo.owner_id, | |
191 | h.person(repo.owner)) |
|
191 | h.person(repo.owner)) | |
192 | }) |
|
192 | }) | |
193 | repos_data.append(row) |
|
193 | repos_data.append(row) | |
194 |
|
194 | |||
195 | return { |
|
195 | return { | |
196 | "sort": "name", |
|
196 | "sort": "name", | |
197 | "dir": "asc", |
|
197 | "dir": "asc", | |
198 | "records": repos_data |
|
198 | "records": repos_data | |
199 | } |
|
199 | } | |
200 |
|
200 | |||
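
For reference, get_repos_as_dict() above returns a plain dict consumed by the DataTable JavaScript. The sketch below only illustrates the shape of that payload; the HTML fragments are normally produced by _render_datatable() and every value here is made up.

example_payload = {
    "sort": "name",
    "dir": "asc",
    "records": [
        {
            "raw_name": "mygroup/myrepo",        # escaped name, used for sorting
            "just_name": "myrepo",
            "name": "<a ...>myrepo</a>",         # rendered 'repo_name' def
            "following": "<span ...></span>",    # rendered 'following' toggle
            "last_change_iso": "2014-07-01T12:00:00",
            "last_change": "<span ...></span>",
            "last_changeset": "<a ...>r42</a>",  # rendered 'revision' def
            "last_rev_raw": 42,
            "desc": "urlified description",
            "owner": "Jane Doe",
            "state": "<span ...></span>",
            "rss": "<a ...>rss</a>",
            "atom": "<a ...>atom</a>",
            # with admin=True the row also carries "action" and a rendered "owner"
        },
    ],
}
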
201 | def _get_defaults(self, repo_name): |
|
201 | def _get_defaults(self, repo_name): | |
202 | """ |
|
202 | """ | |
203 | Gets information about repository, and returns a dict for |
|
203 | Gets information about repository, and returns a dict for | |
204 | usage in forms |
|
204 | usage in forms | |
205 |
|
205 | |||
206 | :param repo_name: |
|
206 | :param repo_name: | |
207 | """ |
|
207 | """ | |
208 |
|
208 | |||
209 | repo_info = db.Repository.get_by_repo_name(repo_name) |
|
209 | repo_info = db.Repository.get_by_repo_name(repo_name) | |
210 |
|
210 | |||
211 | if repo_info is None: |
|
211 | if repo_info is None: | |
212 | return None |
|
212 | return None | |
213 |
|
213 | |||
214 | defaults = repo_info.get_dict() |
|
214 | defaults = repo_info.get_dict() | |
215 | defaults['repo_name'] = repo_info.just_name |
|
215 | defaults['repo_name'] = repo_info.just_name | |
216 | defaults['repo_group'] = repo_info.group_id |
|
216 | defaults['repo_group'] = repo_info.group_id | |
217 |
|
217 | |||
218 | for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'), |
|
218 | for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'), | |
219 | (1, 'repo_description'), |
|
219 | (1, 'repo_description'), | |
220 | (1, 'repo_landing_rev'), (0, 'clone_uri'), |
|
220 | (1, 'repo_landing_rev'), (0, 'clone_uri'), | |
221 | (1, 'repo_private'), (1, 'repo_enable_statistics')]: |
|
221 | (1, 'repo_private'), (1, 'repo_enable_statistics')]: | |
222 | attr = k |
|
222 | attr = k | |
223 | if strip: |
|
223 | if strip: | |
224 | attr = remove_prefix(k, 'repo_') |
|
224 | attr = remove_prefix(k, 'repo_') | |
225 |
|
225 | |||
226 | val = defaults[attr] |
|
226 | val = defaults[attr] | |
227 | if k == 'repo_landing_rev': |
|
227 | if k == 'repo_landing_rev': | |
228 | val = ':'.join(defaults[attr]) |
|
228 | val = ':'.join(defaults[attr]) | |
229 | defaults[k] = val |
|
229 | defaults[k] = val | |
230 | if k == 'clone_uri': |
|
230 | if k == 'clone_uri': | |
231 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
231 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden | |
232 |
|
232 | |||
233 | # fill owner |
|
233 | # fill owner | |
234 | if repo_info.owner: |
|
234 | if repo_info.owner: | |
235 | defaults.update({'owner': repo_info.owner.username}) |
|
235 | defaults.update({'owner': repo_info.owner.username}) | |
236 | else: |
|
236 | else: | |
237 | replacement_user = db.User.query().filter(db.User.admin == |
|
237 | replacement_user = db.User.query().filter(db.User.admin == | |
238 | True).first().username |
|
238 | True).first().username | |
239 | defaults.update({'owner': replacement_user}) |
|
239 | defaults.update({'owner': replacement_user}) | |
240 |
|
240 | |||
241 | # fill repository users |
|
241 | # fill repository users | |
242 | for p in repo_info.repo_to_perm: |
|
242 | for p in repo_info.repo_to_perm: | |
243 | defaults.update({'u_perm_%s' % p.user.username: |
|
243 | defaults.update({'u_perm_%s' % p.user.username: | |
244 | p.permission.permission_name}) |
|
244 | p.permission.permission_name}) | |
245 |
|
245 | |||
246 | # fill repository groups |
|
246 | # fill repository groups | |
247 | for p in repo_info.users_group_to_perm: |
|
247 | for p in repo_info.users_group_to_perm: | |
248 | defaults.update({'g_perm_%s' % p.users_group.users_group_name: |
|
248 | defaults.update({'g_perm_%s' % p.users_group.users_group_name: | |
249 | p.permission.permission_name}) |
|
249 | p.permission.permission_name}) | |
250 |
|
250 | |||
251 | return defaults |
|
251 | return defaults | |
252 |
|
252 | |||
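
The strip/key loop in _get_defaults() above copies database attribute names into the repo_-prefixed field names the settings form expects. A small self-contained illustration with assumed values; remove_prefix here is only a stand-in for the behaviour assumed of kallithea.lib.utils2.remove_prefix.

def remove_prefix(s, prefix):
    # stand-in: drop the prefix when present, otherwise return s unchanged
    return s[len(prefix):] if s.startswith(prefix) else s

defaults = {'description': 'demo repo', 'private': False, 'landing_rev': ('rev', 'tip')}
for strip, k in [(1, 'repo_description'), (1, 'repo_private'), (1, 'repo_landing_rev')]:
    attr = remove_prefix(k, 'repo_') if strip else k
    val = defaults[attr]
    if k == 'repo_landing_rev':
        val = ':'.join(defaults[attr])   # stored as a pair, presented as 'rev:tip'
    defaults[k] = val

# defaults now also contains 'repo_description', 'repo_private' and
# 'repo_landing_rev' == 'rev:tip', ready for the settings form.
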
253 | def update(self, repo, **kwargs): |
|
253 | def update(self, repo, **kwargs): | |
254 | try: |
|
254 | try: | |
255 | cur_repo = db.Repository.guess_instance(repo) |
|
255 | cur_repo = db.Repository.guess_instance(repo) | |
256 | org_repo_name = cur_repo.repo_name |
|
256 | org_repo_name = cur_repo.repo_name | |
257 | if 'owner' in kwargs: |
|
257 | if 'owner' in kwargs: | |
258 | cur_repo.owner = db.User.get_by_username(kwargs['owner']) |
|
258 | cur_repo.owner = db.User.get_by_username(kwargs['owner']) | |
259 |
|
259 | |||
260 | if 'repo_group' in kwargs: |
|
260 | if 'repo_group' in kwargs: | |
261 | assert kwargs['repo_group'] != '-1', kwargs # RepoForm should have converted to None |
|
261 | assert kwargs['repo_group'] != '-1', kwargs # RepoForm should have converted to None | |
262 | cur_repo.group = db.RepoGroup.get(kwargs['repo_group']) |
|
262 | cur_repo.group = db.RepoGroup.get(kwargs['repo_group']) | |
263 | cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name) |
|
263 | cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name) | |
264 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) |
|
264 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) | |
265 | for k in ['repo_enable_downloads', |
|
265 | for k in ['repo_enable_downloads', | |
266 | 'repo_description', |
|
266 | 'repo_description', | |
267 | 'repo_landing_rev', |
|
267 | 'repo_landing_rev', | |
268 | 'repo_private', |
|
268 | 'repo_private', | |
269 | 'repo_enable_statistics', |
|
269 | 'repo_enable_statistics', | |
270 | ]: |
|
270 | ]: | |
271 | if k in kwargs: |
|
271 | if k in kwargs: | |
272 | setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k]) |
|
272 | setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k]) | |
273 | clone_uri = kwargs.get('clone_uri') |
|
273 | clone_uri = kwargs.get('clone_uri') | |
274 | if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden: |
|
274 | if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden: | |
275 | # clone_uri is modified - if given a value, check it is valid |
|
275 | # clone_uri is modified - if given a value, check it is valid | |
276 | if clone_uri != '': |
|
276 | if clone_uri != '': | |
277 | # will raise exception on error |
|
277 | # will raise exception on error | |
278 | is_valid_repo_uri(cur_repo.repo_type, clone_uri, make_ui()) |
|
278 | is_valid_repo_uri(cur_repo.repo_type, clone_uri, make_ui()) | |
279 | cur_repo.clone_uri = clone_uri |
|
279 | cur_repo.clone_uri = clone_uri | |
280 |
|
280 | |||
281 | if 'repo_name' in kwargs: |
|
281 | if 'repo_name' in kwargs: | |
282 | repo_name = kwargs['repo_name'] |
|
282 | repo_name = kwargs['repo_name'] | |
283 | if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name: |
|
283 | if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name: | |
284 | raise Exception('invalid repo name %s' % repo_name) |
|
284 | raise Exception('invalid repo name %s' % repo_name) | |
285 | cur_repo.repo_name = cur_repo.get_new_name(repo_name) |
|
285 | cur_repo.repo_name = cur_repo.get_new_name(repo_name) | |
286 |
|
286 | |||
287 | # if private flag is set, reset default permission to NONE |
|
287 | # if private flag is set, reset default permission to NONE | |
288 | if kwargs.get('repo_private'): |
|
288 | if kwargs.get('repo_private'): | |
289 | EMPTY_PERM = 'repository.none' |
|
289 | EMPTY_PERM = 'repository.none' | |
290 | RepoModel().grant_user_permission( |
|
290 | RepoModel().grant_user_permission( | |
291 | repo=cur_repo, user='default', perm=EMPTY_PERM |
|
291 | repo=cur_repo, user='default', perm=EMPTY_PERM | |
292 | ) |
|
292 | ) | |
293 | # handle extra fields |
|
293 | # handle extra fields | |
294 | for field in [k for k in kwargs if k.startswith(db.RepositoryField.PREFIX)]: |
|
294 | for field in [k for k in kwargs if k.startswith(db.RepositoryField.PREFIX)]: | |
295 | k = db.RepositoryField.un_prefix_key(field) |
|
295 | k = db.RepositoryField.un_prefix_key(field) | |
296 | ex_field = db.RepositoryField.get_by_key_name(key=k, repo=cur_repo) |
|
296 | ex_field = db.RepositoryField.get_by_key_name(key=k, repo=cur_repo) | |
297 | if ex_field: |
|
297 | if ex_field: | |
298 | ex_field.field_value = kwargs[field] |
|
298 | ex_field.field_value = kwargs[field] | |
299 |
|
299 | |||
300 | if org_repo_name != cur_repo.repo_name: |
|
300 | if org_repo_name != cur_repo.repo_name: | |
301 | # rename repository |
|
301 | # rename repository | |
302 | self._rename_filesystem_repo(old=org_repo_name, new=cur_repo.repo_name) |
|
302 | self._rename_filesystem_repo(old=org_repo_name, new=cur_repo.repo_name) | |
303 |
|
303 | |||
304 | return cur_repo |
|
304 | return cur_repo | |
305 | except Exception: |
|
305 | except Exception: | |
306 | log.error(traceback.format_exc()) |
|
306 | log.error(traceback.format_exc()) | |
307 | raise |
|
307 | raise | |
308 |
|
308 | |||
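
A hedged caller-side sketch of update() above. The repository name and field values are invented; the keyword names are the ones the method actually inspects, and the commit at the end is left to the caller because update() itself only modifies the session.

from kallithea.model import meta
from kallithea.model.repo import RepoModel

model = RepoModel()
model.update(
    'mygroup/myrepo',                # Repository instance, id or name
    repo_description='Updated description',
    repo_private=True,               # also resets the default user to repository.none
    repo_landing_rev='rev:tip',
    clone_uri='',                    # empty string clears the URI without validation
)
meta.Session().commit()
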
309 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
309 | def _create_repo(self, repo_name, repo_type, description, owner, | |
310 | private=False, clone_uri=None, repo_group=None, |
|
310 | private=False, clone_uri=None, repo_group=None, | |
311 | landing_rev='rev:tip', fork_of=None, |
|
311 | landing_rev='rev:tip', fork_of=None, | |
312 | copy_fork_permissions=False, enable_statistics=False, |
|
312 | copy_fork_permissions=False, enable_statistics=False, | |
313 | enable_downloads=False, |
|
313 | enable_downloads=False, | |
314 | copy_group_permissions=False, state=db.Repository.STATE_PENDING): |
|
314 | copy_group_permissions=False, state=db.Repository.STATE_PENDING): | |
315 | """ |
|
315 | """ | |
316 | Create repository inside database with PENDING state. This should only be |
|
316 | Create repository inside database with PENDING state. This should only be | |
317 | executed by create(), with the exception of importing existing repos. |

317 | executed by create(), with the exception of importing existing repos. | |
318 |
|
318 | |||
319 | """ |
|
319 | """ | |
320 | owner = db.User.guess_instance(owner) |
|
320 | owner = db.User.guess_instance(owner) | |
321 | fork_of = db.Repository.guess_instance(fork_of) |
|
321 | fork_of = db.Repository.guess_instance(fork_of) | |
322 | repo_group = db.RepoGroup.guess_instance(repo_group) |
|
322 | repo_group = db.RepoGroup.guess_instance(repo_group) | |
323 | try: |
|
323 | try: | |
324 | # repo name is just a name of repository |
|
324 | # repo name is just a name of repository | |
325 | # while repo_name_full is a fully qualified name that is combined |

325 | # while repo_name_full is a fully qualified name that is combined | |
326 | # with name and path of group |
|
326 | # with name and path of group | |
327 | repo_name_full = repo_name |
|
327 | repo_name_full = repo_name | |
328 | repo_name = repo_name.split(kallithea.URL_SEP)[-1] |
|
328 | repo_name = repo_name.split(kallithea.URL_SEP)[-1] | |
329 | if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name: |
|
329 | if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name: | |
330 | raise Exception('invalid repo name %s' % repo_name) |
|
330 | raise Exception('invalid repo name %s' % repo_name) | |
331 |
|
331 | |||
332 | new_repo = db.Repository() |
|
332 | new_repo = db.Repository() | |
333 | new_repo.repo_state = state |
|
333 | new_repo.repo_state = state | |
334 | new_repo.enable_statistics = False |
|
334 | new_repo.enable_statistics = False | |
335 | new_repo.repo_name = repo_name_full |
|
335 | new_repo.repo_name = repo_name_full | |
336 | new_repo.repo_type = repo_type |
|
336 | new_repo.repo_type = repo_type | |
337 | new_repo.owner = owner |
|
337 | new_repo.owner = owner | |
338 | new_repo.group = repo_group |
|
338 | new_repo.group = repo_group | |
339 | new_repo.description = description or repo_name |
|
339 | new_repo.description = description or repo_name | |
340 | new_repo.private = private |
|
340 | new_repo.private = private | |
341 | if clone_uri: |
|
341 | if clone_uri: | |
342 | # will raise exception on error |
|
342 | # will raise exception on error | |
343 | is_valid_repo_uri(repo_type, clone_uri, make_ui()) |
|
343 | is_valid_repo_uri(repo_type, clone_uri, make_ui()) | |
344 | new_repo.clone_uri = clone_uri |
|
344 | new_repo.clone_uri = clone_uri | |
345 | new_repo.landing_rev = landing_rev |
|
345 | new_repo.landing_rev = landing_rev | |
346 |
|
346 | |||
347 | new_repo.enable_statistics = enable_statistics |
|
347 | new_repo.enable_statistics = enable_statistics | |
348 | new_repo.enable_downloads = enable_downloads |
|
348 | new_repo.enable_downloads = enable_downloads | |
349 |
|
349 | |||
350 | if fork_of: |
|
350 | if fork_of: | |
351 | parent_repo = fork_of |
|
351 | parent_repo = fork_of | |
352 | new_repo.fork = parent_repo |
|
352 | new_repo.fork = parent_repo | |
353 |
|
353 | |||
354 | meta.Session().add(new_repo) |
|
354 | meta.Session().add(new_repo) | |
355 |
|
355 | |||
356 | if fork_of and copy_fork_permissions: |
|
356 | if fork_of and copy_fork_permissions: | |
357 | repo = fork_of |
|
357 | repo = fork_of | |
358 | user_perms = db.UserRepoToPerm.query() \ |
|
358 | user_perms = db.UserRepoToPerm.query() \ | |
359 | .filter(db.UserRepoToPerm.repository == repo).all() |
|
359 | .filter(db.UserRepoToPerm.repository == repo).all() | |
360 | group_perms = db.UserGroupRepoToPerm.query() \ |
|
360 | group_perms = db.UserGroupRepoToPerm.query() \ | |
361 | .filter(db.UserGroupRepoToPerm.repository == repo).all() |
|
361 | .filter(db.UserGroupRepoToPerm.repository == repo).all() | |
362 |
|
362 | |||
363 | for perm in user_perms: |
|
363 | for perm in user_perms: | |
364 | db.UserRepoToPerm.create(perm.user, new_repo, perm.permission) |
|
364 | db.UserRepoToPerm.create(perm.user, new_repo, perm.permission) | |
365 |
|
365 | |||
366 | for perm in group_perms: |
|
366 | for perm in group_perms: | |
367 | db.UserGroupRepoToPerm.create(perm.users_group, new_repo, |
|
367 | db.UserGroupRepoToPerm.create(perm.users_group, new_repo, | |
368 | perm.permission) |
|
368 | perm.permission) | |
369 |
|
369 | |||
370 | elif repo_group and copy_group_permissions: |
|
370 | elif repo_group and copy_group_permissions: | |
371 |
|
371 | |||
372 | user_perms = db.UserRepoGroupToPerm.query() \ |
|
372 | user_perms = db.UserRepoGroupToPerm.query() \ | |
373 | .filter(db.UserRepoGroupToPerm.group == repo_group).all() |
|
373 | .filter(db.UserRepoGroupToPerm.group == repo_group).all() | |
374 |
|
374 | |||
375 | group_perms = db.UserGroupRepoGroupToPerm.query() \ |
|
375 | group_perms = db.UserGroupRepoGroupToPerm.query() \ | |
376 | .filter(db.UserGroupRepoGroupToPerm.group == repo_group).all() |
|
376 | .filter(db.UserGroupRepoGroupToPerm.group == repo_group).all() | |
377 |
|
377 | |||
378 | for perm in user_perms: |
|
378 | for perm in user_perms: | |
379 | perm_name = perm.permission.permission_name.replace('group.', 'repository.') |
|
379 | perm_name = perm.permission.permission_name.replace('group.', 'repository.') | |
380 | perm_obj = db.Permission.get_by_key(perm_name) |
|
380 | perm_obj = db.Permission.get_by_key(perm_name) | |
381 | db.UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
381 | db.UserRepoToPerm.create(perm.user, new_repo, perm_obj) | |
382 |
|
382 | |||
383 | for perm in group_perms: |
|
383 | for perm in group_perms: | |
384 | perm_name = perm.permission.permission_name.replace('group.', 'repository.') |
|
384 | perm_name = perm.permission.permission_name.replace('group.', 'repository.') | |
385 | perm_obj = db.Permission.get_by_key(perm_name) |
|
385 | perm_obj = db.Permission.get_by_key(perm_name) | |
386 | db.UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj) |
|
386 | db.UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj) | |
387 |
|
387 | |||
388 | else: |
|
388 | else: | |
389 | self._create_default_perms(new_repo, private) |
|
389 | self._create_default_perms(new_repo, private) | |
390 |
|
390 | |||
391 | # now automatically start following this repository as owner |
|
391 | # now automatically start following this repository as owner | |
392 | scm.ScmModel().toggle_following_repo(new_repo.repo_id, owner.user_id) |
|
392 | scm.ScmModel().toggle_following_repo(new_repo.repo_id, owner.user_id) | |
393 | # we need to flush here to check that the database won't |

393 | # we need to flush here to check that the database won't | |
394 | # throw any exceptions; filesystem dirs are created at the very end |

394 | # throw any exceptions; filesystem dirs are created at the very end | |
395 | meta.Session().flush() |
|
395 | meta.Session().flush() | |
396 | return new_repo |
|
396 | return new_repo | |
397 | except Exception: |
|
397 | except Exception: | |
398 | log.error(traceback.format_exc()) |
|
398 | log.error(traceback.format_exc()) | |
399 | raise |
|
399 | raise | |
400 |
|
400 | |||
401 | def create(self, form_data, cur_user): |
|
401 | def create(self, form_data, cur_user): | |
402 | """ |
|
402 | """ | |
403 | Create repository using celery tasks |
|
403 | Create repository using celery tasks | |
404 |
|
404 | |||
405 | :param form_data: |
|
405 | :param form_data: | |
406 | :param cur_user: |
|
406 | :param cur_user: | |
407 | """ |
|
407 | """ | |
408 | from kallithea. |
|
408 | from kallithea.model import async_tasks | |
409 | return tasks.create_repo(form_data, cur_user) |
|
409 | return async_tasks.create_repo(form_data, cur_user) | |
410 |
|
410 | |||
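
The functional change in this hunk is the import above: create() (and create_fork() further down) now resolve the task module as kallithea.model.async_tasks and call it under that name. A hypothetical caller-side sketch follows; the form_data keys are assumptions, not taken from this diff.

from kallithea.model.repo import RepoModel

form_data = {
    'repo_name': 'myrepo',           # assumed form fields
    'repo_type': 'hg',
    'repo_description': '',
}
# Delegates to async_tasks.create_repo(), which may run in a celery worker.
RepoModel().create(form_data, cur_user='admin')
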
411 | def _update_permissions(self, repo, perms_new=None, perms_updates=None, |
|
411 | def _update_permissions(self, repo, perms_new=None, perms_updates=None, | |
412 | check_perms=True): |
|
412 | check_perms=True): | |
413 | if not perms_new: |
|
413 | if not perms_new: | |
414 | perms_new = [] |
|
414 | perms_new = [] | |
415 | if not perms_updates: |
|
415 | if not perms_updates: | |
416 | perms_updates = [] |
|
416 | perms_updates = [] | |
417 |
|
417 | |||
418 | # update permissions |
|
418 | # update permissions | |
419 | for member, perm, member_type in perms_updates: |
|
419 | for member, perm, member_type in perms_updates: | |
420 | if member_type == 'user': |
|
420 | if member_type == 'user': | |
421 | # this updates existing one |
|
421 | # this updates existing one | |
422 | self.grant_user_permission( |
|
422 | self.grant_user_permission( | |
423 | repo=repo, user=member, perm=perm |
|
423 | repo=repo, user=member, perm=perm | |
424 | ) |
|
424 | ) | |
425 | else: |
|
425 | else: | |
426 | # check if we have permissions to alter this usergroup's access |
|
426 | # check if we have permissions to alter this usergroup's access | |
427 | if not check_perms or HasUserGroupPermissionLevel('read')(member): |
|
427 | if not check_perms or HasUserGroupPermissionLevel('read')(member): | |
428 | self.grant_user_group_permission( |
|
428 | self.grant_user_group_permission( | |
429 | repo=repo, group_name=member, perm=perm |
|
429 | repo=repo, group_name=member, perm=perm | |
430 | ) |
|
430 | ) | |
431 | # set new permissions |
|
431 | # set new permissions | |
432 | for member, perm, member_type in perms_new: |
|
432 | for member, perm, member_type in perms_new: | |
433 | if member_type == 'user': |
|
433 | if member_type == 'user': | |
434 | self.grant_user_permission( |
|
434 | self.grant_user_permission( | |
435 | repo=repo, user=member, perm=perm |
|
435 | repo=repo, user=member, perm=perm | |
436 | ) |
|
436 | ) | |
437 | else: |
|
437 | else: | |
438 | # check if we have permissions to alter this usergroup's access |
|
438 | # check if we have permissions to alter this usergroup's access | |
439 | if not check_perms or HasUserGroupPermissionLevel('read')(member): |
|
439 | if not check_perms or HasUserGroupPermissionLevel('read')(member): | |
440 | self.grant_user_group_permission( |
|
440 | self.grant_user_group_permission( | |
441 | repo=repo, group_name=member, perm=perm |
|
441 | repo=repo, group_name=member, perm=perm | |
442 | ) |
|
442 | ) | |
443 |
|
443 | |||
444 | def create_fork(self, form_data, cur_user): |
|
444 | def create_fork(self, form_data, cur_user): | |
445 | """ |
|
445 | """ | |
446 | Simple wrapper around the celery task for fork creation |

446 | Simple wrapper around the celery task for fork creation | |
447 |
|
447 | |||
448 | :param form_data: |
|
448 | :param form_data: | |
449 | :param cur_user: |
|
449 | :param cur_user: | |
450 | """ |
|
450 | """ | |
451 |
from kallithea. |
|
451 | from kallithea.model import async_tasks | |
452 | return tasks.create_repo_fork(form_data, cur_user) |
|
452 | return async_tasks.create_repo_fork(form_data, cur_user) | |
453 |
|
453 | |||
454 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): |
|
454 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): | |
455 | """ |
|
455 | """ | |
456 | Delete given repository, forks parameter defines what to do with |

456 | Delete given repository, forks parameter defines what to do with | |
457 | attached forks. Throws AttachedForksError if deleted repo has attached |
|
457 | attached forks. Throws AttachedForksError if deleted repo has attached | |
458 | forks |
|
458 | forks | |
459 |
|
459 | |||
460 | :param repo: |
|
460 | :param repo: | |
461 | :param forks: str 'delete' or 'detach' |
|
461 | :param forks: str 'delete' or 'detach' | |
462 | :param fs_remove: remove(archive) repo from filesystem |
|
462 | :param fs_remove: remove(archive) repo from filesystem | |
463 | """ |
|
463 | """ | |
464 | if not cur_user: |
|
464 | if not cur_user: | |
465 | cur_user = getattr(get_current_authuser(), 'username', None) |
|
465 | cur_user = getattr(get_current_authuser(), 'username', None) | |
466 | repo = db.Repository.guess_instance(repo) |
|
466 | repo = db.Repository.guess_instance(repo) | |
467 | if repo is not None: |
|
467 | if repo is not None: | |
468 | if forks == 'detach': |
|
468 | if forks == 'detach': | |
469 | for r in repo.forks: |
|
469 | for r in repo.forks: | |
470 | r.fork = None |
|
470 | r.fork = None | |
471 | elif forks == 'delete': |
|
471 | elif forks == 'delete': | |
472 | for r in repo.forks: |
|
472 | for r in repo.forks: | |
473 | self.delete(r, forks='delete') |
|
473 | self.delete(r, forks='delete') | |
474 | elif [f for f in repo.forks]: |
|
474 | elif [f for f in repo.forks]: | |
475 | raise AttachedForksError() |
|
475 | raise AttachedForksError() | |
476 |
|
476 | |||
477 | old_repo_dict = repo.get_dict() |
|
477 | old_repo_dict = repo.get_dict() | |
478 | try: |
|
478 | try: | |
479 | meta.Session().delete(repo) |
|
479 | meta.Session().delete(repo) | |
480 | if fs_remove: |
|
480 | if fs_remove: | |
481 | self._delete_filesystem_repo(repo) |
|
481 | self._delete_filesystem_repo(repo) | |
482 | else: |
|
482 | else: | |
483 | log.debug('skipping removal from filesystem') |
|
483 | log.debug('skipping removal from filesystem') | |
484 | hooks.log_delete_repository(old_repo_dict, |
|
484 | hooks.log_delete_repository(old_repo_dict, | |
485 | deleted_by=cur_user) |
|
485 | deleted_by=cur_user) | |
486 | except Exception: |
|
486 | except Exception: | |
487 | log.error(traceback.format_exc()) |
|
487 | log.error(traceback.format_exc()) | |
488 | raise |
|
488 | raise | |
489 |
|
489 | |||
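
A hedged usage sketch of delete() above; the repository name is illustrative. The AttachedForksError check happens before anything is removed from the session or the filesystem, so it is safe to retry with an explicit forks= argument.

from kallithea.lib.exceptions import AttachedForksError
from kallithea.model import meta
from kallithea.model.repo import RepoModel

model = RepoModel()
try:
    model.delete('mygroup/myrepo')                    # raises if forks are still attached
except AttachedForksError:
    model.delete('mygroup/myrepo', forks='detach')    # or forks='delete' to cascade
meta.Session().commit()
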
490 | def grant_user_permission(self, repo, user, perm): |
|
490 | def grant_user_permission(self, repo, user, perm): | |
491 | """ |
|
491 | """ | |
492 | Grant permission for user on given repository, or update existing one |
|
492 | Grant permission for user on given repository, or update existing one | |
493 | if found |
|
493 | if found | |
494 |
|
494 | |||
495 | :param repo: Instance of Repository, repository_id, or repository name |
|
495 | :param repo: Instance of Repository, repository_id, or repository name | |
496 | :param user: Instance of User, user_id or username |
|
496 | :param user: Instance of User, user_id or username | |
497 | :param perm: Instance of Permission, or permission_name |
|
497 | :param perm: Instance of Permission, or permission_name | |
498 | """ |
|
498 | """ | |
499 | user = db.User.guess_instance(user) |
|
499 | user = db.User.guess_instance(user) | |
500 | repo = db.Repository.guess_instance(repo) |
|
500 | repo = db.Repository.guess_instance(repo) | |
501 | permission = db.Permission.guess_instance(perm) |
|
501 | permission = db.Permission.guess_instance(perm) | |
502 |
|
502 | |||
503 | # check if we have that permission already |
|
503 | # check if we have that permission already | |
504 | obj = db.UserRepoToPerm.query() \ |
|
504 | obj = db.UserRepoToPerm.query() \ | |
505 | .filter(db.UserRepoToPerm.user == user) \ |
|
505 | .filter(db.UserRepoToPerm.user == user) \ | |
506 | .filter(db.UserRepoToPerm.repository == repo) \ |
|
506 | .filter(db.UserRepoToPerm.repository == repo) \ | |
507 | .scalar() |
|
507 | .scalar() | |
508 | if obj is None: |
|
508 | if obj is None: | |
509 | # create new ! |
|
509 | # create new ! | |
510 | obj = db.UserRepoToPerm() |
|
510 | obj = db.UserRepoToPerm() | |
511 | meta.Session().add(obj) |
|
511 | meta.Session().add(obj) | |
512 | obj.repository = repo |
|
512 | obj.repository = repo | |
513 | obj.user = user |
|
513 | obj.user = user | |
514 | obj.permission = permission |
|
514 | obj.permission = permission | |
515 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
515 | log.debug('Granted perm %s to %s on %s', perm, user, repo) | |
516 | return obj |
|
516 | return obj | |
517 |
|
517 | |||
518 | def revoke_user_permission(self, repo, user): |
|
518 | def revoke_user_permission(self, repo, user): | |
519 | """ |
|
519 | """ | |
520 | Revoke permission for user on given repository |
|
520 | Revoke permission for user on given repository | |
521 |
|
521 | |||
522 | :param repo: Instance of Repository, repository_id, or repository name |
|
522 | :param repo: Instance of Repository, repository_id, or repository name | |
523 | :param user: Instance of User, user_id or username |
|
523 | :param user: Instance of User, user_id or username | |
524 | """ |
|
524 | """ | |
525 |
|
525 | |||
526 | user = db.User.guess_instance(user) |
|
526 | user = db.User.guess_instance(user) | |
527 | repo = db.Repository.guess_instance(repo) |
|
527 | repo = db.Repository.guess_instance(repo) | |
528 |
|
528 | |||
529 | obj = db.UserRepoToPerm.query() \ |
|
529 | obj = db.UserRepoToPerm.query() \ | |
530 | .filter(db.UserRepoToPerm.repository == repo) \ |
|
530 | .filter(db.UserRepoToPerm.repository == repo) \ | |
531 | .filter(db.UserRepoToPerm.user == user) \ |
|
531 | .filter(db.UserRepoToPerm.user == user) \ | |
532 | .scalar() |
|
532 | .scalar() | |
533 | if obj is not None: |
|
533 | if obj is not None: | |
534 | meta.Session().delete(obj) |
|
534 | meta.Session().delete(obj) | |
535 | log.debug('Revoked perm on %s on %s', repo, user) |
|
535 | log.debug('Revoked perm on %s on %s', repo, user) | |
536 |
|
536 | |||
537 | def grant_user_group_permission(self, repo, group_name, perm): |
|
537 | def grant_user_group_permission(self, repo, group_name, perm): | |
538 | """ |
|
538 | """ | |
539 | Grant permission for user group on given repository, or update |
|
539 | Grant permission for user group on given repository, or update | |
540 | existing one if found |
|
540 | existing one if found | |
541 |
|
541 | |||
542 | :param repo: Instance of Repository, repository_id, or repository name |
|
542 | :param repo: Instance of Repository, repository_id, or repository name | |
543 | :param group_name: Instance of UserGroup, users_group_id, |
|
543 | :param group_name: Instance of UserGroup, users_group_id, | |
544 | or user group name |
|
544 | or user group name | |
545 | :param perm: Instance of Permission, or permission_name |
|
545 | :param perm: Instance of Permission, or permission_name | |
546 | """ |
|
546 | """ | |
547 | repo = db.Repository.guess_instance(repo) |
|
547 | repo = db.Repository.guess_instance(repo) | |
548 | group_name = db.UserGroup.guess_instance(group_name) |
|
548 | group_name = db.UserGroup.guess_instance(group_name) | |
549 | permission = db.Permission.guess_instance(perm) |
|
549 | permission = db.Permission.guess_instance(perm) | |
550 |
|
550 | |||
551 | # check if we have that permission already |
|
551 | # check if we have that permission already | |
552 | obj = db.UserGroupRepoToPerm.query() \ |
|
552 | obj = db.UserGroupRepoToPerm.query() \ | |
553 | .filter(db.UserGroupRepoToPerm.users_group == group_name) \ |
|
553 | .filter(db.UserGroupRepoToPerm.users_group == group_name) \ | |
554 | .filter(db.UserGroupRepoToPerm.repository == repo) \ |
|
554 | .filter(db.UserGroupRepoToPerm.repository == repo) \ | |
555 | .scalar() |
|
555 | .scalar() | |
556 |
|
556 | |||
557 | if obj is None: |
|
557 | if obj is None: | |
558 | # create new |
|
558 | # create new | |
559 | obj = db.UserGroupRepoToPerm() |
|
559 | obj = db.UserGroupRepoToPerm() | |
560 | meta.Session().add(obj) |
|
560 | meta.Session().add(obj) | |
561 |
|
561 | |||
562 | obj.repository = repo |
|
562 | obj.repository = repo | |
563 | obj.users_group = group_name |
|
563 | obj.users_group = group_name | |
564 | obj.permission = permission |
|
564 | obj.permission = permission | |
565 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
565 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) | |
566 | return obj |
|
566 | return obj | |
567 |
|
567 | |||
568 | def revoke_user_group_permission(self, repo, group_name): |
|
568 | def revoke_user_group_permission(self, repo, group_name): | |
569 | """ |
|
569 | """ | |
570 | Revoke permission for user group on given repository |
|
570 | Revoke permission for user group on given repository | |
571 |
|
571 | |||
572 | :param repo: Instance of Repository, repository_id, or repository name |
|
572 | :param repo: Instance of Repository, repository_id, or repository name | |
573 | :param group_name: Instance of UserGroup, users_group_id, |
|
573 | :param group_name: Instance of UserGroup, users_group_id, | |
574 | or user group name |
|
574 | or user group name | |
575 | """ |
|
575 | """ | |
576 | repo = db.Repository.guess_instance(repo) |
|
576 | repo = db.Repository.guess_instance(repo) | |
577 | group_name = db.UserGroup.guess_instance(group_name) |
|
577 | group_name = db.UserGroup.guess_instance(group_name) | |
578 |
|
578 | |||
579 | obj = db.UserGroupRepoToPerm.query() \ |
|
579 | obj = db.UserGroupRepoToPerm.query() \ | |
580 | .filter(db.UserGroupRepoToPerm.repository == repo) \ |
|
580 | .filter(db.UserGroupRepoToPerm.repository == repo) \ | |
581 | .filter(db.UserGroupRepoToPerm.users_group == group_name) \ |
|
581 | .filter(db.UserGroupRepoToPerm.users_group == group_name) \ | |
582 | .scalar() |
|
582 | .scalar() | |
583 | if obj is not None: |
|
583 | if obj is not None: | |
584 | meta.Session().delete(obj) |
|
584 | meta.Session().delete(obj) | |
585 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
585 | log.debug('Revoked perm to %s on %s', repo, group_name) | |
586 |
|
586 | |||
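
The four grant/revoke helpers above share the same upsert pattern: look up the permission row, create it if missing, otherwise update it in place; revoking deletes the row when it exists and is a no-op otherwise. A hedged sketch with invented repository and user group names; the permission strings are the ones used elsewhere in this file.

from kallithea.model.repo import RepoModel

model = RepoModel()
# Upsert: creates a UserRepoToPerm row or updates the existing one.
model.grant_user_permission(repo='mygroup/myrepo', user='default', perm='repository.none')
model.grant_user_group_permission(repo='mygroup/myrepo', group_name='reviewers', perm='repository.read')
# Revoke: deletes the row when present, silently does nothing otherwise.
model.revoke_user_permission(repo='mygroup/myrepo', user='default')
model.revoke_user_group_permission(repo='mygroup/myrepo', group_name='reviewers')
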
587 | def delete_stats(self, repo_name): |
|
587 | def delete_stats(self, repo_name): | |
588 | """ |
|
588 | """ | |
589 | removes stats for given repo |
|
589 | removes stats for given repo | |
590 |
|
590 | |||
591 | :param repo_name: |
|
591 | :param repo_name: | |
592 | """ |
|
592 | """ | |
593 | repo = db.Repository.guess_instance(repo_name) |
|
593 | repo = db.Repository.guess_instance(repo_name) | |
594 | try: |
|
594 | try: | |
595 | obj = db.Statistics.query() \ |
|
595 | obj = db.Statistics.query() \ | |
596 | .filter(db.Statistics.repository == repo).scalar() |
|
596 | .filter(db.Statistics.repository == repo).scalar() | |
597 | if obj is not None: |
|
597 | if obj is not None: | |
598 | meta.Session().delete(obj) |
|
598 | meta.Session().delete(obj) | |
599 | except Exception: |
|
599 | except Exception: | |
600 | log.error(traceback.format_exc()) |
|
600 | log.error(traceback.format_exc()) | |
601 | raise |
|
601 | raise | |
602 |
|
602 | |||
603 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
603 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, | |
604 | clone_uri=None, repo_store_location=None): |
|
604 | clone_uri=None, repo_store_location=None): | |
605 | """ |
|
605 | """ | |
606 | Makes repository on filesystem. Operation is group aware, meaning that it will create |
|
606 | Makes repository on filesystem. Operation is group aware, meaning that it will create | |
607 | a repository within a group, and alter the paths accordingly to the group location. |
|
607 | a repository within a group, and alter the paths accordingly to the group location. | |
608 |
|
608 | |||
609 | Note: clone_uri is low level and not validated - it might be a file system path used for validated cloning |
|
609 | Note: clone_uri is low level and not validated - it might be a file system path used for validated cloning | |
610 | """ |
|
610 | """ | |
611 | from kallithea.lib.utils import is_valid_repo, is_valid_repo_group |
|
611 | from kallithea.lib.utils import is_valid_repo, is_valid_repo_group | |
612 |
|
612 | |||
613 | if '/' in repo_name: |
|
613 | if '/' in repo_name: | |
614 | raise ValueError('repo_name must not contain groups got `%s`' % repo_name) |
|
614 | raise ValueError('repo_name must not contain groups got `%s`' % repo_name) | |
615 |
|
615 | |||
616 | if isinstance(repo_group, db.RepoGroup): |
|
616 | if isinstance(repo_group, db.RepoGroup): | |
617 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
617 | new_parent_path = os.sep.join(repo_group.full_path_splitted) | |
618 | else: |
|
618 | else: | |
619 | new_parent_path = repo_group or '' |
|
619 | new_parent_path = repo_group or '' | |
620 |
|
620 | |||
621 | if repo_store_location: |
|
621 | if repo_store_location: | |
622 | _paths = [repo_store_location] |
|
622 | _paths = [repo_store_location] | |
623 | else: |
|
623 | else: | |
624 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
624 | _paths = [self.repos_path, new_parent_path, repo_name] | |
625 | repo_path = os.path.join(*_paths) |
|
625 | repo_path = os.path.join(*_paths) | |
626 |
|
626 | |||
627 | # check if this path is not a repository |
|
627 | # check if this path is not a repository | |
628 | if is_valid_repo(repo_path, self.repos_path): |
|
628 | if is_valid_repo(repo_path, self.repos_path): | |
629 | raise Exception('This path %s is a valid repository' % repo_path) |
|
629 | raise Exception('This path %s is a valid repository' % repo_path) | |
630 |
|
630 | |||
631 | # check if this path is a group |
|
631 | # check if this path is a group | |
632 | if is_valid_repo_group(repo_path, self.repos_path): |
|
632 | if is_valid_repo_group(repo_path, self.repos_path): | |
633 | raise Exception('This path %s is a valid group' % repo_path) |
|
633 | raise Exception('This path %s is a valid group' % repo_path) | |
634 |
|
634 | |||
635 | log.info('creating repo %s in %s from url: `%s`', |
|
635 | log.info('creating repo %s in %s from url: `%s`', | |
636 | repo_name, repo_path, |
|
636 | repo_name, repo_path, | |
637 | obfuscate_url_pw(clone_uri)) |
|
637 | obfuscate_url_pw(clone_uri)) | |
638 |
|
638 | |||
639 | backend = get_backend(repo_type) |
|
639 | backend = get_backend(repo_type) | |
640 |
|
640 | |||
641 | if repo_type == 'hg': |
|
641 | if repo_type == 'hg': | |
642 | baseui = make_ui() |
|
642 | baseui = make_ui() | |
643 | # patch and reset hooks section of UI config to not run any |
|
643 | # patch and reset hooks section of UI config to not run any | |
644 | # hooks on creating remote repo |
|
644 | # hooks on creating remote repo | |
645 | for k, v in baseui.configitems('hooks'): |
|
645 | for k, v in baseui.configitems('hooks'): | |
646 | baseui.setconfig('hooks', k, None) |
|
646 | baseui.setconfig('hooks', k, None) | |
647 |
|
647 | |||
648 | repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui) |
|
648 | repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui) | |
649 | elif repo_type == 'git': |
|
649 | elif repo_type == 'git': | |
650 | repo = backend(repo_path, create=True, src_url=clone_uri, bare=True) |
|
650 | repo = backend(repo_path, create=True, src_url=clone_uri, bare=True) | |
651 | # add kallithea hook into this repo |
|
651 | # add kallithea hook into this repo | |
652 | scm.ScmModel().install_git_hooks(repo) |
|
652 | scm.ScmModel().install_git_hooks(repo) | |
653 | else: |
|
653 | else: | |
654 | raise Exception('Not supported repo_type %s expected hg/git' % repo_type) |
|
654 | raise Exception('Not supported repo_type %s expected hg/git' % repo_type) | |
655 |
|
655 | |||
656 | log.debug('Created repo %s with %s backend', |
|
656 | log.debug('Created repo %s with %s backend', | |
657 | repo_name, repo_type) |
|
657 | repo_name, repo_type) | |
658 | return repo |
|
658 | return repo | |
659 |
|
659 | |||
660 | def _rename_filesystem_repo(self, old, new): |
|
660 | def _rename_filesystem_repo(self, old, new): | |
661 | """ |
|
661 | """ | |
662 | renames repository on filesystem |
|
662 | renames repository on filesystem | |
663 |
|
663 | |||
664 | :param old: old name |
|
664 | :param old: old name | |
665 | :param new: new name |
|
665 | :param new: new name | |
666 | """ |
|
666 | """ | |
667 | log.info('renaming repo from %s to %s', old, new) |
|
667 | log.info('renaming repo from %s to %s', old, new) | |
668 |
|
668 | |||
669 | old_path = os.path.join(self.repos_path, old) |
|
669 | old_path = os.path.join(self.repos_path, old) | |
670 | new_path = os.path.join(self.repos_path, new) |
|
670 | new_path = os.path.join(self.repos_path, new) | |
671 | if os.path.isdir(new_path): |
|
671 | if os.path.isdir(new_path): | |
672 | raise Exception( |
|
672 | raise Exception( | |
673 | 'Was trying to rename to already existing dir %s' % new_path |
|
673 | 'Was trying to rename to already existing dir %s' % new_path | |
674 | ) |
|
674 | ) | |
675 | shutil.move(old_path, new_path) |
|
675 | shutil.move(old_path, new_path) | |
676 |
|
676 | |||
677 | def _delete_filesystem_repo(self, repo): |
|
677 | def _delete_filesystem_repo(self, repo): | |
678 | """ |
|
678 | """ | |
679 | removes repo from filesystem, the removal is actually done by |
|
679 | removes repo from filesystem, the removal is actually done by | |
680 | renaming dir to a 'rm__*' prefix which Kallithea will skip. |
|
680 | renaming dir to a 'rm__*' prefix which Kallithea will skip. | |
681 | It can be undeleted later by reverting the rename. |
|
681 | It can be undeleted later by reverting the rename. | |
682 |
|
682 | |||
683 | :param repo: repo object |
|
683 | :param repo: repo object | |
684 | """ |
|
684 | """ | |
685 | rm_path = os.path.join(self.repos_path, repo.repo_name) |
|
685 | rm_path = os.path.join(self.repos_path, repo.repo_name) | |
686 | log.info("Removing %s", rm_path) |
|
686 | log.info("Removing %s", rm_path) | |
687 |
|
687 | |||
688 | _now = datetime.now() |
|
688 | _now = datetime.now() | |
689 | _ms = str(_now.microsecond).rjust(6, '0') |
|
689 | _ms = str(_now.microsecond).rjust(6, '0') | |
690 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), |
|
690 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), | |
691 | repo.just_name) |
|
691 | repo.just_name) | |
692 | if repo.group: |
|
692 | if repo.group: | |
693 | args = repo.group.full_path_splitted + [_d] |
|
693 | args = repo.group.full_path_splitted + [_d] | |
694 | _d = os.path.join(*args) |
|
694 | _d = os.path.join(*args) | |
695 | if os.path.exists(rm_path): |
|
695 | if os.path.exists(rm_path): | |
696 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
696 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) | |
697 | else: |
|
697 | else: | |
698 | log.error("Can't find repo to delete in %r", rm_path) |
|
698 | log.error("Can't find repo to delete in %r", rm_path) |
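For illustration, a small sketch of the trash-directory name built by `_delete_filesystem_repo`; the repo name and base path are hypothetical:

```python
import os
from datetime import datetime

# The 'rm__' prefix is what makes Kallithea skip the directory when scanning
# for repositories, so the data stays on disk and can be restored by
# renaming it back.
now = datetime.now()
ms = str(now.microsecond).rjust(6, '0')
trash_name = 'rm__%s__%s' % (now.strftime('%Y%m%d_%H%M%S_' + ms), 'myrepo')
trash_path = os.path.join('/srv/repos', 'mygroup', trash_name)
# e.g. /srv/repos/mygroup/rm__20240101_120000_000123__myrepo
```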
@@ -1,486 +1,485 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.model.user |
|
15 | kallithea.model.user | |
16 | ~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | users model for Kallithea |
|
18 | users model for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Apr 9, 2010 |
|
22 | :created_on: Apr 9, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | import hashlib |
|
29 | import hashlib | |
30 | import hmac |
|
30 | import hmac | |
31 | import logging |
|
31 | import logging | |
32 | import time |
|
32 | import time | |
33 | import traceback |
|
33 | import traceback | |
34 |
|
34 | |||
35 | from sqlalchemy.exc import DatabaseError |
|
35 | from sqlalchemy.exc import DatabaseError | |
36 | from tg import config |
|
36 | from tg import config | |
37 | from tg.i18n import ugettext as _ |
|
37 | from tg.i18n import ugettext as _ | |
38 |
|
38 | |||
39 | from kallithea.lib import hooks, webutils |
|
39 | from kallithea.lib import hooks, webutils | |
40 | from kallithea.lib.exceptions import DefaultUserException, UserOwnsReposException |
|
40 | from kallithea.lib.exceptions import DefaultUserException, UserOwnsReposException | |
41 | from kallithea.lib.utils2 import check_password, generate_api_key, get_crypt_password, get_current_authuser |
|
41 | from kallithea.lib.utils2 import check_password, generate_api_key, get_crypt_password, get_current_authuser | |
42 | from kallithea.model import db, forms, meta |
|
42 | from kallithea.model import db, forms, meta | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | log = logging.getLogger(__name__) |
|
45 | log = logging.getLogger(__name__) | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | class UserModel(object): |
|
48 | class UserModel(object): | |
49 | password_reset_token_lifetime = 86400 # 24 hours |
|
49 | password_reset_token_lifetime = 86400 # 24 hours | |
50 |
|
50 | |||
51 | def get(self, user_id): |
|
51 | def get(self, user_id): | |
52 | user = db.User.query() |
|
52 | user = db.User.query() | |
53 | return user.get(user_id) |
|
53 | return user.get(user_id) | |
54 |
|
54 | |||
55 | def get_user(self, user): |
|
55 | def get_user(self, user): | |
56 | return db.User.guess_instance(user) |
|
56 | return db.User.guess_instance(user) | |
57 |
|
57 | |||
58 | def create(self, form_data, cur_user=None): |
|
58 | def create(self, form_data, cur_user=None): | |
59 | if not cur_user: |
|
59 | if not cur_user: | |
60 | cur_user = getattr(get_current_authuser(), 'username', None) |
|
60 | cur_user = getattr(get_current_authuser(), 'username', None) | |
61 |
|
61 | |||
62 | _fd = form_data |
|
62 | _fd = form_data | |
63 | user_data = { |
|
63 | user_data = { | |
64 | 'username': _fd['username'], |
|
64 | 'username': _fd['username'], | |
65 | 'password': _fd['password'], |
|
65 | 'password': _fd['password'], | |
66 | 'email': _fd['email'], |
|
66 | 'email': _fd['email'], | |
67 | 'firstname': _fd['firstname'], |
|
67 | 'firstname': _fd['firstname'], | |
68 | 'lastname': _fd['lastname'], |
|
68 | 'lastname': _fd['lastname'], | |
69 | 'active': _fd['active'], |
|
69 | 'active': _fd['active'], | |
70 | 'admin': False |
|
70 | 'admin': False | |
71 | } |
|
71 | } | |
72 | # raises UserCreationError if it's not allowed |
|
72 | # raises UserCreationError if it's not allowed | |
73 | hooks.check_allowed_create_user(user_data, cur_user) |
|
73 | hooks.check_allowed_create_user(user_data, cur_user) | |
74 |
|
74 | |||
75 | new_user = db.User() |
|
75 | new_user = db.User() | |
76 | for k, v in form_data.items(): |
|
76 | for k, v in form_data.items(): | |
77 | if k == 'password': |
|
77 | if k == 'password': | |
78 | v = get_crypt_password(v) |
|
78 | v = get_crypt_password(v) | |
79 | if k == 'firstname': |
|
79 | if k == 'firstname': | |
80 | k = 'name' |
|
80 | k = 'name' | |
81 | setattr(new_user, k, v) |
|
81 | setattr(new_user, k, v) | |
82 |
|
82 | |||
83 | new_user.api_key = generate_api_key() |
|
83 | new_user.api_key = generate_api_key() | |
84 | meta.Session().add(new_user) |
|
84 | meta.Session().add(new_user) | |
85 | meta.Session().flush() # make database assign new_user.user_id |
|
85 | meta.Session().flush() # make database assign new_user.user_id | |
86 |
|
86 | |||
87 | hooks.log_create_user(new_user.get_dict(), cur_user) |
|
87 | hooks.log_create_user(new_user.get_dict(), cur_user) | |
88 | return new_user |
|
88 | return new_user | |
89 |
|
89 | |||
90 | def create_or_update(self, username, password, email, firstname='', |
|
90 | def create_or_update(self, username, password, email, firstname='', | |
91 | lastname='', active=True, admin=False, |
|
91 | lastname='', active=True, admin=False, | |
92 | extern_type=None, extern_name=None, cur_user=None): |
|
92 | extern_type=None, extern_name=None, cur_user=None): | |
93 | """ |
|
93 | """ | |
94 | Creates a new instance if not found, or updates current one |
|
94 | Creates a new instance if not found, or updates current one | |
95 |
|
95 | |||
96 | :param username: |
|
96 | :param username: | |
97 | :param password: |
|
97 | :param password: | |
98 | :param email: |
|
98 | :param email: | |
99 | :param active: |
|
99 | :param active: | |
100 | :param firstname: |
|
100 | :param firstname: | |
101 | :param lastname: |
|
101 | :param lastname: | |
102 | :param active: |
|
102 | :param active: | |
103 | :param admin: |
|
103 | :param admin: | |
104 | :param extern_name: |
|
104 | :param extern_name: | |
105 | :param extern_type: |
|
105 | :param extern_type: | |
106 | :param cur_user: |
|
106 | :param cur_user: | |
107 | """ |
|
107 | """ | |
108 | if not cur_user: |
|
108 | if not cur_user: | |
109 | cur_user = getattr(get_current_authuser(), 'username', None) |
|
109 | cur_user = getattr(get_current_authuser(), 'username', None) | |
110 |
|
110 | |||
111 | user_data = { |
|
111 | user_data = { | |
112 | 'username': username, 'password': password, |
|
112 | 'username': username, 'password': password, | |
113 | 'email': email, 'firstname': firstname, 'lastname': lastname, |
|
113 | 'email': email, 'firstname': firstname, 'lastname': lastname, | |
114 | 'active': active, 'admin': admin |
|
114 | 'active': active, 'admin': admin | |
115 | } |
|
115 | } | |
116 | # raises UserCreationError if it's not allowed |
|
116 | # raises UserCreationError if it's not allowed | |
117 | hooks.check_allowed_create_user(user_data, cur_user) |
|
117 | hooks.check_allowed_create_user(user_data, cur_user) | |
118 |
|
118 | |||
119 | log.debug('Checking for %s account in Kallithea database', username) |
|
119 | log.debug('Checking for %s account in Kallithea database', username) | |
120 | user = db.User.get_by_username(username, case_insensitive=True) |
|
120 | user = db.User.get_by_username(username, case_insensitive=True) | |
121 | if user is None: |
|
121 | if user is None: | |
122 | log.debug('creating new user %s', username) |
|
122 | log.debug('creating new user %s', username) | |
123 | new_user = db.User() |
|
123 | new_user = db.User() | |
124 | edit = False |
|
124 | edit = False | |
125 | else: |
|
125 | else: | |
126 | log.debug('updating user %s', username) |
|
126 | log.debug('updating user %s', username) | |
127 | new_user = user |
|
127 | new_user = user | |
128 | edit = True |
|
128 | edit = True | |
129 |
|
129 | |||
130 | try: |
|
130 | try: | |
131 | new_user.username = username |
|
131 | new_user.username = username | |
132 | new_user.admin = admin |
|
132 | new_user.admin = admin | |
133 | new_user.email = email |
|
133 | new_user.email = email | |
134 | new_user.active = active |
|
134 | new_user.active = active | |
135 | new_user.extern_name = extern_name |
|
135 | new_user.extern_name = extern_name | |
136 | new_user.extern_type = extern_type |
|
136 | new_user.extern_type = extern_type | |
137 | new_user.name = firstname |
|
137 | new_user.name = firstname | |
138 | new_user.lastname = lastname |
|
138 | new_user.lastname = lastname | |
139 |
|
139 | |||
140 | if not edit: |
|
140 | if not edit: | |
141 | new_user.api_key = generate_api_key() |
|
141 | new_user.api_key = generate_api_key() | |
142 |
|
142 | |||
143 | # set password only when creating a user or when the password is changed |
|
143 | # set password only when creating a user or when the password is changed | |
144 | password_change = new_user.password and \ |
|
144 | password_change = new_user.password and \ | |
145 | not check_password(password, new_user.password) |
|
145 | not check_password(password, new_user.password) | |
146 | if not edit or password_change: |
|
146 | if not edit or password_change: | |
147 | reason = 'new password' if edit else 'new user' |
|
147 | reason = 'new password' if edit else 'new user' | |
148 | log.debug('Updating password reason=>%s', reason) |
|
148 | log.debug('Updating password reason=>%s', reason) | |
149 | new_user.password = get_crypt_password(password) \ |
|
149 | new_user.password = get_crypt_password(password) \ | |
150 | if password else '' |
|
150 | if password else '' | |
151 |
|
151 | |||
152 | if user is None: |
|
152 | if user is None: | |
153 | meta.Session().add(new_user) |
|
153 | meta.Session().add(new_user) | |
154 | meta.Session().flush() # make database assign new_user.user_id |
|
154 | meta.Session().flush() # make database assign new_user.user_id | |
155 |
|
155 | |||
156 | if not edit: |
|
156 | if not edit: | |
157 | hooks.log_create_user(new_user.get_dict(), cur_user) |
|
157 | hooks.log_create_user(new_user.get_dict(), cur_user) | |
158 |
|
158 | |||
159 | return new_user |
|
159 | return new_user | |
160 | except (DatabaseError,): |
|
160 | except (DatabaseError,): | |
161 | log.error(traceback.format_exc()) |
|
161 | log.error(traceback.format_exc()) | |
162 | raise |
|
162 | raise | |
163 |
|
163 | |||
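A hypothetical caller of `create_or_update`, matching the signature documented above (all values are illustrative); as with the other callers, committing the session is left to the caller:

```python
# Idempotently ensure an account exists, then persist the row.
user = UserModel().create_or_update(
    username='jdoe',
    password='s3cret',
    email='jdoe@example.com',
    firstname='Jane',
    lastname='Doe',
    active=True,
    admin=False,
)
meta.Session().commit()  # persist the new or updated user
```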
164 | def create_registration(self, form_data): |
|
164 | def create_registration(self, form_data): | |
165 | from kallithea.model import notification |
|
165 | from kallithea.model import notification | |
166 |
|
166 | |||
167 | form_data['admin'] = False |
|
167 | form_data['admin'] = False | |
168 | form_data['extern_type'] = db.User.DEFAULT_AUTH_TYPE |
|
168 | form_data['extern_type'] = db.User.DEFAULT_AUTH_TYPE | |
169 | form_data['extern_name'] = '' |
|
169 | form_data['extern_name'] = '' | |
170 | new_user = self.create(form_data) |
|
170 | new_user = self.create(form_data) | |
171 |
|
171 | |||
172 | # notification to admins |
|
172 | # notification to admins | |
173 | edit_url = webutils.canonical_url('edit_user', id=new_user.user_id) |
|
173 | edit_url = webutils.canonical_url('edit_user', id=new_user.user_id) | |
174 | email_kwargs = { |
|
174 | email_kwargs = { | |
175 | 'registered_user_url': edit_url, |
|
175 | 'registered_user_url': edit_url, | |
176 | 'new_username': new_user.username, |
|
176 | 'new_username': new_user.username, | |
177 | 'new_email': new_user.email, |
|
177 | 'new_email': new_user.email, | |
178 | 'new_full_name': new_user.full_name} |
|
178 | 'new_full_name': new_user.full_name} | |
179 | notification.NotificationModel().create(created_by=new_user, |
|
179 | notification.NotificationModel().create(created_by=new_user, | |
180 | body=None, recipients=None, |
|
180 | body=None, recipients=None, | |
181 | type_=notification.NotificationModel.TYPE_REGISTRATION, |
|
181 | type_=notification.NotificationModel.TYPE_REGISTRATION, | |
182 | email_kwargs=email_kwargs) |
|
182 | email_kwargs=email_kwargs) | |
183 |
|
183 | |||
184 | def update(self, user_id, form_data, skip_attrs=None): |
|
184 | def update(self, user_id, form_data, skip_attrs=None): | |
185 | skip_attrs = skip_attrs or [] |
|
185 | skip_attrs = skip_attrs or [] | |
186 | user = self.get(user_id) |
|
186 | user = self.get(user_id) | |
187 | if user.is_default_user: |
|
187 | if user.is_default_user: | |
188 | raise DefaultUserException( |
|
188 | raise DefaultUserException( | |
189 | _("You can't edit this user since it's " |
|
189 | _("You can't edit this user since it's " | |
190 | "crucial for the entire application")) |
|
190 | "crucial for the entire application")) | |
191 |
|
191 | |||
192 | for k, v in form_data.items(): |
|
192 | for k, v in form_data.items(): | |
193 | if k in skip_attrs: |
|
193 | if k in skip_attrs: | |
194 | continue |
|
194 | continue | |
195 | if k == 'new_password' and v: |
|
195 | if k == 'new_password' and v: | |
196 | user.password = get_crypt_password(v) |
|
196 | user.password = get_crypt_password(v) | |
197 | else: |
|
197 | else: | |
198 | # legacy: the ORM model stores firstname as 'name'; |
|
198 | # legacy: the ORM model stores firstname as 'name'; | |
199 | # this needs a proper refactor |
|
199 | # this needs a proper refactor | |
200 | if k == 'firstname': |
|
200 | if k == 'firstname': | |
201 | k = 'name' |
|
201 | k = 'name' | |
202 | setattr(user, k, v) |
|
202 | setattr(user, k, v) | |
203 |
|
203 | |||
204 | def update_user(self, user, **kwargs): |
|
204 | def update_user(self, user, **kwargs): | |
205 | user = db.User.guess_instance(user) |
|
205 | user = db.User.guess_instance(user) | |
206 | if user.is_default_user: |
|
206 | if user.is_default_user: | |
207 | raise DefaultUserException( |
|
207 | raise DefaultUserException( | |
208 | _("You can't edit this user since it's" |
|
208 | _("You can't edit this user since it's" | |
209 | " crucial for the entire application") |
|
209 | " crucial for the entire application") | |
210 | ) |
|
210 | ) | |
211 |
|
211 | |||
212 | for k, v in kwargs.items(): |
|
212 | for k, v in kwargs.items(): | |
213 | if k == 'password' and v: |
|
213 | if k == 'password' and v: | |
214 | v = get_crypt_password(v) |
|
214 | v = get_crypt_password(v) | |
215 |
|
215 | |||
216 | setattr(user, k, v) |
|
216 | setattr(user, k, v) | |
217 | return user |
|
217 | return user | |
218 |
|
218 | |||
219 | def delete(self, user, cur_user=None): |
|
219 | def delete(self, user, cur_user=None): | |
220 | if cur_user is None: |
|
220 | if cur_user is None: | |
221 | cur_user = getattr(get_current_authuser(), 'username', None) |
|
221 | cur_user = getattr(get_current_authuser(), 'username', None) | |
222 | user = db.User.guess_instance(user) |
|
222 | user = db.User.guess_instance(user) | |
223 |
|
223 | |||
224 | if user.is_default_user: |
|
224 | if user.is_default_user: | |
225 | raise DefaultUserException( |
|
225 | raise DefaultUserException( | |
226 | _("You can't remove this user since it is" |
|
226 | _("You can't remove this user since it is" | |
227 | " crucial for the entire application")) |
|
227 | " crucial for the entire application")) | |
228 | if user.repositories: |
|
228 | if user.repositories: | |
229 | repos = [x.repo_name for x in user.repositories] |
|
229 | repos = [x.repo_name for x in user.repositories] | |
230 | raise UserOwnsReposException( |
|
230 | raise UserOwnsReposException( | |
231 | _('User "%s" still owns %s repositories and cannot be ' |
|
231 | _('User "%s" still owns %s repositories and cannot be ' | |
232 | 'removed. Switch owners or remove those repositories: %s') |
|
232 | 'removed. Switch owners or remove those repositories: %s') | |
233 | % (user.username, len(repos), ', '.join(repos))) |
|
233 | % (user.username, len(repos), ', '.join(repos))) | |
234 | if user.repo_groups: |
|
234 | if user.repo_groups: | |
235 | repogroups = [x.group_name for x in user.repo_groups] |
|
235 | repogroups = [x.group_name for x in user.repo_groups] | |
236 | raise UserOwnsReposException(_( |
|
236 | raise UserOwnsReposException(_( | |
237 | 'User "%s" still owns %s repository groups and cannot be ' |
|
237 | 'User "%s" still owns %s repository groups and cannot be ' | |
238 | 'removed. Switch owners or remove those repository groups: %s') |
|
238 | 'removed. Switch owners or remove those repository groups: %s') | |
239 | % (user.username, len(repogroups), ', '.join(repogroups))) |
|
239 | % (user.username, len(repogroups), ', '.join(repogroups))) | |
240 | if user.user_groups: |
|
240 | if user.user_groups: | |
241 | usergroups = [x.users_group_name for x in user.user_groups] |
|
241 | usergroups = [x.users_group_name for x in user.user_groups] | |
242 | raise UserOwnsReposException( |
|
242 | raise UserOwnsReposException( | |
243 | _('User "%s" still owns %s user groups and cannot be ' |
|
243 | _('User "%s" still owns %s user groups and cannot be ' | |
244 | 'removed. Switch owners or remove those user groups: %s') |
|
244 | 'removed. Switch owners or remove those user groups: %s') | |
245 | % (user.username, len(usergroups), ', '.join(usergroups))) |
|
245 | % (user.username, len(usergroups), ', '.join(usergroups))) | |
246 | meta.Session().delete(user) |
|
246 | meta.Session().delete(user) | |
247 |
|
247 | |||
248 | hooks.log_delete_user(user.get_dict(), cur_user) |
|
248 | hooks.log_delete_user(user.get_dict(), cur_user) | |
249 |
|
249 | |||
250 | def can_change_password(self, user): |
|
250 | def can_change_password(self, user): | |
251 | from kallithea.lib import auth_modules |
|
251 | from kallithea.lib import auth_modules | |
252 | managed_fields = auth_modules.get_managed_fields(user) |
|
252 | managed_fields = auth_modules.get_managed_fields(user) | |
253 | return 'password' not in managed_fields |
|
253 | return 'password' not in managed_fields | |
254 |
|
254 | |||
255 | def get_reset_password_token(self, user, timestamp, session_id): |
|
255 | def get_reset_password_token(self, user, timestamp, session_id): | |
256 | """ |
|
256 | """ | |
257 | The token is a 40-digit hexstring, calculated as an HMAC-SHA1. |
|
257 | The token is a 40-digit hexstring, calculated as an HMAC-SHA1. | |
258 |
|
258 | |||
259 | In a traditional HMAC scenario, an attacker is unable to know or |
|
259 | In a traditional HMAC scenario, an attacker is unable to know or | |
260 | influence the secret key, but can know or influence the message |
|
260 | influence the secret key, but can know or influence the message | |
261 | and token. This scenario is slightly different (in particular |
|
261 | and token. This scenario is slightly different (in particular | |
262 | since the message sender is also the message recipient), but |
|
262 | since the message sender is also the message recipient), but | |
263 | sufficiently similar to use an HMAC. Benefits compared to a plain |
|
263 | sufficiently similar to use an HMAC. Benefits compared to a plain | |
264 | SHA1 hash include resistance against a length extension attack. |
|
264 | SHA1 hash include resistance against a length extension attack. | |
265 |
|
265 | |||
266 | The HMAC key consists of the following values (known only to the |
|
266 | The HMAC key consists of the following values (known only to the | |
267 | server and authorized users): |
|
267 | server and authorized users): | |
268 |
|
268 | |||
269 | * per-application secret (the `app_instance_uuid` setting), without |
|
269 | * per-application secret (the `app_instance_uuid` setting), without | |
270 | which an attacker cannot counterfeit tokens |
|
270 | which an attacker cannot counterfeit tokens | |
271 | * hashed user password, invalidating the token upon password change |
|
271 | * hashed user password, invalidating the token upon password change | |
272 |
|
272 | |||
273 | The HMAC message consists of the following values (potentially known |
|
273 | The HMAC message consists of the following values (potentially known | |
274 | to an attacker): |
|
274 | to an attacker): | |
275 |
|
275 | |||
276 | * session ID (the anti-CSRF token), requiring an attacker to have |
|
276 | * session ID (the anti-CSRF token), requiring an attacker to have | |
277 | access to the browser session in which the token was created |
|
277 | access to the browser session in which the token was created | |
278 | * numeric user ID, limiting the token to a specific user (yet allowing |
|
278 | * numeric user ID, limiting the token to a specific user (yet allowing | |
279 | users to be renamed) |
|
279 | users to be renamed) | |
280 | * user email address |
|
280 | * user email address | |
281 | * time of token issue (a Unix timestamp, to enable token expiration) |
|
281 | * time of token issue (a Unix timestamp, to enable token expiration) | |
282 |
|
282 | |||
283 | The key and message values are separated by NUL characters, which are |
|
283 | The key and message values are separated by NUL characters, which are | |
284 | guaranteed not to occur in any of the values. |
|
284 | guaranteed not to occur in any of the values. | |
285 | """ |
|
285 | """ | |
286 | app_secret = config.get('app_instance_uuid') |
|
286 | app_secret = config.get('app_instance_uuid') | |
287 | return hmac.new( |
|
287 | return hmac.new( | |
288 | '\0'.join([app_secret, user.password]).encode('utf-8'), |
|
288 | '\0'.join([app_secret, user.password]).encode('utf-8'), | |
289 | msg='\0'.join([session_id, str(user.user_id), user.email, str(timestamp)]).encode('utf-8'), |
|
289 | msg='\0'.join([session_id, str(user.user_id), user.email, str(timestamp)]).encode('utf-8'), | |
290 | digestmod=hashlib.sha1, |
|
290 | digestmod=hashlib.sha1, | |
291 | ).hexdigest() |
|
291 | ).hexdigest() | |
292 |
|
292 | |||
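The docstring above can be condensed into a stand-alone sketch; every value below is a placeholder for the config secret, the stored password hash, the CSRF session token, and the user row:

```python
import hashlib
import hmac

app_secret = 'app-instance-uuid'      # config['app_instance_uuid'] (placeholder)
password_hash = '$2a$10$...'          # user.password, already hashed (placeholder)
key = '\0'.join([app_secret, password_hash]).encode('utf-8')
msg = '\0'.join(['csrf-session-token', '42', 'jdoe@example.com',
                 '1700000000']).encode('utf-8')
token = hmac.new(key, msg=msg, digestmod=hashlib.sha1).hexdigest()
assert len(token) == 40  # 40 hex digits, as stated in the docstring
```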
293 | def send_reset_password_email(self, data): |
|
293 | def send_reset_password_email(self, data): | |
294 | """ |
|
294 | """ | |
295 | Sends email with a password reset token and link to the password |
|
295 | Sends email with a password reset token and link to the password | |
296 | reset confirmation page with all information (including the token) |
|
296 | reset confirmation page with all information (including the token) | |
297 | pre-filled. Also returns URL of that page, only without the token, |
|
297 | pre-filled. Also returns URL of that page, only without the token, | |
298 | allowing users to copy-paste or manually enter the token from the |
|
298 | allowing users to copy-paste or manually enter the token from the | |
299 | email. |
|
299 | email. | |
300 | """ |
|
300 | """ | |
301 | from kallithea. |
302 | from kallithea.model import notification |
303 |

301 | from kallithea.model import async_tasks, notification | |
302 | |||
304 | user_email = data['email'] |
|
303 | user_email = data['email'] | |
305 | user = db.User.get_by_email(user_email) |
|
304 | user = db.User.get_by_email(user_email) | |
306 | timestamp = int(time.time()) |
|
305 | timestamp = int(time.time()) | |
307 | if user is not None: |
|
306 | if user is not None: | |
308 | if self.can_change_password(user): |
|
307 | if self.can_change_password(user): | |
309 | log.debug('password reset user %s found', user) |
|
308 | log.debug('password reset user %s found', user) | |
310 | token = self.get_reset_password_token(user, |
|
309 | token = self.get_reset_password_token(user, | |
311 | timestamp, |
|
310 | timestamp, | |
312 | webutils.session_csrf_secret_token()) |
|
311 | webutils.session_csrf_secret_token()) | |
313 | # URL must be fully qualified; but since the token is locked to |
|
312 | # URL must be fully qualified; but since the token is locked to | |
314 | # the current browser session, we must provide a URL with the |
|
313 | # the current browser session, we must provide a URL with the | |
315 | # current scheme and hostname, rather than the canonical_url. |
|
314 | # current scheme and hostname, rather than the canonical_url. | |
316 | link = webutils.url('reset_password_confirmation', qualified=True, |
|
315 | link = webutils.url('reset_password_confirmation', qualified=True, | |
317 | email=user_email, |
|
316 | email=user_email, | |
318 | timestamp=timestamp, |
|
317 | timestamp=timestamp, | |
319 | token=token) |
|
318 | token=token) | |
320 | else: |
|
319 | else: | |
321 | log.debug('password reset user %s found but was managed', user) |
|
320 | log.debug('password reset user %s found but was managed', user) | |
322 | token = link = None |
|
321 | token = link = None | |
323 | reg_type = notification.EmailNotificationModel.TYPE_PASSWORD_RESET |
|
322 | reg_type = notification.EmailNotificationModel.TYPE_PASSWORD_RESET | |
324 | body = notification.EmailNotificationModel().get_email_tmpl( |
|
323 | body = notification.EmailNotificationModel().get_email_tmpl( | |
325 | reg_type, 'txt', |
|
324 | reg_type, 'txt', | |
326 | user=user.short_contact, |
|
325 | user=user.short_contact, | |
327 | reset_token=token, |
|
326 | reset_token=token, | |
328 | reset_url=link) |
|
327 | reset_url=link) | |
329 | html_body = notification.EmailNotificationModel().get_email_tmpl( |
|
328 | html_body = notification.EmailNotificationModel().get_email_tmpl( | |
330 | reg_type, 'html', |
|
329 | reg_type, 'html', | |
331 | user=user.short_contact, |
|
330 | user=user.short_contact, | |
332 | reset_token=token, |
|
331 | reset_token=token, | |
333 | reset_url=link) |
|
332 | reset_url=link) | |
334 | log.debug('sending email') |
|
333 | log.debug('sending email') | |
335 | tasks.send_email([user_email], _("Password reset link"), body, html_body) |
|
334 | async_tasks.send_email([user_email], _("Password reset link"), body, html_body) | |
336 | log.info('send new password mail to %s', user_email) |
|
335 | log.info('send new password mail to %s', user_email) | |
337 | else: |
|
336 | else: | |
338 | log.debug("password reset email %s not found", user_email) |
|
337 | log.debug("password reset email %s not found", user_email) | |
339 |
|
338 | |||
340 | return webutils.url('reset_password_confirmation', |
|
339 | return webutils.url('reset_password_confirmation', | |
341 | email=user_email, |
|
340 | email=user_email, | |
342 | timestamp=timestamp) |
|
341 | timestamp=timestamp) | |
343 |
|
342 | |||
344 | def verify_reset_password_token(self, email, timestamp, token): |
|
343 | def verify_reset_password_token(self, email, timestamp, token): | |
345 | user = db.User.get_by_email(email) |
|
344 | user = db.User.get_by_email(email) | |
346 | if user is None: |
|
345 | if user is None: | |
347 | log.debug("user with email %s not found", email) |
|
346 | log.debug("user with email %s not found", email) | |
348 | return False |
|
347 | return False | |
349 |
|
348 | |||
350 | token_age = int(time.time()) - int(timestamp) |
|
349 | token_age = int(time.time()) - int(timestamp) | |
351 |
|
350 | |||
352 | if token_age < 0: |
|
351 | if token_age < 0: | |
353 | log.debug('timestamp is from the future') |
|
352 | log.debug('timestamp is from the future') | |
354 | return False |
|
353 | return False | |
355 |
|
354 | |||
356 | if token_age > UserModel.password_reset_token_lifetime: |
|
355 | if token_age > UserModel.password_reset_token_lifetime: | |
357 | log.debug('password reset token expired') |
|
356 | log.debug('password reset token expired') | |
358 | return False |
|
357 | return False | |
359 |
|
358 | |||
360 | expected_token = self.get_reset_password_token(user, |
|
359 | expected_token = self.get_reset_password_token(user, | |
361 | timestamp, |
|
360 | timestamp, | |
362 | webutils.session_csrf_secret_token()) |
|
361 | webutils.session_csrf_secret_token()) | |
363 | log.debug('computed password reset token: %s', expected_token) |
|
362 | log.debug('computed password reset token: %s', expected_token) | |
364 | log.debug('received password reset token: %s', token) |
|
363 | log.debug('received password reset token: %s', token) | |
365 | return expected_token == token |
|
364 | return expected_token == token | |
366 |
|
365 | |||
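A worked example of the age check above: with `password_reset_token_lifetime` at 86400 seconds (24 hours), a token issued 25 hours ago is rejected regardless of its HMAC:

```python
import time

issued_at = int(time.time()) - 25 * 3600   # hypothetical timestamp from the reset link
token_age = int(time.time()) - issued_at   # roughly 90000 seconds
assert token_age > UserModel.password_reset_token_lifetime  # 90000 > 86400
```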
367 | def reset_password(self, user_email, new_passwd): |
|
366 | def reset_password(self, user_email, new_passwd): | |
368 | from kallithea. |

367 | from kallithea.model import async_tasks |
369 | user = db.User.get_by_email(user_email) |
|
368 | user = db.User.get_by_email(user_email) | |
370 | if user is not None: |
|
369 | if user is not None: | |
371 | if not self.can_change_password(user): |
|
370 | if not self.can_change_password(user): | |
372 | raise Exception('trying to change password for external user') |
|
371 | raise Exception('trying to change password for external user') | |
373 | user.password = get_crypt_password(new_passwd) |
|
372 | user.password = get_crypt_password(new_passwd) | |
374 | meta.Session().commit() |
|
373 | meta.Session().commit() | |
375 | log.info('change password for %s', user_email) |
|
374 | log.info('change password for %s', user_email) | |
376 | if new_passwd is None: |
|
375 | if new_passwd is None: | |
377 | raise Exception('unable to set new password') |
|
376 | raise Exception('unable to set new password') | |
378 |
|
377 | |||
379 | tasks.send_email([user_email], |
|
378 | async_tasks.send_email([user_email], | |
380 | _('Password reset notification'), |
|
379 | _('Password reset notification'), | |
381 | _('The password to your account %s has been changed using the password reset form.') % (user.username,)) |
|
380 | _('The password to your account %s has been changed using the password reset form.') % (user.username,)) | |
382 | log.info('send password reset mail to %s', user_email) |
|
381 | log.info('send password reset mail to %s', user_email) | |
383 |
|
382 | |||
384 | return True |
|
383 | return True | |
385 |
|
384 | |||
386 | def has_perm(self, user, perm): |
|
385 | def has_perm(self, user, perm): | |
387 | perm = db.Permission.guess_instance(perm) |
|
386 | perm = db.Permission.guess_instance(perm) | |
388 | user = db.User.guess_instance(user) |
|
387 | user = db.User.guess_instance(user) | |
389 |
|
388 | |||
390 | return db.UserToPerm.query().filter(db.UserToPerm.user == user) \ |
|
389 | return db.UserToPerm.query().filter(db.UserToPerm.user == user) \ | |
391 | .filter(db.UserToPerm.permission == perm).scalar() is not None |
|
390 | .filter(db.UserToPerm.permission == perm).scalar() is not None | |
392 |
|
391 | |||
393 | def grant_perm(self, user, perm): |
|
392 | def grant_perm(self, user, perm): | |
394 | """ |
|
393 | """ | |
395 | Grant user global permissions |
|
394 | Grant user global permissions | |
396 |
|
395 | |||
397 | :param user: |
|
396 | :param user: | |
398 | :param perm: |
|
397 | :param perm: | |
399 | """ |
|
398 | """ | |
400 | user = db.User.guess_instance(user) |
|
399 | user = db.User.guess_instance(user) | |
401 | perm = db.Permission.guess_instance(perm) |
|
400 | perm = db.Permission.guess_instance(perm) | |
402 | # if this permission is already granted skip it |
|
401 | # if this permission is already granted skip it | |
403 | _perm = db.UserToPerm.query() \ |
|
402 | _perm = db.UserToPerm.query() \ | |
404 | .filter(db.UserToPerm.user == user) \ |
|
403 | .filter(db.UserToPerm.user == user) \ | |
405 | .filter(db.UserToPerm.permission == perm) \ |
|
404 | .filter(db.UserToPerm.permission == perm) \ | |
406 | .scalar() |
|
405 | .scalar() | |
407 | if _perm: |
|
406 | if _perm: | |
408 | return |
|
407 | return | |
409 | new = db.UserToPerm() |
|
408 | new = db.UserToPerm() | |
410 | new.user = user |
|
409 | new.user = user | |
411 | new.permission = perm |
|
410 | new.permission = perm | |
412 | meta.Session().add(new) |
|
411 | meta.Session().add(new) | |
413 | return new |
|
412 | return new | |
414 |
|
413 | |||
415 | def revoke_perm(self, user, perm): |
|
414 | def revoke_perm(self, user, perm): | |
416 | """ |
|
415 | """ | |
417 | Revoke a user's global permissions |
|
416 | Revoke a user's global permissions | |
418 |
|
417 | |||
419 | :param user: |
|
418 | :param user: | |
420 | :param perm: |
|
419 | :param perm: | |
421 | """ |
|
420 | """ | |
422 | user = db.User.guess_instance(user) |
|
421 | user = db.User.guess_instance(user) | |
423 | perm = db.Permission.guess_instance(perm) |
|
422 | perm = db.Permission.guess_instance(perm) | |
424 |
|
423 | |||
425 | db.UserToPerm.query().filter( |
|
424 | db.UserToPerm.query().filter( | |
426 | db.UserToPerm.user == user, |
|
425 | db.UserToPerm.user == user, | |
427 | db.UserToPerm.permission == perm, |
|
426 | db.UserToPerm.permission == perm, | |
428 | ).delete() |
|
427 | ).delete() | |
429 |
|
428 | |||
430 | def add_extra_email(self, user, email): |
|
429 | def add_extra_email(self, user, email): | |
431 | """ |
|
430 | """ | |
432 | Adds email address to UserEmailMap |
|
431 | Adds email address to UserEmailMap | |
433 |
|
432 | |||
434 | :param user: |
|
433 | :param user: | |
435 | :param email: |
|
434 | :param email: | |
436 | """ |
|
435 | """ | |
437 | form = forms.UserExtraEmailForm()() |
|
436 | form = forms.UserExtraEmailForm()() | |
438 | data = form.to_python(dict(email=email)) |
|
437 | data = form.to_python(dict(email=email)) | |
439 | user = db.User.guess_instance(user) |
|
438 | user = db.User.guess_instance(user) | |
440 |
|
439 | |||
441 | obj = db.UserEmailMap() |
|
440 | obj = db.UserEmailMap() | |
442 | obj.user = user |
|
441 | obj.user = user | |
443 | obj.email = data['email'] |
|
442 | obj.email = data['email'] | |
444 | meta.Session().add(obj) |
|
443 | meta.Session().add(obj) | |
445 | return obj |
|
444 | return obj | |
446 |
|
445 | |||
447 | def delete_extra_email(self, user, email_id): |
|
446 | def delete_extra_email(self, user, email_id): | |
448 | """ |
|
447 | """ | |
449 | Removes email address from UserEmailMap |
|
448 | Removes email address from UserEmailMap | |
450 |
|
449 | |||
451 | :param user: |
|
450 | :param user: | |
452 | :param email_id: |
|
451 | :param email_id: | |
453 | """ |
|
452 | """ | |
454 | user = db.User.guess_instance(user) |
|
453 | user = db.User.guess_instance(user) | |
455 | obj = db.UserEmailMap.query().get(email_id) |
|
454 | obj = db.UserEmailMap.query().get(email_id) | |
456 | if obj is not None: |
|
455 | if obj is not None: | |
457 | meta.Session().delete(obj) |
|
456 | meta.Session().delete(obj) | |
458 |
|
457 | |||
459 | def add_extra_ip(self, user, ip): |
|
458 | def add_extra_ip(self, user, ip): | |
460 | """ |
|
459 | """ | |
461 | Adds IP address to UserIpMap |
|
460 | Adds IP address to UserIpMap | |
462 |
|
461 | |||
463 | :param user: |
|
462 | :param user: | |
464 | :param ip: |
|
463 | :param ip: | |
465 | """ |
|
464 | """ | |
466 | form = forms.UserExtraIpForm()() |
|
465 | form = forms.UserExtraIpForm()() | |
467 | data = form.to_python(dict(ip=ip)) |
|
466 | data = form.to_python(dict(ip=ip)) | |
468 | user = db.User.guess_instance(user) |
|
467 | user = db.User.guess_instance(user) | |
469 |
|
468 | |||
470 | obj = db.UserIpMap() |
|
469 | obj = db.UserIpMap() | |
471 | obj.user = user |
|
470 | obj.user = user | |
472 | obj.ip_addr = data['ip'] |
|
471 | obj.ip_addr = data['ip'] | |
473 | meta.Session().add(obj) |
|
472 | meta.Session().add(obj) | |
474 | return obj |
|
473 | return obj | |
475 |
|
474 | |||
476 | def delete_extra_ip(self, user, ip_id): |
|
475 | def delete_extra_ip(self, user, ip_id): | |
477 | """ |
|
476 | """ | |
478 | Removes IP address from UserIpMap |
|
477 | Removes IP address from UserIpMap | |
479 |
|
478 | |||
480 | :param user: |
|
479 | :param user: | |
481 | :param ip_id: |
|
480 | :param ip_id: | |
482 | """ |
|
481 | """ | |
483 | user = db.User.guess_instance(user) |
|
482 | user = db.User.guess_instance(user) | |
484 | obj = db.UserIpMap.query().get(ip_id) |
|
483 | obj = db.UserIpMap.query().get(ip_id) | |
485 | if obj: |
|
484 | if obj: | |
486 | meta.Session().delete(obj) |
|
485 | meta.Session().delete(obj) |
@@ -1,531 +1,531 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | import re |
|
2 | import re | |
3 | import time |
|
3 | import time | |
4 | import urllib.parse |
|
4 | import urllib.parse | |
5 |
|
5 | |||
6 | import mock |
|
6 | import mock | |
7 | from tg.util.webtest import test_context |
|
7 | from tg.util.webtest import test_context | |
8 |
|
8 | |||
9 | import kallithea.l |

9 | import kallithea.model.async_tasks |
10 | from kallithea.lib import webutils |
|
10 | from kallithea.lib import webutils | |
11 | from kallithea.lib.utils2 import check_password, generate_api_key |
|
11 | from kallithea.lib.utils2 import check_password, generate_api_key | |
12 | from kallithea.model import db, meta, validators |
|
12 | from kallithea.model import db, meta, validators | |
13 | from kallithea.model.api_key import ApiKeyModel |
|
13 | from kallithea.model.api_key import ApiKeyModel | |
14 | from kallithea.model.user import UserModel |
|
14 | from kallithea.model.user import UserModel | |
15 | from kallithea.tests import base |
|
15 | from kallithea.tests import base | |
16 | from kallithea.tests.fixture import Fixture |
|
16 | from kallithea.tests.fixture import Fixture | |
17 |
|
17 | |||
18 |
|
18 | |||
19 | fixture = Fixture() |
|
19 | fixture = Fixture() | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | class TestLoginController(base.TestController): |
|
22 | class TestLoginController(base.TestController): | |
23 |
|
23 | |||
24 | def test_index(self): |
|
24 | def test_index(self): | |
25 | response = self.app.get(base.url(controller='login', action='index')) |
|
25 | response = self.app.get(base.url(controller='login', action='index')) | |
26 | assert response.status == '200 OK' |
|
26 | assert response.status == '200 OK' | |
27 | # Test response... |
|
27 | # Test response... | |
28 |
|
28 | |||
29 | def test_login_admin_ok(self): |
|
29 | def test_login_admin_ok(self): | |
30 | response = self.app.post(base.url(controller='login', action='index'), |
|
30 | response = self.app.post(base.url(controller='login', action='index'), | |
31 | {'username': base.TEST_USER_ADMIN_LOGIN, |
|
31 | {'username': base.TEST_USER_ADMIN_LOGIN, | |
32 | 'password': base.TEST_USER_ADMIN_PASS, |
|
32 | 'password': base.TEST_USER_ADMIN_PASS, | |
33 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
33 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
34 | assert response.status == '302 Found' |
|
34 | assert response.status == '302 Found' | |
35 | self.assert_authenticated_user(response, base.TEST_USER_ADMIN_LOGIN) |
|
35 | self.assert_authenticated_user(response, base.TEST_USER_ADMIN_LOGIN) | |
36 |
|
36 | |||
37 | response = response.follow() |
|
37 | response = response.follow() | |
38 | response.mustcontain('/%s' % base.HG_REPO) |
|
38 | response.mustcontain('/%s' % base.HG_REPO) | |
39 |
|
39 | |||
40 | def test_login_regular_ok(self): |
|
40 | def test_login_regular_ok(self): | |
41 | response = self.app.post(base.url(controller='login', action='index'), |
|
41 | response = self.app.post(base.url(controller='login', action='index'), | |
42 | {'username': base.TEST_USER_REGULAR_LOGIN, |
|
42 | {'username': base.TEST_USER_REGULAR_LOGIN, | |
43 | 'password': base.TEST_USER_REGULAR_PASS, |
|
43 | 'password': base.TEST_USER_REGULAR_PASS, | |
44 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
44 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
45 |
|
45 | |||
46 | assert response.status == '302 Found' |
|
46 | assert response.status == '302 Found' | |
47 | self.assert_authenticated_user(response, base.TEST_USER_REGULAR_LOGIN) |
|
47 | self.assert_authenticated_user(response, base.TEST_USER_REGULAR_LOGIN) | |
48 |
|
48 | |||
49 | response = response.follow() |
|
49 | response = response.follow() | |
50 | response.mustcontain('/%s' % base.HG_REPO) |
|
50 | response.mustcontain('/%s' % base.HG_REPO) | |
51 |
|
51 | |||
52 | def test_login_regular_email_ok(self): |
|
52 | def test_login_regular_email_ok(self): | |
53 | response = self.app.post(base.url(controller='login', action='index'), |
|
53 | response = self.app.post(base.url(controller='login', action='index'), | |
54 | {'username': base.TEST_USER_REGULAR_EMAIL, |
|
54 | {'username': base.TEST_USER_REGULAR_EMAIL, | |
55 | 'password': base.TEST_USER_REGULAR_PASS, |
|
55 | 'password': base.TEST_USER_REGULAR_PASS, | |
56 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
56 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
57 |
|
57 | |||
58 | assert response.status == '302 Found' |
|
58 | assert response.status == '302 Found' | |
59 | self.assert_authenticated_user(response, base.TEST_USER_REGULAR_LOGIN) |
|
59 | self.assert_authenticated_user(response, base.TEST_USER_REGULAR_LOGIN) | |
60 |
|
60 | |||
61 | response = response.follow() |
|
61 | response = response.follow() | |
62 | response.mustcontain('/%s' % base.HG_REPO) |
|
62 | response.mustcontain('/%s' % base.HG_REPO) | |
63 |
|
63 | |||
64 | def test_login_ok_came_from(self): |
|
64 | def test_login_ok_came_from(self): | |
65 | test_came_from = '/_admin/users' |
|
65 | test_came_from = '/_admin/users' | |
66 | response = self.app.post(base.url(controller='login', action='index', |
|
66 | response = self.app.post(base.url(controller='login', action='index', | |
67 | came_from=test_came_from), |
|
67 | came_from=test_came_from), | |
68 | {'username': base.TEST_USER_ADMIN_LOGIN, |
|
68 | {'username': base.TEST_USER_ADMIN_LOGIN, | |
69 | 'password': base.TEST_USER_ADMIN_PASS, |
|
69 | 'password': base.TEST_USER_ADMIN_PASS, | |
70 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
70 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
71 | assert response.status == '302 Found' |
|
71 | assert response.status == '302 Found' | |
72 | response = response.follow() |
|
72 | response = response.follow() | |
73 |
|
73 | |||
74 | assert response.status == '200 OK' |
|
74 | assert response.status == '200 OK' | |
75 | response.mustcontain('Users Administration') |
|
75 | response.mustcontain('Users Administration') | |
76 |
|
76 | |||
77 | def test_login_do_not_remember(self): |
|
77 | def test_login_do_not_remember(self): | |
78 | response = self.app.post(base.url(controller='login', action='index'), |
|
78 | response = self.app.post(base.url(controller='login', action='index'), | |
79 | {'username': base.TEST_USER_REGULAR_LOGIN, |
|
79 | {'username': base.TEST_USER_REGULAR_LOGIN, | |
80 | 'password': base.TEST_USER_REGULAR_PASS, |
|
80 | 'password': base.TEST_USER_REGULAR_PASS, | |
81 | 'remember': False, |
|
81 | 'remember': False, | |
82 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
82 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
83 |
|
83 | |||
84 | assert 'Set-Cookie' in response.headers |
|
84 | assert 'Set-Cookie' in response.headers | |
85 | for cookie in response.headers.getall('Set-Cookie'): |
|
85 | for cookie in response.headers.getall('Set-Cookie'): | |
86 | assert not re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE), 'Cookie %r has expiration date, but should be a session cookie' % cookie |
|
86 | assert not re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE), 'Cookie %r has expiration date, but should be a session cookie' % cookie | |
87 |
|
87 | |||
88 | def test_login_remember(self): |
|
88 | def test_login_remember(self): | |
89 | response = self.app.post(base.url(controller='login', action='index'), |
|
89 | response = self.app.post(base.url(controller='login', action='index'), | |
90 | {'username': base.TEST_USER_REGULAR_LOGIN, |
|
90 | {'username': base.TEST_USER_REGULAR_LOGIN, | |
91 | 'password': base.TEST_USER_REGULAR_PASS, |
|
91 | 'password': base.TEST_USER_REGULAR_PASS, | |
92 | 'remember': True, |
|
92 | 'remember': True, | |
93 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
93 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
94 |
|
94 | |||
95 | assert 'Set-Cookie' in response.headers |
|
95 | assert 'Set-Cookie' in response.headers | |
96 | for cookie in response.headers.getall('Set-Cookie'): |
|
96 | for cookie in response.headers.getall('Set-Cookie'): | |
97 | assert re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE), 'Cookie %r should have expiration date, but is a session cookie' % cookie |
|
97 | assert re.search(r';\s+(Max-Age|Expires)=', cookie, re.IGNORECASE), 'Cookie %r should have expiration date, but is a session cookie' % cookie | |
98 |
|
98 | |||
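The cookie-expiration heuristic used by the two tests above can be exercised in isolation; the Set-Cookie values here are made up:

```python
import re

pattern = r';\s+(Max-Age|Expires)='
session_cookie = 'kallithea=abc123; Path=/; HttpOnly'
persistent_cookie = 'kallithea=abc123; Path=/; Max-Age=2592000; HttpOnly'
assert not re.search(pattern, session_cookie, re.IGNORECASE)   # plain login
assert re.search(pattern, persistent_cookie, re.IGNORECASE)    # "remember me" login
```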
99 | def test_logout(self): |
|
99 | def test_logout(self): | |
100 | response = self.app.post(base.url(controller='login', action='index'), |
|
100 | response = self.app.post(base.url(controller='login', action='index'), | |
101 | {'username': base.TEST_USER_REGULAR_LOGIN, |
|
101 | {'username': base.TEST_USER_REGULAR_LOGIN, | |
102 | 'password': base.TEST_USER_REGULAR_PASS, |
|
102 | 'password': base.TEST_USER_REGULAR_PASS, | |
103 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
103 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
104 |
|
104 | |||
105 | # Verify that a login session has been established. |
|
105 | # Verify that a login session has been established. | |
106 | response = self.app.get(base.url(controller='login', action='index')) |
|
106 | response = self.app.get(base.url(controller='login', action='index')) | |
107 | response = response.follow() |
|
107 | response = response.follow() | |
108 | assert 'authuser' in response.session |
|
108 | assert 'authuser' in response.session | |
109 |
|
109 | |||
110 | response.click('Log Out') |
|
110 | response.click('Log Out') | |
111 |
|
111 | |||
112 | # Verify that the login session has been terminated. |
|
112 | # Verify that the login session has been terminated. | |
113 | response = self.app.get(base.url(controller='login', action='index')) |
|
113 | response = self.app.get(base.url(controller='login', action='index')) | |
114 | assert 'authuser' not in response.session |
|
114 | assert 'authuser' not in response.session | |
115 |
|
115 | |||
116 | @base.parametrize('url_came_from', [ |
|
116 | @base.parametrize('url_came_from', [ | |
117 | ('data:text/html,<script>window.alert("xss")</script>',), |
|
117 | ('data:text/html,<script>window.alert("xss")</script>',), | |
118 | ('mailto:test@example.com',), |
|
118 | ('mailto:test@example.com',), | |
119 | ('file:///etc/passwd',), |
|
119 | ('file:///etc/passwd',), | |
120 | ('ftp://ftp.example.com',), |
|
120 | ('ftp://ftp.example.com',), | |
121 | ('http://other.example.com/bl%C3%A5b%C3%A6rgr%C3%B8d',), |
|
121 | ('http://other.example.com/bl%C3%A5b%C3%A6rgr%C3%B8d',), | |
122 | ('//evil.example.com/',), |
|
122 | ('//evil.example.com/',), | |
123 | ('/\r\nX-Header-Injection: boo',), |
|
123 | ('/\r\nX-Header-Injection: boo',), | |
124 | ('/invΓ€lid_url_bytes',), |
|
124 | ('/invΓ€lid_url_bytes',), | |
125 | ('non-absolute-path',), |
|
125 | ('non-absolute-path',), | |
126 | ]) |
|
126 | ]) | |
127 | def test_login_bad_came_froms(self, url_came_from): |
|
127 | def test_login_bad_came_froms(self, url_came_from): | |
128 | response = self.app.post(base.url(controller='login', action='index', |
|
128 | response = self.app.post(base.url(controller='login', action='index', | |
129 | came_from=url_came_from), |
|
129 | came_from=url_came_from), | |
130 | {'username': base.TEST_USER_ADMIN_LOGIN, |
|
130 | {'username': base.TEST_USER_ADMIN_LOGIN, | |
131 | 'password': base.TEST_USER_ADMIN_PASS, |
|
131 | 'password': base.TEST_USER_ADMIN_PASS, | |
132 | '_session_csrf_secret_token': self.session_csrf_secret_token()}, |
|
132 | '_session_csrf_secret_token': self.session_csrf_secret_token()}, | |
133 | status=400) |
|
133 | status=400) | |
134 |
|
134 | |||
135 | def test_login_short_password(self): |
|
135 | def test_login_short_password(self): | |
136 | response = self.app.post(base.url(controller='login', action='index'), |
|
136 | response = self.app.post(base.url(controller='login', action='index'), | |
137 | {'username': base.TEST_USER_ADMIN_LOGIN, |
|
137 | {'username': base.TEST_USER_ADMIN_LOGIN, | |
138 | 'password': 'as', |
|
138 | 'password': 'as', | |
139 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
139 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
140 | assert response.status == '200 OK' |
|
140 | assert response.status == '200 OK' | |
141 |
|
141 | |||
142 | response.mustcontain('Enter 3 characters or more') |
|
142 | response.mustcontain('Enter 3 characters or more') | |
143 |
|
143 | |||
144 | def test_login_wrong_username_password(self): |
|
144 | def test_login_wrong_username_password(self): | |
145 | response = self.app.post(base.url(controller='login', action='index'), |
|
145 | response = self.app.post(base.url(controller='login', action='index'), | |
146 | {'username': 'error', |
|
146 | {'username': 'error', | |
147 | 'password': 'test12', |
|
147 | 'password': 'test12', | |
148 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
148 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
149 |
|
149 | |||
150 | response.mustcontain('Invalid username or password') |
|
150 | response.mustcontain('Invalid username or password') | |
151 |
|
151 | |||
152 | def test_login_non_ascii(self): |
|
152 | def test_login_non_ascii(self): | |
153 | response = self.app.post(base.url(controller='login', action='index'), |
|
153 | response = self.app.post(base.url(controller='login', action='index'), | |
154 | {'username': base.TEST_USER_REGULAR_LOGIN, |
|
154 | {'username': base.TEST_USER_REGULAR_LOGIN, | |
155 | 'password': 'blΓ₯bΓ¦rgrΓΈd', |
|
155 | 'password': 'blΓ₯bΓ¦rgrΓΈd', | |
156 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
156 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
157 |
|
157 | |||
158 | response.mustcontain('>Invalid username or password<') |
|
158 | response.mustcontain('>Invalid username or password<') | |
159 |
|
159 | |||
160 | # verify that GET arguments are correctly preserved across the login redirection |
|
160 | # verify that GET arguments are correctly preserved across the login redirection | |
161 |
|
161 | |||
162 | @base.parametrize('args', [ |
|
162 | @base.parametrize('args', [ | |
163 | {'foo':'one', 'bar':'two'}, |
|
163 | {'foo':'one', 'bar':'two'}, | |
164 | {'blue': 'blΓ₯', 'green': 'grΓΈn'}, |
|
164 | {'blue': 'blΓ₯', 'green': 'grΓΈn'}, | |
165 | ]) |
|
165 | ]) | |
166 | def test_redirection_to_login_form_preserves_get_args(self, args): |
|
166 | def test_redirection_to_login_form_preserves_get_args(self, args): | |
167 | with fixture.anon_access(False): |
|
167 | with fixture.anon_access(False): | |
168 | response = self.app.get(base.url(controller='summary', action='index', |
|
168 | response = self.app.get(base.url(controller='summary', action='index', | |
169 | repo_name=base.HG_REPO, |
|
169 | repo_name=base.HG_REPO, | |
170 | **args)) |
|
170 | **args)) | |
171 | assert response.status == '302 Found' |
|
171 | assert response.status == '302 Found' | |
172 | came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.location).query)['came_from'][0] |
|
172 | came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.location).query)['came_from'][0] | |
173 | came_from_qs = urllib.parse.parse_qsl(urllib.parse.urlparse(came_from).query) |
|
173 | came_from_qs = urllib.parse.parse_qsl(urllib.parse.urlparse(came_from).query) | |
174 | assert sorted(came_from_qs) == sorted(args.items()) |
|
174 | assert sorted(came_from_qs) == sorted(args.items()) | |
175 |
|
175 | |||
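For reference, the came_from round-trip checked above, shown on a hypothetical redirect location:

```python
import urllib.parse

location = 'http://localhost/_admin/login?came_from=%2Frepo%3Ffoo%3Done%26bar%3Dtwo'
came_from = urllib.parse.parse_qs(urllib.parse.urlparse(location).query)['came_from'][0]
# came_from == '/repo?foo=one&bar=two'
came_from_qs = urllib.parse.parse_qsl(urllib.parse.urlparse(came_from).query)
assert sorted(came_from_qs) == sorted({'foo': 'one', 'bar': 'two'}.items())
```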
176 | @base.parametrize('args,args_encoded', [ |
|
176 | @base.parametrize('args,args_encoded', [ | |
177 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), |
|
177 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), | |
178 | ({'blue': 'blΓ₯', 'green':'grΓΈn'}, |
|
178 | ({'blue': 'blΓ₯', 'green':'grΓΈn'}, | |
179 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), |
|
179 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), | |
180 | ]) |
|
180 | ]) | |
181 | def test_login_form_preserves_get_args(self, args, args_encoded): |
|
181 | def test_login_form_preserves_get_args(self, args, args_encoded): | |
182 | response = self.app.get(base.url(controller='login', action='index', |
|
182 | response = self.app.get(base.url(controller='login', action='index', | |
183 | came_from=base.url('/_admin/users', **args))) |
|
183 | came_from=base.url('/_admin/users', **args))) | |
184 | came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.form.action).query)['came_from'][0] |
|
184 | came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.form.action).query)['came_from'][0] | |
185 | for encoded in args_encoded: |
|
185 | for encoded in args_encoded: | |
186 | assert encoded in came_from |
|
186 | assert encoded in came_from | |
187 |
|
187 | |||
188 | @base.parametrize('args,args_encoded', [ |
|
188 | @base.parametrize('args,args_encoded', [ | |
189 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), |
|
189 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), | |
190 | ({'blue': 'blΓ₯', 'green':'grΓΈn'}, |
|
190 | ({'blue': 'blΓ₯', 'green':'grΓΈn'}, | |
191 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), |
|
191 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), | |
192 | ]) |
|
192 | ]) | |
193 | def test_redirection_after_successful_login_preserves_get_args(self, args, args_encoded): |
|
193 | def test_redirection_after_successful_login_preserves_get_args(self, args, args_encoded): | |
194 | response = self.app.post(base.url(controller='login', action='index', |
|
194 | response = self.app.post(base.url(controller='login', action='index', | |
195 | came_from=base.url('/_admin/users', **args)), |
|
195 | came_from=base.url('/_admin/users', **args)), | |
196 | {'username': base.TEST_USER_ADMIN_LOGIN, |
|
196 | {'username': base.TEST_USER_ADMIN_LOGIN, | |
197 | 'password': base.TEST_USER_ADMIN_PASS, |
|
197 | 'password': base.TEST_USER_ADMIN_PASS, | |
198 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
198 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
199 | assert response.status == '302 Found' |
|
199 | assert response.status == '302 Found' | |
200 | for encoded in args_encoded: |
|
200 | for encoded in args_encoded: | |
201 | assert encoded in response.location |
|
201 | assert encoded in response.location | |
202 |
|
202 | |||
203 | @base.parametrize('args,args_encoded', [ |
|
203 | @base.parametrize('args,args_encoded', [ | |
204 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), |
|
204 | ({'foo':'one', 'bar':'two'}, ('foo=one', 'bar=two')), | |
205 | ({'blue': 'blΓ₯', 'green':'grΓΈn'}, |
|
205 | ({'blue': 'blå', 'green':'grøn'}, | |
206 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), |
|
206 | ('blue=bl%C3%A5', 'green=gr%C3%B8n')), | |
207 | ]) |
|
207 | ]) | |
208 | def test_login_form_after_incorrect_login_preserves_get_args(self, args, args_encoded): |
|
208 | def test_login_form_after_incorrect_login_preserves_get_args(self, args, args_encoded): | |
209 | response = self.app.post(base.url(controller='login', action='index', |
|
209 | response = self.app.post(base.url(controller='login', action='index', | |
210 | came_from=base.url('/_admin/users', **args)), |
|
210 | came_from=base.url('/_admin/users', **args)), | |
211 | {'username': 'error', |
|
211 | {'username': 'error', | |
212 | 'password': 'test12', |
|
212 | 'password': 'test12', | |
213 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
213 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
214 |
|
214 | |||
215 | response.mustcontain('Invalid username or password') |
|
215 | response.mustcontain('Invalid username or password') | |
216 | came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.form.action).query)['came_from'][0] |
|
216 | came_from = urllib.parse.parse_qs(urllib.parse.urlparse(response.form.action).query)['came_from'][0] | |
217 | for encoded in args_encoded: |
|
217 | for encoded in args_encoded: | |
218 | assert encoded in came_from |
|
218 | assert encoded in came_from | |
219 |
|
219 | |||
220 | #========================================================================== |
|
220 | #========================================================================== | |
221 | # REGISTRATIONS |
|
221 | # REGISTRATIONS | |
222 | #========================================================================== |
|
222 | #========================================================================== | |
223 | def test_register(self): |
|
223 | def test_register(self): | |
224 | response = self.app.get(base.url(controller='login', action='register')) |
|
224 | response = self.app.get(base.url(controller='login', action='register')) | |
225 | response.mustcontain('Sign Up') |
|
225 | response.mustcontain('Sign Up') | |
226 |
|
226 | |||
227 | def test_register_err_same_username(self): |
|
227 | def test_register_err_same_username(self): | |
228 | uname = base.TEST_USER_ADMIN_LOGIN |
|
228 | uname = base.TEST_USER_ADMIN_LOGIN | |
229 | response = self.app.post(base.url(controller='login', action='register'), |
|
229 | response = self.app.post(base.url(controller='login', action='register'), | |
230 | {'username': uname, |
|
230 | {'username': uname, | |
231 | 'password': 'test12', |
|
231 | 'password': 'test12', | |
232 | 'password_confirmation': 'test12', |
|
232 | 'password_confirmation': 'test12', | |
233 | 'email': 'goodmail@example.com', |
|
233 | 'email': 'goodmail@example.com', | |
234 | 'firstname': 'test', |
|
234 | 'firstname': 'test', | |
235 | 'lastname': 'test', |
|
235 | 'lastname': 'test', | |
236 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
236 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
237 |
|
237 | |||
238 | with test_context(self.app): |
|
238 | with test_context(self.app): | |
239 | msg = validators.ValidUsername()._messages['username_exists'] |
|
239 | msg = validators.ValidUsername()._messages['username_exists'] | |
240 | msg = webutils.html_escape(msg % {'username': uname}) |
|
240 | msg = webutils.html_escape(msg % {'username': uname}) | |
241 | response.mustcontain(msg) |
|
241 | response.mustcontain(msg) | |
242 |
|
242 | |||
243 | def test_register_err_same_email(self): |
|
243 | def test_register_err_same_email(self): | |
244 | response = self.app.post(base.url(controller='login', action='register'), |
|
244 | response = self.app.post(base.url(controller='login', action='register'), | |
245 | {'username': 'test_admin_0', |
|
245 | {'username': 'test_admin_0', | |
246 | 'password': 'test12', |
|
246 | 'password': 'test12', | |
247 | 'password_confirmation': 'test12', |
|
247 | 'password_confirmation': 'test12', | |
248 | 'email': base.TEST_USER_ADMIN_EMAIL, |
|
248 | 'email': base.TEST_USER_ADMIN_EMAIL, | |
249 | 'firstname': 'test', |
|
249 | 'firstname': 'test', | |
250 | 'lastname': 'test', |
|
250 | 'lastname': 'test', | |
251 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
251 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
252 |
|
252 | |||
253 | with test_context(self.app): |
|
253 | with test_context(self.app): | |
254 | msg = validators.UniqSystemEmail()()._messages['email_taken'] |
|
254 | msg = validators.UniqSystemEmail()()._messages['email_taken'] | |
255 | response.mustcontain(msg) |
|
255 | response.mustcontain(msg) | |
256 |
|
256 | |||
257 | def test_register_err_same_email_case_sensitive(self): |
|
257 | def test_register_err_same_email_case_sensitive(self): | |
258 | response = self.app.post(base.url(controller='login', action='register'), |
|
258 | response = self.app.post(base.url(controller='login', action='register'), | |
259 | {'username': 'test_admin_1', |
|
259 | {'username': 'test_admin_1', | |
260 | 'password': 'test12', |
|
260 | 'password': 'test12', | |
261 | 'password_confirmation': 'test12', |
|
261 | 'password_confirmation': 'test12', | |
262 | 'email': base.TEST_USER_ADMIN_EMAIL.title(), |
|
262 | 'email': base.TEST_USER_ADMIN_EMAIL.title(), | |
263 | 'firstname': 'test', |
|
263 | 'firstname': 'test', | |
264 | 'lastname': 'test', |
|
264 | 'lastname': 'test', | |
265 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
265 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
266 | with test_context(self.app): |
|
266 | with test_context(self.app): | |
267 | msg = validators.UniqSystemEmail()()._messages['email_taken'] |
|
267 | msg = validators.UniqSystemEmail()()._messages['email_taken'] | |
268 | response.mustcontain(msg) |
|
268 | response.mustcontain(msg) | |
269 |
|
269 | |||
270 | def test_register_err_wrong_data(self): |
|
270 | def test_register_err_wrong_data(self): | |
271 | response = self.app.post(base.url(controller='login', action='register'), |
|
271 | response = self.app.post(base.url(controller='login', action='register'), | |
272 | {'username': 'xs', |
|
272 | {'username': 'xs', | |
273 | 'password': 'test', |
|
273 | 'password': 'test', | |
274 | 'password_confirmation': 'test', |
|
274 | 'password_confirmation': 'test', | |
275 | 'email': 'goodmailm', |
|
275 | 'email': 'goodmailm', | |
276 | 'firstname': 'test', |
|
276 | 'firstname': 'test', | |
277 | 'lastname': 'test', |
|
277 | 'lastname': 'test', | |
278 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
278 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
279 | assert response.status == '200 OK' |
|
279 | assert response.status == '200 OK' | |
280 | response.mustcontain('An email address must contain a single @') |
|
280 | response.mustcontain('An email address must contain a single @') | |
281 | response.mustcontain('Enter a value 6 characters long or more') |
|
281 | response.mustcontain('Enter a value 6 characters long or more') | |
282 |
|
282 | |||
283 | def test_register_err_username(self): |
|
283 | def test_register_err_username(self): | |
284 | response = self.app.post(base.url(controller='login', action='register'), |
|
284 | response = self.app.post(base.url(controller='login', action='register'), | |
285 | {'username': 'error user', |
|
285 | {'username': 'error user', | |
286 | 'password': 'test12', |
|
286 | 'password': 'test12', | |
287 | 'password_confirmation': 'test12', |
|
287 | 'password_confirmation': 'test12', | |
288 | 'email': 'goodmailm', |
|
288 | 'email': 'goodmailm', | |
289 | 'firstname': 'test', |
|
289 | 'firstname': 'test', | |
290 | 'lastname': 'test', |
|
290 | 'lastname': 'test', | |
291 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
291 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
292 |
|
292 | |||
293 | response.mustcontain('An email address must contain a single @') |
|
293 | response.mustcontain('An email address must contain a single @') | |
294 | response.mustcontain('Username may only contain ' |
|
294 | response.mustcontain('Username may only contain ' | |
295 | 'alphanumeric characters underscores, ' |
|
295 | 'alphanumeric characters underscores, ' | |
296 | 'periods or dashes and must begin with an ' |
|
296 | 'periods or dashes and must begin with an ' | |
297 | 'alphanumeric character') |
|
297 | 'alphanumeric character') | |
298 |
|
298 | |||
299 | def test_register_err_case_sensitive(self): |
|
299 | def test_register_err_case_sensitive(self): | |
300 | usr = base.TEST_USER_ADMIN_LOGIN.title() |
|
300 | usr = base.TEST_USER_ADMIN_LOGIN.title() | |
301 | response = self.app.post(base.url(controller='login', action='register'), |
|
301 | response = self.app.post(base.url(controller='login', action='register'), | |
302 | {'username': usr, |
|
302 | {'username': usr, | |
303 | 'password': 'test12', |
|
303 | 'password': 'test12', | |
304 | 'password_confirmation': 'test12', |
|
304 | 'password_confirmation': 'test12', | |
305 | 'email': 'goodmailm', |
|
305 | 'email': 'goodmailm', | |
306 | 'firstname': 'test', |
|
306 | 'firstname': 'test', | |
307 | 'lastname': 'test', |
|
307 | 'lastname': 'test', | |
308 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
308 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
309 |
|
309 | |||
310 | response.mustcontain('An email address must contain a single @') |
|
310 | response.mustcontain('An email address must contain a single @') | |
311 | with test_context(self.app): |
|
311 | with test_context(self.app): | |
312 | msg = validators.ValidUsername()._messages['username_exists'] |
|
312 | msg = validators.ValidUsername()._messages['username_exists'] | |
313 | msg = webutils.html_escape(msg % {'username': usr}) |
|
313 | msg = webutils.html_escape(msg % {'username': usr}) | |
314 | response.mustcontain(msg) |
|
314 | response.mustcontain(msg) | |
315 |
|
315 | |||
316 | def test_register_special_chars(self): |
|
316 | def test_register_special_chars(self): | |
317 | response = self.app.post(base.url(controller='login', action='register'), |
|
317 | response = self.app.post(base.url(controller='login', action='register'), | |
318 | {'username': 'xxxaxn', |
|
318 | {'username': 'xxxaxn', | |
319 | 'password': 'ąćźżąśśśś', |
|
319 | 'password': 'ąćźżąśśśś', | |
320 | 'password_confirmation': 'ąćźżąśśśś', |
|
320 | 'password_confirmation': 'ąćźżąśśśś', | |
321 | 'email': 'goodmailm@test.plx', |
|
321 | 'email': 'goodmailm@test.plx', | |
322 | 'firstname': 'test', |
|
322 | 'firstname': 'test', | |
323 | 'lastname': 'test', |
|
323 | 'lastname': 'test', | |
324 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
324 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
325 |
|
325 | |||
326 | with test_context(self.app): |
|
326 | with test_context(self.app): | |
327 | msg = validators.ValidPassword()._messages['invalid_password'] |
|
327 | msg = validators.ValidPassword()._messages['invalid_password'] | |
328 | response.mustcontain(msg) |
|
328 | response.mustcontain(msg) | |
329 |
|
329 | |||
330 | def test_register_password_mismatch(self): |
|
330 | def test_register_password_mismatch(self): | |
331 | response = self.app.post(base.url(controller='login', action='register'), |
|
331 | response = self.app.post(base.url(controller='login', action='register'), | |
332 | {'username': 'xs', |
|
332 | {'username': 'xs', | |
333 | 'password': '123qwe', |
|
333 | 'password': '123qwe', | |
334 | 'password_confirmation': 'qwe123', |
|
334 | 'password_confirmation': 'qwe123', | |
335 | 'email': 'goodmailm@test.plxa', |
|
335 | 'email': 'goodmailm@test.plxa', | |
336 | 'firstname': 'test', |
|
336 | 'firstname': 'test', | |
337 | 'lastname': 'test', |
|
337 | 'lastname': 'test', | |
338 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
338 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
339 | with test_context(self.app): |
|
339 | with test_context(self.app): | |
340 | msg = validators.ValidPasswordsMatch('password', 'password_confirmation')._messages['password_mismatch'] |
|
340 | msg = validators.ValidPasswordsMatch('password', 'password_confirmation')._messages['password_mismatch'] | |
341 | response.mustcontain(msg) |
|
341 | response.mustcontain(msg) | |
342 |
|
342 | |||
343 | def test_register_ok(self): |
|
343 | def test_register_ok(self): | |
344 | username = 'test_regular4' |
|
344 | username = 'test_regular4' | |
345 | password = 'qweqwe' |
|
345 | password = 'qweqwe' | |
346 | email = 'user4@example.com' |
|
346 | email = 'user4@example.com' | |
347 | name = 'testname' |
|
347 | name = 'testname' | |
348 | lastname = 'testlastname' |
|
348 | lastname = 'testlastname' | |
349 |
|
349 | |||
350 | response = self.app.post(base.url(controller='login', action='register'), |
|
350 | response = self.app.post(base.url(controller='login', action='register'), | |
351 | {'username': username, |
|
351 | {'username': username, | |
352 | 'password': password, |
|
352 | 'password': password, | |
353 | 'password_confirmation': password, |
|
353 | 'password_confirmation': password, | |
354 | 'email': email, |
|
354 | 'email': email, | |
355 | 'firstname': name, |
|
355 | 'firstname': name, | |
356 | 'lastname': lastname, |
|
356 | 'lastname': lastname, | |
357 | 'admin': True, |
|
357 | 'admin': True, | |
358 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) # This should be overridden |
|
358 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) # This should be overridden | |
359 | assert response.status == '302 Found' |
|
359 | assert response.status == '302 Found' | |
360 | self.checkSessionFlash(response, 'You have successfully registered with Kallithea') |
|
360 | self.checkSessionFlash(response, 'You have successfully registered with Kallithea') | |
361 |
|
361 | |||
362 | ret = meta.Session().query(db.User).filter(db.User.username == 'test_regular4').one() |
|
362 | ret = meta.Session().query(db.User).filter(db.User.username == 'test_regular4').one() | |
363 | assert ret.username == username |
|
363 | assert ret.username == username | |
364 | assert check_password(password, ret.password) == True |
|
364 | assert check_password(password, ret.password) == True | |
365 | assert ret.email == email |
|
365 | assert ret.email == email | |
366 | assert ret.name == name |
|
366 | assert ret.name == name | |
367 | assert ret.lastname == lastname |
|
367 | assert ret.lastname == lastname | |
368 | assert ret.api_key is not None |
|
368 | assert ret.api_key is not None | |
369 | assert ret.admin == False |
|
369 | assert ret.admin == False | |
370 |
|
370 | |||
371 | #========================================================================== |
|
371 | #========================================================================== | |
372 | # PASSWORD RESET |
|
372 | # PASSWORD RESET | |
373 | #========================================================================== |
|
373 | #========================================================================== | |
374 |
|
374 | |||
375 | def test_forgot_password_wrong_mail(self): |
|
375 | def test_forgot_password_wrong_mail(self): | |
376 | bad_email = 'username%wrongmail.org' |
|
376 | bad_email = 'username%wrongmail.org' | |
377 | response = self.app.post( |
|
377 | response = self.app.post( | |
378 | base.url(controller='login', action='password_reset'), |
|
378 | base.url(controller='login', action='password_reset'), | |
379 | {'email': bad_email, |
|
379 | {'email': bad_email, | |
380 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
380 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
381 |
|
381 | |||
382 | response.mustcontain('An email address must contain a single @') |
|
382 | response.mustcontain('An email address must contain a single @') | |
383 |
|
383 | |||
384 | def test_forgot_password(self): |
|
384 | def test_forgot_password(self): | |
385 | response = self.app.get(base.url(controller='login', |
|
385 | response = self.app.get(base.url(controller='login', | |
386 | action='password_reset')) |
|
386 | action='password_reset')) | |
387 | assert response.status == '200 OK' |
|
387 | assert response.status == '200 OK' | |
388 |
|
388 | |||
389 | username = 'test_password_reset_1' |
|
389 | username = 'test_password_reset_1' | |
390 | password = 'qweqwe' |
|
390 | password = 'qweqwe' | |
391 | email = 'username@example.com' |
|
391 | email = 'username@example.com' | |
392 | name = 'passwd' |
|
392 | name = 'passwd' | |
393 | lastname = 'reset' |
|
393 | lastname = 'reset' | |
394 | timestamp = int(time.time()) |
|
394 | timestamp = int(time.time()) | |
395 |
|
395 | |||
396 | new = db.User() |
|
396 | new = db.User() | |
397 | new.username = username |
|
397 | new.username = username | |
398 | new.password = password |
|
398 | new.password = password | |
399 | new.email = email |
|
399 | new.email = email | |
400 | new.name = name |
|
400 | new.name = name | |
401 | new.lastname = lastname |
|
401 | new.lastname = lastname | |
402 | new.api_key = generate_api_key() |
|
402 | new.api_key = generate_api_key() | |
403 | meta.Session().add(new) |
|
403 | meta.Session().add(new) | |
404 | meta.Session().commit() |
|
404 | meta.Session().commit() | |
405 |
|
405 | |||
406 | token = UserModel().get_reset_password_token( |
|
406 | token = UserModel().get_reset_password_token( | |
407 | db.User.get_by_username(username), timestamp, self.session_csrf_secret_token()) |
|
407 | db.User.get_by_username(username), timestamp, self.session_csrf_secret_token()) | |
408 |
|
408 | |||
409 | collected = [] |
|
409 | collected = [] | |
410 | def mock_send_email(recipients, subject, body='', html_body='', headers=None, from_name=None): |
|
410 | def mock_send_email(recipients, subject, body='', html_body='', headers=None, from_name=None): | |
411 | collected.append((recipients, subject, body, html_body)) |
|
411 | collected.append((recipients, subject, body, html_body)) | |
412 |
|
412 | |||
413 | with mock.patch.object(kallithea.l |
|
413 | with mock.patch.object(kallithea.model.async_tasks, 'send_email', mock_send_email), \ | |
414 | mock.patch.object(time, 'time', lambda: timestamp): |
|
414 | mock.patch.object(time, 'time', lambda: timestamp): | |
415 | response = self.app.post(base.url(controller='login', |
|
415 | response = self.app.post(base.url(controller='login', | |
416 | action='password_reset'), |
|
416 | action='password_reset'), | |
417 | {'email': email, |
|
417 | {'email': email, | |
418 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) |
|
418 | '_session_csrf_secret_token': self.session_csrf_secret_token()}) | |
419 |
|
419 | |||
420 | self.checkSessionFlash(response, 'A password reset confirmation code has been sent') |
|
420 | self.checkSessionFlash(response, 'A password reset confirmation code has been sent') | |
421 |
|
421 | |||
422 | ((recipients, subject, body, html_body),) = collected |
|
422 | ((recipients, subject, body, html_body),) = collected | |
423 | assert recipients == ['username@example.com'] |
|
423 | assert recipients == ['username@example.com'] | |
424 | assert subject == 'Password reset link' |
|
424 | assert subject == 'Password reset link' | |
425 | assert '\n%s\n' % token in body |
|
425 | assert '\n%s\n' % token in body | |
426 | (confirmation_url,) = (line for line in body.splitlines() if line.startswith('http://')) |
|
426 | (confirmation_url,) = (line for line in body.splitlines() if line.startswith('http://')) | |
427 | assert ' href="%s"' % confirmation_url.replace('&', '&').replace('@', '%40') in html_body |
|
427 | assert ' href="%s"' % confirmation_url.replace('&', '&').replace('@', '%40') in html_body | |
428 |
|
428 | |||
429 | d = urllib.parse.parse_qs(urllib.parse.urlparse(confirmation_url).query) |
|
429 | d = urllib.parse.parse_qs(urllib.parse.urlparse(confirmation_url).query) | |
430 | assert d['token'] == [token] |
|
430 | assert d['token'] == [token] | |
431 | assert d['timestamp'] == [str(timestamp)] |
|
431 | assert d['timestamp'] == [str(timestamp)] | |
432 | assert d['email'] == [email] |
|
432 | assert d['email'] == [email] | |
433 |
|
433 | |||
434 | response = response.follow() |
|
434 | response = response.follow() | |
435 |
|
435 | |||
436 | # BAD TOKEN |
|
436 | # BAD TOKEN | |
437 |
|
437 | |||
438 | bad_token = "bad" |
|
438 | bad_token = "bad" | |
439 |
|
439 | |||
440 | response = self.app.post(base.url(controller='login', |
|
440 | response = self.app.post(base.url(controller='login', | |
441 | action='password_reset_confirmation'), |
|
441 | action='password_reset_confirmation'), | |
442 | {'email': email, |
|
442 | {'email': email, | |
443 | 'timestamp': timestamp, |
|
443 | 'timestamp': timestamp, | |
444 | 'password': "p@ssw0rd", |
|
444 | 'password': "p@ssw0rd", | |
445 | 'password_confirm': "p@ssw0rd", |
|
445 | 'password_confirm': "p@ssw0rd", | |
446 | 'token': bad_token, |
|
446 | 'token': bad_token, | |
447 | '_session_csrf_secret_token': self.session_csrf_secret_token(), |
|
447 | '_session_csrf_secret_token': self.session_csrf_secret_token(), | |
448 | }) |
|
448 | }) | |
449 | assert response.status == '200 OK' |
|
449 | assert response.status == '200 OK' | |
450 | response.mustcontain('Invalid password reset token') |
|
450 | response.mustcontain('Invalid password reset token') | |
451 |
|
451 | |||
452 | # GOOD TOKEN |
|
452 | # GOOD TOKEN | |
453 |
|
453 | |||
454 | response = self.app.get(confirmation_url) |
|
454 | response = self.app.get(confirmation_url) | |
455 | assert response.status == '200 OK' |
|
455 | assert response.status == '200 OK' | |
456 | response.mustcontain("You are about to set a new password for the email address %s" % email) |
|
456 | response.mustcontain("You are about to set a new password for the email address %s" % email) | |
457 | response.mustcontain('<form action="%s" method="post">' % base.url(controller='login', action='password_reset_confirmation')) |
|
457 | response.mustcontain('<form action="%s" method="post">' % base.url(controller='login', action='password_reset_confirmation')) | |
458 | response.mustcontain('value="%s"' % self.session_csrf_secret_token()) |
|
458 | response.mustcontain('value="%s"' % self.session_csrf_secret_token()) | |
459 | response.mustcontain('value="%s"' % token) |
|
459 | response.mustcontain('value="%s"' % token) | |
460 | response.mustcontain('value="%s"' % timestamp) |
|
460 | response.mustcontain('value="%s"' % timestamp) | |
461 | response.mustcontain('value="username@example.com"') |
|
461 | response.mustcontain('value="username@example.com"') | |
462 |
|
462 | |||
463 | # fake a submit of that form |
|
463 | # fake a submit of that form | |
464 | response = self.app.post(base.url(controller='login', |
|
464 | response = self.app.post(base.url(controller='login', | |
465 | action='password_reset_confirmation'), |
|
465 | action='password_reset_confirmation'), | |
466 | {'email': email, |
|
466 | {'email': email, | |
467 | 'timestamp': timestamp, |
|
467 | 'timestamp': timestamp, | |
468 | 'password': "p@ssw0rd", |
|
468 | 'password': "p@ssw0rd", | |
469 | 'password_confirm': "p@ssw0rd", |
|
469 | 'password_confirm': "p@ssw0rd", | |
470 | 'token': token, |
|
470 | 'token': token, | |
471 | '_session_csrf_secret_token': self.session_csrf_secret_token(), |
|
471 | '_session_csrf_secret_token': self.session_csrf_secret_token(), | |
472 | }) |
|
472 | }) | |
473 | assert response.status == '302 Found' |
|
473 | assert response.status == '302 Found' | |
474 | self.checkSessionFlash(response, 'Successfully updated password') |
|
474 | self.checkSessionFlash(response, 'Successfully updated password') | |
475 |
|
475 | |||
476 | response = response.follow() |
|
476 | response = response.follow() | |
477 |
|
477 | |||
478 | #========================================================================== |
|
478 | #========================================================================== | |
479 | # API |
|
479 | # API | |
480 | #========================================================================== |
|
480 | #========================================================================== | |
481 |
|
481 | |||
482 | def _api_key_test(self, api_key, status): |
|
482 | def _api_key_test(self, api_key, status): | |
483 | """Verifies HTTP status code for accessing an auth-requiring page, |
|
483 | """Verifies HTTP status code for accessing an auth-requiring page, | |
484 | using the given api_key URL parameter as well as using the API key |
|
484 | using the given api_key URL parameter as well as using the API key | |
485 | with bearer authentication. |
|
485 | with bearer authentication. | |
486 |
|
486 | |||
487 | If api_key is None, no api_key is passed at all. If api_key is True, |
|
487 | If api_key is None, no api_key is passed at all. If api_key is True, | |
488 | a real, working API key is used. |
|
488 | a real, working API key is used. | |
489 | """ |
|
489 | """ | |
490 | with fixture.anon_access(False): |
|
490 | with fixture.anon_access(False): | |
491 | if api_key is None: |
|
491 | if api_key is None: | |
492 | params = {} |
|
492 | params = {} | |
493 | headers = {} |
|
493 | headers = {} | |
494 | else: |
|
494 | else: | |
495 | if api_key is True: |
|
495 | if api_key is True: | |
496 | api_key = db.User.get_first_admin().api_key |
|
496 | api_key = db.User.get_first_admin().api_key | |
497 | params = {'api_key': api_key} |
|
497 | params = {'api_key': api_key} | |
498 | headers = {'Authorization': 'Bearer ' + str(api_key)} |
|
498 | headers = {'Authorization': 'Bearer ' + str(api_key)} | |
499 |
|
499 | |||
500 | self.app.get(base.url(controller='changeset', action='changeset_raw', |
|
500 | self.app.get(base.url(controller='changeset', action='changeset_raw', | |
501 | repo_name=base.HG_REPO, revision='tip', **params), |
|
501 | repo_name=base.HG_REPO, revision='tip', **params), | |
502 | status=status) |
|
502 | status=status) | |
503 |
|
503 | |||
504 | self.app.get(base.url(controller='changeset', action='changeset_raw', |
|
504 | self.app.get(base.url(controller='changeset', action='changeset_raw', | |
505 | repo_name=base.HG_REPO, revision='tip'), |
|
505 | repo_name=base.HG_REPO, revision='tip'), | |
506 | headers=headers, |
|
506 | headers=headers, | |
507 | status=status) |
|
507 | status=status) | |
508 |
|
508 | |||
509 | @base.parametrize('test_name,api_key,code', [ |
|
509 | @base.parametrize('test_name,api_key,code', [ | |
510 | ('none', None, 302), |
|
510 | ('none', None, 302), | |
511 | ('empty_string', '', 403), |
|
511 | ('empty_string', '', 403), | |
512 | ('fake_number', '123456', 403), |
|
512 | ('fake_number', '123456', 403), | |
513 | ('fake_not_alnum', 'a-z', 403), |
|
513 | ('fake_not_alnum', 'a-z', 403), | |
514 | ('fake_api_key', '0123456789abcdef0123456789ABCDEF01234567', 403), |
|
514 | ('fake_api_key', '0123456789abcdef0123456789ABCDEF01234567', 403), | |
515 | ('proper_api_key', True, 200) |
|
515 | ('proper_api_key', True, 200) | |
516 | ]) |
|
516 | ]) | |
517 | def test_access_page_via_api_key(self, test_name, api_key, code): |
|
517 | def test_access_page_via_api_key(self, test_name, api_key, code): | |
518 | self._api_key_test(api_key, code) |
|
518 | self._api_key_test(api_key, code) | |
519 |
|
519 | |||
520 | def test_access_page_via_extra_api_key(self): |
|
520 | def test_access_page_via_extra_api_key(self): | |
521 | new_api_key = ApiKeyModel().create(base.TEST_USER_ADMIN_LOGIN, 'test') |
|
521 | new_api_key = ApiKeyModel().create(base.TEST_USER_ADMIN_LOGIN, 'test') | |
522 | meta.Session().commit() |
|
522 | meta.Session().commit() | |
523 | self._api_key_test(new_api_key.api_key, status=200) |
|
523 | self._api_key_test(new_api_key.api_key, status=200) | |
524 |
|
524 | |||
525 | def test_access_page_via_expired_api_key(self): |
|
525 | def test_access_page_via_expired_api_key(self): | |
526 | new_api_key = ApiKeyModel().create(base.TEST_USER_ADMIN_LOGIN, 'test') |
|
526 | new_api_key = ApiKeyModel().create(base.TEST_USER_ADMIN_LOGIN, 'test') | |
527 | meta.Session().commit() |
|
527 | meta.Session().commit() | |
528 | # patch the API key and make it expired |
|
528 | # patch the API key and make it expired | |
529 | new_api_key.expires = 0 |
|
529 | new_api_key.expires = 0 | |
530 | meta.Session().commit() |
|
530 | meta.Session().commit() | |
531 | self._api_key_test(new_api_key.api_key, status=403) |
|
531 | self._api_key_test(new_api_key.api_key, status=403) |
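The came_from tests above all decode a URL twice: once to pull the came_from parameter out of the outer query string of the redirect location or form action, and once to inspect the GET arguments embedded inside it. Below is a minimal standalone sketch of that double-decoding step; the redirect URL is a constructed example, not captured test output.

    import urllib.parse

    def came_from_args(location):
        """Return the query arguments carried inside the came_from parameter
        of a login redirect URL, as a sorted list of (key, value) pairs."""
        outer_query = urllib.parse.urlparse(location).query
        came_from = urllib.parse.parse_qs(outer_query)['came_from'][0]
        return sorted(urllib.parse.parse_qsl(urllib.parse.urlparse(came_from).query))

    # Constructed example: a login redirect carrying non-ASCII GET arguments.
    location = ('/_admin/login?came_from='
                '%2F_admin%2Fusers%3Fblue%3Dbl%25C3%25A5%26green%3Dgr%25C3%25B8n')
    assert came_from_args(location) == sorted({'blue': 'blå', 'green': 'grøn'}.items())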
@@ -1,174 +1,174 b'' | |||||
1 | import os |
|
1 | import os | |
2 | import re |
|
2 | import re | |
3 |
|
3 | |||
4 | import mock |
|
4 | import mock | |
5 | from tg.util.webtest import test_context |
|
5 | from tg.util.webtest import test_context | |
6 |
|
6 | |||
7 | import kallithea.lib.celerylib |
|
7 | import kallithea.lib.celerylib | |
8 | import kallithea.lib.celerylib.tasks |
|
|||
9 | import kallithea.lib.helpers as h |
|
8 | import kallithea.lib.helpers as h | |
|
9 | import kallithea.model.async_tasks | |||
10 | from kallithea.model import db, meta |
|
10 | from kallithea.model import db, meta | |
11 | from kallithea.model.notification import EmailNotificationModel, NotificationModel |
|
11 | from kallithea.model.notification import EmailNotificationModel, NotificationModel | |
12 | from kallithea.model.user import UserModel |
|
12 | from kallithea.model.user import UserModel | |
13 | from kallithea.tests import base |
|
13 | from kallithea.tests import base | |
14 |
|
14 | |||
15 |
|
15 | |||
16 | class TestNotifications(base.TestController): |
|
16 | class TestNotifications(base.TestController): | |
17 |
|
17 | |||
18 | def setup_method(self, method): |
|
18 | def setup_method(self, method): | |
19 | meta.Session.remove() |
|
19 | meta.Session.remove() | |
20 | u1 = UserModel().create_or_update(username='u1', |
|
20 | u1 = UserModel().create_or_update(username='u1', | |
21 | password='qweqwe', |
|
21 | password='qweqwe', | |
22 | email='u1@example.com', |
|
22 | email='u1@example.com', | |
23 | firstname='u1', lastname='u1') |
|
23 | firstname='u1', lastname='u1') | |
24 | meta.Session().commit() |
|
24 | meta.Session().commit() | |
25 | self.u1 = u1.user_id |
|
25 | self.u1 = u1.user_id | |
26 |
|
26 | |||
27 | u2 = UserModel().create_or_update(username='u2', |
|
27 | u2 = UserModel().create_or_update(username='u2', | |
28 | password='qweqwe', |
|
28 | password='qweqwe', | |
29 | email='u2@example.com', |
|
29 | email='u2@example.com', | |
30 | firstname='u2', lastname='u3') |
|
30 | firstname='u2', lastname='u3') | |
31 | meta.Session().commit() |
|
31 | meta.Session().commit() | |
32 | self.u2 = u2.user_id |
|
32 | self.u2 = u2.user_id | |
33 |
|
33 | |||
34 | u3 = UserModel().create_or_update(username='u3', |
|
34 | u3 = UserModel().create_or_update(username='u3', | |
35 | password='qweqwe', |
|
35 | password='qweqwe', | |
36 | email='u3@example.com', |
|
36 | email='u3@example.com', | |
37 | firstname='u3', lastname='u3') |
|
37 | firstname='u3', lastname='u3') | |
38 | meta.Session().commit() |
|
38 | meta.Session().commit() | |
39 | self.u3 = u3.user_id |
|
39 | self.u3 = u3.user_id | |
40 |
|
40 | |||
41 | def test_create_notification(self): |
|
41 | def test_create_notification(self): | |
42 | with test_context(self.app): |
|
42 | with test_context(self.app): | |
43 | usrs = [self.u1, self.u2] |
|
43 | usrs = [self.u1, self.u2] | |
44 |
|
44 | |||
45 | def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None): |
|
45 | def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None): | |
46 | assert recipients == ['u2@example.com'] |
|
46 | assert recipients == ['u2@example.com'] | |
47 | assert subject == 'Test Message' |
|
47 | assert subject == 'Test Message' | |
48 | assert body == "hi there" |
|
48 | assert body == "hi there" | |
49 | assert '>hi there<' in html_body |
|
49 | assert '>hi there<' in html_body | |
50 | assert from_name == 'u1 u1' |
|
50 | assert from_name == 'u1 u1' | |
51 | with mock.patch.object(kallithea.l |
|
51 | with mock.patch.object(kallithea.model.async_tasks, 'send_email', send_email): | |
52 | NotificationModel().create(created_by=self.u1, |
|
52 | NotificationModel().create(created_by=self.u1, | |
53 | body='hi there', |
|
53 | body='hi there', | |
54 | recipients=usrs) |
|
54 | recipients=usrs) | |
55 |
|
55 | |||
56 | @mock.patch.object(h, 'canonical_url', (lambda arg, **kwargs: 'http://%s/?%s' % (arg, '&'.join('%s=%s' % (k, v) for (k, v) in sorted(kwargs.items()))))) |
|
56 | @mock.patch.object(h, 'canonical_url', (lambda arg, **kwargs: 'http://%s/?%s' % (arg, '&'.join('%s=%s' % (k, v) for (k, v) in sorted(kwargs.items()))))) | |
57 | def test_dump_html_mails(self): |
|
57 | def test_dump_html_mails(self): | |
58 | # Exercise all notification types and dump them to one big html file |
|
58 | # Exercise all notification types and dump them to one big html file | |
59 | l = [] |
|
59 | l = [] | |
60 |
|
60 | |||
61 | def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None): |
|
61 | def send_email(recipients, subject, body='', html_body='', headers=None, from_name=None): | |
62 | l.append('<hr/>\n') |
|
62 | l.append('<hr/>\n') | |
63 | l.append('<h1>%s</h1>\n' % desc) # desc is from outer scope |
|
63 | l.append('<h1>%s</h1>\n' % desc) # desc is from outer scope | |
64 | l.append('<pre>\n') |
|
64 | l.append('<pre>\n') | |
65 | l.append('From: %s <name@example.com>\n' % from_name) |
|
65 | l.append('From: %s <name@example.com>\n' % from_name) | |
66 | l.append('To: %s\n' % ' '.join(recipients)) |
|
66 | l.append('To: %s\n' % ' '.join(recipients)) | |
67 | l.append('Subject: %s\n' % subject) |
|
67 | l.append('Subject: %s\n' % subject) | |
68 | l.append('</pre>\n') |
|
68 | l.append('</pre>\n') | |
69 | l.append('<hr/>\n') |
|
69 | l.append('<hr/>\n') | |
70 | l.append('<pre>%s</pre>\n' % body) |
|
70 | l.append('<pre>%s</pre>\n' % body) | |
71 | l.append('<hr/>\n') |
|
71 | l.append('<hr/>\n') | |
72 | l.append(html_body) |
|
72 | l.append(html_body) | |
73 | l.append('<hr/>\n') |
|
73 | l.append('<hr/>\n') | |
74 |
|
74 | |||
75 | with test_context(self.app): |
|
75 | with test_context(self.app): | |
76 | with mock.patch.object(kallithea.l |
|
76 | with mock.patch.object(kallithea.model.async_tasks, 'send_email', send_email): | |
77 | pr_kwargs = dict( |
|
77 | pr_kwargs = dict( | |
78 | pr_nice_id='#7', |
|
78 | pr_nice_id='#7', | |
79 | pr_title='The Title', |
|
79 | pr_title='The Title', | |
80 | pr_title_short='The Title', |
|
80 | pr_title_short='The Title', | |
81 | pr_url='http://pr.org/7', |
|
81 | pr_url='http://pr.org/7', | |
82 | pr_target_repo='http://mainline.com/repo', |
|
82 | pr_target_repo='http://mainline.com/repo', | |
83 | pr_target_branch='trunk', |
|
83 | pr_target_branch='trunk', | |
84 | pr_source_repo='https://dev.org/repo', |
|
84 | pr_source_repo='https://dev.org/repo', | |
85 | pr_source_branch='devbranch', |
|
85 | pr_source_branch='devbranch', | |
86 | pr_owner=db.User.get(self.u2), |
|
86 | pr_owner=db.User.get(self.u2), | |
87 | pr_owner_username='u2' |
|
87 | pr_owner_username='u2' | |
88 | ) |
|
88 | ) | |
89 |
|
89 | |||
90 | for type_, body, kwargs in [ |
|
90 | for type_, body, kwargs in [ | |
91 | (NotificationModel.TYPE_CHANGESET_COMMENT, |
|
91 | (NotificationModel.TYPE_CHANGESET_COMMENT, | |
92 | 'This is the new \'comment\'.\n\n - and here it ends indented.', |
|
92 | 'This is the new \'comment\'.\n\n - and here it ends indented.', | |
93 | dict( |
|
93 | dict( | |
94 | short_id='cafe1234', |
|
94 | short_id='cafe1234', | |
95 | raw_id='cafe1234c0ffeecafe', |
|
95 | raw_id='cafe1234c0ffeecafe', | |
96 | branch='brunch', |
|
96 | branch='brunch', | |
97 | cs_comment_user='Opinionated User (jsmith)', |
|
97 | cs_comment_user='Opinionated User (jsmith)', | |
98 | cs_comment_url='http://comment.org', |
|
98 | cs_comment_url='http://comment.org', | |
99 | is_mention=[False, True], |
|
99 | is_mention=[False, True], | |
100 | message='This changeset did something clever which is hard to explain', |
|
100 | message='This changeset did something clever which is hard to explain', | |
101 | message_short='This changeset did something cl...', |
|
101 | message_short='This changeset did something cl...', | |
102 | status_change=[None, 'Approved'], |
|
102 | status_change=[None, 'Approved'], | |
103 | cs_target_repo='http://example.com/repo_target', |
|
103 | cs_target_repo='http://example.com/repo_target', | |
104 | cs_url='http://changeset.com', |
|
104 | cs_url='http://changeset.com', | |
105 | cs_author_username=db.User.get(self.u2).username, |
|
105 | cs_author_username=db.User.get(self.u2).username, | |
106 | cs_author=db.User.get(self.u2))), |
|
106 | cs_author=db.User.get(self.u2))), | |
107 | (NotificationModel.TYPE_MESSAGE, |
|
107 | (NotificationModel.TYPE_MESSAGE, | |
108 | 'This is the \'body\' of the "test" message\n - nothing interesting here except indentation.', |
|
108 | 'This is the \'body\' of the "test" message\n - nothing interesting here except indentation.', | |
109 | dict()), |
|
109 | dict()), | |
110 | #(NotificationModel.TYPE_MENTION, '$body', None), # not used |
|
110 | #(NotificationModel.TYPE_MENTION, '$body', None), # not used | |
111 | (NotificationModel.TYPE_REGISTRATION, |
|
111 | (NotificationModel.TYPE_REGISTRATION, | |
112 | 'Registration body', |
|
112 | 'Registration body', | |
113 | dict( |
|
113 | dict( | |
114 | new_username='newbie', |
|
114 | new_username='newbie', | |
115 | registered_user_url='http://newbie.org', |
|
115 | registered_user_url='http://newbie.org', | |
116 | new_email='new@email.com', |
|
116 | new_email='new@email.com', | |
117 | new_full_name='New Full Name')), |
|
117 | new_full_name='New Full Name')), | |
118 | (NotificationModel.TYPE_PULL_REQUEST, |
|
118 | (NotificationModel.TYPE_PULL_REQUEST, | |
119 | 'This PR is \'awesome\' because it does <stuff>\n - please approve indented!', |
|
119 | 'This PR is \'awesome\' because it does <stuff>\n - please approve indented!', | |
120 | dict( |
|
120 | dict( | |
121 | pr_user_created='Requesting User (root)', # pr_owner should perhaps be used for @mention in description ... |
|
121 | pr_user_created='Requesting User (root)', # pr_owner should perhaps be used for @mention in description ... | |
122 | is_mention=[False, True], |
|
122 | is_mention=[False, True], | |
123 | pr_revisions=[('123abc'*7, "Introduce one and two\n\nand that's it"), ('567fed'*7, 'Make one plus two equal tree')], |
|
123 | pr_revisions=[('123abc'*7, "Introduce one and two\n\nand that's it"), ('567fed'*7, 'Make one plus two equal tree')], | |
124 | org_repo_name='repo_org', |
|
124 | org_repo_name='repo_org', | |
125 | **pr_kwargs)), |
|
125 | **pr_kwargs)), | |
126 | (NotificationModel.TYPE_PULL_REQUEST_COMMENT, |
|
126 | (NotificationModel.TYPE_PULL_REQUEST_COMMENT, | |
127 | 'Me too!\n\n - and indented on second line', |
|
127 | 'Me too!\n\n - and indented on second line', | |
128 | dict( |
|
128 | dict( | |
129 | closing_pr=[False, True], |
|
129 | closing_pr=[False, True], | |
130 | is_mention=[False, True], |
|
130 | is_mention=[False, True], | |
131 | pr_comment_user='Opinionated User (jsmith)', |
|
131 | pr_comment_user='Opinionated User (jsmith)', | |
132 | pr_comment_url='http://pr.org/comment', |
|
132 | pr_comment_url='http://pr.org/comment', | |
133 | status_change=[None, 'Under Review'], |
|
133 | status_change=[None, 'Under Review'], | |
134 | **pr_kwargs)), |
|
134 | **pr_kwargs)), | |
135 | ]: |
|
135 | ]: | |
136 | kwargs['repo_name'] = 'repo/name' |
|
136 | kwargs['repo_name'] = 'repo/name' | |
137 | params = [(type_, type_, body, kwargs)] |
|
137 | params = [(type_, type_, body, kwargs)] | |
138 | for param_name in ['is_mention', 'status_change', 'closing_pr']: # TODO: inline/general |
|
138 | for param_name in ['is_mention', 'status_change', 'closing_pr']: # TODO: inline/general | |
139 | if not isinstance(kwargs.get(param_name), list): |
|
139 | if not isinstance(kwargs.get(param_name), list): | |
140 | continue |
|
140 | continue | |
141 | new_params = [] |
|
141 | new_params = [] | |
142 | for v in kwargs[param_name]: |
|
142 | for v in kwargs[param_name]: | |
143 | for desc, type_, body, kwargs in params: |
|
143 | for desc, type_, body, kwargs in params: | |
144 | kwargs = dict(kwargs) |
|
144 | kwargs = dict(kwargs) | |
145 | kwargs[param_name] = v |
|
145 | kwargs[param_name] = v | |
146 | new_params.append(('%s, %s=%r' % (desc, param_name, v), type_, body, kwargs)) |
|
146 | new_params.append(('%s, %s=%r' % (desc, param_name, v), type_, body, kwargs)) | |
147 | params = new_params |
|
147 | params = new_params | |
148 |
|
148 | |||
149 | for desc, type_, body, kwargs in params: |
|
149 | for desc, type_, body, kwargs in params: | |
150 | # desc is used as "global" variable |
|
150 | # desc is used as "global" variable | |
151 | NotificationModel().create(created_by=self.u1, |
|
151 | NotificationModel().create(created_by=self.u1, | |
152 | body=body, email_kwargs=kwargs, |
|
152 | body=body, email_kwargs=kwargs, | |
153 | recipients=[self.u2], type_=type_) |
|
153 | recipients=[self.u2], type_=type_) | |
154 |
|
154 | |||
155 | # Email type TYPE_PASSWORD_RESET has no corresponding notification type - test it directly: |
|
155 | # Email type TYPE_PASSWORD_RESET has no corresponding notification type - test it directly: | |
156 | desc = 'TYPE_PASSWORD_RESET' |
|
156 | desc = 'TYPE_PASSWORD_RESET' | |
157 | kwargs = dict(user='John Doe', reset_token='decbf64715098db5b0bd23eab44bd792670ab746', reset_url='http://reset.com/decbf64715098db5b0bd23eab44bd792670ab746') |
|
157 | kwargs = dict(user='John Doe', reset_token='decbf64715098db5b0bd23eab44bd792670ab746', reset_url='http://reset.com/decbf64715098db5b0bd23eab44bd792670ab746') | |
158 | kallithea.l |
|
158 | kallithea.model.async_tasks.send_email(['john@doe.com'], | |
159 | "Password reset link", |
|
159 | "Password reset link", | |
160 | EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'txt', **kwargs), |
|
160 | EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'txt', **kwargs), | |
161 | EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'html', **kwargs), |
|
161 | EmailNotificationModel().get_email_tmpl(EmailNotificationModel.TYPE_PASSWORD_RESET, 'html', **kwargs), | |
162 | from_name=db.User.get(self.u1).full_name_or_username) |
|
162 | from_name=db.User.get(self.u1).full_name_or_username) | |
163 |
|
163 | |||
164 | out = '<!doctype html>\n<html lang="en">\n<head><title>Notifications</title><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"></head>\n<body>\n%s\n</body>\n</html>\n' % \ |
|
164 | out = '<!doctype html>\n<html lang="en">\n<head><title>Notifications</title><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"></head>\n<body>\n%s\n</body>\n</html>\n' % \ | |
165 | re.sub(r'<(/?(?:!doctype|html|head|title|meta|body)\b[^>]*)>', r'<!--\1-->', ''.join(l)) |
|
165 | re.sub(r'<(/?(?:!doctype|html|head|title|meta|body)\b[^>]*)>', r'<!--\1-->', ''.join(l)) | |
166 |
|
166 | |||
167 | outfn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.out.html') |
|
167 | outfn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.out.html') | |
168 | reffn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.ref.html') |
|
168 | reffn = os.path.join(os.path.dirname(__file__), 'test_dump_html_mails.ref.html') | |
169 | with open(outfn, 'w') as f: |
|
169 | with open(outfn, 'w') as f: | |
170 | f.write(out) |
|
170 | f.write(out) | |
171 | with open(reffn) as f: |
|
171 | with open(reffn) as f: | |
172 | ref = f.read() |
|
172 | ref = f.read() | |
173 | assert ref == out # copy test_dump_html_mails.out.html to test_dump_html_mails.ref.html to update expectations |
|
173 | assert ref == out # copy test_dump_html_mails.out.html to test_dump_html_mails.ref.html to update expectations | |
174 | os.unlink(outfn) |
|
174 | os.unlink(outfn) |
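The notification tests above (and the password-reset test in the previous file) never send real mail: they patch the module-level send_email with a stub that records its arguments and then assert on what was recorded. The sketch below shows that capture pattern in isolation; the notify module and reset_password function are invented stand-ins, not Kallithea's API.

    import types
    from unittest import mock

    # Stand-in module playing the role of the task module that owns send_email.
    notify = types.ModuleType('notify')

    def _real_send_email(recipients, subject, body='', html_body='',
                         headers=None, from_name=None):
        raise AssertionError('tests must never send real mail')

    notify.send_email = _real_send_email

    def reset_password(email, token):
        # Hypothetical caller that delegates mail delivery to the task module.
        notify.send_email([email], 'Password reset link', body='\n%s\n' % token)

    collected = []
    def fake_send_email(recipients, subject, body='', html_body='',
                        headers=None, from_name=None):
        collected.append((recipients, subject, body, html_body))

    with mock.patch.object(notify, 'send_email', fake_send_email):
        reset_password('username@example.com', 'token123')

    ((recipients, subject, body, html_body),) = collected
    assert recipients == ['username@example.com']
    assert subject == 'Password reset link'
    assert '\ntoken123\n' in body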
@@ -1,196 +1,196 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | import mock |
|
3 | import mock | |
4 |
|
4 | |||
5 | import kallithea |
|
5 | import kallithea | |
6 | from kallithea.model import db |
|
6 | from kallithea.model import db | |
7 | from kallithea.tests import base |
|
7 | from kallithea.tests import base | |
8 |
|
8 | |||
9 |
|
9 | |||
10 | class smtplib_mock(object): |
|
10 | class smtplib_mock(object): | |
11 |
|
11 | |||
12 | @classmethod |
|
12 | @classmethod | |
13 | def SMTP(cls, server, port): |
|
13 | def SMTP(cls, server, port): | |
14 | return smtplib_mock() |
|
14 | return smtplib_mock() | |
15 |
|
15 | |||
16 | def ehlo(self): |
|
16 | def ehlo(self): | |
17 | pass |
|
17 | pass | |
18 |
|
18 | |||
19 | def quit(self): |
|
19 | def quit(self): | |
20 | pass |
|
20 | pass | |
21 |
|
21 | |||
22 | def sendmail(self, sender, dest, msg): |
|
22 | def sendmail(self, sender, dest, msg): | |
23 | smtplib_mock.lastsender = sender |
|
23 | smtplib_mock.lastsender = sender | |
24 | smtplib_mock.lastdest = set(dest) |
|
24 | smtplib_mock.lastdest = set(dest) | |
25 | smtplib_mock.lastmsg = msg |
|
25 | smtplib_mock.lastmsg = msg | |
26 |
|
26 | |||
27 |
|
27 | |||
28 | @mock.patch('kallithea. |
|
28 | @mock.patch('kallithea.model.async_tasks.smtplib', smtplib_mock) | |
29 | class TestMail(base.TestController): |
|
29 | class TestMail(base.TestController): | |
30 |
|
30 | |||
31 | def test_send_mail_trivial(self): |
|
31 | def test_send_mail_trivial(self): | |
32 | mailserver = 'smtp.mailserver.org' |
|
32 | mailserver = 'smtp.mailserver.org' | |
33 | recipients = ['rcpt1', 'rcpt2'] |
|
33 | recipients = ['rcpt1', 'rcpt2'] | |
34 | envelope_from = 'noreply@mailserver.org' |
|
34 | envelope_from = 'noreply@mailserver.org' | |
35 | subject = 'subject' |
|
35 | subject = 'subject' | |
36 | body = 'body' |
|
36 | body = 'body' | |
37 | html_body = 'html_body' |
|
37 | html_body = 'html_body' | |
38 |
|
38 | |||
39 | config_mock = { |
|
39 | config_mock = { | |
40 | 'smtp_server': mailserver, |
|
40 | 'smtp_server': mailserver, | |
41 | 'app_email_from': envelope_from, |
|
41 | 'app_email_from': envelope_from, | |
42 | } |
|
42 | } | |
43 | with mock.patch('kallithea. |
|
43 | with mock.patch('kallithea.model.async_tasks.config', config_mock): | |
44 | kallithea.l |
|
44 | kallithea.model.async_tasks.send_email(recipients, subject, body, html_body) | |
45 |
|
45 | |||
46 | assert smtplib_mock.lastdest == set(recipients) |
|
46 | assert smtplib_mock.lastdest == set(recipients) | |
47 | assert smtplib_mock.lastsender == envelope_from |
|
47 | assert smtplib_mock.lastsender == envelope_from | |
48 | assert 'From: %s' % envelope_from in smtplib_mock.lastmsg |
|
48 | assert 'From: %s' % envelope_from in smtplib_mock.lastmsg | |
49 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg |
|
49 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg | |
50 | assert body in smtplib_mock.lastmsg |
|
50 | assert body in smtplib_mock.lastmsg | |
51 | assert html_body in smtplib_mock.lastmsg |
|
51 | assert html_body in smtplib_mock.lastmsg | |
52 |
|
52 | |||
53 | def test_send_mail_no_recipients(self): |
|
53 | def test_send_mail_no_recipients(self): | |
54 | mailserver = 'smtp.mailserver.org' |
|
54 | mailserver = 'smtp.mailserver.org' | |
55 | recipients = [] |
|
55 | recipients = [] | |
56 | envelope_from = 'noreply@mailserver.org' |
|
56 | envelope_from = 'noreply@mailserver.org' | |
57 | email_to = 'admin@mailserver.org' |
|
57 | email_to = 'admin@mailserver.org' | |
58 | subject = 'subject' |
|
58 | subject = 'subject' | |
59 | body = 'body' |
|
59 | body = 'body' | |
60 | html_body = 'html_body' |
|
60 | html_body = 'html_body' | |
61 |
|
61 | |||
62 | config_mock = { |
|
62 | config_mock = { | |
63 | 'smtp_server': mailserver, |
|
63 | 'smtp_server': mailserver, | |
64 | 'app_email_from': envelope_from, |
|
64 | 'app_email_from': envelope_from, | |
65 | 'email_to': email_to, |
|
65 | 'email_to': email_to, | |
66 | } |
|
66 | } | |
67 | with mock.patch('kallithea. |
|
67 | with mock.patch('kallithea.model.async_tasks.config', config_mock): | |
68 | kallithea.l |
|
68 | kallithea.model.async_tasks.send_email(recipients, subject, body, html_body) | |
69 |
|
69 | |||
70 | assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL, email_to]) |
|
70 | assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL, email_to]) | |
71 | assert smtplib_mock.lastsender == envelope_from |
|
71 | assert smtplib_mock.lastsender == envelope_from | |
72 | assert 'From: %s' % envelope_from in smtplib_mock.lastmsg |
|
72 | assert 'From: %s' % envelope_from in smtplib_mock.lastmsg | |
73 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg |
|
73 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg | |
74 | assert body in smtplib_mock.lastmsg |
|
74 | assert body in smtplib_mock.lastmsg | |
75 | assert html_body in smtplib_mock.lastmsg |
|
75 | assert html_body in smtplib_mock.lastmsg | |
76 |
|
76 | |||
77 | def test_send_mail_no_recipients_multiple_email_to(self): |
|
77 | def test_send_mail_no_recipients_multiple_email_to(self): | |
78 | mailserver = 'smtp.mailserver.org' |
|
78 | mailserver = 'smtp.mailserver.org' | |
79 | recipients = [] |
|
79 | recipients = [] | |
80 | envelope_from = 'noreply@mailserver.org' |
|
80 | envelope_from = 'noreply@mailserver.org' | |
81 | email_to = 'admin@mailserver.org,admin2@example.com' |
|
81 | email_to = 'admin@mailserver.org,admin2@example.com' | |
82 | subject = 'subject' |
|
82 | subject = 'subject' | |
83 | body = 'body' |
|
83 | body = 'body' | |
84 | html_body = 'html_body' |
|
84 | html_body = 'html_body' | |
85 |
|
85 | |||
86 | config_mock = { |
|
86 | config_mock = { | |
87 | 'smtp_server': mailserver, |
|
87 | 'smtp_server': mailserver, | |
88 | 'app_email_from': envelope_from, |
|
88 | 'app_email_from': envelope_from, | |
89 | 'email_to': email_to, |
|
89 | 'email_to': email_to, | |
90 | } |
|
90 | } | |
91 | with mock.patch('kallithea. |
|
91 | with mock.patch('kallithea.model.async_tasks.config', config_mock): | |
92 | kallithea.l |
|
92 | kallithea.model.async_tasks.send_email(recipients, subject, body, html_body) | |
93 |
|
93 | |||
94 | assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL] + email_to.split(',')) |
|
94 | assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL] + email_to.split(',')) | |
95 | assert smtplib_mock.lastsender == envelope_from |
|
95 | assert smtplib_mock.lastsender == envelope_from | |
96 | assert 'From: %s' % envelope_from in smtplib_mock.lastmsg |
|
96 | assert 'From: %s' % envelope_from in smtplib_mock.lastmsg | |
97 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg |
|
97 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg | |
98 | assert body in smtplib_mock.lastmsg |
|
98 | assert body in smtplib_mock.lastmsg | |
99 | assert html_body in smtplib_mock.lastmsg |
|
99 | assert html_body in smtplib_mock.lastmsg | |
100 |
|
100 | |||
101 | def test_send_mail_no_recipients_no_email_to(self): |
|
101 | def test_send_mail_no_recipients_no_email_to(self): | |
102 | mailserver = 'smtp.mailserver.org' |
|
102 | mailserver = 'smtp.mailserver.org' | |
103 | recipients = [] |
|
103 | recipients = [] | |
104 | envelope_from = 'noreply@mailserver.org' |
|
104 | envelope_from = 'noreply@mailserver.org' | |
105 | subject = 'subject' |
|
105 | subject = 'subject' | |
106 | body = 'body' |
|
106 | body = 'body' | |
107 | html_body = 'html_body' |
|
107 | html_body = 'html_body' | |
108 |
|
108 | |||
109 | config_mock = { |
|
109 | config_mock = { | |
110 | 'smtp_server': mailserver, |
|
110 | 'smtp_server': mailserver, | |
111 | 'app_email_from': envelope_from, |
|
111 | 'app_email_from': envelope_from, | |
112 | } |
|
112 | } | |
113 | with mock.patch('kallithea. |
|
113 | with mock.patch('kallithea.model.async_tasks.config', config_mock): | |
114 | kallithea.l |
|
114 | kallithea.model.async_tasks.send_email(recipients, subject, body, html_body) | |
115 |
|
115 | |||
116 | assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL]) |
|
116 | assert smtplib_mock.lastdest == set([base.TEST_USER_ADMIN_EMAIL]) | |
117 | assert smtplib_mock.lastsender == envelope_from |
|
117 | assert smtplib_mock.lastsender == envelope_from | |
118 | assert 'From: %s' % envelope_from in smtplib_mock.lastmsg |
|
118 | assert 'From: %s' % envelope_from in smtplib_mock.lastmsg | |
119 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg |
|
119 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg | |
120 | assert body in smtplib_mock.lastmsg |
|
120 | assert body in smtplib_mock.lastmsg | |
121 | assert html_body in smtplib_mock.lastmsg |
|
121 | assert html_body in smtplib_mock.lastmsg | |
122 |
|
122 | |||
123 | def test_send_mail_with_author(self): |
|
123 | def test_send_mail_with_author(self): | |
124 | mailserver = 'smtp.mailserver.org' |
|
124 | mailserver = 'smtp.mailserver.org' | |
125 | recipients = ['rcpt1', 'rcpt2'] |
|
125 | recipients = ['rcpt1', 'rcpt2'] | |
126 | envelope_from = 'noreply@mailserver.org' |
|
126 | envelope_from = 'noreply@mailserver.org' | |
127 | subject = 'subject' |
|
127 | subject = 'subject' | |
128 | body = 'body' |
|
128 | body = 'body' | |
129 | html_body = 'html_body' |
|
129 | html_body = 'html_body' | |
130 | author = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) |
|
130 | author = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) | |
131 |
|
131 | |||
132 | config_mock = { |
|
132 | config_mock = { | |
133 | 'smtp_server': mailserver, |
|
133 | 'smtp_server': mailserver, | |
134 | 'app_email_from': envelope_from, |
|
134 | 'app_email_from': envelope_from, | |
135 | } |
|
135 | } | |
136 | with mock.patch('kallithea. |
|
136 | with mock.patch('kallithea.model.async_tasks.config', config_mock): | |
137 | kallithea.l |
|
137 | kallithea.model.async_tasks.send_email(recipients, subject, body, html_body, from_name=author.full_name_or_username) | |
138 |
|
138 | |||
139 | assert smtplib_mock.lastdest == set(recipients) |
|
139 | assert smtplib_mock.lastdest == set(recipients) | |
140 | assert smtplib_mock.lastsender == envelope_from |
|
140 | assert smtplib_mock.lastsender == envelope_from | |
141 | assert 'From: "Kallithea Admin (no-reply)" <%s>' % envelope_from in smtplib_mock.lastmsg |
|
141 | assert 'From: "Kallithea Admin (no-reply)" <%s>' % envelope_from in smtplib_mock.lastmsg | |
142 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg |
|
142 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg | |
143 | assert body in smtplib_mock.lastmsg |
|
143 | assert body in smtplib_mock.lastmsg | |
144 | assert html_body in smtplib_mock.lastmsg |
|
144 | assert html_body in smtplib_mock.lastmsg | |
145 |
|
145 | |||
146 | def test_send_mail_with_author_full_mail_from(self): |
|
146 | def test_send_mail_with_author_full_mail_from(self): | |
147 | mailserver = 'smtp.mailserver.org' |
|
147 | mailserver = 'smtp.mailserver.org' | |
148 | recipients = ['ræcpt1', 'receptor2 <rcpt2@example.com>', 'tæst@example.com', 'Tæst <test@example.com>'] |
|
148 | recipients = ['ræcpt1', 'receptor2 <rcpt2@example.com>', 'tæst@example.com', 'Tæst <test@example.com>'] | |
149 | envelope_addr = 'noreply@mailserver.org' |
|
149 | envelope_addr = 'noreply@mailserver.org' | |
150 | envelope_from = 'Sâme Næme <%s>' % envelope_addr |
|
150 | envelope_from = 'Sâme Næme <%s>' % envelope_addr | |
151 | subject = 'subject' |
|
151 | subject = 'subject' | |
152 | body = 'body' |
|
152 | body = 'body' | |
153 | html_body = 'html_body' |
|
153 | html_body = 'html_body' | |
154 | author = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) |
|
154 | author = db.User.get_by_username(base.TEST_USER_REGULAR_LOGIN) | |
155 |
|
155 | |||
156 | config_mock = { |
|
156 | config_mock = { | |
157 | 'smtp_server': mailserver, |
|
157 | 'smtp_server': mailserver, | |
158 | 'app_email_from': envelope_from, |
|
158 | 'app_email_from': envelope_from, | |
159 | } |
|
159 | } | |
160 | with mock.patch('kallithea. |
|
160 | with mock.patch('kallithea.model.async_tasks.config', config_mock): | |
161 | kallithea.l |
|
161 | kallithea.model.async_tasks.send_email(recipients, subject, body, html_body, from_name=author.full_name_or_username) | |
162 |
|
162 | |||
163 | assert smtplib_mock.lastdest == set(recipients) |
|
163 | assert smtplib_mock.lastdest == set(recipients) | |
164 | assert smtplib_mock.lastsender == envelope_from |
|
164 | assert smtplib_mock.lastsender == envelope_from | |
165 | assert 'From: "Kallithea Admin (no-reply)" <%s>' % envelope_addr in smtplib_mock.lastmsg |
|
165 | assert 'From: "Kallithea Admin (no-reply)" <%s>' % envelope_addr in smtplib_mock.lastmsg | |
166 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg |
|
166 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg | |
167 | assert body in smtplib_mock.lastmsg |
|
167 | assert body in smtplib_mock.lastmsg | |
168 | assert html_body in smtplib_mock.lastmsg |
|
168 | assert html_body in smtplib_mock.lastmsg | |
169 |
|
169 | |||
170 | def test_send_mail_extra_headers(self): |
|
170 | def test_send_mail_extra_headers(self): | |
171 | mailserver = 'smtp.mailserver.org' |
|
171 | mailserver = 'smtp.mailserver.org' | |
172 | recipients = ['rcpt1', 'rcpt2'] |
|
172 | recipients = ['rcpt1', 'rcpt2'] | |
173 | envelope_from = 'noreply@mailserver.org' |
|
173 | envelope_from = 'noreply@mailserver.org' | |
174 | subject = 'subject' |
|
174 | subject = 'subject' | |
175 | body = 'body' |
|
175 | body = 'body' | |
176 | html_body = 'html_body' |
|
176 | html_body = 'html_body' | |
177 | author = db.User(name='foo', lastname='(fubar) "baz"') |
|
177 | author = db.User(name='foo', lastname='(fubar) "baz"') | |
178 | headers = {'extra': 'yes'} |
|
178 | headers = {'extra': 'yes'} | |
179 |
|
179 | |||
180 | config_mock = { |
|
180 | config_mock = { | |
181 | 'smtp_server': mailserver, |
|
181 | 'smtp_server': mailserver, | |
182 | 'app_email_from': envelope_from, |
|
182 | 'app_email_from': envelope_from, | |
183 | } |
|
183 | } | |
184 | with mock.patch('kallithea.lib.celerylib.tasks.config', config_mock): |
|
184 | with mock.patch('kallithea.model.async_tasks.config', config_mock): | |
185 | kallithea.lib.celerylib.tasks.send_email(recipients, subject, body, html_body, |
|
185 | kallithea.model.async_tasks.send_email(recipients, subject, body, html_body, | |
186 | from_name=author.full_name_or_username, headers=headers) |
|
186 | from_name=author.full_name_or_username, headers=headers) | |
187 |
|
187 | |||
188 | assert smtplib_mock.lastdest == set(recipients) |
|
188 | assert smtplib_mock.lastdest == set(recipients) | |
189 | assert smtplib_mock.lastsender == envelope_from |
|
189 | assert smtplib_mock.lastsender == envelope_from | |
190 | assert r'From: "foo (fubar) \"baz\" (no-reply)" <%s>' % envelope_from in smtplib_mock.lastmsg |
|
190 | assert r'From: "foo (fubar) \"baz\" (no-reply)" <%s>' % envelope_from in smtplib_mock.lastmsg | |
191 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg |
|
191 | assert 'Subject: %s' % subject in smtplib_mock.lastmsg | |
192 | assert body in smtplib_mock.lastmsg |
|
192 | assert body in smtplib_mock.lastmsg | |
193 | assert html_body in smtplib_mock.lastmsg |
|
193 | assert html_body in smtplib_mock.lastmsg | |
194 | assert 'extra: yes' in smtplib_mock.lastmsg |
|
194 | assert 'extra: yes' in smtplib_mock.lastmsg | |
195 | # verify that the headers dict hasn't been mutated by send_email |
|
195 | # verify that the headers dict hasn't been mutated by send_email | |
196 | assert headers == {'extra': 'yes'} |
|
196 | assert headers == {'extra': 'yes'} |
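The tests above rely on patching the module-level `config` mapping so that send_email sees test values and the original is restored afterwards. Below is a minimal sketch of that pattern, using a made-up stand-in module rather than kallithea.model.async_tasks; all names in it are illustrative only.

    from unittest import mock
    import types

    # Stand-in for a module that reads settings from a module-level `config` dict.
    fake_mod = types.ModuleType('fake_mod')
    fake_mod.config = {'smtp_server': 'real.example.com'}

    def current_server():
        # Looked up at call time, so a patched dict is visible here.
        return fake_mod.config['smtp_server']

    with mock.patch.object(fake_mod, 'config', {'smtp_server': 'smtp.mailserver.org'}):
        assert current_server() == 'smtp.mailserver.org'   # patched value in effect
    assert current_server() == 'real.example.com'          # original restored on exit

mock.patch / mock.patch.object undoes the replacement when the `with` block exits, which is why each test can build its own config_mock without leaking state into the other tests.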
@@ -1,295 +1,295 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 |
|
2 | |||
3 |
|
3 | |||
4 | import re |
|
4 | import re | |
5 | import sys |
|
5 | import sys | |
6 |
|
6 | |||
7 |
|
7 | |||
8 | ignored_modules = set(''' |
|
8 | ignored_modules = set(''' | |
9 | argparse |
|
9 | argparse | |
10 | base64 |
|
10 | base64 | |
11 | bcrypt |
|
11 | bcrypt | |
12 | binascii |
|
12 | binascii | |
13 | bleach |
|
13 | bleach | |
14 | calendar |
|
14 | calendar | |
15 | celery |
|
15 | celery | |
16 | celery |
|
16 | celery | |
17 | chardet |
|
17 | chardet | |
18 | click |
|
18 | click | |
19 | collections |
|
19 | collections | |
20 | configparser |
|
20 | configparser | |
21 | copy |
|
21 | copy | |
22 | csv |
|
22 | csv | |
23 | ctypes |
|
23 | ctypes | |
24 | datetime |
|
24 | datetime | |
25 | dateutil |
|
25 | dateutil | |
26 | decimal |
|
26 | decimal | |
27 | decorator |
|
27 | decorator | |
28 | difflib |
|
28 | difflib | |
29 | distutils |
|
29 | distutils | |
30 | docutils |
|
30 | docutils | |
31 |
|
31 | |||
32 | errno |
|
32 | errno | |
33 | fileinput |
|
33 | fileinput | |
34 | functools |
|
34 | functools | |
35 | getpass |
|
35 | getpass | |
36 | grp |
|
36 | grp | |
37 | hashlib |
|
37 | hashlib | |
38 | hmac |
|
38 | hmac | |
39 | html |
|
39 | html | |
40 | http |
|
40 | http | |
41 | imp |
|
41 | imp | |
42 | importlib |
|
42 | importlib | |
43 | inspect |
|
43 | inspect | |
44 | io |
|
44 | io | |
45 | ipaddr |
|
45 | ipaddr | |
46 | IPython |
|
46 | IPython | |
47 | isapi_wsgi |
|
47 | isapi_wsgi | |
48 | itertools |
|
48 | itertools | |
49 | json |
|
49 | json | |
50 | kajiki |
|
50 | kajiki | |
51 | ldap |
|
51 | ldap | |
52 | logging |
|
52 | logging | |
53 | mako |
|
53 | mako | |
54 | markdown |
|
54 | markdown | |
55 | mimetypes |
|
55 | mimetypes | |
56 | mock |
|
56 | mock | |
57 | msvcrt |
|
57 | msvcrt | |
58 | multiprocessing |
|
58 | multiprocessing | |
59 | operator |
|
59 | operator | |
60 | os |
|
60 | os | |
61 | paginate |
|
61 | paginate | |
62 | paginate_sqlalchemy |
|
62 | paginate_sqlalchemy | |
63 | pam |
|
63 | pam | |
64 | paste |
|
64 | paste | |
65 | pkg_resources |
|
65 | pkg_resources | |
66 | platform |
|
66 | platform | |
67 | posixpath |
|
67 | posixpath | |
68 | pprint |
|
68 | pprint | |
69 | pwd |
|
69 | pwd | |
70 | pyflakes |
|
70 | pyflakes | |
71 | pytest |
|
71 | pytest | |
72 | pytest_localserver |
|
72 | pytest_localserver | |
73 | random |
|
73 | random | |
74 | re |
|
74 | re | |
75 | routes |
|
75 | routes | |
76 | setuptools |
|
76 | setuptools | |
77 | shlex |
|
77 | shlex | |
78 | shutil |
|
78 | shutil | |
79 | smtplib |
|
79 | smtplib | |
80 | socket |
|
80 | socket | |
81 | ssl |
|
81 | ssl | |
82 | stat |
|
82 | stat | |
83 | string |
|
83 | string | |
84 | struct |
|
84 | struct | |
85 | subprocess |
|
85 | subprocess | |
86 | sys |
|
86 | sys | |
87 | tarfile |
|
87 | tarfile | |
88 | tempfile |
|
88 | tempfile | |
89 | textwrap |
|
89 | textwrap | |
90 | tgext |
|
90 | tgext | |
91 | threading |
|
91 | threading | |
92 | time |
|
92 | time | |
93 | traceback |
|
93 | traceback | |
94 | traitlets |
|
94 | traitlets | |
95 | types |
|
95 | types | |
96 | urllib |
|
96 | urllib | |
97 | urlobject |
|
97 | urlobject | |
98 | uuid |
|
98 | uuid | |
99 | warnings |
|
99 | warnings | |
100 | webhelpers2 |
|
100 | webhelpers2 | |
101 | webob |
|
101 | webob | |
102 | webtest |
|
102 | webtest | |
103 | whoosh |
|
103 | whoosh | |
104 | win32traceutil |
|
104 | win32traceutil | |
105 | zipfile |
|
105 | zipfile | |
106 | '''.split()) |
|
106 | '''.split()) | |
107 |
|
107 | |||
108 | top_modules = set(''' |
|
108 | top_modules = set(''' | |
109 | kallithea.alembic |
|
109 | kallithea.alembic | |
110 | kallithea.bin |
|
110 | kallithea.bin | |
111 | kallithea.config |
|
111 | kallithea.config | |
112 | kallithea.controllers |
|
112 | kallithea.controllers | |
113 | kallithea.templates.py |
|
113 | kallithea.templates.py | |
114 | scripts |
|
114 | scripts | |
115 | '''.split()) |
|
115 | '''.split()) | |
116 |
|
116 | |||
117 | bottom_external_modules = set(''' |
|
117 | bottom_external_modules = set(''' | |
118 | tg |
|
118 | tg | |
119 | mercurial |
|
119 | mercurial | |
120 | sqlalchemy |
|
120 | sqlalchemy | |
121 | alembic |
|
121 | alembic | |
122 | formencode |
|
122 | formencode | |
123 | pygments |
|
123 | pygments | |
124 | dulwich |
|
124 | dulwich | |
125 | beaker |
|
125 | beaker | |
126 | psycopg2 |
|
126 | psycopg2 | |
127 | docs |
|
127 | docs | |
128 | setup |
|
128 | setup | |
129 | conftest |
|
129 | conftest | |
130 | '''.split()) |
|
130 | '''.split()) | |
131 |
|
131 | |||
132 | normal_modules = set(''' |
|
132 | normal_modules = set(''' | |
133 | kallithea |
|
133 | kallithea | |
134 | kallithea.lib.celerylib.tasks |
|
|||
135 | kallithea.lib |
|
134 | kallithea.lib | |
136 | kallithea.lib.auth |
|
135 | kallithea.lib.auth | |
137 | kallithea.lib.auth_modules |
|
136 | kallithea.lib.auth_modules | |
138 | kallithea.lib.base |
|
137 | kallithea.lib.base | |
139 | kallithea.lib.celerylib |
|
138 | kallithea.lib.celerylib | |
140 | kallithea.lib.db_manage |
|
139 | kallithea.lib.db_manage | |
141 | kallithea.lib.helpers |
|
140 | kallithea.lib.helpers | |
142 | kallithea.lib.hooks |
|
141 | kallithea.lib.hooks | |
143 | kallithea.lib.indexers |
|
142 | kallithea.lib.indexers | |
144 | kallithea.lib.utils |
|
143 | kallithea.lib.utils | |
145 | kallithea.lib.utils2 |
|
144 | kallithea.lib.utils2 | |
146 | kallithea.lib.vcs |
|
145 | kallithea.lib.vcs | |
147 | kallithea.lib.webutils |
|
146 | kallithea.lib.webutils | |
148 | kallithea.model |
|
147 | kallithea.model | |
|
148 | kallithea.model.async_tasks | |||
149 | kallithea.model.scm |
|
149 | kallithea.model.scm | |
150 | kallithea.templates.py |
|
150 | kallithea.templates.py | |
151 | '''.split()) |
|
151 | '''.split()) | |
152 |
|
152 | |||
153 | shown_modules = normal_modules | top_modules |
|
153 | shown_modules = normal_modules | top_modules | |
154 |
|
154 | |||
155 | # break the chains somehow - this is a cleanup TODO list |
|
155 | # break the chains somehow - this is a cleanup TODO list | |
156 | known_violations = [ |
|
156 | known_violations = [ | |
157 | ('kallithea.lib.auth_modules', 'kallithea.lib.auth'), # needs base&facade |
|
157 | ('kallithea.lib.auth_modules', 'kallithea.lib.auth'), # needs base&facade | |
158 | ('kallithea.lib.utils', 'kallithea.model'), # clean up utils |
|
158 | ('kallithea.lib.utils', 'kallithea.model'), # clean up utils | |
159 | ('kallithea.lib.utils', 'kallithea.model.db'), |
|
159 | ('kallithea.lib.utils', 'kallithea.model.db'), | |
160 | ('kallithea.lib.utils', 'kallithea.model.scm'), |
|
160 | ('kallithea.lib.utils', 'kallithea.model.scm'), | |
161 | ('kallithea.lib.celerylib.tasks', 'kallithea.lib.helpers'), |
|
161 | ('kallithea.model.async_tasks', 'kallithea.lib.helpers'), | |
162 | ('kallithea.lib.celerylib.tasks', 'kallithea.lib.hooks'), |
|
162 | ('kallithea.model.async_tasks', 'kallithea.lib.hooks'), | |
163 | ('kallithea.lib.celerylib.tasks', 'kallithea.lib.indexers'), |
|
163 | ('kallithea.model.async_tasks', 'kallithea.lib.indexers'), | |
164 | ('kallithea.lib.celerylib.tasks', 'kallithea.model'), |
|
164 | ('kallithea.model.async_tasks', 'kallithea.model'), | |
165 | ('kallithea.model', 'kallithea.lib.auth'), # auth.HasXXX |
|
165 | ('kallithea.model', 'kallithea.lib.auth'), # auth.HasXXX | |
166 | ('kallithea.model', 'kallithea.lib.auth_modules'), # validators |
|
166 | ('kallithea.model', 'kallithea.lib.auth_modules'), # validators | |
167 | ('kallithea.model', 'kallithea.lib.helpers'), |
|
167 | ('kallithea.model', 'kallithea.lib.helpers'), | |
168 | ('kallithea.model', 'kallithea.lib.hooks'), # clean up hooks |
|
168 | ('kallithea.model', 'kallithea.lib.hooks'), # clean up hooks | |
169 | ('kallithea.model', 'kallithea.model.scm'), |
|
169 | ('kallithea.model', 'kallithea.model.scm'), | |
170 | ('kallithea.model.scm', 'kallithea.lib.hooks'), |
|
170 | ('kallithea.model.scm', 'kallithea.lib.hooks'), | |
171 | ] |
|
171 | ] | |
172 |
|
172 | |||
173 | extra_edges = [ |
|
173 | extra_edges = [ | |
174 | ('kallithea.config', 'kallithea.controllers'), # through TG |
|
174 | ('kallithea.config', 'kallithea.controllers'), # through TG | |
175 | ('kallithea.lib.auth', 'kallithea.lib.auth_modules'), # custom loader |
|
175 | ('kallithea.lib.auth', 'kallithea.lib.auth_modules'), # custom loader | |
176 | ] |
|
176 | ] | |
177 |
|
177 | |||
178 |
|
178 | |||
179 | def normalize(s): |
|
179 | def normalize(s): | |
180 | """Given a string with dot path, return the string it should be shown as.""" |
|
180 | """Given a string with dot path, return the string it should be shown as.""" | |
181 | parts = s.replace('.__init__', '').split('.') |
|
181 | parts = s.replace('.__init__', '').split('.') | |
182 | short_2 = '.'.join(parts[:2]) |
|
182 | short_2 = '.'.join(parts[:2]) | |
183 | short_3 = '.'.join(parts[:3]) |
|
183 | short_3 = '.'.join(parts[:3]) | |
184 | short_4 = '.'.join(parts[:4]) |
|
184 | short_4 = '.'.join(parts[:4]) | |
185 | if parts[0] in ['scripts', 'contributor_data', 'i18n_utils']: |
|
185 | if parts[0] in ['scripts', 'contributor_data', 'i18n_utils']: | |
186 | return 'scripts' |
|
186 | return 'scripts' | |
187 | if short_3 == 'kallithea.model.meta': |
|
187 | if short_3 == 'kallithea.model.meta': | |
188 | return 'kallithea.model.db' |
|
188 | return 'kallithea.model.db' | |
189 | if parts[:4] == ['kallithea', 'lib', 'vcs', 'ssh']: |
|
189 | if parts[:4] == ['kallithea', 'lib', 'vcs', 'ssh']: | |
190 | return 'kallithea.lib.vcs.ssh' |
|
190 | return 'kallithea.lib.vcs.ssh' | |
191 | if short_4 in shown_modules: |
|
191 | if short_4 in shown_modules: | |
192 | return short_4 |
|
192 | return short_4 | |
193 | if short_3 in shown_modules: |
|
193 | if short_3 in shown_modules: | |
194 | return short_3 |
|
194 | return short_3 | |
195 | if short_2 in shown_modules: |
|
195 | if short_2 in shown_modules: | |
196 | return short_2 |
|
196 | return short_2 | |
197 | if short_2 == 'kallithea.tests': |
|
197 | if short_2 == 'kallithea.tests': | |
198 | return None |
|
198 | return None | |
199 | if parts[0] in ignored_modules: |
|
199 | if parts[0] in ignored_modules: | |
200 | return None |
|
200 | return None | |
201 | assert parts[0] in bottom_external_modules, parts |
|
201 | assert parts[0] in bottom_external_modules, parts | |
202 | return parts[0] |
|
202 | return parts[0] | |
203 |
|
203 | |||
204 |
|
204 | |||
205 | def main(filenames): |
|
205 | def main(filenames): | |
206 | if not filenames or filenames[0].startswith('-'): |
|
206 | if not filenames or filenames[0].startswith('-'): | |
207 | print('''\ |
|
207 | print('''\ | |
208 | Usage: |
|
208 | Usage: | |
209 | hg files 'set:!binary()&grep("^#!.*python")' 'set:**.py' | xargs scripts/deps.py |
|
209 | hg files 'set:!binary()&grep("^#!.*python")' 'set:**.py' | xargs scripts/deps.py | |
210 | dot -Tsvg deps.dot > deps.svg |
|
210 | dot -Tsvg deps.dot > deps.svg | |
211 | ''') |
|
211 | ''') | |
212 | raise SystemExit(1) |
|
212 | raise SystemExit(1) | |
213 |
|
213 | |||
214 | files_imports = dict() # map each filename to its imports |
|
214 | files_imports = dict() # map each filename to its imports | |
215 | import_deps = set() # set of tuples with module name and its imports |
|
215 | import_deps = set() # set of tuples with module name and its imports | |
216 | for fn in filenames: |
|
216 | for fn in filenames: | |
217 | with open(fn) as f: |
|
217 | with open(fn) as f: | |
218 | s = f.read() |
|
218 | s = f.read() | |
219 |
|
219 | |||
220 | dot_name = (fn[:-3] if fn.endswith('.py') else fn).replace('/', '.') |
|
220 | dot_name = (fn[:-3] if fn.endswith('.py') else fn).replace('/', '.') | |
221 | file_imports = set() |
|
221 | file_imports = set() | |
222 | for m in re.finditer(r'^ *(?:from ([^ ]*) import (?:([a-zA-Z].*)|\(([^)]*)\))|import (.*))$', s, re.MULTILINE): |
|
222 | for m in re.finditer(r'^ *(?:from ([^ ]*) import (?:([a-zA-Z].*)|\(([^)]*)\))|import (.*))$', s, re.MULTILINE): | |
223 | m_from, m_from_import, m_from_import2, m_import = m.groups() |
|
223 | m_from, m_from_import, m_from_import2, m_import = m.groups() | |
224 | if m_from: |
|
224 | if m_from: | |
225 | pre = m_from + '.' |
|
225 | pre = m_from + '.' | |
226 | if pre.startswith('.'): |
|
226 | if pre.startswith('.'): | |
227 | pre = dot_name.rsplit('.', 1)[0] + pre |
|
227 | pre = dot_name.rsplit('.', 1)[0] + pre | |
228 | importlist = m_from_import or m_from_import2 |
|
228 | importlist = m_from_import or m_from_import2 | |
229 | else: |
|
229 | else: | |
230 | pre = '' |
|
230 | pre = '' | |
231 | importlist = m_import |
|
231 | importlist = m_import | |
232 | for imp in importlist.split('#', 1)[0].split(','): |
|
232 | for imp in importlist.split('#', 1)[0].split(','): | |
233 | full_imp = pre + imp.strip().split(' as ', 1)[0] |
|
233 | full_imp = pre + imp.strip().split(' as ', 1)[0] | |
234 | file_imports.add(full_imp) |
|
234 | file_imports.add(full_imp) | |
235 | import_deps.add((dot_name, full_imp)) |
|
235 | import_deps.add((dot_name, full_imp)) | |
236 | files_imports[fn] = file_imports |
|
236 | files_imports[fn] = file_imports | |
237 |
|
237 | |||
238 | # dump out all deps for debugging and analysis |
|
238 | # dump out all deps for debugging and analysis | |
239 | with open('deps.txt', 'w') as f: |
|
239 | with open('deps.txt', 'w') as f: | |
240 | for fn, file_imports in sorted(files_imports.items()): |
|
240 | for fn, file_imports in sorted(files_imports.items()): | |
241 | for file_import in sorted(file_imports): |
|
241 | for file_import in sorted(file_imports): | |
242 | if file_import.split('.', 1)[0] in ignored_modules: |
|
242 | if file_import.split('.', 1)[0] in ignored_modules: | |
243 | continue |
|
243 | continue | |
244 | f.write('%s: %s\n' % (fn, file_import)) |
|
244 | f.write('%s: %s\n' % (fn, file_import)) | |
245 |
|
245 | |||
246 | # find leaves that haven't been ignored - they are the important external dependencies and are shown in the bottom row |
|
246 | # find leaves that haven't been ignored - they are the important external dependencies and are shown in the bottom row | |
247 | only_imported = set( |
|
247 | only_imported = set( | |
248 | set(normalize(b) for a, b in import_deps) - |
|
248 | set(normalize(b) for a, b in import_deps) - | |
249 | set(normalize(a) for a, b in import_deps) - |
|
249 | set(normalize(a) for a, b in import_deps) - | |
250 | set([None, 'kallithea']) |
|
250 | set([None, 'kallithea']) | |
251 | ) |
|
251 | ) | |
252 |
|
252 | |||
253 | normalized_dep_edges = set() |
|
253 | normalized_dep_edges = set() | |
254 | for dot_name, full_imp in import_deps: |
|
254 | for dot_name, full_imp in import_deps: | |
255 | a = normalize(dot_name) |
|
255 | a = normalize(dot_name) | |
256 | b = normalize(full_imp) |
|
256 | b = normalize(full_imp) | |
257 | if a is None or b is None or a == b: |
|
257 | if a is None or b is None or a == b: | |
258 | continue |
|
258 | continue | |
259 | normalized_dep_edges.add((a, b)) |
|
259 | normalized_dep_edges.add((a, b)) | |
260 | #print((dot_name, full_imp, a, b)) |
|
260 | #print((dot_name, full_imp, a, b)) | |
261 | normalized_dep_edges.update(extra_edges) |
|
261 | normalized_dep_edges.update(extra_edges) | |
262 |
|
262 | |||
263 | unseen_shown_modules = shown_modules.difference(a for a, b in normalized_dep_edges).difference(b for a, b in normalized_dep_edges) |
|
263 | unseen_shown_modules = shown_modules.difference(a for a, b in normalized_dep_edges).difference(b for a, b in normalized_dep_edges) | |
264 | assert not unseen_shown_modules, unseen_shown_modules |
|
264 | assert not unseen_shown_modules, unseen_shown_modules | |
265 |
|
265 | |||
266 | with open('deps.dot', 'w') as f: |
|
266 | with open('deps.dot', 'w') as f: | |
267 | f.write('digraph {\n') |
|
267 | f.write('digraph {\n') | |
268 | f.write('subgraph { rank = same; %s}\n' % ''.join('"%s"; ' % s for s in sorted(top_modules))) |
|
268 | f.write('subgraph { rank = same; %s}\n' % ''.join('"%s"; ' % s for s in sorted(top_modules))) | |
269 | f.write('subgraph { rank = same; %s}\n' % ''.join('"%s"; ' % s for s in sorted(only_imported))) |
|
269 | f.write('subgraph { rank = same; %s}\n' % ''.join('"%s"; ' % s for s in sorted(only_imported))) | |
270 | for a, b in sorted(normalized_dep_edges): |
|
270 | for a, b in sorted(normalized_dep_edges): | |
271 | f.write(' "%s" -> "%s"%s\n' % (a, b, ' [color=red]' if (a, b) in known_violations else ' [color=green]' if (a, b) in extra_edges else '')) |
|
271 | f.write(' "%s" -> "%s"%s\n' % (a, b, ' [color=red]' if (a, b) in known_violations else ' [color=green]' if (a, b) in extra_edges else '')) | |
272 | f.write('}\n') |
|
272 | f.write('}\n') | |
273 |
|
273 | |||
274 | # verify dependencies by untangling dependency chain bottom-up: |
|
274 | # verify dependencies by untangling dependency chain bottom-up: | |
275 | todo = set(normalized_dep_edges) |
|
275 | todo = set(normalized_dep_edges) | |
276 | for x in known_violations: |
|
276 | for x in known_violations: | |
277 | todo.remove(x) |
|
277 | todo.remove(x) | |
278 |
|
278 | |||
279 | while todo: |
|
279 | while todo: | |
280 | depending = set(a for a, b in todo) |
|
280 | depending = set(a for a, b in todo) | |
281 | depended = set(b for a, b in todo) |
|
281 | depended = set(b for a, b in todo) | |
282 | drop = depended - depending |
|
282 | drop = depended - depending | |
283 | if not drop: |
|
283 | if not drop: | |
284 | print('ERROR: cycles:', len(todo)) |
|
284 | print('ERROR: cycles:', len(todo)) | |
285 | for x in sorted(todo): |
|
285 | for x in sorted(todo): | |
286 | print('%s,' % (x,)) |
|
286 | print('%s,' % (x,)) | |
287 | raise SystemExit(1) |
|
287 | raise SystemExit(1) | |
288 | #for do_b in sorted(drop): |
|
288 | #for do_b in sorted(drop): | |
289 | # print('Picking', do_b, '- unblocks:', ' '.join(a for a, b in sorted((todo)) if b == do_b)) |
|
289 | # print('Picking', do_b, '- unblocks:', ' '.join(a for a, b in sorted((todo)) if b == do_b)) | |
290 | todo = set((a, b) for a, b in todo if b in depending) |
|
290 | todo = set((a, b) for a, b in todo if b in depending) | |
291 | #print() |
|
291 | #print() | |
292 |
|
292 | |||
293 |
|
293 | |||
294 | if __name__ == '__main__': |
|
294 | if __name__ == '__main__': | |
295 | main(sys.argv[1:]) |
|
295 | main(sys.argv[1:]) |
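The loop near the end of deps.py verifies the layering by pruning the normalized edge set bottom-up: any module that is only imported (and never itself imports anything within the remaining edges) can be dropped, and whatever is left once no further pruning is possible sits on an import cycle. A toy run of the same idea, with invented module names rather than real Kallithea packages:

    # Each pair means "left imports right"; a -> b -> c -> a forms a cycle.
    edges = {('a', 'b'), ('b', 'c'), ('c', 'a'), ('c', 'd')}

    todo = set(edges)
    while todo:
        depending = set(a for a, b in todo)  # modules that still import something
        depended = set(b for a, b in todo)   # modules that are still imported
        if not depended - depending:         # nothing is a pure leaf any more
            print('cycles remain:', sorted(todo))
            break
        # keep only edges whose target still imports something itself
        todo = set((a, b) for a, b in todo if b in depending)
    else:
        print('no cycles')

On the first pass 'd' is only imported, so ('c', 'd') is pruned; the three remaining edges can never be pruned and are reported, mirroring the "ERROR: cycles" branch in main().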