Show More
@@ -1,397 +1,396 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.controllers.admin.repo_groups |
|
15 | kallithea.controllers.admin.repo_groups | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Repository groups controller for Kallithea |
|
18 | Repository groups controller for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Mar 23, 2010 |
|
22 | :created_on: Mar 23, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import logging |
|
28 | import logging | |
29 | import traceback |
|
29 | import traceback | |
30 |
|
30 | |||
31 | import formencode |
|
31 | import formencode | |
32 | from formencode import htmlfill |
|
32 | from formencode import htmlfill | |
33 | from tg import app_globals, request |
|
33 | from tg import app_globals, request | |
34 | from tg import tmpl_context as c |
|
34 | from tg import tmpl_context as c | |
35 | from tg.i18n import ugettext as _ |
|
35 | from tg.i18n import ugettext as _ | |
36 | from tg.i18n import ungettext |
|
36 | from tg.i18n import ungettext | |
37 | from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound |
|
37 | from webob.exc import HTTPForbidden, HTTPFound, HTTPInternalServerError, HTTPNotFound | |
38 |
|
38 | |||
39 | from kallithea.config.routing import url |
|
39 | from kallithea.config.routing import url | |
40 | from kallithea.lib import helpers as h |
|
40 | from kallithea.lib import helpers as h | |
41 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoGroupPermissionLevelDecorator, LoginRequired |
|
41 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoGroupPermissionLevelDecorator, LoginRequired | |
42 | from kallithea.lib.base import BaseController, render |
|
42 | from kallithea.lib.base import BaseController, render | |
43 | from kallithea.lib.utils2 import safe_int |
|
43 | from kallithea.lib.utils2 import safe_int | |
44 | from kallithea.model.db import RepoGroup, Repository |
|
44 | from kallithea.model.db import RepoGroup, Repository | |
45 | from kallithea.model.forms import RepoGroupForm, RepoGroupPermsForm |
|
45 | from kallithea.model.forms import RepoGroupForm, RepoGroupPermsForm | |
46 | from kallithea.model.meta import Session |
|
46 | from kallithea.model.meta import Session | |
47 | from kallithea.model.repo import RepoModel |
|
47 | from kallithea.model.repo import RepoModel | |
48 | from kallithea.model.repo_group import RepoGroupModel |
|
48 | from kallithea.model.repo_group import RepoGroupModel | |
49 | from kallithea.model.scm import AvailableRepoGroupChoices, RepoGroupList |
|
49 | from kallithea.model.scm import AvailableRepoGroupChoices, RepoGroupList | |
50 |
|
50 | |||
51 |
|
51 | |||
52 | log = logging.getLogger(__name__) |
|
52 | log = logging.getLogger(__name__) | |
53 |
|
53 | |||
54 |
|
54 | |||
class RepoGroupsController(BaseController):
    """Admin CRUD controller for repository groups.

    All views redirect via ``raise HTTPFound(...)`` (the TG2/webob idiom for
    redirects) and render Mako templates through :func:`render`.  State shared
    with templates goes through the thread-local ``c`` (``tmpl_context``).
    """

    @LoginRequired(allow_default_user=True)
    def _before(self, *args, **kwargs):
        # Runs before every action; login is required but the anonymous
        # "default" user is allowed — per-action permission decorators below
        # do the fine-grained checks.
        super(RepoGroupsController, self)._before(*args, **kwargs)

    def __load_defaults(self, extras=(), exclude=()):
        """extras is used for keeping current parent ignoring permissions
        exclude is used for not moving group to itself TODO: also exclude descendants
        Note: only admin can create top level groups
        """
        repo_groups = AvailableRepoGroupChoices([], 'admin', extras)
        # each choice is a (group_id, group_name) pair; drop the excluded ids
        exclude_group_ids = set(rg.group_id for rg in exclude)
        c.repo_groups = [rg for rg in repo_groups
                         if rg[0] not in exclude_group_ids]

    def __load_data(self, group_id):
        """
        Load defaults settings for edit, and update

        :param group_id: database id of the repository group (404 if missing)
        :returns: dict of group fields plus per-user (``u_perm_<name>``) and
            per-user-group (``g_perm_<name>``) permission entries, as expected
            by the htmlfill-rendered edit forms
        """
        repo_group = RepoGroup.get_or_404(group_id)
        data = repo_group.get_dict()
        data['group_name'] = repo_group.name

        # fill repository group users
        for p in repo_group.repo_group_to_perm:
            data.update({'u_perm_%s' % p.user.username:
                             p.permission.permission_name})

        # fill repository group groups
        for p in repo_group.users_group_to_perm:
            data.update({'g_perm_%s' % p.users_group.users_group_name:
                             p.permission.permission_name})

        return data

    def _revoke_perms_on_yourself(self, form_result):
        """Return True if the submitted permission changes would strip the
        current (non-admin) user's own ``group.admin`` permission.

        ``perms_updates`` / ``perms_new`` entries are sequences whose first
        element is the username and second the permission name.
        """
        _up = [u for u in form_result['perms_updates'] if request.authuser.username == u[0]]
        _new = [u for u in form_result['perms_new'] if request.authuser.username == u[0]]
        # `and` binds tighter than `or`: true when either the new or the
        # updated entry for this user is anything other than group.admin
        if _new and _new[0][1] != 'group.admin' or _up and _up[0][1] != 'group.admin':
            return True
        return False

    def index(self, format='html'):
        """List all repository groups the user administers, as a data table."""
        _list = RepoGroup.query(sorted=True).all()
        # filter down to groups where the user has admin permission level
        group_iter = RepoGroupList(_list, perm_level='admin')
        repo_groups_data = []
        _tmpl_lookup = app_globals.mako_lookup
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')

        def repo_group_name(repo_group_name, children_groups):
            # render the group-name cell via a Mako <%def> from the shared
            # data-table template
            return template.get_def("repo_group_name") \
                .render_unicode(repo_group_name, children_groups, _=_, h=h, c=c)

        def repo_group_actions(repo_group_id, repo_group_name, gr_count):
            # render the per-row action buttons (edit/delete) cell
            return template.get_def("repo_group_actions") \
                .render_unicode(repo_group_id, repo_group_name, gr_count, _=_, h=h, c=c,
                                ungettext=ungettext)

        for repo_gr in group_iter:
            children_groups = [g.name for g in repo_gr.parents] + [repo_gr.name]
            repo_count = repo_gr.repositories.count()
            repo_groups_data.append({
                "raw_name": repo_gr.group_name,
                "group_name": repo_group_name(repo_gr.group_name, children_groups),
                "desc": h.escape(repo_gr.group_description),
                "repos": repo_count,
                "owner": h.person(repo_gr.owner),
                "action": repo_group_actions(repo_gr.group_id, repo_gr.group_name,
                                             repo_count)
            })

        c.data = {
            "sort": None,
            "dir": "asc",
            "records": repo_groups_data
        }

        return render('admin/repo_groups/repo_groups.html')

    def create(self):
        """Create a repository group from POSTed form data.

        On validation failure the add form is re-rendered with errors; on
        unexpected failure the user is flashed an error and redirected back;
        on success they are redirected to the new group's home page.
        """
        self.__load_defaults()

        # permissions for can create group based on parent_id are checked
        # here in the Form
        repo_group_form = RepoGroupForm(repo_groups=c.repo_groups)
        form_result = None
        try:
            form_result = repo_group_form.to_python(dict(request.POST))
            gr = RepoGroupModel().create(
                group_name=form_result['group_name'],
                group_description=form_result['group_description'],
                parent=form_result['parent_group_id'],
                owner=request.authuser.user_id, # TODO: make editable
                copy_permissions=form_result['group_copy_permissions']
            )
            Session().commit()
            # TODO: in future action_logger(, '', '', '')
        except formencode.Invalid as errors:
            return htmlfill.render(
                render('admin/repo_groups/repo_group_add.html'),
                defaults=errors.value,
                errors=errors.error_dict or {},
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False)
        except Exception:
            log.error(traceback.format_exc())
            h.flash(_('Error occurred during creation of repository group %s')
                    % request.POST.get('group_name'), category='error')
            # if validation never completed we have no parent to redirect to
            if form_result is None:
                raise
            parent_group_id = form_result['parent_group_id']
            # TODO: maybe we should get back to the main view, not the admin one
            raise HTTPFound(location=url('repos_groups', parent_group=parent_group_id))
        h.flash(_('Created repository group %s') % gr.group_name,
                category='success')
        raise HTTPFound(location=url('repos_group_home', group_name=gr.group_name))

    def new(self):
        """Show the "add repository group" form, enforcing creation perms."""
        if HasPermissionAny('hg.admin')('group create'):
            # we're global admin, we're ok and we can create TOP level groups
            pass
        else:
            # we pass in parent group into creation form, thus we know
            # what would be the group, we can check perms here !
            group_id = safe_int(request.GET.get('parent_group'))
            group = RepoGroup.get(group_id) if group_id else None
            group_name = group.group_name if group else None
            if HasRepoGroupPermissionLevel('admin')(group_name, 'group create'):
                pass
            else:
                raise HTTPForbidden()

        self.__load_defaults()
        return render('admin/repo_groups/repo_group_add.html')

    @HasRepoGroupPermissionLevelDecorator('admin')
    def update(self, group_name):
        """Update settings of the group identified by *group_name*, then
        redirect back to its edit page."""
        c.repo_group = RepoGroup.guess_instance(group_name)
        # keep the current parent selectable regardless of perms, but never
        # allow moving the group under itself
        self.__load_defaults(extras=[c.repo_group.parent_group],
                             exclude=[c.repo_group])

        # TODO: kill allow_empty_group - it is only used for redundant form validation!
        if HasPermissionAny('hg.admin')('group edit'):
            # we're global admin, we're ok and we can create TOP level groups
            allow_empty_group = True
        elif not c.repo_group.parent_group:
            allow_empty_group = True
        else:
            allow_empty_group = False
        repo_group_form = RepoGroupForm(
            edit=True,
            old_data=c.repo_group.get_dict(),
            repo_groups=c.repo_groups,
            can_create_in_root=allow_empty_group,
        )()
        try:
            form_result = repo_group_form.to_python(dict(request.POST))

            new_gr = RepoGroupModel().update(group_name, form_result)
            Session().commit()
            h.flash(_('Updated repository group %s')
                    % form_result['group_name'], category='success')
            # we now have new name !
            group_name = new_gr.group_name
            # TODO: in future action_logger(, '', '', '')
        except formencode.Invalid as errors:
            c.active = 'settings'
            return htmlfill.render(
                render('admin/repo_groups/repo_group_edit.html'),
                defaults=errors.value,
                errors=errors.error_dict or {},
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False)
        except Exception:
            log.error(traceback.format_exc())
            h.flash(_('Error occurred during update of repository group %s')
                    % request.POST.get('group_name'), category='error')

        raise HTTPFound(location=url('edit_repo_group', group_name=group_name))

    @HasRepoGroupPermissionLevelDecorator('admin')
    def delete(self, group_name):
        """Delete an empty repository group; refuse if it still contains
        repositories or subgroups, and redirect appropriately."""
        gr = c.repo_group = RepoGroup.guess_instance(group_name)
        repos = gr.repositories.all()
        if repos:
            h.flash(_('This group contains %s repositories and cannot be '
                      'deleted') % len(repos), category='warning')
            raise HTTPFound(location=url('repos_groups'))

        children = gr.children.all()
        if children:
            h.flash(_('This group contains %s subgroups and cannot be deleted'
                      % (len(children))), category='warning')
            raise HTTPFound(location=url('repos_groups'))

        try:
            RepoGroupModel().delete(group_name)
            Session().commit()
            h.flash(_('Removed repository group %s') % group_name,
                    category='success')
            # TODO: in future action_logger(, '', '', '')
        except Exception:
            log.error(traceback.format_exc())
            h.flash(_('Error occurred during deletion of repository group %s')
                    % group_name, category='error')

        # land the user on the parent group if there is one
        if gr.parent_group:
            raise HTTPFound(location=url('repos_group_home', group_name=gr.parent_group.group_name))
        raise HTTPFound(location=url('repos_groups'))

    def show_by_name(self, group_name):
        """
        This is a proxy that does a lookup group_name -> id, and shows
        the group by id view instead
        """
        group_name = group_name.rstrip('/')
        id_ = RepoGroup.get_by_group_name(group_name)
        if id_:
            return self.show(group_name)
        raise HTTPNotFound

    @HasRepoGroupPermissionLevelDecorator('read')
    def show(self, group_name):
        """Show a repository group's home page: its subgroups and repos."""
        c.active = 'settings'

        c.group = c.repo_group = RepoGroup.guess_instance(group_name)

        groups = RepoGroup.query(sorted=True).filter_by(parent_group=c.group).all()
        repo_groups_list = self.scm_model.get_repo_groups(groups)

        repos_list = Repository.query(sorted=True).filter_by(group=c.group).all()
        c.data = RepoModel().get_repos_as_dict(repos_list,
                                               repo_groups_list=repo_groups_list,
                                               short_name=True)

        return render('admin/repo_groups/repo_group_show.html')

    @HasRepoGroupPermissionLevelDecorator('admin')
    def edit(self, group_name):
        """Show the settings tab of the group edit page, pre-filled with the
        group's current values via htmlfill."""
        c.active = 'settings'

        c.repo_group = RepoGroup.guess_instance(group_name)
        self.__load_defaults(extras=[c.repo_group.parent_group],
                             exclude=[c.repo_group])
        defaults = self.__load_data(c.repo_group.group_id)

        return htmlfill.render(
            render('admin/repo_groups/repo_group_edit.html'),
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False
        )

    @HasRepoGroupPermissionLevelDecorator('admin')
    def edit_repo_group_advanced(self, group_name):
        """Show the advanced tab of the group edit page."""
        c.active = 'advanced'
        c.repo_group = RepoGroup.guess_instance(group_name)

        return render('admin/repo_groups/repo_group_edit.html')

    @HasRepoGroupPermissionLevelDecorator('admin')
    def edit_repo_group_perms(self, group_name):
        """Show the permissions tab of the group edit page, pre-filled with
        current user/user-group permissions."""
        c.active = 'perms'
        c.repo_group = RepoGroup.guess_instance(group_name)
        self.__load_defaults()
        defaults = self.__load_data(c.repo_group.group_id)

        return htmlfill.render(
            render('admin/repo_groups/repo_group_edit.html'),
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False
        )

    @HasRepoGroupPermissionLevelDecorator('admin')
    def update_perms(self, group_name):
        """
        Update permissions for given repository group

        :param group_name: name identifying the repository group
        """

        c.repo_group = RepoGroup.guess_instance(group_name)
        valid_recursive_choices = ['none', 'repos', 'groups', 'all']
        form_result = RepoGroupPermsForm(valid_recursive_choices)().to_python(request.POST)
        if not request.authuser.is_admin:
            # non-admins may not lock themselves out of their own group
            if self._revoke_perms_on_yourself(form_result):
                msg = _('Cannot revoke permission for yourself as admin')
                h.flash(msg, category='warning')
                raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name))
        recursive = form_result['recursive']
        # iterate over all members(if in recursive mode) of this groups and
        # set the permissions !
        # this can be potentially heavy operation
        RepoGroupModel()._update_permissions(c.repo_group,
                                             form_result['perms_new'],
                                             form_result['perms_updates'],
                                             recursive)
        # TODO: implement this
        #action_logger(request.authuser, 'admin_changed_repo_permissions',
        #              repo_name, request.ip_addr)
        Session().commit()
        h.flash(_('Repository group permissions updated'), category='success')
        raise HTTPFound(location=url('edit_repo_group_perms', group_name=group_name))

    @HasRepoGroupPermissionLevelDecorator('admin')
    def delete_perms(self, group_name):
        """Revoke a single user's or user group's permission on the group
        (optionally recursively), based on POSTed ``obj_type``/ids."""
        try:
            obj_type = request.POST.get('obj_type')
            obj_id = None
            if obj_type == 'user':
                obj_id = safe_int(request.POST.get('user_id'))
            elif obj_type == 'user_group':
                obj_id = safe_int(request.POST.get('user_group_id'))

            if not request.authuser.is_admin:
                # same self-lockout guard as update_perms, for single revokes
                if obj_type == 'user' and request.authuser.user_id == obj_id:
                    msg = _('Cannot revoke permission for yourself as admin')
                    h.flash(msg, category='warning')
                    raise Exception('revoke admin permission on self')
            recursive = request.POST.get('recursive', 'none')
            if obj_type == 'user':
                RepoGroupModel().delete_permission(repo_group=group_name,
                                                   obj=obj_id, obj_type='user',
                                                   recursive=recursive)
            elif obj_type == 'user_group':
                RepoGroupModel().delete_permission(repo_group=group_name,
                                                   obj=obj_id,
                                                   obj_type='user_group',
                                                   recursive=recursive)

            Session().commit()
        except Exception:
            log.error(traceback.format_exc())
            h.flash(_('An error occurred during revoking of permission'),
                    category='error')
            raise HTTPInternalServerError()
@@ -1,410 +1,409 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.controllers.admin.user_groups |
|
15 | kallithea.controllers.admin.user_groups | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | User Groups crud controller |
|
18 | User Groups crud controller | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Jan 25, 2011 |
|
22 | :created_on: Jan 25, 2011 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import logging |
|
28 | import logging | |
29 | import traceback |
|
29 | import traceback | |
30 |
|
30 | |||
31 | import formencode |
|
31 | import formencode | |
32 | from formencode import htmlfill |
|
32 | from formencode import htmlfill | |
33 | from sqlalchemy.orm import joinedload |
|
33 | from sqlalchemy.orm import joinedload | |
34 | from sqlalchemy.sql.expression import func |
|
34 | from sqlalchemy.sql.expression import func | |
35 | from tg import app_globals, request |
|
35 | from tg import app_globals, request | |
36 | from tg import tmpl_context as c |
|
36 | from tg import tmpl_context as c | |
37 | from tg.i18n import ugettext as _ |
|
37 | from tg.i18n import ugettext as _ | |
38 | from webob.exc import HTTPFound, HTTPInternalServerError |
|
38 | from webob.exc import HTTPFound, HTTPInternalServerError | |
39 |
|
39 | |||
40 | from kallithea.config.routing import url |
|
40 | from kallithea.config.routing import url | |
41 | from kallithea.lib import helpers as h |
|
41 | from kallithea.lib import helpers as h | |
42 | from kallithea.lib.auth import HasPermissionAnyDecorator, HasUserGroupPermissionLevelDecorator, LoginRequired |
|
42 | from kallithea.lib.auth import HasPermissionAnyDecorator, HasUserGroupPermissionLevelDecorator, LoginRequired | |
43 | from kallithea.lib.base import BaseController, render |
|
43 | from kallithea.lib.base import BaseController, render | |
44 | from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException |
|
44 | from kallithea.lib.exceptions import RepoGroupAssignmentError, UserGroupsAssignedException | |
45 | from kallithea.lib.utils import action_logger |
|
45 | from kallithea.lib.utils import action_logger | |
46 | from kallithea.lib.utils2 import safe_int, safe_str |
|
46 | from kallithea.lib.utils2 import safe_int, safe_str | |
47 | from kallithea.model.db import User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm |
|
47 | from kallithea.model.db import User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserGroupToPerm | |
48 | from kallithea.model.forms import CustomDefaultPermissionsForm, UserGroupForm, UserGroupPermsForm |
|
48 | from kallithea.model.forms import CustomDefaultPermissionsForm, UserGroupForm, UserGroupPermsForm | |
49 | from kallithea.model.meta import Session |
|
49 | from kallithea.model.meta import Session | |
50 | from kallithea.model.scm import UserGroupList |
|
50 | from kallithea.model.scm import UserGroupList | |
51 | from kallithea.model.user_group import UserGroupModel |
|
51 | from kallithea.model.user_group import UserGroupModel | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | log = logging.getLogger(__name__) |
|
54 | log = logging.getLogger(__name__) | |
55 |
|
55 | |||
56 |
|
56 | |||
57 | class UserGroupsController(BaseController): |
|
57 | class UserGroupsController(BaseController): | |
58 | """REST Controller styled on the Atom Publishing Protocol""" |
|
58 | """REST Controller styled on the Atom Publishing Protocol""" | |
59 |
|
59 | |||
60 | @LoginRequired(allow_default_user=True) |
|
60 | @LoginRequired(allow_default_user=True) | |
61 | def _before(self, *args, **kwargs): |
|
61 | def _before(self, *args, **kwargs): | |
62 | super(UserGroupsController, self)._before(*args, **kwargs) |
|
62 | super(UserGroupsController, self)._before(*args, **kwargs) | |
63 |
|
63 | |||
64 | def __load_data(self, user_group_id): |
|
64 | def __load_data(self, user_group_id): | |
65 | c.group_members_obj = sorted((x.user for x in c.user_group.members), |
|
65 | c.group_members_obj = sorted((x.user for x in c.user_group.members), | |
66 | key=lambda u: u.username.lower()) |
|
66 | key=lambda u: u.username.lower()) | |
67 |
|
67 | |||
68 | c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] |
|
68 | c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] | |
69 | c.available_members = sorted(((x.user_id, x.username) for x in |
|
69 | c.available_members = sorted(((x.user_id, x.username) for x in | |
70 | User.query().all()), |
|
70 | User.query().all()), | |
71 | key=lambda u: u[1].lower()) |
|
71 | key=lambda u: u[1].lower()) | |
72 |
|
72 | |||
73 | def __load_defaults(self, user_group_id): |
|
73 | def __load_defaults(self, user_group_id): | |
74 | """ |
|
74 | """ | |
75 | Load defaults settings for edit, and update |
|
75 | Load defaults settings for edit, and update | |
76 |
|
76 | |||
77 | :param user_group_id: |
|
77 | :param user_group_id: | |
78 | """ |
|
78 | """ | |
79 | user_group = UserGroup.get_or_404(user_group_id) |
|
79 | user_group = UserGroup.get_or_404(user_group_id) | |
80 | data = user_group.get_dict() |
|
80 | data = user_group.get_dict() | |
81 | return data |
|
81 | return data | |
82 |
|
82 | |||
83 | def index(self, format='html'): |
|
83 | def index(self, format='html'): | |
84 | _list = UserGroup.query() \ |
|
84 | _list = UserGroup.query() \ | |
85 | .order_by(func.lower(UserGroup.users_group_name)) \ |
|
85 | .order_by(func.lower(UserGroup.users_group_name)) \ | |
86 | .all() |
|
86 | .all() | |
87 | group_iter = UserGroupList(_list, perm_level='admin') |
|
87 | group_iter = UserGroupList(_list, perm_level='admin') | |
88 | user_groups_data = [] |
|
88 | user_groups_data = [] | |
89 | _tmpl_lookup = app_globals.mako_lookup |
|
89 | _tmpl_lookup = app_globals.mako_lookup | |
90 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') |
|
90 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') | |
91 |
|
91 | |||
92 |
user_group_name |
|
92 | def user_group_name(user_group_id, user_group_name): | |
93 | template.get_def("user_group_name") |
|
93 | return template.get_def("user_group_name") \ | |
94 | .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c) |
|
94 | .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c) | |
95 | ) |
|
95 | ||
96 |
user_group_actions |
|
96 | def user_group_actions(user_group_id, user_group_name): | |
97 | template.get_def("user_group_actions") |
|
97 | return template.get_def("user_group_actions") \ | |
98 | .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c) |
|
98 | .render_unicode(user_group_id, user_group_name, _=_, h=h, c=c) | |
99 | ) |
|
99 | ||
100 | for user_gr in group_iter: |
|
100 | for user_gr in group_iter: | |
101 |
|
||||
102 | user_groups_data.append({ |
|
101 | user_groups_data.append({ | |
103 | "raw_name": user_gr.users_group_name, |
|
102 | "raw_name": user_gr.users_group_name, | |
104 | "group_name": user_group_name(user_gr.users_group_id, |
|
103 | "group_name": user_group_name(user_gr.users_group_id, | |
105 | user_gr.users_group_name), |
|
104 | user_gr.users_group_name), | |
106 | "desc": h.escape(user_gr.user_group_description), |
|
105 | "desc": h.escape(user_gr.user_group_description), | |
107 | "members": len(user_gr.members), |
|
106 | "members": len(user_gr.members), | |
108 | "active": h.boolicon(user_gr.users_group_active), |
|
107 | "active": h.boolicon(user_gr.users_group_active), | |
109 | "owner": h.person(user_gr.owner.username), |
|
108 | "owner": h.person(user_gr.owner.username), | |
110 | "action": user_group_actions(user_gr.users_group_id, user_gr.users_group_name) |
|
109 | "action": user_group_actions(user_gr.users_group_id, user_gr.users_group_name) | |
111 | }) |
|
110 | }) | |
112 |
|
111 | |||
113 | c.data = { |
|
112 | c.data = { | |
114 | "sort": None, |
|
113 | "sort": None, | |
115 | "dir": "asc", |
|
114 | "dir": "asc", | |
116 | "records": user_groups_data |
|
115 | "records": user_groups_data | |
117 | } |
|
116 | } | |
118 |
|
117 | |||
119 | return render('admin/user_groups/user_groups.html') |
|
118 | return render('admin/user_groups/user_groups.html') | |
120 |
|
119 | |||
121 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') |
|
120 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') | |
122 | def create(self): |
|
121 | def create(self): | |
123 | users_group_form = UserGroupForm()() |
|
122 | users_group_form = UserGroupForm()() | |
124 | try: |
|
123 | try: | |
125 | form_result = users_group_form.to_python(dict(request.POST)) |
|
124 | form_result = users_group_form.to_python(dict(request.POST)) | |
126 | ug = UserGroupModel().create(name=form_result['users_group_name'], |
|
125 | ug = UserGroupModel().create(name=form_result['users_group_name'], | |
127 | description=form_result['user_group_description'], |
|
126 | description=form_result['user_group_description'], | |
128 | owner=request.authuser.user_id, |
|
127 | owner=request.authuser.user_id, | |
129 | active=form_result['users_group_active']) |
|
128 | active=form_result['users_group_active']) | |
130 |
|
129 | |||
131 | gr = form_result['users_group_name'] |
|
130 | gr = form_result['users_group_name'] | |
132 | action_logger(request.authuser, |
|
131 | action_logger(request.authuser, | |
133 | 'admin_created_users_group:%s' % gr, |
|
132 | 'admin_created_users_group:%s' % gr, | |
134 | None, request.ip_addr) |
|
133 | None, request.ip_addr) | |
135 | h.flash(h.HTML(_('Created user group %s')) % h.link_to(gr, url('edit_users_group', id=ug.users_group_id)), |
|
134 | h.flash(h.HTML(_('Created user group %s')) % h.link_to(gr, url('edit_users_group', id=ug.users_group_id)), | |
136 | category='success') |
|
135 | category='success') | |
137 | Session().commit() |
|
136 | Session().commit() | |
138 | except formencode.Invalid as errors: |
|
137 | except formencode.Invalid as errors: | |
139 | return htmlfill.render( |
|
138 | return htmlfill.render( | |
140 | render('admin/user_groups/user_group_add.html'), |
|
139 | render('admin/user_groups/user_group_add.html'), | |
141 | defaults=errors.value, |
|
140 | defaults=errors.value, | |
142 | errors=errors.error_dict or {}, |
|
141 | errors=errors.error_dict or {}, | |
143 | prefix_error=False, |
|
142 | prefix_error=False, | |
144 | encoding="UTF-8", |
|
143 | encoding="UTF-8", | |
145 | force_defaults=False) |
|
144 | force_defaults=False) | |
146 | except Exception: |
|
145 | except Exception: | |
147 | log.error(traceback.format_exc()) |
|
146 | log.error(traceback.format_exc()) | |
148 | h.flash(_('Error occurred during creation of user group %s') |
|
147 | h.flash(_('Error occurred during creation of user group %s') | |
149 | % request.POST.get('users_group_name'), category='error') |
|
148 | % request.POST.get('users_group_name'), category='error') | |
150 |
|
149 | |||
151 | raise HTTPFound(location=url('users_groups')) |
|
150 | raise HTTPFound(location=url('users_groups')) | |
152 |
|
151 | |||
153 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') |
|
152 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') | |
154 | def new(self, format='html'): |
|
153 | def new(self, format='html'): | |
155 | return render('admin/user_groups/user_group_add.html') |
|
154 | return render('admin/user_groups/user_group_add.html') | |
156 |
|
155 | |||
157 | @HasUserGroupPermissionLevelDecorator('admin') |
|
156 | @HasUserGroupPermissionLevelDecorator('admin') | |
158 | def update(self, id): |
|
157 | def update(self, id): | |
159 | c.user_group = UserGroup.get_or_404(id) |
|
158 | c.user_group = UserGroup.get_or_404(id) | |
160 | c.active = 'settings' |
|
159 | c.active = 'settings' | |
161 | self.__load_data(id) |
|
160 | self.__load_data(id) | |
162 |
|
161 | |||
163 | available_members = [safe_str(x[0]) for x in c.available_members] |
|
162 | available_members = [safe_str(x[0]) for x in c.available_members] | |
164 |
|
163 | |||
165 | users_group_form = UserGroupForm(edit=True, |
|
164 | users_group_form = UserGroupForm(edit=True, | |
166 | old_data=c.user_group.get_dict(), |
|
165 | old_data=c.user_group.get_dict(), | |
167 | available_members=available_members)() |
|
166 | available_members=available_members)() | |
168 |
|
167 | |||
169 | try: |
|
168 | try: | |
170 | form_result = users_group_form.to_python(request.POST) |
|
169 | form_result = users_group_form.to_python(request.POST) | |
171 | UserGroupModel().update(c.user_group, form_result) |
|
170 | UserGroupModel().update(c.user_group, form_result) | |
172 | gr = form_result['users_group_name'] |
|
171 | gr = form_result['users_group_name'] | |
173 | action_logger(request.authuser, |
|
172 | action_logger(request.authuser, | |
174 | 'admin_updated_users_group:%s' % gr, |
|
173 | 'admin_updated_users_group:%s' % gr, | |
175 | None, request.ip_addr) |
|
174 | None, request.ip_addr) | |
176 | h.flash(_('Updated user group %s') % gr, category='success') |
|
175 | h.flash(_('Updated user group %s') % gr, category='success') | |
177 | Session().commit() |
|
176 | Session().commit() | |
178 | except formencode.Invalid as errors: |
|
177 | except formencode.Invalid as errors: | |
179 | ug_model = UserGroupModel() |
|
178 | ug_model = UserGroupModel() | |
180 | defaults = errors.value |
|
179 | defaults = errors.value | |
181 | e = errors.error_dict or {} |
|
180 | e = errors.error_dict or {} | |
182 | defaults.update({ |
|
181 | defaults.update({ | |
183 | 'create_repo_perm': ug_model.has_perm(id, |
|
182 | 'create_repo_perm': ug_model.has_perm(id, | |
184 | 'hg.create.repository'), |
|
183 | 'hg.create.repository'), | |
185 | 'fork_repo_perm': ug_model.has_perm(id, |
|
184 | 'fork_repo_perm': ug_model.has_perm(id, | |
186 | 'hg.fork.repository'), |
|
185 | 'hg.fork.repository'), | |
187 | }) |
|
186 | }) | |
188 |
|
187 | |||
189 | return htmlfill.render( |
|
188 | return htmlfill.render( | |
190 | render('admin/user_groups/user_group_edit.html'), |
|
189 | render('admin/user_groups/user_group_edit.html'), | |
191 | defaults=defaults, |
|
190 | defaults=defaults, | |
192 | errors=e, |
|
191 | errors=e, | |
193 | prefix_error=False, |
|
192 | prefix_error=False, | |
194 | encoding="UTF-8", |
|
193 | encoding="UTF-8", | |
195 | force_defaults=False) |
|
194 | force_defaults=False) | |
196 | except Exception: |
|
195 | except Exception: | |
197 | log.error(traceback.format_exc()) |
|
196 | log.error(traceback.format_exc()) | |
198 | h.flash(_('Error occurred during update of user group %s') |
|
197 | h.flash(_('Error occurred during update of user group %s') | |
199 | % request.POST.get('users_group_name'), category='error') |
|
198 | % request.POST.get('users_group_name'), category='error') | |
200 |
|
199 | |||
201 | raise HTTPFound(location=url('edit_users_group', id=id)) |
|
200 | raise HTTPFound(location=url('edit_users_group', id=id)) | |
202 |
|
201 | |||
203 | @HasUserGroupPermissionLevelDecorator('admin') |
|
202 | @HasUserGroupPermissionLevelDecorator('admin') | |
204 | def delete(self, id): |
|
203 | def delete(self, id): | |
205 | usr_gr = UserGroup.get_or_404(id) |
|
204 | usr_gr = UserGroup.get_or_404(id) | |
206 | try: |
|
205 | try: | |
207 | UserGroupModel().delete(usr_gr) |
|
206 | UserGroupModel().delete(usr_gr) | |
208 | Session().commit() |
|
207 | Session().commit() | |
209 | h.flash(_('Successfully deleted user group'), category='success') |
|
208 | h.flash(_('Successfully deleted user group'), category='success') | |
210 | except UserGroupsAssignedException as e: |
|
209 | except UserGroupsAssignedException as e: | |
211 | h.flash(e, category='error') |
|
210 | h.flash(e, category='error') | |
212 | except Exception: |
|
211 | except Exception: | |
213 | log.error(traceback.format_exc()) |
|
212 | log.error(traceback.format_exc()) | |
214 | h.flash(_('An error occurred during deletion of user group'), |
|
213 | h.flash(_('An error occurred during deletion of user group'), | |
215 | category='error') |
|
214 | category='error') | |
216 | raise HTTPFound(location=url('users_groups')) |
|
215 | raise HTTPFound(location=url('users_groups')) | |
217 |
|
216 | |||
218 | @HasUserGroupPermissionLevelDecorator('admin') |
|
217 | @HasUserGroupPermissionLevelDecorator('admin') | |
219 | def edit(self, id, format='html'): |
|
218 | def edit(self, id, format='html'): | |
220 | c.user_group = UserGroup.get_or_404(id) |
|
219 | c.user_group = UserGroup.get_or_404(id) | |
221 | c.active = 'settings' |
|
220 | c.active = 'settings' | |
222 | self.__load_data(id) |
|
221 | self.__load_data(id) | |
223 |
|
222 | |||
224 | defaults = self.__load_defaults(id) |
|
223 | defaults = self.__load_defaults(id) | |
225 |
|
224 | |||
226 | return htmlfill.render( |
|
225 | return htmlfill.render( | |
227 | render('admin/user_groups/user_group_edit.html'), |
|
226 | render('admin/user_groups/user_group_edit.html'), | |
228 | defaults=defaults, |
|
227 | defaults=defaults, | |
229 | encoding="UTF-8", |
|
228 | encoding="UTF-8", | |
230 | force_defaults=False |
|
229 | force_defaults=False | |
231 | ) |
|
230 | ) | |
232 |
|
231 | |||
233 | @HasUserGroupPermissionLevelDecorator('admin') |
|
232 | @HasUserGroupPermissionLevelDecorator('admin') | |
234 | def edit_perms(self, id): |
|
233 | def edit_perms(self, id): | |
235 | c.user_group = UserGroup.get_or_404(id) |
|
234 | c.user_group = UserGroup.get_or_404(id) | |
236 | c.active = 'perms' |
|
235 | c.active = 'perms' | |
237 |
|
236 | |||
238 | defaults = {} |
|
237 | defaults = {} | |
239 | # fill user group users |
|
238 | # fill user group users | |
240 | for p in c.user_group.user_user_group_to_perm: |
|
239 | for p in c.user_group.user_user_group_to_perm: | |
241 | defaults.update({'u_perm_%s' % p.user.username: |
|
240 | defaults.update({'u_perm_%s' % p.user.username: | |
242 | p.permission.permission_name}) |
|
241 | p.permission.permission_name}) | |
243 |
|
242 | |||
244 | for p in c.user_group.user_group_user_group_to_perm: |
|
243 | for p in c.user_group.user_group_user_group_to_perm: | |
245 | defaults.update({'g_perm_%s' % p.user_group.users_group_name: |
|
244 | defaults.update({'g_perm_%s' % p.user_group.users_group_name: | |
246 | p.permission.permission_name}) |
|
245 | p.permission.permission_name}) | |
247 |
|
246 | |||
248 | return htmlfill.render( |
|
247 | return htmlfill.render( | |
249 | render('admin/user_groups/user_group_edit.html'), |
|
248 | render('admin/user_groups/user_group_edit.html'), | |
250 | defaults=defaults, |
|
249 | defaults=defaults, | |
251 | encoding="UTF-8", |
|
250 | encoding="UTF-8", | |
252 | force_defaults=False |
|
251 | force_defaults=False | |
253 | ) |
|
252 | ) | |
254 |
|
253 | |||
255 | @HasUserGroupPermissionLevelDecorator('admin') |
|
254 | @HasUserGroupPermissionLevelDecorator('admin') | |
256 | def update_perms(self, id): |
|
255 | def update_perms(self, id): | |
257 | """ |
|
256 | """ | |
258 | grant permission for given usergroup |
|
257 | grant permission for given usergroup | |
259 |
|
258 | |||
260 | :param id: |
|
259 | :param id: | |
261 | """ |
|
260 | """ | |
262 | user_group = UserGroup.get_or_404(id) |
|
261 | user_group = UserGroup.get_or_404(id) | |
263 | form = UserGroupPermsForm()().to_python(request.POST) |
|
262 | form = UserGroupPermsForm()().to_python(request.POST) | |
264 |
|
263 | |||
265 | # set the permissions ! |
|
264 | # set the permissions ! | |
266 | try: |
|
265 | try: | |
267 | UserGroupModel()._update_permissions(user_group, form['perms_new'], |
|
266 | UserGroupModel()._update_permissions(user_group, form['perms_new'], | |
268 | form['perms_updates']) |
|
267 | form['perms_updates']) | |
269 | except RepoGroupAssignmentError: |
|
268 | except RepoGroupAssignmentError: | |
270 | h.flash(_('Target group cannot be the same'), category='error') |
|
269 | h.flash(_('Target group cannot be the same'), category='error') | |
271 | raise HTTPFound(location=url('edit_user_group_perms', id=id)) |
|
270 | raise HTTPFound(location=url('edit_user_group_perms', id=id)) | |
272 | # TODO: implement this |
|
271 | # TODO: implement this | |
273 | #action_logger(request.authuser, 'admin_changed_repo_permissions', |
|
272 | #action_logger(request.authuser, 'admin_changed_repo_permissions', | |
274 | # repo_name, request.ip_addr) |
|
273 | # repo_name, request.ip_addr) | |
275 | Session().commit() |
|
274 | Session().commit() | |
276 | h.flash(_('User group permissions updated'), category='success') |
|
275 | h.flash(_('User group permissions updated'), category='success') | |
277 | raise HTTPFound(location=url('edit_user_group_perms', id=id)) |
|
276 | raise HTTPFound(location=url('edit_user_group_perms', id=id)) | |
278 |
|
277 | |||
279 | @HasUserGroupPermissionLevelDecorator('admin') |
|
278 | @HasUserGroupPermissionLevelDecorator('admin') | |
280 | def delete_perms(self, id): |
|
279 | def delete_perms(self, id): | |
281 | try: |
|
280 | try: | |
282 | obj_type = request.POST.get('obj_type') |
|
281 | obj_type = request.POST.get('obj_type') | |
283 | obj_id = None |
|
282 | obj_id = None | |
284 | if obj_type == 'user': |
|
283 | if obj_type == 'user': | |
285 | obj_id = safe_int(request.POST.get('user_id')) |
|
284 | obj_id = safe_int(request.POST.get('user_id')) | |
286 | elif obj_type == 'user_group': |
|
285 | elif obj_type == 'user_group': | |
287 | obj_id = safe_int(request.POST.get('user_group_id')) |
|
286 | obj_id = safe_int(request.POST.get('user_group_id')) | |
288 |
|
287 | |||
289 | if not request.authuser.is_admin: |
|
288 | if not request.authuser.is_admin: | |
290 | if obj_type == 'user' and request.authuser.user_id == obj_id: |
|
289 | if obj_type == 'user' and request.authuser.user_id == obj_id: | |
291 | msg = _('Cannot revoke permission for yourself as admin') |
|
290 | msg = _('Cannot revoke permission for yourself as admin') | |
292 | h.flash(msg, category='warning') |
|
291 | h.flash(msg, category='warning') | |
293 | raise Exception('revoke admin permission on self') |
|
292 | raise Exception('revoke admin permission on self') | |
294 | if obj_type == 'user': |
|
293 | if obj_type == 'user': | |
295 | UserGroupModel().revoke_user_permission(user_group=id, |
|
294 | UserGroupModel().revoke_user_permission(user_group=id, | |
296 | user=obj_id) |
|
295 | user=obj_id) | |
297 | elif obj_type == 'user_group': |
|
296 | elif obj_type == 'user_group': | |
298 | UserGroupModel().revoke_user_group_permission(target_user_group=id, |
|
297 | UserGroupModel().revoke_user_group_permission(target_user_group=id, | |
299 | user_group=obj_id) |
|
298 | user_group=obj_id) | |
300 | Session().commit() |
|
299 | Session().commit() | |
301 | except Exception: |
|
300 | except Exception: | |
302 | log.error(traceback.format_exc()) |
|
301 | log.error(traceback.format_exc()) | |
303 | h.flash(_('An error occurred during revoking of permission'), |
|
302 | h.flash(_('An error occurred during revoking of permission'), | |
304 | category='error') |
|
303 | category='error') | |
305 | raise HTTPInternalServerError() |
|
304 | raise HTTPInternalServerError() | |
306 |
|
305 | |||
307 | @HasUserGroupPermissionLevelDecorator('admin') |
|
306 | @HasUserGroupPermissionLevelDecorator('admin') | |
308 | def edit_default_perms(self, id): |
|
307 | def edit_default_perms(self, id): | |
309 | c.user_group = UserGroup.get_or_404(id) |
|
308 | c.user_group = UserGroup.get_or_404(id) | |
310 | c.active = 'default_perms' |
|
309 | c.active = 'default_perms' | |
311 |
|
310 | |||
312 | permissions = { |
|
311 | permissions = { | |
313 | 'repositories': {}, |
|
312 | 'repositories': {}, | |
314 | 'repositories_groups': {} |
|
313 | 'repositories_groups': {} | |
315 | } |
|
314 | } | |
316 | ugroup_repo_perms = UserGroupRepoToPerm.query() \ |
|
315 | ugroup_repo_perms = UserGroupRepoToPerm.query() \ | |
317 | .options(joinedload(UserGroupRepoToPerm.permission)) \ |
|
316 | .options(joinedload(UserGroupRepoToPerm.permission)) \ | |
318 | .options(joinedload(UserGroupRepoToPerm.repository)) \ |
|
317 | .options(joinedload(UserGroupRepoToPerm.repository)) \ | |
319 | .filter(UserGroupRepoToPerm.users_group_id == id) \ |
|
318 | .filter(UserGroupRepoToPerm.users_group_id == id) \ | |
320 | .all() |
|
319 | .all() | |
321 |
|
320 | |||
322 | for gr in ugroup_repo_perms: |
|
321 | for gr in ugroup_repo_perms: | |
323 | permissions['repositories'][gr.repository.repo_name] \ |
|
322 | permissions['repositories'][gr.repository.repo_name] \ | |
324 | = gr.permission.permission_name |
|
323 | = gr.permission.permission_name | |
325 |
|
324 | |||
326 | ugroup_group_perms = UserGroupRepoGroupToPerm.query() \ |
|
325 | ugroup_group_perms = UserGroupRepoGroupToPerm.query() \ | |
327 | .options(joinedload(UserGroupRepoGroupToPerm.permission)) \ |
|
326 | .options(joinedload(UserGroupRepoGroupToPerm.permission)) \ | |
328 | .options(joinedload(UserGroupRepoGroupToPerm.group)) \ |
|
327 | .options(joinedload(UserGroupRepoGroupToPerm.group)) \ | |
329 | .filter(UserGroupRepoGroupToPerm.users_group_id == id) \ |
|
328 | .filter(UserGroupRepoGroupToPerm.users_group_id == id) \ | |
330 | .all() |
|
329 | .all() | |
331 |
|
330 | |||
332 | for gr in ugroup_group_perms: |
|
331 | for gr in ugroup_group_perms: | |
333 | permissions['repositories_groups'][gr.group.group_name] \ |
|
332 | permissions['repositories_groups'][gr.group.group_name] \ | |
334 | = gr.permission.permission_name |
|
333 | = gr.permission.permission_name | |
335 | c.permissions = permissions |
|
334 | c.permissions = permissions | |
336 |
|
335 | |||
337 | ug_model = UserGroupModel() |
|
336 | ug_model = UserGroupModel() | |
338 |
|
337 | |||
339 | defaults = c.user_group.get_dict() |
|
338 | defaults = c.user_group.get_dict() | |
340 | defaults.update({ |
|
339 | defaults.update({ | |
341 | 'create_repo_perm': ug_model.has_perm(c.user_group, |
|
340 | 'create_repo_perm': ug_model.has_perm(c.user_group, | |
342 | 'hg.create.repository'), |
|
341 | 'hg.create.repository'), | |
343 | 'create_user_group_perm': ug_model.has_perm(c.user_group, |
|
342 | 'create_user_group_perm': ug_model.has_perm(c.user_group, | |
344 | 'hg.usergroup.create.true'), |
|
343 | 'hg.usergroup.create.true'), | |
345 | 'fork_repo_perm': ug_model.has_perm(c.user_group, |
|
344 | 'fork_repo_perm': ug_model.has_perm(c.user_group, | |
346 | 'hg.fork.repository'), |
|
345 | 'hg.fork.repository'), | |
347 | }) |
|
346 | }) | |
348 |
|
347 | |||
349 | return htmlfill.render( |
|
348 | return htmlfill.render( | |
350 | render('admin/user_groups/user_group_edit.html'), |
|
349 | render('admin/user_groups/user_group_edit.html'), | |
351 | defaults=defaults, |
|
350 | defaults=defaults, | |
352 | encoding="UTF-8", |
|
351 | encoding="UTF-8", | |
353 | force_defaults=False |
|
352 | force_defaults=False | |
354 | ) |
|
353 | ) | |
355 |
|
354 | |||
356 | @HasUserGroupPermissionLevelDecorator('admin') |
|
355 | @HasUserGroupPermissionLevelDecorator('admin') | |
357 | def update_default_perms(self, id): |
|
356 | def update_default_perms(self, id): | |
358 | user_group = UserGroup.get_or_404(id) |
|
357 | user_group = UserGroup.get_or_404(id) | |
359 |
|
358 | |||
360 | try: |
|
359 | try: | |
361 | form = CustomDefaultPermissionsForm()() |
|
360 | form = CustomDefaultPermissionsForm()() | |
362 | form_result = form.to_python(request.POST) |
|
361 | form_result = form.to_python(request.POST) | |
363 |
|
362 | |||
364 | usergroup_model = UserGroupModel() |
|
363 | usergroup_model = UserGroupModel() | |
365 |
|
364 | |||
366 | defs = UserGroupToPerm.query() \ |
|
365 | defs = UserGroupToPerm.query() \ | |
367 | .filter(UserGroupToPerm.users_group == user_group) \ |
|
366 | .filter(UserGroupToPerm.users_group == user_group) \ | |
368 | .all() |
|
367 | .all() | |
369 | for ug in defs: |
|
368 | for ug in defs: | |
370 | Session().delete(ug) |
|
369 | Session().delete(ug) | |
371 |
|
370 | |||
372 | if form_result['create_repo_perm']: |
|
371 | if form_result['create_repo_perm']: | |
373 | usergroup_model.grant_perm(id, 'hg.create.repository') |
|
372 | usergroup_model.grant_perm(id, 'hg.create.repository') | |
374 | else: |
|
373 | else: | |
375 | usergroup_model.grant_perm(id, 'hg.create.none') |
|
374 | usergroup_model.grant_perm(id, 'hg.create.none') | |
376 | if form_result['create_user_group_perm']: |
|
375 | if form_result['create_user_group_perm']: | |
377 | usergroup_model.grant_perm(id, 'hg.usergroup.create.true') |
|
376 | usergroup_model.grant_perm(id, 'hg.usergroup.create.true') | |
378 | else: |
|
377 | else: | |
379 | usergroup_model.grant_perm(id, 'hg.usergroup.create.false') |
|
378 | usergroup_model.grant_perm(id, 'hg.usergroup.create.false') | |
380 | if form_result['fork_repo_perm']: |
|
379 | if form_result['fork_repo_perm']: | |
381 | usergroup_model.grant_perm(id, 'hg.fork.repository') |
|
380 | usergroup_model.grant_perm(id, 'hg.fork.repository') | |
382 | else: |
|
381 | else: | |
383 | usergroup_model.grant_perm(id, 'hg.fork.none') |
|
382 | usergroup_model.grant_perm(id, 'hg.fork.none') | |
384 |
|
383 | |||
385 | h.flash(_("Updated permissions"), category='success') |
|
384 | h.flash(_("Updated permissions"), category='success') | |
386 | Session().commit() |
|
385 | Session().commit() | |
387 | except Exception: |
|
386 | except Exception: | |
388 | log.error(traceback.format_exc()) |
|
387 | log.error(traceback.format_exc()) | |
389 | h.flash(_('An error occurred during permissions saving'), |
|
388 | h.flash(_('An error occurred during permissions saving'), | |
390 | category='error') |
|
389 | category='error') | |
391 |
|
390 | |||
392 | raise HTTPFound(location=url('edit_user_group_default_perms', id=id)) |
|
391 | raise HTTPFound(location=url('edit_user_group_default_perms', id=id)) | |
393 |
|
392 | |||
394 | @HasUserGroupPermissionLevelDecorator('admin') |
|
393 | @HasUserGroupPermissionLevelDecorator('admin') | |
395 | def edit_advanced(self, id): |
|
394 | def edit_advanced(self, id): | |
396 | c.user_group = UserGroup.get_or_404(id) |
|
395 | c.user_group = UserGroup.get_or_404(id) | |
397 | c.active = 'advanced' |
|
396 | c.active = 'advanced' | |
398 | c.group_members_obj = sorted((x.user for x in c.user_group.members), |
|
397 | c.group_members_obj = sorted((x.user for x in c.user_group.members), | |
399 | key=lambda u: u.username.lower()) |
|
398 | key=lambda u: u.username.lower()) | |
400 | return render('admin/user_groups/user_group_edit.html') |
|
399 | return render('admin/user_groups/user_group_edit.html') | |
401 |
|
400 | |||
402 | @HasUserGroupPermissionLevelDecorator('admin') |
|
401 | @HasUserGroupPermissionLevelDecorator('admin') | |
403 | def edit_members(self, id): |
|
402 | def edit_members(self, id): | |
404 | c.user_group = UserGroup.get_or_404(id) |
|
403 | c.user_group = UserGroup.get_or_404(id) | |
405 | c.active = 'members' |
|
404 | c.active = 'members' | |
406 | c.group_members_obj = sorted((x.user for x in c.user_group.members), |
|
405 | c.group_members_obj = sorted((x.user for x in c.user_group.members), | |
407 | key=lambda u: u.username.lower()) |
|
406 | key=lambda u: u.username.lower()) | |
408 |
|
407 | |||
409 | c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] |
|
408 | c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] | |
410 | return render('admin/user_groups/user_group_edit.html') |
|
409 | return render('admin/user_groups/user_group_edit.html') |
@@ -1,470 +1,470 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.controllers.admin.users |
|
15 | kallithea.controllers.admin.users | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Users crud controller |
|
18 | Users crud controller | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Apr 4, 2010 |
|
22 | :created_on: Apr 4, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import logging |
|
28 | import logging | |
29 | import traceback |
|
29 | import traceback | |
30 |
|
30 | |||
31 | import formencode |
|
31 | import formencode | |
32 | from formencode import htmlfill |
|
32 | from formencode import htmlfill | |
33 | from sqlalchemy.sql.expression import func |
|
33 | from sqlalchemy.sql.expression import func | |
34 | from tg import app_globals, request |
|
34 | from tg import app_globals, request | |
35 | from tg import tmpl_context as c |
|
35 | from tg import tmpl_context as c | |
36 | from tg.i18n import ugettext as _ |
|
36 | from tg.i18n import ugettext as _ | |
37 | from webob.exc import HTTPFound, HTTPNotFound |
|
37 | from webob.exc import HTTPFound, HTTPNotFound | |
38 |
|
38 | |||
39 | from kallithea.config.routing import url |
|
39 | from kallithea.config.routing import url | |
40 | from kallithea.lib import auth_modules |
|
40 | from kallithea.lib import auth_modules | |
41 | from kallithea.lib import helpers as h |
|
41 | from kallithea.lib import helpers as h | |
42 | from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator, LoginRequired |
|
42 | from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator, LoginRequired | |
43 | from kallithea.lib.base import BaseController, IfSshEnabled, render |
|
43 | from kallithea.lib.base import BaseController, IfSshEnabled, render | |
44 | from kallithea.lib.exceptions import DefaultUserException, UserCreationError, UserOwnsReposException |
|
44 | from kallithea.lib.exceptions import DefaultUserException, UserCreationError, UserOwnsReposException | |
45 | from kallithea.lib.utils import action_logger |
|
45 | from kallithea.lib.utils import action_logger | |
46 | from kallithea.lib.utils2 import datetime_to_time, generate_api_key, safe_int |
|
46 | from kallithea.lib.utils2 import datetime_to_time, generate_api_key, safe_int | |
47 | from kallithea.model.api_key import ApiKeyModel |
|
47 | from kallithea.model.api_key import ApiKeyModel | |
48 | from kallithea.model.db import User, UserEmailMap, UserIpMap, UserToPerm |
|
48 | from kallithea.model.db import User, UserEmailMap, UserIpMap, UserToPerm | |
49 | from kallithea.model.forms import CustomDefaultPermissionsForm, UserForm |
|
49 | from kallithea.model.forms import CustomDefaultPermissionsForm, UserForm | |
50 | from kallithea.model.meta import Session |
|
50 | from kallithea.model.meta import Session | |
51 | from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException |
|
51 | from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException | |
52 | from kallithea.model.user import UserModel |
|
52 | from kallithea.model.user import UserModel | |
53 |
|
53 | |||
54 |
|
54 | |||
55 | log = logging.getLogger(__name__) |
|
55 | log = logging.getLogger(__name__) | |
56 |
|
56 | |||
57 |
|
57 | |||
58 | class UsersController(BaseController): |
|
58 | class UsersController(BaseController): | |
59 | """REST Controller styled on the Atom Publishing Protocol""" |
|
59 | """REST Controller styled on the Atom Publishing Protocol""" | |
60 |
|
60 | |||
61 | @LoginRequired() |
|
61 | @LoginRequired() | |
62 | @HasPermissionAnyDecorator('hg.admin') |
|
62 | @HasPermissionAnyDecorator('hg.admin') | |
63 | def _before(self, *args, **kwargs): |
|
63 | def _before(self, *args, **kwargs): | |
64 | super(UsersController, self)._before(*args, **kwargs) |
|
64 | super(UsersController, self)._before(*args, **kwargs) | |
65 |
|
65 | |||
66 | def index(self, format='html'): |
|
66 | def index(self, format='html'): | |
67 | c.users_list = User.query().order_by(User.username) \ |
|
67 | c.users_list = User.query().order_by(User.username) \ | |
68 | .filter_by(is_default_user=False) \ |
|
68 | .filter_by(is_default_user=False) \ | |
69 | .order_by(func.lower(User.username)) \ |
|
69 | .order_by(func.lower(User.username)) \ | |
70 | .all() |
|
70 | .all() | |
71 |
|
71 | |||
72 | users_data = [] |
|
72 | users_data = [] | |
73 | _tmpl_lookup = app_globals.mako_lookup |
|
73 | _tmpl_lookup = app_globals.mako_lookup | |
74 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') |
|
74 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') | |
75 |
|
75 | |||
76 | grav_tmpl = '<div class="gravatar">%s</div>' |
|
76 | grav_tmpl = '<div class="gravatar">%s</div>' | |
77 |
|
77 | |||
78 |
username |
|
78 | def username(user_id, username): | |
79 |
|
|
79 | return template.get_def("user_name") \ | |
80 |
.render_unicode(user_id, username, _=_, h=h, c=c) |
|
80 | .render_unicode(user_id, username, _=_, h=h, c=c) | |
81 |
|
81 | |||
82 |
user_actions |
|
82 | def user_actions(user_id, username): | |
83 |
|
|
83 | return template.get_def("user_actions") \ | |
84 |
.render_unicode(user_id, username, _=_, h=h, c=c) |
|
84 | .render_unicode(user_id, username, _=_, h=h, c=c) | |
85 |
|
85 | |||
86 | for user in c.users_list: |
|
86 | for user in c.users_list: | |
87 | users_data.append({ |
|
87 | users_data.append({ | |
88 | "gravatar": grav_tmpl % h.gravatar(user.email, size=20), |
|
88 | "gravatar": grav_tmpl % h.gravatar(user.email, size=20), | |
89 | "raw_name": user.username, |
|
89 | "raw_name": user.username, | |
90 | "username": username(user.user_id, user.username), |
|
90 | "username": username(user.user_id, user.username), | |
91 | "firstname": h.escape(user.name), |
|
91 | "firstname": h.escape(user.name), | |
92 | "lastname": h.escape(user.lastname), |
|
92 | "lastname": h.escape(user.lastname), | |
93 | "last_login": h.fmt_date(user.last_login), |
|
93 | "last_login": h.fmt_date(user.last_login), | |
94 | "last_login_raw": datetime_to_time(user.last_login), |
|
94 | "last_login_raw": datetime_to_time(user.last_login), | |
95 | "active": h.boolicon(user.active), |
|
95 | "active": h.boolicon(user.active), | |
96 | "admin": h.boolicon(user.admin), |
|
96 | "admin": h.boolicon(user.admin), | |
97 | "extern_type": user.extern_type, |
|
97 | "extern_type": user.extern_type, | |
98 | "extern_name": user.extern_name, |
|
98 | "extern_name": user.extern_name, | |
99 | "action": user_actions(user.user_id, user.username), |
|
99 | "action": user_actions(user.user_id, user.username), | |
100 | }) |
|
100 | }) | |
101 |
|
101 | |||
102 | c.data = { |
|
102 | c.data = { | |
103 | "sort": None, |
|
103 | "sort": None, | |
104 | "dir": "asc", |
|
104 | "dir": "asc", | |
105 | "records": users_data |
|
105 | "records": users_data | |
106 | } |
|
106 | } | |
107 |
|
107 | |||
108 | return render('admin/users/users.html') |
|
108 | return render('admin/users/users.html') | |
109 |
|
109 | |||
110 | def create(self): |
|
110 | def create(self): | |
111 | c.default_extern_type = User.DEFAULT_AUTH_TYPE |
|
111 | c.default_extern_type = User.DEFAULT_AUTH_TYPE | |
112 | c.default_extern_name = '' |
|
112 | c.default_extern_name = '' | |
113 | user_model = UserModel() |
|
113 | user_model = UserModel() | |
114 | user_form = UserForm()() |
|
114 | user_form = UserForm()() | |
115 | try: |
|
115 | try: | |
116 | form_result = user_form.to_python(dict(request.POST)) |
|
116 | form_result = user_form.to_python(dict(request.POST)) | |
117 | user = user_model.create(form_result) |
|
117 | user = user_model.create(form_result) | |
118 | action_logger(request.authuser, 'admin_created_user:%s' % user.username, |
|
118 | action_logger(request.authuser, 'admin_created_user:%s' % user.username, | |
119 | None, request.ip_addr) |
|
119 | None, request.ip_addr) | |
120 | h.flash(_('Created user %s') % user.username, |
|
120 | h.flash(_('Created user %s') % user.username, | |
121 | category='success') |
|
121 | category='success') | |
122 | Session().commit() |
|
122 | Session().commit() | |
123 | except formencode.Invalid as errors: |
|
123 | except formencode.Invalid as errors: | |
124 | return htmlfill.render( |
|
124 | return htmlfill.render( | |
125 | render('admin/users/user_add.html'), |
|
125 | render('admin/users/user_add.html'), | |
126 | defaults=errors.value, |
|
126 | defaults=errors.value, | |
127 | errors=errors.error_dict or {}, |
|
127 | errors=errors.error_dict or {}, | |
128 | prefix_error=False, |
|
128 | prefix_error=False, | |
129 | encoding="UTF-8", |
|
129 | encoding="UTF-8", | |
130 | force_defaults=False) |
|
130 | force_defaults=False) | |
131 | except UserCreationError as e: |
|
131 | except UserCreationError as e: | |
132 | h.flash(e, 'error') |
|
132 | h.flash(e, 'error') | |
133 | except Exception: |
|
133 | except Exception: | |
134 | log.error(traceback.format_exc()) |
|
134 | log.error(traceback.format_exc()) | |
135 | h.flash(_('Error occurred during creation of user %s') |
|
135 | h.flash(_('Error occurred during creation of user %s') | |
136 | % request.POST.get('username'), category='error') |
|
136 | % request.POST.get('username'), category='error') | |
137 | raise HTTPFound(location=url('edit_user', id=user.user_id)) |
|
137 | raise HTTPFound(location=url('edit_user', id=user.user_id)) | |
138 |
|
138 | |||
139 | def new(self, format='html'): |
|
139 | def new(self, format='html'): | |
140 | c.default_extern_type = User.DEFAULT_AUTH_TYPE |
|
140 | c.default_extern_type = User.DEFAULT_AUTH_TYPE | |
141 | c.default_extern_name = '' |
|
141 | c.default_extern_name = '' | |
142 | return render('admin/users/user_add.html') |
|
142 | return render('admin/users/user_add.html') | |
143 |
|
143 | |||
144 | def update(self, id): |
|
144 | def update(self, id): | |
145 | user_model = UserModel() |
|
145 | user_model = UserModel() | |
146 | user = user_model.get(id) |
|
146 | user = user_model.get(id) | |
147 | _form = UserForm(edit=True, old_data={'user_id': id, |
|
147 | _form = UserForm(edit=True, old_data={'user_id': id, | |
148 | 'email': user.email})() |
|
148 | 'email': user.email})() | |
149 | form_result = {} |
|
149 | form_result = {} | |
150 | try: |
|
150 | try: | |
151 | form_result = _form.to_python(dict(request.POST)) |
|
151 | form_result = _form.to_python(dict(request.POST)) | |
152 | skip_attrs = ['extern_type', 'extern_name', |
|
152 | skip_attrs = ['extern_type', 'extern_name', | |
153 | ] + auth_modules.get_managed_fields(user) |
|
153 | ] + auth_modules.get_managed_fields(user) | |
154 |
|
154 | |||
155 | user_model.update(id, form_result, skip_attrs=skip_attrs) |
|
155 | user_model.update(id, form_result, skip_attrs=skip_attrs) | |
156 | usr = form_result['username'] |
|
156 | usr = form_result['username'] | |
157 | action_logger(request.authuser, 'admin_updated_user:%s' % usr, |
|
157 | action_logger(request.authuser, 'admin_updated_user:%s' % usr, | |
158 | None, request.ip_addr) |
|
158 | None, request.ip_addr) | |
159 | h.flash(_('User updated successfully'), category='success') |
|
159 | h.flash(_('User updated successfully'), category='success') | |
160 | Session().commit() |
|
160 | Session().commit() | |
161 | except formencode.Invalid as errors: |
|
161 | except formencode.Invalid as errors: | |
162 | defaults = errors.value |
|
162 | defaults = errors.value | |
163 | e = errors.error_dict or {} |
|
163 | e = errors.error_dict or {} | |
164 | defaults.update({ |
|
164 | defaults.update({ | |
165 | 'create_repo_perm': user_model.has_perm(id, |
|
165 | 'create_repo_perm': user_model.has_perm(id, | |
166 | 'hg.create.repository'), |
|
166 | 'hg.create.repository'), | |
167 | 'fork_repo_perm': user_model.has_perm(id, 'hg.fork.repository'), |
|
167 | 'fork_repo_perm': user_model.has_perm(id, 'hg.fork.repository'), | |
168 | }) |
|
168 | }) | |
169 | return htmlfill.render( |
|
169 | return htmlfill.render( | |
170 | self._render_edit_profile(user), |
|
170 | self._render_edit_profile(user), | |
171 | defaults=defaults, |
|
171 | defaults=defaults, | |
172 | errors=e, |
|
172 | errors=e, | |
173 | prefix_error=False, |
|
173 | prefix_error=False, | |
174 | encoding="UTF-8", |
|
174 | encoding="UTF-8", | |
175 | force_defaults=False) |
|
175 | force_defaults=False) | |
176 | except Exception: |
|
176 | except Exception: | |
177 | log.error(traceback.format_exc()) |
|
177 | log.error(traceback.format_exc()) | |
178 | h.flash(_('Error occurred during update of user %s') |
|
178 | h.flash(_('Error occurred during update of user %s') | |
179 | % form_result.get('username'), category='error') |
|
179 | % form_result.get('username'), category='error') | |
180 | raise HTTPFound(location=url('edit_user', id=id)) |
|
180 | raise HTTPFound(location=url('edit_user', id=id)) | |
181 |
|
181 | |||
182 | def delete(self, id): |
|
182 | def delete(self, id): | |
183 | usr = User.get_or_404(id) |
|
183 | usr = User.get_or_404(id) | |
184 | try: |
|
184 | try: | |
185 | UserModel().delete(usr) |
|
185 | UserModel().delete(usr) | |
186 | Session().commit() |
|
186 | Session().commit() | |
187 | h.flash(_('Successfully deleted user'), category='success') |
|
187 | h.flash(_('Successfully deleted user'), category='success') | |
188 | except (UserOwnsReposException, DefaultUserException) as e: |
|
188 | except (UserOwnsReposException, DefaultUserException) as e: | |
189 | h.flash(e, category='warning') |
|
189 | h.flash(e, category='warning') | |
190 | except Exception: |
|
190 | except Exception: | |
191 | log.error(traceback.format_exc()) |
|
191 | log.error(traceback.format_exc()) | |
192 | h.flash(_('An error occurred during deletion of user'), |
|
192 | h.flash(_('An error occurred during deletion of user'), | |
193 | category='error') |
|
193 | category='error') | |
194 | raise HTTPFound(location=url('users')) |
|
194 | raise HTTPFound(location=url('users')) | |
195 |
|
195 | |||
196 | def _get_user_or_raise_if_default(self, id): |
|
196 | def _get_user_or_raise_if_default(self, id): | |
197 | try: |
|
197 | try: | |
198 | return User.get_or_404(id, allow_default=False) |
|
198 | return User.get_or_404(id, allow_default=False) | |
199 | except DefaultUserException: |
|
199 | except DefaultUserException: | |
200 | h.flash(_("The default user cannot be edited"), category='warning') |
|
200 | h.flash(_("The default user cannot be edited"), category='warning') | |
201 | raise HTTPNotFound |
|
201 | raise HTTPNotFound | |
202 |
|
202 | |||
203 | def _render_edit_profile(self, user): |
|
203 | def _render_edit_profile(self, user): | |
204 | c.user = user |
|
204 | c.user = user | |
205 | c.active = 'profile' |
|
205 | c.active = 'profile' | |
206 | c.perm_user = AuthUser(dbuser=user) |
|
206 | c.perm_user = AuthUser(dbuser=user) | |
207 | managed_fields = auth_modules.get_managed_fields(user) |
|
207 | managed_fields = auth_modules.get_managed_fields(user) | |
208 | c.readonly = lambda n: 'readonly' if n in managed_fields else None |
|
208 | c.readonly = lambda n: 'readonly' if n in managed_fields else None | |
209 | return render('admin/users/user_edit.html') |
|
209 | return render('admin/users/user_edit.html') | |
210 |
|
210 | |||
211 | def edit(self, id, format='html'): |
|
211 | def edit(self, id, format='html'): | |
212 | user = self._get_user_or_raise_if_default(id) |
|
212 | user = self._get_user_or_raise_if_default(id) | |
213 | defaults = user.get_dict() |
|
213 | defaults = user.get_dict() | |
214 |
|
214 | |||
215 | return htmlfill.render( |
|
215 | return htmlfill.render( | |
216 | self._render_edit_profile(user), |
|
216 | self._render_edit_profile(user), | |
217 | defaults=defaults, |
|
217 | defaults=defaults, | |
218 | encoding="UTF-8", |
|
218 | encoding="UTF-8", | |
219 | force_defaults=False) |
|
219 | force_defaults=False) | |
220 |
|
220 | |||
221 | def edit_advanced(self, id): |
|
221 | def edit_advanced(self, id): | |
222 | c.user = self._get_user_or_raise_if_default(id) |
|
222 | c.user = self._get_user_or_raise_if_default(id) | |
223 | c.active = 'advanced' |
|
223 | c.active = 'advanced' | |
224 | c.perm_user = AuthUser(dbuser=c.user) |
|
224 | c.perm_user = AuthUser(dbuser=c.user) | |
225 |
|
225 | |||
226 | umodel = UserModel() |
|
226 | umodel = UserModel() | |
227 | defaults = c.user.get_dict() |
|
227 | defaults = c.user.get_dict() | |
228 | defaults.update({ |
|
228 | defaults.update({ | |
229 | 'create_repo_perm': umodel.has_perm(c.user, 'hg.create.repository'), |
|
229 | 'create_repo_perm': umodel.has_perm(c.user, 'hg.create.repository'), | |
230 | 'create_user_group_perm': umodel.has_perm(c.user, |
|
230 | 'create_user_group_perm': umodel.has_perm(c.user, | |
231 | 'hg.usergroup.create.true'), |
|
231 | 'hg.usergroup.create.true'), | |
232 | 'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'), |
|
232 | 'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'), | |
233 | }) |
|
233 | }) | |
234 | return htmlfill.render( |
|
234 | return htmlfill.render( | |
235 | render('admin/users/user_edit.html'), |
|
235 | render('admin/users/user_edit.html'), | |
236 | defaults=defaults, |
|
236 | defaults=defaults, | |
237 | encoding="UTF-8", |
|
237 | encoding="UTF-8", | |
238 | force_defaults=False) |
|
238 | force_defaults=False) | |
239 |
|
239 | |||
240 | def edit_api_keys(self, id): |
|
240 | def edit_api_keys(self, id): | |
241 | c.user = self._get_user_or_raise_if_default(id) |
|
241 | c.user = self._get_user_or_raise_if_default(id) | |
242 | c.active = 'api_keys' |
|
242 | c.active = 'api_keys' | |
243 | show_expired = True |
|
243 | show_expired = True | |
244 | c.lifetime_values = [ |
|
244 | c.lifetime_values = [ | |
245 | (str(-1), _('Forever')), |
|
245 | (str(-1), _('Forever')), | |
246 | (str(5), _('5 minutes')), |
|
246 | (str(5), _('5 minutes')), | |
247 | (str(60), _('1 hour')), |
|
247 | (str(60), _('1 hour')), | |
248 | (str(60 * 24), _('1 day')), |
|
248 | (str(60 * 24), _('1 day')), | |
249 | (str(60 * 24 * 30), _('1 month')), |
|
249 | (str(60 * 24 * 30), _('1 month')), | |
250 | ] |
|
250 | ] | |
251 | c.lifetime_options = [(c.lifetime_values, _("Lifetime"))] |
|
251 | c.lifetime_options = [(c.lifetime_values, _("Lifetime"))] | |
252 | c.user_api_keys = ApiKeyModel().get_api_keys(c.user.user_id, |
|
252 | c.user_api_keys = ApiKeyModel().get_api_keys(c.user.user_id, | |
253 | show_expired=show_expired) |
|
253 | show_expired=show_expired) | |
254 | defaults = c.user.get_dict() |
|
254 | defaults = c.user.get_dict() | |
255 | return htmlfill.render( |
|
255 | return htmlfill.render( | |
256 | render('admin/users/user_edit.html'), |
|
256 | render('admin/users/user_edit.html'), | |
257 | defaults=defaults, |
|
257 | defaults=defaults, | |
258 | encoding="UTF-8", |
|
258 | encoding="UTF-8", | |
259 | force_defaults=False) |
|
259 | force_defaults=False) | |
260 |
|
260 | |||
261 | def add_api_key(self, id): |
|
261 | def add_api_key(self, id): | |
262 | c.user = self._get_user_or_raise_if_default(id) |
|
262 | c.user = self._get_user_or_raise_if_default(id) | |
263 |
|
263 | |||
264 | lifetime = safe_int(request.POST.get('lifetime'), -1) |
|
264 | lifetime = safe_int(request.POST.get('lifetime'), -1) | |
265 | description = request.POST.get('description') |
|
265 | description = request.POST.get('description') | |
266 | ApiKeyModel().create(c.user.user_id, description, lifetime) |
|
266 | ApiKeyModel().create(c.user.user_id, description, lifetime) | |
267 | Session().commit() |
|
267 | Session().commit() | |
268 | h.flash(_("API key successfully created"), category='success') |
|
268 | h.flash(_("API key successfully created"), category='success') | |
269 | raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id)) |
|
269 | raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id)) | |
270 |
|
270 | |||
271 | def delete_api_key(self, id): |
|
271 | def delete_api_key(self, id): | |
272 | c.user = self._get_user_or_raise_if_default(id) |
|
272 | c.user = self._get_user_or_raise_if_default(id) | |
273 |
|
273 | |||
274 | api_key = request.POST.get('del_api_key') |
|
274 | api_key = request.POST.get('del_api_key') | |
275 | if request.POST.get('del_api_key_builtin'): |
|
275 | if request.POST.get('del_api_key_builtin'): | |
276 | c.user.api_key = generate_api_key() |
|
276 | c.user.api_key = generate_api_key() | |
277 | Session().commit() |
|
277 | Session().commit() | |
278 | h.flash(_("API key successfully reset"), category='success') |
|
278 | h.flash(_("API key successfully reset"), category='success') | |
279 | elif api_key: |
|
279 | elif api_key: | |
280 | ApiKeyModel().delete(api_key, c.user.user_id) |
|
280 | ApiKeyModel().delete(api_key, c.user.user_id) | |
281 | Session().commit() |
|
281 | Session().commit() | |
282 | h.flash(_("API key successfully deleted"), category='success') |
|
282 | h.flash(_("API key successfully deleted"), category='success') | |
283 |
|
283 | |||
284 | raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id)) |
|
284 | raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id)) | |
285 |
|
285 | |||
286 | def update_account(self, id): |
|
286 | def update_account(self, id): | |
287 | pass |
|
287 | pass | |
288 |
|
288 | |||
289 | def edit_perms(self, id): |
|
289 | def edit_perms(self, id): | |
290 | c.user = self._get_user_or_raise_if_default(id) |
|
290 | c.user = self._get_user_or_raise_if_default(id) | |
291 | c.active = 'perms' |
|
291 | c.active = 'perms' | |
292 | c.perm_user = AuthUser(dbuser=c.user) |
|
292 | c.perm_user = AuthUser(dbuser=c.user) | |
293 |
|
293 | |||
294 | umodel = UserModel() |
|
294 | umodel = UserModel() | |
295 | defaults = c.user.get_dict() |
|
295 | defaults = c.user.get_dict() | |
296 | defaults.update({ |
|
296 | defaults.update({ | |
297 | 'create_repo_perm': umodel.has_perm(c.user, 'hg.create.repository'), |
|
297 | 'create_repo_perm': umodel.has_perm(c.user, 'hg.create.repository'), | |
298 | 'create_user_group_perm': umodel.has_perm(c.user, |
|
298 | 'create_user_group_perm': umodel.has_perm(c.user, | |
299 | 'hg.usergroup.create.true'), |
|
299 | 'hg.usergroup.create.true'), | |
300 | 'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'), |
|
300 | 'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'), | |
301 | }) |
|
301 | }) | |
302 | return htmlfill.render( |
|
302 | return htmlfill.render( | |
303 | render('admin/users/user_edit.html'), |
|
303 | render('admin/users/user_edit.html'), | |
304 | defaults=defaults, |
|
304 | defaults=defaults, | |
305 | encoding="UTF-8", |
|
305 | encoding="UTF-8", | |
306 | force_defaults=False) |
|
306 | force_defaults=False) | |
307 |
|
307 | |||
308 | def update_perms(self, id): |
|
308 | def update_perms(self, id): | |
309 | user = self._get_user_or_raise_if_default(id) |
|
309 | user = self._get_user_or_raise_if_default(id) | |
310 |
|
310 | |||
311 | try: |
|
311 | try: | |
312 | form = CustomDefaultPermissionsForm()() |
|
312 | form = CustomDefaultPermissionsForm()() | |
313 | form_result = form.to_python(request.POST) |
|
313 | form_result = form.to_python(request.POST) | |
314 |
|
314 | |||
315 | user_model = UserModel() |
|
315 | user_model = UserModel() | |
316 |
|
316 | |||
317 | defs = UserToPerm.query() \ |
|
317 | defs = UserToPerm.query() \ | |
318 | .filter(UserToPerm.user == user) \ |
|
318 | .filter(UserToPerm.user == user) \ | |
319 | .all() |
|
319 | .all() | |
320 | for ug in defs: |
|
320 | for ug in defs: | |
321 | Session().delete(ug) |
|
321 | Session().delete(ug) | |
322 |
|
322 | |||
323 | if form_result['create_repo_perm']: |
|
323 | if form_result['create_repo_perm']: | |
324 | user_model.grant_perm(id, 'hg.create.repository') |
|
324 | user_model.grant_perm(id, 'hg.create.repository') | |
325 | else: |
|
325 | else: | |
326 | user_model.grant_perm(id, 'hg.create.none') |
|
326 | user_model.grant_perm(id, 'hg.create.none') | |
327 | if form_result['create_user_group_perm']: |
|
327 | if form_result['create_user_group_perm']: | |
328 | user_model.grant_perm(id, 'hg.usergroup.create.true') |
|
328 | user_model.grant_perm(id, 'hg.usergroup.create.true') | |
329 | else: |
|
329 | else: | |
330 | user_model.grant_perm(id, 'hg.usergroup.create.false') |
|
330 | user_model.grant_perm(id, 'hg.usergroup.create.false') | |
331 | if form_result['fork_repo_perm']: |
|
331 | if form_result['fork_repo_perm']: | |
332 | user_model.grant_perm(id, 'hg.fork.repository') |
|
332 | user_model.grant_perm(id, 'hg.fork.repository') | |
333 | else: |
|
333 | else: | |
334 | user_model.grant_perm(id, 'hg.fork.none') |
|
334 | user_model.grant_perm(id, 'hg.fork.none') | |
335 | h.flash(_("Updated permissions"), category='success') |
|
335 | h.flash(_("Updated permissions"), category='success') | |
336 | Session().commit() |
|
336 | Session().commit() | |
337 | except Exception: |
|
337 | except Exception: | |
338 | log.error(traceback.format_exc()) |
|
338 | log.error(traceback.format_exc()) | |
339 | h.flash(_('An error occurred during permissions saving'), |
|
339 | h.flash(_('An error occurred during permissions saving'), | |
340 | category='error') |
|
340 | category='error') | |
341 | raise HTTPFound(location=url('edit_user_perms', id=id)) |
|
341 | raise HTTPFound(location=url('edit_user_perms', id=id)) | |
342 |
|
342 | |||
343 | def edit_emails(self, id): |
|
343 | def edit_emails(self, id): | |
344 | c.user = self._get_user_or_raise_if_default(id) |
|
344 | c.user = self._get_user_or_raise_if_default(id) | |
345 | c.active = 'emails' |
|
345 | c.active = 'emails' | |
346 | c.user_email_map = UserEmailMap.query() \ |
|
346 | c.user_email_map = UserEmailMap.query() \ | |
347 | .filter(UserEmailMap.user == c.user).all() |
|
347 | .filter(UserEmailMap.user == c.user).all() | |
348 |
|
348 | |||
349 | defaults = c.user.get_dict() |
|
349 | defaults = c.user.get_dict() | |
350 | return htmlfill.render( |
|
350 | return htmlfill.render( | |
351 | render('admin/users/user_edit.html'), |
|
351 | render('admin/users/user_edit.html'), | |
352 | defaults=defaults, |
|
352 | defaults=defaults, | |
353 | encoding="UTF-8", |
|
353 | encoding="UTF-8", | |
354 | force_defaults=False) |
|
354 | force_defaults=False) | |
355 |
|
355 | |||
356 | def add_email(self, id): |
|
356 | def add_email(self, id): | |
357 | user = self._get_user_or_raise_if_default(id) |
|
357 | user = self._get_user_or_raise_if_default(id) | |
358 | email = request.POST.get('new_email') |
|
358 | email = request.POST.get('new_email') | |
359 | user_model = UserModel() |
|
359 | user_model = UserModel() | |
360 |
|
360 | |||
361 | try: |
|
361 | try: | |
362 | user_model.add_extra_email(id, email) |
|
362 | user_model.add_extra_email(id, email) | |
363 | Session().commit() |
|
363 | Session().commit() | |
364 | h.flash(_("Added email %s to user") % email, category='success') |
|
364 | h.flash(_("Added email %s to user") % email, category='success') | |
365 | except formencode.Invalid as error: |
|
365 | except formencode.Invalid as error: | |
366 | msg = error.error_dict['email'] |
|
366 | msg = error.error_dict['email'] | |
367 | h.flash(msg, category='error') |
|
367 | h.flash(msg, category='error') | |
368 | except Exception: |
|
368 | except Exception: | |
369 | log.error(traceback.format_exc()) |
|
369 | log.error(traceback.format_exc()) | |
370 | h.flash(_('An error occurred during email saving'), |
|
370 | h.flash(_('An error occurred during email saving'), | |
371 | category='error') |
|
371 | category='error') | |
372 | raise HTTPFound(location=url('edit_user_emails', id=id)) |
|
372 | raise HTTPFound(location=url('edit_user_emails', id=id)) | |
373 |
|
373 | |||
374 | def delete_email(self, id): |
|
374 | def delete_email(self, id): | |
375 | user = self._get_user_or_raise_if_default(id) |
|
375 | user = self._get_user_or_raise_if_default(id) | |
376 | email_id = request.POST.get('del_email_id') |
|
376 | email_id = request.POST.get('del_email_id') | |
377 | user_model = UserModel() |
|
377 | user_model = UserModel() | |
378 | user_model.delete_extra_email(id, email_id) |
|
378 | user_model.delete_extra_email(id, email_id) | |
379 | Session().commit() |
|
379 | Session().commit() | |
380 | h.flash(_("Removed email from user"), category='success') |
|
380 | h.flash(_("Removed email from user"), category='success') | |
381 | raise HTTPFound(location=url('edit_user_emails', id=id)) |
|
381 | raise HTTPFound(location=url('edit_user_emails', id=id)) | |
382 |
|
382 | |||
383 | def edit_ips(self, id): |
|
383 | def edit_ips(self, id): | |
384 | c.user = self._get_user_or_raise_if_default(id) |
|
384 | c.user = self._get_user_or_raise_if_default(id) | |
385 | c.active = 'ips' |
|
385 | c.active = 'ips' | |
386 | c.user_ip_map = UserIpMap.query() \ |
|
386 | c.user_ip_map = UserIpMap.query() \ | |
387 | .filter(UserIpMap.user == c.user).all() |
|
387 | .filter(UserIpMap.user == c.user).all() | |
388 |
|
388 | |||
389 | c.default_user_ip_map = UserIpMap.query() \ |
|
389 | c.default_user_ip_map = UserIpMap.query() \ | |
390 | .filter(UserIpMap.user == User.get_default_user()).all() |
|
390 | .filter(UserIpMap.user == User.get_default_user()).all() | |
391 |
|
391 | |||
392 | defaults = c.user.get_dict() |
|
392 | defaults = c.user.get_dict() | |
393 | return htmlfill.render( |
|
393 | return htmlfill.render( | |
394 | render('admin/users/user_edit.html'), |
|
394 | render('admin/users/user_edit.html'), | |
395 | defaults=defaults, |
|
395 | defaults=defaults, | |
396 | encoding="UTF-8", |
|
396 | encoding="UTF-8", | |
397 | force_defaults=False) |
|
397 | force_defaults=False) | |
398 |
|
398 | |||
399 | def add_ip(self, id): |
|
399 | def add_ip(self, id): | |
400 | ip = request.POST.get('new_ip') |
|
400 | ip = request.POST.get('new_ip') | |
401 | user_model = UserModel() |
|
401 | user_model = UserModel() | |
402 |
|
402 | |||
403 | try: |
|
403 | try: | |
404 | user_model.add_extra_ip(id, ip) |
|
404 | user_model.add_extra_ip(id, ip) | |
405 | Session().commit() |
|
405 | Session().commit() | |
406 | h.flash(_("Added IP address %s to user whitelist") % ip, category='success') |
|
406 | h.flash(_("Added IP address %s to user whitelist") % ip, category='success') | |
407 | except formencode.Invalid as error: |
|
407 | except formencode.Invalid as error: | |
408 | msg = error.error_dict['ip'] |
|
408 | msg = error.error_dict['ip'] | |
409 | h.flash(msg, category='error') |
|
409 | h.flash(msg, category='error') | |
410 | except Exception: |
|
410 | except Exception: | |
411 | log.error(traceback.format_exc()) |
|
411 | log.error(traceback.format_exc()) | |
412 | h.flash(_('An error occurred while adding IP address'), |
|
412 | h.flash(_('An error occurred while adding IP address'), | |
413 | category='error') |
|
413 | category='error') | |
414 |
|
414 | |||
415 | if 'default_user' in request.POST: |
|
415 | if 'default_user' in request.POST: | |
416 | raise HTTPFound(location=url('admin_permissions_ips')) |
|
416 | raise HTTPFound(location=url('admin_permissions_ips')) | |
417 | raise HTTPFound(location=url('edit_user_ips', id=id)) |
|
417 | raise HTTPFound(location=url('edit_user_ips', id=id)) | |
418 |
|
418 | |||
419 | def delete_ip(self, id): |
|
419 | def delete_ip(self, id): | |
420 | ip_id = request.POST.get('del_ip_id') |
|
420 | ip_id = request.POST.get('del_ip_id') | |
421 | user_model = UserModel() |
|
421 | user_model = UserModel() | |
422 | user_model.delete_extra_ip(id, ip_id) |
|
422 | user_model.delete_extra_ip(id, ip_id) | |
423 | Session().commit() |
|
423 | Session().commit() | |
424 | h.flash(_("Removed IP address from user whitelist"), category='success') |
|
424 | h.flash(_("Removed IP address from user whitelist"), category='success') | |
425 |
|
425 | |||
426 | if 'default_user' in request.POST: |
|
426 | if 'default_user' in request.POST: | |
427 | raise HTTPFound(location=url('admin_permissions_ips')) |
|
427 | raise HTTPFound(location=url('admin_permissions_ips')) | |
428 | raise HTTPFound(location=url('edit_user_ips', id=id)) |
|
428 | raise HTTPFound(location=url('edit_user_ips', id=id)) | |
429 |
|
429 | |||
430 | @IfSshEnabled |
|
430 | @IfSshEnabled | |
431 | def edit_ssh_keys(self, id): |
|
431 | def edit_ssh_keys(self, id): | |
432 | c.user = self._get_user_or_raise_if_default(id) |
|
432 | c.user = self._get_user_or_raise_if_default(id) | |
433 | c.active = 'ssh_keys' |
|
433 | c.active = 'ssh_keys' | |
434 | c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id) |
|
434 | c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id) | |
435 | defaults = c.user.get_dict() |
|
435 | defaults = c.user.get_dict() | |
436 | return htmlfill.render( |
|
436 | return htmlfill.render( | |
437 | render('admin/users/user_edit.html'), |
|
437 | render('admin/users/user_edit.html'), | |
438 | defaults=defaults, |
|
438 | defaults=defaults, | |
439 | encoding="UTF-8", |
|
439 | encoding="UTF-8", | |
440 | force_defaults=False) |
|
440 | force_defaults=False) | |
441 |
|
441 | |||
442 | @IfSshEnabled |
|
442 | @IfSshEnabled | |
443 | def ssh_keys_add(self, id): |
|
443 | def ssh_keys_add(self, id): | |
444 | c.user = self._get_user_or_raise_if_default(id) |
|
444 | c.user = self._get_user_or_raise_if_default(id) | |
445 |
|
445 | |||
446 | description = request.POST.get('description') |
|
446 | description = request.POST.get('description') | |
447 | public_key = request.POST.get('public_key') |
|
447 | public_key = request.POST.get('public_key') | |
448 | try: |
|
448 | try: | |
449 | new_ssh_key = SshKeyModel().create(c.user.user_id, |
|
449 | new_ssh_key = SshKeyModel().create(c.user.user_id, | |
450 | description, public_key) |
|
450 | description, public_key) | |
451 | Session().commit() |
|
451 | Session().commit() | |
452 | SshKeyModel().write_authorized_keys() |
|
452 | SshKeyModel().write_authorized_keys() | |
453 | h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success') |
|
453 | h.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success') | |
454 | except SshKeyModelException as e: |
|
454 | except SshKeyModelException as e: | |
455 | h.flash(e.args[0], category='error') |
|
455 | h.flash(e.args[0], category='error') | |
456 | raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id)) |
|
456 | raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id)) | |
457 |
|
457 | |||
458 | @IfSshEnabled |
|
458 | @IfSshEnabled | |
459 | def ssh_keys_delete(self, id): |
|
459 | def ssh_keys_delete(self, id): | |
460 | c.user = self._get_user_or_raise_if_default(id) |
|
460 | c.user = self._get_user_or_raise_if_default(id) | |
461 |
|
461 | |||
462 | fingerprint = request.POST.get('del_public_key_fingerprint') |
|
462 | fingerprint = request.POST.get('del_public_key_fingerprint') | |
463 | try: |
|
463 | try: | |
464 | SshKeyModel().delete(fingerprint, c.user.user_id) |
|
464 | SshKeyModel().delete(fingerprint, c.user.user_id) | |
465 | Session().commit() |
|
465 | Session().commit() | |
466 | SshKeyModel().write_authorized_keys() |
|
466 | SshKeyModel().write_authorized_keys() | |
467 | h.flash(_("SSH key successfully deleted"), category='success') |
|
467 | h.flash(_("SSH key successfully deleted"), category='success') | |
468 | except SshKeyModelException as e: |
|
468 | except SshKeyModelException as e: | |
469 | h.flash(e.args[0], category='error') |
|
469 | h.flash(e.args[0], category='error') | |
470 | raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id)) |
|
470 | raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id)) |
@@ -1,359 +1,360 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.auth_modules.auth_ldap |
|
15 | kallithea.lib.auth_modules.auth_ldap | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Kallithea authentication plugin for LDAP |
|
18 | Kallithea authentication plugin for LDAP | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Created on Nov 17, 2010 |
|
22 | :created_on: Created on Nov 17, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | import logging |
|
29 | import logging | |
30 |
|
30 | |||
31 | from kallithea.lib import auth_modules |
|
31 | from kallithea.lib import auth_modules | |
32 | from kallithea.lib.compat import hybrid_property |
|
32 | from kallithea.lib.compat import hybrid_property | |
33 | from kallithea.lib.exceptions import LdapConnectionError, LdapImportError, LdapPasswordError, LdapUsernameError |
|
33 | from kallithea.lib.exceptions import LdapConnectionError, LdapImportError, LdapPasswordError, LdapUsernameError | |
34 |
|
34 | |||
35 |
|
35 | |||
36 | log = logging.getLogger(__name__) |
|
36 | log = logging.getLogger(__name__) | |
37 |
|
37 | |||
38 | try: |
|
38 | try: | |
39 | import ldap |
|
39 | import ldap | |
40 | import ldap.filter |
|
40 | import ldap.filter | |
41 | except ImportError: |
|
41 | except ImportError: | |
42 | # means that python-ldap is not installed |
|
42 | # means that python-ldap is not installed | |
43 | ldap = None |
|
43 | ldap = None | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | class AuthLdap(object): |
|
46 | class AuthLdap(object): | |
47 |
|
47 | |||
48 | def __init__(self, server, base_dn, port=None, bind_dn='', bind_pass='', |
|
48 | def __init__(self, server, base_dn, port=None, bind_dn='', bind_pass='', | |
49 | tls_kind='LDAPS', tls_reqcert='DEMAND', cacertdir=None, ldap_version=3, |
|
49 | tls_kind='LDAPS', tls_reqcert='DEMAND', cacertdir=None, ldap_version=3, | |
50 | ldap_filter='(&(objectClass=user)(!(objectClass=computer)))', |
|
50 | ldap_filter='(&(objectClass=user)(!(objectClass=computer)))', | |
51 | search_scope='SUBTREE', attr_login='uid'): |
|
51 | search_scope='SUBTREE', attr_login='uid'): | |
52 | if ldap is None: |
|
52 | if ldap is None: | |
53 | raise LdapImportError |
|
53 | raise LdapImportError | |
54 |
|
54 | |||
55 | self.ldap_version = ldap_version |
|
55 | self.ldap_version = ldap_version | |
56 |
|
56 | |||
57 | self.TLS_KIND = tls_kind |
|
57 | self.TLS_KIND = tls_kind | |
58 | OPT_X_TLS_DEMAND = 2 |
|
58 | OPT_X_TLS_DEMAND = 2 | |
59 | self.TLS_REQCERT = getattr(ldap, 'OPT_X_TLS_%s' % tls_reqcert, |
|
59 | self.TLS_REQCERT = getattr(ldap, 'OPT_X_TLS_%s' % tls_reqcert, | |
60 | OPT_X_TLS_DEMAND) |
|
60 | OPT_X_TLS_DEMAND) | |
61 | self.cacertdir = cacertdir |
|
61 | self.cacertdir = cacertdir | |
62 |
|
62 | |||
63 | protocol = 'ldaps' if self.TLS_KIND == 'LDAPS' else 'ldap' |
|
63 | protocol = 'ldaps' if self.TLS_KIND == 'LDAPS' else 'ldap' | |
64 | if not port: |
|
64 | if not port: | |
65 | port = 636 if self.TLS_KIND == 'LDAPS' else 389 |
|
65 | port = 636 if self.TLS_KIND == 'LDAPS' else 389 | |
66 | self.LDAP_SERVER = str(', '.join( |
|
66 | self.LDAP_SERVER = str(', '.join( | |
67 | "%s://%s:%s" % (protocol, |
|
67 | "%s://%s:%s" % (protocol, | |
68 | host.strip(), |
|
68 | host.strip(), | |
69 | port) |
|
69 | port) | |
70 | for host in server.split(','))) |
|
70 | for host in server.split(','))) | |
71 |
|
71 | |||
72 | self.LDAP_BIND_DN = bind_dn |
|
72 | self.LDAP_BIND_DN = bind_dn | |
73 | self.LDAP_BIND_PASS = bind_pass |
|
73 | self.LDAP_BIND_PASS = bind_pass | |
74 |
|
74 | |||
75 | self.BASE_DN = base_dn |
|
75 | self.BASE_DN = base_dn | |
76 | self.LDAP_FILTER = ldap_filter |
|
76 | self.LDAP_FILTER = ldap_filter | |
77 | self.SEARCH_SCOPE = getattr(ldap, 'SCOPE_%s' % search_scope) |
|
77 | self.SEARCH_SCOPE = getattr(ldap, 'SCOPE_%s' % search_scope) | |
78 | self.attr_login = attr_login |
|
78 | self.attr_login = attr_login | |
79 |
|
79 | |||
80 | def authenticate_ldap(self, username, password): |
|
80 | def authenticate_ldap(self, username, password): | |
81 | """ |
|
81 | """ | |
82 | Authenticate a user via LDAP and return his/her LDAP properties. |
|
82 | Authenticate a user via LDAP and return his/her LDAP properties. | |
83 |
|
83 | |||
84 | Raises AuthenticationError if the credentials are rejected, or |
|
84 | Raises AuthenticationError if the credentials are rejected, or | |
85 | EnvironmentError if the LDAP server can't be reached. |
|
85 | EnvironmentError if the LDAP server can't be reached. | |
86 |
|
86 | |||
87 | :param username: username |
|
87 | :param username: username | |
88 | :param password: password |
|
88 | :param password: password | |
89 | """ |
|
89 | """ | |
90 |
|
90 | |||
91 | if not password: |
|
91 | if not password: | |
92 | log.debug("Attempt to authenticate LDAP user " |
|
92 | log.debug("Attempt to authenticate LDAP user " | |
93 | "with blank password rejected.") |
|
93 | "with blank password rejected.") | |
94 | raise LdapPasswordError() |
|
94 | raise LdapPasswordError() | |
95 | if "," in username: |
|
95 | if "," in username: | |
96 | raise LdapUsernameError("invalid character in username: ,") |
|
96 | raise LdapUsernameError("invalid character in username: ,") | |
97 | try: |
|
97 | try: | |
98 | if self.cacertdir: |
|
98 | if self.cacertdir: | |
99 | if hasattr(ldap, 'OPT_X_TLS_CACERTDIR'): |
|
99 | if hasattr(ldap, 'OPT_X_TLS_CACERTDIR'): | |
100 | ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, self.cacertdir) |
|
100 | ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, self.cacertdir) | |
101 | else: |
|
101 | else: | |
102 | log.debug("OPT_X_TLS_CACERTDIR is not available - can't set %s", self.cacertdir) |
|
102 | log.debug("OPT_X_TLS_CACERTDIR is not available - can't set %s", self.cacertdir) | |
103 | ldap.set_option(ldap.OPT_REFERRALS, ldap.OPT_OFF) |
|
103 | ldap.set_option(ldap.OPT_REFERRALS, ldap.OPT_OFF) | |
104 | ldap.set_option(ldap.OPT_RESTART, ldap.OPT_ON) |
|
104 | ldap.set_option(ldap.OPT_RESTART, ldap.OPT_ON) | |
105 | ldap.set_option(ldap.OPT_TIMEOUT, 20) |
|
105 | ldap.set_option(ldap.OPT_TIMEOUT, 20) | |
106 | ldap.set_option(ldap.OPT_NETWORK_TIMEOUT, 10) |
|
106 | ldap.set_option(ldap.OPT_NETWORK_TIMEOUT, 10) | |
107 | ldap.set_option(ldap.OPT_TIMELIMIT, 15) |
|
107 | ldap.set_option(ldap.OPT_TIMELIMIT, 15) | |
108 | if self.TLS_KIND != 'PLAIN': |
|
108 | if self.TLS_KIND != 'PLAIN': | |
109 | ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, self.TLS_REQCERT) |
|
109 | ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, self.TLS_REQCERT) | |
110 | server = ldap.initialize(self.LDAP_SERVER) |
|
110 | server = ldap.initialize(self.LDAP_SERVER) | |
111 | if self.ldap_version == 2: |
|
111 | if self.ldap_version == 2: | |
112 | server.protocol = ldap.VERSION2 |
|
112 | server.protocol = ldap.VERSION2 | |
113 | else: |
|
113 | else: | |
114 | server.protocol = ldap.VERSION3 |
|
114 | server.protocol = ldap.VERSION3 | |
115 |
|
115 | |||
116 | if self.TLS_KIND == 'START_TLS': |
|
116 | if self.TLS_KIND == 'START_TLS': | |
117 | server.start_tls_s() |
|
117 | server.start_tls_s() | |
118 |
|
118 | |||
119 | if self.LDAP_BIND_DN and self.LDAP_BIND_PASS: |
|
119 | if self.LDAP_BIND_DN and self.LDAP_BIND_PASS: | |
120 | log.debug('Trying simple_bind with password and given DN: %s', |
|
120 | log.debug('Trying simple_bind with password and given DN: %s', | |
121 | self.LDAP_BIND_DN) |
|
121 | self.LDAP_BIND_DN) | |
122 | server.simple_bind_s(self.LDAP_BIND_DN, self.LDAP_BIND_PASS) |
|
122 | server.simple_bind_s(self.LDAP_BIND_DN, self.LDAP_BIND_PASS) | |
123 |
|
123 | |||
124 | filter_ = '(&%s(%s=%s))' % (self.LDAP_FILTER, |
|
124 | filter_ = '(&%s(%s=%s))' % (self.LDAP_FILTER, | |
125 | ldap.filter.escape_filter_chars(self.attr_login), |
|
125 | ldap.filter.escape_filter_chars(self.attr_login), | |
126 | ldap.filter.escape_filter_chars(username)) |
|
126 | ldap.filter.escape_filter_chars(username)) | |
127 | log.debug("Authenticating %r filter %s at %s", self.BASE_DN, |
|
127 | log.debug("Authenticating %r filter %s at %s", self.BASE_DN, | |
128 | filter_, self.LDAP_SERVER) |
|
128 | filter_, self.LDAP_SERVER) | |
129 | lobjects = server.search_ext_s(self.BASE_DN, self.SEARCH_SCOPE, |
|
129 | lobjects = server.search_ext_s(self.BASE_DN, self.SEARCH_SCOPE, | |
130 | filter_) |
|
130 | filter_) | |
131 |
|
131 | |||
132 | if not lobjects: |
|
132 | if not lobjects: | |
133 | raise ldap.NO_SUCH_OBJECT() |
|
133 | raise ldap.NO_SUCH_OBJECT() | |
134 |
|
134 | |||
135 | for (dn, _attrs) in lobjects: |
|
135 | for (dn, _attrs) in lobjects: | |
136 | if dn is None: |
|
136 | if dn is None: | |
137 | continue |
|
137 | continue | |
138 |
|
138 | |||
139 | try: |
|
139 | try: | |
140 | log.debug('Trying simple bind with %s', dn) |
|
140 | log.debug('Trying simple bind with %s', dn) | |
141 | server.simple_bind_s(dn, password) |
|
141 | server.simple_bind_s(dn, password) | |
142 | results = server.search_ext_s(dn, ldap.SCOPE_BASE, |
|
142 | results = server.search_ext_s(dn, ldap.SCOPE_BASE, | |
143 | '(objectClass=*)') |
|
143 | '(objectClass=*)') | |
144 | if len(results) == 1: |
|
144 | if len(results) == 1: | |
145 | dn_, attrs = results[0] |
|
145 | dn_, attrs = results[0] | |
146 | assert dn_ == dn |
|
146 | assert dn_ == dn | |
147 | return dn, attrs |
|
147 | return dn, attrs | |
148 |
|
148 | |||
149 | except ldap.INVALID_CREDENTIALS: |
|
149 | except ldap.INVALID_CREDENTIALS: | |
150 | log.debug("LDAP rejected password for user '%s': %s", |
|
150 | log.debug("LDAP rejected password for user '%s': %s", | |
151 | username, dn) |
|
151 | username, dn) | |
152 | continue # accept authentication as another ldap user with same username |
|
152 | continue # accept authentication as another ldap user with same username | |
153 |
|
153 | |||
154 | log.debug("No matching LDAP objects for authentication " |
|
154 | log.debug("No matching LDAP objects for authentication " | |
155 | "of '%s'", username) |
|
155 | "of '%s'", username) | |
156 | raise LdapPasswordError() |
|
156 | raise LdapPasswordError() | |
157 |
|
157 | |||
158 | except ldap.NO_SUCH_OBJECT: |
|
158 | except ldap.NO_SUCH_OBJECT: | |
159 | log.debug("LDAP says no such user '%s'", username) |
|
159 | log.debug("LDAP says no such user '%s'", username) | |
160 | raise LdapUsernameError() |
|
160 | raise LdapUsernameError() | |
161 | except ldap.SERVER_DOWN: |
|
161 | except ldap.SERVER_DOWN: | |
162 | # [0] might be {'info': "TLS error -8179:Peer's Certificate issuer is not recognized.", 'desc': "Can't contact LDAP server"} |
|
162 | # [0] might be {'info': "TLS error -8179:Peer's Certificate issuer is not recognized.", 'desc': "Can't contact LDAP server"} | |
163 | raise LdapConnectionError("LDAP can't connect to authentication server") |
|
163 | raise LdapConnectionError("LDAP can't connect to authentication server") | |
164 |
|
164 | |||
165 |
|
165 | |||
166 | class KallitheaAuthPlugin(auth_modules.KallitheaExternalAuthPlugin): |
|
166 | class KallitheaAuthPlugin(auth_modules.KallitheaExternalAuthPlugin): | |
167 | def __init__(self): |
|
167 | def __init__(self): | |
168 | self._logger = logging.getLogger(__name__) |
|
168 | self._logger = logging.getLogger(__name__) | |
169 | self._tls_kind_values = ["PLAIN", "LDAPS", "START_TLS"] |
|
169 | self._tls_kind_values = ["PLAIN", "LDAPS", "START_TLS"] | |
170 | self._tls_reqcert_values = ["NEVER", "ALLOW", "TRY", "DEMAND", "HARD"] |
|
170 | self._tls_reqcert_values = ["NEVER", "ALLOW", "TRY", "DEMAND", "HARD"] | |
171 | self._search_scopes = ["BASE", "ONELEVEL", "SUBTREE"] |
|
171 | self._search_scopes = ["BASE", "ONELEVEL", "SUBTREE"] | |
172 |
|
172 | |||
173 | @hybrid_property |
|
173 | @hybrid_property | |
174 | def name(self): |
|
174 | def name(self): | |
175 | return "ldap" |
|
175 | return "ldap" | |
176 |
|
176 | |||
177 | def settings(self): |
|
177 | def settings(self): | |
178 | settings = [ |
|
178 | settings = [ | |
179 | { |
|
179 | { | |
180 | "name": "host", |
|
180 | "name": "host", | |
181 | "validator": self.validators.UnicodeString(strip=True), |
|
181 | "validator": self.validators.UnicodeString(strip=True), | |
182 | "type": "string", |
|
182 | "type": "string", | |
183 | "description": "Host of the LDAP Server", |
|
183 | "description": "Host of the LDAP Server", | |
184 | "formname": "LDAP Host" |
|
184 | "formname": "LDAP Host" | |
185 | }, |
|
185 | }, | |
186 | { |
|
186 | { | |
187 | "name": "port", |
|
187 | "name": "port", | |
188 | "validator": self.validators.Number(strip=True), |
|
188 | "validator": self.validators.Number(strip=True), | |
189 | "type": "string", |
|
189 | "type": "string", | |
190 | "description": "Port that the LDAP server is listening on. Defaults to 389 for PLAIN/START_TLS and 636 for LDAPS.", |
|
190 | "description": "Port that the LDAP server is listening on. Defaults to 389 for PLAIN/START_TLS and 636 for LDAPS.", | |
191 | "default": "", |
|
191 | "default": "", | |
192 | "formname": "Custom LDAP Port" |
|
192 | "formname": "Custom LDAP Port" | |
193 | }, |
|
193 | }, | |
194 | { |
|
194 | { | |
195 | "name": "dn_user", |
|
195 | "name": "dn_user", | |
196 | "validator": self.validators.UnicodeString(strip=True), |
|
196 | "validator": self.validators.UnicodeString(strip=True), | |
197 | "type": "string", |
|
197 | "type": "string", | |
198 | "description": "User to connect to LDAP", |
|
198 | "description": "User to connect to LDAP", | |
199 | "formname": "Account" |
|
199 | "formname": "Account" | |
200 | }, |
|
200 | }, | |
201 | { |
|
201 | { | |
202 | "name": "dn_pass", |
|
202 | "name": "dn_pass", | |
203 | "validator": self.validators.UnicodeString(strip=True), |
|
203 | "validator": self.validators.UnicodeString(strip=True), | |
204 | "type": "password", |
|
204 | "type": "password", | |
205 | "description": "Password to connect to LDAP", |
|
205 | "description": "Password to connect to LDAP", | |
206 | "formname": "Password" |
|
206 | "formname": "Password" | |
207 | }, |
|
207 | }, | |
208 | { |
|
208 | { | |
209 | "name": "tls_kind", |
|
209 | "name": "tls_kind", | |
210 | "validator": self.validators.OneOf(self._tls_kind_values), |
|
210 | "validator": self.validators.OneOf(self._tls_kind_values), | |
211 | "type": "select", |
|
211 | "type": "select", | |
212 | "values": self._tls_kind_values, |
|
212 | "values": self._tls_kind_values, | |
213 | "description": "TLS Type", |
|
213 | "description": "TLS Type", | |
214 | "default": 'LDAPS', |
|
214 | "default": 'LDAPS', | |
215 | "formname": "Connection Security" |
|
215 | "formname": "Connection Security" | |
216 | }, |
|
216 | }, | |
217 | { |
|
217 | { | |
218 | "name": "tls_reqcert", |
|
218 | "name": "tls_reqcert", | |
219 | "validator": self.validators.OneOf(self._tls_reqcert_values), |
|
219 | "validator": self.validators.OneOf(self._tls_reqcert_values), | |
220 | "type": "select", |
|
220 | "type": "select", | |
221 | "values": self._tls_reqcert_values, |
|
221 | "values": self._tls_reqcert_values, | |
222 | "description": "Require Cert over TLS?", |
|
222 | "description": "Require Cert over TLS?", | |
223 | "formname": "Certificate Checks" |
|
223 | "formname": "Certificate Checks" | |
224 | }, |
|
224 | }, | |
225 | { |
|
225 | { | |
226 | "name": "cacertdir", |
|
226 | "name": "cacertdir", | |
227 | "validator": self.validators.UnicodeString(strip=True), |
|
227 | "validator": self.validators.UnicodeString(strip=True), | |
228 | "type": "string", |
|
228 | "type": "string", | |
229 | "description": "Optional: Custom CA certificate directory for validating LDAPS", |
|
229 | "description": "Optional: Custom CA certificate directory for validating LDAPS", | |
230 | "formname": "Custom CA Certificates" |
|
230 | "formname": "Custom CA Certificates" | |
231 | }, |
|
231 | }, | |
232 | { |
|
232 | { | |
233 | "name": "base_dn", |
|
233 | "name": "base_dn", | |
234 | "validator": self.validators.UnicodeString(strip=True), |
|
234 | "validator": self.validators.UnicodeString(strip=True), | |
235 | "type": "string", |
|
235 | "type": "string", | |
236 | "description": "Base DN to search (e.g., dc=mydomain,dc=com)", |
|
236 | "description": "Base DN to search (e.g., dc=mydomain,dc=com)", | |
237 | "formname": "Base DN" |
|
237 | "formname": "Base DN" | |
238 | }, |
|
238 | }, | |
239 | { |
|
239 | { | |
240 | "name": "filter", |
|
240 | "name": "filter", | |
241 | "validator": self.validators.UnicodeString(strip=True), |
|
241 | "validator": self.validators.UnicodeString(strip=True), | |
242 | "type": "string", |
|
242 | "type": "string", | |
243 | "description": "Filter to narrow results (e.g., ou=Users, etc)", |
|
243 | "description": "Filter to narrow results (e.g., ou=Users, etc)", | |
244 | "formname": "LDAP Search Filter" |
|
244 | "formname": "LDAP Search Filter" | |
245 | }, |
|
245 | }, | |
246 | { |
|
246 | { | |
247 | "name": "search_scope", |
|
247 | "name": "search_scope", | |
248 | "validator": self.validators.OneOf(self._search_scopes), |
|
248 | "validator": self.validators.OneOf(self._search_scopes), | |
249 | "type": "select", |
|
249 | "type": "select", | |
250 | "values": self._search_scopes, |
|
250 | "values": self._search_scopes, | |
251 | "description": "How deep to search LDAP", |
|
251 | "description": "How deep to search LDAP", | |
252 | "formname": "LDAP Search Scope" |
|
252 | "formname": "LDAP Search Scope" | |
253 | }, |
|
253 | }, | |
254 | { |
|
254 | { | |
255 | "name": "attr_login", |
|
255 | "name": "attr_login", | |
256 | "validator": self.validators.AttrLoginValidator(not_empty=True, strip=True), |
|
256 | "validator": self.validators.AttrLoginValidator(not_empty=True, strip=True), | |
257 | "type": "string", |
|
257 | "type": "string", | |
258 | "description": "LDAP Attribute to map to user name", |
|
258 | "description": "LDAP Attribute to map to user name", | |
259 | "formname": "Login Attribute" |
|
259 | "formname": "Login Attribute" | |
260 | }, |
|
260 | }, | |
261 | { |
|
261 | { | |
262 | "name": "attr_firstname", |
|
262 | "name": "attr_firstname", | |
263 | "validator": self.validators.UnicodeString(strip=True), |
|
263 | "validator": self.validators.UnicodeString(strip=True), | |
264 | "type": "string", |
|
264 | "type": "string", | |
265 | "description": "LDAP Attribute to map to first name", |
|
265 | "description": "LDAP Attribute to map to first name", | |
266 | "formname": "First Name Attribute" |
|
266 | "formname": "First Name Attribute" | |
267 | }, |
|
267 | }, | |
268 | { |
|
268 | { | |
269 | "name": "attr_lastname", |
|
269 | "name": "attr_lastname", | |
270 | "validator": self.validators.UnicodeString(strip=True), |
|
270 | "validator": self.validators.UnicodeString(strip=True), | |
271 | "type": "string", |
|
271 | "type": "string", | |
272 | "description": "LDAP Attribute to map to last name", |
|
272 | "description": "LDAP Attribute to map to last name", | |
273 | "formname": "Last Name Attribute" |
|
273 | "formname": "Last Name Attribute" | |
274 | }, |
|
274 | }, | |
275 | { |
|
275 | { | |
276 | "name": "attr_email", |
|
276 | "name": "attr_email", | |
277 | "validator": self.validators.UnicodeString(strip=True), |
|
277 | "validator": self.validators.UnicodeString(strip=True), | |
278 | "type": "string", |
|
278 | "type": "string", | |
279 | "description": "LDAP Attribute to map to email address", |
|
279 | "description": "LDAP Attribute to map to email address", | |
280 | "formname": "Email Attribute" |
|
280 | "formname": "Email Attribute" | |
281 | } |
|
281 | } | |
282 | ] |
|
282 | ] | |
283 | return settings |
|
283 | return settings | |
284 |
|
284 | |||
285 | def use_fake_password(self): |
|
285 | def use_fake_password(self): | |
286 | return True |
|
286 | return True | |
287 |
|
287 | |||
288 | def auth(self, userobj, username, password, settings, **kwargs): |
|
288 | def auth(self, userobj, username, password, settings, **kwargs): | |
289 | """ |
|
289 | """ | |
290 | Given a user object (which may be null), username, a plaintext password, |
|
290 | Given a user object (which may be null), username, a plaintext password, | |
291 | and a settings object (containing all the keys needed as listed in settings()), |
|
291 | and a settings object (containing all the keys needed as listed in settings()), | |
292 | authenticate this user's login attempt. |
|
292 | authenticate this user's login attempt. | |
293 |
|
293 | |||
294 | Return None on failure. On success, return a dictionary of the form: |
|
294 | Return None on failure. On success, return a dictionary of the form: | |
295 |
|
295 | |||
296 | see: KallitheaAuthPluginBase.auth_func_attrs |
|
296 | see: KallitheaAuthPluginBase.auth_func_attrs | |
297 | This is later validated for correctness |
|
297 | This is later validated for correctness | |
298 | """ |
|
298 | """ | |
299 |
|
299 | |||
300 | if not username or not password: |
|
300 | if not username or not password: | |
301 | log.debug('Empty username or password skipping...') |
|
301 | log.debug('Empty username or password skipping...') | |
302 | return None |
|
302 | return None | |
303 |
|
303 | |||
304 | kwargs = { |
|
304 | kwargs = { | |
305 | 'server': settings.get('host', ''), |
|
305 | 'server': settings.get('host', ''), | |
306 | 'base_dn': settings.get('base_dn', ''), |
|
306 | 'base_dn': settings.get('base_dn', ''), | |
307 | 'port': settings.get('port'), |
|
307 | 'port': settings.get('port'), | |
308 | 'bind_dn': settings.get('dn_user'), |
|
308 | 'bind_dn': settings.get('dn_user'), | |
309 | 'bind_pass': settings.get('dn_pass'), |
|
309 | 'bind_pass': settings.get('dn_pass'), | |
310 | 'tls_kind': settings.get('tls_kind'), |
|
310 | 'tls_kind': settings.get('tls_kind'), | |
311 | 'tls_reqcert': settings.get('tls_reqcert'), |
|
311 | 'tls_reqcert': settings.get('tls_reqcert'), | |
312 | 'cacertdir': settings.get('cacertdir'), |
|
312 | 'cacertdir': settings.get('cacertdir'), | |
313 | 'ldap_filter': settings.get('filter'), |
|
313 | 'ldap_filter': settings.get('filter'), | |
314 | 'search_scope': settings.get('search_scope'), |
|
314 | 'search_scope': settings.get('search_scope'), | |
315 | 'attr_login': settings.get('attr_login'), |
|
315 | 'attr_login': settings.get('attr_login'), | |
316 | 'ldap_version': 3, |
|
316 | 'ldap_version': 3, | |
317 | } |
|
317 | } | |
318 |
|
318 | |||
319 | if kwargs['bind_dn'] and not kwargs['bind_pass']: |
|
319 | if kwargs['bind_dn'] and not kwargs['bind_pass']: | |
320 | log.debug('Using dynamic binding.') |
|
320 | log.debug('Using dynamic binding.') | |
321 | kwargs['bind_dn'] = kwargs['bind_dn'].replace('$login', username) |
|
321 | kwargs['bind_dn'] = kwargs['bind_dn'].replace('$login', username) | |
322 | kwargs['bind_pass'] = password |
|
322 | kwargs['bind_pass'] = password | |
323 | log.debug('Checking for ldap authentication') |
|
323 | log.debug('Checking for ldap authentication') | |
324 |
|
324 | |||
325 | try: |
|
325 | try: | |
326 | aldap = AuthLdap(**kwargs) |
|
326 | aldap = AuthLdap(**kwargs) | |
327 | (user_dn, ldap_attrs) = aldap.authenticate_ldap(username, password) |
|
327 | (user_dn, ldap_attrs) = aldap.authenticate_ldap(username, password) | |
328 | log.debug('Got ldap DN response %s', user_dn) |
|
328 | log.debug('Got ldap DN response %s', user_dn) | |
329 |
|
329 | |||
330 | get_ldap_attr = lambda k: ldap_attrs.get(settings.get(k), [''])[0] |
|
330 | def get_ldap_attr(k): | |
|
331 | return ldap_attrs.get(settings.get(k), [''])[0] | |||
331 |
|
332 | |||
332 | # old attrs fetched from Kallithea database |
|
333 | # old attrs fetched from Kallithea database | |
333 | admin = getattr(userobj, 'admin', False) |
|
334 | admin = getattr(userobj, 'admin', False) | |
334 | email = getattr(userobj, 'email', '') |
|
335 | email = getattr(userobj, 'email', '') | |
335 | firstname = getattr(userobj, 'firstname', '') |
|
336 | firstname = getattr(userobj, 'firstname', '') | |
336 | lastname = getattr(userobj, 'lastname', '') |
|
337 | lastname = getattr(userobj, 'lastname', '') | |
337 |
|
338 | |||
338 | user_data = { |
|
339 | user_data = { | |
339 | 'username': username, |
|
340 | 'username': username, | |
340 | 'firstname': get_ldap_attr('attr_firstname') or firstname, |
|
341 | 'firstname': get_ldap_attr('attr_firstname') or firstname, | |
341 | 'lastname': get_ldap_attr('attr_lastname') or lastname, |
|
342 | 'lastname': get_ldap_attr('attr_lastname') or lastname, | |
342 | 'groups': [], |
|
343 | 'groups': [], | |
343 | 'email': get_ldap_attr('attr_email') or email, |
|
344 | 'email': get_ldap_attr('attr_email') or email, | |
344 | 'admin': admin, |
|
345 | 'admin': admin, | |
345 | 'extern_name': user_dn, |
|
346 | 'extern_name': user_dn, | |
346 | } |
|
347 | } | |
347 | log.info('user %s authenticated correctly', user_data['username']) |
|
348 | log.info('user %s authenticated correctly', user_data['username']) | |
348 | return user_data |
|
349 | return user_data | |
349 |
|
350 | |||
350 | except LdapUsernameError: |
|
351 | except LdapUsernameError: | |
351 | log.info('Error authenticating %s with LDAP: User not found', username) |
|
352 | log.info('Error authenticating %s with LDAP: User not found', username) | |
352 | except LdapPasswordError: |
|
353 | except LdapPasswordError: | |
353 | log.info('Error authenticating %s with LDAP: Password error', username) |
|
354 | log.info('Error authenticating %s with LDAP: Password error', username) | |
354 | except LdapImportError: |
|
355 | except LdapImportError: | |
355 | log.error('Error authenticating %s with LDAP: LDAP not available', username) |
|
356 | log.error('Error authenticating %s with LDAP: LDAP not available', username) | |
356 | return None |
|
357 | return None | |
357 |
|
358 | |||
358 | def get_managed_fields(self): |
|
359 | def get_managed_fields(self): | |
359 | return ['username', 'firstname', 'lastname', 'email', 'password'] |
|
360 | return ['username', 'firstname', 'lastname', 'email', 'password'] |
@@ -1,495 +1,497 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.celerylib.tasks |
|
15 | kallithea.lib.celerylib.tasks | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Kallithea task modules, containing all task that suppose to be run |
|
18 | Kallithea task modules, containing all task that suppose to be run | |
19 | by celery daemon |
|
19 | by celery daemon | |
20 |
|
20 | |||
21 | This file was forked by the Kallithea project in July 2014. |
|
21 | This file was forked by the Kallithea project in July 2014. | |
22 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | Original author and date, and relevant copyright and licensing information is below: | |
23 | :created_on: Oct 6, 2010 |
|
23 | :created_on: Oct 6, 2010 | |
24 | :author: marcink |
|
24 | :author: marcink | |
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
26 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | :license: GPLv3, see LICENSE.md for more details. | |
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 | import email.utils |
|
29 | import email.utils | |
30 | import os |
|
30 | import os | |
31 | import traceback |
|
31 | import traceback | |
32 | from collections import OrderedDict |
|
32 | from collections import OrderedDict | |
33 | from operator import itemgetter |
|
33 | from operator import itemgetter | |
34 | from time import mktime |
|
34 | from time import mktime | |
35 |
|
35 | |||
36 | import celery.utils.log |
|
36 | import celery.utils.log | |
37 | from tg import config |
|
37 | from tg import config | |
38 |
|
38 | |||
39 | import kallithea |
|
39 | import kallithea | |
40 | from kallithea.lib import celerylib, ext_json |
|
40 | from kallithea.lib import celerylib, ext_json | |
41 | from kallithea.lib.helpers import person |
|
41 | from kallithea.lib.helpers import person | |
42 | from kallithea.lib.hooks import log_create_repository |
|
42 | from kallithea.lib.hooks import log_create_repository | |
43 | from kallithea.lib.rcmail.smtp_mailer import SmtpMailer |
|
43 | from kallithea.lib.rcmail.smtp_mailer import SmtpMailer | |
44 | from kallithea.lib.utils import action_logger |
|
44 | from kallithea.lib.utils import action_logger | |
45 | from kallithea.lib.utils2 import ascii_bytes, str2bool |
|
45 | from kallithea.lib.utils2 import ascii_bytes, str2bool | |
46 | from kallithea.lib.vcs.utils import author_email |
|
46 | from kallithea.lib.vcs.utils import author_email | |
47 | from kallithea.model.db import RepoGroup, Repository, Statistics, User |
|
47 | from kallithea.model.db import RepoGroup, Repository, Statistics, User | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | __all__ = ['whoosh_index', 'get_commits_stats', 'send_email'] |
|
50 | __all__ = ['whoosh_index', 'get_commits_stats', 'send_email'] | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | log = celery.utils.log.get_task_logger(__name__) |
|
53 | log = celery.utils.log.get_task_logger(__name__) | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | @celerylib.task |
|
56 | @celerylib.task | |
57 | @celerylib.locked_task |
|
57 | @celerylib.locked_task | |
58 | @celerylib.dbsession |
|
58 | @celerylib.dbsession | |
59 | def whoosh_index(repo_location, full_index): |
|
59 | def whoosh_index(repo_location, full_index): | |
60 | from kallithea.lib.indexers.daemon import WhooshIndexingDaemon |
|
60 | from kallithea.lib.indexers.daemon import WhooshIndexingDaemon | |
61 | celerylib.get_session() # initialize database connection |
|
61 | celerylib.get_session() # initialize database connection | |
62 |
|
62 | |||
63 | index_location = config['index_dir'] |
|
63 | index_location = config['index_dir'] | |
64 | WhooshIndexingDaemon(index_location=index_location, |
|
64 | WhooshIndexingDaemon(index_location=index_location, | |
65 | repo_location=repo_location) \ |
|
65 | repo_location=repo_location) \ | |
66 | .run(full_index=full_index) |
|
66 | .run(full_index=full_index) | |
67 |
|
67 | |||
68 |
|
68 | |||
|
69 | # for js data compatibility cleans the key for person from ' | |||
|
70 | def akc(k): | |||
|
71 | return person(k).replace('"', '') | |||
|
72 | ||||
|
73 | ||||
69 | @celerylib.task |
|
74 | @celerylib.task | |
70 | @celerylib.dbsession |
|
75 | @celerylib.dbsession | |
71 | def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100): |
|
76 | def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100): | |
72 | DBS = celerylib.get_session() |
|
77 | DBS = celerylib.get_session() | |
73 | lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y, |
|
78 | lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y, | |
74 | ts_max_y) |
|
79 | ts_max_y) | |
75 | lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir'] # Backward compatibility for TurboGears < 2.4 |
|
80 | lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir'] # Backward compatibility for TurboGears < 2.4 | |
76 |
|
81 | |||
77 | log.info('running task with lockkey %s', lockkey) |
|
82 | log.info('running task with lockkey %s', lockkey) | |
78 |
|
83 | |||
79 | try: |
|
84 | try: | |
80 | lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey)) |
|
85 | lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey)) | |
81 |
|
86 | |||
82 | # for js data compatibility cleans the key for person from ' |
|
|||
83 | akc = lambda k: person(k).replace('"', "") |
|
|||
84 |
|
||||
85 | co_day_auth_aggr = {} |
|
87 | co_day_auth_aggr = {} | |
86 | commits_by_day_aggregate = {} |
|
88 | commits_by_day_aggregate = {} | |
87 | repo = Repository.get_by_repo_name(repo_name) |
|
89 | repo = Repository.get_by_repo_name(repo_name) | |
88 | if repo is None: |
|
90 | if repo is None: | |
89 | return True |
|
91 | return True | |
90 |
|
92 | |||
91 | repo = repo.scm_instance |
|
93 | repo = repo.scm_instance | |
92 | repo_size = repo.count() |
|
94 | repo_size = repo.count() | |
93 | # return if repo have no revisions |
|
95 | # return if repo have no revisions | |
94 | if repo_size < 1: |
|
96 | if repo_size < 1: | |
95 | lock.release() |
|
97 | lock.release() | |
96 | return True |
|
98 | return True | |
97 |
|
99 | |||
98 | skip_date_limit = True |
|
100 | skip_date_limit = True | |
99 | parse_limit = int(config.get('commit_parse_limit')) |
|
101 | parse_limit = int(config.get('commit_parse_limit')) | |
100 | last_rev = None |
|
102 | last_rev = None | |
101 | last_cs = None |
|
103 | last_cs = None | |
102 | timegetter = itemgetter('time') |
|
104 | timegetter = itemgetter('time') | |
103 |
|
105 | |||
104 | dbrepo = DBS.query(Repository) \ |
|
106 | dbrepo = DBS.query(Repository) \ | |
105 | .filter(Repository.repo_name == repo_name).scalar() |
|
107 | .filter(Repository.repo_name == repo_name).scalar() | |
106 | cur_stats = DBS.query(Statistics) \ |
|
108 | cur_stats = DBS.query(Statistics) \ | |
107 | .filter(Statistics.repository == dbrepo).scalar() |
|
109 | .filter(Statistics.repository == dbrepo).scalar() | |
108 |
|
110 | |||
109 | if cur_stats is not None: |
|
111 | if cur_stats is not None: | |
110 | last_rev = cur_stats.stat_on_revision |
|
112 | last_rev = cur_stats.stat_on_revision | |
111 |
|
113 | |||
112 | if last_rev == repo.get_changeset().revision and repo_size > 1: |
|
114 | if last_rev == repo.get_changeset().revision and repo_size > 1: | |
113 | # pass silently without any work if we're not on first revision or |
|
115 | # pass silently without any work if we're not on first revision or | |
114 | # current state of parsing revision(from db marker) is the |
|
116 | # current state of parsing revision(from db marker) is the | |
115 | # last revision |
|
117 | # last revision | |
116 | lock.release() |
|
118 | lock.release() | |
117 | return True |
|
119 | return True | |
118 |
|
120 | |||
119 | if cur_stats: |
|
121 | if cur_stats: | |
120 | commits_by_day_aggregate = OrderedDict(ext_json.loads( |
|
122 | commits_by_day_aggregate = OrderedDict(ext_json.loads( | |
121 | cur_stats.commit_activity_combined)) |
|
123 | cur_stats.commit_activity_combined)) | |
122 | co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity) |
|
124 | co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity) | |
123 |
|
125 | |||
124 | log.debug('starting parsing %s', parse_limit) |
|
126 | log.debug('starting parsing %s', parse_limit) | |
125 |
|
127 | |||
126 | last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0 |
|
128 | last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0 | |
127 | log.debug('Getting revisions from %s to %s', |
|
129 | log.debug('Getting revisions from %s to %s', | |
128 | last_rev, last_rev + parse_limit |
|
130 | last_rev, last_rev + parse_limit | |
129 | ) |
|
131 | ) | |
130 | for cs in repo[last_rev:last_rev + parse_limit]: |
|
132 | for cs in repo[last_rev:last_rev + parse_limit]: | |
131 | log.debug('parsing %s', cs) |
|
133 | log.debug('parsing %s', cs) | |
132 | last_cs = cs # remember last parsed changeset |
|
134 | last_cs = cs # remember last parsed changeset | |
133 | tt = cs.date.timetuple() |
|
135 | tt = cs.date.timetuple() | |
134 | k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0)) |
|
136 | k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0)) | |
135 |
|
137 | |||
136 | if akc(cs.author) in co_day_auth_aggr: |
|
138 | if akc(cs.author) in co_day_auth_aggr: | |
137 | try: |
|
139 | try: | |
138 | l = [timegetter(x) for x in |
|
140 | l = [timegetter(x) for x in | |
139 | co_day_auth_aggr[akc(cs.author)]['data']] |
|
141 | co_day_auth_aggr[akc(cs.author)]['data']] | |
140 | time_pos = l.index(k) |
|
142 | time_pos = l.index(k) | |
141 | except ValueError: |
|
143 | except ValueError: | |
142 | time_pos = None |
|
144 | time_pos = None | |
143 |
|
145 | |||
144 | if time_pos is not None and time_pos >= 0: |
|
146 | if time_pos is not None and time_pos >= 0: | |
145 | datadict = \ |
|
147 | datadict = \ | |
146 | co_day_auth_aggr[akc(cs.author)]['data'][time_pos] |
|
148 | co_day_auth_aggr[akc(cs.author)]['data'][time_pos] | |
147 |
|
149 | |||
148 | datadict["commits"] += 1 |
|
150 | datadict["commits"] += 1 | |
149 | datadict["added"] += len(cs.added) |
|
151 | datadict["added"] += len(cs.added) | |
150 | datadict["changed"] += len(cs.changed) |
|
152 | datadict["changed"] += len(cs.changed) | |
151 | datadict["removed"] += len(cs.removed) |
|
153 | datadict["removed"] += len(cs.removed) | |
152 |
|
154 | |||
153 | else: |
|
155 | else: | |
154 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: |
|
156 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: | |
155 |
|
157 | |||
156 | datadict = {"time": k, |
|
158 | datadict = {"time": k, | |
157 | "commits": 1, |
|
159 | "commits": 1, | |
158 | "added": len(cs.added), |
|
160 | "added": len(cs.added), | |
159 | "changed": len(cs.changed), |
|
161 | "changed": len(cs.changed), | |
160 | "removed": len(cs.removed), |
|
162 | "removed": len(cs.removed), | |
161 | } |
|
163 | } | |
162 | co_day_auth_aggr[akc(cs.author)]['data'] \ |
|
164 | co_day_auth_aggr[akc(cs.author)]['data'] \ | |
163 | .append(datadict) |
|
165 | .append(datadict) | |
164 |
|
166 | |||
165 | else: |
|
167 | else: | |
166 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: |
|
168 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: | |
167 | co_day_auth_aggr[akc(cs.author)] = { |
|
169 | co_day_auth_aggr[akc(cs.author)] = { | |
168 | "label": akc(cs.author), |
|
170 | "label": akc(cs.author), | |
169 | "data": [{"time": k, |
|
171 | "data": [{"time": k, | |
170 | "commits": 1, |
|
172 | "commits": 1, | |
171 | "added": len(cs.added), |
|
173 | "added": len(cs.added), | |
172 | "changed": len(cs.changed), |
|
174 | "changed": len(cs.changed), | |
173 | "removed": len(cs.removed), |
|
175 | "removed": len(cs.removed), | |
174 | }], |
|
176 | }], | |
175 | "schema": ["commits"], |
|
177 | "schema": ["commits"], | |
176 | } |
|
178 | } | |
177 |
|
179 | |||
178 | # gather all data by day |
|
180 | # gather all data by day | |
179 | if k in commits_by_day_aggregate: |
|
181 | if k in commits_by_day_aggregate: | |
180 | commits_by_day_aggregate[k] += 1 |
|
182 | commits_by_day_aggregate[k] += 1 | |
181 | else: |
|
183 | else: | |
182 | commits_by_day_aggregate[k] = 1 |
|
184 | commits_by_day_aggregate[k] = 1 | |
183 |
|
185 | |||
184 | overview_data = sorted(commits_by_day_aggregate.items(), |
|
186 | overview_data = sorted(commits_by_day_aggregate.items(), | |
185 | key=itemgetter(0)) |
|
187 | key=itemgetter(0)) | |
186 |
|
188 | |||
187 | if not co_day_auth_aggr: |
|
189 | if not co_day_auth_aggr: | |
188 | co_day_auth_aggr[akc(repo.contact)] = { |
|
190 | co_day_auth_aggr[akc(repo.contact)] = { | |
189 | "label": akc(repo.contact), |
|
191 | "label": akc(repo.contact), | |
190 | "data": [0, 1], |
|
192 | "data": [0, 1], | |
191 | "schema": ["commits"], |
|
193 | "schema": ["commits"], | |
192 | } |
|
194 | } | |
193 |
|
195 | |||
194 | stats = cur_stats if cur_stats else Statistics() |
|
196 | stats = cur_stats if cur_stats else Statistics() | |
195 | stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr)) |
|
197 | stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr)) | |
196 | stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data)) |
|
198 | stats.commit_activity_combined = ascii_bytes(ext_json.dumps(overview_data)) | |
197 |
|
199 | |||
198 | log.debug('last revision %s', last_rev) |
|
200 | log.debug('last revision %s', last_rev) | |
199 | leftovers = len(repo.revisions[last_rev:]) |
|
201 | leftovers = len(repo.revisions[last_rev:]) | |
200 | log.debug('revisions to parse %s', leftovers) |
|
202 | log.debug('revisions to parse %s', leftovers) | |
201 |
|
203 | |||
202 | if last_rev == 0 or leftovers < parse_limit: |
|
204 | if last_rev == 0 or leftovers < parse_limit: | |
203 | log.debug('getting code trending stats') |
|
205 | log.debug('getting code trending stats') | |
204 | stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name))) |
|
206 | stats.languages = ascii_bytes(ext_json.dumps(__get_codes_stats(repo_name))) | |
205 |
|
207 | |||
206 | try: |
|
208 | try: | |
207 | stats.repository = dbrepo |
|
209 | stats.repository = dbrepo | |
208 | stats.stat_on_revision = last_cs.revision if last_cs else 0 |
|
210 | stats.stat_on_revision = last_cs.revision if last_cs else 0 | |
209 | DBS.add(stats) |
|
211 | DBS.add(stats) | |
210 | DBS.commit() |
|
212 | DBS.commit() | |
211 | except: |
|
213 | except: | |
212 | log.error(traceback.format_exc()) |
|
214 | log.error(traceback.format_exc()) | |
213 | DBS.rollback() |
|
215 | DBS.rollback() | |
214 | lock.release() |
|
216 | lock.release() | |
215 | return False |
|
217 | return False | |
216 |
|
218 | |||
217 | # final release |
|
219 | # final release | |
218 | lock.release() |
|
220 | lock.release() | |
219 |
|
221 | |||
220 | # execute another task if celery is enabled |
|
222 | # execute another task if celery is enabled | |
221 | if len(repo.revisions) > 1 and kallithea.CELERY_APP and recurse_limit > 0: |
|
223 | if len(repo.revisions) > 1 and kallithea.CELERY_APP and recurse_limit > 0: | |
222 | get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1) |
|
224 | get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1) | |
223 | elif recurse_limit <= 0: |
|
225 | elif recurse_limit <= 0: | |
224 | log.debug('Not recursing - limit has been reached') |
|
226 | log.debug('Not recursing - limit has been reached') | |
225 | else: |
|
227 | else: | |
226 | log.debug('Not recursing') |
|
228 | log.debug('Not recursing') | |
227 | except celerylib.LockHeld: |
|
229 | except celerylib.LockHeld: | |
228 | log.info('Task with key %s already running', lockkey) |
|
230 | log.info('Task with key %s already running', lockkey) | |
229 | return 'Task with key %s already running' % lockkey |
|
231 | return 'Task with key %s already running' % lockkey | |
230 |
|
232 | |||
231 |
|
233 | |||
232 | @celerylib.task |
|
234 | @celerylib.task | |
233 | @celerylib.dbsession |
|
235 | @celerylib.dbsession | |
234 | def send_email(recipients, subject, body='', html_body='', headers=None, author=None): |
|
236 | def send_email(recipients, subject, body='', html_body='', headers=None, author=None): | |
235 | """ |
|
237 | """ | |
236 | Sends an email with defined parameters from the .ini files. |
|
238 | Sends an email with defined parameters from the .ini files. | |
237 |
|
239 | |||
238 | :param recipients: list of recipients, if this is None, the defined email |
|
240 | :param recipients: list of recipients, if this is None, the defined email | |
239 | address from field 'email_to' and all admins is used instead |
|
241 | address from field 'email_to' and all admins is used instead | |
240 | :param subject: subject of the mail |
|
242 | :param subject: subject of the mail | |
241 | :param body: body of the mail |
|
243 | :param body: body of the mail | |
242 | :param html_body: html version of body |
|
244 | :param html_body: html version of body | |
243 | :param headers: dictionary of prepopulated e-mail headers |
|
245 | :param headers: dictionary of prepopulated e-mail headers | |
244 | :param author: User object of the author of this mail, if known and relevant |
|
246 | :param author: User object of the author of this mail, if known and relevant | |
245 | """ |
|
247 | """ | |
246 | assert isinstance(recipients, list), recipients |
|
248 | assert isinstance(recipients, list), recipients | |
247 | if headers is None: |
|
249 | if headers is None: | |
248 | headers = {} |
|
250 | headers = {} | |
249 | else: |
|
251 | else: | |
250 | # do not modify the original headers object passed by the caller |
|
252 | # do not modify the original headers object passed by the caller | |
251 | headers = headers.copy() |
|
253 | headers = headers.copy() | |
252 |
|
254 | |||
253 | email_config = config |
|
255 | email_config = config | |
254 | email_prefix = email_config.get('email_prefix', '') |
|
256 | email_prefix = email_config.get('email_prefix', '') | |
255 | if email_prefix: |
|
257 | if email_prefix: | |
256 | subject = "%s %s" % (email_prefix, subject) |
|
258 | subject = "%s %s" % (email_prefix, subject) | |
257 |
|
259 | |||
258 | if not recipients: |
|
260 | if not recipients: | |
259 | # if recipients are not defined we send to email_config + all admins |
|
261 | # if recipients are not defined we send to email_config + all admins | |
260 | recipients = [u.email for u in User.query() |
|
262 | recipients = [u.email for u in User.query() | |
261 | .filter(User.admin == True).all()] |
|
263 | .filter(User.admin == True).all()] | |
262 | if email_config.get('email_to') is not None: |
|
264 | if email_config.get('email_to') is not None: | |
263 | recipients += email_config.get('email_to').split(',') |
|
265 | recipients += email_config.get('email_to').split(',') | |
264 |
|
266 | |||
265 | # If there are still no recipients, there are no admins and no address |
|
267 | # If there are still no recipients, there are no admins and no address | |
266 | # configured in email_to, so return. |
|
268 | # configured in email_to, so return. | |
267 | if not recipients: |
|
269 | if not recipients: | |
268 | log.error("No recipients specified and no fallback available.") |
|
270 | log.error("No recipients specified and no fallback available.") | |
269 | return False |
|
271 | return False | |
270 |
|
272 | |||
271 | log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients)) |
|
273 | log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients)) | |
272 |
|
274 | |||
273 | # SMTP sender |
|
275 | # SMTP sender | |
274 | envelope_from = email_config.get('app_email_from', 'Kallithea') |
|
276 | envelope_from = email_config.get('app_email_from', 'Kallithea') | |
275 | # 'From' header |
|
277 | # 'From' header | |
276 | if author is not None: |
|
278 | if author is not None: | |
277 | # set From header based on author but with a generic e-mail address |
|
279 | # set From header based on author but with a generic e-mail address | |
278 | # In case app_email_from is in "Some Name <e-mail>" format, we first |
|
280 | # In case app_email_from is in "Some Name <e-mail>" format, we first | |
279 | # extract the e-mail address. |
|
281 | # extract the e-mail address. | |
280 | envelope_addr = author_email(envelope_from) |
|
282 | envelope_addr = author_email(envelope_from) | |
281 | headers['From'] = '"%s" <%s>' % ( |
|
283 | headers['From'] = '"%s" <%s>' % ( | |
282 | email.utils.quote('%s (no-reply)' % author.full_name_or_username), |
|
284 | email.utils.quote('%s (no-reply)' % author.full_name_or_username), | |
283 | envelope_addr) |
|
285 | envelope_addr) | |
284 |
|
286 | |||
285 | user = email_config.get('smtp_username') |
|
287 | user = email_config.get('smtp_username') | |
286 | passwd = email_config.get('smtp_password') |
|
288 | passwd = email_config.get('smtp_password') | |
287 | mail_server = email_config.get('smtp_server') |
|
289 | mail_server = email_config.get('smtp_server') | |
288 | mail_port = email_config.get('smtp_port') |
|
290 | mail_port = email_config.get('smtp_port') | |
289 | tls = str2bool(email_config.get('smtp_use_tls')) |
|
291 | tls = str2bool(email_config.get('smtp_use_tls')) | |
290 | ssl = str2bool(email_config.get('smtp_use_ssl')) |
|
292 | ssl = str2bool(email_config.get('smtp_use_ssl')) | |
291 | debug = str2bool(email_config.get('debug')) |
|
293 | debug = str2bool(email_config.get('debug')) | |
292 | smtp_auth = email_config.get('smtp_auth') |
|
294 | smtp_auth = email_config.get('smtp_auth') | |
293 |
|
295 | |||
294 | logmsg = ("Mail details:\n" |
|
296 | logmsg = ("Mail details:\n" | |
295 | "recipients: %s\n" |
|
297 | "recipients: %s\n" | |
296 | "headers: %s\n" |
|
298 | "headers: %s\n" | |
297 | "subject: %s\n" |
|
299 | "subject: %s\n" | |
298 | "body:\n%s\n" |
|
300 | "body:\n%s\n" | |
299 | "html:\n%s\n" |
|
301 | "html:\n%s\n" | |
300 | % (' '.join(recipients), headers, subject, body, html_body)) |
|
302 | % (' '.join(recipients), headers, subject, body, html_body)) | |
301 |
|
303 | |||
302 | if mail_server: |
|
304 | if mail_server: | |
303 | log.debug("Sending e-mail. " + logmsg) |
|
305 | log.debug("Sending e-mail. " + logmsg) | |
304 | else: |
|
306 | else: | |
305 | log.error("SMTP mail server not configured - cannot send e-mail.") |
|
307 | log.error("SMTP mail server not configured - cannot send e-mail.") | |
306 | log.warning(logmsg) |
|
308 | log.warning(logmsg) | |
307 | return False |
|
309 | return False | |
308 |
|
310 | |||
309 | try: |
|
311 | try: | |
310 | m = SmtpMailer(envelope_from, user, passwd, mail_server, smtp_auth, |
|
312 | m = SmtpMailer(envelope_from, user, passwd, mail_server, smtp_auth, | |
311 | mail_port, ssl, tls, debug=debug) |
|
313 | mail_port, ssl, tls, debug=debug) | |
312 | m.send(recipients, subject, body, html_body, headers=headers) |
|
314 | m.send(recipients, subject, body, html_body, headers=headers) | |
313 | except: |
|
315 | except: | |
314 | log.error('Mail sending failed') |
|
316 | log.error('Mail sending failed') | |
315 | log.error(traceback.format_exc()) |
|
317 | log.error(traceback.format_exc()) | |
316 | return False |
|
318 | return False | |
317 | return True |
|
319 | return True | |
318 |
|
320 | |||
319 |
|
321 | |||
320 | @celerylib.task |
|
322 | @celerylib.task | |
321 | @celerylib.dbsession |
|
323 | @celerylib.dbsession | |
322 | def create_repo(form_data, cur_user): |
|
324 | def create_repo(form_data, cur_user): | |
323 | from kallithea.model.repo import RepoModel |
|
325 | from kallithea.model.repo import RepoModel | |
324 | from kallithea.model.db import Setting |
|
326 | from kallithea.model.db import Setting | |
325 |
|
327 | |||
326 | DBS = celerylib.get_session() |
|
328 | DBS = celerylib.get_session() | |
327 |
|
329 | |||
328 | cur_user = User.guess_instance(cur_user) |
|
330 | cur_user = User.guess_instance(cur_user) | |
329 |
|
331 | |||
330 | owner = cur_user |
|
332 | owner = cur_user | |
331 | repo_name = form_data['repo_name'] |
|
333 | repo_name = form_data['repo_name'] | |
332 | repo_name_full = form_data['repo_name_full'] |
|
334 | repo_name_full = form_data['repo_name_full'] | |
333 | repo_type = form_data['repo_type'] |
|
335 | repo_type = form_data['repo_type'] | |
334 | description = form_data['repo_description'] |
|
336 | description = form_data['repo_description'] | |
335 | private = form_data['repo_private'] |
|
337 | private = form_data['repo_private'] | |
336 | clone_uri = form_data.get('clone_uri') |
|
338 | clone_uri = form_data.get('clone_uri') | |
337 | repo_group = form_data['repo_group'] |
|
339 | repo_group = form_data['repo_group'] | |
338 | landing_rev = form_data['repo_landing_rev'] |
|
340 | landing_rev = form_data['repo_landing_rev'] | |
339 | copy_fork_permissions = form_data.get('copy_permissions') |
|
341 | copy_fork_permissions = form_data.get('copy_permissions') | |
340 | copy_group_permissions = form_data.get('repo_copy_permissions') |
|
342 | copy_group_permissions = form_data.get('repo_copy_permissions') | |
341 | fork_of = form_data.get('fork_parent_id') |
|
343 | fork_of = form_data.get('fork_parent_id') | |
342 | state = form_data.get('repo_state', Repository.STATE_PENDING) |
|
344 | state = form_data.get('repo_state', Repository.STATE_PENDING) | |
343 |
|
345 | |||
344 | # repo creation defaults, private and repo_type are filled in form |
|
346 | # repo creation defaults, private and repo_type are filled in form | |
345 | defs = Setting.get_default_repo_settings(strip_prefix=True) |
|
347 | defs = Setting.get_default_repo_settings(strip_prefix=True) | |
346 | enable_statistics = defs.get('repo_enable_statistics') |
|
348 | enable_statistics = defs.get('repo_enable_statistics') | |
347 | enable_downloads = defs.get('repo_enable_downloads') |
|
349 | enable_downloads = defs.get('repo_enable_downloads') | |
348 |
|
350 | |||
349 | try: |
|
351 | try: | |
350 | repo = RepoModel()._create_repo( |
|
352 | repo = RepoModel()._create_repo( | |
351 | repo_name=repo_name_full, |
|
353 | repo_name=repo_name_full, | |
352 | repo_type=repo_type, |
|
354 | repo_type=repo_type, | |
353 | description=description, |
|
355 | description=description, | |
354 | owner=owner, |
|
356 | owner=owner, | |
355 | private=private, |
|
357 | private=private, | |
356 | clone_uri=clone_uri, |
|
358 | clone_uri=clone_uri, | |
357 | repo_group=repo_group, |
|
359 | repo_group=repo_group, | |
358 | landing_rev=landing_rev, |
|
360 | landing_rev=landing_rev, | |
359 | fork_of=fork_of, |
|
361 | fork_of=fork_of, | |
360 | copy_fork_permissions=copy_fork_permissions, |
|
362 | copy_fork_permissions=copy_fork_permissions, | |
361 | copy_group_permissions=copy_group_permissions, |
|
363 | copy_group_permissions=copy_group_permissions, | |
362 | enable_statistics=enable_statistics, |
|
364 | enable_statistics=enable_statistics, | |
363 | enable_downloads=enable_downloads, |
|
365 | enable_downloads=enable_downloads, | |
364 | state=state |
|
366 | state=state | |
365 | ) |
|
367 | ) | |
366 |
|
368 | |||
367 | action_logger(cur_user, 'user_created_repo', |
|
369 | action_logger(cur_user, 'user_created_repo', | |
368 | form_data['repo_name_full'], '') |
|
370 | form_data['repo_name_full'], '') | |
369 |
|
371 | |||
370 | DBS.commit() |
|
372 | DBS.commit() | |
371 | # now create this repo on Filesystem |
|
373 | # now create this repo on Filesystem | |
372 | RepoModel()._create_filesystem_repo( |
|
374 | RepoModel()._create_filesystem_repo( | |
373 | repo_name=repo_name, |
|
375 | repo_name=repo_name, | |
374 | repo_type=repo_type, |
|
376 | repo_type=repo_type, | |
375 | repo_group=RepoGroup.guess_instance(repo_group), |
|
377 | repo_group=RepoGroup.guess_instance(repo_group), | |
376 | clone_uri=clone_uri, |
|
378 | clone_uri=clone_uri, | |
377 | ) |
|
379 | ) | |
378 | repo = Repository.get_by_repo_name(repo_name_full) |
|
380 | repo = Repository.get_by_repo_name(repo_name_full) | |
379 | log_create_repository(repo.get_dict(), created_by=owner.username) |
|
381 | log_create_repository(repo.get_dict(), created_by=owner.username) | |
380 |
|
382 | |||
381 | # update repo changeset caches initially |
|
383 | # update repo changeset caches initially | |
382 | repo.update_changeset_cache() |
|
384 | repo.update_changeset_cache() | |
383 |
|
385 | |||
384 | # set new created state |
|
386 | # set new created state | |
385 | repo.set_state(Repository.STATE_CREATED) |
|
387 | repo.set_state(Repository.STATE_CREATED) | |
386 | DBS.commit() |
|
388 | DBS.commit() | |
387 | except Exception as e: |
|
389 | except Exception as e: | |
388 | log.warning('Exception %s occurred when forking repository, ' |
|
390 | log.warning('Exception %s occurred when forking repository, ' | |
389 | 'doing cleanup...' % e) |
|
391 | 'doing cleanup...' % e) | |
390 | # rollback things manually ! |
|
392 | # rollback things manually ! | |
391 | repo = Repository.get_by_repo_name(repo_name_full) |
|
393 | repo = Repository.get_by_repo_name(repo_name_full) | |
392 | if repo: |
|
394 | if repo: | |
393 | Repository.delete(repo.repo_id) |
|
395 | Repository.delete(repo.repo_id) | |
394 | DBS.commit() |
|
396 | DBS.commit() | |
395 | RepoModel()._delete_filesystem_repo(repo) |
|
397 | RepoModel()._delete_filesystem_repo(repo) | |
396 | raise |
|
398 | raise | |
397 |
|
399 | |||
398 | return True |
|
400 | return True | |
399 |
|
401 | |||
400 |
|
402 | |||
401 | @celerylib.task |
|
403 | @celerylib.task | |
402 | @celerylib.dbsession |
|
404 | @celerylib.dbsession | |
403 | def create_repo_fork(form_data, cur_user): |
|
405 | def create_repo_fork(form_data, cur_user): | |
404 | """ |
|
406 | """ | |
405 | Creates a fork of repository using interval VCS methods |
|
407 | Creates a fork of repository using interval VCS methods | |
406 |
|
408 | |||
407 | :param form_data: |
|
409 | :param form_data: | |
408 | :param cur_user: |
|
410 | :param cur_user: | |
409 | """ |
|
411 | """ | |
410 | from kallithea.model.repo import RepoModel |
|
412 | from kallithea.model.repo import RepoModel | |
411 |
|
413 | |||
412 | DBS = celerylib.get_session() |
|
414 | DBS = celerylib.get_session() | |
413 |
|
415 | |||
414 | base_path = Repository.base_path() |
|
416 | base_path = Repository.base_path() | |
415 | cur_user = User.guess_instance(cur_user) |
|
417 | cur_user = User.guess_instance(cur_user) | |
416 |
|
418 | |||
417 | repo_name = form_data['repo_name'] # fork in this case |
|
419 | repo_name = form_data['repo_name'] # fork in this case | |
418 | repo_name_full = form_data['repo_name_full'] |
|
420 | repo_name_full = form_data['repo_name_full'] | |
419 |
|
421 | |||
420 | repo_type = form_data['repo_type'] |
|
422 | repo_type = form_data['repo_type'] | |
421 | owner = cur_user |
|
423 | owner = cur_user | |
422 | private = form_data['private'] |
|
424 | private = form_data['private'] | |
423 | clone_uri = form_data.get('clone_uri') |
|
425 | clone_uri = form_data.get('clone_uri') | |
424 | repo_group = form_data['repo_group'] |
|
426 | repo_group = form_data['repo_group'] | |
425 | landing_rev = form_data['landing_rev'] |
|
427 | landing_rev = form_data['landing_rev'] | |
426 | copy_fork_permissions = form_data.get('copy_permissions') |
|
428 | copy_fork_permissions = form_data.get('copy_permissions') | |
427 |
|
429 | |||
428 | try: |
|
430 | try: | |
429 | fork_of = Repository.guess_instance(form_data.get('fork_parent_id')) |
|
431 | fork_of = Repository.guess_instance(form_data.get('fork_parent_id')) | |
430 |
|
432 | |||
431 | RepoModel()._create_repo( |
|
433 | RepoModel()._create_repo( | |
432 | repo_name=repo_name_full, |
|
434 | repo_name=repo_name_full, | |
433 | repo_type=repo_type, |
|
435 | repo_type=repo_type, | |
434 | description=form_data['description'], |
|
436 | description=form_data['description'], | |
435 | owner=owner, |
|
437 | owner=owner, | |
436 | private=private, |
|
438 | private=private, | |
437 | clone_uri=clone_uri, |
|
439 | clone_uri=clone_uri, | |
438 | repo_group=repo_group, |
|
440 | repo_group=repo_group, | |
439 | landing_rev=landing_rev, |
|
441 | landing_rev=landing_rev, | |
440 | fork_of=fork_of, |
|
442 | fork_of=fork_of, | |
441 | copy_fork_permissions=copy_fork_permissions |
|
443 | copy_fork_permissions=copy_fork_permissions | |
442 | ) |
|
444 | ) | |
443 | action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full, |
|
445 | action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full, | |
444 | fork_of.repo_name, '') |
|
446 | fork_of.repo_name, '') | |
445 | DBS.commit() |
|
447 | DBS.commit() | |
446 |
|
448 | |||
447 | source_repo_path = os.path.join(base_path, fork_of.repo_name) |
|
449 | source_repo_path = os.path.join(base_path, fork_of.repo_name) | |
448 |
|
450 | |||
449 | # now create this repo on Filesystem |
|
451 | # now create this repo on Filesystem | |
450 | RepoModel()._create_filesystem_repo( |
|
452 | RepoModel()._create_filesystem_repo( | |
451 | repo_name=repo_name, |
|
453 | repo_name=repo_name, | |
452 | repo_type=repo_type, |
|
454 | repo_type=repo_type, | |
453 | repo_group=RepoGroup.guess_instance(repo_group), |
|
455 | repo_group=RepoGroup.guess_instance(repo_group), | |
454 | clone_uri=source_repo_path, |
|
456 | clone_uri=source_repo_path, | |
455 | ) |
|
457 | ) | |
456 | repo = Repository.get_by_repo_name(repo_name_full) |
|
458 | repo = Repository.get_by_repo_name(repo_name_full) | |
457 | log_create_repository(repo.get_dict(), created_by=owner.username) |
|
459 | log_create_repository(repo.get_dict(), created_by=owner.username) | |
458 |
|
460 | |||
459 | # update repo changeset caches initially |
|
461 | # update repo changeset caches initially | |
460 | repo.update_changeset_cache() |
|
462 | repo.update_changeset_cache() | |
461 |
|
463 | |||
462 | # set new created state |
|
464 | # set new created state | |
463 | repo.set_state(Repository.STATE_CREATED) |
|
465 | repo.set_state(Repository.STATE_CREATED) | |
464 | DBS.commit() |
|
466 | DBS.commit() | |
465 | except Exception as e: |
|
467 | except Exception as e: | |
466 | log.warning('Exception %s occurred when forking repository, ' |
|
468 | log.warning('Exception %s occurred when forking repository, ' | |
467 | 'doing cleanup...' % e) |
|
469 | 'doing cleanup...' % e) | |
468 | # rollback things manually ! |
|
470 | # rollback things manually ! | |
469 | repo = Repository.get_by_repo_name(repo_name_full) |
|
471 | repo = Repository.get_by_repo_name(repo_name_full) | |
470 | if repo: |
|
472 | if repo: | |
471 | Repository.delete(repo.repo_id) |
|
473 | Repository.delete(repo.repo_id) | |
472 | DBS.commit() |
|
474 | DBS.commit() | |
473 | RepoModel()._delete_filesystem_repo(repo) |
|
475 | RepoModel()._delete_filesystem_repo(repo) | |
474 | raise |
|
476 | raise | |
475 |
|
477 | |||
476 | return True |
|
478 | return True | |
477 |
|
479 | |||
478 |
|
480 | |||
479 | def __get_codes_stats(repo_name): |
|
481 | def __get_codes_stats(repo_name): | |
480 | from kallithea.config.conf import LANGUAGES_EXTENSIONS_MAP |
|
482 | from kallithea.config.conf import LANGUAGES_EXTENSIONS_MAP | |
481 | repo = Repository.get_by_repo_name(repo_name).scm_instance |
|
483 | repo = Repository.get_by_repo_name(repo_name).scm_instance | |
482 |
|
484 | |||
483 | tip = repo.get_changeset() |
|
485 | tip = repo.get_changeset() | |
484 | code_stats = {} |
|
486 | code_stats = {} | |
485 |
|
487 | |||
486 | for _topnode, _dirnodes, filenodes in tip.walk('/'): |
|
488 | for _topnode, _dirnodes, filenodes in tip.walk('/'): | |
487 | for filenode in filenodes: |
|
489 | for filenode in filenodes: | |
488 | ext = filenode.extension.lower() |
|
490 | ext = filenode.extension.lower() | |
489 | if ext in LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary: |
|
491 | if ext in LANGUAGES_EXTENSIONS_MAP and not filenode.is_binary: | |
490 | if ext in code_stats: |
|
492 | if ext in code_stats: | |
491 | code_stats[ext] += 1 |
|
493 | code_stats[ext] += 1 | |
492 | else: |
|
494 | else: | |
493 | code_stats[ext] = 1 |
|
495 | code_stats[ext] = 1 | |
494 |
|
496 | |||
495 | return code_stats or {} |
|
497 | return code_stats or {} |
@@ -1,1341 +1,1344 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | Helper functions |
|
15 | Helper functions | |
16 |
|
16 | |||
17 | Consists of functions to typically be used within templates, but also |
|
17 | Consists of functions to typically be used within templates, but also | |
18 | available to Controllers. This module is available to both as 'h'. |
|
18 | available to Controllers. This module is available to both as 'h'. | |
19 | """ |
|
19 | """ | |
20 | import hashlib |
|
20 | import hashlib | |
21 | import json |
|
21 | import json | |
22 | import logging |
|
22 | import logging | |
23 | import random |
|
23 | import random | |
24 | import re |
|
24 | import re | |
25 | import textwrap |
|
25 | import textwrap | |
26 | import urllib.parse |
|
26 | import urllib.parse | |
27 |
|
27 | |||
28 | from beaker.cache import cache_region |
|
28 | from beaker.cache import cache_region | |
29 | from pygments import highlight as code_highlight |
|
29 | from pygments import highlight as code_highlight | |
30 | from pygments.formatters.html import HtmlFormatter |
|
30 | from pygments.formatters.html import HtmlFormatter | |
31 | from tg.i18n import ugettext as _ |
|
31 | from tg.i18n import ugettext as _ | |
32 | from webhelpers2.html import HTML, escape, literal |
|
32 | from webhelpers2.html import HTML, escape, literal | |
33 | from webhelpers2.html.tags import NotGiven, Option, Options, _input, _make_safe_id_component, checkbox, end_form |
|
33 | from webhelpers2.html.tags import NotGiven, Option, Options, _input, _make_safe_id_component, checkbox, end_form | |
34 | from webhelpers2.html.tags import form as insecure_form |
|
34 | from webhelpers2.html.tags import form as insecure_form | |
35 | from webhelpers2.html.tags import hidden, link_to, password, radio |
|
35 | from webhelpers2.html.tags import hidden, link_to, password, radio | |
36 | from webhelpers2.html.tags import select as webhelpers2_select |
|
36 | from webhelpers2.html.tags import select as webhelpers2_select | |
37 | from webhelpers2.html.tags import submit, text, textarea |
|
37 | from webhelpers2.html.tags import submit, text, textarea | |
38 | from webhelpers2.number import format_byte_size |
|
38 | from webhelpers2.number import format_byte_size | |
39 | from webhelpers2.text import chop_at, truncate, wrap_paragraphs |
|
39 | from webhelpers2.text import chop_at, truncate, wrap_paragraphs | |
40 |
|
40 | |||
41 | from kallithea.config.routing import url |
|
41 | from kallithea.config.routing import url | |
42 | from kallithea.lib.annotate import annotate_highlight |
|
42 | from kallithea.lib.annotate import annotate_highlight | |
43 | #============================================================================== |
|
43 | #============================================================================== | |
44 | # PERMS |
|
44 | # PERMS | |
45 | #============================================================================== |
|
45 | #============================================================================== | |
46 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel |
|
46 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel | |
47 | from kallithea.lib.markup_renderer import url_re |
|
47 | from kallithea.lib.markup_renderer import url_re | |
48 | from kallithea.lib.pygmentsutils import get_custom_lexer |
|
48 | from kallithea.lib.pygmentsutils import get_custom_lexer | |
49 | from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict |
|
49 | from kallithea.lib.utils2 import MENTIONS_REGEX, AttributeDict | |
50 | from kallithea.lib.utils2 import age as _age |
|
50 | from kallithea.lib.utils2 import age as _age | |
51 | from kallithea.lib.utils2 import credentials_filter, safe_bytes, safe_int, safe_str, str2bool, time_to_datetime |
|
51 | from kallithea.lib.utils2 import credentials_filter, safe_bytes, safe_int, safe_str, str2bool, time_to_datetime | |
52 | from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset |
|
52 | from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset | |
53 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError |
|
53 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError | |
54 | #============================================================================== |
|
54 | #============================================================================== | |
55 | # SCM FILTERS available via h. |
|
55 | # SCM FILTERS available via h. | |
56 | #============================================================================== |
|
56 | #============================================================================== | |
57 | from kallithea.lib.vcs.utils import author_email, author_name |
|
57 | from kallithea.lib.vcs.utils import author_email, author_name | |
58 |
|
58 | |||
59 |
|
59 | |||
60 | # mute pyflakes "imported but unused" |
|
60 | # mute pyflakes "imported but unused" | |
61 | assert Option |
|
61 | assert Option | |
62 | assert checkbox |
|
62 | assert checkbox | |
63 | assert end_form |
|
63 | assert end_form | |
64 | assert password |
|
64 | assert password | |
65 | assert radio |
|
65 | assert radio | |
66 | assert submit |
|
66 | assert submit | |
67 | assert text |
|
67 | assert text | |
68 | assert textarea |
|
68 | assert textarea | |
69 | assert format_byte_size |
|
69 | assert format_byte_size | |
70 | assert chop_at |
|
70 | assert chop_at | |
71 | assert wrap_paragraphs |
|
71 | assert wrap_paragraphs | |
72 | assert HasPermissionAny |
|
72 | assert HasPermissionAny | |
73 | assert HasRepoGroupPermissionLevel |
|
73 | assert HasRepoGroupPermissionLevel | |
74 | assert HasRepoPermissionLevel |
|
74 | assert HasRepoPermissionLevel | |
75 | assert time_to_datetime |
|
75 | assert time_to_datetime | |
76 | assert EmptyChangeset |
|
76 | assert EmptyChangeset | |
77 |
|
77 | |||
78 |
|
78 | |||
79 | log = logging.getLogger(__name__) |
|
79 | log = logging.getLogger(__name__) | |
80 |
|
80 | |||
81 |
|
81 | |||
82 | def canonical_url(*args, **kargs): |
|
82 | def canonical_url(*args, **kargs): | |
83 | '''Like url(x, qualified=True), but returns url that not only is qualified |
|
83 | '''Like url(x, qualified=True), but returns url that not only is qualified | |
84 | but also canonical, as configured in canonical_url''' |
|
84 | but also canonical, as configured in canonical_url''' | |
85 | from kallithea import CONFIG |
|
85 | from kallithea import CONFIG | |
86 | try: |
|
86 | try: | |
87 | parts = CONFIG.get('canonical_url', '').split('://', 1) |
|
87 | parts = CONFIG.get('canonical_url', '').split('://', 1) | |
88 | kargs['host'] = parts[1] |
|
88 | kargs['host'] = parts[1] | |
89 | kargs['protocol'] = parts[0] |
|
89 | kargs['protocol'] = parts[0] | |
90 | except IndexError: |
|
90 | except IndexError: | |
91 | kargs['qualified'] = True |
|
91 | kargs['qualified'] = True | |
92 | return url(*args, **kargs) |
|
92 | return url(*args, **kargs) | |
93 |
|
93 | |||
94 |
|
94 | |||
95 | def canonical_hostname(): |
|
95 | def canonical_hostname(): | |
96 | '''Return canonical hostname of system''' |
|
96 | '''Return canonical hostname of system''' | |
97 | from kallithea import CONFIG |
|
97 | from kallithea import CONFIG | |
98 | try: |
|
98 | try: | |
99 | parts = CONFIG.get('canonical_url', '').split('://', 1) |
|
99 | parts = CONFIG.get('canonical_url', '').split('://', 1) | |
100 | return parts[1].split('/', 1)[0] |
|
100 | return parts[1].split('/', 1)[0] | |
101 | except IndexError: |
|
101 | except IndexError: | |
102 | parts = url('home', qualified=True).split('://', 1) |
|
102 | parts = url('home', qualified=True).split('://', 1) | |
103 | return parts[1].split('/', 1)[0] |
|
103 | return parts[1].split('/', 1)[0] | |
104 |
|
104 | |||
105 |
|
105 | |||
106 | def html_escape(s): |
|
106 | def html_escape(s): | |
107 | """Return string with all html escaped. |
|
107 | """Return string with all html escaped. | |
108 | This is also safe for javascript in html but not necessarily correct. |
|
108 | This is also safe for javascript in html but not necessarily correct. | |
109 | """ |
|
109 | """ | |
110 | return (s |
|
110 | return (s | |
111 | .replace('&', '&') |
|
111 | .replace('&', '&') | |
112 | .replace(">", ">") |
|
112 | .replace(">", ">") | |
113 | .replace("<", "<") |
|
113 | .replace("<", "<") | |
114 | .replace('"', """) |
|
114 | .replace('"', """) | |
115 | .replace("'", "'") # Note: this is HTML5 not HTML4 and might not work in mails |
|
115 | .replace("'", "'") # Note: this is HTML5 not HTML4 and might not work in mails | |
116 | ) |
|
116 | ) | |
117 |
|
117 | |||
118 | def js(value): |
|
118 | def js(value): | |
119 | """Convert Python value to the corresponding JavaScript representation. |
|
119 | """Convert Python value to the corresponding JavaScript representation. | |
120 |
|
120 | |||
121 | This is necessary to safely insert arbitrary values into HTML <script> |
|
121 | This is necessary to safely insert arbitrary values into HTML <script> | |
122 | sections e.g. using Mako template expression substitution. |
|
122 | sections e.g. using Mako template expression substitution. | |
123 |
|
123 | |||
124 | Note: Rather than using this function, it's preferable to avoid the |
|
124 | Note: Rather than using this function, it's preferable to avoid the | |
125 | insertion of values into HTML <script> sections altogether. Instead, |
|
125 | insertion of values into HTML <script> sections altogether. Instead, | |
126 | data should (to the extent possible) be passed to JavaScript using |
|
126 | data should (to the extent possible) be passed to JavaScript using | |
127 | data attributes or AJAX calls, eliminating the need for JS specific |
|
127 | data attributes or AJAX calls, eliminating the need for JS specific | |
128 | escaping. |
|
128 | escaping. | |
129 |
|
129 | |||
130 | Note: This is not safe for use in attributes (e.g. onclick), because |
|
130 | Note: This is not safe for use in attributes (e.g. onclick), because | |
131 | quotes are not escaped. |
|
131 | quotes are not escaped. | |
132 |
|
132 | |||
133 | Because the rules for parsing <script> varies between XHTML (where |
|
133 | Because the rules for parsing <script> varies between XHTML (where | |
134 | normal rules apply for any special characters) and HTML (where |
|
134 | normal rules apply for any special characters) and HTML (where | |
135 | entities are not interpreted, but the literal string "</script>" |
|
135 | entities are not interpreted, but the literal string "</script>" | |
136 | is forbidden), the function ensures that the result never contains |
|
136 | is forbidden), the function ensures that the result never contains | |
137 | '&', '<' and '>', thus making it safe in both those contexts (but |
|
137 | '&', '<' and '>', thus making it safe in both those contexts (but | |
138 | not in attributes). |
|
138 | not in attributes). | |
139 | """ |
|
139 | """ | |
140 | return literal( |
|
140 | return literal( | |
141 | ('(' + json.dumps(value) + ')') |
|
141 | ('(' + json.dumps(value) + ')') | |
142 | # In JSON, the following can only appear in string literals. |
|
142 | # In JSON, the following can only appear in string literals. | |
143 | .replace('&', r'\x26') |
|
143 | .replace('&', r'\x26') | |
144 | .replace('<', r'\x3c') |
|
144 | .replace('<', r'\x3c') | |
145 | .replace('>', r'\x3e') |
|
145 | .replace('>', r'\x3e') | |
146 | ) |
|
146 | ) | |
147 |
|
147 | |||
148 |
|
148 | |||
149 | def jshtml(val): |
|
149 | def jshtml(val): | |
150 | """HTML escapes a string value, then converts the resulting string |
|
150 | """HTML escapes a string value, then converts the resulting string | |
151 | to its corresponding JavaScript representation (see `js`). |
|
151 | to its corresponding JavaScript representation (see `js`). | |
152 |
|
152 | |||
153 | This is used when a plain-text string (possibly containing special |
|
153 | This is used when a plain-text string (possibly containing special | |
154 | HTML characters) will be used by a script in an HTML context (e.g. |
|
154 | HTML characters) will be used by a script in an HTML context (e.g. | |
155 | element.innerHTML or jQuery's 'html' method). |
|
155 | element.innerHTML or jQuery's 'html' method). | |
156 |
|
156 | |||
157 | If in doubt, err on the side of using `jshtml` over `js`, since it's |
|
157 | If in doubt, err on the side of using `jshtml` over `js`, since it's | |
158 | better to escape too much than too little. |
|
158 | better to escape too much than too little. | |
159 | """ |
|
159 | """ | |
160 | return js(escape(val)) |
|
160 | return js(escape(val)) | |
161 |
|
161 | |||
162 |
|
162 | |||
163 | def shorter(s, size=20, firstline=False, postfix='...'): |
|
163 | def shorter(s, size=20, firstline=False, postfix='...'): | |
164 | """Truncate s to size, including the postfix string if truncating. |
|
164 | """Truncate s to size, including the postfix string if truncating. | |
165 | If firstline, truncate at newline. |
|
165 | If firstline, truncate at newline. | |
166 | """ |
|
166 | """ | |
167 | if firstline: |
|
167 | if firstline: | |
168 | s = s.split('\n', 1)[0].rstrip() |
|
168 | s = s.split('\n', 1)[0].rstrip() | |
169 | if len(s) > size: |
|
169 | if len(s) > size: | |
170 | return s[:size - len(postfix)] + postfix |
|
170 | return s[:size - len(postfix)] + postfix | |
171 | return s |
|
171 | return s | |
172 |
|
172 | |||
173 |
|
173 | |||
174 | def reset(name, value, id=NotGiven, **attrs): |
|
174 | def reset(name, value, id=NotGiven, **attrs): | |
175 | """Create a reset button, similar to webhelpers2.html.tags.submit .""" |
|
175 | """Create a reset button, similar to webhelpers2.html.tags.submit .""" | |
176 | return _input("reset", name, value, id, attrs) |
|
176 | return _input("reset", name, value, id, attrs) | |
177 |
|
177 | |||
178 |
|
178 | |||
179 | def select(name, selected_values, options, id=NotGiven, **attrs): |
|
179 | def select(name, selected_values, options, id=NotGiven, **attrs): | |
180 | """Convenient wrapper of webhelpers2 to let it accept options as a tuple list""" |
|
180 | """Convenient wrapper of webhelpers2 to let it accept options as a tuple list""" | |
181 | if isinstance(options, list): |
|
181 | if isinstance(options, list): | |
182 | option_list = options |
|
182 | option_list = options | |
183 | # Handle old value,label lists ... where value also can be value,label lists |
|
183 | # Handle old value,label lists ... where value also can be value,label lists | |
184 | options = Options() |
|
184 | options = Options() | |
185 | for x in option_list: |
|
185 | for x in option_list: | |
186 | if isinstance(x, tuple) and len(x) == 2: |
|
186 | if isinstance(x, tuple) and len(x) == 2: | |
187 | value, label = x |
|
187 | value, label = x | |
188 | elif isinstance(x, str): |
|
188 | elif isinstance(x, str): | |
189 | value = label = x |
|
189 | value = label = x | |
190 | else: |
|
190 | else: | |
191 | log.error('invalid select option %r', x) |
|
191 | log.error('invalid select option %r', x) | |
192 | raise |
|
192 | raise | |
193 | if isinstance(value, list): |
|
193 | if isinstance(value, list): | |
194 | og = options.add_optgroup(label) |
|
194 | og = options.add_optgroup(label) | |
195 | for x in value: |
|
195 | for x in value: | |
196 | if isinstance(x, tuple) and len(x) == 2: |
|
196 | if isinstance(x, tuple) and len(x) == 2: | |
197 | group_value, group_label = x |
|
197 | group_value, group_label = x | |
198 | elif isinstance(x, str): |
|
198 | elif isinstance(x, str): | |
199 | group_value = group_label = x |
|
199 | group_value = group_label = x | |
200 | else: |
|
200 | else: | |
201 | log.error('invalid select option %r', x) |
|
201 | log.error('invalid select option %r', x) | |
202 | raise |
|
202 | raise | |
203 | og.add_option(group_label, group_value) |
|
203 | og.add_option(group_label, group_value) | |
204 | else: |
|
204 | else: | |
205 | options.add_option(label, value) |
|
205 | options.add_option(label, value) | |
206 | return webhelpers2_select(name, selected_values, options, id=id, **attrs) |
|
206 | return webhelpers2_select(name, selected_values, options, id=id, **attrs) | |
207 |
|
207 | |||
208 |
|
208 | |||
209 | safeid = _make_safe_id_component |
|
209 | safeid = _make_safe_id_component | |
210 |
|
210 | |||
211 |
|
211 | |||
212 | def FID(raw_id, path): |
|
212 | def FID(raw_id, path): | |
213 | """ |
|
213 | """ | |
214 | Creates a unique ID for filenode based on it's hash of path and revision |
|
214 | Creates a unique ID for filenode based on it's hash of path and revision | |
215 | it's safe to use in urls |
|
215 | it's safe to use in urls | |
216 |
|
216 | |||
217 | :param raw_id: |
|
217 | :param raw_id: | |
218 | :param path: |
|
218 | :param path: | |
219 | """ |
|
219 | """ | |
220 |
|
220 | |||
221 | return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12]) |
|
221 | return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12]) | |
222 |
|
222 | |||
223 |
|
223 | |||
224 | class _FilesBreadCrumbs(object): |
|
224 | class _FilesBreadCrumbs(object): | |
225 |
|
225 | |||
226 | def __call__(self, repo_name, rev, paths): |
|
226 | def __call__(self, repo_name, rev, paths): | |
227 | url_l = [link_to(repo_name, url('files_home', |
|
227 | url_l = [link_to(repo_name, url('files_home', | |
228 | repo_name=repo_name, |
|
228 | repo_name=repo_name, | |
229 | revision=rev, f_path=''), |
|
229 | revision=rev, f_path=''), | |
230 | class_='ypjax-link')] |
|
230 | class_='ypjax-link')] | |
231 | paths_l = paths.split('/') |
|
231 | paths_l = paths.split('/') | |
232 | for cnt, p in enumerate(paths_l): |
|
232 | for cnt, p in enumerate(paths_l): | |
233 | if p != '': |
|
233 | if p != '': | |
234 | url_l.append(link_to(p, |
|
234 | url_l.append(link_to(p, | |
235 | url('files_home', |
|
235 | url('files_home', | |
236 | repo_name=repo_name, |
|
236 | repo_name=repo_name, | |
237 | revision=rev, |
|
237 | revision=rev, | |
238 | f_path='/'.join(paths_l[:cnt + 1]) |
|
238 | f_path='/'.join(paths_l[:cnt + 1]) | |
239 | ), |
|
239 | ), | |
240 | class_='ypjax-link' |
|
240 | class_='ypjax-link' | |
241 | ) |
|
241 | ) | |
242 | ) |
|
242 | ) | |
243 |
|
243 | |||
244 | return literal('/'.join(url_l)) |
|
244 | return literal('/'.join(url_l)) | |
245 |
|
245 | |||
246 |
|
246 | |||
247 | files_breadcrumbs = _FilesBreadCrumbs() |
|
247 | files_breadcrumbs = _FilesBreadCrumbs() | |
248 |
|
248 | |||
249 |
|
249 | |||
250 | class CodeHtmlFormatter(HtmlFormatter): |
|
250 | class CodeHtmlFormatter(HtmlFormatter): | |
251 | """ |
|
251 | """ | |
252 | My code Html Formatter for source codes |
|
252 | My code Html Formatter for source codes | |
253 | """ |
|
253 | """ | |
254 |
|
254 | |||
255 | def wrap(self, source, outfile): |
|
255 | def wrap(self, source, outfile): | |
256 | return self._wrap_div(self._wrap_pre(self._wrap_code(source))) |
|
256 | return self._wrap_div(self._wrap_pre(self._wrap_code(source))) | |
257 |
|
257 | |||
258 | def _wrap_code(self, source): |
|
258 | def _wrap_code(self, source): | |
259 | for cnt, it in enumerate(source): |
|
259 | for cnt, it in enumerate(source): | |
260 | i, t = it |
|
260 | i, t = it | |
261 | t = '<span id="L%s">%s</span>' % (cnt + 1, t) |
|
261 | t = '<span id="L%s">%s</span>' % (cnt + 1, t) | |
262 | yield i, t |
|
262 | yield i, t | |
263 |
|
263 | |||
264 | def _wrap_tablelinenos(self, inner): |
|
264 | def _wrap_tablelinenos(self, inner): | |
265 | inner_lines = [] |
|
265 | inner_lines = [] | |
266 | lncount = 0 |
|
266 | lncount = 0 | |
267 | for t, line in inner: |
|
267 | for t, line in inner: | |
268 | if t: |
|
268 | if t: | |
269 | lncount += 1 |
|
269 | lncount += 1 | |
270 | inner_lines.append(line) |
|
270 | inner_lines.append(line) | |
271 |
|
271 | |||
272 | fl = self.linenostart |
|
272 | fl = self.linenostart | |
273 | mw = len(str(lncount + fl - 1)) |
|
273 | mw = len(str(lncount + fl - 1)) | |
274 | sp = self.linenospecial |
|
274 | sp = self.linenospecial | |
275 | st = self.linenostep |
|
275 | st = self.linenostep | |
276 | la = self.lineanchors |
|
276 | la = self.lineanchors | |
277 | aln = self.anchorlinenos |
|
277 | aln = self.anchorlinenos | |
278 | nocls = self.noclasses |
|
278 | nocls = self.noclasses | |
279 | if sp: |
|
279 | if sp: | |
280 | lines = [] |
|
280 | lines = [] | |
281 |
|
281 | |||
282 | for i in range(fl, fl + lncount): |
|
282 | for i in range(fl, fl + lncount): | |
283 | if i % st == 0: |
|
283 | if i % st == 0: | |
284 | if i % sp == 0: |
|
284 | if i % sp == 0: | |
285 | if aln: |
|
285 | if aln: | |
286 | lines.append('<a href="#%s%d" class="special">%*d</a>' % |
|
286 | lines.append('<a href="#%s%d" class="special">%*d</a>' % | |
287 | (la, i, mw, i)) |
|
287 | (la, i, mw, i)) | |
288 | else: |
|
288 | else: | |
289 | lines.append('<span class="special">%*d</span>' % (mw, i)) |
|
289 | lines.append('<span class="special">%*d</span>' % (mw, i)) | |
290 | else: |
|
290 | else: | |
291 | if aln: |
|
291 | if aln: | |
292 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
292 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) | |
293 | else: |
|
293 | else: | |
294 | lines.append('%*d' % (mw, i)) |
|
294 | lines.append('%*d' % (mw, i)) | |
295 | else: |
|
295 | else: | |
296 | lines.append('') |
|
296 | lines.append('') | |
297 | ls = '\n'.join(lines) |
|
297 | ls = '\n'.join(lines) | |
298 | else: |
|
298 | else: | |
299 | lines = [] |
|
299 | lines = [] | |
300 | for i in range(fl, fl + lncount): |
|
300 | for i in range(fl, fl + lncount): | |
301 | if i % st == 0: |
|
301 | if i % st == 0: | |
302 | if aln: |
|
302 | if aln: | |
303 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
303 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) | |
304 | else: |
|
304 | else: | |
305 | lines.append('%*d' % (mw, i)) |
|
305 | lines.append('%*d' % (mw, i)) | |
306 | else: |
|
306 | else: | |
307 | lines.append('') |
|
307 | lines.append('') | |
308 | ls = '\n'.join(lines) |
|
308 | ls = '\n'.join(lines) | |
309 |
|
309 | |||
310 | # in case you wonder about the seemingly redundant <div> here: since the |
|
310 | # in case you wonder about the seemingly redundant <div> here: since the | |
311 | # content in the other cell also is wrapped in a div, some browsers in |
|
311 | # content in the other cell also is wrapped in a div, some browsers in | |
312 | # some configurations seem to mess up the formatting... |
|
312 | # some configurations seem to mess up the formatting... | |
313 | if nocls: |
|
313 | if nocls: | |
314 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
314 | yield 0, ('<table class="%stable">' % self.cssclass + | |
315 | '<tr><td><div class="linenodiv">' |
|
315 | '<tr><td><div class="linenodiv">' | |
316 | '<pre>' + ls + '</pre></div></td>' |
|
316 | '<pre>' + ls + '</pre></div></td>' | |
317 | '<td id="hlcode" class="code">') |
|
317 | '<td id="hlcode" class="code">') | |
318 | else: |
|
318 | else: | |
319 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
319 | yield 0, ('<table class="%stable">' % self.cssclass + | |
320 | '<tr><td class="linenos"><div class="linenodiv">' |
|
320 | '<tr><td class="linenos"><div class="linenodiv">' | |
321 | '<pre>' + ls + '</pre></div></td>' |
|
321 | '<pre>' + ls + '</pre></div></td>' | |
322 | '<td id="hlcode" class="code">') |
|
322 | '<td id="hlcode" class="code">') | |
323 | yield 0, ''.join(inner_lines) |
|
323 | yield 0, ''.join(inner_lines) | |
324 | yield 0, '</td></tr></table>' |
|
324 | yield 0, '</td></tr></table>' | |
325 |
|
325 | |||
326 |
|
326 | |||
327 | _whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)') |
|
327 | _whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)') | |
328 |
|
328 | |||
329 |
|
329 | |||
330 | def _markup_whitespace(m): |
|
330 | def _markup_whitespace(m): | |
331 | groups = m.groups() |
|
331 | groups = m.groups() | |
332 | if groups[0]: |
|
332 | if groups[0]: | |
333 | return '<u>\t</u>' |
|
333 | return '<u>\t</u>' | |
334 | if groups[1]: |
|
334 | if groups[1]: | |
335 | return ' <i></i>' |
|
335 | return ' <i></i>' | |
336 |
|
336 | |||
337 |
|
337 | |||
338 | def markup_whitespace(s): |
|
338 | def markup_whitespace(s): | |
339 | return _whitespace_re.sub(_markup_whitespace, s) |
|
339 | return _whitespace_re.sub(_markup_whitespace, s) | |
340 |
|
340 | |||
341 |
|
341 | |||
342 | def pygmentize(filenode, **kwargs): |
|
342 | def pygmentize(filenode, **kwargs): | |
343 | """ |
|
343 | """ | |
344 | pygmentize function using pygments |
|
344 | pygmentize function using pygments | |
345 |
|
345 | |||
346 | :param filenode: |
|
346 | :param filenode: | |
347 | """ |
|
347 | """ | |
348 | lexer = get_custom_lexer(filenode.extension) or filenode.lexer |
|
348 | lexer = get_custom_lexer(filenode.extension) or filenode.lexer | |
349 | return literal(markup_whitespace( |
|
349 | return literal(markup_whitespace( | |
350 | code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs)))) |
|
350 | code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs)))) | |
351 |
|
351 | |||
352 |
|
352 | |||
353 | def hsv_to_rgb(h, s, v): |
|
353 | def hsv_to_rgb(h, s, v): | |
354 | if s == 0.0: |
|
354 | if s == 0.0: | |
355 | return v, v, v |
|
355 | return v, v, v | |
356 | i = int(h * 6.0) # XXX assume int() truncates! |
|
356 | i = int(h * 6.0) # XXX assume int() truncates! | |
357 | f = (h * 6.0) - i |
|
357 | f = (h * 6.0) - i | |
358 | p = v * (1.0 - s) |
|
358 | p = v * (1.0 - s) | |
359 | q = v * (1.0 - s * f) |
|
359 | q = v * (1.0 - s * f) | |
360 | t = v * (1.0 - s * (1.0 - f)) |
|
360 | t = v * (1.0 - s * (1.0 - f)) | |
361 | i = i % 6 |
|
361 | i = i % 6 | |
362 | if i == 0: |
|
362 | if i == 0: | |
363 | return v, t, p |
|
363 | return v, t, p | |
364 | if i == 1: |
|
364 | if i == 1: | |
365 | return q, v, p |
|
365 | return q, v, p | |
366 | if i == 2: |
|
366 | if i == 2: | |
367 | return p, v, t |
|
367 | return p, v, t | |
368 | if i == 3: |
|
368 | if i == 3: | |
369 | return p, q, v |
|
369 | return p, q, v | |
370 | if i == 4: |
|
370 | if i == 4: | |
371 | return t, p, v |
|
371 | return t, p, v | |
372 | if i == 5: |
|
372 | if i == 5: | |
373 | return v, p, q |
|
373 | return v, p, q | |
374 |
|
374 | |||
375 |
|
375 | |||
376 | def gen_color(n=10000): |
|
376 | def gen_color(n=10000): | |
377 | """generator for getting n of evenly distributed colors using |
|
377 | """generator for getting n of evenly distributed colors using | |
378 | hsv color and golden ratio. It always return same order of colors |
|
378 | hsv color and golden ratio. It always return same order of colors | |
379 |
|
379 | |||
380 | :returns: RGB tuple |
|
380 | :returns: RGB tuple | |
381 | """ |
|
381 | """ | |
382 |
|
382 | |||
383 | golden_ratio = 0.618033988749895 |
|
383 | golden_ratio = 0.618033988749895 | |
384 | h = 0.22717784590367374 |
|
384 | h = 0.22717784590367374 | |
385 |
|
385 | |||
386 | for _unused in range(n): |
|
386 | for _unused in range(n): | |
387 | h += golden_ratio |
|
387 | h += golden_ratio | |
388 | h %= 1 |
|
388 | h %= 1 | |
389 | HSV_tuple = [h, 0.95, 0.95] |
|
389 | HSV_tuple = [h, 0.95, 0.95] | |
390 | RGB_tuple = hsv_to_rgb(*HSV_tuple) |
|
390 | RGB_tuple = hsv_to_rgb(*HSV_tuple) | |
391 | yield [str(int(x * 256)) for x in RGB_tuple] |
|
391 | yield [str(int(x * 256)) for x in RGB_tuple] | |
392 |
|
392 | |||
393 |
|
393 | |||
394 | def pygmentize_annotation(repo_name, filenode, **kwargs): |
|
394 | def pygmentize_annotation(repo_name, filenode, **kwargs): | |
395 | """ |
|
395 | """ | |
396 | pygmentize function for annotation |
|
396 | pygmentize function for annotation | |
397 |
|
397 | |||
398 | :param filenode: |
|
398 | :param filenode: | |
399 | """ |
|
399 | """ | |
400 | cgenerator = gen_color() |
|
400 | cgenerator = gen_color() | |
401 | color_dict = {} |
|
401 | color_dict = {} | |
402 |
|
402 | |||
403 | def get_color_string(cs): |
|
403 | def get_color_string(cs): | |
404 | if cs in color_dict: |
|
404 | if cs in color_dict: | |
405 | col = color_dict[cs] |
|
405 | col = color_dict[cs] | |
406 | else: |
|
406 | else: | |
407 | col = color_dict[cs] = next(cgenerator) |
|
407 | col = color_dict[cs] = next(cgenerator) | |
408 | return "color: rgb(%s)! important;" % (', '.join(col)) |
|
408 | return "color: rgb(%s)! important;" % (', '.join(col)) | |
409 |
|
409 | |||
410 | def url_func(changeset): |
|
410 | def url_func(changeset): | |
411 | author = escape(changeset.author) |
|
411 | author = escape(changeset.author) | |
412 | date = changeset.date |
|
412 | date = changeset.date | |
413 | message = escape(changeset.message) |
|
413 | message = escape(changeset.message) | |
414 | tooltip_html = ("<b>Author:</b> %s<br/>" |
|
414 | tooltip_html = ("<b>Author:</b> %s<br/>" | |
415 | "<b>Date:</b> %s</b><br/>" |
|
415 | "<b>Date:</b> %s</b><br/>" | |
416 | "<b>Message:</b> %s") % (author, date, message) |
|
416 | "<b>Message:</b> %s") % (author, date, message) | |
417 |
|
417 | |||
418 | lnk_format = show_id(changeset) |
|
418 | lnk_format = show_id(changeset) | |
419 | uri = link_to( |
|
419 | uri = link_to( | |
420 | lnk_format, |
|
420 | lnk_format, | |
421 | url('changeset_home', repo_name=repo_name, |
|
421 | url('changeset_home', repo_name=repo_name, | |
422 | revision=changeset.raw_id), |
|
422 | revision=changeset.raw_id), | |
423 | style=get_color_string(changeset.raw_id), |
|
423 | style=get_color_string(changeset.raw_id), | |
424 | **{'data-toggle': 'popover', |
|
424 | **{'data-toggle': 'popover', | |
425 | 'data-content': tooltip_html} |
|
425 | 'data-content': tooltip_html} | |
426 | ) |
|
426 | ) | |
427 |
|
427 | |||
428 | uri += '\n' |
|
428 | uri += '\n' | |
429 | return uri |
|
429 | return uri | |
430 |
|
430 | |||
431 | return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs))) |
|
431 | return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs))) | |
432 |
|
432 | |||
433 |
|
433 | |||
434 | class _Message(object): |
|
434 | class _Message(object): | |
435 | """A message returned by ``pop_flash_messages()``. |
|
435 | """A message returned by ``pop_flash_messages()``. | |
436 |
|
436 | |||
437 | Converting the message to a string returns the message text. Instances |
|
437 | Converting the message to a string returns the message text. Instances | |
438 | also have the following attributes: |
|
438 | also have the following attributes: | |
439 |
|
439 | |||
440 | * ``category``: the category specified when the message was created. |
|
440 | * ``category``: the category specified when the message was created. | |
441 | * ``message``: the html-safe message text. |
|
441 | * ``message``: the html-safe message text. | |
442 | """ |
|
442 | """ | |
443 |
|
443 | |||
444 | def __init__(self, category, message): |
|
444 | def __init__(self, category, message): | |
445 | self.category = category |
|
445 | self.category = category | |
446 | self.message = message |
|
446 | self.message = message | |
447 |
|
447 | |||
448 |
|
448 | |||
449 | def _session_flash_messages(append=None, clear=False): |
|
449 | def _session_flash_messages(append=None, clear=False): | |
450 | """Manage a message queue in tg.session: return the current message queue |
|
450 | """Manage a message queue in tg.session: return the current message queue | |
451 | after appending the given message, and possibly clearing the queue.""" |
|
451 | after appending the given message, and possibly clearing the queue.""" | |
452 | key = 'flash' |
|
452 | key = 'flash' | |
453 | from tg import session |
|
453 | from tg import session | |
454 | if key in session: |
|
454 | if key in session: | |
455 | flash_messages = session[key] |
|
455 | flash_messages = session[key] | |
456 | else: |
|
456 | else: | |
457 | if append is None: # common fast path - also used for clearing empty queue |
|
457 | if append is None: # common fast path - also used for clearing empty queue | |
458 | return [] # don't bother saving |
|
458 | return [] # don't bother saving | |
459 | flash_messages = [] |
|
459 | flash_messages = [] | |
460 | session[key] = flash_messages |
|
460 | session[key] = flash_messages | |
461 | if append is not None and append not in flash_messages: |
|
461 | if append is not None and append not in flash_messages: | |
462 | flash_messages.append(append) |
|
462 | flash_messages.append(append) | |
463 | if clear: |
|
463 | if clear: | |
464 | session.pop(key, None) |
|
464 | session.pop(key, None) | |
465 | session.save() |
|
465 | session.save() | |
466 | return flash_messages |
|
466 | return flash_messages | |
467 |
|
467 | |||
468 |
|
468 | |||
469 | def flash(message, category, logf=None): |
|
469 | def flash(message, category, logf=None): | |
470 | """ |
|
470 | """ | |
471 | Show a message to the user _and_ log it through the specified function |
|
471 | Show a message to the user _and_ log it through the specified function | |
472 |
|
472 | |||
473 | category: notice (default), warning, error, success |
|
473 | category: notice (default), warning, error, success | |
474 | logf: a custom log function - such as log.debug |
|
474 | logf: a custom log function - such as log.debug | |
475 |
|
475 | |||
476 | logf defaults to log.info, unless category equals 'success', in which |
|
476 | logf defaults to log.info, unless category equals 'success', in which | |
477 | case logf defaults to log.debug. |
|
477 | case logf defaults to log.debug. | |
478 | """ |
|
478 | """ | |
479 | assert category in ('error', 'success', 'warning'), category |
|
479 | assert category in ('error', 'success', 'warning'), category | |
480 | if hasattr(message, '__html__'): |
|
480 | if hasattr(message, '__html__'): | |
481 | # render to HTML for storing in cookie |
|
481 | # render to HTML for storing in cookie | |
482 | safe_message = str(message) |
|
482 | safe_message = str(message) | |
483 | else: |
|
483 | else: | |
484 | # Apply str - the message might be an exception with __str__ |
|
484 | # Apply str - the message might be an exception with __str__ | |
485 | # Escape, so we can trust the result without further escaping, without any risk of injection |
|
485 | # Escape, so we can trust the result without further escaping, without any risk of injection | |
486 | safe_message = html_escape(str(message)) |
|
486 | safe_message = html_escape(str(message)) | |
487 | if logf is None: |
|
487 | if logf is None: | |
488 | logf = log.info |
|
488 | logf = log.info | |
489 | if category == 'success': |
|
489 | if category == 'success': | |
490 | logf = log.debug |
|
490 | logf = log.debug | |
491 |
|
491 | |||
492 | logf('Flash %s: %s', category, safe_message) |
|
492 | logf('Flash %s: %s', category, safe_message) | |
493 |
|
493 | |||
494 | _session_flash_messages(append=(category, safe_message)) |
|
494 | _session_flash_messages(append=(category, safe_message)) | |
495 |
|
495 | |||
496 |
|
496 | |||
497 | def pop_flash_messages(): |
|
497 | def pop_flash_messages(): | |
498 | """Return all accumulated messages and delete them from the session. |
|
498 | """Return all accumulated messages and delete them from the session. | |
499 |
|
499 | |||
500 | The return value is a list of ``Message`` objects. |
|
500 | The return value is a list of ``Message`` objects. | |
501 | """ |
|
501 | """ | |
502 | return [_Message(category, message) for category, message in _session_flash_messages(clear=True)] |
|
502 | return [_Message(category, message) for category, message in _session_flash_messages(clear=True)] | |
503 |
|
503 | |||
504 |
|
504 | |||
505 | age = lambda x, y=False: _age(x, y) |
|
505 | def age(x, y=False): | |
506 | capitalize = lambda x: x.capitalize() |
|
506 | return _age(x, y) | |
|
507 | ||||
|
508 | def capitalize(x): | |||
|
509 | return x.capitalize() | |||
|
510 | ||||
507 | email = author_email |
|
511 | email = author_email | |
508 | short_id = lambda x: x[:12] |
|
512 | ||
509 | hide_credentials = lambda x: ''.join(credentials_filter(x)) |
|
513 | def short_id(x): | |
|
514 | return x[:12] | |||
|
515 | ||||
|
516 | def hide_credentials(x): | |||
|
517 | return ''.join(credentials_filter(x)) | |||
510 |
|
518 | |||
511 |
|
519 | |||
512 | def show_id(cs): |
|
520 | def show_id(cs): | |
513 | """ |
|
521 | """ | |
514 | Configurable function that shows ID |
|
522 | Configurable function that shows ID | |
515 | by default it's r123:fffeeefffeee |
|
523 | by default it's r123:fffeeefffeee | |
516 |
|
524 | |||
517 | :param cs: changeset instance |
|
525 | :param cs: changeset instance | |
518 | """ |
|
526 | """ | |
519 | from kallithea import CONFIG |
|
527 | from kallithea import CONFIG | |
520 | def_len = safe_int(CONFIG.get('show_sha_length', 12)) |
|
528 | def_len = safe_int(CONFIG.get('show_sha_length', 12)) | |
521 | show_rev = str2bool(CONFIG.get('show_revision_number', False)) |
|
529 | show_rev = str2bool(CONFIG.get('show_revision_number', False)) | |
522 |
|
530 | |||
523 | raw_id = cs.raw_id[:def_len] |
|
531 | raw_id = cs.raw_id[:def_len] | |
524 | if show_rev: |
|
532 | if show_rev: | |
525 | return 'r%s:%s' % (cs.revision, raw_id) |
|
533 | return 'r%s:%s' % (cs.revision, raw_id) | |
526 | else: |
|
534 | else: | |
527 | return raw_id |
|
535 | return raw_id | |
528 |
|
536 | |||
529 |
|
537 | |||
530 | def fmt_date(date): |
|
538 | def fmt_date(date): | |
531 | if date: |
|
539 | if date: | |
532 | return date.strftime("%Y-%m-%d %H:%M:%S") |
|
540 | return date.strftime("%Y-%m-%d %H:%M:%S") | |
533 | return "" |
|
541 | return "" | |
534 |
|
542 | |||
535 |
|
543 | |||
536 | def is_git(repository): |
|
544 | def is_git(repository): | |
537 | if hasattr(repository, 'alias'): |
|
545 | if hasattr(repository, 'alias'): | |
538 | _type = repository.alias |
|
546 | _type = repository.alias | |
539 | elif hasattr(repository, 'repo_type'): |
|
547 | elif hasattr(repository, 'repo_type'): | |
540 | _type = repository.repo_type |
|
548 | _type = repository.repo_type | |
541 | else: |
|
549 | else: | |
542 | _type = repository |
|
550 | _type = repository | |
543 | return _type == 'git' |
|
551 | return _type == 'git' | |
544 |
|
552 | |||
545 |
|
553 | |||
546 | def is_hg(repository): |
|
554 | def is_hg(repository): | |
547 | if hasattr(repository, 'alias'): |
|
555 | if hasattr(repository, 'alias'): | |
548 | _type = repository.alias |
|
556 | _type = repository.alias | |
549 | elif hasattr(repository, 'repo_type'): |
|
557 | elif hasattr(repository, 'repo_type'): | |
550 | _type = repository.repo_type |
|
558 | _type = repository.repo_type | |
551 | else: |
|
559 | else: | |
552 | _type = repository |
|
560 | _type = repository | |
553 | return _type == 'hg' |
|
561 | return _type == 'hg' | |
554 |
|
562 | |||
555 |
|
563 | |||
556 | @cache_region('long_term', 'user_attr_or_none') |
|
564 | @cache_region('long_term', 'user_attr_or_none') | |
557 | def user_attr_or_none(author, show_attr): |
|
565 | def user_attr_or_none(author, show_attr): | |
558 | """Try to match email part of VCS committer string with a local user and return show_attr |
|
566 | """Try to match email part of VCS committer string with a local user and return show_attr | |
559 | - or return None if user not found""" |
|
567 | - or return None if user not found""" | |
560 | email = author_email(author) |
|
568 | email = author_email(author) | |
561 | if email: |
|
569 | if email: | |
562 | from kallithea.model.db import User |
|
570 | from kallithea.model.db import User | |
563 | user = User.get_by_email(email, cache=True) # cache will only use sql_cache_short |
|
571 | user = User.get_by_email(email, cache=True) # cache will only use sql_cache_short | |
564 | if user is not None: |
|
572 | if user is not None: | |
565 | return getattr(user, show_attr) |
|
573 | return getattr(user, show_attr) | |
566 | return None |
|
574 | return None | |
567 |
|
575 | |||
568 |
|
576 | |||
569 | def email_or_none(author): |
|
577 | def email_or_none(author): | |
570 | """Try to match email part of VCS committer string with a local user. |
|
578 | """Try to match email part of VCS committer string with a local user. | |
571 | Return primary email of user, email part of the specified author name, or None.""" |
|
579 | Return primary email of user, email part of the specified author name, or None.""" | |
572 | if not author: |
|
580 | if not author: | |
573 | return None |
|
581 | return None | |
574 | email = user_attr_or_none(author, 'email') |
|
582 | email = user_attr_or_none(author, 'email') | |
575 | if email is not None: |
|
583 | if email is not None: | |
576 | return email # always use user's main email address - not necessarily the one used to find user |
|
584 | return email # always use user's main email address - not necessarily the one used to find user | |
577 |
|
585 | |||
578 | # extract email from the commit string |
|
586 | # extract email from the commit string | |
579 | email = author_email(author) |
|
587 | email = author_email(author) | |
580 | if email: |
|
588 | if email: | |
581 | return email |
|
589 | return email | |
582 |
|
590 | |||
583 | # No valid email, not a valid user in the system, none! |
|
591 | # No valid email, not a valid user in the system, none! | |
584 | return None |
|
592 | return None | |
585 |
|
593 | |||
586 |
|
594 | |||
587 | def person(author, show_attr="username"): |
|
595 | def person(author, show_attr="username"): | |
588 | """Find the user identified by 'author', return one of the users attributes, |
|
596 | """Find the user identified by 'author', return one of the users attributes, | |
589 | default to the username attribute, None if there is no user""" |
|
597 | default to the username attribute, None if there is no user""" | |
590 | from kallithea.model.db import User |
|
598 | from kallithea.model.db import User | |
591 | # if author is already an instance use it for extraction |
|
599 | # if author is already an instance use it for extraction | |
592 | if isinstance(author, User): |
|
600 | if isinstance(author, User): | |
593 | return getattr(author, show_attr) |
|
601 | return getattr(author, show_attr) | |
594 |
|
602 | |||
595 | value = user_attr_or_none(author, show_attr) |
|
603 | value = user_attr_or_none(author, show_attr) | |
596 | if value is not None: |
|
604 | if value is not None: | |
597 | return value |
|
605 | return value | |
598 |
|
606 | |||
599 | # Still nothing? Just pass back the author name if any, else the email |
|
607 | # Still nothing? Just pass back the author name if any, else the email | |
600 | return author_name(author) or email(author) |
|
608 | return author_name(author) or email(author) | |
601 |
|
609 | |||
602 |
|
610 | |||
603 | def person_by_id(id_, show_attr="username"): |
|
611 | def person_by_id(id_, show_attr="username"): | |
604 | from kallithea.model.db import User |
|
612 | from kallithea.model.db import User | |
605 | # attr to return from fetched user |
|
|||
606 | person_getter = lambda usr: getattr(usr, show_attr) |
|
|||
607 |
|
||||
608 | # maybe it's an ID ? |
|
613 | # maybe it's an ID ? | |
609 | if str(id_).isdigit() or isinstance(id_, int): |
|
614 | if str(id_).isdigit() or isinstance(id_, int): | |
610 | id_ = int(id_) |
|
615 | id_ = int(id_) | |
611 | user = User.get(id_) |
|
616 | user = User.get(id_) | |
612 | if user is not None: |
|
617 | if user is not None: | |
613 |
return |
|
618 | return getattr(user, show_attr) | |
614 | return id_ |
|
619 | return id_ | |
615 |
|
620 | |||
616 |
|
621 | |||
617 | def boolicon(value): |
|
622 | def boolicon(value): | |
618 | """Returns boolean value of a value, represented as small html image of true/false |
|
623 | """Returns boolean value of a value, represented as small html image of true/false | |
619 | icons |
|
624 | icons | |
620 |
|
625 | |||
621 | :param value: value |
|
626 | :param value: value | |
622 | """ |
|
627 | """ | |
623 |
|
628 | |||
624 | if value: |
|
629 | if value: | |
625 | return HTML.tag('i', class_="icon-ok") |
|
630 | return HTML.tag('i', class_="icon-ok") | |
626 | else: |
|
631 | else: | |
627 | return HTML.tag('i', class_="icon-minus-circled") |
|
632 | return HTML.tag('i', class_="icon-minus-circled") | |
628 |
|
633 | |||
629 |
|
634 | |||
630 | def action_parser(user_log, feed=False, parse_cs=False): |
|
635 | def action_parser(user_log, feed=False, parse_cs=False): | |
631 | """ |
|
636 | """ | |
632 | This helper will action_map the specified string action into translated |
|
637 | This helper will action_map the specified string action into translated | |
633 | fancy names with icons and links |
|
638 | fancy names with icons and links | |
634 |
|
639 | |||
635 | :param user_log: user log instance |
|
640 | :param user_log: user log instance | |
636 | :param feed: use output for feeds (no html and fancy icons) |
|
641 | :param feed: use output for feeds (no html and fancy icons) | |
637 | :param parse_cs: parse Changesets into VCS instances |
|
642 | :param parse_cs: parse Changesets into VCS instances | |
638 | """ |
|
643 | """ | |
639 |
|
644 | |||
640 | action = user_log.action |
|
645 | action = user_log.action | |
641 | action_params = ' ' |
|
646 | action_params = ' ' | |
642 |
|
647 | |||
643 | x = action.split(':') |
|
648 | x = action.split(':') | |
644 |
|
649 | |||
645 | if len(x) > 1: |
|
650 | if len(x) > 1: | |
646 | action, action_params = x |
|
651 | action, action_params = x | |
647 |
|
652 | |||
648 | def get_cs_links(): |
|
653 | def get_cs_links(): | |
649 | revs_limit = 3 # display this amount always |
|
654 | revs_limit = 3 # display this amount always | |
650 | revs_top_limit = 50 # show upto this amount of changesets hidden |
|
655 | revs_top_limit = 50 # show upto this amount of changesets hidden | |
651 | revs_ids = action_params.split(',') |
|
656 | revs_ids = action_params.split(',') | |
652 | deleted = user_log.repository is None |
|
657 | deleted = user_log.repository is None | |
653 | if deleted: |
|
658 | if deleted: | |
654 | return ','.join(revs_ids) |
|
659 | return ','.join(revs_ids) | |
655 |
|
660 | |||
656 | repo_name = user_log.repository.repo_name |
|
661 | repo_name = user_log.repository.repo_name | |
657 |
|
662 | |||
658 | def lnk(rev, repo_name): |
|
663 | def lnk(rev, repo_name): | |
659 | lazy_cs = False |
|
664 | lazy_cs = False | |
660 | title_ = None |
|
665 | title_ = None | |
661 | url_ = '#' |
|
666 | url_ = '#' | |
662 | if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict): |
|
667 | if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict): | |
663 | if rev.op and rev.ref_name: |
|
668 | if rev.op and rev.ref_name: | |
664 | if rev.op == 'delete_branch': |
|
669 | if rev.op == 'delete_branch': | |
665 | lbl = _('Deleted branch: %s') % rev.ref_name |
|
670 | lbl = _('Deleted branch: %s') % rev.ref_name | |
666 | elif rev.op == 'tag': |
|
671 | elif rev.op == 'tag': | |
667 | lbl = _('Created tag: %s') % rev.ref_name |
|
672 | lbl = _('Created tag: %s') % rev.ref_name | |
668 | else: |
|
673 | else: | |
669 | lbl = 'Unknown operation %s' % rev.op |
|
674 | lbl = 'Unknown operation %s' % rev.op | |
670 | else: |
|
675 | else: | |
671 | lazy_cs = True |
|
676 | lazy_cs = True | |
672 | lbl = rev.short_id[:8] |
|
677 | lbl = rev.short_id[:8] | |
673 | url_ = url('changeset_home', repo_name=repo_name, |
|
678 | url_ = url('changeset_home', repo_name=repo_name, | |
674 | revision=rev.raw_id) |
|
679 | revision=rev.raw_id) | |
675 | else: |
|
680 | else: | |
676 | # changeset cannot be found - it might have been stripped or removed |
|
681 | # changeset cannot be found - it might have been stripped or removed | |
677 | lbl = rev[:12] |
|
682 | lbl = rev[:12] | |
678 | title_ = _('Changeset %s not found') % lbl |
|
683 | title_ = _('Changeset %s not found') % lbl | |
679 | if parse_cs: |
|
684 | if parse_cs: | |
680 | return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'}) |
|
685 | return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'}) | |
681 | return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '', |
|
686 | return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '', | |
682 | **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name}) |
|
687 | **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name}) | |
683 |
|
688 | |||
684 | def _get_op(rev_txt): |
|
689 | def _get_op(rev_txt): | |
685 | _op = None |
|
690 | _op = None | |
686 | _name = rev_txt |
|
691 | _name = rev_txt | |
687 | if len(rev_txt.split('=>')) == 2: |
|
692 | if len(rev_txt.split('=>')) == 2: | |
688 | _op, _name = rev_txt.split('=>') |
|
693 | _op, _name = rev_txt.split('=>') | |
689 | return _op, _name |
|
694 | return _op, _name | |
690 |
|
695 | |||
691 | revs = [] |
|
696 | revs = [] | |
692 | if len([v for v in revs_ids if v != '']) > 0: |
|
697 | if len([v for v in revs_ids if v != '']) > 0: | |
693 | repo = None |
|
698 | repo = None | |
694 | for rev in revs_ids[:revs_top_limit]: |
|
699 | for rev in revs_ids[:revs_top_limit]: | |
695 | _op, _name = _get_op(rev) |
|
700 | _op, _name = _get_op(rev) | |
696 |
|
701 | |||
697 | # we want parsed changesets, or new log store format is bad |
|
702 | # we want parsed changesets, or new log store format is bad | |
698 | if parse_cs: |
|
703 | if parse_cs: | |
699 | try: |
|
704 | try: | |
700 | if repo is None: |
|
705 | if repo is None: | |
701 | repo = user_log.repository.scm_instance |
|
706 | repo = user_log.repository.scm_instance | |
702 | _rev = repo.get_changeset(rev) |
|
707 | _rev = repo.get_changeset(rev) | |
703 | revs.append(_rev) |
|
708 | revs.append(_rev) | |
704 | except ChangesetDoesNotExistError: |
|
709 | except ChangesetDoesNotExistError: | |
705 | log.error('cannot find revision %s in this repo', rev) |
|
710 | log.error('cannot find revision %s in this repo', rev) | |
706 | revs.append(rev) |
|
711 | revs.append(rev) | |
707 | else: |
|
712 | else: | |
708 | _rev = AttributeDict({ |
|
713 | _rev = AttributeDict({ | |
709 | 'short_id': rev[:12], |
|
714 | 'short_id': rev[:12], | |
710 | 'raw_id': rev, |
|
715 | 'raw_id': rev, | |
711 | 'message': '', |
|
716 | 'message': '', | |
712 | 'op': _op, |
|
717 | 'op': _op, | |
713 | 'ref_name': _name |
|
718 | 'ref_name': _name | |
714 | }) |
|
719 | }) | |
715 | revs.append(_rev) |
|
720 | revs.append(_rev) | |
716 | cs_links = [" " + ', '.join( |
|
721 | cs_links = [" " + ', '.join( | |
717 | [lnk(rev, repo_name) for rev in revs[:revs_limit]] |
|
722 | [lnk(rev, repo_name) for rev in revs[:revs_limit]] | |
718 | )] |
|
723 | )] | |
719 | _op1, _name1 = _get_op(revs_ids[0]) |
|
724 | _op1, _name1 = _get_op(revs_ids[0]) | |
720 | _op2, _name2 = _get_op(revs_ids[-1]) |
|
725 | _op2, _name2 = _get_op(revs_ids[-1]) | |
721 |
|
726 | |||
722 | _rev = '%s...%s' % (_name1, _name2) |
|
727 | _rev = '%s...%s' % (_name1, _name2) | |
723 |
|
728 | |||
724 | compare_view = ( |
|
729 | compare_view = ( | |
725 | ' <div class="compare_view" data-toggle="tooltip" title="%s">' |
|
730 | ' <div class="compare_view" data-toggle="tooltip" title="%s">' | |
726 | '<a href="%s">%s</a> </div>' % ( |
|
731 | '<a href="%s">%s</a> </div>' % ( | |
727 | _('Show all combined changesets %s->%s') % ( |
|
732 | _('Show all combined changesets %s->%s') % ( | |
728 | revs_ids[0][:12], revs_ids[-1][:12] |
|
733 | revs_ids[0][:12], revs_ids[-1][:12] | |
729 | ), |
|
734 | ), | |
730 | url('changeset_home', repo_name=repo_name, |
|
735 | url('changeset_home', repo_name=repo_name, | |
731 | revision=_rev |
|
736 | revision=_rev | |
732 | ), |
|
737 | ), | |
733 | _('Compare view') |
|
738 | _('Compare view') | |
734 | ) |
|
739 | ) | |
735 | ) |
|
740 | ) | |
736 |
|
741 | |||
737 | # if we have exactly one more than normally displayed |
|
742 | # if we have exactly one more than normally displayed | |
738 | # just display it, takes less space than displaying |
|
743 | # just display it, takes less space than displaying | |
739 | # "and 1 more revisions" |
|
744 | # "and 1 more revisions" | |
740 | if len(revs_ids) == revs_limit + 1: |
|
745 | if len(revs_ids) == revs_limit + 1: | |
741 | cs_links.append(", " + lnk(revs[revs_limit], repo_name)) |
|
746 | cs_links.append(", " + lnk(revs[revs_limit], repo_name)) | |
742 |
|
747 | |||
743 | # hidden-by-default ones |
|
748 | # hidden-by-default ones | |
744 | if len(revs_ids) > revs_limit + 1: |
|
749 | if len(revs_ids) > revs_limit + 1: | |
745 | uniq_id = revs_ids[0] |
|
750 | uniq_id = revs_ids[0] | |
746 | html_tmpl = ( |
|
751 | html_tmpl = ( | |
747 | '<span> %s <a class="show_more" id="_%s" ' |
|
752 | '<span> %s <a class="show_more" id="_%s" ' | |
748 | 'href="#more">%s</a> %s</span>' |
|
753 | 'href="#more">%s</a> %s</span>' | |
749 | ) |
|
754 | ) | |
750 | if not feed: |
|
755 | if not feed: | |
751 | cs_links.append(html_tmpl % ( |
|
756 | cs_links.append(html_tmpl % ( | |
752 | _('and'), |
|
757 | _('and'), | |
753 | uniq_id, _('%s more') % (len(revs_ids) - revs_limit), |
|
758 | uniq_id, _('%s more') % (len(revs_ids) - revs_limit), | |
754 | _('revisions') |
|
759 | _('revisions') | |
755 | ) |
|
760 | ) | |
756 | ) |
|
761 | ) | |
757 |
|
762 | |||
758 | if not feed: |
|
763 | if not feed: | |
759 | html_tmpl = '<span id="%s" style="display:none">, %s </span>' |
|
764 | html_tmpl = '<span id="%s" style="display:none">, %s </span>' | |
760 | else: |
|
765 | else: | |
761 | html_tmpl = '<span id="%s"> %s </span>' |
|
766 | html_tmpl = '<span id="%s"> %s </span>' | |
762 |
|
767 | |||
763 | morelinks = ', '.join( |
|
768 | morelinks = ', '.join( | |
764 | [lnk(rev, repo_name) for rev in revs[revs_limit:]] |
|
769 | [lnk(rev, repo_name) for rev in revs[revs_limit:]] | |
765 | ) |
|
770 | ) | |
766 |
|
771 | |||
767 | if len(revs_ids) > revs_top_limit: |
|
772 | if len(revs_ids) > revs_top_limit: | |
768 | morelinks += ', ...' |
|
773 | morelinks += ', ...' | |
769 |
|
774 | |||
770 | cs_links.append(html_tmpl % (uniq_id, morelinks)) |
|
775 | cs_links.append(html_tmpl % (uniq_id, morelinks)) | |
771 | if len(revs) > 1: |
|
776 | if len(revs) > 1: | |
772 | cs_links.append(compare_view) |
|
777 | cs_links.append(compare_view) | |
773 | return ''.join(cs_links) |
|
778 | return ''.join(cs_links) | |
774 |
|
779 | |||
775 | def get_fork_name(): |
|
780 | def get_fork_name(): | |
776 | repo_name = action_params |
|
781 | repo_name = action_params | |
777 | url_ = url('summary_home', repo_name=repo_name) |
|
782 | url_ = url('summary_home', repo_name=repo_name) | |
778 | return _('Fork name %s') % link_to(action_params, url_) |
|
783 | return _('Fork name %s') % link_to(action_params, url_) | |
779 |
|
784 | |||
780 | def get_user_name(): |
|
785 | def get_user_name(): | |
781 | user_name = action_params |
|
786 | user_name = action_params | |
782 | return user_name |
|
787 | return user_name | |
783 |
|
788 | |||
784 | def get_users_group(): |
|
789 | def get_users_group(): | |
785 | group_name = action_params |
|
790 | group_name = action_params | |
786 | return group_name |
|
791 | return group_name | |
787 |
|
792 | |||
788 | def get_pull_request(): |
|
793 | def get_pull_request(): | |
789 | from kallithea.model.db import PullRequest |
|
794 | from kallithea.model.db import PullRequest | |
790 | pull_request_id = action_params |
|
795 | pull_request_id = action_params | |
791 | nice_id = PullRequest.make_nice_id(pull_request_id) |
|
796 | nice_id = PullRequest.make_nice_id(pull_request_id) | |
792 |
|
797 | |||
793 | deleted = user_log.repository is None |
|
798 | deleted = user_log.repository is None | |
794 | if deleted: |
|
799 | if deleted: | |
795 | repo_name = user_log.repository_name |
|
800 | repo_name = user_log.repository_name | |
796 | else: |
|
801 | else: | |
797 | repo_name = user_log.repository.repo_name |
|
802 | repo_name = user_log.repository.repo_name | |
798 |
|
803 | |||
799 | return link_to(_('Pull request %s') % nice_id, |
|
804 | return link_to(_('Pull request %s') % nice_id, | |
800 | url('pullrequest_show', repo_name=repo_name, |
|
805 | url('pullrequest_show', repo_name=repo_name, | |
801 | pull_request_id=pull_request_id)) |
|
806 | pull_request_id=pull_request_id)) | |
802 |
|
807 | |||
803 | def get_archive_name(): |
|
808 | def get_archive_name(): | |
804 | archive_name = action_params |
|
809 | archive_name = action_params | |
805 | return archive_name |
|
810 | return archive_name | |
806 |
|
811 | |||
807 | # action : translated str, callback(extractor), icon |
|
812 | # action : translated str, callback(extractor), icon | |
808 | action_map = { |
|
813 | action_map = { | |
809 | 'user_deleted_repo': (_('[deleted] repository'), |
|
814 | 'user_deleted_repo': (_('[deleted] repository'), | |
810 | None, 'icon-trashcan'), |
|
815 | None, 'icon-trashcan'), | |
811 | 'user_created_repo': (_('[created] repository'), |
|
816 | 'user_created_repo': (_('[created] repository'), | |
812 | None, 'icon-plus'), |
|
817 | None, 'icon-plus'), | |
813 | 'user_created_fork': (_('[created] repository as fork'), |
|
818 | 'user_created_fork': (_('[created] repository as fork'), | |
814 | None, 'icon-fork'), |
|
819 | None, 'icon-fork'), | |
815 | 'user_forked_repo': (_('[forked] repository'), |
|
820 | 'user_forked_repo': (_('[forked] repository'), | |
816 | get_fork_name, 'icon-fork'), |
|
821 | get_fork_name, 'icon-fork'), | |
817 | 'user_updated_repo': (_('[updated] repository'), |
|
822 | 'user_updated_repo': (_('[updated] repository'), | |
818 | None, 'icon-pencil'), |
|
823 | None, 'icon-pencil'), | |
819 | 'user_downloaded_archive': (_('[downloaded] archive from repository'), |
|
824 | 'user_downloaded_archive': (_('[downloaded] archive from repository'), | |
820 | get_archive_name, 'icon-download-cloud'), |
|
825 | get_archive_name, 'icon-download-cloud'), | |
821 | 'admin_deleted_repo': (_('[delete] repository'), |
|
826 | 'admin_deleted_repo': (_('[delete] repository'), | |
822 | None, 'icon-trashcan'), |
|
827 | None, 'icon-trashcan'), | |
823 | 'admin_created_repo': (_('[created] repository'), |
|
828 | 'admin_created_repo': (_('[created] repository'), | |
824 | None, 'icon-plus'), |
|
829 | None, 'icon-plus'), | |
825 | 'admin_forked_repo': (_('[forked] repository'), |
|
830 | 'admin_forked_repo': (_('[forked] repository'), | |
826 | None, 'icon-fork'), |
|
831 | None, 'icon-fork'), | |
827 | 'admin_updated_repo': (_('[updated] repository'), |
|
832 | 'admin_updated_repo': (_('[updated] repository'), | |
828 | None, 'icon-pencil'), |
|
833 | None, 'icon-pencil'), | |
829 | 'admin_created_user': (_('[created] user'), |
|
834 | 'admin_created_user': (_('[created] user'), | |
830 | get_user_name, 'icon-user'), |
|
835 | get_user_name, 'icon-user'), | |
831 | 'admin_updated_user': (_('[updated] user'), |
|
836 | 'admin_updated_user': (_('[updated] user'), | |
832 | get_user_name, 'icon-user'), |
|
837 | get_user_name, 'icon-user'), | |
833 | 'admin_created_users_group': (_('[created] user group'), |
|
838 | 'admin_created_users_group': (_('[created] user group'), | |
834 | get_users_group, 'icon-pencil'), |
|
839 | get_users_group, 'icon-pencil'), | |
835 | 'admin_updated_users_group': (_('[updated] user group'), |
|
840 | 'admin_updated_users_group': (_('[updated] user group'), | |
836 | get_users_group, 'icon-pencil'), |
|
841 | get_users_group, 'icon-pencil'), | |
837 | 'user_commented_revision': (_('[commented] on revision in repository'), |
|
842 | 'user_commented_revision': (_('[commented] on revision in repository'), | |
838 | get_cs_links, 'icon-comment'), |
|
843 | get_cs_links, 'icon-comment'), | |
839 | 'user_commented_pull_request': (_('[commented] on pull request for'), |
|
844 | 'user_commented_pull_request': (_('[commented] on pull request for'), | |
840 | get_pull_request, 'icon-comment'), |
|
845 | get_pull_request, 'icon-comment'), | |
841 | 'user_closed_pull_request': (_('[closed] pull request for'), |
|
846 | 'user_closed_pull_request': (_('[closed] pull request for'), | |
842 | get_pull_request, 'icon-ok'), |
|
847 | get_pull_request, 'icon-ok'), | |
843 | 'push': (_('[pushed] into'), |
|
848 | 'push': (_('[pushed] into'), | |
844 | get_cs_links, 'icon-move-up'), |
|
849 | get_cs_links, 'icon-move-up'), | |
845 | 'push_local': (_('[committed via Kallithea] into repository'), |
|
850 | 'push_local': (_('[committed via Kallithea] into repository'), | |
846 | get_cs_links, 'icon-pencil'), |
|
851 | get_cs_links, 'icon-pencil'), | |
847 | 'push_remote': (_('[pulled from remote] into repository'), |
|
852 | 'push_remote': (_('[pulled from remote] into repository'), | |
848 | get_cs_links, 'icon-move-up'), |
|
853 | get_cs_links, 'icon-move-up'), | |
849 | 'pull': (_('[pulled] from'), |
|
854 | 'pull': (_('[pulled] from'), | |
850 | None, 'icon-move-down'), |
|
855 | None, 'icon-move-down'), | |
851 | 'started_following_repo': (_('[started following] repository'), |
|
856 | 'started_following_repo': (_('[started following] repository'), | |
852 | None, 'icon-heart'), |
|
857 | None, 'icon-heart'), | |
853 | 'stopped_following_repo': (_('[stopped following] repository'), |
|
858 | 'stopped_following_repo': (_('[stopped following] repository'), | |
854 | None, 'icon-heart-empty'), |
|
859 | None, 'icon-heart-empty'), | |
855 | } |
|
860 | } | |
856 |
|
861 | |||
857 | action_str = action_map.get(action, action) |
|
862 | action_str = action_map.get(action, action) | |
858 | if feed: |
|
863 | if feed: | |
859 | action = action_str[0].replace('[', '').replace(']', '') |
|
864 | action = action_str[0].replace('[', '').replace(']', '') | |
860 | else: |
|
865 | else: | |
861 | action = action_str[0] \ |
|
866 | action = action_str[0] \ | |
862 | .replace('[', '<b>') \ |
|
867 | .replace('[', '<b>') \ | |
863 | .replace(']', '</b>') |
|
868 | .replace(']', '</b>') | |
864 |
|
869 | |||
865 | action_params_func = lambda: "" |
|
870 | action_params_func = action_str[1] if callable(action_str[1]) else (lambda: "") | |
866 |
|
||||
867 | if callable(action_str[1]): |
|
|||
868 | action_params_func = action_str[1] |
|
|||
869 |
|
871 | |||
870 | def action_parser_icon(): |
|
872 | def action_parser_icon(): | |
871 | action = user_log.action |
|
873 | action = user_log.action | |
872 | action_params = None |
|
874 | action_params = None | |
873 | x = action.split(':') |
|
875 | x = action.split(':') | |
874 |
|
876 | |||
875 | if len(x) > 1: |
|
877 | if len(x) > 1: | |
876 | action, action_params = x |
|
878 | action, action_params = x | |
877 |
|
879 | |||
878 | ico = action_map.get(action, ['', '', ''])[2] |
|
880 | ico = action_map.get(action, ['', '', ''])[2] | |
879 | html = """<i class="%s"></i>""" % ico |
|
881 | html = """<i class="%s"></i>""" % ico | |
880 | return literal(html) |
|
882 | return literal(html) | |
881 |
|
883 | |||
882 | # returned callbacks we need to call to get |
|
884 | # returned callbacks we need to call to get | |
883 | return [lambda: literal(action), action_params_func, action_parser_icon] |
|
885 | return [lambda: literal(action), action_params_func, action_parser_icon] | |
884 |
|
886 | |||
885 |
|
887 | |||
886 | #============================================================================== |
|
888 | #============================================================================== | |
887 | # GRAVATAR URL |
|
889 | # GRAVATAR URL | |
888 | #============================================================================== |
|
890 | #============================================================================== | |
889 | def gravatar_div(email_address, cls='', size=30, **div_attributes): |
|
891 | def gravatar_div(email_address, cls='', size=30, **div_attributes): | |
890 | """Return an html literal with a span around a gravatar if they are enabled. |
|
892 | """Return an html literal with a span around a gravatar if they are enabled. | |
891 | Extra keyword parameters starting with 'div_' will get the prefix removed |
|
893 | Extra keyword parameters starting with 'div_' will get the prefix removed | |
892 | and '_' changed to '-' and be used as attributes on the div. The default |
|
894 | and '_' changed to '-' and be used as attributes on the div. The default | |
893 | class is 'gravatar'. |
|
895 | class is 'gravatar'. | |
894 | """ |
|
896 | """ | |
895 | from tg import tmpl_context as c |
|
897 | from tg import tmpl_context as c | |
896 | if not c.visual.use_gravatar: |
|
898 | if not c.visual.use_gravatar: | |
897 | return '' |
|
899 | return '' | |
898 | if 'div_class' not in div_attributes: |
|
900 | if 'div_class' not in div_attributes: | |
899 | div_attributes['div_class'] = "gravatar" |
|
901 | div_attributes['div_class'] = "gravatar" | |
900 | attributes = [] |
|
902 | attributes = [] | |
901 | for k, v in sorted(div_attributes.items()): |
|
903 | for k, v in sorted(div_attributes.items()): | |
902 | assert k.startswith('div_'), k |
|
904 | assert k.startswith('div_'), k | |
903 | attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v))) |
|
905 | attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v))) | |
904 | return literal("""<span%s>%s</span>""" % |
|
906 | return literal("""<span%s>%s</span>""" % | |
905 | (''.join(attributes), |
|
907 | (''.join(attributes), | |
906 | gravatar(email_address, cls=cls, size=size))) |
|
908 | gravatar(email_address, cls=cls, size=size))) | |
907 |
|
909 | |||
908 |
|
910 | |||
909 | def gravatar(email_address, cls='', size=30): |
|
911 | def gravatar(email_address, cls='', size=30): | |
910 | """return html element of the gravatar |
|
912 | """return html element of the gravatar | |
911 |
|
913 | |||
912 | This method will return an <img> with the resolution double the size (for |
|
914 | This method will return an <img> with the resolution double the size (for | |
913 | retina screens) of the image. If the url returned from gravatar_url is |
|
915 | retina screens) of the image. If the url returned from gravatar_url is | |
914 | empty then we fallback to using an icon. |
|
916 | empty then we fallback to using an icon. | |
915 |
|
917 | |||
916 | """ |
|
918 | """ | |
917 | from tg import tmpl_context as c |
|
919 | from tg import tmpl_context as c | |
918 | if not c.visual.use_gravatar: |
|
920 | if not c.visual.use_gravatar: | |
919 | return '' |
|
921 | return '' | |
920 |
|
922 | |||
921 | src = gravatar_url(email_address, size * 2) |
|
923 | src = gravatar_url(email_address, size * 2) | |
922 |
|
924 | |||
923 | if src: |
|
925 | if src: | |
924 | # here it makes sense to use style="width: ..." (instead of, say, a |
|
926 | # here it makes sense to use style="width: ..." (instead of, say, a | |
925 | # stylesheet) because we using this to generate a high-res (retina) size |
|
927 | # stylesheet) because we using this to generate a high-res (retina) size | |
926 | html = ('<i class="icon-gravatar {cls}"' |
|
928 | html = ('<i class="icon-gravatar {cls}"' | |
927 | ' style="font-size: {size}px;background-size: {size}px;background-image: url(\'{src}\')"' |
|
929 | ' style="font-size: {size}px;background-size: {size}px;background-image: url(\'{src}\')"' | |
928 | '></i>').format(cls=cls, size=size, src=src) |
|
930 | '></i>').format(cls=cls, size=size, src=src) | |
929 |
|
931 | |||
930 | else: |
|
932 | else: | |
931 | # if src is empty then there was no gravatar, so we use a font icon |
|
933 | # if src is empty then there was no gravatar, so we use a font icon | |
932 | html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>""" |
|
934 | html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>""" | |
933 | .format(cls=cls, size=size, src=src)) |
|
935 | .format(cls=cls, size=size, src=src)) | |
934 |
|
936 | |||
935 | return literal(html) |
|
937 | return literal(html) | |
936 |
|
938 | |||
937 |
|
939 | |||
938 | def gravatar_url(email_address, size=30, default=''): |
|
940 | def gravatar_url(email_address, size=30, default=''): | |
939 | # doh, we need to re-import those to mock it later |
|
941 | # doh, we need to re-import those to mock it later | |
940 | from kallithea.config.routing import url |
|
942 | from kallithea.config.routing import url | |
941 | from kallithea.model.db import User |
|
943 | from kallithea.model.db import User | |
942 | from tg import tmpl_context as c |
|
944 | from tg import tmpl_context as c | |
943 | if not c.visual.use_gravatar: |
|
945 | if not c.visual.use_gravatar: | |
944 | return "" |
|
946 | return "" | |
945 |
|
947 | |||
946 | _def = 'anonymous@kallithea-scm.org' # default gravatar |
|
948 | _def = 'anonymous@kallithea-scm.org' # default gravatar | |
947 | email_address = email_address or _def |
|
949 | email_address = email_address or _def | |
948 |
|
950 | |||
949 | if email_address == _def: |
|
951 | if email_address == _def: | |
950 | return default |
|
952 | return default | |
951 |
|
953 | |||
952 | parsed_url = urllib.parse.urlparse(url.current(qualified=True)) |
|
954 | parsed_url = urllib.parse.urlparse(url.current(qualified=True)) | |
953 | url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \ |
|
955 | url = (c.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL) \ | |
954 | .replace('{email}', email_address) \ |
|
956 | .replace('{email}', email_address) \ | |
955 | .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \ |
|
957 | .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \ | |
956 | .replace('{netloc}', parsed_url.netloc) \ |
|
958 | .replace('{netloc}', parsed_url.netloc) \ | |
957 | .replace('{scheme}', parsed_url.scheme) \ |
|
959 | .replace('{scheme}', parsed_url.scheme) \ | |
958 | .replace('{size}', str(size)) |
|
960 | .replace('{size}', str(size)) | |
959 | return url |
|
961 | return url | |
960 |
|
962 | |||
961 |
|
963 | |||
962 | def changed_tooltip(nodes): |
|
964 | def changed_tooltip(nodes): | |
963 | """ |
|
965 | """ | |
964 | Generates a html string for changed nodes in changeset page. |
|
966 | Generates a html string for changed nodes in changeset page. | |
965 | It limits the output to 30 entries |
|
967 | It limits the output to 30 entries | |
966 |
|
968 | |||
967 | :param nodes: LazyNodesGenerator |
|
969 | :param nodes: LazyNodesGenerator | |
968 | """ |
|
970 | """ | |
969 | if nodes: |
|
971 | if nodes: | |
970 | pref = ': <br/> ' |
|
972 | pref = ': <br/> ' | |
971 | suf = '' |
|
973 | suf = '' | |
972 | if len(nodes) > 30: |
|
974 | if len(nodes) > 30: | |
973 | suf = '<br/>' + _(' and %s more') % (len(nodes) - 30) |
|
975 | suf = '<br/>' + _(' and %s more') % (len(nodes) - 30) | |
974 | return literal(pref + '<br/> '.join([x.path |
|
976 | return literal(pref + '<br/> '.join([x.path | |
975 | for x in nodes[:30]]) + suf) |
|
977 | for x in nodes[:30]]) + suf) | |
976 | else: |
|
978 | else: | |
977 | return ': ' + _('No files') |
|
979 | return ': ' + _('No files') | |
978 |
|
980 | |||
979 |
|
981 | |||
980 | def fancy_file_stats(stats): |
|
982 | def fancy_file_stats(stats): | |
981 | """ |
|
983 | """ | |
982 | Displays a fancy two colored bar for number of added/deleted |
|
984 | Displays a fancy two colored bar for number of added/deleted | |
983 | lines of code on file |
|
985 | lines of code on file | |
984 |
|
986 | |||
985 | :param stats: two element list of added/deleted lines of code |
|
987 | :param stats: two element list of added/deleted lines of code | |
986 | """ |
|
988 | """ | |
987 | from kallithea.lib.diffs import NEW_FILENODE, DEL_FILENODE, \ |
|
989 | from kallithea.lib.diffs import NEW_FILENODE, DEL_FILENODE, \ | |
988 | MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE |
|
990 | MOD_FILENODE, RENAMED_FILENODE, CHMOD_FILENODE, BIN_FILENODE | |
989 |
|
991 | |||
990 | a, d = stats['added'], stats['deleted'] |
|
992 | a, d = stats['added'], stats['deleted'] | |
991 | width = 100 |
|
993 | width = 100 | |
992 |
|
994 | |||
993 | if stats['binary']: |
|
995 | if stats['binary']: | |
994 | # binary mode |
|
996 | # binary mode | |
995 | lbl = '' |
|
997 | lbl = '' | |
996 | bin_op = 1 |
|
998 | bin_op = 1 | |
997 |
|
999 | |||
998 | if BIN_FILENODE in stats['ops']: |
|
1000 | if BIN_FILENODE in stats['ops']: | |
999 | lbl = 'bin+' |
|
1001 | lbl = 'bin+' | |
1000 |
|
1002 | |||
1001 | if NEW_FILENODE in stats['ops']: |
|
1003 | if NEW_FILENODE in stats['ops']: | |
1002 | lbl += _('new file') |
|
1004 | lbl += _('new file') | |
1003 | bin_op = NEW_FILENODE |
|
1005 | bin_op = NEW_FILENODE | |
1004 | elif MOD_FILENODE in stats['ops']: |
|
1006 | elif MOD_FILENODE in stats['ops']: | |
1005 | lbl += _('mod') |
|
1007 | lbl += _('mod') | |
1006 | bin_op = MOD_FILENODE |
|
1008 | bin_op = MOD_FILENODE | |
1007 | elif DEL_FILENODE in stats['ops']: |
|
1009 | elif DEL_FILENODE in stats['ops']: | |
1008 | lbl += _('del') |
|
1010 | lbl += _('del') | |
1009 | bin_op = DEL_FILENODE |
|
1011 | bin_op = DEL_FILENODE | |
1010 | elif RENAMED_FILENODE in stats['ops']: |
|
1012 | elif RENAMED_FILENODE in stats['ops']: | |
1011 | lbl += _('rename') |
|
1013 | lbl += _('rename') | |
1012 | bin_op = RENAMED_FILENODE |
|
1014 | bin_op = RENAMED_FILENODE | |
1013 |
|
1015 | |||
1014 | # chmod can go with other operations |
|
1016 | # chmod can go with other operations | |
1015 | if CHMOD_FILENODE in stats['ops']: |
|
1017 | if CHMOD_FILENODE in stats['ops']: | |
1016 | _org_lbl = _('chmod') |
|
1018 | _org_lbl = _('chmod') | |
1017 | lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl |
|
1019 | lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl | |
1018 |
|
1020 | |||
1019 | #import ipdb;ipdb.set_trace() |
|
1021 | #import ipdb;ipdb.set_trace() | |
1020 | b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl) |
|
1022 | b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl) | |
1021 | b_a = '<div class="bin bin1" style="width:0%"></div>' |
|
1023 | b_a = '<div class="bin bin1" style="width:0%"></div>' | |
1022 | return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d)) |
|
1024 | return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d)) | |
1023 |
|
1025 | |||
1024 | t = stats['added'] + stats['deleted'] |
|
1026 | t = stats['added'] + stats['deleted'] | |
1025 | unit = float(width) / (t or 1) |
|
1027 | unit = float(width) / (t or 1) | |
1026 |
|
1028 | |||
1027 | # needs > 9% of width to be visible or 0 to be hidden |
|
1029 | # needs > 9% of width to be visible or 0 to be hidden | |
1028 | a_p = max(9, unit * a) if a > 0 else 0 |
|
1030 | a_p = max(9, unit * a) if a > 0 else 0 | |
1029 | d_p = max(9, unit * d) if d > 0 else 0 |
|
1031 | d_p = max(9, unit * d) if d > 0 else 0 | |
1030 | p_sum = a_p + d_p |
|
1032 | p_sum = a_p + d_p | |
1031 |
|
1033 | |||
1032 | if p_sum > width: |
|
1034 | if p_sum > width: | |
1033 | # adjust the percentage to be == 100% since we adjusted to 9 |
|
1035 | # adjust the percentage to be == 100% since we adjusted to 9 | |
1034 | if a_p > d_p: |
|
1036 | if a_p > d_p: | |
1035 | a_p = a_p - (p_sum - width) |
|
1037 | a_p = a_p - (p_sum - width) | |
1036 | else: |
|
1038 | else: | |
1037 | d_p = d_p - (p_sum - width) |
|
1039 | d_p = d_p - (p_sum - width) | |
1038 |
|
1040 | |||
1039 | a_v = a if a > 0 else '' |
|
1041 | a_v = a if a > 0 else '' | |
1040 | d_v = d if d > 0 else '' |
|
1042 | d_v = d if d > 0 else '' | |
1041 |
|
1043 | |||
1042 | d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % ( |
|
1044 | d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % ( | |
1043 | a_p, a_v |
|
1045 | a_p, a_v | |
1044 | ) |
|
1046 | ) | |
1045 | d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % ( |
|
1047 | d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % ( | |
1046 | d_p, d_v |
|
1048 | d_p, d_v | |
1047 | ) |
|
1049 | ) | |
1048 | return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d)) |
|
1050 | return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d)) | |
1049 |
|
1051 | |||
1050 |
|
1052 | |||
1051 | _URLIFY_RE = re.compile(r''' |
|
1053 | _URLIFY_RE = re.compile(r''' | |
1052 | # URL markup |
|
1054 | # URL markup | |
1053 | (?P<url>%s) | |
|
1055 | (?P<url>%s) | | |
1054 | # @mention markup |
|
1056 | # @mention markup | |
1055 | (?P<mention>%s) | |
|
1057 | (?P<mention>%s) | | |
1056 | # Changeset hash markup |
|
1058 | # Changeset hash markup | |
1057 | (?<!\w|[-_]) |
|
1059 | (?<!\w|[-_]) | |
1058 | (?P<hash>[0-9a-f]{12,40}) |
|
1060 | (?P<hash>[0-9a-f]{12,40}) | |
1059 | (?!\w|[-_]) | |
|
1061 | (?!\w|[-_]) | | |
1060 | # Markup of *bold text* |
|
1062 | # Markup of *bold text* | |
1061 | (?: |
|
1063 | (?: | |
1062 | (?:^|(?<=\s)) |
|
1064 | (?:^|(?<=\s)) | |
1063 | (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] ) |
|
1065 | (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] ) | |
1064 | (?![*\w]) |
|
1066 | (?![*\w]) | |
1065 | ) | |
|
1067 | ) | | |
1066 | # "Stylize" markup |
|
1068 | # "Stylize" markup | |
1067 | \[see\ \=>\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] | |
|
1069 | \[see\ \=>\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] | | |
1068 | \[license\ \=>\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] | |
|
1070 | \[license\ \=>\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] | | |
1069 | \[(?P<tagtype>requires|recommends|conflicts|base)\ \=>\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] | |
|
1071 | \[(?P<tagtype>requires|recommends|conflicts|base)\ \=>\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] | | |
1070 | \[(?:lang|language)\ \=>\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] | |
|
1072 | \[(?:lang|language)\ \=>\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] | | |
1071 | \[(?P<tag>[a-z]+)\] |
|
1073 | \[(?P<tag>[a-z]+)\] | |
1072 | ''' % (url_re.pattern, MENTIONS_REGEX.pattern), |
|
1074 | ''' % (url_re.pattern, MENTIONS_REGEX.pattern), | |
1073 | re.VERBOSE | re.MULTILINE | re.IGNORECASE) |
|
1075 | re.VERBOSE | re.MULTILINE | re.IGNORECASE) | |
1074 |
|
1076 | |||
1075 |
|
1077 | |||
1076 | def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate): |
|
1078 | def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate): | |
1077 | """ |
|
1079 | """ | |
1078 | Parses given text message and make literal html with markup. |
|
1080 | Parses given text message and make literal html with markup. | |
1079 | The text will be truncated to the specified length. |
|
1081 | The text will be truncated to the specified length. | |
1080 | Hashes are turned into changeset links to specified repository. |
|
1082 | Hashes are turned into changeset links to specified repository. | |
1081 | URLs links to what they say. |
|
1083 | URLs links to what they say. | |
1082 | Issues are linked to given issue-server. |
|
1084 | Issues are linked to given issue-server. | |
1083 | If link_ is provided, all text not already linking somewhere will link there. |
|
1085 | If link_ is provided, all text not already linking somewhere will link there. | |
1084 | >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>") |
|
1086 | >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>") | |
1085 | literal('Urlify <a href="http://example.com/">http://example.com/</a> and '<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> <b>markup/b>') |
|
1087 | literal('Urlify <a href="http://example.com/">http://example.com/</a> and '<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> <b>markup/b>') | |
1086 | """ |
|
1088 | """ | |
1087 |
|
1089 | |||
1088 | def _replace(match_obj): |
|
1090 | def _replace(match_obj): | |
1089 | url = match_obj.group('url') |
|
1091 | url = match_obj.group('url') | |
1090 | if url is not None: |
|
1092 | if url is not None: | |
1091 | return '<a href="%(url)s">%(url)s</a>' % {'url': url} |
|
1093 | return '<a href="%(url)s">%(url)s</a>' % {'url': url} | |
1092 | mention = match_obj.group('mention') |
|
1094 | mention = match_obj.group('mention') | |
1093 | if mention is not None: |
|
1095 | if mention is not None: | |
1094 | return '<b>%s</b>' % mention |
|
1096 | return '<b>%s</b>' % mention | |
1095 | hash_ = match_obj.group('hash') |
|
1097 | hash_ = match_obj.group('hash') | |
1096 | if hash_ is not None and repo_name is not None: |
|
1098 | if hash_ is not None and repo_name is not None: | |
1097 | from kallithea.config.routing import url # doh, we need to re-import url to mock it later |
|
1099 | from kallithea.config.routing import url # doh, we need to re-import url to mock it later | |
1098 | return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % { |
|
1100 | return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % { | |
1099 | 'url': url('changeset_home', repo_name=repo_name, revision=hash_), |
|
1101 | 'url': url('changeset_home', repo_name=repo_name, revision=hash_), | |
1100 | 'hash': hash_, |
|
1102 | 'hash': hash_, | |
1101 | } |
|
1103 | } | |
1102 | bold = match_obj.group('bold') |
|
1104 | bold = match_obj.group('bold') | |
1103 | if bold is not None: |
|
1105 | if bold is not None: | |
1104 | return '<b>*%s*</b>' % _urlify(bold[1:-1]) |
|
1106 | return '<b>*%s*</b>' % _urlify(bold[1:-1]) | |
1105 | if stylize: |
|
1107 | if stylize: | |
1106 | seen = match_obj.group('seen') |
|
1108 | seen = match_obj.group('seen') | |
1107 | if seen: |
|
1109 | if seen: | |
1108 | return '<div class="label label-meta" data-tag="see">see => %s</div>' % seen |
|
1110 | return '<div class="label label-meta" data-tag="see">see => %s</div>' % seen | |
1109 | license = match_obj.group('license') |
|
1111 | license = match_obj.group('license') | |
1110 | if license: |
|
1112 | if license: | |
1111 | return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license) |
|
1113 | return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license) | |
1112 | tagtype = match_obj.group('tagtype') |
|
1114 | tagtype = match_obj.group('tagtype') | |
1113 | if tagtype: |
|
1115 | if tagtype: | |
1114 | tagvalue = match_obj.group('tagvalue') |
|
1116 | tagvalue = match_obj.group('tagvalue') | |
1115 | return '<div class="label label-meta" data-tag="%s">%s => <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue) |
|
1117 | return '<div class="label label-meta" data-tag="%s">%s => <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue) | |
1116 | lang = match_obj.group('lang') |
|
1118 | lang = match_obj.group('lang') | |
1117 | if lang: |
|
1119 | if lang: | |
1118 | return '<div class="label label-meta" data-tag="lang">%s</div>' % lang |
|
1120 | return '<div class="label label-meta" data-tag="lang">%s</div>' % lang | |
1119 | tag = match_obj.group('tag') |
|
1121 | tag = match_obj.group('tag') | |
1120 | if tag: |
|
1122 | if tag: | |
1121 | return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag) |
|
1123 | return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag) | |
1122 | return match_obj.group(0) |
|
1124 | return match_obj.group(0) | |
1123 |
|
1125 | |||
1124 | def _urlify(s): |
|
1126 | def _urlify(s): | |
1125 | """ |
|
1127 | """ | |
1126 | Extract urls from text and make html links out of them |
|
1128 | Extract urls from text and make html links out of them | |
1127 | """ |
|
1129 | """ | |
1128 | return _URLIFY_RE.sub(_replace, s) |
|
1130 | return _URLIFY_RE.sub(_replace, s) | |
1129 |
|
1131 | |||
1130 | if truncate is None: |
|
1132 | if truncate is None: | |
1131 | s = s.rstrip() |
|
1133 | s = s.rstrip() | |
1132 | else: |
|
1134 | else: | |
1133 | s = truncatef(s, truncate, whole_word=True) |
|
1135 | s = truncatef(s, truncate, whole_word=True) | |
1134 | s = html_escape(s) |
|
1136 | s = html_escape(s) | |
1135 | s = _urlify(s) |
|
1137 | s = _urlify(s) | |
1136 | if repo_name is not None: |
|
1138 | if repo_name is not None: | |
1137 | s = urlify_issues(s, repo_name) |
|
1139 | s = urlify_issues(s, repo_name) | |
1138 | if link_ is not None: |
|
1140 | if link_ is not None: | |
1139 | # make href around everything that isn't a href already |
|
1141 | # make href around everything that isn't a href already | |
1140 | s = linkify_others(s, link_) |
|
1142 | s = linkify_others(s, link_) | |
1141 | s = s.replace('\r\n', '<br/>').replace('\n', '<br/>') |
|
1143 | s = s.replace('\r\n', '<br/>').replace('\n', '<br/>') | |
1142 | # Turn HTML5 into more valid HTML4 as required by some mail readers. |
|
1144 | # Turn HTML5 into more valid HTML4 as required by some mail readers. | |
1143 | # (This is not done in one step in html_escape, because character codes like |
|
1145 | # (This is not done in one step in html_escape, because character codes like | |
1144 | # { risk to be seen as an issue reference due to the presence of '#'.) |
|
1146 | # { risk to be seen as an issue reference due to the presence of '#'.) | |
1145 | s = s.replace("'", "'") |
|
1147 | s = s.replace("'", "'") | |
1146 | return literal(s) |
|
1148 | return literal(s) | |
1147 |
|
1149 | |||
1148 |
|
1150 | |||
1149 | def linkify_others(t, l): |
|
1151 | def linkify_others(t, l): | |
1150 | """Add a default link to html with links. |
|
1152 | """Add a default link to html with links. | |
1151 | HTML doesn't allow nesting of links, so the outer link must be broken up |
|
1153 | HTML doesn't allow nesting of links, so the outer link must be broken up | |
1152 | in pieces and give space for other links. |
|
1154 | in pieces and give space for other links. | |
1153 | """ |
|
1155 | """ | |
1154 | urls = re.compile(r'(\<a.*?\<\/a\>)',) |
|
1156 | urls = re.compile(r'(\<a.*?\<\/a\>)',) | |
1155 | links = [] |
|
1157 | links = [] | |
1156 | for e in urls.split(t): |
|
1158 | for e in urls.split(t): | |
1157 | if e.strip() and not urls.match(e): |
|
1159 | if e.strip() and not urls.match(e): | |
1158 | links.append('<a class="message-link" href="%s">%s</a>' % (l, e)) |
|
1160 | links.append('<a class="message-link" href="%s">%s</a>' % (l, e)) | |
1159 | else: |
|
1161 | else: | |
1160 | links.append(e) |
|
1162 | links.append(e) | |
1161 |
|
1163 | |||
1162 | return ''.join(links) |
|
1164 | return ''.join(links) | |
1163 |
|
1165 | |||
1164 |
|
1166 | |||
1165 | # Global variable that will hold the actual urlify_issues function body. |
|
1167 | # Global variable that will hold the actual urlify_issues function body. | |
1166 | # Will be set on first use when the global configuration has been read. |
|
1168 | # Will be set on first use when the global configuration has been read. | |
1167 | _urlify_issues_f = None |
|
1169 | _urlify_issues_f = None | |
1168 |
|
1170 | |||
1169 |
|
1171 | |||
1170 | def urlify_issues(newtext, repo_name): |
|
1172 | def urlify_issues(newtext, repo_name): | |
1171 | """Urlify issue references according to .ini configuration""" |
|
1173 | """Urlify issue references according to .ini configuration""" | |
1172 | global _urlify_issues_f |
|
1174 | global _urlify_issues_f | |
1173 | if _urlify_issues_f is None: |
|
1175 | if _urlify_issues_f is None: | |
1174 | from kallithea import CONFIG |
|
1176 | from kallithea import CONFIG | |
1175 | from kallithea.model.db import URL_SEP |
|
1177 | from kallithea.model.db import URL_SEP | |
1176 | assert CONFIG['sqlalchemy.url'] # make sure config has been loaded |
|
1178 | assert CONFIG['sqlalchemy.url'] # make sure config has been loaded | |
1177 |
|
1179 | |||
1178 | # Build chain of urlify functions, starting with not doing any transformation |
|
1180 | # Build chain of urlify functions, starting with not doing any transformation | |
1179 |
tmp_urlify_issues_f |
|
1181 | def tmp_urlify_issues_f(s): | |
|
1182 | return s | |||
1180 |
|
1183 | |||
1181 | issue_pat_re = re.compile(r'issue_pat(.*)') |
|
1184 | issue_pat_re = re.compile(r'issue_pat(.*)') | |
1182 | for k in CONFIG: |
|
1185 | for k in CONFIG: | |
1183 | # Find all issue_pat* settings that also have corresponding server_link and prefix configuration |
|
1186 | # Find all issue_pat* settings that also have corresponding server_link and prefix configuration | |
1184 | m = issue_pat_re.match(k) |
|
1187 | m = issue_pat_re.match(k) | |
1185 | if m is None: |
|
1188 | if m is None: | |
1186 | continue |
|
1189 | continue | |
1187 | suffix = m.group(1) |
|
1190 | suffix = m.group(1) | |
1188 | issue_pat = CONFIG.get(k) |
|
1191 | issue_pat = CONFIG.get(k) | |
1189 | issue_server_link = CONFIG.get('issue_server_link%s' % suffix) |
|
1192 | issue_server_link = CONFIG.get('issue_server_link%s' % suffix) | |
1190 | issue_sub = CONFIG.get('issue_sub%s' % suffix) |
|
1193 | issue_sub = CONFIG.get('issue_sub%s' % suffix) | |
1191 | if not issue_pat or not issue_server_link or issue_sub is None: # issue_sub can be empty but should be present |
|
1194 | if not issue_pat or not issue_server_link or issue_sub is None: # issue_sub can be empty but should be present | |
1192 | log.error('skipping incomplete issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_sub) |
|
1195 | log.error('skipping incomplete issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_sub) | |
1193 | continue |
|
1196 | continue | |
1194 |
|
1197 | |||
1195 | # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexpes) are bound |
|
1198 | # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexpes) are bound | |
1196 | try: |
|
1199 | try: | |
1197 | issue_re = re.compile(issue_pat) |
|
1200 | issue_re = re.compile(issue_pat) | |
1198 | except re.error as e: |
|
1201 | except re.error as e: | |
1199 | log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', suffix, issue_pat, issue_server_link, issue_sub, str(e)) |
|
1202 | log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', suffix, issue_pat, issue_server_link, issue_sub, str(e)) | |
1200 | continue |
|
1203 | continue | |
1201 |
|
1204 | |||
1202 | log.debug('issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_sub) |
|
1205 | log.debug('issue pattern %r: %r -> %r %r', suffix, issue_pat, issue_server_link, issue_sub) | |
1203 |
|
1206 | |||
1204 | def issues_replace(match_obj, |
|
1207 | def issues_replace(match_obj, | |
1205 | issue_server_link=issue_server_link, issue_sub=issue_sub): |
|
1208 | issue_server_link=issue_server_link, issue_sub=issue_sub): | |
1206 | try: |
|
1209 | try: | |
1207 | issue_url = match_obj.expand(issue_server_link) |
|
1210 | issue_url = match_obj.expand(issue_server_link) | |
1208 | except (IndexError, re.error) as e: |
|
1211 | except (IndexError, re.error) as e: | |
1209 | log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e)) |
|
1212 | log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e)) | |
1210 | issue_url = issue_server_link |
|
1213 | issue_url = issue_server_link | |
1211 | issue_url = issue_url.replace('{repo}', repo_name) |
|
1214 | issue_url = issue_url.replace('{repo}', repo_name) | |
1212 | issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1]) |
|
1215 | issue_url = issue_url.replace('{repo_name}', repo_name.split(URL_SEP)[-1]) | |
1213 | # if issue_sub is empty use the matched issue reference verbatim |
|
1216 | # if issue_sub is empty use the matched issue reference verbatim | |
1214 | if not issue_sub: |
|
1217 | if not issue_sub: | |
1215 | issue_text = match_obj.group() |
|
1218 | issue_text = match_obj.group() | |
1216 | else: |
|
1219 | else: | |
1217 | try: |
|
1220 | try: | |
1218 | issue_text = match_obj.expand(issue_sub) |
|
1221 | issue_text = match_obj.expand(issue_sub) | |
1219 | except (IndexError, re.error) as e: |
|
1222 | except (IndexError, re.error) as e: | |
1220 | log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e)) |
|
1223 | log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e)) | |
1221 | issue_text = match_obj.group() |
|
1224 | issue_text = match_obj.group() | |
1222 |
|
1225 | |||
1223 | return ( |
|
1226 | return ( | |
1224 | '<a class="issue-tracker-link" href="%(url)s">' |
|
1227 | '<a class="issue-tracker-link" href="%(url)s">' | |
1225 | '%(text)s' |
|
1228 | '%(text)s' | |
1226 | '</a>' |
|
1229 | '</a>' | |
1227 | ) % { |
|
1230 | ) % { | |
1228 | 'url': issue_url, |
|
1231 | 'url': issue_url, | |
1229 | 'text': issue_text, |
|
1232 | 'text': issue_text, | |
1230 | } |
|
1233 | } | |
1231 | tmp_urlify_issues_f = (lambda s, |
|
1234 | ||
1232 |
|
|
1235 | def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f): | |
1233 |
|
|
1236 | return issue_re.sub(issues_replace, chain_f(s)) | |
1234 |
|
1237 | |||
1235 | # Set tmp function globally - atomically |
|
1238 | # Set tmp function globally - atomically | |
1236 | _urlify_issues_f = tmp_urlify_issues_f |
|
1239 | _urlify_issues_f = tmp_urlify_issues_f | |
1237 |
|
1240 | |||
1238 | return _urlify_issues_f(newtext) |
|
1241 | return _urlify_issues_f(newtext) | |
1239 |
|
1242 | |||
1240 |
|
1243 | |||
1241 | def render_w_mentions(source, repo_name=None): |
|
1244 | def render_w_mentions(source, repo_name=None): | |
1242 | """ |
|
1245 | """ | |
1243 | Render plain text with revision hashes and issue references urlified |
|
1246 | Render plain text with revision hashes and issue references urlified | |
1244 | and with @mention highlighting. |
|
1247 | and with @mention highlighting. | |
1245 | """ |
|
1248 | """ | |
1246 | s = safe_str(source) |
|
1249 | s = safe_str(source) | |
1247 | s = urlify_text(s, repo_name=repo_name) |
|
1250 | s = urlify_text(s, repo_name=repo_name) | |
1248 | return literal('<div class="formatted-fixed">%s</div>' % s) |
|
1251 | return literal('<div class="formatted-fixed">%s</div>' % s) | |
1249 |
|
1252 | |||
1250 |
|
1253 | |||
1251 | def short_ref(ref_type, ref_name): |
|
1254 | def short_ref(ref_type, ref_name): | |
1252 | if ref_type == 'rev': |
|
1255 | if ref_type == 'rev': | |
1253 | return short_id(ref_name) |
|
1256 | return short_id(ref_name) | |
1254 | return ref_name |
|
1257 | return ref_name | |
1255 |
|
1258 | |||
1256 |
|
1259 | |||
1257 | def link_to_ref(repo_name, ref_type, ref_name, rev=None): |
|
1260 | def link_to_ref(repo_name, ref_type, ref_name, rev=None): | |
1258 | """ |
|
1261 | """ | |
1259 | Return full markup for a href to changeset_home for a changeset. |
|
1262 | Return full markup for a href to changeset_home for a changeset. | |
1260 | If ref_type is branch it will link to changelog. |
|
1263 | If ref_type is branch it will link to changelog. | |
1261 | ref_name is shortened if ref_type is 'rev'. |
|
1264 | ref_name is shortened if ref_type is 'rev'. | |
1262 | if rev is specified show it too, explicitly linking to that revision. |
|
1265 | if rev is specified show it too, explicitly linking to that revision. | |
1263 | """ |
|
1266 | """ | |
1264 | txt = short_ref(ref_type, ref_name) |
|
1267 | txt = short_ref(ref_type, ref_name) | |
1265 | if ref_type == 'branch': |
|
1268 | if ref_type == 'branch': | |
1266 | u = url('changelog_home', repo_name=repo_name, branch=ref_name) |
|
1269 | u = url('changelog_home', repo_name=repo_name, branch=ref_name) | |
1267 | else: |
|
1270 | else: | |
1268 | u = url('changeset_home', repo_name=repo_name, revision=ref_name) |
|
1271 | u = url('changeset_home', repo_name=repo_name, revision=ref_name) | |
1269 | l = link_to(repo_name + '#' + txt, u) |
|
1272 | l = link_to(repo_name + '#' + txt, u) | |
1270 | if rev and ref_type != 'rev': |
|
1273 | if rev and ref_type != 'rev': | |
1271 | l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev)))) |
|
1274 | l = literal('%s (%s)' % (l, link_to(short_id(rev), url('changeset_home', repo_name=repo_name, revision=rev)))) | |
1272 | return l |
|
1275 | return l | |
1273 |
|
1276 | |||
1274 |
|
1277 | |||
1275 | def changeset_status(repo, revision): |
|
1278 | def changeset_status(repo, revision): | |
1276 | from kallithea.model.changeset_status import ChangesetStatusModel |
|
1279 | from kallithea.model.changeset_status import ChangesetStatusModel | |
1277 | return ChangesetStatusModel().get_status(repo, revision) |
|
1280 | return ChangesetStatusModel().get_status(repo, revision) | |
1278 |
|
1281 | |||
1279 |
|
1282 | |||
1280 | def changeset_status_lbl(changeset_status): |
|
1283 | def changeset_status_lbl(changeset_status): | |
1281 | from kallithea.model.db import ChangesetStatus |
|
1284 | from kallithea.model.db import ChangesetStatus | |
1282 | return ChangesetStatus.get_status_lbl(changeset_status) |
|
1285 | return ChangesetStatus.get_status_lbl(changeset_status) | |
1283 |
|
1286 | |||
1284 |
|
1287 | |||
1285 | def get_permission_name(key): |
|
1288 | def get_permission_name(key): | |
1286 | from kallithea.model.db import Permission |
|
1289 | from kallithea.model.db import Permission | |
1287 | return dict(Permission.PERMS).get(key) |
|
1290 | return dict(Permission.PERMS).get(key) | |
1288 |
|
1291 | |||
1289 |
|
1292 | |||
1290 | def journal_filter_help(): |
|
1293 | def journal_filter_help(): | |
1291 | return _(textwrap.dedent(''' |
|
1294 | return _(textwrap.dedent(''' | |
1292 | Example filter terms: |
|
1295 | Example filter terms: | |
1293 | repository:vcs |
|
1296 | repository:vcs | |
1294 | username:developer |
|
1297 | username:developer | |
1295 | action:*push* |
|
1298 | action:*push* | |
1296 | ip:127.0.0.1 |
|
1299 | ip:127.0.0.1 | |
1297 | date:20120101 |
|
1300 | date:20120101 | |
1298 | date:[20120101100000 TO 20120102] |
|
1301 | date:[20120101100000 TO 20120102] | |
1299 |
|
1302 | |||
1300 | Generate wildcards using '*' character: |
|
1303 | Generate wildcards using '*' character: | |
1301 | "repository:vcs*" - search everything starting with 'vcs' |
|
1304 | "repository:vcs*" - search everything starting with 'vcs' | |
1302 | "repository:*vcs*" - search for repository containing 'vcs' |
|
1305 | "repository:*vcs*" - search for repository containing 'vcs' | |
1303 |
|
1306 | |||
1304 | Optional AND / OR operators in queries |
|
1307 | Optional AND / OR operators in queries | |
1305 | "repository:vcs OR repository:test" |
|
1308 | "repository:vcs OR repository:test" | |
1306 | "username:test AND repository:test*" |
|
1309 | "username:test AND repository:test*" | |
1307 | ''')) |
|
1310 | ''')) | |
1308 |
|
1311 | |||
1309 |
|
1312 | |||
1310 | def not_mapped_error(repo_name): |
|
1313 | def not_mapped_error(repo_name): | |
1311 | flash(_('%s repository is not mapped to db perhaps' |
|
1314 | flash(_('%s repository is not mapped to db perhaps' | |
1312 | ' it was created or renamed from the filesystem' |
|
1315 | ' it was created or renamed from the filesystem' | |
1313 | ' please run the application again' |
|
1316 | ' please run the application again' | |
1314 | ' in order to rescan repositories') % repo_name, category='error') |
|
1317 | ' in order to rescan repositories') % repo_name, category='error') | |
1315 |
|
1318 | |||
1316 |
|
1319 | |||
1317 | def ip_range(ip_addr): |
|
1320 | def ip_range(ip_addr): | |
1318 | from kallithea.model.db import UserIpMap |
|
1321 | from kallithea.model.db import UserIpMap | |
1319 | s, e = UserIpMap._get_ip_range(ip_addr) |
|
1322 | s, e = UserIpMap._get_ip_range(ip_addr) | |
1320 | return '%s - %s' % (s, e) |
|
1323 | return '%s - %s' % (s, e) | |
1321 |
|
1324 | |||
1322 |
|
1325 | |||
1323 | session_csrf_secret_name = "_session_csrf_secret_token" |
|
1326 | session_csrf_secret_name = "_session_csrf_secret_token" | |
1324 |
|
1327 | |||
1325 | def session_csrf_secret_token(): |
|
1328 | def session_csrf_secret_token(): | |
1326 | """Return (and create) the current session's CSRF protection token.""" |
|
1329 | """Return (and create) the current session's CSRF protection token.""" | |
1327 | from tg import session |
|
1330 | from tg import session | |
1328 | if not session_csrf_secret_name in session: |
|
1331 | if not session_csrf_secret_name in session: | |
1329 | session[session_csrf_secret_name] = str(random.getrandbits(128)) |
|
1332 | session[session_csrf_secret_name] = str(random.getrandbits(128)) | |
1330 | session.save() |
|
1333 | session.save() | |
1331 | return session[session_csrf_secret_name] |
|
1334 | return session[session_csrf_secret_name] | |
1332 |
|
1335 | |||
1333 | def form(url, method="post", **attrs): |
|
1336 | def form(url, method="post", **attrs): | |
1334 | """Like webhelpers.html.tags.form , but automatically adding |
|
1337 | """Like webhelpers.html.tags.form , but automatically adding | |
1335 | session_csrf_secret_token for POST. The secret is thus never leaked in GET |
|
1338 | session_csrf_secret_token for POST. The secret is thus never leaked in GET | |
1336 | URLs. |
|
1339 | URLs. | |
1337 | """ |
|
1340 | """ | |
1338 | form = insecure_form(url, method, **attrs) |
|
1341 | form = insecure_form(url, method, **attrs) | |
1339 | if method.lower() == 'get': |
|
1342 | if method.lower() == 'get': | |
1340 | return form |
|
1343 | return form | |
1341 | return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;") |
|
1344 | return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;") |
@@ -1,454 +1,456 b'' | |||||
1 | # The code in this module is entirely lifted from the Lamson project |
|
1 | # The code in this module is entirely lifted from the Lamson project | |
2 | # (http://lamsonproject.org/). Its copyright is: |
|
2 | # (http://lamsonproject.org/). Its copyright is: | |
3 |
|
3 | |||
4 | # Copyright (c) 2008, Zed A. Shaw |
|
4 | # Copyright (c) 2008, Zed A. Shaw | |
5 | # All rights reserved. |
|
5 | # All rights reserved. | |
6 |
|
6 | |||
7 | # It is provided under this license: |
|
7 | # It is provided under this license: | |
8 |
|
8 | |||
9 | # Redistribution and use in source and binary forms, with or without |
|
9 | # Redistribution and use in source and binary forms, with or without | |
10 | # modification, are permitted provided that the following conditions are met: |
|
10 | # modification, are permitted provided that the following conditions are met: | |
11 |
|
11 | |||
12 | # * Redistributions of source code must retain the above copyright notice, this |
|
12 | # * Redistributions of source code must retain the above copyright notice, this | |
13 | # list of conditions and the following disclaimer. |
|
13 | # list of conditions and the following disclaimer. | |
14 |
|
14 | |||
15 | # * Redistributions in binary form must reproduce the above copyright notice, |
|
15 | # * Redistributions in binary form must reproduce the above copyright notice, | |
16 | # this list of conditions and the following disclaimer in the documentation |
|
16 | # this list of conditions and the following disclaimer in the documentation | |
17 | # and/or other materials provided with the distribution. |
|
17 | # and/or other materials provided with the distribution. | |
18 |
|
18 | |||
19 | # * Neither the name of the Zed A. Shaw nor the names of its contributors may |
|
19 | # * Neither the name of the Zed A. Shaw nor the names of its contributors may | |
20 | # be used to endorse or promote products derived from this software without |
|
20 | # be used to endorse or promote products derived from this software without | |
21 | # specific prior written permission. |
|
21 | # specific prior written permission. | |
22 |
|
22 | |||
23 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
|
23 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | |
24 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
|
24 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | |
25 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS |
|
25 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS | |
26 | # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE |
|
26 | # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE | |
27 | # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, |
|
27 | # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, | |
28 | # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES |
|
28 | # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | |
29 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR |
|
29 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR | |
30 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) |
|
30 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) | |
31 | # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, |
|
31 | # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, | |
32 | # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
|
32 | # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | |
33 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
|
33 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | |
34 | # POSSIBILITY OF SUCH DAMAGE. |
|
34 | # POSSIBILITY OF SUCH DAMAGE. | |
35 |
|
35 | |||
36 | import mimetypes |
|
36 | import mimetypes | |
37 | import os |
|
37 | import os | |
38 | import string |
|
38 | import string | |
39 | from email import encoders |
|
39 | from email import encoders | |
40 | from email.charset import Charset |
|
40 | from email.charset import Charset | |
41 | from email.mime.base import MIMEBase |
|
41 | from email.mime.base import MIMEBase | |
42 | from email.utils import parseaddr |
|
42 | from email.utils import parseaddr | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | ADDRESS_HEADERS_WHITELIST = ['From', 'To', 'Delivered-To', 'Cc'] |
|
45 | ADDRESS_HEADERS_WHITELIST = ['From', 'To', 'Delivered-To', 'Cc'] | |
46 | DEFAULT_ENCODING = "utf-8" |
|
46 | DEFAULT_ENCODING = "utf-8" | |
47 | VALUE_IS_EMAIL_ADDRESS = lambda v: '@' in v |
|
47 | ||
|
48 | def VALUE_IS_EMAIL_ADDRESS(v): | |||
|
49 | return '@' in v | |||
48 |
|
50 | |||
49 |
|
51 | |||
50 | def normalize_header(header): |
|
52 | def normalize_header(header): | |
51 | return string.capwords(header.lower(), '-') |
|
53 | return string.capwords(header.lower(), '-') | |
52 |
|
54 | |||
53 |
|
55 | |||
54 | class EncodingError(Exception): |
|
56 | class EncodingError(Exception): | |
55 | """Thrown when there is an encoding error.""" |
|
57 | """Thrown when there is an encoding error.""" | |
56 | pass |
|
58 | pass | |
57 |
|
59 | |||
58 |
|
60 | |||
59 | class MailBase(object): |
|
61 | class MailBase(object): | |
60 | """MailBase is used as the basis of lamson.mail and contains the basics of |
|
62 | """MailBase is used as the basis of lamson.mail and contains the basics of | |
61 | encoding an email. You actually can do all your email processing with this |
|
63 | encoding an email. You actually can do all your email processing with this | |
62 | class, but it's more raw. |
|
64 | class, but it's more raw. | |
63 | """ |
|
65 | """ | |
64 | def __init__(self, items=()): |
|
66 | def __init__(self, items=()): | |
65 | self.headers = dict(items) |
|
67 | self.headers = dict(items) | |
66 | self.parts = [] |
|
68 | self.parts = [] | |
67 | self.body = None |
|
69 | self.body = None | |
68 | self.content_encoding = {'Content-Type': (None, {}), |
|
70 | self.content_encoding = {'Content-Type': (None, {}), | |
69 | 'Content-Disposition': (None, {}), |
|
71 | 'Content-Disposition': (None, {}), | |
70 | 'Content-Transfer-Encoding': (None, {})} |
|
72 | 'Content-Transfer-Encoding': (None, {})} | |
71 |
|
73 | |||
72 | def __getitem__(self, key): |
|
74 | def __getitem__(self, key): | |
73 | return self.headers.get(normalize_header(key), None) |
|
75 | return self.headers.get(normalize_header(key), None) | |
74 |
|
76 | |||
75 | def __len__(self): |
|
77 | def __len__(self): | |
76 | return len(self.headers) |
|
78 | return len(self.headers) | |
77 |
|
79 | |||
78 | def __iter__(self): |
|
80 | def __iter__(self): | |
79 | return iter(self.headers) |
|
81 | return iter(self.headers) | |
80 |
|
82 | |||
81 | def __contains__(self, key): |
|
83 | def __contains__(self, key): | |
82 | return normalize_header(key) in self.headers |
|
84 | return normalize_header(key) in self.headers | |
83 |
|
85 | |||
84 | def __setitem__(self, key, value): |
|
86 | def __setitem__(self, key, value): | |
85 | self.headers[normalize_header(key)] = value |
|
87 | self.headers[normalize_header(key)] = value | |
86 |
|
88 | |||
87 | def __delitem__(self, key): |
|
89 | def __delitem__(self, key): | |
88 | del self.headers[normalize_header(key)] |
|
90 | del self.headers[normalize_header(key)] | |
89 |
|
91 | |||
90 | def __bool__(self): |
|
92 | def __bool__(self): | |
91 | return self.body is not None or len(self.headers) > 0 or len(self.parts) > 0 |
|
93 | return self.body is not None or len(self.headers) > 0 or len(self.parts) > 0 | |
92 |
|
94 | |||
93 | def keys(self): |
|
95 | def keys(self): | |
94 | """Returns the sorted keys.""" |
|
96 | """Returns the sorted keys.""" | |
95 | return sorted(self.headers.keys()) |
|
97 | return sorted(self.headers.keys()) | |
96 |
|
98 | |||
97 | def attach_file(self, filename, data, ctype, disposition): |
|
99 | def attach_file(self, filename, data, ctype, disposition): | |
98 | """ |
|
100 | """ | |
99 | A file attachment is a raw attachment with a disposition that |
|
101 | A file attachment is a raw attachment with a disposition that | |
100 | indicates the file name. |
|
102 | indicates the file name. | |
101 | """ |
|
103 | """ | |
102 | assert filename, "You can't attach a file without a filename." |
|
104 | assert filename, "You can't attach a file without a filename." | |
103 | ctype = ctype.lower() |
|
105 | ctype = ctype.lower() | |
104 |
|
106 | |||
105 | part = MailBase() |
|
107 | part = MailBase() | |
106 | part.body = data |
|
108 | part.body = data | |
107 | part.content_encoding['Content-Type'] = (ctype, {'name': filename}) |
|
109 | part.content_encoding['Content-Type'] = (ctype, {'name': filename}) | |
108 | part.content_encoding['Content-Disposition'] = (disposition, |
|
110 | part.content_encoding['Content-Disposition'] = (disposition, | |
109 | {'filename': filename}) |
|
111 | {'filename': filename}) | |
110 | self.parts.append(part) |
|
112 | self.parts.append(part) | |
111 |
|
113 | |||
112 | def attach_text(self, data, ctype): |
|
114 | def attach_text(self, data, ctype): | |
113 | """ |
|
115 | """ | |
114 | This attaches a simpler text encoded part, which doesn't have a |
|
116 | This attaches a simpler text encoded part, which doesn't have a | |
115 | filename. |
|
117 | filename. | |
116 | """ |
|
118 | """ | |
117 | ctype = ctype.lower() |
|
119 | ctype = ctype.lower() | |
118 |
|
120 | |||
119 | part = MailBase() |
|
121 | part = MailBase() | |
120 | part.body = data |
|
122 | part.body = data | |
121 | part.content_encoding['Content-Type'] = (ctype, {}) |
|
123 | part.content_encoding['Content-Type'] = (ctype, {}) | |
122 | self.parts.append(part) |
|
124 | self.parts.append(part) | |
123 |
|
125 | |||
124 | def walk(self): |
|
126 | def walk(self): | |
125 | for p in self.parts: |
|
127 | for p in self.parts: | |
126 | yield p |
|
128 | yield p | |
127 | for x in p.walk(): |
|
129 | for x in p.walk(): | |
128 | yield x |
|
130 | yield x | |
129 |
|
131 | |||
130 |
|
132 | |||
131 | class MailResponse(object): |
|
133 | class MailResponse(object): | |
132 | """ |
|
134 | """ | |
133 | You are given MailResponse objects from the lamson.view methods, and |
|
135 | You are given MailResponse objects from the lamson.view methods, and | |
134 | whenever you want to generate an email to send to someone. It has the |
|
136 | whenever you want to generate an email to send to someone. It has the | |
135 | same basic functionality as MailRequest, but it is designed to be written |
|
137 | same basic functionality as MailRequest, but it is designed to be written | |
136 | to, rather than read from (although you can do both). |
|
138 | to, rather than read from (although you can do both). | |
137 |
|
139 | |||
138 | You can easily set a Body or Html during creation or after by passing it |
|
140 | You can easily set a Body or Html during creation or after by passing it | |
139 | as __init__ parameters, or by setting those attributes. |
|
141 | as __init__ parameters, or by setting those attributes. | |
140 |
|
142 | |||
141 | You can initially set the From, To, and Subject, but they are headers so |
|
143 | You can initially set the From, To, and Subject, but they are headers so | |
142 | use the dict notation to change them: msg['From'] = 'joe@example.com'. |
|
144 | use the dict notation to change them: msg['From'] = 'joe@example.com'. | |
143 |
|
145 | |||
144 | The message is not fully crafted until right when you convert it with |
|
146 | The message is not fully crafted until right when you convert it with | |
145 | MailResponse.to_message. This lets you change it and work with it, then |
|
147 | MailResponse.to_message. This lets you change it and work with it, then | |
146 | send it out when it's ready. |
|
148 | send it out when it's ready. | |
147 | """ |
|
149 | """ | |
148 | def __init__(self, To=None, From=None, Subject=None, Body=None, Html=None, |
|
150 | def __init__(self, To=None, From=None, Subject=None, Body=None, Html=None, | |
149 | separator="; "): |
|
151 | separator="; "): | |
150 | self.Body = Body |
|
152 | self.Body = Body | |
151 | self.Html = Html |
|
153 | self.Html = Html | |
152 | self.base = MailBase([('To', To), ('From', From), ('Subject', Subject)]) |
|
154 | self.base = MailBase([('To', To), ('From', From), ('Subject', Subject)]) | |
153 | self.multipart = self.Body and self.Html |
|
155 | self.multipart = self.Body and self.Html | |
154 | self.attachments = [] |
|
156 | self.attachments = [] | |
155 | self.separator = separator |
|
157 | self.separator = separator | |
156 |
|
158 | |||
157 | def __contains__(self, key): |
|
159 | def __contains__(self, key): | |
158 | return self.base.__contains__(key) |
|
160 | return self.base.__contains__(key) | |
159 |
|
161 | |||
160 | def __getitem__(self, key): |
|
162 | def __getitem__(self, key): | |
161 | return self.base.__getitem__(key) |
|
163 | return self.base.__getitem__(key) | |
162 |
|
164 | |||
163 | def __setitem__(self, key, val): |
|
165 | def __setitem__(self, key, val): | |
164 | return self.base.__setitem__(key, val) |
|
166 | return self.base.__setitem__(key, val) | |
165 |
|
167 | |||
166 | def __delitem__(self, name): |
|
168 | def __delitem__(self, name): | |
167 | del self.base[name] |
|
169 | del self.base[name] | |
168 |
|
170 | |||
169 | def attach(self, filename=None, content_type=None, data=None, |
|
171 | def attach(self, filename=None, content_type=None, data=None, | |
170 | disposition=None): |
|
172 | disposition=None): | |
171 | """ |
|
173 | """ | |
172 |
|
174 | |||
173 | Simplifies attaching files from disk or data as files. To attach |
|
175 | Simplifies attaching files from disk or data as files. To attach | |
174 | simple text simple give data and a content_type. To attach a file, |
|
176 | simple text simple give data and a content_type. To attach a file, | |
175 | give the data/content_type/filename/disposition combination. |
|
177 | give the data/content_type/filename/disposition combination. | |
176 |
|
178 | |||
177 | For convenience, if you don't give data and only a filename, then it |
|
179 | For convenience, if you don't give data and only a filename, then it | |
178 | will read that file's contents when you call to_message() later. If |
|
180 | will read that file's contents when you call to_message() later. If | |
179 | you give data and filename then it will assume you've filled data |
|
181 | you give data and filename then it will assume you've filled data | |
180 | with what the file's contents are and filename is just the name to |
|
182 | with what the file's contents are and filename is just the name to | |
181 | use. |
|
183 | use. | |
182 | """ |
|
184 | """ | |
183 |
|
185 | |||
184 | assert filename or data, ("You must give a filename or some data to " |
|
186 | assert filename or data, ("You must give a filename or some data to " | |
185 | "attach.") |
|
187 | "attach.") | |
186 | assert data or os.path.exists(filename), ("File doesn't exist, and no " |
|
188 | assert data or os.path.exists(filename), ("File doesn't exist, and no " | |
187 | "data given.") |
|
189 | "data given.") | |
188 |
|
190 | |||
189 | self.multipart = True |
|
191 | self.multipart = True | |
190 |
|
192 | |||
191 | if filename and not content_type: |
|
193 | if filename and not content_type: | |
192 | content_type, encoding = mimetypes.guess_type(filename) |
|
194 | content_type, encoding = mimetypes.guess_type(filename) | |
193 |
|
195 | |||
194 | assert content_type, ("No content type given, and couldn't guess " |
|
196 | assert content_type, ("No content type given, and couldn't guess " | |
195 | "from the filename: %r" % filename) |
|
197 | "from the filename: %r" % filename) | |
196 |
|
198 | |||
197 | self.attachments.append({'filename': filename, |
|
199 | self.attachments.append({'filename': filename, | |
198 | 'content_type': content_type, |
|
200 | 'content_type': content_type, | |
199 | 'data': data, |
|
201 | 'data': data, | |
200 | 'disposition': disposition}) |
|
202 | 'disposition': disposition}) | |
201 |
|
203 | |||
202 | def attach_part(self, part): |
|
204 | def attach_part(self, part): | |
203 | """ |
|
205 | """ | |
204 | Attaches a raw MailBase part from a MailRequest (or anywhere) |
|
206 | Attaches a raw MailBase part from a MailRequest (or anywhere) | |
205 | so that you can copy it over. |
|
207 | so that you can copy it over. | |
206 | """ |
|
208 | """ | |
207 | self.multipart = True |
|
209 | self.multipart = True | |
208 |
|
210 | |||
209 | self.attachments.append({'filename': None, |
|
211 | self.attachments.append({'filename': None, | |
210 | 'content_type': None, |
|
212 | 'content_type': None, | |
211 | 'data': None, |
|
213 | 'data': None, | |
212 | 'disposition': None, |
|
214 | 'disposition': None, | |
213 | 'part': part, |
|
215 | 'part': part, | |
214 | }) |
|
216 | }) | |
215 |
|
217 | |||
216 | def attach_all_parts(self, mail_request): |
|
218 | def attach_all_parts(self, mail_request): | |
217 | """ |
|
219 | """ | |
218 | Used for copying the attachment parts of a mail.MailRequest |
|
220 | Used for copying the attachment parts of a mail.MailRequest | |
219 | object for mailing lists that need to maintain attachments. |
|
221 | object for mailing lists that need to maintain attachments. | |
220 | """ |
|
222 | """ | |
221 | for part in mail_request.all_parts(): |
|
223 | for part in mail_request.all_parts(): | |
222 | self.attach_part(part) |
|
224 | self.attach_part(part) | |
223 |
|
225 | |||
224 | self.base.content_encoding = mail_request.base.content_encoding.copy() |
|
226 | self.base.content_encoding = mail_request.base.content_encoding.copy() | |
225 |
|
227 | |||
226 | def clear(self): |
|
228 | def clear(self): | |
227 | """ |
|
229 | """ | |
228 | Clears out the attachments so you can redo them. Use this to keep the |
|
230 | Clears out the attachments so you can redo them. Use this to keep the | |
229 | headers for a series of different messages with different attachments. |
|
231 | headers for a series of different messages with different attachments. | |
230 | """ |
|
232 | """ | |
231 | del self.attachments[:] |
|
233 | del self.attachments[:] | |
232 | del self.base.parts[:] |
|
234 | del self.base.parts[:] | |
233 | self.multipart = False |
|
235 | self.multipart = False | |
234 |
|
236 | |||
235 | def update(self, message): |
|
237 | def update(self, message): | |
236 | """ |
|
238 | """ | |
237 | Used to easily set a bunch of heading from another dict |
|
239 | Used to easily set a bunch of heading from another dict | |
238 | like object. |
|
240 | like object. | |
239 | """ |
|
241 | """ | |
240 | for k in message.keys(): |
|
242 | for k in message.keys(): | |
241 | self.base[k] = message[k] |
|
243 | self.base[k] = message[k] | |
242 |
|
244 | |||
243 | def __str__(self): |
|
245 | def __str__(self): | |
244 | """ |
|
246 | """ | |
245 | Converts to a string. |
|
247 | Converts to a string. | |
246 | """ |
|
248 | """ | |
247 | return self.to_message().as_string() |
|
249 | return self.to_message().as_string() | |
248 |
|
250 | |||
249 | def _encode_attachment(self, filename=None, content_type=None, data=None, |
|
251 | def _encode_attachment(self, filename=None, content_type=None, data=None, | |
250 | disposition=None, part=None): |
|
252 | disposition=None, part=None): | |
251 | """ |
|
253 | """ | |
252 | Used internally to take the attachments mentioned in self.attachments |
|
254 | Used internally to take the attachments mentioned in self.attachments | |
253 | and do the actual encoding in a lazy way when you call to_message. |
|
255 | and do the actual encoding in a lazy way when you call to_message. | |
254 | """ |
|
256 | """ | |
255 | if part: |
|
257 | if part: | |
256 | self.base.parts.append(part) |
|
258 | self.base.parts.append(part) | |
257 | elif filename: |
|
259 | elif filename: | |
258 | if not data: |
|
260 | if not data: | |
259 | data = open(filename).read() |
|
261 | data = open(filename).read() | |
260 |
|
262 | |||
261 | self.base.attach_file(filename, data, content_type, |
|
263 | self.base.attach_file(filename, data, content_type, | |
262 | disposition or 'attachment') |
|
264 | disposition or 'attachment') | |
263 | else: |
|
265 | else: | |
264 | self.base.attach_text(data, content_type) |
|
266 | self.base.attach_text(data, content_type) | |
265 |
|
267 | |||
266 | ctype = self.base.content_encoding['Content-Type'][0] |
|
268 | ctype = self.base.content_encoding['Content-Type'][0] | |
267 |
|
269 | |||
268 | if ctype and not ctype.startswith('multipart'): |
|
270 | if ctype and not ctype.startswith('multipart'): | |
269 | self.base.content_encoding['Content-Type'] = ('multipart/mixed', {}) |
|
271 | self.base.content_encoding['Content-Type'] = ('multipart/mixed', {}) | |
270 |
|
272 | |||
271 | def to_message(self): |
|
273 | def to_message(self): | |
272 | """ |
|
274 | """ | |
273 | Figures out all the required steps to finally craft the |
|
275 | Figures out all the required steps to finally craft the | |
274 | message you need and return it. The resulting message |
|
276 | message you need and return it. The resulting message | |
275 | is also available as a self.base attribute. |
|
277 | is also available as a self.base attribute. | |
276 |
|
278 | |||
277 | What is returned is a Python email API message you can |
|
279 | What is returned is a Python email API message you can | |
278 | use with those APIs. The self.base attribute is the raw |
|
280 | use with those APIs. The self.base attribute is the raw | |
279 | lamson.encoding.MailBase. |
|
281 | lamson.encoding.MailBase. | |
280 | """ |
|
282 | """ | |
281 | del self.base.parts[:] |
|
283 | del self.base.parts[:] | |
282 |
|
284 | |||
283 | if self.Body and self.Html: |
|
285 | if self.Body and self.Html: | |
284 | self.multipart = True |
|
286 | self.multipart = True | |
285 | self.base.content_encoding['Content-Type'] = ( |
|
287 | self.base.content_encoding['Content-Type'] = ( | |
286 | 'multipart/alternative', {}) |
|
288 | 'multipart/alternative', {}) | |
287 |
|
289 | |||
288 | if self.multipart: |
|
290 | if self.multipart: | |
289 | self.base.body = None |
|
291 | self.base.body = None | |
290 | if self.Body: |
|
292 | if self.Body: | |
291 | self.base.attach_text(self.Body, 'text/plain') |
|
293 | self.base.attach_text(self.Body, 'text/plain') | |
292 |
|
294 | |||
293 | if self.Html: |
|
295 | if self.Html: | |
294 | self.base.attach_text(self.Html, 'text/html') |
|
296 | self.base.attach_text(self.Html, 'text/html') | |
295 |
|
297 | |||
296 | for args in self.attachments: |
|
298 | for args in self.attachments: | |
297 | self._encode_attachment(**args) |
|
299 | self._encode_attachment(**args) | |
298 |
|
300 | |||
299 | elif self.Body: |
|
301 | elif self.Body: | |
300 | self.base.body = self.Body |
|
302 | self.base.body = self.Body | |
301 | self.base.content_encoding['Content-Type'] = ('text/plain', {}) |
|
303 | self.base.content_encoding['Content-Type'] = ('text/plain', {}) | |
302 |
|
304 | |||
303 | elif self.Html: |
|
305 | elif self.Html: | |
304 | self.base.body = self.Html |
|
306 | self.base.body = self.Html | |
305 | self.base.content_encoding['Content-Type'] = ('text/html', {}) |
|
307 | self.base.content_encoding['Content-Type'] = ('text/html', {}) | |
306 |
|
308 | |||
307 | return to_message(self.base, separator=self.separator) |
|
309 | return to_message(self.base, separator=self.separator) | |
308 |
|
310 | |||
309 | def all_parts(self): |
|
311 | def all_parts(self): | |
310 | """ |
|
312 | """ | |
311 | Returns all the encoded parts. Only useful for debugging |
|
313 | Returns all the encoded parts. Only useful for debugging | |
312 | or inspecting after calling to_message(). |
|
314 | or inspecting after calling to_message(). | |
313 | """ |
|
315 | """ | |
314 | return self.base.parts |
|
316 | return self.base.parts | |
315 |
|
317 | |||
316 | def keys(self): |
|
318 | def keys(self): | |
317 | return self.base.keys() |
|
319 | return self.base.keys() | |
318 |
|
320 | |||
319 |
|
321 | |||
320 | def to_message(mail, separator="; "): |
|
322 | def to_message(mail, separator="; "): | |
321 | """ |
|
323 | """ | |
322 | Given a MailBase message, this will construct a MIMEPart |
|
324 | Given a MailBase message, this will construct a MIMEPart | |
323 | that is canonicalized for use with the Python email API. |
|
325 | that is canonicalized for use with the Python email API. | |
324 | """ |
|
326 | """ | |
325 | ctype, params = mail.content_encoding['Content-Type'] |
|
327 | ctype, params = mail.content_encoding['Content-Type'] | |
326 |
|
328 | |||
327 | if not ctype: |
|
329 | if not ctype: | |
328 | if mail.parts: |
|
330 | if mail.parts: | |
329 | ctype = 'multipart/mixed' |
|
331 | ctype = 'multipart/mixed' | |
330 | else: |
|
332 | else: | |
331 | ctype = 'text/plain' |
|
333 | ctype = 'text/plain' | |
332 | else: |
|
334 | else: | |
333 | if mail.parts: |
|
335 | if mail.parts: | |
334 | assert ctype.startswith(("multipart", "message")), \ |
|
336 | assert ctype.startswith(("multipart", "message")), \ | |
335 | "Content type should be multipart or message, not %r" % ctype |
|
337 | "Content type should be multipart or message, not %r" % ctype | |
336 |
|
338 | |||
337 | # adjust the content type according to what it should be now |
|
339 | # adjust the content type according to what it should be now | |
338 | mail.content_encoding['Content-Type'] = (ctype, params) |
|
340 | mail.content_encoding['Content-Type'] = (ctype, params) | |
339 |
|
341 | |||
340 | try: |
|
342 | try: | |
341 | out = MIMEPart(ctype, **params) |
|
343 | out = MIMEPart(ctype, **params) | |
342 | except TypeError as e: # pragma: no cover |
|
344 | except TypeError as e: # pragma: no cover | |
343 | raise EncodingError("Content-Type malformed, not allowed: %r; " |
|
345 | raise EncodingError("Content-Type malformed, not allowed: %r; " | |
344 | "%r (Python ERROR: %s)" % |
|
346 | "%r (Python ERROR: %s)" % | |
345 | (ctype, params, e.args[0])) |
|
347 | (ctype, params, e.args[0])) | |
346 |
|
348 | |||
347 | for k in mail.keys(): |
|
349 | for k in mail.keys(): | |
348 | if k in ADDRESS_HEADERS_WHITELIST: |
|
350 | if k in ADDRESS_HEADERS_WHITELIST: | |
349 | out[k] = header_to_mime_encoding( |
|
351 | out[k] = header_to_mime_encoding( | |
350 | mail[k], |
|
352 | mail[k], | |
351 | not_email=False, |
|
353 | not_email=False, | |
352 | separator=separator |
|
354 | separator=separator | |
353 | ) |
|
355 | ) | |
354 | else: |
|
356 | else: | |
355 | out[k] = header_to_mime_encoding( |
|
357 | out[k] = header_to_mime_encoding( | |
356 | mail[k], |
|
358 | mail[k], | |
357 | not_email=True |
|
359 | not_email=True | |
358 | ) |
|
360 | ) | |
359 |
|
361 | |||
360 | out.extract_payload(mail) |
|
362 | out.extract_payload(mail) | |
361 |
|
363 | |||
362 | # go through the children |
|
364 | # go through the children | |
363 | for part in mail.parts: |
|
365 | for part in mail.parts: | |
364 | out.attach(to_message(part)) |
|
366 | out.attach(to_message(part)) | |
365 |
|
367 | |||
366 | return out |
|
368 | return out | |
367 |
|
369 | |||
368 |
|
370 | |||
369 | class MIMEPart(MIMEBase): |
|
371 | class MIMEPart(MIMEBase): | |
370 | """ |
|
372 | """ | |
371 | A reimplementation of nearly everything in email.mime to be more useful |
|
373 | A reimplementation of nearly everything in email.mime to be more useful | |
372 | for actually attaching things. Rather than one class for every type of |
|
374 | for actually attaching things. Rather than one class for every type of | |
373 | thing you'd encode, there's just this one, and it figures out how to |
|
375 | thing you'd encode, there's just this one, and it figures out how to | |
374 | encode what you ask it. |
|
376 | encode what you ask it. | |
375 | """ |
|
377 | """ | |
376 | def __init__(self, type, **params): |
|
378 | def __init__(self, type, **params): | |
377 | self.maintype, self.subtype = type.split('/') |
|
379 | self.maintype, self.subtype = type.split('/') | |
378 | MIMEBase.__init__(self, self.maintype, self.subtype, **params) |
|
380 | MIMEBase.__init__(self, self.maintype, self.subtype, **params) | |
379 |
|
381 | |||
380 | def add_text(self, content): |
|
382 | def add_text(self, content): | |
381 | # this is text, so encode it in canonical form |
|
383 | # this is text, so encode it in canonical form | |
382 | try: |
|
384 | try: | |
383 | encoded = content.encode('ascii') |
|
385 | encoded = content.encode('ascii') | |
384 | charset = 'ascii' |
|
386 | charset = 'ascii' | |
385 | except UnicodeError: |
|
387 | except UnicodeError: | |
386 | encoded = content.encode('utf-8') |
|
388 | encoded = content.encode('utf-8') | |
387 | charset = 'utf-8' |
|
389 | charset = 'utf-8' | |
388 |
|
390 | |||
389 | self.set_payload(encoded, charset=charset) |
|
391 | self.set_payload(encoded, charset=charset) | |
390 |
|
392 | |||
391 | def extract_payload(self, mail): |
|
393 | def extract_payload(self, mail): | |
392 | if mail.body is None: |
|
394 | if mail.body is None: | |
393 | return # only None, '' is still ok |
|
395 | return # only None, '' is still ok | |
394 |
|
396 | |||
395 | ctype, _ctype_params = mail.content_encoding['Content-Type'] |
|
397 | ctype, _ctype_params = mail.content_encoding['Content-Type'] | |
396 | cdisp, cdisp_params = mail.content_encoding['Content-Disposition'] |
|
398 | cdisp, cdisp_params = mail.content_encoding['Content-Disposition'] | |
397 |
|
399 | |||
398 | assert ctype, ("Extract payload requires that mail.content_encoding " |
|
400 | assert ctype, ("Extract payload requires that mail.content_encoding " | |
399 | "have a valid Content-Type.") |
|
401 | "have a valid Content-Type.") | |
400 |
|
402 | |||
401 | if ctype.startswith("text/"): |
|
403 | if ctype.startswith("text/"): | |
402 | self.add_text(mail.body) |
|
404 | self.add_text(mail.body) | |
403 | else: |
|
405 | else: | |
404 | if cdisp: |
|
406 | if cdisp: | |
405 | # replicate the content-disposition settings |
|
407 | # replicate the content-disposition settings | |
406 | self.add_header('Content-Disposition', cdisp, **cdisp_params) |
|
408 | self.add_header('Content-Disposition', cdisp, **cdisp_params) | |
407 |
|
409 | |||
408 | self.set_payload(mail.body) |
|
410 | self.set_payload(mail.body) | |
409 | encoders.encode_base64(self) |
|
411 | encoders.encode_base64(self) | |
410 |
|
412 | |||
411 | def __repr__(self): |
|
413 | def __repr__(self): | |
412 | return "<MIMEPart '%s/%s': %r, %r, multipart=%r>" % ( |
|
414 | return "<MIMEPart '%s/%s': %r, %r, multipart=%r>" % ( | |
413 | self.subtype, |
|
415 | self.subtype, | |
414 | self.maintype, |
|
416 | self.maintype, | |
415 | self['Content-Type'], |
|
417 | self['Content-Type'], | |
416 | self['Content-Disposition'], |
|
418 | self['Content-Disposition'], | |
417 | self.is_multipart()) |
|
419 | self.is_multipart()) | |
418 |
|
420 | |||
419 |
|
421 | |||
420 | def header_to_mime_encoding(value, not_email=False, separator=", "): |
|
422 | def header_to_mime_encoding(value, not_email=False, separator=", "): | |
421 | if not value: |
|
423 | if not value: | |
422 | return "" |
|
424 | return "" | |
423 |
|
425 | |||
424 | encoder = Charset(DEFAULT_ENCODING) |
|
426 | encoder = Charset(DEFAULT_ENCODING) | |
425 | if isinstance(value, list): |
|
427 | if isinstance(value, list): | |
426 | return separator.join(properly_encode_header( |
|
428 | return separator.join(properly_encode_header( | |
427 | v, encoder, not_email) for v in value) |
|
429 | v, encoder, not_email) for v in value) | |
428 | else: |
|
430 | else: | |
429 | return properly_encode_header(value, encoder, not_email) |
|
431 | return properly_encode_header(value, encoder, not_email) | |
430 |
|
432 | |||
431 |
|
433 | |||
432 | def properly_encode_header(value, encoder, not_email): |
|
434 | def properly_encode_header(value, encoder, not_email): | |
433 | """ |
|
435 | """ | |
434 | The only thing special (weird) about this function is that it tries |
|
436 | The only thing special (weird) about this function is that it tries | |
435 | to do a fast check to see if the header value has an email address in |
|
437 | to do a fast check to see if the header value has an email address in | |
436 | it. Since random headers could have an email address, and email addresses |
|
438 | it. Since random headers could have an email address, and email addresses | |
437 | have weird special formatting rules, we have to check for it. |
|
439 | have weird special formatting rules, we have to check for it. | |
438 |
|
440 | |||
439 | Normally this works fine, but in Librelist, we need to "obfuscate" email |
|
441 | Normally this works fine, but in Librelist, we need to "obfuscate" email | |
440 | addresses by changing the '@' to '-AT-'. This is where |
|
442 | addresses by changing the '@' to '-AT-'. This is where | |
441 | VALUE_IS_EMAIL_ADDRESS exists. It's a simple lambda returning True/False |
|
443 | VALUE_IS_EMAIL_ADDRESS exists. It's a simple lambda returning True/False | |
442 | to check if a header value has an email address. If you need to make this |
|
444 | to check if a header value has an email address. If you need to make this | |
443 | check different, then change this. |
|
445 | check different, then change this. | |
444 | """ |
|
446 | """ | |
445 | try: |
|
447 | try: | |
446 | value.encode("ascii") |
|
448 | value.encode("ascii") | |
447 | return value |
|
449 | return value | |
448 | except UnicodeError: |
|
450 | except UnicodeError: | |
449 | if not not_email and VALUE_IS_EMAIL_ADDRESS(value): |
|
451 | if not not_email and VALUE_IS_EMAIL_ADDRESS(value): | |
450 | # this could have an email address, make sure we don't screw it up |
|
452 | # this could have an email address, make sure we don't screw it up | |
451 | name, address = parseaddr(value) |
|
453 | name, address = parseaddr(value) | |
452 | return '"%s" <%s>' % (encoder.header_encode(name), address) |
|
454 | return '"%s" <%s>' % (encoder.header_encode(name), address) | |
453 |
|
455 | |||
454 | return encoder.header_encode(value) |
|
456 | return encoder.header_encode(value) |
@@ -1,541 +1,541 b'' | |||||
1 | import re |
|
1 | import re | |
2 | from io import BytesIO |
|
2 | from io import BytesIO | |
3 | from itertools import chain |
|
3 | from itertools import chain | |
4 | from subprocess import PIPE, Popen |
|
4 | from subprocess import PIPE, Popen | |
5 |
|
5 | |||
6 | from dulwich import objects |
|
6 | from dulwich import objects | |
7 | from dulwich.config import ConfigFile |
|
7 | from dulwich.config import ConfigFile | |
8 |
|
8 | |||
9 | from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset |
|
9 | from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset | |
10 | from kallithea.lib.vcs.conf import settings |
|
10 | from kallithea.lib.vcs.conf import settings | |
11 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, RepositoryError, VCSError |
|
11 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, ChangesetError, ImproperArchiveTypeError, NodeDoesNotExistError, RepositoryError, VCSError | |
12 | from kallithea.lib.vcs.nodes import ( |
|
12 | from kallithea.lib.vcs.nodes import ( | |
13 | AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode) |
|
13 | AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode, NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode) | |
14 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_int, safe_str |
|
14 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, date_fromtimestamp, safe_int, safe_str | |
15 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
15 | from kallithea.lib.vcs.utils.lazy import LazyProperty | |
16 |
|
16 | |||
17 |
|
17 | |||
18 | class GitChangeset(BaseChangeset): |
|
18 | class GitChangeset(BaseChangeset): | |
19 | """ |
|
19 | """ | |
20 | Represents state of the repository at a revision. |
|
20 | Represents state of the repository at a revision. | |
21 | """ |
|
21 | """ | |
22 |
|
22 | |||
23 | def __init__(self, repository, revision): |
|
23 | def __init__(self, repository, revision): | |
24 | self._stat_modes = {} |
|
24 | self._stat_modes = {} | |
25 | self.repository = repository |
|
25 | self.repository = repository | |
26 | try: |
|
26 | try: | |
27 | commit = self.repository._repo[ascii_bytes(revision)] |
|
27 | commit = self.repository._repo[ascii_bytes(revision)] | |
28 | if isinstance(commit, objects.Tag): |
|
28 | if isinstance(commit, objects.Tag): | |
29 | revision = safe_str(commit.object[1]) |
|
29 | revision = safe_str(commit.object[1]) | |
30 | commit = self.repository._repo.get_object(commit.object[1]) |
|
30 | commit = self.repository._repo.get_object(commit.object[1]) | |
31 | except KeyError: |
|
31 | except KeyError: | |
32 | raise RepositoryError("Cannot get object with id %s" % revision) |
|
32 | raise RepositoryError("Cannot get object with id %s" % revision) | |
33 | self.raw_id = ascii_str(commit.id) |
|
33 | self.raw_id = ascii_str(commit.id) | |
34 | self.short_id = self.raw_id[:12] |
|
34 | self.short_id = self.raw_id[:12] | |
35 | self._commit = commit # a Dulwich Commmit with .id |
|
35 | self._commit = commit # a Dulwich Commmit with .id | |
36 | self._tree_id = commit.tree |
|
36 | self._tree_id = commit.tree | |
37 | self._committer_property = 'committer' |
|
37 | self._committer_property = 'committer' | |
38 | self._author_property = 'author' |
|
38 | self._author_property = 'author' | |
39 | self._date_property = 'commit_time' |
|
39 | self._date_property = 'commit_time' | |
40 | self._date_tz_property = 'commit_timezone' |
|
40 | self._date_tz_property = 'commit_timezone' | |
41 | self.revision = repository.revisions.index(self.raw_id) |
|
41 | self.revision = repository.revisions.index(self.raw_id) | |
42 |
|
42 | |||
43 | self.nodes = {} |
|
43 | self.nodes = {} | |
44 | self._paths = {} |
|
44 | self._paths = {} | |
45 |
|
45 | |||
46 | @LazyProperty |
|
46 | @LazyProperty | |
47 | def bookmarks(self): |
|
47 | def bookmarks(self): | |
48 | return () |
|
48 | return () | |
49 |
|
49 | |||
50 | @LazyProperty |
|
50 | @LazyProperty | |
51 | def message(self): |
|
51 | def message(self): | |
52 | return safe_str(self._commit.message) |
|
52 | return safe_str(self._commit.message) | |
53 |
|
53 | |||
54 | @LazyProperty |
|
54 | @LazyProperty | |
55 | def committer(self): |
|
55 | def committer(self): | |
56 | return safe_str(getattr(self._commit, self._committer_property)) |
|
56 | return safe_str(getattr(self._commit, self._committer_property)) | |
57 |
|
57 | |||
58 | @LazyProperty |
|
58 | @LazyProperty | |
59 | def author(self): |
|
59 | def author(self): | |
60 | return safe_str(getattr(self._commit, self._author_property)) |
|
60 | return safe_str(getattr(self._commit, self._author_property)) | |
61 |
|
61 | |||
62 | @LazyProperty |
|
62 | @LazyProperty | |
63 | def date(self): |
|
63 | def date(self): | |
64 | return date_fromtimestamp(getattr(self._commit, self._date_property), |
|
64 | return date_fromtimestamp(getattr(self._commit, self._date_property), | |
65 | getattr(self._commit, self._date_tz_property)) |
|
65 | getattr(self._commit, self._date_tz_property)) | |
66 |
|
66 | |||
67 | @LazyProperty |
|
67 | @LazyProperty | |
68 | def _timestamp(self): |
|
68 | def _timestamp(self): | |
69 | return getattr(self._commit, self._date_property) |
|
69 | return getattr(self._commit, self._date_property) | |
70 |
|
70 | |||
71 | @LazyProperty |
|
71 | @LazyProperty | |
72 | def status(self): |
|
72 | def status(self): | |
73 | """ |
|
73 | """ | |
74 | Returns modified, added, removed, deleted files for current changeset |
|
74 | Returns modified, added, removed, deleted files for current changeset | |
75 | """ |
|
75 | """ | |
76 | return self.changed, self.added, self.removed |
|
76 | return self.changed, self.added, self.removed | |
77 |
|
77 | |||
78 | @LazyProperty |
|
78 | @LazyProperty | |
79 | def tags(self): |
|
79 | def tags(self): | |
80 | _tags = [] |
|
80 | _tags = [] | |
81 | for tname, tsha in self.repository.tags.items(): |
|
81 | for tname, tsha in self.repository.tags.items(): | |
82 | if tsha == self.raw_id: |
|
82 | if tsha == self.raw_id: | |
83 | _tags.append(tname) |
|
83 | _tags.append(tname) | |
84 | return _tags |
|
84 | return _tags | |
85 |
|
85 | |||
86 | @LazyProperty |
|
86 | @LazyProperty | |
87 | def branch(self): |
|
87 | def branch(self): | |
88 | # Note: This function will return one branch name for the changeset - |
|
88 | # Note: This function will return one branch name for the changeset - | |
89 | # that might not make sense in Git where branches() is a better match |
|
89 | # that might not make sense in Git where branches() is a better match | |
90 | # for the basic model |
|
90 | # for the basic model | |
91 | heads = self.repository._heads(reverse=False) |
|
91 | heads = self.repository._heads(reverse=False) | |
92 | ref = heads.get(self._commit.id) |
|
92 | ref = heads.get(self._commit.id) | |
93 | if ref: |
|
93 | if ref: | |
94 | return safe_str(ref) |
|
94 | return safe_str(ref) | |
95 |
|
95 | |||
96 | @LazyProperty |
|
96 | @LazyProperty | |
97 | def branches(self): |
|
97 | def branches(self): | |
98 | heads = self.repository._heads(reverse=True) |
|
98 | heads = self.repository._heads(reverse=True) | |
99 | return [safe_str(b) for b in heads if heads[b] == self._commit.id] # FIXME: Inefficient ... and returning None! |
|
99 | return [safe_str(b) for b in heads if heads[b] == self._commit.id] # FIXME: Inefficient ... and returning None! | |
100 |
|
100 | |||
101 | def _get_id_for_path(self, path): |
|
101 | def _get_id_for_path(self, path): | |
102 | # FIXME: Please, spare a couple of minutes and make those codes cleaner; |
|
102 | # FIXME: Please, spare a couple of minutes and make those codes cleaner; | |
103 | if path not in self._paths: |
|
103 | if path not in self._paths: | |
104 | path = path.strip('/') |
|
104 | path = path.strip('/') | |
105 | # set root tree |
|
105 | # set root tree | |
106 | tree = self.repository._repo[self._tree_id] |
|
106 | tree = self.repository._repo[self._tree_id] | |
107 | if path == '': |
|
107 | if path == '': | |
108 | self._paths[''] = tree.id |
|
108 | self._paths[''] = tree.id | |
109 | return tree.id |
|
109 | return tree.id | |
110 | splitted = path.split('/') |
|
110 | splitted = path.split('/') | |
111 | dirs, name = splitted[:-1], splitted[-1] |
|
111 | dirs, name = splitted[:-1], splitted[-1] | |
112 | curdir = '' |
|
112 | curdir = '' | |
113 |
|
113 | |||
114 | # initially extract things from root dir |
|
114 | # initially extract things from root dir | |
115 | for item, stat, id in tree.items(): |
|
115 | for item, stat, id in tree.items(): | |
116 | name = safe_str(item) |
|
116 | name = safe_str(item) | |
117 | if curdir: |
|
117 | if curdir: | |
118 | name = '/'.join((curdir, name)) |
|
118 | name = '/'.join((curdir, name)) | |
119 | self._paths[name] = id |
|
119 | self._paths[name] = id | |
120 | self._stat_modes[name] = stat |
|
120 | self._stat_modes[name] = stat | |
121 |
|
121 | |||
122 | for dir in dirs: |
|
122 | for dir in dirs: | |
123 | if curdir: |
|
123 | if curdir: | |
124 | curdir = '/'.join((curdir, dir)) |
|
124 | curdir = '/'.join((curdir, dir)) | |
125 | else: |
|
125 | else: | |
126 | curdir = dir |
|
126 | curdir = dir | |
127 | dir_id = None |
|
127 | dir_id = None | |
128 | for item, stat, id in tree.items(): |
|
128 | for item, stat, id in tree.items(): | |
129 | name = safe_str(item) |
|
129 | name = safe_str(item) | |
130 | if dir == name: |
|
130 | if dir == name: | |
131 | dir_id = id |
|
131 | dir_id = id | |
132 | if dir_id: |
|
132 | if dir_id: | |
133 | # Update tree |
|
133 | # Update tree | |
134 | tree = self.repository._repo[dir_id] |
|
134 | tree = self.repository._repo[dir_id] | |
135 | if not isinstance(tree, objects.Tree): |
|
135 | if not isinstance(tree, objects.Tree): | |
136 | raise ChangesetError('%s is not a directory' % curdir) |
|
136 | raise ChangesetError('%s is not a directory' % curdir) | |
137 | else: |
|
137 | else: | |
138 | raise ChangesetError('%s have not been found' % curdir) |
|
138 | raise ChangesetError('%s have not been found' % curdir) | |
139 |
|
139 | |||
140 | # cache all items from the given traversed tree |
|
140 | # cache all items from the given traversed tree | |
141 | for item, stat, id in tree.items(): |
|
141 | for item, stat, id in tree.items(): | |
142 | name = safe_str(item) |
|
142 | name = safe_str(item) | |
143 | if curdir: |
|
143 | if curdir: | |
144 | name = '/'.join((curdir, name)) |
|
144 | name = '/'.join((curdir, name)) | |
145 | self._paths[name] = id |
|
145 | self._paths[name] = id | |
146 | self._stat_modes[name] = stat |
|
146 | self._stat_modes[name] = stat | |
147 | if path not in self._paths: |
|
147 | if path not in self._paths: | |
148 | raise NodeDoesNotExistError("There is no file nor directory " |
|
148 | raise NodeDoesNotExistError("There is no file nor directory " | |
149 | "at the given path '%s' at revision %s" |
|
149 | "at the given path '%s' at revision %s" | |
150 | % (path, self.short_id)) |
|
150 | % (path, self.short_id)) | |
151 | return self._paths[path] |
|
151 | return self._paths[path] | |
152 |
|
152 | |||
153 | def _get_kind(self, path): |
|
153 | def _get_kind(self, path): | |
154 | obj = self.repository._repo[self._get_id_for_path(path)] |
|
154 | obj = self.repository._repo[self._get_id_for_path(path)] | |
155 | if isinstance(obj, objects.Blob): |
|
155 | if isinstance(obj, objects.Blob): | |
156 | return NodeKind.FILE |
|
156 | return NodeKind.FILE | |
157 | elif isinstance(obj, objects.Tree): |
|
157 | elif isinstance(obj, objects.Tree): | |
158 | return NodeKind.DIR |
|
158 | return NodeKind.DIR | |
159 |
|
159 | |||
160 | def _get_filectx(self, path): |
|
160 | def _get_filectx(self, path): | |
161 | path = path.rstrip('/') |
|
161 | path = path.rstrip('/') | |
162 | if self._get_kind(path) != NodeKind.FILE: |
|
162 | if self._get_kind(path) != NodeKind.FILE: | |
163 | raise ChangesetError("File does not exist for revision %s at " |
|
163 | raise ChangesetError("File does not exist for revision %s at " | |
164 | " '%s'" % (self.raw_id, path)) |
|
164 | " '%s'" % (self.raw_id, path)) | |
165 | return path |
|
165 | return path | |
166 |
|
166 | |||
167 | def _get_file_nodes(self): |
|
167 | def _get_file_nodes(self): | |
168 | return chain(*(t[2] for t in self.walk())) |
|
168 | return chain(*(t[2] for t in self.walk())) | |
169 |
|
169 | |||
170 | @LazyProperty |
|
170 | @LazyProperty | |
171 | def parents(self): |
|
171 | def parents(self): | |
172 | """ |
|
172 | """ | |
173 | Returns list of parents changesets. |
|
173 | Returns list of parents changesets. | |
174 | """ |
|
174 | """ | |
175 | return [self.repository.get_changeset(ascii_str(parent_id)) |
|
175 | return [self.repository.get_changeset(ascii_str(parent_id)) | |
176 | for parent_id in self._commit.parents] |
|
176 | for parent_id in self._commit.parents] | |
177 |
|
177 | |||
178 | @LazyProperty |
|
178 | @LazyProperty | |
179 | def children(self): |
|
179 | def children(self): | |
180 | """ |
|
180 | """ | |
181 | Returns list of children changesets. |
|
181 | Returns list of children changesets. | |
182 | """ |
|
182 | """ | |
183 | rev_filter = settings.GIT_REV_FILTER |
|
183 | rev_filter = settings.GIT_REV_FILTER | |
184 | so = self.repository.run_git_command( |
|
184 | so = self.repository.run_git_command( | |
185 | ['rev-list', rev_filter, '--children'] |
|
185 | ['rev-list', rev_filter, '--children'] | |
186 | ) |
|
186 | ) | |
187 | return [ |
|
187 | return [ | |
188 | self.repository.get_changeset(cs) |
|
188 | self.repository.get_changeset(cs) | |
189 | for parts in (l.split(' ') for l in so.splitlines()) |
|
189 | for parts in (l.split(' ') for l in so.splitlines()) | |
190 | if parts[0] == self.raw_id |
|
190 | if parts[0] == self.raw_id | |
191 | for cs in parts[1:] |
|
191 | for cs in parts[1:] | |
192 | ] |
|
192 | ] | |
193 |
|
193 | |||
194 | def next(self, branch=None): |
|
194 | def next(self, branch=None): | |
195 | if branch and self.branch != branch: |
|
195 | if branch and self.branch != branch: | |
196 | raise VCSError('Branch option used on changeset not belonging ' |
|
196 | raise VCSError('Branch option used on changeset not belonging ' | |
197 | 'to that branch') |
|
197 | 'to that branch') | |
198 |
|
198 | |||
199 | cs = self |
|
199 | cs = self | |
200 | while True: |
|
200 | while True: | |
201 | try: |
|
201 | try: | |
202 | next_ = cs.revision + 1 |
|
202 | next_ = cs.revision + 1 | |
203 | next_rev = cs.repository.revisions[next_] |
|
203 | next_rev = cs.repository.revisions[next_] | |
204 | except IndexError: |
|
204 | except IndexError: | |
205 | raise ChangesetDoesNotExistError |
|
205 | raise ChangesetDoesNotExistError | |
206 | cs = cs.repository.get_changeset(next_rev) |
|
206 | cs = cs.repository.get_changeset(next_rev) | |
207 |
|
207 | |||
208 | if not branch or branch == cs.branch: |
|
208 | if not branch or branch == cs.branch: | |
209 | return cs |
|
209 | return cs | |
210 |
|
210 | |||
211 | def prev(self, branch=None): |
|
211 | def prev(self, branch=None): | |
212 | if branch and self.branch != branch: |
|
212 | if branch and self.branch != branch: | |
213 | raise VCSError('Branch option used on changeset not belonging ' |
|
213 | raise VCSError('Branch option used on changeset not belonging ' | |
214 | 'to that branch') |
|
214 | 'to that branch') | |
215 |
|
215 | |||
216 | cs = self |
|
216 | cs = self | |
217 | while True: |
|
217 | while True: | |
218 | try: |
|
218 | try: | |
219 | prev_ = cs.revision - 1 |
|
219 | prev_ = cs.revision - 1 | |
220 | if prev_ < 0: |
|
220 | if prev_ < 0: | |
221 | raise IndexError |
|
221 | raise IndexError | |
222 | prev_rev = cs.repository.revisions[prev_] |
|
222 | prev_rev = cs.repository.revisions[prev_] | |
223 | except IndexError: |
|
223 | except IndexError: | |
224 | raise ChangesetDoesNotExistError |
|
224 | raise ChangesetDoesNotExistError | |
225 | cs = cs.repository.get_changeset(prev_rev) |
|
225 | cs = cs.repository.get_changeset(prev_rev) | |
226 |
|
226 | |||
227 | if not branch or branch == cs.branch: |
|
227 | if not branch or branch == cs.branch: | |
228 | return cs |
|
228 | return cs | |
229 |
|
229 | |||
def diff(self, ignore_whitespace=True, context=3):
    """Return this changeset's full raw diff as bytes.

    Only used to feed diffstat.  Diffs against the first parent, or
    against the repository's EMPTY_CHANGESET for a root changeset.
    """
    if self.parents:
        base = self.parents[0]
    else:
        base = self.repository.EMPTY_CHANGESET
    chunks = self.repository.get_diff(base, self,
                                      ignore_whitespace=ignore_whitespace,
                                      context=context)
    return b''.join(chunks)
237 |
|
237 | |||
def get_file_mode(self, path):
    """Return the git stat mode of the file at the given ``path``."""
    # _get_id_for_path traverses the tree and, as a side effect, fills
    # self._stat_modes for every path it visits.
    self._get_id_for_path(path)
    modes = self._stat_modes
    return modes[path]
245 |
|
245 | |||
def get_file_content(self, path):
    """Return the content of the blob stored at the given ``path``."""
    blob_id = self._get_id_for_path(path)
    blob = self.repository._repo[blob_id]
    return blob.as_pretty_string()
253 |
|
253 | |||
def get_file_size(self, path):
    """Return the size (raw length) of the blob at the given ``path``."""
    blob_id = self._get_id_for_path(path)
    blob = self.repository._repo[blob_id]
    return blob.raw_length()
261 |
|
261 | |||
def get_file_changeset(self, path):
    """Return the most recent changeset that modified the given ``path``."""
    history = self.get_file_history(path, limit=1)
    return history[0]
267 |
|
267 | |||
def get_file_history(self, path, limit=None):
    """Return history of ``path`` as a list of ``Changeset`` objects, most
    recent first, for revisions in which the file was modified.

    :param limit: optional maximum number of history entries.

    TODO: shells out to the ``git`` executable, which is generally not
    good; should be replaced with an algorithm iterating commits.
    """
    self._get_filectx(path)

    cmd = ['log']
    if limit is not None:
        cmd += ['-n', str(safe_int(limit, 0))]
    cmd += ['--pretty=format:%H', '-s', self.raw_id, '--', path]

    output = self.repository.run_git_command(cmd)
    shas = re.findall(r'[0-9a-fA-F]{40}', output)
    return [self.repository.get_changeset(sha) for sha in shas]
|
288 | return [self.repository.get_changeset(sha) for sha in ids] | |
289 |
|
289 | |||
def get_file_history_2(self, path):
    """Return history of ``path`` as a list of ``Changeset`` objects for
    which the file at the given ``path`` has been modified, using dulwich's
    Walker.

    NOTE(review): ``max_entries=1`` limits the walk to at most one entry,
    so despite the name this returns the single latest changeset touching
    ``path`` — verify whether that limit is intended.
    """
    self._get_filectx(path)
    from dulwich.walk import Walker
    include = [self.raw_id]
    walker = Walker(self.repository._repo.object_store, include,
                    paths=[path], max_entries=1)
    # Bug fix: x.commit.id is the sha; previously the *bound method*
    # ``x.commit.id.decode`` (never called) was passed to ascii_str,
    # handing it a method object instead of the sha bytes.
    return [self.repository.get_changeset(ascii_str(x.commit.id))
            for x in walker]
303 |
|
303 | |||
def get_file_annotate(self, path):
    """Yield four-element tuples for each line of ``path``:
    (lineno, sha, changeset lazy loader, line).

    TODO: shells out to the ``git`` executable, which is generally not
    good; should be replaced with an algorithm iterating commits.
    """
    # -l     ==> outputs long shas (and we need all 40 characters)
    # --root ==> doesn't put '^' character for boundaries
    # -r sha ==> blames for the given revision
    cmd = ['blame', '-l', '--root', '-r', self.raw_id, '--', path]
    output = self.repository.run_git_command(cmd)

    # drop the empty trailing element produced by the final newline
    for lineno, blame_line in enumerate(output.split('\n')[:-1], start=1):
        sha, line = blame_line.split(' ', 1)
        # bind sha as a default so each lambda captures its own value
        yield (lineno, sha,
               lambda sha=sha: self.repository.get_changeset(sha),
               line)
321 |
|
321 | |||
def fill_archive(self, stream=None, kind='tgz', prefix=None,
                 subrepos=False):
    """
    Fills up given stream.

    :param stream: file like object.
    :param kind: one of following: ``zip``, ``tgz`` or ``tbz2``.
        Default: ``tgz``.
    :param prefix: name of root directory in archive.
        Default is repository name and changeset's raw_id joined with dash
        (``repo-tip.<KIND>``).
    :param subrepos: include subrepos in this archive.

    :raise ImproperArchiveTypeError: If given kind is wrong.
    :raise VCSError: If given stream is None or prefix is invalid.
    """
    allowed_kinds = settings.ARCHIVE_SPECS
    if kind not in allowed_kinds:
        # Fixed message: 'one' and 'of' were previously concatenated
        # across the string literals, rendering as "use oneof ...".
        raise ImproperArchiveTypeError('Archive kind not supported use one '
                                       'of %s' % ' '.join(allowed_kinds))

    if stream is None:
        raise VCSError('You need to pass in a valid stream for filling'
                       ' with archival data')

    if prefix is None:
        prefix = '%s-%s' % (self.repository.name, self.short_id)
    elif prefix.startswith('/'):
        raise VCSError("Prefix cannot start with leading slash")
    elif prefix.strip() == '':
        raise VCSError("Prefix cannot be empty")

    frmt = 'zip' if kind == 'zip' else 'tar'
    _git_path = settings.GIT_EXECUTABLE_PATH
    # SECURITY NOTE(review): the command is run with shell=True and
    # interpolates ``prefix``; a prefix containing shell metacharacters
    # could inject commands. Consider shlex.quote() or shell=False with
    # an explicit pipeline — verify who controls ``prefix``.
    cmd = '%s archive --format=%s --prefix=%s/ %s' % (_git_path,
        frmt, prefix, self.raw_id)
    if kind == 'tgz':
        cmd += ' | gzip -9'
    elif kind == 'tbz2':
        cmd += ' | bzip2 -9'

    # (a duplicated `if stream is None` re-check was removed here; the
    # stream is already validated above before any work is done)
    popen = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True,
                  cwd=self.repository.path)

    # stream the archive out in fixed-size chunks to bound memory use
    buffer_size = 1024 * 8
    chunk = popen.stdout.read(buffer_size)
    while chunk:
        stream.write(chunk)
        chunk = popen.stdout.read(buffer_size)
    # Make sure all descriptors would be read
    popen.communicate()
379 |
|
379 | |||
def get_nodes(self, path):
    """
    Returns combined ``DirNode`` and ``FileNode`` objects list representing
    state of changeset at the given ``path``. If node at the given ``path``
    is not instance of ``DirNode``, ChangesetError would be raised.
    """

    if self._get_kind(path) != NodeKind.DIR:
        raise ChangesetError("Directory does not exist for revision %s at "
                             " '%s'" % (self.revision, path))
    path = path.rstrip('/')
    id = self._get_id_for_path(path)
    tree = self.repository._repo[id]
    dirnodes = []
    filenodes = []
    als = self.repository.alias
    # walk the direct children of the tree at ``path``
    for name, stat, id in tree.items():
        obj_path = safe_str(name)
        if path != '':
            obj_path = '/'.join((path, obj_path))
        # git submodule entry: resolve its URL from .gitmodules at the root
        if objects.S_ISGITLINK(stat):
            root_tree = self.repository._repo[self._tree_id]
            cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(root_tree[b'.gitmodules'][1]).data))
            url = ascii_str(cf.get(('submodule', obj_path), 'url'))
            dirnodes.append(SubModuleNode(obj_path, url=url, changeset=ascii_str(id),
                                          alias=als))
            continue

        obj = self.repository._repo.get_object(id)
        # remember the stat mode for later get_file_mode() lookups
        if obj_path not in self._stat_modes:
            self._stat_modes[obj_path] = stat
        if isinstance(obj, objects.Tree):
            dirnodes.append(DirNode(obj_path, changeset=self))
        elif isinstance(obj, objects.Blob):
            filenodes.append(FileNode(obj_path, changeset=self, mode=stat))
        else:
            raise ChangesetError("Requested object should be Tree "
                                 "or Blob, is %r" % type(obj))
    nodes = dirnodes + filenodes
    # cache every discovered node on the changeset (first sighting wins)
    for node in nodes:
        if node.path not in self.nodes:
            self.nodes[node.path] = node
    nodes.sort()
    return nodes
424 |
|
424 | |||
def get_node(self, path):
    """
    Returns ``Node`` object from the given ``path``. If there is no node at
    the given ``path``, ``ChangesetError`` would be raised.
    """
    path = path.rstrip('/')
    if path not in self.nodes:
        try:
            id_ = self._get_id_for_path(path)
        except ChangesetError:
            raise NodeDoesNotExistError("Cannot find one of parents' "
                                        "directories for a given path: %s" % path)

        # _get_id_for_path populated _stat_modes as a side effect; a
        # gitlink mode means ``path`` is a submodule, not a tree/blob
        stat = self._stat_modes.get(path)
        if stat and objects.S_ISGITLINK(stat):
            tree = self.repository._repo[self._tree_id]
            cf = ConfigFile.from_file(BytesIO(self.repository._repo.get_object(tree[b'.gitmodules'][1]).data))
            url = ascii_str(cf.get(('submodule', path), 'url'))
            node = SubModuleNode(path, url=url, changeset=ascii_str(id_),
                                 alias=self.repository.alias)
        else:
            obj = self.repository._repo.get_object(id_)

            if isinstance(obj, objects.Tree):
                # empty path is the repository root
                if path == '':
                    node = RootNode(changeset=self)
                else:
                    node = DirNode(path, changeset=self)
                node._tree = obj
            elif isinstance(obj, objects.Blob):
                node = FileNode(path, changeset=self)
                node._blob = obj
            else:
                raise NodeDoesNotExistError("There is no file nor directory "
                                            "at the given path: '%s' at revision %s"
                                            % (path, self.short_id))
        # cache node
        self.nodes[path] = node
    return self.nodes[path]
464 |
|
464 | |||
@LazyProperty
def affected_files(self):
    """
    Gets a fast accessible list of all paths added, modified or deleted
    in this changeset.
    """
    added, modified, deleted = self._changes_cache
    return list(added | modified | deleted)
472 |
|
472 | |||
@LazyProperty
def _changes_cache(self):
    """Compute the sets of (added, modified, deleted) path names of this
    changeset relative to each of its parents (root changesets are
    compared against an EmptyChangeset)."""
    added = set()
    modified = set()
    deleted = set()
    repo = self.repository._repo

    parents = self.parents or [EmptyChangeset()]
    for parent in parents:
        if isinstance(parent, EmptyChangeset):
            parent_tree = None
        else:
            parent_tree = repo[parent._commit.id].tree
        changes = repo.object_store.tree_changes(parent_tree,
                                                 repo[self._commit.id].tree)
        # each change is ((oldpath, newpath), (oldmode, newmode), (oldsha, newsha))
        for (oldpath, newpath), _modes, _shas in changes:
            if oldpath and newpath:
                modified.add(safe_str(newpath))
            elif newpath:
                added.add(safe_str(newpath))
            elif oldpath:
                deleted.add(safe_str(oldpath))
    return added, modified, deleted
497 |
|
497 | |||
498 | def _get_paths_for_status(self, status): |
|
498 | def _get_paths_for_status(self, status): | |
499 | """ |
|
499 | """ | |
500 | Returns sorted list of paths for given ``status``. |
|
500 | Returns sorted list of paths for given ``status``. | |
501 |
|
501 | |||
502 | :param status: one of: *added*, *modified* or *deleted* |
|
502 | :param status: one of: *added*, *modified* or *deleted* | |
503 | """ |
|
503 | """ | |
504 | added, modified, deleted = self._changes_cache |
|
504 | added, modified, deleted = self._changes_cache | |
505 | return sorted({ |
|
505 | return sorted({ | |
506 | 'added': list(added), |
|
506 | 'added': list(added), | |
507 | 'modified': list(modified), |
|
507 | 'modified': list(modified), | |
508 | 'deleted': list(deleted)}[status] |
|
508 | 'deleted': list(deleted)}[status] | |
509 | ) |
|
509 | ) | |
510 |
|
510 | |||
@LazyProperty
def added(self):
    """
    Returns list of added ``FileNode`` objects.
    """
    # root changeset: every file in the tree counts as added
    if not self.parents:
        return list(self._get_file_nodes())
    paths = list(self._get_paths_for_status('added'))
    return AddedFileNodesGenerator(paths, self)
520 |
|
520 | |||
@LazyProperty
def changed(self):
    """
    Returns list of modified ``FileNode`` objects.
    """
    # root changeset: nothing can be "modified" relative to a parent
    if not self.parents:
        return []
    paths = list(self._get_paths_for_status('modified'))
    return ChangedFileNodesGenerator(paths, self)
530 |
|
530 | |||
@LazyProperty
def removed(self):
    """
    Returns list of removed ``FileNode`` objects.
    """
    # root changeset: nothing can be "removed" relative to a parent
    if not self.parents:
        return []
    paths = list(self._get_paths_for_status('deleted'))
    return RemovedFileNodesGenerator(paths, self)
540 |
|
540 | |||
# NOTE(review): class-level mutable dict — shared by all instances unless
# reassigned per instance; presumably populated by external tools. Verify
# the intended sharing semantics.
extra = {}
@@ -1,737 +1,734 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | """ |
|
2 | """ | |
3 | vcs.backends.git.repository |
|
3 | vcs.backends.git.repository | |
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
5 |
|
5 | |||
6 | Git repository implementation. |
|
6 | Git repository implementation. | |
7 |
|
7 | |||
8 | :created_on: Apr 8, 2010 |
|
8 | :created_on: Apr 8, 2010 | |
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. |
|
9 | :copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak. | |
10 | """ |
|
10 | """ | |
11 |
|
11 | |||
12 | import errno |
|
12 | import errno | |
13 | import logging |
|
13 | import logging | |
14 | import os |
|
14 | import os | |
15 | import re |
|
15 | import re | |
16 | import time |
|
16 | import time | |
17 | import urllib.error |
|
17 | import urllib.error | |
18 | import urllib.parse |
|
18 | import urllib.parse | |
19 | import urllib.request |
|
19 | import urllib.request | |
20 | from collections import OrderedDict |
|
20 | from collections import OrderedDict | |
21 |
|
21 | |||
22 | import mercurial.url # import httpbasicauthhandler, httpdigestauthhandler |
|
22 | import mercurial.url # import httpbasicauthhandler, httpdigestauthhandler | |
23 | import mercurial.util # import url as hg_url |
|
23 | import mercurial.util # import url as hg_url | |
24 | from dulwich.config import ConfigFile |
|
24 | from dulwich.config import ConfigFile | |
25 | from dulwich.objects import Tag |
|
25 | from dulwich.objects import Tag | |
26 | from dulwich.repo import NotGitRepository, Repo |
|
26 | from dulwich.repo import NotGitRepository, Repo | |
27 |
|
27 | |||
28 | from kallithea.lib.vcs import subprocessio |
|
28 | from kallithea.lib.vcs import subprocessio | |
29 | from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator |
|
29 | from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator | |
30 | from kallithea.lib.vcs.conf import settings |
|
30 | from kallithea.lib.vcs.conf import settings | |
31 | from kallithea.lib.vcs.exceptions import ( |
|
31 | from kallithea.lib.vcs.exceptions import ( | |
32 | BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError) |
|
32 | BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError, TagAlreadyExistError, TagDoesNotExistError) | |
33 | from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str |
|
33 | from kallithea.lib.vcs.utils import ascii_str, date_fromtimestamp, makedate, safe_bytes, safe_str | |
34 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
34 | from kallithea.lib.vcs.utils.lazy import LazyProperty | |
35 | from kallithea.lib.vcs.utils.paths import abspath, get_user_home |
|
35 | from kallithea.lib.vcs.utils.paths import abspath, get_user_home | |
36 |
|
36 | |||
37 | from .changeset import GitChangeset |
|
37 | from .changeset import GitChangeset | |
38 | from .inmemory import GitInMemoryChangeset |
|
38 | from .inmemory import GitInMemoryChangeset | |
39 | from .workdir import GitWorkdir |
|
39 | from .workdir import GitWorkdir | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$') |
|
42 | SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$') | |
43 |
|
43 | |||
44 | log = logging.getLogger(__name__) |
|
44 | log = logging.getLogger(__name__) | |
45 |
|
45 | |||
46 |
|
46 | |||
47 | class GitRepository(BaseRepository): |
|
47 | class GitRepository(BaseRepository): | |
48 | """ |
|
48 | """ | |
49 | Git repository backend. |
|
49 | Git repository backend. | |
50 | """ |
|
50 | """ | |
51 | DEFAULT_BRANCH_NAME = 'master' |
|
51 | DEFAULT_BRANCH_NAME = 'master' | |
52 | scm = 'git' |
|
52 | scm = 'git' | |
53 |
|
53 | |||
def __init__(self, repo_path, create=False, src_url=None,
             update_after_clone=False, bare=False):
    """Open (or optionally create/clone) the git repository at ``repo_path``.

    :param create: create the repository if it does not exist.
    :param src_url: optional URL to clone from.
    :param update_after_clone: check out a working copy after cloning.
    :param bare: request a bare repository.
    """
    self.path = abspath(repo_path)
    repo = self._get_repo(create, src_url, update_after_clone, bare)
    self.repo = repo
    # reflect the actual on-disk layout, which is authoritative over the
    # ``bare`` argument
    self.bare = repo.bare
60 |
|
60 | |||
@property
def _config_files(self):
    """Candidate git config files: the repo-level config first, then the
    user's ``~/.gitconfig``."""
    # Replaced the fragile ``cond and a or b`` idiom with an explicit
    # conditional: the old form silently falls through to the ``or``
    # branch whenever the first result is falsy.
    if self.bare:
        repo_config = abspath(self.path, 'config')
    else:
        repo_config = abspath(self.path, '.git', 'config')
    return [
        repo_config,
        abspath(get_user_home(), '.gitconfig'),
    ]
68 |
|
68 | |||
@property
def _repo(self):
    # Alias for ``self.repo`` (the underlying dulwich Repo) — presumably
    # kept so code written against the ``_repo`` name keeps working; verify.
    return self.repo
72 |
|
72 | |||
@property
def head(self):
    """Current HEAD of the underlying repository, or None when it cannot
    be resolved (the dulwich call raises KeyError, e.g. for an empty repo)."""
    try:
        return self._repo.head()
    except KeyError:
        return None
79 |
|
79 | |||
80 | @property |
|
80 | @property | |
81 | def _empty(self): |
|
81 | def _empty(self): | |
82 | """ |
|
82 | """ | |
83 | Checks if repository is empty ie. without any changesets |
|
83 | Checks if repository is empty ie. without any changesets | |
84 | """ |
|
84 | """ | |
85 |
|
85 | |||
86 | try: |
|
86 | try: | |
87 | self.revisions[0] |
|
87 | self.revisions[0] | |
88 | except (KeyError, IndexError): |
|
88 | except (KeyError, IndexError): | |
89 | return True |
|
89 | return True | |
90 | return False |
|
90 | return False | |
91 |
|
91 | |||
@LazyProperty
def revisions(self):
    """
    Returns list of revisions' ids, in ascending order. Being lazy
    attribute allows external tools to inject shas from cache.
    """
    # computed once on first access (LazyProperty), then reused
    return self._get_all_revisions()
99 |
|
99 | |||
@classmethod
def _run_git_command(cls, cmd, cwd=None):
    """
    Runs given ``cmd`` as git command and returns output bytes in a tuple
    (stdout, stderr) ... or raise RepositoryError.

    :param cmd: git command to be executed
    :param cwd: passed directly to subprocess
    :raises RepositoryError: if the git process could not be started
    """
    # need to clean fix GIT_DIR !
    gitenv = dict(os.environ)
    gitenv.pop('GIT_DIR', None)
    # presumably disables reading the user's global git config so runs
    # are deterministic — verify against git's env var documentation
    gitenv['GIT_CONFIG_NOGLOBAL'] = '1'

    assert isinstance(cmd, list), cmd
    # core.quotepath=false: keep non-ASCII path bytes unescaped in output
    cmd = [settings.GIT_EXECUTABLE_PATH, '-c', 'core.quotepath=false'] + cmd
    try:
        p = subprocessio.SubprocessIOChunker(cmd, cwd=cwd, env=gitenv, shell=False)
    except (EnvironmentError, OSError) as err:
        # output from the failing process is in str(EnvironmentError)
        msg = ("Couldn't run git command %s.\n"
               "Subprocess failed with '%s': %s\n" %
               (cmd, type(err).__name__, err)
               ).strip()
        log.error(msg)
        raise RepositoryError(msg)

    try:
        stdout = b''.join(p.output)
        stderr = b''.join(p.error)
    finally:
        # always release the subprocess pipes, even if joining fails
        p.close()
    # TODO: introduce option to make commands fail if they have any stderr output?
    if stderr:
        log.debug('stderr from %s:\n%s', cmd, stderr)
    else:
        log.debug('stderr from %s: None', cmd)
    return stdout, stderr
138 |
|
138 | |||
139 | def run_git_command(self, cmd): |
|
139 | def run_git_command(self, cmd): | |
140 | """ |
|
140 | """ | |
141 | Runs given ``cmd`` as git command with cwd set to current repo. |
|
141 | Runs given ``cmd`` as git command with cwd set to current repo. | |
142 | Returns stdout as unicode str ... or raise RepositoryError. |
|
142 | Returns stdout as unicode str ... or raise RepositoryError. | |
143 | """ |
|
143 | """ | |
144 | cwd = None |
|
144 | cwd = None | |
145 | if os.path.isdir(self.path): |
|
145 | if os.path.isdir(self.path): | |
146 | cwd = self.path |
|
146 | cwd = self.path | |
147 | stdout, _stderr = self._run_git_command(cmd, cwd=cwd) |
|
147 | stdout, _stderr = self._run_git_command(cmd, cwd=cwd) | |
148 | return safe_str(stdout) |
|
148 | return safe_str(stdout) | |
149 |
|
149 | |||
150 | @classmethod |
|
150 | @classmethod | |
151 | def _check_url(cls, url): |
|
151 | def _check_url(cls, url): | |
152 | """ |
|
152 | """ | |
153 | Function will check given url and try to verify if it's a valid |
|
153 | Function will check given url and try to verify if it's a valid | |
154 | link. Sometimes it may happened that git will issue basic |
|
154 | link. Sometimes it may happened that git will issue basic | |
155 | auth request that can cause whole API to hang when used from python |
|
155 | auth request that can cause whole API to hang when used from python | |
156 | or other external calls. |
|
156 | or other external calls. | |
157 |
|
157 | |||
158 | On failures it'll raise urllib2.HTTPError, exception is also thrown |
|
158 | On failures it'll raise urllib2.HTTPError, exception is also thrown | |
159 | when the return code is non 200 |
|
159 | when the return code is non 200 | |
160 | """ |
|
160 | """ | |
161 | # check first if it's not an local url |
|
161 | # check first if it's not an local url | |
162 | if os.path.isdir(url) or url.startswith('file:'): |
|
162 | if os.path.isdir(url) or url.startswith('file:'): | |
163 | return True |
|
163 | return True | |
164 |
|
164 | |||
165 | if url.startswith('git://'): |
|
165 | if url.startswith('git://'): | |
166 | return True |
|
166 | return True | |
167 |
|
167 | |||
168 | if '+' in url[:url.find('://')]: |
|
168 | if '+' in url[:url.find('://')]: | |
169 | url = url[url.find('+') + 1:] |
|
169 | url = url[url.find('+') + 1:] | |
170 |
|
170 | |||
171 | handlers = [] |
|
171 | handlers = [] | |
172 | url_obj = mercurial.util.url(safe_bytes(url)) |
|
172 | url_obj = mercurial.util.url(safe_bytes(url)) | |
173 | test_uri, authinfo = url_obj.authinfo() |
|
173 | test_uri, authinfo = url_obj.authinfo() | |
174 | if not test_uri.endswith('info/refs'): |
|
174 | if not test_uri.endswith('info/refs'): | |
175 | test_uri = test_uri.rstrip('/') + '/info/refs' |
|
175 | test_uri = test_uri.rstrip('/') + '/info/refs' | |
176 |
|
176 | |||
177 | url_obj.passwd = b'*****' |
|
177 | url_obj.passwd = b'*****' | |
178 | cleaned_uri = str(url_obj) |
|
178 | cleaned_uri = str(url_obj) | |
179 |
|
179 | |||
180 | if authinfo: |
|
180 | if authinfo: | |
181 | # create a password manager |
|
181 | # create a password manager | |
182 | passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() |
|
182 | passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() | |
183 | passmgr.add_password(*authinfo) |
|
183 | passmgr.add_password(*authinfo) | |
184 |
|
184 | |||
185 | handlers.extend((mercurial.url.httpbasicauthhandler(passmgr), |
|
185 | handlers.extend((mercurial.url.httpbasicauthhandler(passmgr), | |
186 | mercurial.url.httpdigestauthhandler(passmgr))) |
|
186 | mercurial.url.httpdigestauthhandler(passmgr))) | |
187 |
|
187 | |||
188 | o = urllib.request.build_opener(*handlers) |
|
188 | o = urllib.request.build_opener(*handlers) | |
189 | o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git |
|
189 | o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git | |
190 |
|
190 | |||
191 | req = urllib.request.Request( |
|
191 | req = urllib.request.Request( | |
192 | "%s?%s" % ( |
|
192 | "%s?%s" % ( | |
193 | test_uri, |
|
193 | test_uri, | |
194 | urllib.parse.urlencode({"service": 'git-upload-pack'}) |
|
194 | urllib.parse.urlencode({"service": 'git-upload-pack'}) | |
195 | )) |
|
195 | )) | |
196 |
|
196 | |||
197 | try: |
|
197 | try: | |
198 | resp = o.open(req) |
|
198 | resp = o.open(req) | |
199 | if resp.code != 200: |
|
199 | if resp.code != 200: | |
200 | raise Exception('Return Code is not 200') |
|
200 | raise Exception('Return Code is not 200') | |
201 | except Exception as e: |
|
201 | except Exception as e: | |
202 | # means it cannot be cloned |
|
202 | # means it cannot be cloned | |
203 | raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e)) |
|
203 | raise urllib.error.URLError("[%s] org_exc: %s" % (cleaned_uri, e)) | |
204 |
|
204 | |||
205 | # now detect if it's proper git repo |
|
205 | # now detect if it's proper git repo | |
206 | gitdata = resp.read() |
|
206 | gitdata = resp.read() | |
207 | if 'service=git-upload-pack' not in gitdata: |
|
207 | if 'service=git-upload-pack' not in gitdata: | |
208 | raise urllib.error.URLError( |
|
208 | raise urllib.error.URLError( | |
209 | "url [%s] does not look like an git" % cleaned_uri) |
|
209 | "url [%s] does not look like an git" % cleaned_uri) | |
210 |
|
210 | |||
211 | return True |
|
211 | return True | |
212 |
|
212 | |||
213 | def _get_repo(self, create, src_url=None, update_after_clone=False, |
|
213 | def _get_repo(self, create, src_url=None, update_after_clone=False, | |
214 | bare=False): |
|
214 | bare=False): | |
215 | if create and os.path.exists(self.path): |
|
215 | if create and os.path.exists(self.path): | |
216 | raise RepositoryError("Location already exist") |
|
216 | raise RepositoryError("Location already exist") | |
217 | if src_url and not create: |
|
217 | if src_url and not create: | |
218 | raise RepositoryError("Create should be set to True if src_url is " |
|
218 | raise RepositoryError("Create should be set to True if src_url is " | |
219 | "given (clone operation creates repository)") |
|
219 | "given (clone operation creates repository)") | |
220 | try: |
|
220 | try: | |
221 | if create and src_url: |
|
221 | if create and src_url: | |
222 | GitRepository._check_url(src_url) |
|
222 | GitRepository._check_url(src_url) | |
223 | self.clone(src_url, update_after_clone, bare) |
|
223 | self.clone(src_url, update_after_clone, bare) | |
224 | return Repo(self.path) |
|
224 | return Repo(self.path) | |
225 | elif create: |
|
225 | elif create: | |
226 | os.makedirs(self.path) |
|
226 | os.makedirs(self.path) | |
227 | if bare: |
|
227 | if bare: | |
228 | return Repo.init_bare(self.path) |
|
228 | return Repo.init_bare(self.path) | |
229 | else: |
|
229 | else: | |
230 | return Repo.init(self.path) |
|
230 | return Repo.init(self.path) | |
231 | else: |
|
231 | else: | |
232 | return Repo(self.path) |
|
232 | return Repo(self.path) | |
233 | except (NotGitRepository, OSError) as err: |
|
233 | except (NotGitRepository, OSError) as err: | |
234 | raise RepositoryError(err) |
|
234 | raise RepositoryError(err) | |
235 |
|
235 | |||
236 | def _get_all_revisions(self): |
|
236 | def _get_all_revisions(self): | |
237 | # we must check if this repo is not empty, since later command |
|
237 | # we must check if this repo is not empty, since later command | |
238 | # fails if it is. And it's cheaper to ask than throw the subprocess |
|
238 | # fails if it is. And it's cheaper to ask than throw the subprocess | |
239 | # errors |
|
239 | # errors | |
240 | try: |
|
240 | try: | |
241 | self._repo.head() |
|
241 | self._repo.head() | |
242 | except KeyError: |
|
242 | except KeyError: | |
243 | return [] |
|
243 | return [] | |
244 |
|
244 | |||
245 | rev_filter = settings.GIT_REV_FILTER |
|
245 | rev_filter = settings.GIT_REV_FILTER | |
246 | cmd = ['rev-list', rev_filter, '--reverse', '--date-order'] |
|
246 | cmd = ['rev-list', rev_filter, '--reverse', '--date-order'] | |
247 | try: |
|
247 | try: | |
248 | so = self.run_git_command(cmd) |
|
248 | so = self.run_git_command(cmd) | |
249 | except RepositoryError: |
|
249 | except RepositoryError: | |
250 | # Can be raised for empty repositories |
|
250 | # Can be raised for empty repositories | |
251 | return [] |
|
251 | return [] | |
252 | return so.splitlines() |
|
252 | return so.splitlines() | |
253 |
|
253 | |||
254 | def _get_all_revisions2(self): |
|
254 | def _get_all_revisions2(self): | |
255 | # alternate implementation using dulwich |
|
255 | # alternate implementation using dulwich | |
256 | includes = [ascii_str(sha) for key, (sha, type_) in self._parsed_refs.items() |
|
256 | includes = [ascii_str(sha) for key, (sha, type_) in self._parsed_refs.items() | |
257 | if type_ != b'T'] |
|
257 | if type_ != b'T'] | |
258 | return [c.commit.id for c in self._repo.get_walker(include=includes)] |
|
258 | return [c.commit.id for c in self._repo.get_walker(include=includes)] | |
259 |
|
259 | |||
260 | def _get_revision(self, revision): |
|
260 | def _get_revision(self, revision): | |
261 | """ |
|
261 | """ | |
262 | Given any revision identifier, returns a 40 char string with revision hash. |
|
262 | Given any revision identifier, returns a 40 char string with revision hash. | |
263 | """ |
|
263 | """ | |
264 | if self._empty: |
|
264 | if self._empty: | |
265 | raise EmptyRepositoryError("There are no changesets yet") |
|
265 | raise EmptyRepositoryError("There are no changesets yet") | |
266 |
|
266 | |||
267 | if revision in (None, '', 'tip', 'HEAD', 'head', -1): |
|
267 | if revision in (None, '', 'tip', 'HEAD', 'head', -1): | |
268 | revision = -1 |
|
268 | revision = -1 | |
269 |
|
269 | |||
270 | if isinstance(revision, int): |
|
270 | if isinstance(revision, int): | |
271 | try: |
|
271 | try: | |
272 | return self.revisions[revision] |
|
272 | return self.revisions[revision] | |
273 | except IndexError: |
|
273 | except IndexError: | |
274 | msg = "Revision %r does not exist for %s" % (revision, self.name) |
|
274 | msg = "Revision %r does not exist for %s" % (revision, self.name) | |
275 | raise ChangesetDoesNotExistError(msg) |
|
275 | raise ChangesetDoesNotExistError(msg) | |
276 |
|
276 | |||
277 | if isinstance(revision, str): |
|
277 | if isinstance(revision, str): | |
278 | if revision.isdigit() and (len(revision) < 12 or len(revision) == revision.count('0')): |
|
278 | if revision.isdigit() and (len(revision) < 12 or len(revision) == revision.count('0')): | |
279 | try: |
|
279 | try: | |
280 | return self.revisions[int(revision)] |
|
280 | return self.revisions[int(revision)] | |
281 | except IndexError: |
|
281 | except IndexError: | |
282 | msg = "Revision %r does not exist for %s" % (revision, self) |
|
282 | msg = "Revision %r does not exist for %s" % (revision, self) | |
283 | raise ChangesetDoesNotExistError(msg) |
|
283 | raise ChangesetDoesNotExistError(msg) | |
284 |
|
284 | |||
285 | # get by branch/tag name |
|
285 | # get by branch/tag name | |
286 | _ref_revision = self._parsed_refs.get(safe_bytes(revision)) |
|
286 | _ref_revision = self._parsed_refs.get(safe_bytes(revision)) | |
287 | if _ref_revision: # and _ref_revision[1] in [b'H', b'RH', b'T']: |
|
287 | if _ref_revision: # and _ref_revision[1] in [b'H', b'RH', b'T']: | |
288 | return ascii_str(_ref_revision[0]) |
|
288 | return ascii_str(_ref_revision[0]) | |
289 |
|
289 | |||
290 | if revision in self.revisions: |
|
290 | if revision in self.revisions: | |
291 | return revision |
|
291 | return revision | |
292 |
|
292 | |||
293 | # maybe it's a tag ? we don't have them in self.revisions |
|
293 | # maybe it's a tag ? we don't have them in self.revisions | |
294 | if revision in self.tags.values(): |
|
294 | if revision in self.tags.values(): | |
295 | return revision |
|
295 | return revision | |
296 |
|
296 | |||
297 | if SHA_PATTERN.match(revision): |
|
297 | if SHA_PATTERN.match(revision): | |
298 | msg = "Revision %r does not exist for %s" % (revision, self.name) |
|
298 | msg = "Revision %r does not exist for %s" % (revision, self.name) | |
299 | raise ChangesetDoesNotExistError(msg) |
|
299 | raise ChangesetDoesNotExistError(msg) | |
300 |
|
300 | |||
301 | raise ChangesetDoesNotExistError("Given revision %r not recognized" % revision) |
|
301 | raise ChangesetDoesNotExistError("Given revision %r not recognized" % revision) | |
302 |
|
302 | |||
303 | def get_ref_revision(self, ref_type, ref_name): |
|
303 | def get_ref_revision(self, ref_type, ref_name): | |
304 | """ |
|
304 | """ | |
305 | Returns ``GitChangeset`` object representing repository's |
|
305 | Returns ``GitChangeset`` object representing repository's | |
306 | changeset at the given ``revision``. |
|
306 | changeset at the given ``revision``. | |
307 | """ |
|
307 | """ | |
308 | return self._get_revision(ref_name) |
|
308 | return self._get_revision(ref_name) | |
309 |
|
309 | |||
310 | def _get_archives(self, archive_name='tip'): |
|
310 | def _get_archives(self, archive_name='tip'): | |
311 |
|
311 | |||
312 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: |
|
312 | for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: | |
313 | yield {"type": i[0], "extension": i[1], "node": archive_name} |
|
313 | yield {"type": i[0], "extension": i[1], "node": archive_name} | |
314 |
|
314 | |||
315 | def _get_url(self, url): |
|
315 | def _get_url(self, url): | |
316 | """ |
|
316 | """ | |
317 | Returns normalized url. If schema is not given, would fall to |
|
317 | Returns normalized url. If schema is not given, would fall to | |
318 | filesystem (``file:///``) schema. |
|
318 | filesystem (``file:///``) schema. | |
319 | """ |
|
319 | """ | |
320 | if url != 'default' and '://' not in url: |
|
320 | if url != 'default' and '://' not in url: | |
321 | url = ':///'.join(('file', url)) |
|
321 | url = ':///'.join(('file', url)) | |
322 | return url |
|
322 | return url | |
323 |
|
323 | |||
324 | @LazyProperty |
|
324 | @LazyProperty | |
325 | def name(self): |
|
325 | def name(self): | |
326 | return os.path.basename(self.path) |
|
326 | return os.path.basename(self.path) | |
327 |
|
327 | |||
328 | @LazyProperty |
|
328 | @LazyProperty | |
329 | def last_change(self): |
|
329 | def last_change(self): | |
330 | """ |
|
330 | """ | |
331 | Returns last change made on this repository as datetime object |
|
331 | Returns last change made on this repository as datetime object | |
332 | """ |
|
332 | """ | |
333 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) |
|
333 | return date_fromtimestamp(self._get_mtime(), makedate()[1]) | |
334 |
|
334 | |||
335 | def _get_mtime(self): |
|
335 | def _get_mtime(self): | |
336 | try: |
|
336 | try: | |
337 | return time.mktime(self.get_changeset().date.timetuple()) |
|
337 | return time.mktime(self.get_changeset().date.timetuple()) | |
338 | except RepositoryError: |
|
338 | except RepositoryError: | |
339 | idx_loc = '' if self.bare else '.git' |
|
339 | idx_loc = '' if self.bare else '.git' | |
340 | # fallback to filesystem |
|
340 | # fallback to filesystem | |
341 | in_path = os.path.join(self.path, idx_loc, "index") |
|
341 | in_path = os.path.join(self.path, idx_loc, "index") | |
342 | he_path = os.path.join(self.path, idx_loc, "HEAD") |
|
342 | he_path = os.path.join(self.path, idx_loc, "HEAD") | |
343 | if os.path.exists(in_path): |
|
343 | if os.path.exists(in_path): | |
344 | return os.stat(in_path).st_mtime |
|
344 | return os.stat(in_path).st_mtime | |
345 | else: |
|
345 | else: | |
346 | return os.stat(he_path).st_mtime |
|
346 | return os.stat(he_path).st_mtime | |
347 |
|
347 | |||
348 | @LazyProperty |
|
348 | @LazyProperty | |
349 | def description(self): |
|
349 | def description(self): | |
350 | return safe_str(self._repo.get_description() or b'unknown') |
|
350 | return safe_str(self._repo.get_description() or b'unknown') | |
351 |
|
351 | |||
352 | @LazyProperty |
|
352 | @LazyProperty | |
353 | def contact(self): |
|
353 | def contact(self): | |
354 | undefined_contact = 'Unknown' |
|
354 | undefined_contact = 'Unknown' | |
355 | return undefined_contact |
|
355 | return undefined_contact | |
356 |
|
356 | |||
357 | @property |
|
357 | @property | |
358 | def branches(self): |
|
358 | def branches(self): | |
359 | if not self.revisions: |
|
359 | if not self.revisions: | |
360 | return {} |
|
360 | return {} | |
361 | sortkey = lambda ctx: ctx[0] |
|
|||
362 | _branches = [(safe_str(key), ascii_str(sha)) |
|
361 | _branches = [(safe_str(key), ascii_str(sha)) | |
363 | for key, (sha, type_) in self._parsed_refs.items() if type_ == b'H'] |
|
362 | for key, (sha, type_) in self._parsed_refs.items() if type_ == b'H'] | |
364 |
return OrderedDict(sorted(_branches, key= |
|
363 | return OrderedDict(sorted(_branches, key=(lambda ctx: ctx[0]), reverse=False)) | |
365 |
|
364 | |||
366 | @LazyProperty |
|
365 | @LazyProperty | |
367 | def closed_branches(self): |
|
366 | def closed_branches(self): | |
368 | return {} |
|
367 | return {} | |
369 |
|
368 | |||
370 | @LazyProperty |
|
369 | @LazyProperty | |
371 | def tags(self): |
|
370 | def tags(self): | |
372 | return self._get_tags() |
|
371 | return self._get_tags() | |
373 |
|
372 | |||
374 | def _get_tags(self): |
|
373 | def _get_tags(self): | |
375 | if not self.revisions: |
|
374 | if not self.revisions: | |
376 | return {} |
|
375 | return {} | |
377 |
|
||||
378 | sortkey = lambda ctx: ctx[0] |
|
|||
379 | _tags = [(safe_str(key), ascii_str(sha)) |
|
376 | _tags = [(safe_str(key), ascii_str(sha)) | |
380 | for key, (sha, type_) in self._parsed_refs.items() if type_ == b'T'] |
|
377 | for key, (sha, type_) in self._parsed_refs.items() if type_ == b'T'] | |
381 |
return OrderedDict(sorted(_tags, key= |
|
378 | return OrderedDict(sorted(_tags, key=(lambda ctx: ctx[0]), reverse=True)) | |
382 |
|
379 | |||
383 | def tag(self, name, user, revision=None, message=None, date=None, |
|
380 | def tag(self, name, user, revision=None, message=None, date=None, | |
384 | **kwargs): |
|
381 | **kwargs): | |
385 | """ |
|
382 | """ | |
386 | Creates and returns a tag for the given ``revision``. |
|
383 | Creates and returns a tag for the given ``revision``. | |
387 |
|
384 | |||
388 | :param name: name for new tag |
|
385 | :param name: name for new tag | |
389 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
386 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
390 | :param revision: changeset id for which new tag would be created |
|
387 | :param revision: changeset id for which new tag would be created | |
391 | :param message: message of the tag's commit |
|
388 | :param message: message of the tag's commit | |
392 | :param date: date of tag's commit |
|
389 | :param date: date of tag's commit | |
393 |
|
390 | |||
394 | :raises TagAlreadyExistError: if tag with same name already exists |
|
391 | :raises TagAlreadyExistError: if tag with same name already exists | |
395 | """ |
|
392 | """ | |
396 | if name in self.tags: |
|
393 | if name in self.tags: | |
397 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
394 | raise TagAlreadyExistError("Tag %s already exists" % name) | |
398 | changeset = self.get_changeset(revision) |
|
395 | changeset = self.get_changeset(revision) | |
399 | message = message or "Added tag %s for commit %s" % (name, |
|
396 | message = message or "Added tag %s for commit %s" % (name, | |
400 | changeset.raw_id) |
|
397 | changeset.raw_id) | |
401 | self._repo.refs[b"refs/tags/%s" % safe_bytes(name)] = changeset._commit.id |
|
398 | self._repo.refs[b"refs/tags/%s" % safe_bytes(name)] = changeset._commit.id | |
402 |
|
399 | |||
403 | self._parsed_refs = self._get_parsed_refs() |
|
400 | self._parsed_refs = self._get_parsed_refs() | |
404 | self.tags = self._get_tags() |
|
401 | self.tags = self._get_tags() | |
405 | return changeset |
|
402 | return changeset | |
406 |
|
403 | |||
407 | def remove_tag(self, name, user, message=None, date=None): |
|
404 | def remove_tag(self, name, user, message=None, date=None): | |
408 | """ |
|
405 | """ | |
409 | Removes tag with the given ``name``. |
|
406 | Removes tag with the given ``name``. | |
410 |
|
407 | |||
411 | :param name: name of the tag to be removed |
|
408 | :param name: name of the tag to be removed | |
412 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
409 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" | |
413 | :param message: message of the tag's removal commit |
|
410 | :param message: message of the tag's removal commit | |
414 | :param date: date of tag's removal commit |
|
411 | :param date: date of tag's removal commit | |
415 |
|
412 | |||
416 | :raises TagDoesNotExistError: if tag with given name does not exists |
|
413 | :raises TagDoesNotExistError: if tag with given name does not exists | |
417 | """ |
|
414 | """ | |
418 | if name not in self.tags: |
|
415 | if name not in self.tags: | |
419 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
416 | raise TagDoesNotExistError("Tag %s does not exist" % name) | |
420 | # self._repo.refs is a DiskRefsContainer, and .path gives the full absolute path of '.git' |
|
417 | # self._repo.refs is a DiskRefsContainer, and .path gives the full absolute path of '.git' | |
421 | tagpath = os.path.join(safe_str(self._repo.refs.path), 'refs', 'tags', name) |
|
418 | tagpath = os.path.join(safe_str(self._repo.refs.path), 'refs', 'tags', name) | |
422 | try: |
|
419 | try: | |
423 | os.remove(tagpath) |
|
420 | os.remove(tagpath) | |
424 | self._parsed_refs = self._get_parsed_refs() |
|
421 | self._parsed_refs = self._get_parsed_refs() | |
425 | self.tags = self._get_tags() |
|
422 | self.tags = self._get_tags() | |
426 | except OSError as e: |
|
423 | except OSError as e: | |
427 | raise RepositoryError(e.strerror) |
|
424 | raise RepositoryError(e.strerror) | |
428 |
|
425 | |||
429 | @LazyProperty |
|
426 | @LazyProperty | |
430 | def bookmarks(self): |
|
427 | def bookmarks(self): | |
431 | """ |
|
428 | """ | |
432 | Gets bookmarks for this repository |
|
429 | Gets bookmarks for this repository | |
433 | """ |
|
430 | """ | |
434 | return {} |
|
431 | return {} | |
435 |
|
432 | |||
436 | @LazyProperty |
|
433 | @LazyProperty | |
437 | def _parsed_refs(self): |
|
434 | def _parsed_refs(self): | |
438 | return self._get_parsed_refs() |
|
435 | return self._get_parsed_refs() | |
439 |
|
436 | |||
440 | def _get_parsed_refs(self): |
|
437 | def _get_parsed_refs(self): | |
441 | """Return refs as a dict, like: |
|
438 | """Return refs as a dict, like: | |
442 | { b'v0.2.0': [b'599ba911aa24d2981225f3966eb659dfae9e9f30', b'T'] } |
|
439 | { b'v0.2.0': [b'599ba911aa24d2981225f3966eb659dfae9e9f30', b'T'] } | |
443 | """ |
|
440 | """ | |
444 | _repo = self._repo |
|
441 | _repo = self._repo | |
445 | refs = _repo.get_refs() |
|
442 | refs = _repo.get_refs() | |
446 | keys = [(b'refs/heads/', b'H'), |
|
443 | keys = [(b'refs/heads/', b'H'), | |
447 | (b'refs/remotes/origin/', b'RH'), |
|
444 | (b'refs/remotes/origin/', b'RH'), | |
448 | (b'refs/tags/', b'T')] |
|
445 | (b'refs/tags/', b'T')] | |
449 | _refs = {} |
|
446 | _refs = {} | |
450 | for ref, sha in refs.items(): |
|
447 | for ref, sha in refs.items(): | |
451 | for k, type_ in keys: |
|
448 | for k, type_ in keys: | |
452 | if ref.startswith(k): |
|
449 | if ref.startswith(k): | |
453 | _key = ref[len(k):] |
|
450 | _key = ref[len(k):] | |
454 | if type_ == b'T': |
|
451 | if type_ == b'T': | |
455 | obj = _repo.get_object(sha) |
|
452 | obj = _repo.get_object(sha) | |
456 | if isinstance(obj, Tag): |
|
453 | if isinstance(obj, Tag): | |
457 | sha = _repo.get_object(sha).object[1] |
|
454 | sha = _repo.get_object(sha).object[1] | |
458 | _refs[_key] = [sha, type_] |
|
455 | _refs[_key] = [sha, type_] | |
459 | break |
|
456 | break | |
460 | return _refs |
|
457 | return _refs | |
461 |
|
458 | |||
462 | def _heads(self, reverse=False): |
|
459 | def _heads(self, reverse=False): | |
463 | refs = self._repo.get_refs() |
|
460 | refs = self._repo.get_refs() | |
464 | heads = {} |
|
461 | heads = {} | |
465 |
|
462 | |||
466 | for key, val in refs.items(): |
|
463 | for key, val in refs.items(): | |
467 | for ref_key in [b'refs/heads/', b'refs/remotes/origin/']: |
|
464 | for ref_key in [b'refs/heads/', b'refs/remotes/origin/']: | |
468 | if key.startswith(ref_key): |
|
465 | if key.startswith(ref_key): | |
469 | n = key[len(ref_key):] |
|
466 | n = key[len(ref_key):] | |
470 | if n not in [b'HEAD']: |
|
467 | if n not in [b'HEAD']: | |
471 | heads[n] = val |
|
468 | heads[n] = val | |
472 |
|
469 | |||
473 | return heads if reverse else dict((y, x) for x, y in heads.items()) |
|
470 | return heads if reverse else dict((y, x) for x, y in heads.items()) | |
474 |
|
471 | |||
475 | def get_changeset(self, revision=None): |
|
472 | def get_changeset(self, revision=None): | |
476 | """ |
|
473 | """ | |
477 | Returns ``GitChangeset`` object representing commit from git repository |
|
474 | Returns ``GitChangeset`` object representing commit from git repository | |
478 | at the given revision or head (most recent commit) if None given. |
|
475 | at the given revision or head (most recent commit) if None given. | |
479 | """ |
|
476 | """ | |
480 | if isinstance(revision, GitChangeset): |
|
477 | if isinstance(revision, GitChangeset): | |
481 | return revision |
|
478 | return revision | |
482 | return GitChangeset(repository=self, revision=self._get_revision(revision)) |
|
479 | return GitChangeset(repository=self, revision=self._get_revision(revision)) | |
483 |
|
480 | |||
484 | def get_changesets(self, start=None, end=None, start_date=None, |
|
481 | def get_changesets(self, start=None, end=None, start_date=None, | |
485 | end_date=None, branch_name=None, reverse=False, max_revisions=None): |
|
482 | end_date=None, branch_name=None, reverse=False, max_revisions=None): | |
486 | """ |
|
483 | """ | |
487 | Returns iterator of ``GitChangeset`` objects from start to end (both |
|
484 | Returns iterator of ``GitChangeset`` objects from start to end (both | |
488 | are inclusive), in ascending date order (unless ``reverse`` is set). |
|
485 | are inclusive), in ascending date order (unless ``reverse`` is set). | |
489 |
|
486 | |||
490 | :param start: changeset ID, as str; first returned changeset |
|
487 | :param start: changeset ID, as str; first returned changeset | |
491 | :param end: changeset ID, as str; last returned changeset |
|
488 | :param end: changeset ID, as str; last returned changeset | |
492 | :param start_date: if specified, changesets with commit date less than |
|
489 | :param start_date: if specified, changesets with commit date less than | |
493 | ``start_date`` would be filtered out from returned set |
|
490 | ``start_date`` would be filtered out from returned set | |
494 | :param end_date: if specified, changesets with commit date greater than |
|
491 | :param end_date: if specified, changesets with commit date greater than | |
495 | ``end_date`` would be filtered out from returned set |
|
492 | ``end_date`` would be filtered out from returned set | |
496 | :param branch_name: if specified, changesets not reachable from given |
|
493 | :param branch_name: if specified, changesets not reachable from given | |
497 | branch would be filtered out from returned set |
|
494 | branch would be filtered out from returned set | |
498 | :param reverse: if ``True``, returned generator would be reversed |
|
495 | :param reverse: if ``True``, returned generator would be reversed | |
499 | (meaning that returned changesets would have descending date order) |
|
496 | (meaning that returned changesets would have descending date order) | |
500 |
|
497 | |||
501 | :raise BranchDoesNotExistError: If given ``branch_name`` does not |
|
498 | :raise BranchDoesNotExistError: If given ``branch_name`` does not | |
502 | exist. |
|
499 | exist. | |
503 | :raise ChangesetDoesNotExistError: If changeset for given ``start`` or |
|
500 | :raise ChangesetDoesNotExistError: If changeset for given ``start`` or | |
504 | ``end`` could not be found. |
|
501 | ``end`` could not be found. | |
505 |
|
502 | |||
506 | """ |
|
503 | """ | |
507 | if branch_name and branch_name not in self.branches: |
|
504 | if branch_name and branch_name not in self.branches: | |
508 | raise BranchDoesNotExistError("Branch '%s' not found" |
|
505 | raise BranchDoesNotExistError("Branch '%s' not found" | |
509 | % branch_name) |
|
506 | % branch_name) | |
510 | # actually we should check now if it's not an empty repo to not spaw |
|
507 | # actually we should check now if it's not an empty repo to not spaw | |
511 | # subprocess commands |
|
508 | # subprocess commands | |
512 | if self._empty: |
|
509 | if self._empty: | |
513 | raise EmptyRepositoryError("There are no changesets yet") |
|
510 | raise EmptyRepositoryError("There are no changesets yet") | |
514 |
|
511 | |||
515 | # %H at format means (full) commit hash, initial hashes are retrieved |
|
512 | # %H at format means (full) commit hash, initial hashes are retrieved | |
516 | # in ascending date order |
|
513 | # in ascending date order | |
517 | cmd = ['log', '--date-order', '--reverse', '--pretty=format:%H'] |
|
514 | cmd = ['log', '--date-order', '--reverse', '--pretty=format:%H'] | |
518 | if max_revisions: |
|
515 | if max_revisions: | |
519 | cmd += ['--max-count=%s' % max_revisions] |
|
516 | cmd += ['--max-count=%s' % max_revisions] | |
520 | if start_date: |
|
517 | if start_date: | |
521 | cmd += ['--since', start_date.strftime('%m/%d/%y %H:%M:%S')] |
|
518 | cmd += ['--since', start_date.strftime('%m/%d/%y %H:%M:%S')] | |
522 | if end_date: |
|
519 | if end_date: | |
523 | cmd += ['--until', end_date.strftime('%m/%d/%y %H:%M:%S')] |
|
520 | cmd += ['--until', end_date.strftime('%m/%d/%y %H:%M:%S')] | |
524 | if branch_name: |
|
521 | if branch_name: | |
525 | cmd.append(branch_name) |
|
522 | cmd.append(branch_name) | |
526 | else: |
|
523 | else: | |
527 | cmd.append(settings.GIT_REV_FILTER) |
|
524 | cmd.append(settings.GIT_REV_FILTER) | |
528 |
|
525 | |||
529 | revs = self.run_git_command(cmd).splitlines() |
|
526 | revs = self.run_git_command(cmd).splitlines() | |
530 | start_pos = 0 |
|
527 | start_pos = 0 | |
531 | end_pos = len(revs) |
|
528 | end_pos = len(revs) | |
532 | if start: |
|
529 | if start: | |
533 | _start = self._get_revision(start) |
|
530 | _start = self._get_revision(start) | |
534 | try: |
|
531 | try: | |
535 | start_pos = revs.index(_start) |
|
532 | start_pos = revs.index(_start) | |
536 | except ValueError: |
|
533 | except ValueError: | |
537 | pass |
|
534 | pass | |
538 |
|
535 | |||
539 | if end is not None: |
|
536 | if end is not None: | |
540 | _end = self._get_revision(end) |
|
537 | _end = self._get_revision(end) | |
541 | try: |
|
538 | try: | |
542 | end_pos = revs.index(_end) |
|
539 | end_pos = revs.index(_end) | |
543 | except ValueError: |
|
540 | except ValueError: | |
544 | pass |
|
541 | pass | |
545 |
|
542 | |||
546 | if None not in [start, end] and start_pos > end_pos: |
|
543 | if None not in [start, end] and start_pos > end_pos: | |
547 | raise RepositoryError('start cannot be after end') |
|
544 | raise RepositoryError('start cannot be after end') | |
548 |
|
545 | |||
549 | if end_pos is not None: |
|
546 | if end_pos is not None: | |
550 | end_pos += 1 |
|
547 | end_pos += 1 | |
551 |
|
548 | |||
552 | revs = revs[start_pos:end_pos] |
|
549 | revs = revs[start_pos:end_pos] | |
553 | if reverse: |
|
550 | if reverse: | |
554 | revs.reverse() |
|
551 | revs.reverse() | |
555 |
|
552 | |||
556 | return CollectionGenerator(self, revs) |
|
553 | return CollectionGenerator(self, revs) | |
557 |
|
554 | |||
558 | def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False, |
|
555 | def get_diff(self, rev1, rev2, path=None, ignore_whitespace=False, | |
559 | context=3): |
|
556 | context=3): | |
560 | """ |
|
557 | """ | |
561 | Returns (git like) *diff*, as plain bytes text. Shows changes |
|
558 | Returns (git like) *diff*, as plain bytes text. Shows changes | |
562 | introduced by ``rev2`` since ``rev1``. |
|
559 | introduced by ``rev2`` since ``rev1``. | |
563 |
|
560 | |||
564 | :param rev1: Entry point from which diff is shown. Can be |
|
561 | :param rev1: Entry point from which diff is shown. Can be | |
565 | ``self.EMPTY_CHANGESET`` - in this case, patch showing all |
|
562 | ``self.EMPTY_CHANGESET`` - in this case, patch showing all | |
566 | the changes since empty state of the repository until ``rev2`` |
|
563 | the changes since empty state of the repository until ``rev2`` | |
567 | :param rev2: Until which revision changes should be shown. |
|
564 | :param rev2: Until which revision changes should be shown. | |
568 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
565 | :param ignore_whitespace: If set to ``True``, would not show whitespace | |
569 | changes. Defaults to ``False``. |
|
566 | changes. Defaults to ``False``. | |
570 | :param context: How many lines before/after changed lines should be |
|
567 | :param context: How many lines before/after changed lines should be | |
571 | shown. Defaults to ``3``. Due to limitations in Git, if |
|
568 | shown. Defaults to ``3``. Due to limitations in Git, if | |
572 | value passed-in is greater than ``2**31-1`` |
|
569 | value passed-in is greater than ``2**31-1`` | |
573 | (``2147483647``), it will be set to ``2147483647`` |
|
570 | (``2147483647``), it will be set to ``2147483647`` | |
574 | instead. If negative value is passed-in, it will be set to |
|
571 | instead. If negative value is passed-in, it will be set to | |
575 | ``0`` instead. |
|
572 | ``0`` instead. | |
576 | """ |
|
573 | """ | |
577 |
|
574 | |||
578 | # Git internally uses a signed long int for storing context |
|
575 | # Git internally uses a signed long int for storing context | |
579 | # size (number of lines to show before and after the |
|
576 | # size (number of lines to show before and after the | |
580 | # differences). This can result in integer overflow, so we |
|
577 | # differences). This can result in integer overflow, so we | |
581 | # ensure the requested context is smaller by one than the |
|
578 | # ensure the requested context is smaller by one than the | |
582 | # number that would cause the overflow. It is highly unlikely |
|
579 | # number that would cause the overflow. It is highly unlikely | |
583 | # that a single file will contain that many lines, so this |
|
580 | # that a single file will contain that many lines, so this | |
584 | # kind of change should not cause any realistic consequences. |
|
581 | # kind of change should not cause any realistic consequences. | |
585 | overflowed_long_int = 2**31 |
|
582 | overflowed_long_int = 2**31 | |
586 |
|
583 | |||
587 | if context >= overflowed_long_int: |
|
584 | if context >= overflowed_long_int: | |
588 | context = overflowed_long_int - 1 |
|
585 | context = overflowed_long_int - 1 | |
589 |
|
586 | |||
590 | # Negative context values make no sense, and will result in |
|
587 | # Negative context values make no sense, and will result in | |
591 | # errors. Ensure this does not happen. |
|
588 | # errors. Ensure this does not happen. | |
592 | if context < 0: |
|
589 | if context < 0: | |
593 | context = 0 |
|
590 | context = 0 | |
594 |
|
591 | |||
595 | flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40'] |
|
592 | flags = ['-U%s' % context, '--full-index', '--binary', '-p', '-M', '--abbrev=40'] | |
596 | if ignore_whitespace: |
|
593 | if ignore_whitespace: | |
597 | flags.append('-w') |
|
594 | flags.append('-w') | |
598 |
|
595 | |||
599 | if hasattr(rev1, 'raw_id'): |
|
596 | if hasattr(rev1, 'raw_id'): | |
600 | rev1 = getattr(rev1, 'raw_id') |
|
597 | rev1 = getattr(rev1, 'raw_id') | |
601 |
|
598 | |||
602 | if hasattr(rev2, 'raw_id'): |
|
599 | if hasattr(rev2, 'raw_id'): | |
603 | rev2 = getattr(rev2, 'raw_id') |
|
600 | rev2 = getattr(rev2, 'raw_id') | |
604 |
|
601 | |||
605 | if rev1 == self.EMPTY_CHANGESET: |
|
602 | if rev1 == self.EMPTY_CHANGESET: | |
606 | rev2 = self.get_changeset(rev2).raw_id |
|
603 | rev2 = self.get_changeset(rev2).raw_id | |
607 | cmd = ['show'] + flags + [rev2] |
|
604 | cmd = ['show'] + flags + [rev2] | |
608 | else: |
|
605 | else: | |
609 | rev1 = self.get_changeset(rev1).raw_id |
|
606 | rev1 = self.get_changeset(rev1).raw_id | |
610 | rev2 = self.get_changeset(rev2).raw_id |
|
607 | rev2 = self.get_changeset(rev2).raw_id | |
611 | cmd = ['diff'] + flags + [rev1, rev2] |
|
608 | cmd = ['diff'] + flags + [rev1, rev2] | |
612 |
|
609 | |||
613 | if path: |
|
610 | if path: | |
614 | cmd += ['--', path] |
|
611 | cmd += ['--', path] | |
615 |
|
612 | |||
616 | stdout, stderr = self._run_git_command(cmd, cwd=self.path) |
|
613 | stdout, stderr = self._run_git_command(cmd, cwd=self.path) | |
617 | # If we used 'show' command, strip first few lines (until actual diff |
|
614 | # If we used 'show' command, strip first few lines (until actual diff | |
618 | # starts) |
|
615 | # starts) | |
619 | if rev1 == self.EMPTY_CHANGESET: |
|
616 | if rev1 == self.EMPTY_CHANGESET: | |
620 | parts = stdout.split(b'\ndiff ', 1) |
|
617 | parts = stdout.split(b'\ndiff ', 1) | |
621 | if len(parts) > 1: |
|
618 | if len(parts) > 1: | |
622 | stdout = b'diff ' + parts[1] |
|
619 | stdout = b'diff ' + parts[1] | |
623 | return stdout |
|
620 | return stdout | |
624 |
|
621 | |||
625 | @LazyProperty |
|
622 | @LazyProperty | |
626 | def in_memory_changeset(self): |
|
623 | def in_memory_changeset(self): | |
627 | """ |
|
624 | """ | |
628 | Returns ``GitInMemoryChangeset`` object for this repository. |
|
625 | Returns ``GitInMemoryChangeset`` object for this repository. | |
629 | """ |
|
626 | """ | |
630 | return GitInMemoryChangeset(self) |
|
627 | return GitInMemoryChangeset(self) | |
631 |
|
628 | |||
632 | def clone(self, url, update_after_clone=True, bare=False): |
|
629 | def clone(self, url, update_after_clone=True, bare=False): | |
633 | """ |
|
630 | """ | |
634 | Tries to clone changes from external location. |
|
631 | Tries to clone changes from external location. | |
635 |
|
632 | |||
636 | :param update_after_clone: If set to ``False``, git won't checkout |
|
633 | :param update_after_clone: If set to ``False``, git won't checkout | |
637 | working directory |
|
634 | working directory | |
638 | :param bare: If set to ``True``, repository would be cloned into |
|
635 | :param bare: If set to ``True``, repository would be cloned into | |
639 | *bare* git repository (no working directory at all). |
|
636 | *bare* git repository (no working directory at all). | |
640 | """ |
|
637 | """ | |
641 | url = self._get_url(url) |
|
638 | url = self._get_url(url) | |
642 | cmd = ['clone', '-q'] |
|
639 | cmd = ['clone', '-q'] | |
643 | if bare: |
|
640 | if bare: | |
644 | cmd.append('--bare') |
|
641 | cmd.append('--bare') | |
645 | elif not update_after_clone: |
|
642 | elif not update_after_clone: | |
646 | cmd.append('--no-checkout') |
|
643 | cmd.append('--no-checkout') | |
647 | cmd += ['--', url, self.path] |
|
644 | cmd += ['--', url, self.path] | |
648 | # If error occurs run_git_command raises RepositoryError already |
|
645 | # If error occurs run_git_command raises RepositoryError already | |
649 | self.run_git_command(cmd) |
|
646 | self.run_git_command(cmd) | |
650 |
|
647 | |||
651 | def pull(self, url): |
|
648 | def pull(self, url): | |
652 | """ |
|
649 | """ | |
653 | Tries to pull changes from external location. |
|
650 | Tries to pull changes from external location. | |
654 | """ |
|
651 | """ | |
655 | url = self._get_url(url) |
|
652 | url = self._get_url(url) | |
656 | cmd = ['pull', '--ff-only', url] |
|
653 | cmd = ['pull', '--ff-only', url] | |
657 | # If error occurs run_git_command raises RepositoryError already |
|
654 | # If error occurs run_git_command raises RepositoryError already | |
658 | self.run_git_command(cmd) |
|
655 | self.run_git_command(cmd) | |
659 |
|
656 | |||
660 | def fetch(self, url): |
|
657 | def fetch(self, url): | |
661 | """ |
|
658 | """ | |
662 | Tries to pull changes from external location. |
|
659 | Tries to pull changes from external location. | |
663 | """ |
|
660 | """ | |
664 | url = self._get_url(url) |
|
661 | url = self._get_url(url) | |
665 | so = self.run_git_command(['ls-remote', '-h', url]) |
|
662 | so = self.run_git_command(['ls-remote', '-h', url]) | |
666 | cmd = ['fetch', url, '--'] |
|
663 | cmd = ['fetch', url, '--'] | |
667 | for line in (x for x in so.splitlines()): |
|
664 | for line in (x for x in so.splitlines()): | |
668 | sha, ref = line.split('\t') |
|
665 | sha, ref = line.split('\t') | |
669 | cmd.append('+%s:%s' % (ref, ref)) |
|
666 | cmd.append('+%s:%s' % (ref, ref)) | |
670 | self.run_git_command(cmd) |
|
667 | self.run_git_command(cmd) | |
671 |
|
668 | |||
672 | def _update_server_info(self): |
|
669 | def _update_server_info(self): | |
673 | """ |
|
670 | """ | |
674 | runs gits update-server-info command in this repo instance |
|
671 | runs gits update-server-info command in this repo instance | |
675 | """ |
|
672 | """ | |
676 | from dulwich.server import update_server_info |
|
673 | from dulwich.server import update_server_info | |
677 | try: |
|
674 | try: | |
678 | update_server_info(self._repo) |
|
675 | update_server_info(self._repo) | |
679 | except OSError as e: |
|
676 | except OSError as e: | |
680 | if e.errno not in [errno.ENOENT, errno.EROFS]: |
|
677 | if e.errno not in [errno.ENOENT, errno.EROFS]: | |
681 | raise |
|
678 | raise | |
682 | # Workaround for dulwich crashing on for example its own dulwich/tests/data/repos/simple_merge.git/info/refs.lock |
|
679 | # Workaround for dulwich crashing on for example its own dulwich/tests/data/repos/simple_merge.git/info/refs.lock | |
683 | log.error('Ignoring %s running update-server-info: %s', type(e).__name__, e) |
|
680 | log.error('Ignoring %s running update-server-info: %s', type(e).__name__, e) | |
684 |
|
681 | |||
685 | @LazyProperty |
|
682 | @LazyProperty | |
686 | def workdir(self): |
|
683 | def workdir(self): | |
687 | """ |
|
684 | """ | |
688 | Returns ``Workdir`` instance for this repository. |
|
685 | Returns ``Workdir`` instance for this repository. | |
689 | """ |
|
686 | """ | |
690 | return GitWorkdir(self) |
|
687 | return GitWorkdir(self) | |
691 |
|
688 | |||
692 | def get_config_value(self, section, name, config_file=None): |
|
689 | def get_config_value(self, section, name, config_file=None): | |
693 | """ |
|
690 | """ | |
694 | Returns configuration value for a given [``section``] and ``name``. |
|
691 | Returns configuration value for a given [``section``] and ``name``. | |
695 |
|
692 | |||
696 | :param section: Section we want to retrieve value from |
|
693 | :param section: Section we want to retrieve value from | |
697 | :param name: Name of configuration we want to retrieve |
|
694 | :param name: Name of configuration we want to retrieve | |
698 | :param config_file: A path to file which should be used to retrieve |
|
695 | :param config_file: A path to file which should be used to retrieve | |
699 | configuration from (might also be a list of file paths) |
|
696 | configuration from (might also be a list of file paths) | |
700 | """ |
|
697 | """ | |
701 | if config_file is None: |
|
698 | if config_file is None: | |
702 | config_file = [] |
|
699 | config_file = [] | |
703 | elif isinstance(config_file, str): |
|
700 | elif isinstance(config_file, str): | |
704 | config_file = [config_file] |
|
701 | config_file = [config_file] | |
705 |
|
702 | |||
706 | def gen_configs(): |
|
703 | def gen_configs(): | |
707 | for path in config_file + self._config_files: |
|
704 | for path in config_file + self._config_files: | |
708 | try: |
|
705 | try: | |
709 | yield ConfigFile.from_path(path) |
|
706 | yield ConfigFile.from_path(path) | |
710 | except (IOError, OSError, ValueError): |
|
707 | except (IOError, OSError, ValueError): | |
711 | continue |
|
708 | continue | |
712 |
|
709 | |||
713 | for config in gen_configs(): |
|
710 | for config in gen_configs(): | |
714 | try: |
|
711 | try: | |
715 | value = config.get(section, name) |
|
712 | value = config.get(section, name) | |
716 | except KeyError: |
|
713 | except KeyError: | |
717 | continue |
|
714 | continue | |
718 | return None if value is None else safe_str(value) |
|
715 | return None if value is None else safe_str(value) | |
719 | return None |
|
716 | return None | |
720 |
|
717 | |||
721 | def get_user_name(self, config_file=None): |
|
718 | def get_user_name(self, config_file=None): | |
722 | """ |
|
719 | """ | |
723 | Returns user's name from global configuration file. |
|
720 | Returns user's name from global configuration file. | |
724 |
|
721 | |||
725 | :param config_file: A path to file which should be used to retrieve |
|
722 | :param config_file: A path to file which should be used to retrieve | |
726 | configuration from (might also be a list of file paths) |
|
723 | configuration from (might also be a list of file paths) | |
727 | """ |
|
724 | """ | |
728 | return self.get_config_value('user', 'name', config_file) |
|
725 | return self.get_config_value('user', 'name', config_file) | |
729 |
|
726 | |||
730 | def get_user_email(self, config_file=None): |
|
727 | def get_user_email(self, config_file=None): | |
731 | """ |
|
728 | """ | |
732 | Returns user's email from global configuration file. |
|
729 | Returns user's email from global configuration file. | |
733 |
|
730 | |||
734 | :param config_file: A path to file which should be used to retrieve |
|
731 | :param config_file: A path to file which should be used to retrieve | |
735 | configuration from (might also be a list of file paths) |
|
732 | configuration from (might also be a list of file paths) | |
736 | """ |
|
733 | """ | |
737 | return self.get_config_value('user', 'email', config_file) |
|
734 | return self.get_config_value('user', 'email', config_file) |
@@ -1,39 +1,40 b'' | |||||
1 | import os |
|
1 | import os | |
2 | import tempfile |
|
2 | import tempfile | |
3 |
|
3 | |||
4 | from kallithea.lib.vcs.utils import aslist |
|
4 | from kallithea.lib.vcs.utils import aslist | |
5 | from kallithea.lib.vcs.utils.paths import get_user_home |
|
5 | from kallithea.lib.vcs.utils.paths import get_user_home | |
6 |
|
6 | |||
7 |
|
7 | |||
8 | abspath = lambda * p: os.path.abspath(os.path.join(*p)) |
|
8 | def abspath(*p): | |
|
9 | return os.path.abspath(os.path.join(*p)) | |||
9 |
|
10 | |||
10 | VCSRC_PATH = os.environ.get('VCSRC_PATH') |
|
11 | VCSRC_PATH = os.environ.get('VCSRC_PATH') | |
11 |
|
12 | |||
12 | if not VCSRC_PATH: |
|
13 | if not VCSRC_PATH: | |
13 | HOME_ = get_user_home() |
|
14 | HOME_ = get_user_home() | |
14 | if not HOME_: |
|
15 | if not HOME_: | |
15 | HOME_ = tempfile.gettempdir() |
|
16 | HOME_ = tempfile.gettempdir() | |
16 |
|
17 | |||
17 | VCSRC_PATH = VCSRC_PATH or abspath(HOME_, '.vcsrc') |
|
18 | VCSRC_PATH = VCSRC_PATH or abspath(HOME_, '.vcsrc') | |
18 | if os.path.isdir(VCSRC_PATH): |
|
19 | if os.path.isdir(VCSRC_PATH): | |
19 | VCSRC_PATH = os.path.join(VCSRC_PATH, '__init__.py') |
|
20 | VCSRC_PATH = os.path.join(VCSRC_PATH, '__init__.py') | |
20 |
|
21 | |||
21 | # list of default encoding used in safe_str/safe_bytes methods |
|
22 | # list of default encoding used in safe_str/safe_bytes methods | |
22 | DEFAULT_ENCODINGS = aslist('utf-8') |
|
23 | DEFAULT_ENCODINGS = aslist('utf-8') | |
23 |
|
24 | |||
24 | # path to git executable run by run_git_command function |
|
25 | # path to git executable run by run_git_command function | |
25 | GIT_EXECUTABLE_PATH = 'git' |
|
26 | GIT_EXECUTABLE_PATH = 'git' | |
26 | # can be also --branches --tags |
|
27 | # can be also --branches --tags | |
27 | GIT_REV_FILTER = '--all' |
|
28 | GIT_REV_FILTER = '--all' | |
28 |
|
29 | |||
29 | BACKENDS = { |
|
30 | BACKENDS = { | |
30 | 'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository', |
|
31 | 'hg': 'kallithea.lib.vcs.backends.hg.MercurialRepository', | |
31 | 'git': 'kallithea.lib.vcs.backends.git.GitRepository', |
|
32 | 'git': 'kallithea.lib.vcs.backends.git.GitRepository', | |
32 | } |
|
33 | } | |
33 |
|
34 | |||
34 | ARCHIVE_SPECS = { |
|
35 | ARCHIVE_SPECS = { | |
35 | 'tar': ('application/x-tar', '.tar'), |
|
36 | 'tar': ('application/x-tar', '.tar'), | |
36 | 'tbz2': ('application/x-bzip2', '.tar.bz2'), |
|
37 | 'tbz2': ('application/x-bzip2', '.tar.bz2'), | |
37 | 'tgz': ('application/x-gzip', '.tar.gz'), |
|
38 | 'tgz': ('application/x-gzip', '.tar.gz'), | |
38 | 'zip': ('application/zip', '.zip'), |
|
39 | 'zip': ('application/zip', '.zip'), | |
39 | } |
|
40 | } |
@@ -1,38 +1,39 b'' | |||||
1 | import os |
|
1 | import os | |
2 |
|
2 | |||
3 |
|
3 | |||
4 | abspath = lambda * p: os.path.abspath(os.path.join(*p)) |
|
4 | def abspath(*p): | |
|
5 | return os.path.abspath(os.path.join(*p)) | |||
5 |
|
6 | |||
6 |
|
7 | |||
7 | def get_dirs_for_path(*paths): |
|
8 | def get_dirs_for_path(*paths): | |
8 | """ |
|
9 | """ | |
9 | Returns list of directories, including intermediate. |
|
10 | Returns list of directories, including intermediate. | |
10 | """ |
|
11 | """ | |
11 | for path in paths: |
|
12 | for path in paths: | |
12 | head = path |
|
13 | head = path | |
13 | while head: |
|
14 | while head: | |
14 | head, _tail = os.path.split(head) |
|
15 | head, _tail = os.path.split(head) | |
15 | if head: |
|
16 | if head: | |
16 | yield head |
|
17 | yield head | |
17 | else: |
|
18 | else: | |
18 | # We don't need to yield empty path |
|
19 | # We don't need to yield empty path | |
19 | break |
|
20 | break | |
20 |
|
21 | |||
21 |
|
22 | |||
22 | def get_dir_size(path): |
|
23 | def get_dir_size(path): | |
23 | root_path = path |
|
24 | root_path = path | |
24 | size = 0 |
|
25 | size = 0 | |
25 | for path, dirs, files in os.walk(root_path): |
|
26 | for path, dirs, files in os.walk(root_path): | |
26 | for f in files: |
|
27 | for f in files: | |
27 | try: |
|
28 | try: | |
28 | size += os.path.getsize(os.path.join(path, f)) |
|
29 | size += os.path.getsize(os.path.join(path, f)) | |
29 | except OSError: |
|
30 | except OSError: | |
30 | pass |
|
31 | pass | |
31 | return size |
|
32 | return size | |
32 |
|
33 | |||
33 |
|
34 | |||
34 | def get_user_home(): |
|
35 | def get_user_home(): | |
35 | """ |
|
36 | """ | |
36 | Returns home path of the user. |
|
37 | Returns home path of the user. | |
37 | """ |
|
38 | """ | |
38 | return os.getenv('HOME', os.getenv('USERPROFILE')) or '' |
|
39 | return os.getenv('HOME', os.getenv('USERPROFILE')) or '' |
@@ -1,725 +1,724 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.model.repo |
|
15 | kallithea.model.repo | |
16 | ~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Repository model for kallithea |
|
18 | Repository model for kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Jun 5, 2010 |
|
22 | :created_on: Jun 5, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 |
|
26 | |||
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 | import logging |
|
29 | import logging | |
30 | import os |
|
30 | import os | |
31 | import shutil |
|
31 | import shutil | |
32 | import traceback |
|
32 | import traceback | |
33 | from datetime import datetime |
|
33 | from datetime import datetime | |
34 |
|
34 | |||
35 | import kallithea.lib.utils2 |
|
35 | import kallithea.lib.utils2 | |
36 | from kallithea.lib import helpers as h |
|
36 | from kallithea.lib import helpers as h | |
37 | from kallithea.lib.auth import HasRepoPermissionLevel, HasUserGroupPermissionLevel |
|
37 | from kallithea.lib.auth import HasRepoPermissionLevel, HasUserGroupPermissionLevel | |
38 | from kallithea.lib.caching_query import FromCache |
|
38 | from kallithea.lib.caching_query import FromCache | |
39 | from kallithea.lib.exceptions import AttachedForksError |
|
39 | from kallithea.lib.exceptions import AttachedForksError | |
40 | from kallithea.lib.hooks import log_delete_repository |
|
40 | from kallithea.lib.hooks import log_delete_repository | |
41 | from kallithea.lib.utils import is_valid_repo_uri, make_ui |
|
41 | from kallithea.lib.utils import is_valid_repo_uri, make_ui | |
42 | from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix |
|
42 | from kallithea.lib.utils2 import LazyProperty, get_current_authuser, obfuscate_url_pw, remove_prefix | |
43 | from kallithea.lib.vcs.backends import get_backend |
|
43 | from kallithea.lib.vcs.backends import get_backend | |
44 | from kallithea.model.db import ( |
|
44 | from kallithea.model.db import ( | |
45 | Permission, RepoGroup, Repository, RepositoryField, Session, Statistics, Ui, User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, UserRepoToPerm) |
|
45 | Permission, RepoGroup, Repository, RepositoryField, Session, Statistics, Ui, User, UserGroup, UserGroupRepoGroupToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, UserRepoToPerm) | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | log = logging.getLogger(__name__) |
|
48 | log = logging.getLogger(__name__) | |
49 |
|
49 | |||
50 |
|
50 | |||
51 | class RepoModel(object): |
|
51 | class RepoModel(object): | |
52 |
|
52 | |||
53 | URL_SEPARATOR = Repository.url_sep() |
|
53 | URL_SEPARATOR = Repository.url_sep() | |
54 |
|
54 | |||
55 | def _create_default_perms(self, repository, private): |
|
55 | def _create_default_perms(self, repository, private): | |
56 | # create default permission |
|
56 | # create default permission | |
57 | default = 'repository.read' |
|
57 | default = 'repository.read' | |
58 | def_user = User.get_default_user() |
|
58 | def_user = User.get_default_user() | |
59 | for p in def_user.user_perms: |
|
59 | for p in def_user.user_perms: | |
60 | if p.permission.permission_name.startswith('repository.'): |
|
60 | if p.permission.permission_name.startswith('repository.'): | |
61 | default = p.permission.permission_name |
|
61 | default = p.permission.permission_name | |
62 | break |
|
62 | break | |
63 |
|
63 | |||
64 | default_perm = 'repository.none' if private else default |
|
64 | default_perm = 'repository.none' if private else default | |
65 |
|
65 | |||
66 | repo_to_perm = UserRepoToPerm() |
|
66 | repo_to_perm = UserRepoToPerm() | |
67 | repo_to_perm.permission = Permission.get_by_key(default_perm) |
|
67 | repo_to_perm.permission = Permission.get_by_key(default_perm) | |
68 |
|
68 | |||
69 | repo_to_perm.repository = repository |
|
69 | repo_to_perm.repository = repository | |
70 | repo_to_perm.user_id = def_user.user_id |
|
70 | repo_to_perm.user_id = def_user.user_id | |
71 | Session().add(repo_to_perm) |
|
71 | Session().add(repo_to_perm) | |
72 |
|
72 | |||
73 | return repo_to_perm |
|
73 | return repo_to_perm | |
74 |
|
74 | |||
75 | @LazyProperty |
|
75 | @LazyProperty | |
76 | def repos_path(self): |
|
76 | def repos_path(self): | |
77 | """ |
|
77 | """ | |
78 | Gets the repositories root path from database |
|
78 | Gets the repositories root path from database | |
79 | """ |
|
79 | """ | |
80 |
|
80 | |||
81 | q = Ui.query().filter(Ui.ui_key == '/').one() |
|
81 | q = Ui.query().filter(Ui.ui_key == '/').one() | |
82 | return q.ui_value |
|
82 | return q.ui_value | |
83 |
|
83 | |||
84 | def get(self, repo_id, cache=False): |
|
84 | def get(self, repo_id, cache=False): | |
85 | repo = Repository.query() \ |
|
85 | repo = Repository.query() \ | |
86 | .filter(Repository.repo_id == repo_id) |
|
86 | .filter(Repository.repo_id == repo_id) | |
87 |
|
87 | |||
88 | if cache: |
|
88 | if cache: | |
89 | repo = repo.options(FromCache("sql_cache_short", |
|
89 | repo = repo.options(FromCache("sql_cache_short", | |
90 | "get_repo_%s" % repo_id)) |
|
90 | "get_repo_%s" % repo_id)) | |
91 | return repo.scalar() |
|
91 | return repo.scalar() | |
92 |
|
92 | |||
93 | def get_repo(self, repository): |
|
93 | def get_repo(self, repository): | |
94 | return Repository.guess_instance(repository) |
|
94 | return Repository.guess_instance(repository) | |
95 |
|
95 | |||
96 | def get_by_repo_name(self, repo_name, cache=False): |
|
96 | def get_by_repo_name(self, repo_name, cache=False): | |
97 | repo = Repository.query() \ |
|
97 | repo = Repository.query() \ | |
98 | .filter(Repository.repo_name == repo_name) |
|
98 | .filter(Repository.repo_name == repo_name) | |
99 |
|
99 | |||
100 | if cache: |
|
100 | if cache: | |
101 | repo = repo.options(FromCache("sql_cache_short", |
|
101 | repo = repo.options(FromCache("sql_cache_short", | |
102 | "get_repo_%s" % repo_name)) |
|
102 | "get_repo_%s" % repo_name)) | |
103 | return repo.scalar() |
|
103 | return repo.scalar() | |
104 |
|
104 | |||
105 | def get_all_user_repos(self, user): |
|
105 | def get_all_user_repos(self, user): | |
106 | """ |
|
106 | """ | |
107 | Gets all repositories that user have at least read access |
|
107 | Gets all repositories that user have at least read access | |
108 |
|
108 | |||
109 | :param user: |
|
109 | :param user: | |
110 | """ |
|
110 | """ | |
111 | from kallithea.lib.auth import AuthUser |
|
111 | from kallithea.lib.auth import AuthUser | |
112 | user = User.guess_instance(user) |
|
112 | auth_user = AuthUser(dbuser=User.guess_instance(user)) | |
113 | repos = AuthUser(dbuser=user).permissions['repositories'] |
|
113 | repos = [repo_name | |
114 | access_check = lambda r: r[1] in ['repository.read', |
|
114 | for repo_name, perm in auth_user.permissions['repositories'].items() | |
115 | 'repository.write', |
|
115 | if perm in ['repository.read', 'repository.write', 'repository.admin'] | |
116 | 'repository.admin'] |
|
116 | ] | |
117 | repos = [x[0] for x in filter(access_check, repos.items())] |
|
|||
118 | return Repository.query().filter(Repository.repo_name.in_(repos)) |
|
117 | return Repository.query().filter(Repository.repo_name.in_(repos)) | |
119 |
|
118 | |||
120 | @classmethod |
|
119 | @classmethod | |
121 | def _render_datatable(cls, tmpl, *args, **kwargs): |
|
120 | def _render_datatable(cls, tmpl, *args, **kwargs): | |
122 | from tg import tmpl_context as c, request, app_globals |
|
121 | from tg import tmpl_context as c, request, app_globals | |
123 | from tg.i18n import ugettext as _ |
|
122 | from tg.i18n import ugettext as _ | |
124 |
|
123 | |||
125 | _tmpl_lookup = app_globals.mako_lookup |
|
124 | _tmpl_lookup = app_globals.mako_lookup | |
126 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') |
|
125 | template = _tmpl_lookup.get_template('data_table/_dt_elements.html') | |
127 |
|
126 | |||
128 | tmpl = template.get_def(tmpl) |
|
127 | tmpl = template.get_def(tmpl) | |
129 | kwargs.update(dict(_=_, h=h, c=c, request=request)) |
|
128 | kwargs.update(dict(_=_, h=h, c=c, request=request)) | |
130 | return tmpl.render_unicode(*args, **kwargs) |
|
129 | return tmpl.render_unicode(*args, **kwargs) | |
131 |
|
130 | |||
    def get_repos_as_dict(self, repos_list, repo_groups_list=None,
                          admin=False,
                          short_name=False):
        """Return repository list for use by DataTable.

        repos_list: list of repositories - but will be filtered for read permission.
        repo_groups_list: added at top of list without permission check.
        admin: return data for action column.
        short_name: render repository names without their group path prefix.
        """
        _render = self._render_datatable
        from tg import tmpl_context as c, request
        from kallithea.model.scm import ScmModel

        # Cell renderers - each delegates to a template def in
        # data_table/_dt_elements.html via _render_datatable.
        def repo_lnk(name, rtype, rstate, private, fork_of):
            return _render('repo_name', name, rtype, rstate, private, fork_of,
                           short_name=short_name)

        def following(repo_id, is_following):
            return _render('following', repo_id, is_following)

        def last_change(last_change):
            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            # cs_cache is a dict-like changeset cache; missing keys render as None
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'))

        def desc(desc):
            return h.urlify_text(desc, truncate=80, stylize=c.visual.stylify_metalabels)

        def state(repo_state):
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            return _render('repo_actions', repo_name)

        def owner_actions(owner_id, username):
            return _render('user_name', owner_id, username)

        repos_data = []

        # Repository groups go first and are NOT permission checked here;
        # the caller is responsible for filtering repo_groups_list.
        for gr in repo_groups_list or []:
            repos_data.append(dict(
                raw_name='\0' + gr.name,  # sort before repositories
                just_name=gr.name,
                name=_render('group_name_html', group_name=gr.group_name, name=gr.name),
                desc=gr.group_description))

        for repo in repos_list:
            # skip repositories the current user may not read
            if not HasRepoPermissionLevel('read')(repo.repo_name, 'get_repos_as_dict check'):
                continue
            cs_cache = repo.changeset_cache
            row = {
                "raw_name": repo.repo_name,
                "just_name": repo.just_name,
                "name": repo_lnk(repo.repo_name, repo.repo_type,
                                 repo.repo_state, repo.private, repo.fork),
                "following": following(
                    repo.repo_id,
                    ScmModel().is_following_repo(repo.repo_name, request.authuser.user_id),
                ),
                "last_change_iso": repo.last_db_change.isoformat(),
                "last_change": last_change(repo.last_db_change),
                "last_changeset": last_rev(repo.repo_name, cs_cache),
                "last_rev_raw": cs_cache.get('revision'),
                "desc": desc(repo.description),
                "owner": h.person(repo.owner),
                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),
                "atom": atom_lnk(repo.repo_name),
            }
            if admin:
                # admin view replaces the plain owner cell and adds actions
                row.update({
                    "action": repo_actions(repo.repo_name),
                    "owner": owner_actions(repo.owner_id,
                                           h.person(repo.owner))
                })
            repos_data.append(row)

        return {
            "sort": "name",
            "dir": "asc",
            "records": repos_data
        }
222 |
|
221 | |||
223 | def _get_defaults(self, repo_name): |
|
222 | def _get_defaults(self, repo_name): | |
224 | """ |
|
223 | """ | |
225 | Gets information about repository, and returns a dict for |
|
224 | Gets information about repository, and returns a dict for | |
226 | usage in forms |
|
225 | usage in forms | |
227 |
|
226 | |||
228 | :param repo_name: |
|
227 | :param repo_name: | |
229 | """ |
|
228 | """ | |
230 |
|
229 | |||
231 | repo_info = Repository.get_by_repo_name(repo_name) |
|
230 | repo_info = Repository.get_by_repo_name(repo_name) | |
232 |
|
231 | |||
233 | if repo_info is None: |
|
232 | if repo_info is None: | |
234 | return None |
|
233 | return None | |
235 |
|
234 | |||
236 | defaults = repo_info.get_dict() |
|
235 | defaults = repo_info.get_dict() | |
237 | defaults['repo_name'] = repo_info.just_name |
|
236 | defaults['repo_name'] = repo_info.just_name | |
238 | defaults['repo_group'] = repo_info.group_id |
|
237 | defaults['repo_group'] = repo_info.group_id | |
239 |
|
238 | |||
240 | for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'), |
|
239 | for strip, k in [(0, 'repo_type'), (1, 'repo_enable_downloads'), | |
241 | (1, 'repo_description'), |
|
240 | (1, 'repo_description'), | |
242 | (1, 'repo_landing_rev'), (0, 'clone_uri'), |
|
241 | (1, 'repo_landing_rev'), (0, 'clone_uri'), | |
243 | (1, 'repo_private'), (1, 'repo_enable_statistics')]: |
|
242 | (1, 'repo_private'), (1, 'repo_enable_statistics')]: | |
244 | attr = k |
|
243 | attr = k | |
245 | if strip: |
|
244 | if strip: | |
246 | attr = remove_prefix(k, 'repo_') |
|
245 | attr = remove_prefix(k, 'repo_') | |
247 |
|
246 | |||
248 | val = defaults[attr] |
|
247 | val = defaults[attr] | |
249 | if k == 'repo_landing_rev': |
|
248 | if k == 'repo_landing_rev': | |
250 | val = ':'.join(defaults[attr]) |
|
249 | val = ':'.join(defaults[attr]) | |
251 | defaults[k] = val |
|
250 | defaults[k] = val | |
252 | if k == 'clone_uri': |
|
251 | if k == 'clone_uri': | |
253 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
252 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden | |
254 |
|
253 | |||
255 | # fill owner |
|
254 | # fill owner | |
256 | if repo_info.owner: |
|
255 | if repo_info.owner: | |
257 | defaults.update({'owner': repo_info.owner.username}) |
|
256 | defaults.update({'owner': repo_info.owner.username}) | |
258 | else: |
|
257 | else: | |
259 | replacement_user = User.query().filter(User.admin == |
|
258 | replacement_user = User.query().filter(User.admin == | |
260 | True).first().username |
|
259 | True).first().username | |
261 | defaults.update({'owner': replacement_user}) |
|
260 | defaults.update({'owner': replacement_user}) | |
262 |
|
261 | |||
263 | # fill repository users |
|
262 | # fill repository users | |
264 | for p in repo_info.repo_to_perm: |
|
263 | for p in repo_info.repo_to_perm: | |
265 | defaults.update({'u_perm_%s' % p.user.username: |
|
264 | defaults.update({'u_perm_%s' % p.user.username: | |
266 | p.permission.permission_name}) |
|
265 | p.permission.permission_name}) | |
267 |
|
266 | |||
268 | # fill repository groups |
|
267 | # fill repository groups | |
269 | for p in repo_info.users_group_to_perm: |
|
268 | for p in repo_info.users_group_to_perm: | |
270 | defaults.update({'g_perm_%s' % p.users_group.users_group_name: |
|
269 | defaults.update({'g_perm_%s' % p.users_group.users_group_name: | |
271 | p.permission.permission_name}) |
|
270 | p.permission.permission_name}) | |
272 |
|
271 | |||
273 | return defaults |
|
272 | return defaults | |
274 |
|
273 | |||
    def update(self, repo, **kwargs):
        """
        Update an existing repository from form-style keyword arguments and
        return the updated Repository instance.

        Recognized keys: 'owner', 'repo_group', 'repo_name', 'clone_uri',
        the repo_* attribute fields, and RepositoryField-prefixed extra
        fields. When the fully qualified name changes (via 'repo_group' or
        'repo_name') the repository is also renamed on the filesystem.
        Exceptions are logged and re-raised.

        :param repo: Instance of Repository, repository_id, or repository name
        """
        try:
            cur_repo = Repository.guess_instance(repo)
            # remember original name so we can detect a rename at the end
            org_repo_name = cur_repo.repo_name
            if 'owner' in kwargs:
                cur_repo.owner = User.get_by_username(kwargs['owner'])

            if 'repo_group' in kwargs:
                assert kwargs['repo_group'] != '-1', kwargs  # RepoForm should have converted to None
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
                # recompute full name under the (possibly new) group
                cur_repo.repo_name = cur_repo.get_new_name(cur_repo.just_name)
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
            for k in ['repo_enable_downloads',
                      'repo_description',
                      'repo_landing_rev',
                      'repo_private',
                      'repo_enable_statistics',
                      ]:
                if k in kwargs:
                    # model attribute names lack the 'repo_' form prefix
                    setattr(cur_repo, remove_prefix(k, 'repo_'), kwargs[k])
            clone_uri = kwargs.get('clone_uri')
            if clone_uri is not None and clone_uri != cur_repo.clone_uri_hidden:
                # clone_uri is modified - if given a value, check it is valid
                if clone_uri != '':
                    # will raise exception on error
                    is_valid_repo_uri(cur_repo.repo_type, clone_uri, make_ui())
                cur_repo.clone_uri = clone_uri

            if 'repo_name' in kwargs:
                repo_name = kwargs['repo_name']
                if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name:
                    raise Exception('invalid repo name %s' % repo_name)
                cur_repo.repo_name = cur_repo.get_new_name(repo_name)

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user='default', perm=EMPTY_PERM
                )
            # handle extra fields
            for field in [k for k in kwargs if k.startswith(RepositoryField.PREFIX)]:
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]

            if org_repo_name != cur_repo.repo_name:
                # rename repository
                self._rename_filesystem_repo(old=org_repo_name, new=cur_repo.repo_name)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise
330 |
|
329 | |||
331 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
330 | def _create_repo(self, repo_name, repo_type, description, owner, | |
332 | private=False, clone_uri=None, repo_group=None, |
|
331 | private=False, clone_uri=None, repo_group=None, | |
333 | landing_rev='rev:tip', fork_of=None, |
|
332 | landing_rev='rev:tip', fork_of=None, | |
334 | copy_fork_permissions=False, enable_statistics=False, |
|
333 | copy_fork_permissions=False, enable_statistics=False, | |
335 | enable_downloads=False, |
|
334 | enable_downloads=False, | |
336 | copy_group_permissions=False, state=Repository.STATE_PENDING): |
|
335 | copy_group_permissions=False, state=Repository.STATE_PENDING): | |
337 | """ |
|
336 | """ | |
338 | Create repository inside database with PENDING state. This should only be |
|
337 | Create repository inside database with PENDING state. This should only be | |
339 | executed by create() repo, with exception of importing existing repos. |
|
338 | executed by create() repo, with exception of importing existing repos. | |
340 |
|
339 | |||
341 | """ |
|
340 | """ | |
342 | from kallithea.model.scm import ScmModel |
|
341 | from kallithea.model.scm import ScmModel | |
343 |
|
342 | |||
344 | owner = User.guess_instance(owner) |
|
343 | owner = User.guess_instance(owner) | |
345 | fork_of = Repository.guess_instance(fork_of) |
|
344 | fork_of = Repository.guess_instance(fork_of) | |
346 | repo_group = RepoGroup.guess_instance(repo_group) |
|
345 | repo_group = RepoGroup.guess_instance(repo_group) | |
347 | try: |
|
346 | try: | |
348 | repo_name = repo_name |
|
347 | repo_name = repo_name | |
349 | description = description |
|
348 | description = description | |
350 | # repo name is just a name of repository |
|
349 | # repo name is just a name of repository | |
351 | # while repo_name_full is a full qualified name that is combined |
|
350 | # while repo_name_full is a full qualified name that is combined | |
352 | # with name and path of group |
|
351 | # with name and path of group | |
353 | repo_name_full = repo_name |
|
352 | repo_name_full = repo_name | |
354 | repo_name = repo_name.split(self.URL_SEPARATOR)[-1] |
|
353 | repo_name = repo_name.split(self.URL_SEPARATOR)[-1] | |
355 | if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name: |
|
354 | if kallithea.lib.utils2.repo_name_slug(repo_name) != repo_name: | |
356 | raise Exception('invalid repo name %s' % repo_name) |
|
355 | raise Exception('invalid repo name %s' % repo_name) | |
357 |
|
356 | |||
358 | new_repo = Repository() |
|
357 | new_repo = Repository() | |
359 | new_repo.repo_state = state |
|
358 | new_repo.repo_state = state | |
360 | new_repo.enable_statistics = False |
|
359 | new_repo.enable_statistics = False | |
361 | new_repo.repo_name = repo_name_full |
|
360 | new_repo.repo_name = repo_name_full | |
362 | new_repo.repo_type = repo_type |
|
361 | new_repo.repo_type = repo_type | |
363 | new_repo.owner = owner |
|
362 | new_repo.owner = owner | |
364 | new_repo.group = repo_group |
|
363 | new_repo.group = repo_group | |
365 | new_repo.description = description or repo_name |
|
364 | new_repo.description = description or repo_name | |
366 | new_repo.private = private |
|
365 | new_repo.private = private | |
367 | if clone_uri: |
|
366 | if clone_uri: | |
368 | # will raise exception on error |
|
367 | # will raise exception on error | |
369 | is_valid_repo_uri(repo_type, clone_uri, make_ui()) |
|
368 | is_valid_repo_uri(repo_type, clone_uri, make_ui()) | |
370 | new_repo.clone_uri = clone_uri |
|
369 | new_repo.clone_uri = clone_uri | |
371 | new_repo.landing_rev = landing_rev |
|
370 | new_repo.landing_rev = landing_rev | |
372 |
|
371 | |||
373 | new_repo.enable_statistics = enable_statistics |
|
372 | new_repo.enable_statistics = enable_statistics | |
374 | new_repo.enable_downloads = enable_downloads |
|
373 | new_repo.enable_downloads = enable_downloads | |
375 |
|
374 | |||
376 | if fork_of: |
|
375 | if fork_of: | |
377 | parent_repo = fork_of |
|
376 | parent_repo = fork_of | |
378 | new_repo.fork = parent_repo |
|
377 | new_repo.fork = parent_repo | |
379 |
|
378 | |||
380 | Session().add(new_repo) |
|
379 | Session().add(new_repo) | |
381 |
|
380 | |||
382 | if fork_of and copy_fork_permissions: |
|
381 | if fork_of and copy_fork_permissions: | |
383 | repo = fork_of |
|
382 | repo = fork_of | |
384 | user_perms = UserRepoToPerm.query() \ |
|
383 | user_perms = UserRepoToPerm.query() \ | |
385 | .filter(UserRepoToPerm.repository == repo).all() |
|
384 | .filter(UserRepoToPerm.repository == repo).all() | |
386 | group_perms = UserGroupRepoToPerm.query() \ |
|
385 | group_perms = UserGroupRepoToPerm.query() \ | |
387 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
386 | .filter(UserGroupRepoToPerm.repository == repo).all() | |
388 |
|
387 | |||
389 | for perm in user_perms: |
|
388 | for perm in user_perms: | |
390 | UserRepoToPerm.create(perm.user, new_repo, perm.permission) |
|
389 | UserRepoToPerm.create(perm.user, new_repo, perm.permission) | |
391 |
|
390 | |||
392 | for perm in group_perms: |
|
391 | for perm in group_perms: | |
393 | UserGroupRepoToPerm.create(perm.users_group, new_repo, |
|
392 | UserGroupRepoToPerm.create(perm.users_group, new_repo, | |
394 | perm.permission) |
|
393 | perm.permission) | |
395 |
|
394 | |||
396 | elif repo_group and copy_group_permissions: |
|
395 | elif repo_group and copy_group_permissions: | |
397 |
|
396 | |||
398 | user_perms = UserRepoGroupToPerm.query() \ |
|
397 | user_perms = UserRepoGroupToPerm.query() \ | |
399 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
398 | .filter(UserRepoGroupToPerm.group == repo_group).all() | |
400 |
|
399 | |||
401 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
400 | group_perms = UserGroupRepoGroupToPerm.query() \ | |
402 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
401 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() | |
403 |
|
402 | |||
404 | for perm in user_perms: |
|
403 | for perm in user_perms: | |
405 | perm_name = perm.permission.permission_name.replace('group.', 'repository.') |
|
404 | perm_name = perm.permission.permission_name.replace('group.', 'repository.') | |
406 | perm_obj = Permission.get_by_key(perm_name) |
|
405 | perm_obj = Permission.get_by_key(perm_name) | |
407 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
406 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) | |
408 |
|
407 | |||
409 | for perm in group_perms: |
|
408 | for perm in group_perms: | |
410 | perm_name = perm.permission.permission_name.replace('group.', 'repository.') |
|
409 | perm_name = perm.permission.permission_name.replace('group.', 'repository.') | |
411 | perm_obj = Permission.get_by_key(perm_name) |
|
410 | perm_obj = Permission.get_by_key(perm_name) | |
412 | UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj) |
|
411 | UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj) | |
413 |
|
412 | |||
414 | else: |
|
413 | else: | |
415 | self._create_default_perms(new_repo, private) |
|
414 | self._create_default_perms(new_repo, private) | |
416 |
|
415 | |||
417 | # now automatically start following this repository as owner |
|
416 | # now automatically start following this repository as owner | |
418 | ScmModel().toggle_following_repo(new_repo.repo_id, owner.user_id) |
|
417 | ScmModel().toggle_following_repo(new_repo.repo_id, owner.user_id) | |
419 | # we need to flush here, in order to check if database won't |
|
418 | # we need to flush here, in order to check if database won't | |
420 | # throw any exceptions, create filesystem dirs at the very end |
|
419 | # throw any exceptions, create filesystem dirs at the very end | |
421 | Session().flush() |
|
420 | Session().flush() | |
422 | return new_repo |
|
421 | return new_repo | |
423 | except Exception: |
|
422 | except Exception: | |
424 | log.error(traceback.format_exc()) |
|
423 | log.error(traceback.format_exc()) | |
425 | raise |
|
424 | raise | |
426 |
|
425 | |||
427 | def create(self, form_data, cur_user): |
|
426 | def create(self, form_data, cur_user): | |
428 | """ |
|
427 | """ | |
429 | Create repository using celery tasks |
|
428 | Create repository using celery tasks | |
430 |
|
429 | |||
431 | :param form_data: |
|
430 | :param form_data: | |
432 | :param cur_user: |
|
431 | :param cur_user: | |
433 | """ |
|
432 | """ | |
434 | from kallithea.lib.celerylib import tasks |
|
433 | from kallithea.lib.celerylib import tasks | |
435 | return tasks.create_repo(form_data, cur_user) |
|
434 | return tasks.create_repo(form_data, cur_user) | |
436 |
|
435 | |||
437 | def _update_permissions(self, repo, perms_new=None, perms_updates=None, |
|
436 | def _update_permissions(self, repo, perms_new=None, perms_updates=None, | |
438 | check_perms=True): |
|
437 | check_perms=True): | |
439 | if not perms_new: |
|
438 | if not perms_new: | |
440 | perms_new = [] |
|
439 | perms_new = [] | |
441 | if not perms_updates: |
|
440 | if not perms_updates: | |
442 | perms_updates = [] |
|
441 | perms_updates = [] | |
443 |
|
442 | |||
444 | # update permissions |
|
443 | # update permissions | |
445 | for member, perm, member_type in perms_updates: |
|
444 | for member, perm, member_type in perms_updates: | |
446 | if member_type == 'user': |
|
445 | if member_type == 'user': | |
447 | # this updates existing one |
|
446 | # this updates existing one | |
448 | self.grant_user_permission( |
|
447 | self.grant_user_permission( | |
449 | repo=repo, user=member, perm=perm |
|
448 | repo=repo, user=member, perm=perm | |
450 | ) |
|
449 | ) | |
451 | else: |
|
450 | else: | |
452 | # check if we have permissions to alter this usergroup's access |
|
451 | # check if we have permissions to alter this usergroup's access | |
453 | if not check_perms or HasUserGroupPermissionLevel('read')(member): |
|
452 | if not check_perms or HasUserGroupPermissionLevel('read')(member): | |
454 | self.grant_user_group_permission( |
|
453 | self.grant_user_group_permission( | |
455 | repo=repo, group_name=member, perm=perm |
|
454 | repo=repo, group_name=member, perm=perm | |
456 | ) |
|
455 | ) | |
457 | # set new permissions |
|
456 | # set new permissions | |
458 | for member, perm, member_type in perms_new: |
|
457 | for member, perm, member_type in perms_new: | |
459 | if member_type == 'user': |
|
458 | if member_type == 'user': | |
460 | self.grant_user_permission( |
|
459 | self.grant_user_permission( | |
461 | repo=repo, user=member, perm=perm |
|
460 | repo=repo, user=member, perm=perm | |
462 | ) |
|
461 | ) | |
463 | else: |
|
462 | else: | |
464 | # check if we have permissions to alter this usergroup's access |
|
463 | # check if we have permissions to alter this usergroup's access | |
465 | if not check_perms or HasUserGroupPermissionLevel('read')(member): |
|
464 | if not check_perms or HasUserGroupPermissionLevel('read')(member): | |
466 | self.grant_user_group_permission( |
|
465 | self.grant_user_group_permission( | |
467 | repo=repo, group_name=member, perm=perm |
|
466 | repo=repo, group_name=member, perm=perm | |
468 | ) |
|
467 | ) | |
469 |
|
468 | |||
470 | def create_fork(self, form_data, cur_user): |
|
469 | def create_fork(self, form_data, cur_user): | |
471 | """ |
|
470 | """ | |
472 | Simple wrapper into executing celery task for fork creation |
|
471 | Simple wrapper into executing celery task for fork creation | |
473 |
|
472 | |||
474 | :param form_data: |
|
473 | :param form_data: | |
475 | :param cur_user: |
|
474 | :param cur_user: | |
476 | """ |
|
475 | """ | |
477 | from kallithea.lib.celerylib import tasks |
|
476 | from kallithea.lib.celerylib import tasks | |
478 | return tasks.create_repo_fork(form_data, cur_user) |
|
477 | return tasks.create_repo_fork(form_data, cur_user) | |
479 |
|
478 | |||
    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
        """
        Delete given repository, forks parameter defines what do do with
        attached forks. Throws AttachedForksError if deleted repo has attached
        forks

        :param repo: Instance of Repository, repository_id, or repository name
        :param forks: str 'delete' or 'detach'
        :param fs_remove: remove(archive) repo from filesystem
        :param cur_user: username recorded in the deletion journal entry;
            defaults to the currently authenticated user's name
        """
        if not cur_user:
            cur_user = getattr(get_current_authuser(), 'username', None)
        repo = Repository.guess_instance(repo)
        if repo is not None:
            if forks == 'detach':
                # keep forks alive as independent repositories
                for r in repo.forks:
                    r.fork = None
            elif forks == 'delete':
                # recursively delete every fork as well
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                # forks exist but no strategy was given - refuse to delete
                raise AttachedForksError()

            # snapshot repo data before deletion, for the journal entry
            old_repo_dict = repo.get_dict()
            try:
                Session().delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                log_delete_repository(old_repo_dict,
                                      deleted_by=cur_user)
            except Exception:
                log.error(traceback.format_exc())
                raise
515 |
|
514 | |||
516 | def grant_user_permission(self, repo, user, perm): |
|
515 | def grant_user_permission(self, repo, user, perm): | |
517 | """ |
|
516 | """ | |
518 | Grant permission for user on given repository, or update existing one |
|
517 | Grant permission for user on given repository, or update existing one | |
519 | if found |
|
518 | if found | |
520 |
|
519 | |||
521 | :param repo: Instance of Repository, repository_id, or repository name |
|
520 | :param repo: Instance of Repository, repository_id, or repository name | |
522 | :param user: Instance of User, user_id or username |
|
521 | :param user: Instance of User, user_id or username | |
523 | :param perm: Instance of Permission, or permission_name |
|
522 | :param perm: Instance of Permission, or permission_name | |
524 | """ |
|
523 | """ | |
525 | user = User.guess_instance(user) |
|
524 | user = User.guess_instance(user) | |
526 | repo = Repository.guess_instance(repo) |
|
525 | repo = Repository.guess_instance(repo) | |
527 | permission = Permission.guess_instance(perm) |
|
526 | permission = Permission.guess_instance(perm) | |
528 |
|
527 | |||
529 | # check if we have that permission already |
|
528 | # check if we have that permission already | |
530 | obj = UserRepoToPerm.query() \ |
|
529 | obj = UserRepoToPerm.query() \ | |
531 | .filter(UserRepoToPerm.user == user) \ |
|
530 | .filter(UserRepoToPerm.user == user) \ | |
532 | .filter(UserRepoToPerm.repository == repo) \ |
|
531 | .filter(UserRepoToPerm.repository == repo) \ | |
533 | .scalar() |
|
532 | .scalar() | |
534 | if obj is None: |
|
533 | if obj is None: | |
535 | # create new ! |
|
534 | # create new ! | |
536 | obj = UserRepoToPerm() |
|
535 | obj = UserRepoToPerm() | |
537 | Session().add(obj) |
|
536 | Session().add(obj) | |
538 | obj.repository = repo |
|
537 | obj.repository = repo | |
539 | obj.user = user |
|
538 | obj.user = user | |
540 | obj.permission = permission |
|
539 | obj.permission = permission | |
541 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
540 | log.debug('Granted perm %s to %s on %s', perm, user, repo) | |
542 | return obj |
|
541 | return obj | |
543 |
|
542 | |||
544 | def revoke_user_permission(self, repo, user): |
|
543 | def revoke_user_permission(self, repo, user): | |
545 | """ |
|
544 | """ | |
546 | Revoke permission for user on given repository |
|
545 | Revoke permission for user on given repository | |
547 |
|
546 | |||
548 | :param repo: Instance of Repository, repository_id, or repository name |
|
547 | :param repo: Instance of Repository, repository_id, or repository name | |
549 | :param user: Instance of User, user_id or username |
|
548 | :param user: Instance of User, user_id or username | |
550 | """ |
|
549 | """ | |
551 |
|
550 | |||
552 | user = User.guess_instance(user) |
|
551 | user = User.guess_instance(user) | |
553 | repo = Repository.guess_instance(repo) |
|
552 | repo = Repository.guess_instance(repo) | |
554 |
|
553 | |||
555 | obj = UserRepoToPerm.query() \ |
|
554 | obj = UserRepoToPerm.query() \ | |
556 | .filter(UserRepoToPerm.repository == repo) \ |
|
555 | .filter(UserRepoToPerm.repository == repo) \ | |
557 | .filter(UserRepoToPerm.user == user) \ |
|
556 | .filter(UserRepoToPerm.user == user) \ | |
558 | .scalar() |
|
557 | .scalar() | |
559 | if obj is not None: |
|
558 | if obj is not None: | |
560 | Session().delete(obj) |
|
559 | Session().delete(obj) | |
561 | log.debug('Revoked perm on %s on %s', repo, user) |
|
560 | log.debug('Revoked perm on %s on %s', repo, user) | |
562 |
|
561 | |||
563 | def grant_user_group_permission(self, repo, group_name, perm): |
|
562 | def grant_user_group_permission(self, repo, group_name, perm): | |
564 | """ |
|
563 | """ | |
565 | Grant permission for user group on given repository, or update |
|
564 | Grant permission for user group on given repository, or update | |
566 | existing one if found |
|
565 | existing one if found | |
567 |
|
566 | |||
568 | :param repo: Instance of Repository, repository_id, or repository name |
|
567 | :param repo: Instance of Repository, repository_id, or repository name | |
569 | :param group_name: Instance of UserGroup, users_group_id, |
|
568 | :param group_name: Instance of UserGroup, users_group_id, | |
570 | or user group name |
|
569 | or user group name | |
571 | :param perm: Instance of Permission, or permission_name |
|
570 | :param perm: Instance of Permission, or permission_name | |
572 | """ |
|
571 | """ | |
573 | repo = Repository.guess_instance(repo) |
|
572 | repo = Repository.guess_instance(repo) | |
574 | group_name = UserGroup.guess_instance(group_name) |
|
573 | group_name = UserGroup.guess_instance(group_name) | |
575 | permission = Permission.guess_instance(perm) |
|
574 | permission = Permission.guess_instance(perm) | |
576 |
|
575 | |||
577 | # check if we have that permission already |
|
576 | # check if we have that permission already | |
578 | obj = UserGroupRepoToPerm.query() \ |
|
577 | obj = UserGroupRepoToPerm.query() \ | |
579 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
578 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
580 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
579 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
581 | .scalar() |
|
580 | .scalar() | |
582 |
|
581 | |||
583 | if obj is None: |
|
582 | if obj is None: | |
584 | # create new |
|
583 | # create new | |
585 | obj = UserGroupRepoToPerm() |
|
584 | obj = UserGroupRepoToPerm() | |
586 | Session().add(obj) |
|
585 | Session().add(obj) | |
587 |
|
586 | |||
588 | obj.repository = repo |
|
587 | obj.repository = repo | |
589 | obj.users_group = group_name |
|
588 | obj.users_group = group_name | |
590 | obj.permission = permission |
|
589 | obj.permission = permission | |
591 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
590 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) | |
592 | return obj |
|
591 | return obj | |
593 |
|
592 | |||
594 | def revoke_user_group_permission(self, repo, group_name): |
|
593 | def revoke_user_group_permission(self, repo, group_name): | |
595 | """ |
|
594 | """ | |
596 | Revoke permission for user group on given repository |
|
595 | Revoke permission for user group on given repository | |
597 |
|
596 | |||
598 | :param repo: Instance of Repository, repository_id, or repository name |
|
597 | :param repo: Instance of Repository, repository_id, or repository name | |
599 | :param group_name: Instance of UserGroup, users_group_id, |
|
598 | :param group_name: Instance of UserGroup, users_group_id, | |
600 | or user group name |
|
599 | or user group name | |
601 | """ |
|
600 | """ | |
602 | repo = Repository.guess_instance(repo) |
|
601 | repo = Repository.guess_instance(repo) | |
603 | group_name = UserGroup.guess_instance(group_name) |
|
602 | group_name = UserGroup.guess_instance(group_name) | |
604 |
|
603 | |||
605 | obj = UserGroupRepoToPerm.query() \ |
|
604 | obj = UserGroupRepoToPerm.query() \ | |
606 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
605 | .filter(UserGroupRepoToPerm.repository == repo) \ | |
607 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
606 | .filter(UserGroupRepoToPerm.users_group == group_name) \ | |
608 | .scalar() |
|
607 | .scalar() | |
609 | if obj is not None: |
|
608 | if obj is not None: | |
610 | Session().delete(obj) |
|
609 | Session().delete(obj) | |
611 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
610 | log.debug('Revoked perm to %s on %s', repo, group_name) | |
612 |
|
611 | |||
613 | def delete_stats(self, repo_name): |
|
612 | def delete_stats(self, repo_name): | |
614 | """ |
|
613 | """ | |
615 | removes stats for given repo |
|
614 | removes stats for given repo | |
616 |
|
615 | |||
617 | :param repo_name: |
|
616 | :param repo_name: | |
618 | """ |
|
617 | """ | |
619 | repo = Repository.guess_instance(repo_name) |
|
618 | repo = Repository.guess_instance(repo_name) | |
620 | try: |
|
619 | try: | |
621 | obj = Statistics.query() \ |
|
620 | obj = Statistics.query() \ | |
622 | .filter(Statistics.repository == repo).scalar() |
|
621 | .filter(Statistics.repository == repo).scalar() | |
623 | if obj is not None: |
|
622 | if obj is not None: | |
624 | Session().delete(obj) |
|
623 | Session().delete(obj) | |
625 | except Exception: |
|
624 | except Exception: | |
626 | log.error(traceback.format_exc()) |
|
625 | log.error(traceback.format_exc()) | |
627 | raise |
|
626 | raise | |
628 |
|
627 | |||
629 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
628 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, | |
630 | clone_uri=None, repo_store_location=None): |
|
629 | clone_uri=None, repo_store_location=None): | |
631 | """ |
|
630 | """ | |
632 | Makes repository on filesystem. Operation is group aware, meaning that it will create |
|
631 | Makes repository on filesystem. Operation is group aware, meaning that it will create | |
633 | a repository within a group, and alter the paths accordingly to the group location. |
|
632 | a repository within a group, and alter the paths accordingly to the group location. | |
634 |
|
633 | |||
635 | Note: clone_uri is low level and not validated - it might be a file system path used for validated cloning |
|
634 | Note: clone_uri is low level and not validated - it might be a file system path used for validated cloning | |
636 | """ |
|
635 | """ | |
637 | from kallithea.lib.utils import is_valid_repo, is_valid_repo_group |
|
636 | from kallithea.lib.utils import is_valid_repo, is_valid_repo_group | |
638 | from kallithea.model.scm import ScmModel |
|
637 | from kallithea.model.scm import ScmModel | |
639 |
|
638 | |||
640 | if '/' in repo_name: |
|
639 | if '/' in repo_name: | |
641 | raise ValueError('repo_name must not contain groups got `%s`' % repo_name) |
|
640 | raise ValueError('repo_name must not contain groups got `%s`' % repo_name) | |
642 |
|
641 | |||
643 | if isinstance(repo_group, RepoGroup): |
|
642 | if isinstance(repo_group, RepoGroup): | |
644 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
643 | new_parent_path = os.sep.join(repo_group.full_path_splitted) | |
645 | else: |
|
644 | else: | |
646 | new_parent_path = repo_group or '' |
|
645 | new_parent_path = repo_group or '' | |
647 |
|
646 | |||
648 | if repo_store_location: |
|
647 | if repo_store_location: | |
649 | _paths = [repo_store_location] |
|
648 | _paths = [repo_store_location] | |
650 | else: |
|
649 | else: | |
651 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
650 | _paths = [self.repos_path, new_parent_path, repo_name] | |
652 | repo_path = os.path.join(*_paths) |
|
651 | repo_path = os.path.join(*_paths) | |
653 |
|
652 | |||
654 | # check if this path is not a repository |
|
653 | # check if this path is not a repository | |
655 | if is_valid_repo(repo_path, self.repos_path): |
|
654 | if is_valid_repo(repo_path, self.repos_path): | |
656 | raise Exception('This path %s is a valid repository' % repo_path) |
|
655 | raise Exception('This path %s is a valid repository' % repo_path) | |
657 |
|
656 | |||
658 | # check if this path is a group |
|
657 | # check if this path is a group | |
659 | if is_valid_repo_group(repo_path, self.repos_path): |
|
658 | if is_valid_repo_group(repo_path, self.repos_path): | |
660 | raise Exception('This path %s is a valid group' % repo_path) |
|
659 | raise Exception('This path %s is a valid group' % repo_path) | |
661 |
|
660 | |||
662 | log.info('creating repo %s in %s from url: `%s`', |
|
661 | log.info('creating repo %s in %s from url: `%s`', | |
663 | repo_name, repo_path, |
|
662 | repo_name, repo_path, | |
664 | obfuscate_url_pw(clone_uri)) |
|
663 | obfuscate_url_pw(clone_uri)) | |
665 |
|
664 | |||
666 | backend = get_backend(repo_type) |
|
665 | backend = get_backend(repo_type) | |
667 |
|
666 | |||
668 | if repo_type == 'hg': |
|
667 | if repo_type == 'hg': | |
669 | baseui = make_ui() |
|
668 | baseui = make_ui() | |
670 | # patch and reset hooks section of UI config to not run any |
|
669 | # patch and reset hooks section of UI config to not run any | |
671 | # hooks on creating remote repo |
|
670 | # hooks on creating remote repo | |
672 | for k, v in baseui.configitems('hooks'): |
|
671 | for k, v in baseui.configitems('hooks'): | |
673 | baseui.setconfig('hooks', k, None) |
|
672 | baseui.setconfig('hooks', k, None) | |
674 |
|
673 | |||
675 | repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui) |
|
674 | repo = backend(repo_path, create=True, src_url=clone_uri, baseui=baseui) | |
676 | elif repo_type == 'git': |
|
675 | elif repo_type == 'git': | |
677 | repo = backend(repo_path, create=True, src_url=clone_uri, bare=True) |
|
676 | repo = backend(repo_path, create=True, src_url=clone_uri, bare=True) | |
678 | # add kallithea hook into this repo |
|
677 | # add kallithea hook into this repo | |
679 | ScmModel().install_git_hooks(repo=repo) |
|
678 | ScmModel().install_git_hooks(repo=repo) | |
680 | else: |
|
679 | else: | |
681 | raise Exception('Not supported repo_type %s expected hg/git' % repo_type) |
|
680 | raise Exception('Not supported repo_type %s expected hg/git' % repo_type) | |
682 |
|
681 | |||
683 | log.debug('Created repo %s with %s backend', |
|
682 | log.debug('Created repo %s with %s backend', | |
684 | repo_name, repo_type) |
|
683 | repo_name, repo_type) | |
685 | return repo |
|
684 | return repo | |
686 |
|
685 | |||
687 | def _rename_filesystem_repo(self, old, new): |
|
686 | def _rename_filesystem_repo(self, old, new): | |
688 | """ |
|
687 | """ | |
689 | renames repository on filesystem |
|
688 | renames repository on filesystem | |
690 |
|
689 | |||
691 | :param old: old name |
|
690 | :param old: old name | |
692 | :param new: new name |
|
691 | :param new: new name | |
693 | """ |
|
692 | """ | |
694 | log.info('renaming repo from %s to %s', old, new) |
|
693 | log.info('renaming repo from %s to %s', old, new) | |
695 |
|
694 | |||
696 | old_path = os.path.join(self.repos_path, old) |
|
695 | old_path = os.path.join(self.repos_path, old) | |
697 | new_path = os.path.join(self.repos_path, new) |
|
696 | new_path = os.path.join(self.repos_path, new) | |
698 | if os.path.isdir(new_path): |
|
697 | if os.path.isdir(new_path): | |
699 | raise Exception( |
|
698 | raise Exception( | |
700 | 'Was trying to rename to already existing dir %s' % new_path |
|
699 | 'Was trying to rename to already existing dir %s' % new_path | |
701 | ) |
|
700 | ) | |
702 | shutil.move(old_path, new_path) |
|
701 | shutil.move(old_path, new_path) | |
703 |
|
702 | |||
704 | def _delete_filesystem_repo(self, repo): |
|
703 | def _delete_filesystem_repo(self, repo): | |
705 | """ |
|
704 | """ | |
706 | removes repo from filesystem, the removal is actually done by |
|
705 | removes repo from filesystem, the removal is actually done by | |
707 | renaming dir to a 'rm__*' prefix which Kallithea will skip. |
|
706 | renaming dir to a 'rm__*' prefix which Kallithea will skip. | |
708 | It can be undeleted later by reverting the rename. |
|
707 | It can be undeleted later by reverting the rename. | |
709 |
|
708 | |||
710 | :param repo: repo object |
|
709 | :param repo: repo object | |
711 | """ |
|
710 | """ | |
712 | rm_path = os.path.join(self.repos_path, repo.repo_name) |
|
711 | rm_path = os.path.join(self.repos_path, repo.repo_name) | |
713 | log.info("Removing %s", rm_path) |
|
712 | log.info("Removing %s", rm_path) | |
714 |
|
713 | |||
715 | _now = datetime.now() |
|
714 | _now = datetime.now() | |
716 | _ms = str(_now.microsecond).rjust(6, '0') |
|
715 | _ms = str(_now.microsecond).rjust(6, '0') | |
717 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), |
|
716 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), | |
718 | repo.just_name) |
|
717 | repo.just_name) | |
719 | if repo.group: |
|
718 | if repo.group: | |
720 | args = repo.group.full_path_splitted + [_d] |
|
719 | args = repo.group.full_path_splitted + [_d] | |
721 | _d = os.path.join(*args) |
|
720 | _d = os.path.join(*args) | |
722 | if os.path.exists(rm_path): |
|
721 | if os.path.exists(rm_path): | |
723 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
722 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) | |
724 | else: |
|
723 | else: | |
725 | log.error("Can't find repo to delete in %r", rm_path) |
|
724 | log.error("Can't find repo to delete in %r", rm_path) |
@@ -1,804 +1,800 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | Set of generic validators |
|
15 | Set of generic validators | |
16 | """ |
|
16 | """ | |
17 |
|
17 | |||
18 | import logging |
|
18 | import logging | |
19 | import os |
|
19 | import os | |
20 | import re |
|
20 | import re | |
21 | from collections import defaultdict |
|
21 | from collections import defaultdict | |
22 |
|
22 | |||
23 | import formencode |
|
23 | import formencode | |
24 | import ipaddr |
|
24 | import ipaddr | |
25 | import sqlalchemy |
|
25 | import sqlalchemy | |
26 | from formencode.validators import CIDR, Bool, Email, FancyValidator, Int, IPAddress, NotEmpty, Number, OneOf, Regex, Set, String, StringBoolean, UnicodeString |
|
26 | from formencode.validators import CIDR, Bool, Email, FancyValidator, Int, IPAddress, NotEmpty, Number, OneOf, Regex, Set, String, StringBoolean, UnicodeString | |
27 | from sqlalchemy import func |
|
27 | from sqlalchemy import func | |
28 | from tg.i18n import ugettext as _ |
|
28 | from tg.i18n import ugettext as _ | |
29 |
|
29 | |||
30 | from kallithea.config.routing import ADMIN_PREFIX |
|
30 | from kallithea.config.routing import ADMIN_PREFIX | |
31 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel |
|
31 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel | |
32 | from kallithea.lib.compat import OrderedSet |
|
32 | from kallithea.lib.compat import OrderedSet | |
33 | from kallithea.lib.exceptions import LdapImportError |
|
33 | from kallithea.lib.exceptions import LdapImportError | |
34 | from kallithea.lib.utils import is_valid_repo_uri |
|
34 | from kallithea.lib.utils import is_valid_repo_uri | |
35 | from kallithea.lib.utils2 import aslist, repo_name_slug, str2bool |
|
35 | from kallithea.lib.utils2 import aslist, repo_name_slug, str2bool | |
36 | from kallithea.model.db import RepoGroup, Repository, User, UserGroup |
|
36 | from kallithea.model.db import RepoGroup, Repository, User, UserGroup | |
37 |
|
37 | |||
38 |
|
38 | |||
39 | # silence warnings and pylint |
|
39 | # silence warnings and pylint | |
40 | UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \ |
|
40 | UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \ | |
41 | NotEmpty, IPAddress, CIDR, String, FancyValidator |
|
41 | NotEmpty, IPAddress, CIDR, String, FancyValidator | |
42 |
|
42 | |||
43 | log = logging.getLogger(__name__) |
|
43 | log = logging.getLogger(__name__) | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | def UniqueListFromString(): |
|
46 | def UniqueListFromString(): | |
47 | class _UniqueListFromString(formencode.FancyValidator): |
|
47 | class _UniqueListFromString(formencode.FancyValidator): | |
48 | """ |
|
48 | """ | |
49 | Split value on ',' and make unique while preserving order |
|
49 | Split value on ',' and make unique while preserving order | |
50 | """ |
|
50 | """ | |
51 | messages = dict( |
|
51 | messages = dict( | |
52 | empty=_('Value cannot be an empty list'), |
|
52 | empty=_('Value cannot be an empty list'), | |
53 | missing_value=_('Value cannot be an empty list'), |
|
53 | missing_value=_('Value cannot be an empty list'), | |
54 | ) |
|
54 | ) | |
55 |
|
55 | |||
56 | def _convert_to_python(self, value, state): |
|
56 | def _convert_to_python(self, value, state): | |
57 | value = aslist(value, ',') |
|
57 | value = aslist(value, ',') | |
58 | seen = set() |
|
58 | seen = set() | |
59 | return [c for c in value if not (c in seen or seen.add(c))] |
|
59 | return [c for c in value if not (c in seen or seen.add(c))] | |
60 |
|
60 | |||
61 | def empty_value(self, value): |
|
61 | def empty_value(self, value): | |
62 | return [] |
|
62 | return [] | |
63 |
|
63 | |||
64 | return _UniqueListFromString |
|
64 | return _UniqueListFromString | |
65 |
|
65 | |||
66 |
|
66 | |||
67 | def ValidUsername(edit=False, old_data=None): |
|
67 | def ValidUsername(edit=False, old_data=None): | |
68 | old_data = old_data or {} |
|
68 | old_data = old_data or {} | |
69 |
|
69 | |||
70 | class _validator(formencode.validators.FancyValidator): |
|
70 | class _validator(formencode.validators.FancyValidator): | |
71 | messages = { |
|
71 | messages = { | |
72 | 'username_exists': _('Username "%(username)s" already exists'), |
|
72 | 'username_exists': _('Username "%(username)s" already exists'), | |
73 | 'system_invalid_username': |
|
73 | 'system_invalid_username': | |
74 | _('Username "%(username)s" cannot be used'), |
|
74 | _('Username "%(username)s" cannot be used'), | |
75 | 'invalid_username': |
|
75 | 'invalid_username': | |
76 | _('Username may only contain alphanumeric characters ' |
|
76 | _('Username may only contain alphanumeric characters ' | |
77 | 'underscores, periods or dashes and must begin with an ' |
|
77 | 'underscores, periods or dashes and must begin with an ' | |
78 | 'alphanumeric character or underscore') |
|
78 | 'alphanumeric character or underscore') | |
79 | } |
|
79 | } | |
80 |
|
80 | |||
81 | def _validate_python(self, value, state): |
|
81 | def _validate_python(self, value, state): | |
82 | if value in ['default', 'new_user']: |
|
82 | if value in ['default', 'new_user']: | |
83 | msg = self.message('system_invalid_username', state, username=value) |
|
83 | msg = self.message('system_invalid_username', state, username=value) | |
84 | raise formencode.Invalid(msg, value, state) |
|
84 | raise formencode.Invalid(msg, value, state) | |
85 | # check if user is unique |
|
85 | # check if user is unique | |
86 | old_un = None |
|
86 | old_un = None | |
87 | if edit: |
|
87 | if edit: | |
88 | old_un = User.get(old_data.get('user_id')).username |
|
88 | old_un = User.get(old_data.get('user_id')).username | |
89 |
|
89 | |||
90 | if old_un != value or not edit: |
|
90 | if old_un != value or not edit: | |
91 | if User.get_by_username(value, case_insensitive=True): |
|
91 | if User.get_by_username(value, case_insensitive=True): | |
92 | msg = self.message('username_exists', state, username=value) |
|
92 | msg = self.message('username_exists', state, username=value) | |
93 | raise formencode.Invalid(msg, value, state) |
|
93 | raise formencode.Invalid(msg, value, state) | |
94 |
|
94 | |||
95 | if re.match(r'^[a-zA-Z0-9\_]{1}[a-zA-Z0-9\-\_\.]*$', value) is None: |
|
95 | if re.match(r'^[a-zA-Z0-9\_]{1}[a-zA-Z0-9\-\_\.]*$', value) is None: | |
96 | msg = self.message('invalid_username', state) |
|
96 | msg = self.message('invalid_username', state) | |
97 | raise formencode.Invalid(msg, value, state) |
|
97 | raise formencode.Invalid(msg, value, state) | |
98 | return _validator |
|
98 | return _validator | |
99 |
|
99 | |||
100 |
|
100 | |||
101 | def ValidRegex(msg=None): |
|
101 | def ValidRegex(msg=None): | |
102 | class _validator(formencode.validators.Regex): |
|
102 | class _validator(formencode.validators.Regex): | |
103 | messages = dict(invalid=msg or _('The input is not valid')) |
|
103 | messages = dict(invalid=msg or _('The input is not valid')) | |
104 | return _validator |
|
104 | return _validator | |
105 |
|
105 | |||
106 |
|
106 | |||
107 | def ValidRepoUser(): |
|
107 | def ValidRepoUser(): | |
108 | class _validator(formencode.validators.FancyValidator): |
|
108 | class _validator(formencode.validators.FancyValidator): | |
109 | messages = { |
|
109 | messages = { | |
110 | 'invalid_username': _('Username %(username)s is not valid') |
|
110 | 'invalid_username': _('Username %(username)s is not valid') | |
111 | } |
|
111 | } | |
112 |
|
112 | |||
113 | def _validate_python(self, value, state): |
|
113 | def _validate_python(self, value, state): | |
114 | try: |
|
114 | try: | |
115 | User.query().filter(User.active == True) \ |
|
115 | User.query().filter(User.active == True) \ | |
116 | .filter(User.username == value).one() |
|
116 | .filter(User.username == value).one() | |
117 | except sqlalchemy.exc.InvalidRequestError: # NoResultFound/MultipleResultsFound |
|
117 | except sqlalchemy.exc.InvalidRequestError: # NoResultFound/MultipleResultsFound | |
118 | msg = self.message('invalid_username', state, username=value) |
|
118 | msg = self.message('invalid_username', state, username=value) | |
119 | raise formencode.Invalid(msg, value, state, |
|
119 | raise formencode.Invalid(msg, value, state, | |
120 | error_dict=dict(username=msg) |
|
120 | error_dict=dict(username=msg) | |
121 | ) |
|
121 | ) | |
122 |
|
122 | |||
123 | return _validator |
|
123 | return _validator | |
124 |
|
124 | |||
125 |
|
125 | |||
126 | def ValidUserGroup(edit=False, old_data=None): |
|
126 | def ValidUserGroup(edit=False, old_data=None): | |
127 | old_data = old_data or {} |
|
127 | old_data = old_data or {} | |
128 |
|
128 | |||
129 | class _validator(formencode.validators.FancyValidator): |
|
129 | class _validator(formencode.validators.FancyValidator): | |
130 | messages = { |
|
130 | messages = { | |
131 | 'invalid_group': _('Invalid user group name'), |
|
131 | 'invalid_group': _('Invalid user group name'), | |
132 | 'group_exist': _('User group "%(usergroup)s" already exists'), |
|
132 | 'group_exist': _('User group "%(usergroup)s" already exists'), | |
133 | 'invalid_usergroup_name': |
|
133 | 'invalid_usergroup_name': | |
134 | _('user group name may only contain alphanumeric ' |
|
134 | _('user group name may only contain alphanumeric ' | |
135 | 'characters underscores, periods or dashes and must begin ' |
|
135 | 'characters underscores, periods or dashes and must begin ' | |
136 | 'with alphanumeric character') |
|
136 | 'with alphanumeric character') | |
137 | } |
|
137 | } | |
138 |
|
138 | |||
139 | def _validate_python(self, value, state): |
|
139 | def _validate_python(self, value, state): | |
140 | if value in ['default']: |
|
140 | if value in ['default']: | |
141 | msg = self.message('invalid_group', state) |
|
141 | msg = self.message('invalid_group', state) | |
142 | raise formencode.Invalid(msg, value, state, |
|
142 | raise formencode.Invalid(msg, value, state, | |
143 | error_dict=dict(users_group_name=msg) |
|
143 | error_dict=dict(users_group_name=msg) | |
144 | ) |
|
144 | ) | |
145 | # check if group is unique |
|
145 | # check if group is unique | |
146 | old_ugname = None |
|
146 | old_ugname = None | |
147 | if edit: |
|
147 | if edit: | |
148 | old_id = old_data.get('users_group_id') |
|
148 | old_id = old_data.get('users_group_id') | |
149 | old_ugname = UserGroup.get(old_id).users_group_name |
|
149 | old_ugname = UserGroup.get(old_id).users_group_name | |
150 |
|
150 | |||
151 | if old_ugname != value or not edit: |
|
151 | if old_ugname != value or not edit: | |
152 | is_existing_group = UserGroup.get_by_group_name(value, |
|
152 | is_existing_group = UserGroup.get_by_group_name(value, | |
153 | case_insensitive=True) |
|
153 | case_insensitive=True) | |
154 | if is_existing_group: |
|
154 | if is_existing_group: | |
155 | msg = self.message('group_exist', state, usergroup=value) |
|
155 | msg = self.message('group_exist', state, usergroup=value) | |
156 | raise formencode.Invalid(msg, value, state, |
|
156 | raise formencode.Invalid(msg, value, state, | |
157 | error_dict=dict(users_group_name=msg) |
|
157 | error_dict=dict(users_group_name=msg) | |
158 | ) |
|
158 | ) | |
159 |
|
159 | |||
160 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: |
|
160 | if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None: | |
161 | msg = self.message('invalid_usergroup_name', state) |
|
161 | msg = self.message('invalid_usergroup_name', state) | |
162 | raise formencode.Invalid(msg, value, state, |
|
162 | raise formencode.Invalid(msg, value, state, | |
163 | error_dict=dict(users_group_name=msg) |
|
163 | error_dict=dict(users_group_name=msg) | |
164 | ) |
|
164 | ) | |
165 |
|
165 | |||
166 | return _validator |
|
166 | return _validator | |
167 |
|
167 | |||
168 |
|
168 | |||
169 | def ValidRepoGroup(edit=False, old_data=None): |
|
169 | def ValidRepoGroup(edit=False, old_data=None): | |
170 | old_data = old_data or {} |
|
170 | old_data = old_data or {} | |
171 |
|
171 | |||
172 | class _validator(formencode.validators.FancyValidator): |
|
172 | class _validator(formencode.validators.FancyValidator): | |
173 | messages = { |
|
173 | messages = { | |
174 | 'parent_group_id': _('Cannot assign this group as parent'), |
|
174 | 'parent_group_id': _('Cannot assign this group as parent'), | |
175 | 'group_exists': _('Group "%(group_name)s" already exists'), |
|
175 | 'group_exists': _('Group "%(group_name)s" already exists'), | |
176 | 'repo_exists': |
|
176 | 'repo_exists': | |
177 | _('Repository with name "%(group_name)s" already exists') |
|
177 | _('Repository with name "%(group_name)s" already exists') | |
178 | } |
|
178 | } | |
179 |
|
179 | |||
180 | def _validate_python(self, value, state): |
|
180 | def _validate_python(self, value, state): | |
181 | # TODO WRITE VALIDATIONS |
|
181 | # TODO WRITE VALIDATIONS | |
182 | group_name = value.get('group_name') |
|
182 | group_name = value.get('group_name') | |
183 | parent_group_id = value.get('parent_group_id') |
|
183 | parent_group_id = value.get('parent_group_id') | |
184 |
|
184 | |||
185 | # slugify repo group just in case :) |
|
185 | # slugify repo group just in case :) | |
186 | slug = repo_name_slug(group_name) |
|
186 | slug = repo_name_slug(group_name) | |
187 |
|
187 | |||
188 | # check for parent of self |
|
188 | # check for parent of self | |
189 | parent_of_self = lambda: ( |
|
189 | if edit and parent_group_id and old_data['group_id'] == parent_group_id: | |
190 | old_data['group_id'] == parent_group_id |
|
|||
191 | if parent_group_id else False |
|
|||
192 | ) |
|
|||
193 | if edit and parent_of_self(): |
|
|||
194 | msg = self.message('parent_group_id', state) |
|
190 | msg = self.message('parent_group_id', state) | |
195 | raise formencode.Invalid(msg, value, state, |
|
191 | raise formencode.Invalid(msg, value, state, | |
196 | error_dict=dict(parent_group_id=msg) |
|
192 | error_dict=dict(parent_group_id=msg) | |
197 | ) |
|
193 | ) | |
198 |
|
194 | |||
199 | old_gname = None |
|
195 | old_gname = None | |
200 | if edit: |
|
196 | if edit: | |
201 | old_gname = RepoGroup.get(old_data.get('group_id')).group_name |
|
197 | old_gname = RepoGroup.get(old_data.get('group_id')).group_name | |
202 |
|
198 | |||
203 | if old_gname != group_name or not edit: |
|
199 | if old_gname != group_name or not edit: | |
204 |
|
200 | |||
205 | # check group |
|
201 | # check group | |
206 | gr = RepoGroup.query() \ |
|
202 | gr = RepoGroup.query() \ | |
207 | .filter(func.lower(RepoGroup.group_name) == func.lower(slug)) \ |
|
203 | .filter(func.lower(RepoGroup.group_name) == func.lower(slug)) \ | |
208 | .filter(RepoGroup.parent_group_id == parent_group_id) \ |
|
204 | .filter(RepoGroup.parent_group_id == parent_group_id) \ | |
209 | .scalar() |
|
205 | .scalar() | |
210 | if gr is not None: |
|
206 | if gr is not None: | |
211 | msg = self.message('group_exists', state, group_name=slug) |
|
207 | msg = self.message('group_exists', state, group_name=slug) | |
212 | raise formencode.Invalid(msg, value, state, |
|
208 | raise formencode.Invalid(msg, value, state, | |
213 | error_dict=dict(group_name=msg) |
|
209 | error_dict=dict(group_name=msg) | |
214 | ) |
|
210 | ) | |
215 |
|
211 | |||
216 | # check for same repo |
|
212 | # check for same repo | |
217 | repo = Repository.query() \ |
|
213 | repo = Repository.query() \ | |
218 | .filter(func.lower(Repository.repo_name) == func.lower(slug)) \ |
|
214 | .filter(func.lower(Repository.repo_name) == func.lower(slug)) \ | |
219 | .scalar() |
|
215 | .scalar() | |
220 | if repo is not None: |
|
216 | if repo is not None: | |
221 | msg = self.message('repo_exists', state, group_name=slug) |
|
217 | msg = self.message('repo_exists', state, group_name=slug) | |
222 | raise formencode.Invalid(msg, value, state, |
|
218 | raise formencode.Invalid(msg, value, state, | |
223 | error_dict=dict(group_name=msg) |
|
219 | error_dict=dict(group_name=msg) | |
224 | ) |
|
220 | ) | |
225 |
|
221 | |||
226 | return _validator |
|
222 | return _validator | |
227 |
|
223 | |||
228 |
|
224 | |||
229 | def ValidPassword(): |
|
225 | def ValidPassword(): | |
230 | class _validator(formencode.validators.FancyValidator): |
|
226 | class _validator(formencode.validators.FancyValidator): | |
231 | messages = { |
|
227 | messages = { | |
232 | 'invalid_password': |
|
228 | 'invalid_password': | |
233 | _('Invalid characters (non-ascii) in password') |
|
229 | _('Invalid characters (non-ascii) in password') | |
234 | } |
|
230 | } | |
235 |
|
231 | |||
236 | def _validate_python(self, value, state): |
|
232 | def _validate_python(self, value, state): | |
237 | try: |
|
233 | try: | |
238 | (value or '').encode('ascii') |
|
234 | (value or '').encode('ascii') | |
239 | except UnicodeError: |
|
235 | except UnicodeError: | |
240 | msg = self.message('invalid_password', state) |
|
236 | msg = self.message('invalid_password', state) | |
241 | raise formencode.Invalid(msg, value, state,) |
|
237 | raise formencode.Invalid(msg, value, state,) | |
242 | return _validator |
|
238 | return _validator | |
243 |
|
239 | |||
244 |
|
240 | |||
245 | def ValidOldPassword(username): |
|
241 | def ValidOldPassword(username): | |
246 | class _validator(formencode.validators.FancyValidator): |
|
242 | class _validator(formencode.validators.FancyValidator): | |
247 | messages = { |
|
243 | messages = { | |
248 | 'invalid_password': _('Invalid old password') |
|
244 | 'invalid_password': _('Invalid old password') | |
249 | } |
|
245 | } | |
250 |
|
246 | |||
251 | def _validate_python(self, value, state): |
|
247 | def _validate_python(self, value, state): | |
252 | from kallithea.lib import auth_modules |
|
248 | from kallithea.lib import auth_modules | |
253 | if auth_modules.authenticate(username, value, '') is None: |
|
249 | if auth_modules.authenticate(username, value, '') is None: | |
254 | msg = self.message('invalid_password', state) |
|
250 | msg = self.message('invalid_password', state) | |
255 | raise formencode.Invalid(msg, value, state, |
|
251 | raise formencode.Invalid(msg, value, state, | |
256 | error_dict=dict(current_password=msg) |
|
252 | error_dict=dict(current_password=msg) | |
257 | ) |
|
253 | ) | |
258 | return _validator |
|
254 | return _validator | |
259 |
|
255 | |||
260 |
|
256 | |||
261 | def ValidPasswordsMatch(password_field, password_confirmation_field): |
|
257 | def ValidPasswordsMatch(password_field, password_confirmation_field): | |
262 | class _validator(formencode.validators.FancyValidator): |
|
258 | class _validator(formencode.validators.FancyValidator): | |
263 | messages = { |
|
259 | messages = { | |
264 | 'password_mismatch': _('Passwords do not match'), |
|
260 | 'password_mismatch': _('Passwords do not match'), | |
265 | } |
|
261 | } | |
266 |
|
262 | |||
267 | def _validate_python(self, value, state): |
|
263 | def _validate_python(self, value, state): | |
268 | if value.get(password_field) != value[password_confirmation_field]: |
|
264 | if value.get(password_field) != value[password_confirmation_field]: | |
269 | msg = self.message('password_mismatch', state) |
|
265 | msg = self.message('password_mismatch', state) | |
270 | raise formencode.Invalid(msg, value, state, |
|
266 | raise formencode.Invalid(msg, value, state, | |
271 | error_dict={password_field: msg, password_confirmation_field: msg} |
|
267 | error_dict={password_field: msg, password_confirmation_field: msg} | |
272 | ) |
|
268 | ) | |
273 | return _validator |
|
269 | return _validator | |
274 |
|
270 | |||
275 |
|
271 | |||
276 | def ValidAuth(): |
|
272 | def ValidAuth(): | |
277 | class _validator(formencode.validators.FancyValidator): |
|
273 | class _validator(formencode.validators.FancyValidator): | |
278 | messages = { |
|
274 | messages = { | |
279 | 'invalid_auth': _('Invalid username or password'), |
|
275 | 'invalid_auth': _('Invalid username or password'), | |
280 | } |
|
276 | } | |
281 |
|
277 | |||
282 | def _validate_python(self, value, state): |
|
278 | def _validate_python(self, value, state): | |
283 | from kallithea.lib import auth_modules |
|
279 | from kallithea.lib import auth_modules | |
284 |
|
280 | |||
285 | password = value['password'] |
|
281 | password = value['password'] | |
286 | username = value['username'] |
|
282 | username = value['username'] | |
287 |
|
283 | |||
288 | # authenticate returns unused dict but has called |
|
284 | # authenticate returns unused dict but has called | |
289 | # plugin._authenticate which has create_or_update'ed the username user in db |
|
285 | # plugin._authenticate which has create_or_update'ed the username user in db | |
290 | if auth_modules.authenticate(username, password) is None: |
|
286 | if auth_modules.authenticate(username, password) is None: | |
291 | user = User.get_by_username_or_email(username) |
|
287 | user = User.get_by_username_or_email(username) | |
292 | if user and not user.active: |
|
288 | if user and not user.active: | |
293 | log.warning('user %s is disabled', username) |
|
289 | log.warning('user %s is disabled', username) | |
294 | msg = self.message('invalid_auth', state) |
|
290 | msg = self.message('invalid_auth', state) | |
295 | raise formencode.Invalid(msg, value, state, |
|
291 | raise formencode.Invalid(msg, value, state, | |
296 | error_dict=dict(username=' ', password=msg) |
|
292 | error_dict=dict(username=' ', password=msg) | |
297 | ) |
|
293 | ) | |
298 | else: |
|
294 | else: | |
299 | log.warning('user %s failed to authenticate', username) |
|
295 | log.warning('user %s failed to authenticate', username) | |
300 | msg = self.message('invalid_auth', state) |
|
296 | msg = self.message('invalid_auth', state) | |
301 | raise formencode.Invalid(msg, value, state, |
|
297 | raise formencode.Invalid(msg, value, state, | |
302 | error_dict=dict(username=' ', password=msg) |
|
298 | error_dict=dict(username=' ', password=msg) | |
303 | ) |
|
299 | ) | |
304 | return _validator |
|
300 | return _validator | |
305 |
|
301 | |||
306 |
|
302 | |||
307 | def ValidRepoName(edit=False, old_data=None): |
|
303 | def ValidRepoName(edit=False, old_data=None): | |
308 | old_data = old_data or {} |
|
304 | old_data = old_data or {} | |
309 |
|
305 | |||
310 | class _validator(formencode.validators.FancyValidator): |
|
306 | class _validator(formencode.validators.FancyValidator): | |
311 | messages = { |
|
307 | messages = { | |
312 | 'invalid_repo_name': |
|
308 | 'invalid_repo_name': | |
313 | _('Repository name %(repo)s is not allowed'), |
|
309 | _('Repository name %(repo)s is not allowed'), | |
314 | 'repository_exists': |
|
310 | 'repository_exists': | |
315 | _('Repository named %(repo)s already exists'), |
|
311 | _('Repository named %(repo)s already exists'), | |
316 | 'repository_in_group_exists': _('Repository "%(repo)s" already ' |
|
312 | 'repository_in_group_exists': _('Repository "%(repo)s" already ' | |
317 | 'exists in group "%(group)s"'), |
|
313 | 'exists in group "%(group)s"'), | |
318 | 'same_group_exists': _('Repository group with name "%(repo)s" ' |
|
314 | 'same_group_exists': _('Repository group with name "%(repo)s" ' | |
319 | 'already exists') |
|
315 | 'already exists') | |
320 | } |
|
316 | } | |
321 |
|
317 | |||
322 | def _convert_to_python(self, value, state): |
|
318 | def _convert_to_python(self, value, state): | |
323 | repo_name = repo_name_slug(value.get('repo_name', '')) |
|
319 | repo_name = repo_name_slug(value.get('repo_name', '')) | |
324 | repo_group = value.get('repo_group') |
|
320 | repo_group = value.get('repo_group') | |
325 | if repo_group: |
|
321 | if repo_group: | |
326 | gr = RepoGroup.get(repo_group) |
|
322 | gr = RepoGroup.get(repo_group) | |
327 | group_path = gr.full_path |
|
323 | group_path = gr.full_path | |
328 | group_name = gr.group_name |
|
324 | group_name = gr.group_name | |
329 | # value needs to be aware of group name in order to check |
|
325 | # value needs to be aware of group name in order to check | |
330 | # db key This is an actual just the name to store in the |
|
326 | # db key This is an actual just the name to store in the | |
331 | # database |
|
327 | # database | |
332 | repo_name_full = group_path + RepoGroup.url_sep() + repo_name |
|
328 | repo_name_full = group_path + RepoGroup.url_sep() + repo_name | |
333 | else: |
|
329 | else: | |
334 | group_name = group_path = '' |
|
330 | group_name = group_path = '' | |
335 | repo_name_full = repo_name |
|
331 | repo_name_full = repo_name | |
336 |
|
332 | |||
337 | value['repo_name'] = repo_name |
|
333 | value['repo_name'] = repo_name | |
338 | value['repo_name_full'] = repo_name_full |
|
334 | value['repo_name_full'] = repo_name_full | |
339 | value['group_path'] = group_path |
|
335 | value['group_path'] = group_path | |
340 | value['group_name'] = group_name |
|
336 | value['group_name'] = group_name | |
341 | return value |
|
337 | return value | |
342 |
|
338 | |||
343 | def _validate_python(self, value, state): |
|
339 | def _validate_python(self, value, state): | |
344 | repo_name = value.get('repo_name') |
|
340 | repo_name = value.get('repo_name') | |
345 | repo_name_full = value.get('repo_name_full') |
|
341 | repo_name_full = value.get('repo_name_full') | |
346 | group_path = value.get('group_path') |
|
342 | group_path = value.get('group_path') | |
347 | group_name = value.get('group_name') |
|
343 | group_name = value.get('group_name') | |
348 |
|
344 | |||
349 | if repo_name in [ADMIN_PREFIX, '']: |
|
345 | if repo_name in [ADMIN_PREFIX, '']: | |
350 | msg = self.message('invalid_repo_name', state, repo=repo_name) |
|
346 | msg = self.message('invalid_repo_name', state, repo=repo_name) | |
351 | raise formencode.Invalid(msg, value, state, |
|
347 | raise formencode.Invalid(msg, value, state, | |
352 | error_dict=dict(repo_name=msg) |
|
348 | error_dict=dict(repo_name=msg) | |
353 | ) |
|
349 | ) | |
354 |
|
350 | |||
355 | rename = old_data.get('repo_name') != repo_name_full |
|
351 | rename = old_data.get('repo_name') != repo_name_full | |
356 | create = not edit |
|
352 | create = not edit | |
357 | if rename or create: |
|
353 | if rename or create: | |
358 | repo = Repository.get_by_repo_name(repo_name_full, case_insensitive=True) |
|
354 | repo = Repository.get_by_repo_name(repo_name_full, case_insensitive=True) | |
359 | repo_group = RepoGroup.get_by_group_name(repo_name_full, case_insensitive=True) |
|
355 | repo_group = RepoGroup.get_by_group_name(repo_name_full, case_insensitive=True) | |
360 | if group_path != '': |
|
356 | if group_path != '': | |
361 | if repo is not None: |
|
357 | if repo is not None: | |
362 | msg = self.message('repository_in_group_exists', state, |
|
358 | msg = self.message('repository_in_group_exists', state, | |
363 | repo=repo.repo_name, group=group_name) |
|
359 | repo=repo.repo_name, group=group_name) | |
364 | raise formencode.Invalid(msg, value, state, |
|
360 | raise formencode.Invalid(msg, value, state, | |
365 | error_dict=dict(repo_name=msg) |
|
361 | error_dict=dict(repo_name=msg) | |
366 | ) |
|
362 | ) | |
367 | elif repo_group is not None: |
|
363 | elif repo_group is not None: | |
368 | msg = self.message('same_group_exists', state, |
|
364 | msg = self.message('same_group_exists', state, | |
369 | repo=repo_name) |
|
365 | repo=repo_name) | |
370 | raise formencode.Invalid(msg, value, state, |
|
366 | raise formencode.Invalid(msg, value, state, | |
371 | error_dict=dict(repo_name=msg) |
|
367 | error_dict=dict(repo_name=msg) | |
372 | ) |
|
368 | ) | |
373 | elif repo is not None: |
|
369 | elif repo is not None: | |
374 | msg = self.message('repository_exists', state, |
|
370 | msg = self.message('repository_exists', state, | |
375 | repo=repo.repo_name) |
|
371 | repo=repo.repo_name) | |
376 | raise formencode.Invalid(msg, value, state, |
|
372 | raise formencode.Invalid(msg, value, state, | |
377 | error_dict=dict(repo_name=msg) |
|
373 | error_dict=dict(repo_name=msg) | |
378 | ) |
|
374 | ) | |
379 | return value |
|
375 | return value | |
380 | return _validator |
|
376 | return _validator | |
381 |
|
377 | |||
382 |
|
378 | |||
383 | def ValidForkName(*args, **kwargs): |
|
379 | def ValidForkName(*args, **kwargs): | |
384 | return ValidRepoName(*args, **kwargs) |
|
380 | return ValidRepoName(*args, **kwargs) | |
385 |
|
381 | |||
386 |
|
382 | |||
387 | def SlugifyName(): |
|
383 | def SlugifyName(): | |
388 | class _validator(formencode.validators.FancyValidator): |
|
384 | class _validator(formencode.validators.FancyValidator): | |
389 |
|
385 | |||
390 | def _convert_to_python(self, value, state): |
|
386 | def _convert_to_python(self, value, state): | |
391 | return repo_name_slug(value) |
|
387 | return repo_name_slug(value) | |
392 |
|
388 | |||
393 | def _validate_python(self, value, state): |
|
389 | def _validate_python(self, value, state): | |
394 | pass |
|
390 | pass | |
395 |
|
391 | |||
396 | return _validator |
|
392 | return _validator | |
397 |
|
393 | |||
398 |
|
394 | |||
399 | def ValidCloneUri(): |
|
395 | def ValidCloneUri(): | |
400 | from kallithea.lib.utils import make_ui |
|
396 | from kallithea.lib.utils import make_ui | |
401 |
|
397 | |||
402 | class _validator(formencode.validators.FancyValidator): |
|
398 | class _validator(formencode.validators.FancyValidator): | |
403 | messages = { |
|
399 | messages = { | |
404 | 'clone_uri': _('Invalid repository URL'), |
|
400 | 'clone_uri': _('Invalid repository URL'), | |
405 | 'invalid_clone_uri': _('Invalid repository URL. It must be a ' |
|
401 | 'invalid_clone_uri': _('Invalid repository URL. It must be a ' | |
406 | 'valid http, https, ssh, svn+http or svn+https URL'), |
|
402 | 'valid http, https, ssh, svn+http or svn+https URL'), | |
407 | } |
|
403 | } | |
408 |
|
404 | |||
409 | def _validate_python(self, value, state): |
|
405 | def _validate_python(self, value, state): | |
410 | repo_type = value.get('repo_type') |
|
406 | repo_type = value.get('repo_type') | |
411 | url = value.get('clone_uri') |
|
407 | url = value.get('clone_uri') | |
412 |
|
408 | |||
413 | if url and url != value.get('clone_uri_hidden'): |
|
409 | if url and url != value.get('clone_uri_hidden'): | |
414 | try: |
|
410 | try: | |
415 | is_valid_repo_uri(repo_type, url, make_ui()) |
|
411 | is_valid_repo_uri(repo_type, url, make_ui()) | |
416 | except Exception: |
|
412 | except Exception: | |
417 | log.exception('URL validation failed') |
|
413 | log.exception('URL validation failed') | |
418 | msg = self.message('clone_uri', state) |
|
414 | msg = self.message('clone_uri', state) | |
419 | raise formencode.Invalid(msg, value, state, |
|
415 | raise formencode.Invalid(msg, value, state, | |
420 | error_dict=dict(clone_uri=msg) |
|
416 | error_dict=dict(clone_uri=msg) | |
421 | ) |
|
417 | ) | |
422 | return _validator |
|
418 | return _validator | |
423 |
|
419 | |||
424 |
|
420 | |||
425 | def ValidForkType(old_data=None): |
|
421 | def ValidForkType(old_data=None): | |
426 | old_data = old_data or {} |
|
422 | old_data = old_data or {} | |
427 |
|
423 | |||
428 | class _validator(formencode.validators.FancyValidator): |
|
424 | class _validator(formencode.validators.FancyValidator): | |
429 | messages = { |
|
425 | messages = { | |
430 | 'invalid_fork_type': _('Fork has to be the same type as parent') |
|
426 | 'invalid_fork_type': _('Fork has to be the same type as parent') | |
431 | } |
|
427 | } | |
432 |
|
428 | |||
433 | def _validate_python(self, value, state): |
|
429 | def _validate_python(self, value, state): | |
434 | if old_data['repo_type'] != value: |
|
430 | if old_data['repo_type'] != value: | |
435 | msg = self.message('invalid_fork_type', state) |
|
431 | msg = self.message('invalid_fork_type', state) | |
436 | raise formencode.Invalid(msg, value, state, |
|
432 | raise formencode.Invalid(msg, value, state, | |
437 | error_dict=dict(repo_type=msg) |
|
433 | error_dict=dict(repo_type=msg) | |
438 | ) |
|
434 | ) | |
439 | return _validator |
|
435 | return _validator | |
440 |
|
436 | |||
441 |
|
437 | |||
442 | def CanWriteGroup(old_data=None): |
|
438 | def CanWriteGroup(old_data=None): | |
443 | class _validator(formencode.validators.FancyValidator): |
|
439 | class _validator(formencode.validators.FancyValidator): | |
444 | messages = { |
|
440 | messages = { | |
445 | 'permission_denied': _("You don't have permissions " |
|
441 | 'permission_denied': _("You don't have permissions " | |
446 | "to create repository in this group"), |
|
442 | "to create repository in this group"), | |
447 | 'permission_denied_root': _("no permission to create repository " |
|
443 | 'permission_denied_root': _("no permission to create repository " | |
448 | "in root location") |
|
444 | "in root location") | |
449 | } |
|
445 | } | |
450 |
|
446 | |||
451 | def _convert_to_python(self, value, state): |
|
447 | def _convert_to_python(self, value, state): | |
452 | # root location |
|
448 | # root location | |
453 | if value == -1: |
|
449 | if value == -1: | |
454 | return None |
|
450 | return None | |
455 | return value |
|
451 | return value | |
456 |
|
452 | |||
457 | def _validate_python(self, value, state): |
|
453 | def _validate_python(self, value, state): | |
458 | gr = RepoGroup.get(value) |
|
454 | gr = RepoGroup.get(value) | |
459 | gr_name = gr.group_name if gr is not None else None # None means ROOT location |
|
455 | gr_name = gr.group_name if gr is not None else None # None means ROOT location | |
460 |
|
456 | |||
461 | # create repositories with write permission on group is set to true |
|
457 | # create repositories with write permission on group is set to true | |
462 | create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')() |
|
458 | create_on_write = HasPermissionAny('hg.create.write_on_repogroup.true')() | |
463 | group_admin = HasRepoGroupPermissionLevel('admin')(gr_name, |
|
459 | group_admin = HasRepoGroupPermissionLevel('admin')(gr_name, | |
464 | 'can write into group validator') |
|
460 | 'can write into group validator') | |
465 | group_write = HasRepoGroupPermissionLevel('write')(gr_name, |
|
461 | group_write = HasRepoGroupPermissionLevel('write')(gr_name, | |
466 | 'can write into group validator') |
|
462 | 'can write into group validator') | |
467 | forbidden = not (group_admin or (group_write and create_on_write)) |
|
463 | forbidden = not (group_admin or (group_write and create_on_write)) | |
468 | can_create_repos = HasPermissionAny('hg.admin', 'hg.create.repository') |
|
464 | can_create_repos = HasPermissionAny('hg.admin', 'hg.create.repository') | |
469 | gid = (old_data['repo_group'].get('group_id') |
|
465 | gid = (old_data['repo_group'].get('group_id') | |
470 | if (old_data and 'repo_group' in old_data) else None) |
|
466 | if (old_data and 'repo_group' in old_data) else None) | |
471 | value_changed = gid != value |
|
467 | value_changed = gid != value | |
472 | new = not old_data |
|
468 | new = not old_data | |
473 | # do check if we changed the value, there's a case that someone got |
|
469 | # do check if we changed the value, there's a case that someone got | |
474 | # revoked write permissions to a repository, he still created, we |
|
470 | # revoked write permissions to a repository, he still created, we | |
475 | # don't need to check permission if he didn't change the value of |
|
471 | # don't need to check permission if he didn't change the value of | |
476 | # groups in form box |
|
472 | # groups in form box | |
477 | if value_changed or new: |
|
473 | if value_changed or new: | |
478 | # parent group need to be existing |
|
474 | # parent group need to be existing | |
479 | if gr and forbidden: |
|
475 | if gr and forbidden: | |
480 | msg = self.message('permission_denied', state) |
|
476 | msg = self.message('permission_denied', state) | |
481 | raise formencode.Invalid(msg, value, state, |
|
477 | raise formencode.Invalid(msg, value, state, | |
482 | error_dict=dict(repo_type=msg) |
|
478 | error_dict=dict(repo_type=msg) | |
483 | ) |
|
479 | ) | |
484 | ## check if we can write to root location ! |
|
480 | ## check if we can write to root location ! | |
485 | elif gr is None and not can_create_repos(): |
|
481 | elif gr is None and not can_create_repos(): | |
486 | msg = self.message('permission_denied_root', state) |
|
482 | msg = self.message('permission_denied_root', state) | |
487 | raise formencode.Invalid(msg, value, state, |
|
483 | raise formencode.Invalid(msg, value, state, | |
488 | error_dict=dict(repo_type=msg) |
|
484 | error_dict=dict(repo_type=msg) | |
489 | ) |
|
485 | ) | |
490 |
|
486 | |||
491 | return _validator |
|
487 | return _validator | |
492 |
|
488 | |||
493 |
|
489 | |||
494 | def CanCreateGroup(can_create_in_root=False): |
|
490 | def CanCreateGroup(can_create_in_root=False): | |
495 | class _validator(formencode.validators.FancyValidator): |
|
491 | class _validator(formencode.validators.FancyValidator): | |
496 | messages = { |
|
492 | messages = { | |
497 | 'permission_denied': _("You don't have permissions " |
|
493 | 'permission_denied': _("You don't have permissions " | |
498 | "to create a group in this location") |
|
494 | "to create a group in this location") | |
499 | } |
|
495 | } | |
500 |
|
496 | |||
501 | def to_python(self, value, state): |
|
497 | def to_python(self, value, state): | |
502 | # root location |
|
498 | # root location | |
503 | if value == -1: |
|
499 | if value == -1: | |
504 | return None |
|
500 | return None | |
505 | return value |
|
501 | return value | |
506 |
|
502 | |||
507 | def _validate_python(self, value, state): |
|
503 | def _validate_python(self, value, state): | |
508 | gr = RepoGroup.get(value) |
|
504 | gr = RepoGroup.get(value) | |
509 | gr_name = gr.group_name if gr is not None else None # None means ROOT location |
|
505 | gr_name = gr.group_name if gr is not None else None # None means ROOT location | |
510 |
|
506 | |||
511 | if can_create_in_root and gr is None: |
|
507 | if can_create_in_root and gr is None: | |
512 | # we can create in root, we're fine no validations required |
|
508 | # we can create in root, we're fine no validations required | |
513 | return |
|
509 | return | |
514 |
|
510 | |||
515 | forbidden_in_root = gr is None and not can_create_in_root |
|
511 | forbidden_in_root = gr is None and not can_create_in_root | |
516 | forbidden = not HasRepoGroupPermissionLevel('admin')(gr_name, 'can create group validator') |
|
512 | forbidden = not HasRepoGroupPermissionLevel('admin')(gr_name, 'can create group validator') | |
517 | if forbidden_in_root or forbidden: |
|
513 | if forbidden_in_root or forbidden: | |
518 | msg = self.message('permission_denied', state) |
|
514 | msg = self.message('permission_denied', state) | |
519 | raise formencode.Invalid(msg, value, state, |
|
515 | raise formencode.Invalid(msg, value, state, | |
520 | error_dict=dict(parent_group_id=msg) |
|
516 | error_dict=dict(parent_group_id=msg) | |
521 | ) |
|
517 | ) | |
522 |
|
518 | |||
523 | return _validator |
|
519 | return _validator | |
524 |
|
520 | |||
525 |
|
521 | |||
526 | def ValidPerms(type_='repo'): |
|
522 | def ValidPerms(type_='repo'): | |
527 | if type_ == 'repo_group': |
|
523 | if type_ == 'repo_group': | |
528 | EMPTY_PERM = 'group.none' |
|
524 | EMPTY_PERM = 'group.none' | |
529 | elif type_ == 'repo': |
|
525 | elif type_ == 'repo': | |
530 | EMPTY_PERM = 'repository.none' |
|
526 | EMPTY_PERM = 'repository.none' | |
531 | elif type_ == 'user_group': |
|
527 | elif type_ == 'user_group': | |
532 | EMPTY_PERM = 'usergroup.none' |
|
528 | EMPTY_PERM = 'usergroup.none' | |
533 |
|
529 | |||
534 | class _validator(formencode.validators.FancyValidator): |
|
530 | class _validator(formencode.validators.FancyValidator): | |
535 | messages = { |
|
531 | messages = { | |
536 | 'perm_new_member_name': |
|
532 | 'perm_new_member_name': | |
537 | _('This username or user group name is not valid') |
|
533 | _('This username or user group name is not valid') | |
538 | } |
|
534 | } | |
539 |
|
535 | |||
540 | def to_python(self, value, state): |
|
536 | def to_python(self, value, state): | |
541 | perms_update = OrderedSet() |
|
537 | perms_update = OrderedSet() | |
542 | perms_new = OrderedSet() |
|
538 | perms_new = OrderedSet() | |
543 | # build a list of permission to update and new permission to create |
|
539 | # build a list of permission to update and new permission to create | |
544 |
|
540 | |||
545 | # CLEAN OUT ORG VALUE FROM NEW MEMBERS, and group them using |
|
541 | # CLEAN OUT ORG VALUE FROM NEW MEMBERS, and group them using | |
546 | new_perms_group = defaultdict(dict) |
|
542 | new_perms_group = defaultdict(dict) | |
547 | for k, v in value.copy().items(): |
|
543 | for k, v in value.copy().items(): | |
548 | if k.startswith('perm_new_member'): |
|
544 | if k.startswith('perm_new_member'): | |
549 | del value[k] |
|
545 | del value[k] | |
550 | _type, part = k.split('perm_new_member_') |
|
546 | _type, part = k.split('perm_new_member_') | |
551 | args = part.split('_') |
|
547 | args = part.split('_') | |
552 | if len(args) == 1: |
|
548 | if len(args) == 1: | |
553 | new_perms_group[args[0]]['perm'] = v |
|
549 | new_perms_group[args[0]]['perm'] = v | |
554 | elif len(args) == 2: |
|
550 | elif len(args) == 2: | |
555 | _key, pos = args |
|
551 | _key, pos = args | |
556 | new_perms_group[pos][_key] = v |
|
552 | new_perms_group[pos][_key] = v | |
557 |
|
553 | |||
558 | # fill new permissions in order of how they were added |
|
554 | # fill new permissions in order of how they were added | |
559 | for k in sorted(new_perms_group, key=lambda k: int(k)): |
|
555 | for k in sorted(new_perms_group, key=lambda k: int(k)): | |
560 | perm_dict = new_perms_group[k] |
|
556 | perm_dict = new_perms_group[k] | |
561 | new_member = perm_dict.get('name') |
|
557 | new_member = perm_dict.get('name') | |
562 | new_perm = perm_dict.get('perm') |
|
558 | new_perm = perm_dict.get('perm') | |
563 | new_type = perm_dict.get('type') |
|
559 | new_type = perm_dict.get('type') | |
564 | if new_member and new_perm and new_type: |
|
560 | if new_member and new_perm and new_type: | |
565 | perms_new.add((new_member, new_perm, new_type)) |
|
561 | perms_new.add((new_member, new_perm, new_type)) | |
566 |
|
562 | |||
567 | for k, v in value.items(): |
|
563 | for k, v in value.items(): | |
568 | if k.startswith('u_perm_') or k.startswith('g_perm_'): |
|
564 | if k.startswith('u_perm_') or k.startswith('g_perm_'): | |
569 | member = k[7:] |
|
565 | member = k[7:] | |
570 | t = {'u': 'user', |
|
566 | t = {'u': 'user', | |
571 | 'g': 'users_group' |
|
567 | 'g': 'users_group' | |
572 | }[k[0]] |
|
568 | }[k[0]] | |
573 | if member == User.DEFAULT_USER: |
|
569 | if member == User.DEFAULT_USER: | |
574 | if str2bool(value.get('repo_private')): |
|
570 | if str2bool(value.get('repo_private')): | |
575 | # set none for default when updating to |
|
571 | # set none for default when updating to | |
576 | # private repo protects against form manipulation |
|
572 | # private repo protects against form manipulation | |
577 | v = EMPTY_PERM |
|
573 | v = EMPTY_PERM | |
578 | perms_update.add((member, v, t)) |
|
574 | perms_update.add((member, v, t)) | |
579 |
|
575 | |||
580 | value['perms_updates'] = list(perms_update) |
|
576 | value['perms_updates'] = list(perms_update) | |
581 | value['perms_new'] = list(perms_new) |
|
577 | value['perms_new'] = list(perms_new) | |
582 |
|
578 | |||
583 | # update permissions |
|
579 | # update permissions | |
584 | for k, v, t in perms_new: |
|
580 | for k, v, t in perms_new: | |
585 | try: |
|
581 | try: | |
586 | if t == 'user': |
|
582 | if t == 'user': | |
587 | _user_db = User.query() \ |
|
583 | _user_db = User.query() \ | |
588 | .filter(User.active == True) \ |
|
584 | .filter(User.active == True) \ | |
589 | .filter(User.username == k).one() |
|
585 | .filter(User.username == k).one() | |
590 | if t == 'users_group': |
|
586 | if t == 'users_group': | |
591 | _user_db = UserGroup.query() \ |
|
587 | _user_db = UserGroup.query() \ | |
592 | .filter(UserGroup.users_group_active == True) \ |
|
588 | .filter(UserGroup.users_group_active == True) \ | |
593 | .filter(UserGroup.users_group_name == k).one() |
|
589 | .filter(UserGroup.users_group_name == k).one() | |
594 |
|
590 | |||
595 | except Exception: |
|
591 | except Exception: | |
596 | log.exception('Updated permission failed') |
|
592 | log.exception('Updated permission failed') | |
597 | msg = self.message('perm_new_member_type', state) |
|
593 | msg = self.message('perm_new_member_type', state) | |
598 | raise formencode.Invalid(msg, value, state, |
|
594 | raise formencode.Invalid(msg, value, state, | |
599 | error_dict=dict(perm_new_member_name=msg) |
|
595 | error_dict=dict(perm_new_member_name=msg) | |
600 | ) |
|
596 | ) | |
601 | return value |
|
597 | return value | |
602 | return _validator |
|
598 | return _validator | |
603 |
|
599 | |||
604 |
|
600 | |||
605 | def ValidSettings(): |
|
601 | def ValidSettings(): | |
606 | class _validator(formencode.validators.FancyValidator): |
|
602 | class _validator(formencode.validators.FancyValidator): | |
607 | def _convert_to_python(self, value, state): |
|
603 | def _convert_to_python(self, value, state): | |
608 | # settings form for users that are not admin |
|
604 | # settings form for users that are not admin | |
609 | # can't edit certain parameters, it's extra backup if they mangle |
|
605 | # can't edit certain parameters, it's extra backup if they mangle | |
610 | # with forms |
|
606 | # with forms | |
611 |
|
607 | |||
612 | forbidden_params = [ |
|
608 | forbidden_params = [ | |
613 | 'user', 'repo_type', |
|
609 | 'user', 'repo_type', | |
614 | 'repo_enable_downloads', 'repo_enable_statistics' |
|
610 | 'repo_enable_downloads', 'repo_enable_statistics' | |
615 | ] |
|
611 | ] | |
616 |
|
612 | |||
617 | for param in forbidden_params: |
|
613 | for param in forbidden_params: | |
618 | if param in value: |
|
614 | if param in value: | |
619 | del value[param] |
|
615 | del value[param] | |
620 | return value |
|
616 | return value | |
621 |
|
617 | |||
622 | def _validate_python(self, value, state): |
|
618 | def _validate_python(self, value, state): | |
623 | pass |
|
619 | pass | |
624 | return _validator |
|
620 | return _validator | |
625 |
|
621 | |||
626 |
|
622 | |||
627 | def ValidPath(): |
|
623 | def ValidPath(): | |
628 | class _validator(formencode.validators.FancyValidator): |
|
624 | class _validator(formencode.validators.FancyValidator): | |
629 | messages = { |
|
625 | messages = { | |
630 | 'invalid_path': _('This is not a valid path') |
|
626 | 'invalid_path': _('This is not a valid path') | |
631 | } |
|
627 | } | |
632 |
|
628 | |||
633 | def _validate_python(self, value, state): |
|
629 | def _validate_python(self, value, state): | |
634 | if not os.path.isdir(value): |
|
630 | if not os.path.isdir(value): | |
635 | msg = self.message('invalid_path', state) |
|
631 | msg = self.message('invalid_path', state) | |
636 | raise formencode.Invalid(msg, value, state, |
|
632 | raise formencode.Invalid(msg, value, state, | |
637 | error_dict=dict(paths_root_path=msg) |
|
633 | error_dict=dict(paths_root_path=msg) | |
638 | ) |
|
634 | ) | |
639 | return _validator |
|
635 | return _validator | |
640 |
|
636 | |||
641 |
|
637 | |||
642 | def UniqSystemEmail(old_data=None): |
|
638 | def UniqSystemEmail(old_data=None): | |
643 | old_data = old_data or {} |
|
639 | old_data = old_data or {} | |
644 |
|
640 | |||
645 | class _validator(formencode.validators.FancyValidator): |
|
641 | class _validator(formencode.validators.FancyValidator): | |
646 | messages = { |
|
642 | messages = { | |
647 | 'email_taken': _('This email address is already in use') |
|
643 | 'email_taken': _('This email address is already in use') | |
648 | } |
|
644 | } | |
649 |
|
645 | |||
650 | def _convert_to_python(self, value, state): |
|
646 | def _convert_to_python(self, value, state): | |
651 | return value.lower() |
|
647 | return value.lower() | |
652 |
|
648 | |||
653 | def _validate_python(self, value, state): |
|
649 | def _validate_python(self, value, state): | |
654 | if (old_data.get('email') or '').lower() != value: |
|
650 | if (old_data.get('email') or '').lower() != value: | |
655 | user = User.get_by_email(value) |
|
651 | user = User.get_by_email(value) | |
656 | if user is not None: |
|
652 | if user is not None: | |
657 | msg = self.message('email_taken', state) |
|
653 | msg = self.message('email_taken', state) | |
658 | raise formencode.Invalid(msg, value, state, |
|
654 | raise formencode.Invalid(msg, value, state, | |
659 | error_dict=dict(email=msg) |
|
655 | error_dict=dict(email=msg) | |
660 | ) |
|
656 | ) | |
661 | return _validator |
|
657 | return _validator | |
662 |
|
658 | |||
663 |
|
659 | |||
664 | def ValidSystemEmail(): |
|
660 | def ValidSystemEmail(): | |
665 | class _validator(formencode.validators.FancyValidator): |
|
661 | class _validator(formencode.validators.FancyValidator): | |
666 | messages = { |
|
662 | messages = { | |
667 | 'non_existing_email': _('Email address "%(email)s" not found') |
|
663 | 'non_existing_email': _('Email address "%(email)s" not found') | |
668 | } |
|
664 | } | |
669 |
|
665 | |||
670 | def _convert_to_python(self, value, state): |
|
666 | def _convert_to_python(self, value, state): | |
671 | return value.lower() |
|
667 | return value.lower() | |
672 |
|
668 | |||
673 | def _validate_python(self, value, state): |
|
669 | def _validate_python(self, value, state): | |
674 | user = User.get_by_email(value) |
|
670 | user = User.get_by_email(value) | |
675 | if user is None: |
|
671 | if user is None: | |
676 | msg = self.message('non_existing_email', state, email=value) |
|
672 | msg = self.message('non_existing_email', state, email=value) | |
677 | raise formencode.Invalid(msg, value, state, |
|
673 | raise formencode.Invalid(msg, value, state, | |
678 | error_dict=dict(email=msg) |
|
674 | error_dict=dict(email=msg) | |
679 | ) |
|
675 | ) | |
680 |
|
676 | |||
681 | return _validator |
|
677 | return _validator | |
682 |
|
678 | |||
683 |
|
679 | |||
684 | def LdapLibValidator(): |
|
680 | def LdapLibValidator(): | |
685 | class _validator(formencode.validators.FancyValidator): |
|
681 | class _validator(formencode.validators.FancyValidator): | |
686 | messages = { |
|
682 | messages = { | |
687 |
|
683 | |||
688 | } |
|
684 | } | |
689 |
|
685 | |||
690 | def _validate_python(self, value, state): |
|
686 | def _validate_python(self, value, state): | |
691 | try: |
|
687 | try: | |
692 | import ldap |
|
688 | import ldap | |
693 | ldap # pyflakes silence ! |
|
689 | ldap # pyflakes silence ! | |
694 | except ImportError: |
|
690 | except ImportError: | |
695 | raise LdapImportError() |
|
691 | raise LdapImportError() | |
696 |
|
692 | |||
697 | return _validator |
|
693 | return _validator | |
698 |
|
694 | |||
699 |
|
695 | |||
700 | def AttrLoginValidator(): |
|
696 | def AttrLoginValidator(): | |
701 | class _validator(formencode.validators.UnicodeString): |
|
697 | class _validator(formencode.validators.UnicodeString): | |
702 | messages = { |
|
698 | messages = { | |
703 | 'invalid_cn': |
|
699 | 'invalid_cn': | |
704 | _('The LDAP Login attribute of the CN must be specified - ' |
|
700 | _('The LDAP Login attribute of the CN must be specified - ' | |
705 | 'this is the name of the attribute that is equivalent ' |
|
701 | 'this is the name of the attribute that is equivalent ' | |
706 | 'to "username"') |
|
702 | 'to "username"') | |
707 | } |
|
703 | } | |
708 | messages['empty'] = messages['invalid_cn'] |
|
704 | messages['empty'] = messages['invalid_cn'] | |
709 |
|
705 | |||
710 | return _validator |
|
706 | return _validator | |
711 |
|
707 | |||
712 |
|
708 | |||
713 | def ValidIp(): |
|
709 | def ValidIp(): | |
714 | class _validator(CIDR): |
|
710 | class _validator(CIDR): | |
715 | messages = dict( |
|
711 | messages = dict( | |
716 | badFormat=_('Please enter a valid IPv4 or IPv6 address'), |
|
712 | badFormat=_('Please enter a valid IPv4 or IPv6 address'), | |
717 | illegalBits=_('The network size (bits) must be within the range' |
|
713 | illegalBits=_('The network size (bits) must be within the range' | |
718 | ' of 0-32 (not %(bits)r)') |
|
714 | ' of 0-32 (not %(bits)r)') | |
719 | ) |
|
715 | ) | |
720 |
|
716 | |||
721 | def to_python(self, value, state): |
|
717 | def to_python(self, value, state): | |
722 | v = super(_validator, self).to_python(value, state) |
|
718 | v = super(_validator, self).to_python(value, state) | |
723 | v = v.strip() |
|
719 | v = v.strip() | |
724 | net = ipaddr.IPNetwork(address=v) |
|
720 | net = ipaddr.IPNetwork(address=v) | |
725 | if isinstance(net, ipaddr.IPv4Network): |
|
721 | if isinstance(net, ipaddr.IPv4Network): | |
726 | # if IPv4 doesn't end with a mask, add /32 |
|
722 | # if IPv4 doesn't end with a mask, add /32 | |
727 | if '/' not in value: |
|
723 | if '/' not in value: | |
728 | v += '/32' |
|
724 | v += '/32' | |
729 | if isinstance(net, ipaddr.IPv6Network): |
|
725 | if isinstance(net, ipaddr.IPv6Network): | |
730 | # if IPv6 doesn't end with a mask, add /128 |
|
726 | # if IPv6 doesn't end with a mask, add /128 | |
731 | if '/' not in value: |
|
727 | if '/' not in value: | |
732 | v += '/128' |
|
728 | v += '/128' | |
733 | return v |
|
729 | return v | |
734 |
|
730 | |||
735 | def _validate_python(self, value, state): |
|
731 | def _validate_python(self, value, state): | |
736 | try: |
|
732 | try: | |
737 | addr = value.strip() |
|
733 | addr = value.strip() | |
738 | # this raises a ValueError if address is not IPv4 or IPv6 |
|
734 | # this raises a ValueError if address is not IPv4 or IPv6 | |
739 | ipaddr.IPNetwork(address=addr) |
|
735 | ipaddr.IPNetwork(address=addr) | |
740 | except ValueError: |
|
736 | except ValueError: | |
741 | raise formencode.Invalid(self.message('badFormat', state), |
|
737 | raise formencode.Invalid(self.message('badFormat', state), | |
742 | value, state) |
|
738 | value, state) | |
743 |
|
739 | |||
744 | return _validator |
|
740 | return _validator | |
745 |
|
741 | |||
746 |
|
742 | |||
747 | def FieldKey(): |
|
743 | def FieldKey(): | |
748 | class _validator(formencode.validators.FancyValidator): |
|
744 | class _validator(formencode.validators.FancyValidator): | |
749 | messages = dict( |
|
745 | messages = dict( | |
750 | badFormat=_('Key name can only consist of letters, ' |
|
746 | badFormat=_('Key name can only consist of letters, ' | |
751 | 'underscore, dash or numbers') |
|
747 | 'underscore, dash or numbers') | |
752 | ) |
|
748 | ) | |
753 |
|
749 | |||
754 | def _validate_python(self, value, state): |
|
750 | def _validate_python(self, value, state): | |
755 | if not re.match('[a-zA-Z0-9_-]+$', value): |
|
751 | if not re.match('[a-zA-Z0-9_-]+$', value): | |
756 | raise formencode.Invalid(self.message('badFormat', state), |
|
752 | raise formencode.Invalid(self.message('badFormat', state), | |
757 | value, state) |
|
753 | value, state) | |
758 | return _validator |
|
754 | return _validator | |
759 |
|
755 | |||
760 |
|
756 | |||
761 | def BasePath(): |
|
757 | def BasePath(): | |
762 | class _validator(formencode.validators.FancyValidator): |
|
758 | class _validator(formencode.validators.FancyValidator): | |
763 | messages = dict( |
|
759 | messages = dict( | |
764 | badPath=_('Filename cannot be inside a directory') |
|
760 | badPath=_('Filename cannot be inside a directory') | |
765 | ) |
|
761 | ) | |
766 |
|
762 | |||
767 | def _convert_to_python(self, value, state): |
|
763 | def _convert_to_python(self, value, state): | |
768 | return value |
|
764 | return value | |
769 |
|
765 | |||
770 | def _validate_python(self, value, state): |
|
766 | def _validate_python(self, value, state): | |
771 | if value != os.path.basename(value): |
|
767 | if value != os.path.basename(value): | |
772 | raise formencode.Invalid(self.message('badPath', state), |
|
768 | raise formencode.Invalid(self.message('badPath', state), | |
773 | value, state) |
|
769 | value, state) | |
774 | return _validator |
|
770 | return _validator | |
775 |
|
771 | |||
776 |
|
772 | |||
777 | def ValidAuthPlugins(): |
|
773 | def ValidAuthPlugins(): | |
778 | class _validator(formencode.validators.FancyValidator): |
|
774 | class _validator(formencode.validators.FancyValidator): | |
779 | messages = dict( |
|
775 | messages = dict( | |
780 | import_duplicate=_('Plugins %(loaded)s and %(next_to_load)s both export the same name') |
|
776 | import_duplicate=_('Plugins %(loaded)s and %(next_to_load)s both export the same name') | |
781 | ) |
|
777 | ) | |
782 |
|
778 | |||
783 | def _convert_to_python(self, value, state): |
|
779 | def _convert_to_python(self, value, state): | |
784 | # filter empty values |
|
780 | # filter empty values | |
785 | return [s for s in value if s not in [None, '']] |
|
781 | return [s for s in value if s not in [None, '']] | |
786 |
|
782 | |||
787 | def _validate_python(self, value, state): |
|
783 | def _validate_python(self, value, state): | |
788 | from kallithea.lib import auth_modules |
|
784 | from kallithea.lib import auth_modules | |
789 | module_list = value |
|
785 | module_list = value | |
790 | unique_names = {} |
|
786 | unique_names = {} | |
791 | try: |
|
787 | try: | |
792 | for module in module_list: |
|
788 | for module in module_list: | |
793 | plugin = auth_modules.loadplugin(module) |
|
789 | plugin = auth_modules.loadplugin(module) | |
794 | plugin_name = plugin.name |
|
790 | plugin_name = plugin.name | |
795 | if plugin_name in unique_names: |
|
791 | if plugin_name in unique_names: | |
796 | msg = self.message('import_duplicate', state, |
|
792 | msg = self.message('import_duplicate', state, | |
797 | loaded=unique_names[plugin_name], |
|
793 | loaded=unique_names[plugin_name], | |
798 | next_to_load=plugin_name) |
|
794 | next_to_load=plugin_name) | |
799 | raise formencode.Invalid(msg, value, state) |
|
795 | raise formencode.Invalid(msg, value, state) | |
800 | unique_names[plugin_name] = plugin |
|
796 | unique_names[plugin_name] = plugin | |
801 | except (ImportError, AttributeError, TypeError) as e: |
|
797 | except (ImportError, AttributeError, TypeError) as e: | |
802 | raise formencode.Invalid(str(e), value, state) |
|
798 | raise formencode.Invalid(str(e), value, state) | |
803 |
|
799 | |||
804 | return _validator |
|
800 | return _validator |
@@ -1,162 +1,163 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 | # -*- coding: utf-8 -*- |
|
2 | # -*- coding: utf-8 -*- | |
3 | import os |
|
3 | import os | |
4 | import platform |
|
4 | import platform | |
5 | import sys |
|
5 | import sys | |
6 |
|
6 | |||
7 | import setuptools |
|
7 | import setuptools | |
8 | # monkey patch setuptools to use distutils owner/group functionality |
|
8 | # monkey patch setuptools to use distutils owner/group functionality | |
9 | from setuptools.command import sdist |
|
9 | from setuptools.command import sdist | |
10 |
|
10 | |||
11 |
|
11 | |||
12 | if sys.version_info < (3, 6): |
|
12 | if sys.version_info < (3, 6): | |
13 | raise Exception('Kallithea requires Python 3.6 or later') |
|
13 | raise Exception('Kallithea requires Python 3.6 or later') | |
14 |
|
14 | |||
15 |
|
15 | |||
16 | here = os.path.abspath(os.path.dirname(__file__)) |
|
16 | here = os.path.abspath(os.path.dirname(__file__)) | |
17 |
|
17 | |||
18 |
|
18 | |||
19 | def _get_meta_var(name, data, callback_handler=None): |
|
19 | def _get_meta_var(name, data, callback_handler=None): | |
20 | import re |
|
20 | import re | |
21 | matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data) |
|
21 | matches = re.compile(r'(?:%s)\s*=\s*(.*)' % name).search(data) | |
22 | if matches: |
|
22 | if matches: | |
23 | if not callable(callback_handler): |
|
23 | s = eval(matches.groups()[0]) | |
24 |
|
|
24 | if callable(callback_handler): | |
25 |
|
25 | return callback_handler(s) | ||
26 | return callback_handler(eval(matches.groups()[0])) |
|
26 | return s | |
27 |
|
27 | |||
28 | _meta = open(os.path.join(here, 'kallithea', '__init__.py'), 'r') |
|
28 | _meta = open(os.path.join(here, 'kallithea', '__init__.py'), 'r') | |
29 | _metadata = _meta.read() |
|
29 | _metadata = _meta.read() | |
30 | _meta.close() |
|
30 | _meta.close() | |
31 |
|
31 | |||
32 | callback = lambda V: ('.'.join(map(str, V[:3])) + '.'.join(V[3:])) |
|
32 | def callback(V): | |
|
33 | return '.'.join(map(str, V[:3])) + '.'.join(V[3:]) | |||
33 | __version__ = _get_meta_var('VERSION', _metadata, callback) |
|
34 | __version__ = _get_meta_var('VERSION', _metadata, callback) | |
34 | __license__ = _get_meta_var('__license__', _metadata) |
|
35 | __license__ = _get_meta_var('__license__', _metadata) | |
35 | __author__ = _get_meta_var('__author__', _metadata) |
|
36 | __author__ = _get_meta_var('__author__', _metadata) | |
36 | __url__ = _get_meta_var('__url__', _metadata) |
|
37 | __url__ = _get_meta_var('__url__', _metadata) | |
37 | # defines current platform |
|
38 | # defines current platform | |
38 | __platform__ = platform.system() |
|
39 | __platform__ = platform.system() | |
39 |
|
40 | |||
40 | is_windows = __platform__ in ['Windows'] |
|
41 | is_windows = __platform__ in ['Windows'] | |
41 |
|
42 | |||
42 | requirements = [ |
|
43 | requirements = [ | |
43 | "alembic >= 1.0.10, < 1.5", |
|
44 | "alembic >= 1.0.10, < 1.5", | |
44 | "gearbox >= 0.1.0, < 1", |
|
45 | "gearbox >= 0.1.0, < 1", | |
45 | "waitress >= 0.8.8, < 1.5", |
|
46 | "waitress >= 0.8.8, < 1.5", | |
46 | "WebOb >= 1.8, < 1.9", |
|
47 | "WebOb >= 1.8, < 1.9", | |
47 | "backlash >= 0.1.2, < 1", |
|
48 | "backlash >= 0.1.2, < 1", | |
48 | "TurboGears2 >= 2.4, < 2.5", |
|
49 | "TurboGears2 >= 2.4, < 2.5", | |
49 | "tgext.routes >= 0.2.0, < 1", |
|
50 | "tgext.routes >= 0.2.0, < 1", | |
50 | "Beaker >= 1.10.1, < 2", |
|
51 | "Beaker >= 1.10.1, < 2", | |
51 | "WebHelpers2 >= 2.0, < 2.1", |
|
52 | "WebHelpers2 >= 2.0, < 2.1", | |
52 | "FormEncode >= 1.3.1, < 1.4", |
|
53 | "FormEncode >= 1.3.1, < 1.4", | |
53 | "SQLAlchemy >= 1.2.9, < 1.4", |
|
54 | "SQLAlchemy >= 1.2.9, < 1.4", | |
54 | "Mako >= 0.9.1, < 1.2", |
|
55 | "Mako >= 0.9.1, < 1.2", | |
55 | "Pygments >= 2.2.0, < 2.6", |
|
56 | "Pygments >= 2.2.0, < 2.6", | |
56 | "Whoosh >= 2.7.1, < 2.8", |
|
57 | "Whoosh >= 2.7.1, < 2.8", | |
57 | "celery >= 4.3, < 4.5", |
|
58 | "celery >= 4.3, < 4.5", | |
58 | "Babel >= 1.3, < 2.9", |
|
59 | "Babel >= 1.3, < 2.9", | |
59 | "python-dateutil >= 2.1.0, < 2.9", |
|
60 | "python-dateutil >= 2.1.0, < 2.9", | |
60 | "Markdown >= 2.2.1, < 3.2", |
|
61 | "Markdown >= 2.2.1, < 3.2", | |
61 | "docutils >= 0.11, < 0.17", |
|
62 | "docutils >= 0.11, < 0.17", | |
62 | "URLObject >= 2.3.4, < 2.5", |
|
63 | "URLObject >= 2.3.4, < 2.5", | |
63 | "Routes >= 2.0, < 2.5", |
|
64 | "Routes >= 2.0, < 2.5", | |
64 | "dulwich >= 0.19.0, < 0.20", |
|
65 | "dulwich >= 0.19.0, < 0.20", | |
65 | "mercurial >= 5.2, < 5.4", |
|
66 | "mercurial >= 5.2, < 5.4", | |
66 | "decorator >= 4.2.1, < 4.5", |
|
67 | "decorator >= 4.2.1, < 4.5", | |
67 | "Paste >= 2.0.3, < 3.4", |
|
68 | "Paste >= 2.0.3, < 3.4", | |
68 | "bleach >= 3.0, < 3.2", |
|
69 | "bleach >= 3.0, < 3.2", | |
69 | "Click >= 7.0, < 8", |
|
70 | "Click >= 7.0, < 8", | |
70 | "ipaddr >= 2.2.0, < 2.3", |
|
71 | "ipaddr >= 2.2.0, < 2.3", | |
71 | "paginate >= 0.5, < 0.6", |
|
72 | "paginate >= 0.5, < 0.6", | |
72 | "paginate_sqlalchemy >= 0.3.0, < 0.4", |
|
73 | "paginate_sqlalchemy >= 0.3.0, < 0.4", | |
73 | ] |
|
74 | ] | |
74 |
|
75 | |||
75 | if not is_windows: |
|
76 | if not is_windows: | |
76 | requirements.append("bcrypt >= 3.1.0, < 3.2") |
|
77 | requirements.append("bcrypt >= 3.1.0, < 3.2") | |
77 |
|
78 | |||
78 | dependency_links = [ |
|
79 | dependency_links = [ | |
79 | ] |
|
80 | ] | |
80 |
|
81 | |||
81 | classifiers = [ |
|
82 | classifiers = [ | |
82 | 'Development Status :: 4 - Beta', |
|
83 | 'Development Status :: 4 - Beta', | |
83 | 'Environment :: Web Environment', |
|
84 | 'Environment :: Web Environment', | |
84 | 'Framework :: Pylons', |
|
85 | 'Framework :: Pylons', | |
85 | 'Intended Audience :: Developers', |
|
86 | 'Intended Audience :: Developers', | |
86 | 'License :: OSI Approved :: GNU General Public License (GPL)', |
|
87 | 'License :: OSI Approved :: GNU General Public License (GPL)', | |
87 | 'Operating System :: OS Independent', |
|
88 | 'Operating System :: OS Independent', | |
88 | 'Programming Language :: Python :: 3.6', |
|
89 | 'Programming Language :: Python :: 3.6', | |
89 | 'Programming Language :: Python :: 3.7', |
|
90 | 'Programming Language :: Python :: 3.7', | |
90 | 'Programming Language :: Python :: 3.8', |
|
91 | 'Programming Language :: Python :: 3.8', | |
91 | 'Topic :: Software Development :: Version Control', |
|
92 | 'Topic :: Software Development :: Version Control', | |
92 | ] |
|
93 | ] | |
93 |
|
94 | |||
94 |
|
95 | |||
95 | # additional files from project that go somewhere in the filesystem |
|
96 | # additional files from project that go somewhere in the filesystem | |
96 | # relative to sys.prefix |
|
97 | # relative to sys.prefix | |
97 | data_files = [] |
|
98 | data_files = [] | |
98 |
|
99 | |||
99 | description = ('Kallithea is a fast and powerful management tool ' |
|
100 | description = ('Kallithea is a fast and powerful management tool ' | |
100 | 'for Mercurial and Git with a built in push/pull server, ' |
|
101 | 'for Mercurial and Git with a built in push/pull server, ' | |
101 | 'full text search and code-review.') |
|
102 | 'full text search and code-review.') | |
102 |
|
103 | |||
103 | keywords = ' '.join([ |
|
104 | keywords = ' '.join([ | |
104 | 'kallithea', 'mercurial', 'git', 'code review', |
|
105 | 'kallithea', 'mercurial', 'git', 'code review', | |
105 | 'repo groups', 'ldap', 'repository management', 'hgweb replacement', |
|
106 | 'repo groups', 'ldap', 'repository management', 'hgweb replacement', | |
106 | 'hgwebdir', 'gitweb replacement', 'serving hgweb', |
|
107 | 'hgwebdir', 'gitweb replacement', 'serving hgweb', | |
107 | ]) |
|
108 | ]) | |
108 |
|
109 | |||
109 | # long description |
|
110 | # long description | |
110 | README_FILE = 'README.rst' |
|
111 | README_FILE = 'README.rst' | |
111 | try: |
|
112 | try: | |
112 | long_description = open(README_FILE).read() |
|
113 | long_description = open(README_FILE).read() | |
113 | except IOError as err: |
|
114 | except IOError as err: | |
114 | sys.stderr.write( |
|
115 | sys.stderr.write( | |
115 | "[WARNING] Cannot find file specified as long_description (%s): %s\n" |
|
116 | "[WARNING] Cannot find file specified as long_description (%s): %s\n" | |
116 | % (README_FILE, err) |
|
117 | % (README_FILE, err) | |
117 | ) |
|
118 | ) | |
118 | long_description = description |
|
119 | long_description = description | |
119 |
|
120 | |||
120 |
|
121 | |||
121 | sdist_org = sdist.sdist |
|
122 | sdist_org = sdist.sdist | |
122 | class sdist_new(sdist_org): |
|
123 | class sdist_new(sdist_org): | |
123 | def initialize_options(self): |
|
124 | def initialize_options(self): | |
124 | sdist_org.initialize_options(self) |
|
125 | sdist_org.initialize_options(self) | |
125 | self.owner = self.group = 'root' |
|
126 | self.owner = self.group = 'root' | |
126 | sdist.sdist = sdist_new |
|
127 | sdist.sdist = sdist_new | |
127 |
|
128 | |||
128 | packages = setuptools.find_packages(exclude=['ez_setup']) |
|
129 | packages = setuptools.find_packages(exclude=['ez_setup']) | |
129 |
|
130 | |||
130 | setuptools.setup( |
|
131 | setuptools.setup( | |
131 | name='Kallithea', |
|
132 | name='Kallithea', | |
132 | version=__version__, |
|
133 | version=__version__, | |
133 | description=description, |
|
134 | description=description, | |
134 | long_description=long_description, |
|
135 | long_description=long_description, | |
135 | keywords=keywords, |
|
136 | keywords=keywords, | |
136 | license=__license__, |
|
137 | license=__license__, | |
137 | author=__author__, |
|
138 | author=__author__, | |
138 | author_email='kallithea@sfconservancy.org', |
|
139 | author_email='kallithea@sfconservancy.org', | |
139 | dependency_links=dependency_links, |
|
140 | dependency_links=dependency_links, | |
140 | url=__url__, |
|
141 | url=__url__, | |
141 | install_requires=requirements, |
|
142 | install_requires=requirements, | |
142 | classifiers=classifiers, |
|
143 | classifiers=classifiers, | |
143 | data_files=data_files, |
|
144 | data_files=data_files, | |
144 | packages=packages, |
|
145 | packages=packages, | |
145 | include_package_data=True, |
|
146 | include_package_data=True, | |
146 | message_extractors={'kallithea': [ |
|
147 | message_extractors={'kallithea': [ | |
147 | ('**.py', 'python', None), |
|
148 | ('**.py', 'python', None), | |
148 | ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}), |
|
149 | ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}), | |
149 | ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}), |
|
150 | ('templates/**.html', 'mako', {'input_encoding': 'utf-8'}), | |
150 | ('public/**', 'ignore', None)]}, |
|
151 | ('public/**', 'ignore', None)]}, | |
151 | zip_safe=False, |
|
152 | zip_safe=False, | |
152 | entry_points=""" |
|
153 | entry_points=""" | |
153 | [console_scripts] |
|
154 | [console_scripts] | |
154 | kallithea-api = kallithea.bin.kallithea_api:main |
|
155 | kallithea-api = kallithea.bin.kallithea_api:main | |
155 | kallithea-gist = kallithea.bin.kallithea_gist:main |
|
156 | kallithea-gist = kallithea.bin.kallithea_gist:main | |
156 | kallithea-config = kallithea.bin.kallithea_config:main |
|
157 | kallithea-config = kallithea.bin.kallithea_config:main | |
157 | kallithea-cli = kallithea.bin.kallithea_cli:cli |
|
158 | kallithea-cli = kallithea.bin.kallithea_cli:cli | |
158 |
|
159 | |||
159 | [paste.app_factory] |
|
160 | [paste.app_factory] | |
160 | main = kallithea.config.middleware:make_app |
|
161 | main = kallithea.config.middleware:make_app | |
161 | """, |
|
162 | """, | |
162 | ) |
|
163 | ) |
General Comments 0
You need to be logged in to leave comments.
Login now