Show More
@@ -1,472 +1,472 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.controllers.admin.users |
|
15 | kallithea.controllers.admin.users | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Users crud controller |
|
18 | Users crud controller | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Apr 4, 2010 |
|
22 | :created_on: Apr 4, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import logging |
|
28 | import logging | |
29 | import traceback |
|
29 | import traceback | |
30 |
|
30 | |||
31 | import formencode |
|
31 | import formencode | |
32 | from formencode import htmlfill |
|
32 | from formencode import htmlfill | |
33 | from sqlalchemy.sql.expression import func |
|
33 | from sqlalchemy.sql.expression import func | |
34 | from tg import app_globals, request |
|
34 | from tg import app_globals, request | |
35 | from tg import tmpl_context as c |
|
35 | from tg import tmpl_context as c | |
36 | from tg.i18n import ugettext as _ |
|
36 | from tg.i18n import ugettext as _ | |
37 | from webob.exc import HTTPFound, HTTPNotFound |
|
37 | from webob.exc import HTTPFound, HTTPNotFound | |
38 |
|
38 | |||
39 | import kallithea |
|
39 | import kallithea | |
40 | import kallithea.lib.helpers as h |
|
40 | import kallithea.lib.helpers as h | |
41 | from kallithea.controllers import base |
|
41 | from kallithea.controllers import base | |
42 | from kallithea.lib import auth_modules, webutils |
|
42 | from kallithea.lib import auth_modules, webutils | |
43 | from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator, LoginRequired |
|
43 | from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator, LoginRequired | |
44 | from kallithea.lib.exceptions import DefaultUserException, UserCreationError, UserOwnsReposException |
|
44 | from kallithea.lib.exceptions import DefaultUserException, UserCreationError, UserOwnsReposException | |
45 |
from kallithea.lib.utils2 import datetime_to_time, |
|
45 | from kallithea.lib.utils2 import datetime_to_time, generate_api_key, safe_int | |
46 | from kallithea.lib.webutils import url |
|
46 | from kallithea.lib.webutils import fmt_date, url | |
47 | from kallithea.model import db, meta, userlog |
|
47 | from kallithea.model import db, meta, userlog | |
48 | from kallithea.model.api_key import ApiKeyModel |
|
48 | from kallithea.model.api_key import ApiKeyModel | |
49 | from kallithea.model.forms import CustomDefaultPermissionsForm, UserForm |
|
49 | from kallithea.model.forms import CustomDefaultPermissionsForm, UserForm | |
50 | from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException |
|
50 | from kallithea.model.ssh_key import SshKeyModel, SshKeyModelException | |
51 | from kallithea.model.user import UserModel |
|
51 | from kallithea.model.user import UserModel | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | log = logging.getLogger(__name__) |
|
54 | log = logging.getLogger(__name__) | |
55 |
|
55 | |||
56 |
|
56 | |||
57 | class UsersController(base.BaseController): |
|
57 | class UsersController(base.BaseController): | |
58 |
|
58 | |||
    @LoginRequired()
    @HasPermissionAnyDecorator('hg.admin')
    def _before(self, *args, **kwargs):
        # Runs before every action in this controller: restricts the whole
        # users-admin area to logged-in users holding the 'hg.admin' permission.
        super(UsersController, self)._before(*args, **kwargs)
63 |
|
63 | |||
    def index(self, format='html'):
        """Render the admin user list.

        Builds the grid payload in ``c.data`` (one record per non-default
        user) and renders the users overview template. HTML cell fragments
        are produced from the shared Mako defs in
        ``data_table/_dt_elements.html``.
        """
        # The default (anonymous) user is excluded from the listing.
        c.users_list = db.User.query().order_by(db.User.username) \
            .filter_by(is_default_user=False) \
            .order_by(func.lower(db.User.username)) \
            .all()

        users_data = []
        _tmpl_lookup = app_globals.mako_lookup
        template = _tmpl_lookup.get_template('data_table/_dt_elements.html')

        grav_tmpl = '<div class="gravatar">%s</div>'

        def username(user_id, username):
            # Linked user-name cell, rendered from the "user_name" Mako def.
            return template.get_def("user_name") \
                .render_unicode(user_id, username, _=_, h=h, c=c)

        def user_actions(user_id, username):
            # Per-row action cell, rendered from the "user_actions" Mako def.
            return template.get_def("user_actions") \
                .render_unicode(user_id, username, _=_, h=h, c=c)

        for user in c.users_list:
            users_data.append({
                "gravatar": grav_tmpl % h.gravatar(user.email, size=20),
                "raw_name": user.username,
                "username": username(user.user_id, user.username),
                "firstname": webutils.escape(user.name),
                "lastname": webutils.escape(user.lastname),
                "last_login": fmt_date(user.last_login),
                # raw timestamp alongside the formatted date, for client-side sorting
                "last_login_raw": datetime_to_time(user.last_login),
                "active": h.boolicon(user.active),
                "admin": h.boolicon(user.admin),
                "extern_type": user.extern_type,
                "extern_name": user.extern_name,
                "action": user_actions(user.user_id, user.username),
            })

        c.data = {
            "sort": None,
            "dir": "asc",
            "records": users_data
        }

        return base.render('admin/users/users.html')
107 |
|
107 | |||
108 | def create(self): |
|
108 | def create(self): | |
109 | c.default_extern_type = db.User.DEFAULT_AUTH_TYPE |
|
109 | c.default_extern_type = db.User.DEFAULT_AUTH_TYPE | |
110 | c.default_extern_name = '' |
|
110 | c.default_extern_name = '' | |
111 | user_model = UserModel() |
|
111 | user_model = UserModel() | |
112 | user_form = UserForm()() |
|
112 | user_form = UserForm()() | |
113 | try: |
|
113 | try: | |
114 | form_result = user_form.to_python(dict(request.POST)) |
|
114 | form_result = user_form.to_python(dict(request.POST)) | |
115 | user = user_model.create(form_result) |
|
115 | user = user_model.create(form_result) | |
116 | userlog.action_logger(request.authuser, 'admin_created_user:%s' % user.username, |
|
116 | userlog.action_logger(request.authuser, 'admin_created_user:%s' % user.username, | |
117 | None, request.ip_addr) |
|
117 | None, request.ip_addr) | |
118 | webutils.flash(_('Created user %s') % user.username, |
|
118 | webutils.flash(_('Created user %s') % user.username, | |
119 | category='success') |
|
119 | category='success') | |
120 | meta.Session().commit() |
|
120 | meta.Session().commit() | |
121 | except formencode.Invalid as errors: |
|
121 | except formencode.Invalid as errors: | |
122 | return htmlfill.render( |
|
122 | return htmlfill.render( | |
123 | base.render('admin/users/user_add.html'), |
|
123 | base.render('admin/users/user_add.html'), | |
124 | defaults=errors.value, |
|
124 | defaults=errors.value, | |
125 | errors=errors.error_dict or {}, |
|
125 | errors=errors.error_dict or {}, | |
126 | prefix_error=False, |
|
126 | prefix_error=False, | |
127 | encoding="UTF-8", |
|
127 | encoding="UTF-8", | |
128 | force_defaults=False) |
|
128 | force_defaults=False) | |
129 | except UserCreationError as e: |
|
129 | except UserCreationError as e: | |
130 | webutils.flash(e, 'error') |
|
130 | webutils.flash(e, 'error') | |
131 | except Exception: |
|
131 | except Exception: | |
132 | log.error(traceback.format_exc()) |
|
132 | log.error(traceback.format_exc()) | |
133 | webutils.flash(_('Error occurred during creation of user %s') |
|
133 | webutils.flash(_('Error occurred during creation of user %s') | |
134 | % request.POST.get('username'), category='error') |
|
134 | % request.POST.get('username'), category='error') | |
135 | raise HTTPFound(location=url('edit_user', id=user.user_id)) |
|
135 | raise HTTPFound(location=url('edit_user', id=user.user_id)) | |
136 |
|
136 | |||
    def new(self, format='html'):
        """Render the empty add-user form with default auth-type values."""
        c.default_extern_type = db.User.DEFAULT_AUTH_TYPE
        c.default_extern_name = ''
        return base.render('admin/users/user_add.html')
141 |
|
141 | |||
    def update(self, id):
        """Update user ``id`` from the POSTed edit-profile form.

        Fields managed by the active auth module (plus extern_type/name)
        are never overwritten. Validation errors re-render the profile
        page with errors; any other failure flashes an error. In all
        non-validation cases the browser is redirected back to the
        user's edit page.
        """
        user_model = UserModel()
        user = user_model.get(id)
        _form = UserForm(edit=True, old_data={'user_id': id,
                                              'email': user.email})()
        form_result = {}
        try:
            form_result = _form.to_python(dict(request.POST))
            # Never overwrite externally-managed fields from the form.
            skip_attrs = ['extern_type', 'extern_name',
                          ] + auth_modules.get_managed_fields(user)

            user_model.update(id, form_result, skip_attrs=skip_attrs)
            usr = form_result['username']
            userlog.action_logger(request.authuser, 'admin_updated_user:%s' % usr,
                                  None, request.ip_addr)
            webutils.flash(_('User updated successfully'), category='success')
            meta.Session().commit()
        except formencode.Invalid as errors:
            defaults = errors.value
            e = errors.error_dict or {}
            # Re-populate the permission checkboxes from current DB state,
            # since they are not part of the failed form values.
            defaults.update({
                'create_repo_perm': user_model.has_perm(id,
                                                        'hg.create.repository'),
                'fork_repo_perm': user_model.has_perm(id, 'hg.fork.repository'),
            })
            return htmlfill.render(
                self._render_edit_profile(user),
                defaults=defaults,
                errors=e,
                prefix_error=False,
                encoding="UTF-8",
                force_defaults=False)
        except Exception:
            log.error(traceback.format_exc())
            webutils.flash(_('Error occurred during update of user %s')
                           % form_result.get('username'), category='error')
        raise HTTPFound(location=url('edit_user', id=id))
179 |
|
179 | |||
    def delete(self, id):
        """Delete user ``id`` and redirect back to the user list.

        Deleting the default user or a repository owner is refused with a
        warning flash. Whether the user had SSH keys is remembered up
        front so the authorized_keys file can be rewritten only after a
        successful delete.
        """
        usr = db.User.get_or_404(id)
        has_ssh_keys = bool(usr.ssh_keys)
        try:
            UserModel().delete(usr)
            meta.Session().commit()
            webutils.flash(_('Successfully deleted user'), category='success')
        except (UserOwnsReposException, DefaultUserException) as e:
            webutils.flash(e, category='warning')
        except Exception:
            log.error(traceback.format_exc())
            webutils.flash(_('An error occurred during deletion of user'),
                           category='error')
        else:
            # Only rewrite the SSH authorized_keys file when the delete
            # actually succeeded and the user had keys to remove.
            if has_ssh_keys:
                SshKeyModel().write_authorized_keys()
        raise HTTPFound(location=url('users'))
197 |
|
197 | |||
    def _get_user_or_raise_if_default(self, id):
        """Return user ``id``, or 404 if missing or the default user.

        For the default user a warning flash is set before raising, since
        the default user must not be edited through these pages.
        """
        try:
            return db.User.get_or_404(id, allow_default=False)
        except DefaultUserException:
            webutils.flash(_("The default user cannot be edited"), category='warning')
            raise HTTPNotFound
204 |
|
204 | |||
    def _render_edit_profile(self, user):
        """Render the 'profile' tab of the user edit page for ``user``.

        Sets up template context, including ``c.readonly`` — a helper the
        template uses to mark auth-module-managed fields as read-only.
        """
        c.user = user
        c.active = 'profile'
        c.perm_user = AuthUser(dbuser=user)
        managed_fields = auth_modules.get_managed_fields(user)
        c.readonly = lambda n: 'readonly' if n in managed_fields else None
        return base.render('admin/users/user_edit.html')
212 |
|
212 | |||
    def edit(self, id, format='html'):
        """Show the edit-profile form for user ``id``, pre-filled from the DB."""
        user = self._get_user_or_raise_if_default(id)
        defaults = user.get_dict()

        return htmlfill.render(
            self._render_edit_profile(user),
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False)
222 |
|
222 | |||
    def edit_advanced(self, id):
        """Show the 'advanced' tab of the user edit page for user ``id``.

        The form defaults are the user's attributes plus the current state
        of the global create/fork permission checkboxes.
        """
        c.user = self._get_user_or_raise_if_default(id)
        c.active = 'advanced'
        c.perm_user = AuthUser(dbuser=c.user)

        umodel = UserModel()
        defaults = c.user.get_dict()
        defaults.update({
            'create_repo_perm': umodel.has_perm(c.user, 'hg.create.repository'),
            'create_user_group_perm': umodel.has_perm(c.user,
                                                      'hg.usergroup.create.true'),
            'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'),
        })
        return htmlfill.render(
            base.render('admin/users/user_edit.html'),
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False)
241 |
|
241 | |||
    def edit_api_keys(self, id):
        """Show the 'api_keys' tab of the user edit page for user ``id``.

        Lists the user's API keys (expired keys included) and the lifetime
        choices offered when creating a new key.
        """
        c.user = self._get_user_or_raise_if_default(id)
        c.active = 'api_keys'
        show_expired = True
        # Lifetime select options, in minutes; -1 means no expiry.
        c.lifetime_values = [
            (str(-1), _('Forever')),
            (str(5), _('5 minutes')),
            (str(60), _('1 hour')),
            (str(60 * 24), _('1 day')),
            (str(60 * 24 * 30), _('1 month')),
        ]
        c.lifetime_options = [(c.lifetime_values, _("Lifetime"))]
        c.user_api_keys = ApiKeyModel().get_api_keys(c.user.user_id,
                                                     show_expired=show_expired)
        defaults = c.user.get_dict()
        return htmlfill.render(
            base.render('admin/users/user_edit.html'),
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False)
262 |
|
262 | |||
    def add_api_key(self, id):
        """Create a new API key for user ``id`` from POST data.

        Lifetime is read in minutes; a missing/invalid value falls back
        to -1 (no expiry). Redirects back to the API keys tab.
        """
        c.user = self._get_user_or_raise_if_default(id)

        lifetime = safe_int(request.POST.get('lifetime'), -1)
        description = request.POST.get('description')
        ApiKeyModel().create(c.user.user_id, description, lifetime)
        meta.Session().commit()
        webutils.flash(_("API key successfully created"), category='success')
        raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id))
272 |
|
272 | |||
    def delete_api_key(self, id):
        """Delete or reset an API key of user ``id``, driven by POST fields.

        ``del_api_key_builtin`` resets the user's built-in API key to a
        fresh value; otherwise ``del_api_key`` names an extra key to
        delete. Redirects back to the API keys tab either way.
        """
        c.user = self._get_user_or_raise_if_default(id)

        api_key = request.POST.get('del_api_key')
        if request.POST.get('del_api_key_builtin'):
            # The built-in key cannot be removed, only regenerated.
            c.user.api_key = generate_api_key()
            meta.Session().commit()
            webutils.flash(_("API key successfully reset"), category='success')
        elif api_key:
            ApiKeyModel().delete(api_key, c.user.user_id)
            meta.Session().commit()
            webutils.flash(_("API key successfully deleted"), category='success')

        raise HTTPFound(location=url('edit_user_api_keys', id=c.user.user_id))
287 |
|
287 | |||
    def update_account(self, id):
        # NOTE(review): intentionally a no-op stub — the route exists but
        # performs no action here; confirm whether it is still needed.
        pass
290 |
|
290 | |||
    def edit_perms(self, id):
        """Show the 'perms' tab of the user edit page for user ``id``.

        Defaults combine the user's attributes with the current state of
        the global create/fork/user-group permission checkboxes.
        """
        c.user = self._get_user_or_raise_if_default(id)
        c.active = 'perms'
        c.perm_user = AuthUser(dbuser=c.user)

        umodel = UserModel()
        defaults = c.user.get_dict()
        defaults.update({
            'create_repo_perm': umodel.has_perm(c.user, 'hg.create.repository'),
            'create_user_group_perm': umodel.has_perm(c.user,
                                                      'hg.usergroup.create.true'),
            'fork_repo_perm': umodel.has_perm(c.user, 'hg.fork.repository'),
        })
        return htmlfill.render(
            base.render('admin/users/user_edit.html'),
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False)
309 |
|
309 | |||
    def update_perms(self, id):
        """Replace user ``id``'s default permissions from the POSTed form.

        All existing user-to-permission rows are deleted, then the three
        boolean form fields each grant either the 'allow' or the 'deny'
        variant of the corresponding permission. Redirects back to the
        perms tab in all cases.
        """
        user = self._get_user_or_raise_if_default(id)

        try:
            form = CustomDefaultPermissionsForm()()
            form_result = form.to_python(request.POST)

            user_model = UserModel()

            # Wipe the existing per-user permission rows before re-granting.
            defs = db.UserToPerm.query() \
                .filter(db.UserToPerm.user == user) \
                .all()
            for ug in defs:
                meta.Session().delete(ug)

            if form_result['create_repo_perm']:
                user_model.grant_perm(id, 'hg.create.repository')
            else:
                user_model.grant_perm(id, 'hg.create.none')
            if form_result['create_user_group_perm']:
                user_model.grant_perm(id, 'hg.usergroup.create.true')
            else:
                user_model.grant_perm(id, 'hg.usergroup.create.false')
            if form_result['fork_repo_perm']:
                user_model.grant_perm(id, 'hg.fork.repository')
            else:
                user_model.grant_perm(id, 'hg.fork.none')
            webutils.flash(_("Updated permissions"), category='success')
            meta.Session().commit()
        except Exception:
            log.error(traceback.format_exc())
            webutils.flash(_('An error occurred during permissions saving'),
                           category='error')
        raise HTTPFound(location=url('edit_user_perms', id=id))
344 |
|
344 | |||
    def edit_emails(self, id):
        """Show the 'emails' tab of the user edit page for user ``id``."""
        c.user = self._get_user_or_raise_if_default(id)
        c.active = 'emails'
        c.user_email_map = db.UserEmailMap.query() \
            .filter(db.UserEmailMap.user == c.user).all()

        defaults = c.user.get_dict()
        return htmlfill.render(
            base.render('admin/users/user_edit.html'),
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False)
357 |
|
357 | |||
    def add_email(self, id):
        """Add an extra email address to user ``id`` from POST data.

        Validation errors flash the email-specific message; other errors
        flash a generic one. Always redirects back to the emails tab.
        """
        user = self._get_user_or_raise_if_default(id)
        email = request.POST.get('new_email')
        user_model = UserModel()

        try:
            user_model.add_extra_email(id, email)
            meta.Session().commit()
            webutils.flash(_("Added email %s to user") % email, category='success')
        except formencode.Invalid as error:
            msg = error.error_dict['email']
            webutils.flash(msg, category='error')
        except Exception:
            log.error(traceback.format_exc())
            webutils.flash(_('An error occurred during email saving'),
                           category='error')
        raise HTTPFound(location=url('edit_user_emails', id=id))
375 |
|
375 | |||
    def delete_email(self, id):
        """Remove the extra email given by POST ``del_email_id`` from user ``id``."""
        user = self._get_user_or_raise_if_default(id)
        email_id = request.POST.get('del_email_id')
        user_model = UserModel()
        user_model.delete_extra_email(id, email_id)
        meta.Session().commit()
        webutils.flash(_("Removed email from user"), category='success')
        raise HTTPFound(location=url('edit_user_emails', id=id))
384 |
|
384 | |||
    def edit_ips(self, id):
        """Show the 'ips' tab of the user edit page for user ``id``.

        Also loads the default user's IP whitelist, since those global
        restrictions apply to every user.
        """
        c.user = self._get_user_or_raise_if_default(id)
        c.active = 'ips'
        c.user_ip_map = db.UserIpMap.query() \
            .filter(db.UserIpMap.user == c.user).all()

        c.default_user_ip_map = db.UserIpMap.query() \
            .filter(db.UserIpMap.user_id == kallithea.DEFAULT_USER_ID).all()

        defaults = c.user.get_dict()
        return htmlfill.render(
            base.render('admin/users/user_edit.html'),
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False)
400 |
|
400 | |||
401 | def add_ip(self, id): |
|
401 | def add_ip(self, id): | |
402 | ip = request.POST.get('new_ip') |
|
402 | ip = request.POST.get('new_ip') | |
403 | user_model = UserModel() |
|
403 | user_model = UserModel() | |
404 |
|
404 | |||
405 | try: |
|
405 | try: | |
406 | user_model.add_extra_ip(id, ip) |
|
406 | user_model.add_extra_ip(id, ip) | |
407 | meta.Session().commit() |
|
407 | meta.Session().commit() | |
408 | webutils.flash(_("Added IP address %s to user whitelist") % ip, category='success') |
|
408 | webutils.flash(_("Added IP address %s to user whitelist") % ip, category='success') | |
409 | except formencode.Invalid as error: |
|
409 | except formencode.Invalid as error: | |
410 | msg = error.error_dict['ip'] |
|
410 | msg = error.error_dict['ip'] | |
411 | webutils.flash(msg, category='error') |
|
411 | webutils.flash(msg, category='error') | |
412 | except Exception: |
|
412 | except Exception: | |
413 | log.error(traceback.format_exc()) |
|
413 | log.error(traceback.format_exc()) | |
414 | webutils.flash(_('An error occurred while adding IP address'), |
|
414 | webutils.flash(_('An error occurred while adding IP address'), | |
415 | category='error') |
|
415 | category='error') | |
416 |
|
416 | |||
417 | if 'default_user' in request.POST: |
|
417 | if 'default_user' in request.POST: | |
418 | raise HTTPFound(location=url('admin_permissions_ips')) |
|
418 | raise HTTPFound(location=url('admin_permissions_ips')) | |
419 | raise HTTPFound(location=url('edit_user_ips', id=id)) |
|
419 | raise HTTPFound(location=url('edit_user_ips', id=id)) | |
420 |
|
420 | |||
421 | def delete_ip(self, id): |
|
421 | def delete_ip(self, id): | |
422 | ip_id = request.POST.get('del_ip_id') |
|
422 | ip_id = request.POST.get('del_ip_id') | |
423 | user_model = UserModel() |
|
423 | user_model = UserModel() | |
424 | user_model.delete_extra_ip(id, ip_id) |
|
424 | user_model.delete_extra_ip(id, ip_id) | |
425 | meta.Session().commit() |
|
425 | meta.Session().commit() | |
426 | webutils.flash(_("Removed IP address from user whitelist"), category='success') |
|
426 | webutils.flash(_("Removed IP address from user whitelist"), category='success') | |
427 |
|
427 | |||
428 | if 'default_user' in request.POST: |
|
428 | if 'default_user' in request.POST: | |
429 | raise HTTPFound(location=url('admin_permissions_ips')) |
|
429 | raise HTTPFound(location=url('admin_permissions_ips')) | |
430 | raise HTTPFound(location=url('edit_user_ips', id=id)) |
|
430 | raise HTTPFound(location=url('edit_user_ips', id=id)) | |
431 |
|
431 | |||
432 | @base.IfSshEnabled |
|
432 | @base.IfSshEnabled | |
433 | def edit_ssh_keys(self, id): |
|
433 | def edit_ssh_keys(self, id): | |
434 | c.user = self._get_user_or_raise_if_default(id) |
|
434 | c.user = self._get_user_or_raise_if_default(id) | |
435 | c.active = 'ssh_keys' |
|
435 | c.active = 'ssh_keys' | |
436 | c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id) |
|
436 | c.user_ssh_keys = SshKeyModel().get_ssh_keys(c.user.user_id) | |
437 | defaults = c.user.get_dict() |
|
437 | defaults = c.user.get_dict() | |
438 | return htmlfill.render( |
|
438 | return htmlfill.render( | |
439 | base.render('admin/users/user_edit.html'), |
|
439 | base.render('admin/users/user_edit.html'), | |
440 | defaults=defaults, |
|
440 | defaults=defaults, | |
441 | encoding="UTF-8", |
|
441 | encoding="UTF-8", | |
442 | force_defaults=False) |
|
442 | force_defaults=False) | |
443 |
|
443 | |||
444 | @base.IfSshEnabled |
|
444 | @base.IfSshEnabled | |
445 | def ssh_keys_add(self, id): |
|
445 | def ssh_keys_add(self, id): | |
446 | c.user = self._get_user_or_raise_if_default(id) |
|
446 | c.user = self._get_user_or_raise_if_default(id) | |
447 |
|
447 | |||
448 | description = request.POST.get('description') |
|
448 | description = request.POST.get('description') | |
449 | public_key = request.POST.get('public_key') |
|
449 | public_key = request.POST.get('public_key') | |
450 | try: |
|
450 | try: | |
451 | new_ssh_key = SshKeyModel().create(c.user.user_id, |
|
451 | new_ssh_key = SshKeyModel().create(c.user.user_id, | |
452 | description, public_key) |
|
452 | description, public_key) | |
453 | meta.Session().commit() |
|
453 | meta.Session().commit() | |
454 | SshKeyModel().write_authorized_keys() |
|
454 | SshKeyModel().write_authorized_keys() | |
455 | webutils.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success') |
|
455 | webutils.flash(_("SSH key %s successfully added") % new_ssh_key.fingerprint, category='success') | |
456 | except SshKeyModelException as e: |
|
456 | except SshKeyModelException as e: | |
457 | webutils.flash(e.args[0], category='error') |
|
457 | webutils.flash(e.args[0], category='error') | |
458 | raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id)) |
|
458 | raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id)) | |
459 |
|
459 | |||
460 | @base.IfSshEnabled |
|
460 | @base.IfSshEnabled | |
461 | def ssh_keys_delete(self, id): |
|
461 | def ssh_keys_delete(self, id): | |
462 | c.user = self._get_user_or_raise_if_default(id) |
|
462 | c.user = self._get_user_or_raise_if_default(id) | |
463 |
|
463 | |||
464 | fingerprint = request.POST.get('del_public_key_fingerprint') |
|
464 | fingerprint = request.POST.get('del_public_key_fingerprint') | |
465 | try: |
|
465 | try: | |
466 | SshKeyModel().delete(fingerprint, c.user.user_id) |
|
466 | SshKeyModel().delete(fingerprint, c.user.user_id) | |
467 | meta.Session().commit() |
|
467 | meta.Session().commit() | |
468 | SshKeyModel().write_authorized_keys() |
|
468 | SshKeyModel().write_authorized_keys() | |
469 | webutils.flash(_("SSH key successfully deleted"), category='success') |
|
469 | webutils.flash(_("SSH key successfully deleted"), category='success') | |
470 | except SshKeyModelException as e: |
|
470 | except SshKeyModelException as e: | |
471 | webutils.flash(e.args[0], category='error') |
|
471 | webutils.flash(e.args[0], category='error') | |
472 | raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id)) |
|
472 | raise HTTPFound(location=url('edit_user_ssh_keys', id=c.user.user_id)) |
@@ -1,134 +1,134 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.controllers.feed |
|
15 | kallithea.controllers.feed | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Feed controller for Kallithea |
|
18 | Feed controller for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Apr 23, 2010 |
|
22 | :created_on: Apr 23, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | import logging |
|
29 | import logging | |
30 |
|
30 | |||
31 | from beaker.cache import cache_region |
|
31 | from beaker.cache import cache_region | |
32 | from tg import response |
|
32 | from tg import response | |
33 | from tg import tmpl_context as c |
|
33 | from tg import tmpl_context as c | |
34 | from tg.i18n import ugettext as _ |
|
34 | from tg.i18n import ugettext as _ | |
35 |
|
35 | |||
36 | import kallithea |
|
36 | import kallithea | |
37 | import kallithea.lib.helpers as h |
|
37 | import kallithea.lib.helpers as h | |
38 | from kallithea.controllers import base |
|
38 | from kallithea.controllers import base | |
39 | from kallithea.lib import feeds, webutils |
|
39 | from kallithea.lib import feeds, webutils | |
40 | from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired |
|
40 | from kallithea.lib.auth import HasRepoPermissionLevelDecorator, LoginRequired | |
41 | from kallithea.lib.diffs import DiffProcessor |
|
41 | from kallithea.lib.diffs import DiffProcessor | |
42 |
from kallithea.lib.utils2 import asbool, |
|
42 | from kallithea.lib.utils2 import asbool, safe_int, safe_str | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | log = logging.getLogger(__name__) |
|
45 | log = logging.getLogger(__name__) | |
46 |
|
46 | |||
47 |
|
47 | |||
48 | class FeedController(base.BaseRepoController): |
|
48 | class FeedController(base.BaseRepoController): | |
49 |
|
49 | |||
50 | @LoginRequired(allow_default_user=True) |
|
50 | @LoginRequired(allow_default_user=True) | |
51 | @HasRepoPermissionLevelDecorator('read') |
|
51 | @HasRepoPermissionLevelDecorator('read') | |
52 | def _before(self, *args, **kwargs): |
|
52 | def _before(self, *args, **kwargs): | |
53 | super(FeedController, self)._before(*args, **kwargs) |
|
53 | super(FeedController, self)._before(*args, **kwargs) | |
54 |
|
54 | |||
55 | def _get_title(self, cs): |
|
55 | def _get_title(self, cs): | |
56 | return shorter(cs.message, 160) |
|
56 | return webutils.shorter(cs.message, 160) | |
57 |
|
57 | |||
58 | def __get_desc(self, cs): |
|
58 | def __get_desc(self, cs): | |
59 | desc_msg = [(_('%s committed on %s') |
|
59 | desc_msg = [(_('%s committed on %s') | |
60 | % (h.person(cs.author), fmt_date(cs.date))) + '<br/>'] |
|
60 | % (h.person(cs.author), webutils.fmt_date(cs.date))) + '<br/>'] | |
61 | # branches, tags, bookmarks |
|
61 | # branches, tags, bookmarks | |
62 | for branch in cs.branches: |
|
62 | for branch in cs.branches: | |
63 | desc_msg.append('branch: %s<br/>' % branch) |
|
63 | desc_msg.append('branch: %s<br/>' % branch) | |
64 | for book in cs.bookmarks: |
|
64 | for book in cs.bookmarks: | |
65 | desc_msg.append('bookmark: %s<br/>' % book) |
|
65 | desc_msg.append('bookmark: %s<br/>' % book) | |
66 | for tag in cs.tags: |
|
66 | for tag in cs.tags: | |
67 | desc_msg.append('tag: %s<br/>' % tag) |
|
67 | desc_msg.append('tag: %s<br/>' % tag) | |
68 |
|
68 | |||
69 | changes = [] |
|
69 | changes = [] | |
70 | diff_limit = safe_int(kallithea.CONFIG.get('rss_cut_off_limit', 32 * 1024)) |
|
70 | diff_limit = safe_int(kallithea.CONFIG.get('rss_cut_off_limit', 32 * 1024)) | |
71 | raw_diff = cs.diff() |
|
71 | raw_diff = cs.diff() | |
72 | diff_processor = DiffProcessor(raw_diff, |
|
72 | diff_processor = DiffProcessor(raw_diff, | |
73 | diff_limit=diff_limit, |
|
73 | diff_limit=diff_limit, | |
74 | inline_diff=False) |
|
74 | inline_diff=False) | |
75 |
|
75 | |||
76 | for st in diff_processor.parsed: |
|
76 | for st in diff_processor.parsed: | |
77 | st.update({'added': st['stats']['added'], |
|
77 | st.update({'added': st['stats']['added'], | |
78 | 'removed': st['stats']['deleted']}) |
|
78 | 'removed': st['stats']['deleted']}) | |
79 | changes.append('\n %(operation)s %(filename)s ' |
|
79 | changes.append('\n %(operation)s %(filename)s ' | |
80 | '(%(added)s lines added, %(removed)s lines removed)' |
|
80 | '(%(added)s lines added, %(removed)s lines removed)' | |
81 | % st) |
|
81 | % st) | |
82 | if diff_processor.limited_diff: |
|
82 | if diff_processor.limited_diff: | |
83 | changes = changes + ['\n ' + |
|
83 | changes = changes + ['\n ' + | |
84 | _('Changeset was too big and was cut off...')] |
|
84 | _('Changeset was too big and was cut off...')] | |
85 |
|
85 | |||
86 | # rev link |
|
86 | # rev link | |
87 | _url = webutils.canonical_url('changeset_home', repo_name=c.db_repo.repo_name, |
|
87 | _url = webutils.canonical_url('changeset_home', repo_name=c.db_repo.repo_name, | |
88 | revision=cs.raw_id) |
|
88 | revision=cs.raw_id) | |
89 | desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8])) |
|
89 | desc_msg.append('changeset: <a href="%s">%s</a>' % (_url, cs.raw_id[:8])) | |
90 |
|
90 | |||
91 | desc_msg.append('<pre>') |
|
91 | desc_msg.append('<pre>') | |
92 | desc_msg.append(webutils.urlify_text(cs.message)) |
|
92 | desc_msg.append(webutils.urlify_text(cs.message)) | |
93 | desc_msg.append('\n') |
|
93 | desc_msg.append('\n') | |
94 | desc_msg.extend(changes) |
|
94 | desc_msg.extend(changes) | |
95 | if asbool(kallithea.CONFIG.get('rss_include_diff', False)): |
|
95 | if asbool(kallithea.CONFIG.get('rss_include_diff', False)): | |
96 | desc_msg.append('\n\n') |
|
96 | desc_msg.append('\n\n') | |
97 | desc_msg.append(safe_str(raw_diff)) |
|
97 | desc_msg.append(safe_str(raw_diff)) | |
98 | desc_msg.append('</pre>') |
|
98 | desc_msg.append('</pre>') | |
99 | return desc_msg |
|
99 | return desc_msg | |
100 |
|
100 | |||
101 | def _feed(self, repo_name, feeder): |
|
101 | def _feed(self, repo_name, feeder): | |
102 | """Produce a simple feed""" |
|
102 | """Produce a simple feed""" | |
103 |
|
103 | |||
104 | @cache_region('long_term_file', '_get_feed_from_cache') |
|
104 | @cache_region('long_term_file', '_get_feed_from_cache') | |
105 | def _get_feed_from_cache(*_cache_keys): # parameters are not really used - only as caching key |
|
105 | def _get_feed_from_cache(*_cache_keys): # parameters are not really used - only as caching key | |
106 | header = dict( |
|
106 | header = dict( | |
107 | title=_('%s %s feed') % (c.site_name, repo_name), |
|
107 | title=_('%s %s feed') % (c.site_name, repo_name), | |
108 | link=webutils.canonical_url('summary_home', repo_name=repo_name), |
|
108 | link=webutils.canonical_url('summary_home', repo_name=repo_name), | |
109 | description=_('Changes on %s repository') % repo_name, |
|
109 | description=_('Changes on %s repository') % repo_name, | |
110 | ) |
|
110 | ) | |
111 |
|
111 | |||
112 | rss_items_per_page = safe_int(kallithea.CONFIG.get('rss_items_per_page', 20)) |
|
112 | rss_items_per_page = safe_int(kallithea.CONFIG.get('rss_items_per_page', 20)) | |
113 | entries=[] |
|
113 | entries=[] | |
114 | for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])): |
|
114 | for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])): | |
115 | entries.append(dict( |
|
115 | entries.append(dict( | |
116 | title=self._get_title(cs), |
|
116 | title=self._get_title(cs), | |
117 | link=webutils.canonical_url('changeset_home', repo_name=repo_name, revision=cs.raw_id), |
|
117 | link=webutils.canonical_url('changeset_home', repo_name=repo_name, revision=cs.raw_id), | |
118 | author_email=cs.author_email, |
|
118 | author_email=cs.author_email, | |
119 | author_name=cs.author_name, |
|
119 | author_name=cs.author_name, | |
120 | description=''.join(self.__get_desc(cs)), |
|
120 | description=''.join(self.__get_desc(cs)), | |
121 | pubdate=cs.date, |
|
121 | pubdate=cs.date, | |
122 | )) |
|
122 | )) | |
123 | return feeder.render(header, entries) |
|
123 | return feeder.render(header, entries) | |
124 |
|
124 | |||
125 | response.content_type = feeder.content_type |
|
125 | response.content_type = feeder.content_type | |
126 | return _get_feed_from_cache(repo_name, feeder.__name__) |
|
126 | return _get_feed_from_cache(repo_name, feeder.__name__) | |
127 |
|
127 | |||
128 | def atom(self, repo_name): |
|
128 | def atom(self, repo_name): | |
129 | """Produce a simple atom-1.0 feed""" |
|
129 | """Produce a simple atom-1.0 feed""" | |
130 | return self._feed(repo_name, feeds.AtomFeed) |
|
130 | return self._feed(repo_name, feeds.AtomFeed) | |
131 |
|
131 | |||
132 | def rss(self, repo_name): |
|
132 | def rss(self, repo_name): | |
133 | """Produce a simple rss2 feed""" |
|
133 | """Produce a simple rss2 feed""" | |
134 | return self._feed(repo_name, feeds.RssFeed) |
|
134 | return self._feed(repo_name, feeds.RssFeed) |
@@ -1,875 +1,874 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | Helper functions |
|
15 | Helper functions | |
16 |
|
16 | |||
17 | Consists of functions to typically be used within templates, but also |
|
17 | Consists of functions to typically be used within templates, but also | |
18 | available to Controllers. This module is available to both as 'h'. |
|
18 | available to Controllers. This module is available to both as 'h'. | |
19 | """ |
|
19 | """ | |
20 | import hashlib |
|
20 | import hashlib | |
21 | import logging |
|
21 | import logging | |
22 | import re |
|
22 | import re | |
23 | import textwrap |
|
23 | import textwrap | |
24 | import urllib.parse |
|
24 | import urllib.parse | |
25 |
|
25 | |||
26 | from beaker.cache import cache_region |
|
26 | from beaker.cache import cache_region | |
27 | from pygments import highlight as code_highlight |
|
27 | from pygments import highlight as code_highlight | |
28 | from pygments.formatters.html import HtmlFormatter |
|
28 | from pygments.formatters.html import HtmlFormatter | |
29 | from tg import tmpl_context as c |
|
29 | from tg import tmpl_context as c | |
30 | from tg.i18n import ugettext as _ |
|
30 | from tg.i18n import ugettext as _ | |
31 |
|
31 | |||
32 | import kallithea |
|
32 | import kallithea | |
33 | from kallithea.lib.annotate import annotate_highlight |
|
33 | from kallithea.lib.annotate import annotate_highlight | |
34 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel |
|
34 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel | |
35 | from kallithea.lib.diffs import BIN_FILENODE, CHMOD_FILENODE, DEL_FILENODE, MOD_FILENODE, NEW_FILENODE, RENAMED_FILENODE |
|
35 | from kallithea.lib.diffs import BIN_FILENODE, CHMOD_FILENODE, DEL_FILENODE, MOD_FILENODE, NEW_FILENODE, RENAMED_FILENODE | |
36 | from kallithea.lib.pygmentsutils import get_custom_lexer |
|
36 | from kallithea.lib.pygmentsutils import get_custom_lexer | |
37 |
from kallithea.lib.utils2 import |
|
37 | from kallithea.lib.utils2 import AttributeDict, asbool, credentials_filter, link_to_ref, safe_bytes, safe_int, safe_str, time_to_datetime | |
38 | time_to_datetime) |
|
|||
39 | from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset |
|
38 | from kallithea.lib.vcs.backends.base import BaseChangeset, EmptyChangeset | |
40 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError |
|
39 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError | |
41 | from kallithea.lib.vcs.utils import author_email, author_name |
|
40 | from kallithea.lib.vcs.utils import author_email, author_name | |
42 |
from kallithea.lib.webutils import (HTML, Option, canonical_url, checkbox, chop_at, end_form, escape, form, format_byte_size, hidden, js, jshtml, |
|
41 | from kallithea.lib.webutils import (HTML, Option, age, canonical_url, checkbox, chop_at, end_form, escape, fmt_date, form, format_byte_size, hidden, js, jshtml, | |
43 | literal, password, pop_flash_messages, radio, render_w_mentions, reset, safeid, select, session_csrf_secret_name, |
|
42 | link_to, literal, password, pop_flash_messages, radio, render_w_mentions, reset, safeid, select, session_csrf_secret_name, | |
44 | session_csrf_secret_token, submit, text, textarea, url, urlify_text, wrap_paragraphs) |
|
43 | session_csrf_secret_token, shorter, submit, text, textarea, url, urlify_text, wrap_paragraphs) | |
45 | from kallithea.model import db |
|
44 | from kallithea.model import db | |
46 | from kallithea.model.changeset_status import ChangesetStatusModel |
|
45 | from kallithea.model.changeset_status import ChangesetStatusModel | |
47 |
|
46 | |||
48 |
|
47 | |||
49 | # mute pyflakes "imported but unused" |
|
48 | # mute pyflakes "imported but unused" | |
50 | # from webutils |
|
49 | # from webutils | |
51 | assert Option |
|
50 | assert Option | |
|
51 | assert age | |||
52 | assert canonical_url |
|
52 | assert canonical_url | |
53 | assert checkbox |
|
53 | assert checkbox | |
54 | assert chop_at |
|
54 | assert chop_at | |
55 | assert end_form |
|
55 | assert end_form | |
|
56 | assert fmt_date | |||
56 | assert form |
|
57 | assert form | |
57 | assert format_byte_size |
|
58 | assert format_byte_size | |
58 | assert hidden |
|
59 | assert hidden | |
59 | assert js |
|
60 | assert js | |
60 | assert jshtml |
|
61 | assert jshtml | |
61 | assert password |
|
62 | assert password | |
62 | assert pop_flash_messages |
|
63 | assert pop_flash_messages | |
63 | assert radio |
|
64 | assert radio | |
64 | assert render_w_mentions |
|
65 | assert render_w_mentions | |
65 | assert reset |
|
66 | assert reset | |
66 | assert safeid |
|
67 | assert safeid | |
67 | assert select |
|
68 | assert select | |
68 | assert session_csrf_secret_name |
|
69 | assert session_csrf_secret_name | |
69 | assert session_csrf_secret_token |
|
70 | assert session_csrf_secret_token | |
|
71 | assert shorter | |||
70 | assert submit |
|
72 | assert submit | |
71 | assert text |
|
73 | assert text | |
72 | assert textarea |
|
74 | assert textarea | |
73 | assert urlify_text |
|
75 | assert urlify_text | |
74 | assert wrap_paragraphs |
|
76 | assert wrap_paragraphs | |
75 | # from kallithea.lib.auth |
|
77 | # from kallithea.lib.auth | |
76 | assert HasPermissionAny |
|
78 | assert HasPermissionAny | |
77 | assert HasRepoGroupPermissionLevel |
|
79 | assert HasRepoGroupPermissionLevel | |
78 | assert HasRepoPermissionLevel |
|
80 | assert HasRepoPermissionLevel | |
79 | # from utils2 |
|
81 | # from utils2 | |
80 | assert age |
|
|||
81 | assert credentials_filter |
|
82 | assert credentials_filter | |
82 | assert fmt_date |
|
|||
83 | assert link_to_ref |
|
83 | assert link_to_ref | |
84 | assert shorter |
|
|||
85 | assert time_to_datetime |
|
84 | assert time_to_datetime | |
86 | # from vcs |
|
85 | # from vcs | |
87 | assert EmptyChangeset |
|
86 | assert EmptyChangeset | |
88 |
|
87 | |||
89 |
|
88 | |||
90 | log = logging.getLogger(__name__) |
|
89 | log = logging.getLogger(__name__) | |
91 |
|
90 | |||
92 |
|
91 | |||
93 | def FID(raw_id, path): |
|
92 | def FID(raw_id, path): | |
94 | """ |
|
93 | """ | |
95 | Creates a unique ID for filenode based on it's hash of path and revision |
|
94 | Creates a unique ID for filenode based on it's hash of path and revision | |
96 | it's safe to use in urls |
|
95 | it's safe to use in urls | |
97 | """ |
|
96 | """ | |
98 | return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12]) |
|
97 | return 'C-%s-%s' % (short_id(raw_id), hashlib.md5(safe_bytes(path)).hexdigest()[:12]) | |
99 |
|
98 | |||
100 |
|
99 | |||
101 | def get_ignore_whitespace_diff(GET): |
|
100 | def get_ignore_whitespace_diff(GET): | |
102 | """Return true if URL requested whitespace to be ignored""" |
|
101 | """Return true if URL requested whitespace to be ignored""" | |
103 | return bool(GET.get('ignorews')) |
|
102 | return bool(GET.get('ignorews')) | |
104 |
|
103 | |||
105 |
|
104 | |||
106 | def ignore_whitespace_link(GET, anchor=None): |
|
105 | def ignore_whitespace_link(GET, anchor=None): | |
107 | """Return snippet with link to current URL with whitespace ignoring toggled""" |
|
106 | """Return snippet with link to current URL with whitespace ignoring toggled""" | |
108 | params = dict(GET) # ignoring duplicates |
|
107 | params = dict(GET) # ignoring duplicates | |
109 | if get_ignore_whitespace_diff(GET): |
|
108 | if get_ignore_whitespace_diff(GET): | |
110 | params.pop('ignorews') |
|
109 | params.pop('ignorews') | |
111 | title = _("Show whitespace changes") |
|
110 | title = _("Show whitespace changes") | |
112 | else: |
|
111 | else: | |
113 | params['ignorews'] = '1' |
|
112 | params['ignorews'] = '1' | |
114 | title = _("Ignore whitespace changes") |
|
113 | title = _("Ignore whitespace changes") | |
115 | params['anchor'] = anchor |
|
114 | params['anchor'] = anchor | |
116 | return link_to( |
|
115 | return link_to( | |
117 | literal('<i class="icon-strike"></i>'), |
|
116 | literal('<i class="icon-strike"></i>'), | |
118 | url.current(**params), |
|
117 | url.current(**params), | |
119 | title=title, |
|
118 | title=title, | |
120 | **{'data-toggle': 'tooltip'}) |
|
119 | **{'data-toggle': 'tooltip'}) | |
121 |
|
120 | |||
122 |
|
121 | |||
123 | def get_diff_context_size(GET): |
|
122 | def get_diff_context_size(GET): | |
124 | """Return effective context size requested in URL""" |
|
123 | """Return effective context size requested in URL""" | |
125 | return safe_int(GET.get('context'), default=3) |
|
124 | return safe_int(GET.get('context'), default=3) | |
126 |
|
125 | |||
127 |
|
126 | |||
128 | def increase_context_link(GET, anchor=None): |
|
127 | def increase_context_link(GET, anchor=None): | |
129 | """Return snippet with link to current URL with double context size""" |
|
128 | """Return snippet with link to current URL with double context size""" | |
130 | context = get_diff_context_size(GET) * 2 |
|
129 | context = get_diff_context_size(GET) * 2 | |
131 | params = dict(GET) # ignoring duplicates |
|
130 | params = dict(GET) # ignoring duplicates | |
132 | params['context'] = str(context) |
|
131 | params['context'] = str(context) | |
133 | params['anchor'] = anchor |
|
132 | params['anchor'] = anchor | |
134 | return link_to( |
|
133 | return link_to( | |
135 | literal('<i class="icon-sort"></i>'), |
|
134 | literal('<i class="icon-sort"></i>'), | |
136 | url.current(**params), |
|
135 | url.current(**params), | |
137 | title=_('Increase diff context to %(num)s lines') % {'num': context}, |
|
136 | title=_('Increase diff context to %(num)s lines') % {'num': context}, | |
138 | **{'data-toggle': 'tooltip'}) |
|
137 | **{'data-toggle': 'tooltip'}) | |
139 |
|
138 | |||
140 |
|
139 | |||
141 | def files_breadcrumbs(repo_name, rev, paths): |
|
140 | def files_breadcrumbs(repo_name, rev, paths): | |
142 | url_l = [link_to(repo_name, url('files_home', |
|
141 | url_l = [link_to(repo_name, url('files_home', | |
143 | repo_name=repo_name, |
|
142 | repo_name=repo_name, | |
144 | revision=rev, f_path=''), |
|
143 | revision=rev, f_path=''), | |
145 | class_='ypjax-link')] |
|
144 | class_='ypjax-link')] | |
146 | paths_l = paths.split('/') |
|
145 | paths_l = paths.split('/') | |
147 | for cnt, p in enumerate(paths_l): |
|
146 | for cnt, p in enumerate(paths_l): | |
148 | if p != '': |
|
147 | if p != '': | |
149 | url_l.append(link_to(p, |
|
148 | url_l.append(link_to(p, | |
150 | url('files_home', |
|
149 | url('files_home', | |
151 | repo_name=repo_name, |
|
150 | repo_name=repo_name, | |
152 | revision=rev, |
|
151 | revision=rev, | |
153 | f_path='/'.join(paths_l[:cnt + 1]) |
|
152 | f_path='/'.join(paths_l[:cnt + 1]) | |
154 | ), |
|
153 | ), | |
155 | class_='ypjax-link' |
|
154 | class_='ypjax-link' | |
156 | ) |
|
155 | ) | |
157 | ) |
|
156 | ) | |
158 | return literal('/'.join(url_l)) |
|
157 | return literal('/'.join(url_l)) | |
159 |
|
158 | |||
160 |
|
159 | |||
161 | class CodeHtmlFormatter(HtmlFormatter): |
|
160 | class CodeHtmlFormatter(HtmlFormatter): | |
162 | """ |
|
161 | """ | |
163 | My code Html Formatter for source codes |
|
162 | My code Html Formatter for source codes | |
164 | """ |
|
163 | """ | |
165 |
|
164 | |||
166 | def wrap(self, source, outfile): |
|
165 | def wrap(self, source, outfile): | |
167 | return self._wrap_div(self._wrap_pre(self._wrap_code(source))) |
|
166 | return self._wrap_div(self._wrap_pre(self._wrap_code(source))) | |
168 |
|
167 | |||
169 | def _wrap_code(self, source): |
|
168 | def _wrap_code(self, source): | |
170 | for cnt, it in enumerate(source): |
|
169 | for cnt, it in enumerate(source): | |
171 | i, t = it |
|
170 | i, t = it | |
172 | t = '<span id="L%s">%s</span>' % (cnt + 1, t) |
|
171 | t = '<span id="L%s">%s</span>' % (cnt + 1, t) | |
173 | yield i, t |
|
172 | yield i, t | |
174 |
|
173 | |||
175 | def _wrap_tablelinenos(self, inner): |
|
174 | def _wrap_tablelinenos(self, inner): | |
176 | inner_lines = [] |
|
175 | inner_lines = [] | |
177 | lncount = 0 |
|
176 | lncount = 0 | |
178 | for t, line in inner: |
|
177 | for t, line in inner: | |
179 | if t: |
|
178 | if t: | |
180 | lncount += 1 |
|
179 | lncount += 1 | |
181 | inner_lines.append(line) |
|
180 | inner_lines.append(line) | |
182 |
|
181 | |||
183 | fl = self.linenostart |
|
182 | fl = self.linenostart | |
184 | mw = len(str(lncount + fl - 1)) |
|
183 | mw = len(str(lncount + fl - 1)) | |
185 | sp = self.linenospecial |
|
184 | sp = self.linenospecial | |
186 | st = self.linenostep |
|
185 | st = self.linenostep | |
187 | la = self.lineanchors |
|
186 | la = self.lineanchors | |
188 | aln = self.anchorlinenos |
|
187 | aln = self.anchorlinenos | |
189 | nocls = self.noclasses |
|
188 | nocls = self.noclasses | |
190 | if sp: |
|
189 | if sp: | |
191 | lines = [] |
|
190 | lines = [] | |
192 |
|
191 | |||
193 | for i in range(fl, fl + lncount): |
|
192 | for i in range(fl, fl + lncount): | |
194 | if i % st == 0: |
|
193 | if i % st == 0: | |
195 | if i % sp == 0: |
|
194 | if i % sp == 0: | |
196 | if aln: |
|
195 | if aln: | |
197 | lines.append('<a href="#%s%d" class="special">%*d</a>' % |
|
196 | lines.append('<a href="#%s%d" class="special">%*d</a>' % | |
198 | (la, i, mw, i)) |
|
197 | (la, i, mw, i)) | |
199 | else: |
|
198 | else: | |
200 | lines.append('<span class="special">%*d</span>' % (mw, i)) |
|
199 | lines.append('<span class="special">%*d</span>' % (mw, i)) | |
201 | else: |
|
200 | else: | |
202 | if aln: |
|
201 | if aln: | |
203 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
202 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) | |
204 | else: |
|
203 | else: | |
205 | lines.append('%*d' % (mw, i)) |
|
204 | lines.append('%*d' % (mw, i)) | |
206 | else: |
|
205 | else: | |
207 | lines.append('') |
|
206 | lines.append('') | |
208 | ls = '\n'.join(lines) |
|
207 | ls = '\n'.join(lines) | |
209 | else: |
|
208 | else: | |
210 | lines = [] |
|
209 | lines = [] | |
211 | for i in range(fl, fl + lncount): |
|
210 | for i in range(fl, fl + lncount): | |
212 | if i % st == 0: |
|
211 | if i % st == 0: | |
213 | if aln: |
|
212 | if aln: | |
214 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
213 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) | |
215 | else: |
|
214 | else: | |
216 | lines.append('%*d' % (mw, i)) |
|
215 | lines.append('%*d' % (mw, i)) | |
217 | else: |
|
216 | else: | |
218 | lines.append('') |
|
217 | lines.append('') | |
219 | ls = '\n'.join(lines) |
|
218 | ls = '\n'.join(lines) | |
220 |
|
219 | |||
221 | # in case you wonder about the seemingly redundant <div> here: since the |
|
220 | # in case you wonder about the seemingly redundant <div> here: since the | |
222 | # content in the other cell also is wrapped in a div, some browsers in |
|
221 | # content in the other cell also is wrapped in a div, some browsers in | |
223 | # some configurations seem to mess up the formatting... |
|
222 | # some configurations seem to mess up the formatting... | |
224 | if nocls: |
|
223 | if nocls: | |
225 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
224 | yield 0, ('<table class="%stable">' % self.cssclass + | |
226 | '<tr><td><div class="linenodiv">' |
|
225 | '<tr><td><div class="linenodiv">' | |
227 | '<pre>' + ls + '</pre></div></td>' |
|
226 | '<pre>' + ls + '</pre></div></td>' | |
228 | '<td id="hlcode" class="code">') |
|
227 | '<td id="hlcode" class="code">') | |
229 | else: |
|
228 | else: | |
230 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
229 | yield 0, ('<table class="%stable">' % self.cssclass + | |
231 | '<tr><td class="linenos"><div class="linenodiv">' |
|
230 | '<tr><td class="linenos"><div class="linenodiv">' | |
232 | '<pre>' + ls + '</pre></div></td>' |
|
231 | '<pre>' + ls + '</pre></div></td>' | |
233 | '<td id="hlcode" class="code">') |
|
232 | '<td id="hlcode" class="code">') | |
234 | yield 0, ''.join(inner_lines) |
|
233 | yield 0, ''.join(inner_lines) | |
235 | yield 0, '</td></tr></table>' |
|
234 | yield 0, '</td></tr></table>' | |
236 |
|
235 | |||
237 |
|
236 | |||
238 | _whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)') |
|
237 | _whitespace_re = re.compile(r'(\t)|( )(?=\n|</div>)') | |
239 |
|
238 | |||
240 |
|
239 | |||
241 | def _markup_whitespace(m): |
|
240 | def _markup_whitespace(m): | |
242 | groups = m.groups() |
|
241 | groups = m.groups() | |
243 | if groups[0]: |
|
242 | if groups[0]: | |
244 | return '<u>\t</u>' |
|
243 | return '<u>\t</u>' | |
245 | if groups[1]: |
|
244 | if groups[1]: | |
246 | return ' <i></i>' |
|
245 | return ' <i></i>' | |
247 |
|
246 | |||
248 |
|
247 | |||
249 | def markup_whitespace(s): |
|
248 | def markup_whitespace(s): | |
250 | return _whitespace_re.sub(_markup_whitespace, s) |
|
249 | return _whitespace_re.sub(_markup_whitespace, s) | |
251 |
|
250 | |||
252 |
|
251 | |||
253 | def pygmentize(filenode, **kwargs): |
|
252 | def pygmentize(filenode, **kwargs): | |
254 | """ |
|
253 | """ | |
255 | pygmentize function using pygments |
|
254 | pygmentize function using pygments | |
256 |
|
255 | |||
257 | :param filenode: |
|
256 | :param filenode: | |
258 | """ |
|
257 | """ | |
259 | lexer = get_custom_lexer(filenode.extension) or filenode.lexer |
|
258 | lexer = get_custom_lexer(filenode.extension) or filenode.lexer | |
260 | return literal(markup_whitespace( |
|
259 | return literal(markup_whitespace( | |
261 | code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs)))) |
|
260 | code_highlight(safe_str(filenode.content), lexer, CodeHtmlFormatter(**kwargs)))) | |
262 |
|
261 | |||
263 |
|
262 | |||
264 | def hsv_to_rgb(h, s, v): |
|
263 | def hsv_to_rgb(h, s, v): | |
265 | if s == 0.0: |
|
264 | if s == 0.0: | |
266 | return v, v, v |
|
265 | return v, v, v | |
267 | i = int(h * 6.0) # XXX assume int() truncates! |
|
266 | i = int(h * 6.0) # XXX assume int() truncates! | |
268 | f = (h * 6.0) - i |
|
267 | f = (h * 6.0) - i | |
269 | p = v * (1.0 - s) |
|
268 | p = v * (1.0 - s) | |
270 | q = v * (1.0 - s * f) |
|
269 | q = v * (1.0 - s * f) | |
271 | t = v * (1.0 - s * (1.0 - f)) |
|
270 | t = v * (1.0 - s * (1.0 - f)) | |
272 | i = i % 6 |
|
271 | i = i % 6 | |
273 | if i == 0: |
|
272 | if i == 0: | |
274 | return v, t, p |
|
273 | return v, t, p | |
275 | if i == 1: |
|
274 | if i == 1: | |
276 | return q, v, p |
|
275 | return q, v, p | |
277 | if i == 2: |
|
276 | if i == 2: | |
278 | return p, v, t |
|
277 | return p, v, t | |
279 | if i == 3: |
|
278 | if i == 3: | |
280 | return p, q, v |
|
279 | return p, q, v | |
281 | if i == 4: |
|
280 | if i == 4: | |
282 | return t, p, v |
|
281 | return t, p, v | |
283 | if i == 5: |
|
282 | if i == 5: | |
284 | return v, p, q |
|
283 | return v, p, q | |
285 |
|
284 | |||
286 |
|
285 | |||
287 | def gen_color(n=10000): |
|
286 | def gen_color(n=10000): | |
288 | """generator for getting n of evenly distributed colors using |
|
287 | """generator for getting n of evenly distributed colors using | |
289 | hsv color and golden ratio. It always return same order of colors |
|
288 | hsv color and golden ratio. It always return same order of colors | |
290 |
|
289 | |||
291 | :returns: RGB tuple |
|
290 | :returns: RGB tuple | |
292 | """ |
|
291 | """ | |
293 |
|
292 | |||
294 | golden_ratio = 0.618033988749895 |
|
293 | golden_ratio = 0.618033988749895 | |
295 | h = 0.22717784590367374 |
|
294 | h = 0.22717784590367374 | |
296 |
|
295 | |||
297 | for _unused in range(n): |
|
296 | for _unused in range(n): | |
298 | h += golden_ratio |
|
297 | h += golden_ratio | |
299 | h %= 1 |
|
298 | h %= 1 | |
300 | HSV_tuple = [h, 0.95, 0.95] |
|
299 | HSV_tuple = [h, 0.95, 0.95] | |
301 | RGB_tuple = hsv_to_rgb(*HSV_tuple) |
|
300 | RGB_tuple = hsv_to_rgb(*HSV_tuple) | |
302 | yield [str(int(x * 256)) for x in RGB_tuple] |
|
301 | yield [str(int(x * 256)) for x in RGB_tuple] | |
303 |
|
302 | |||
304 |
|
303 | |||
305 | def pygmentize_annotation(repo_name, filenode, **kwargs): |
|
304 | def pygmentize_annotation(repo_name, filenode, **kwargs): | |
306 | """ |
|
305 | """ | |
307 | pygmentize function for annotation |
|
306 | pygmentize function for annotation | |
308 |
|
307 | |||
309 | :param filenode: |
|
308 | :param filenode: | |
310 | """ |
|
309 | """ | |
311 | cgenerator = gen_color() |
|
310 | cgenerator = gen_color() | |
312 | color_dict = {} |
|
311 | color_dict = {} | |
313 |
|
312 | |||
314 | def get_color_string(cs): |
|
313 | def get_color_string(cs): | |
315 | if cs in color_dict: |
|
314 | if cs in color_dict: | |
316 | col = color_dict[cs] |
|
315 | col = color_dict[cs] | |
317 | else: |
|
316 | else: | |
318 | col = color_dict[cs] = next(cgenerator) |
|
317 | col = color_dict[cs] = next(cgenerator) | |
319 | return "color: rgb(%s)! important;" % (', '.join(col)) |
|
318 | return "color: rgb(%s)! important;" % (', '.join(col)) | |
320 |
|
319 | |||
321 | def url_func(changeset): |
|
320 | def url_func(changeset): | |
322 | author = escape(changeset.author) |
|
321 | author = escape(changeset.author) | |
323 | date = changeset.date |
|
322 | date = changeset.date | |
324 | message = escape(changeset.message) |
|
323 | message = escape(changeset.message) | |
325 | tooltip_html = ("<b>Author:</b> %s<br/>" |
|
324 | tooltip_html = ("<b>Author:</b> %s<br/>" | |
326 | "<b>Date:</b> %s</b><br/>" |
|
325 | "<b>Date:</b> %s</b><br/>" | |
327 | "<b>Message:</b> %s") % (author, date, message) |
|
326 | "<b>Message:</b> %s") % (author, date, message) | |
328 |
|
327 | |||
329 | lnk_format = show_id(changeset) |
|
328 | lnk_format = show_id(changeset) | |
330 | uri = link_to( |
|
329 | uri = link_to( | |
331 | lnk_format, |
|
330 | lnk_format, | |
332 | url('changeset_home', repo_name=repo_name, |
|
331 | url('changeset_home', repo_name=repo_name, | |
333 | revision=changeset.raw_id), |
|
332 | revision=changeset.raw_id), | |
334 | style=get_color_string(changeset.raw_id), |
|
333 | style=get_color_string(changeset.raw_id), | |
335 | **{'data-toggle': 'popover', |
|
334 | **{'data-toggle': 'popover', | |
336 | 'data-content': tooltip_html} |
|
335 | 'data-content': tooltip_html} | |
337 | ) |
|
336 | ) | |
338 |
|
337 | |||
339 | uri += '\n' |
|
338 | uri += '\n' | |
340 | return uri |
|
339 | return uri | |
341 |
|
340 | |||
342 | return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs))) |
|
341 | return literal(markup_whitespace(annotate_highlight(filenode, url_func, **kwargs))) | |
343 |
|
342 | |||
344 |
|
343 | |||
345 | def capitalize(x): |
|
344 | def capitalize(x): | |
346 | return x.capitalize() |
|
345 | return x.capitalize() | |
347 |
|
346 | |||
348 | def short_id(x): |
|
347 | def short_id(x): | |
349 | return x[:12] |
|
348 | return x[:12] | |
350 |
|
349 | |||
351 |
|
350 | |||
352 | def show_id(cs): |
|
351 | def show_id(cs): | |
353 | """ |
|
352 | """ | |
354 | Configurable function that shows ID |
|
353 | Configurable function that shows ID | |
355 | by default it's r123:fffeeefffeee |
|
354 | by default it's r123:fffeeefffeee | |
356 |
|
355 | |||
357 | :param cs: changeset instance |
|
356 | :param cs: changeset instance | |
358 | """ |
|
357 | """ | |
359 | def_len = safe_int(kallithea.CONFIG.get('show_sha_length', 12)) |
|
358 | def_len = safe_int(kallithea.CONFIG.get('show_sha_length', 12)) | |
360 | show_rev = asbool(kallithea.CONFIG.get('show_revision_number', False)) |
|
359 | show_rev = asbool(kallithea.CONFIG.get('show_revision_number', False)) | |
361 |
|
360 | |||
362 | raw_id = cs.raw_id[:def_len] |
|
361 | raw_id = cs.raw_id[:def_len] | |
363 | if show_rev: |
|
362 | if show_rev: | |
364 | return 'r%s:%s' % (cs.revision, raw_id) |
|
363 | return 'r%s:%s' % (cs.revision, raw_id) | |
365 | else: |
|
364 | else: | |
366 | return raw_id |
|
365 | return raw_id | |
367 |
|
366 | |||
368 |
|
367 | |||
369 | @cache_region('long_term', 'user_attr_or_none') |
|
368 | @cache_region('long_term', 'user_attr_or_none') | |
370 | def user_attr_or_none(author, show_attr): |
|
369 | def user_attr_or_none(author, show_attr): | |
371 | """Try to match email part of VCS committer string with a local user and return show_attr |
|
370 | """Try to match email part of VCS committer string with a local user and return show_attr | |
372 | - or return None if user not found""" |
|
371 | - or return None if user not found""" | |
373 | email = author_email(author) |
|
372 | email = author_email(author) | |
374 | if email: |
|
373 | if email: | |
375 | user = db.User.get_by_email(email) |
|
374 | user = db.User.get_by_email(email) | |
376 | if user is not None: |
|
375 | if user is not None: | |
377 | return getattr(user, show_attr) |
|
376 | return getattr(user, show_attr) | |
378 | return None |
|
377 | return None | |
379 |
|
378 | |||
380 |
|
379 | |||
381 | def email_or_none(author): |
|
380 | def email_or_none(author): | |
382 | """Try to match email part of VCS committer string with a local user. |
|
381 | """Try to match email part of VCS committer string with a local user. | |
383 | Return primary email of user, email part of the specified author name, or None.""" |
|
382 | Return primary email of user, email part of the specified author name, or None.""" | |
384 | if not author: |
|
383 | if not author: | |
385 | return None |
|
384 | return None | |
386 | email = user_attr_or_none(author, 'email') |
|
385 | email = user_attr_or_none(author, 'email') | |
387 | if email is not None: |
|
386 | if email is not None: | |
388 | return email # always use user's main email address - not necessarily the one used to find user |
|
387 | return email # always use user's main email address - not necessarily the one used to find user | |
389 |
|
388 | |||
390 | # extract email from the commit string |
|
389 | # extract email from the commit string | |
391 | email = author_email(author) |
|
390 | email = author_email(author) | |
392 | if email: |
|
391 | if email: | |
393 | return email |
|
392 | return email | |
394 |
|
393 | |||
395 | # No valid email, not a valid user in the system, none! |
|
394 | # No valid email, not a valid user in the system, none! | |
396 | return None |
|
395 | return None | |
397 |
|
396 | |||
398 |
|
397 | |||
399 | def person(author, show_attr="username"): |
|
398 | def person(author, show_attr="username"): | |
400 | """Find the user identified by 'author' string, return one of the users attributes, |
|
399 | """Find the user identified by 'author' string, return one of the users attributes, | |
401 | default to the username attribute, None if there is no user""" |
|
400 | default to the username attribute, None if there is no user""" | |
402 | value = user_attr_or_none(author, show_attr) |
|
401 | value = user_attr_or_none(author, show_attr) | |
403 | if value is not None: |
|
402 | if value is not None: | |
404 | return value |
|
403 | return value | |
405 |
|
404 | |||
406 | # Still nothing? Just pass back the author name if any, else the email |
|
405 | # Still nothing? Just pass back the author name if any, else the email | |
407 | return author_name(author) or author_email(author) |
|
406 | return author_name(author) or author_email(author) | |
408 |
|
407 | |||
409 |
|
408 | |||
410 | def person_by_id(id_, show_attr="username"): |
|
409 | def person_by_id(id_, show_attr="username"): | |
411 | # maybe it's an ID ? |
|
410 | # maybe it's an ID ? | |
412 | if str(id_).isdigit() or isinstance(id_, int): |
|
411 | if str(id_).isdigit() or isinstance(id_, int): | |
413 | id_ = int(id_) |
|
412 | id_ = int(id_) | |
414 | user = db.User.get(id_) |
|
413 | user = db.User.get(id_) | |
415 | if user is not None: |
|
414 | if user is not None: | |
416 | return getattr(user, show_attr) |
|
415 | return getattr(user, show_attr) | |
417 | return id_ |
|
416 | return id_ | |
418 |
|
417 | |||
419 |
|
418 | |||
420 | def boolicon(value): |
|
419 | def boolicon(value): | |
421 | """Returns boolean value of a value, represented as small html image of true/false |
|
420 | """Returns boolean value of a value, represented as small html image of true/false | |
422 | icons |
|
421 | icons | |
423 |
|
422 | |||
424 | :param value: value |
|
423 | :param value: value | |
425 | """ |
|
424 | """ | |
426 |
|
425 | |||
427 | if value: |
|
426 | if value: | |
428 | return HTML.tag('i', class_="icon-ok") |
|
427 | return HTML.tag('i', class_="icon-ok") | |
429 | else: |
|
428 | else: | |
430 | return HTML.tag('i', class_="icon-minus-circled") |
|
429 | return HTML.tag('i', class_="icon-minus-circled") | |
431 |
|
430 | |||
432 |
|
431 | |||
433 | def action_parser(user_log, feed=False, parse_cs=False): |
|
432 | def action_parser(user_log, feed=False, parse_cs=False): | |
434 | """ |
|
433 | """ | |
435 | This helper will action_map the specified string action into translated |
|
434 | This helper will action_map the specified string action into translated | |
436 | fancy names with icons and links |
|
435 | fancy names with icons and links | |
437 |
|
436 | |||
438 | :param user_log: user log instance |
|
437 | :param user_log: user log instance | |
439 | :param feed: use output for feeds (no html and fancy icons) |
|
438 | :param feed: use output for feeds (no html and fancy icons) | |
440 | :param parse_cs: parse Changesets into VCS instances |
|
439 | :param parse_cs: parse Changesets into VCS instances | |
441 | """ |
|
440 | """ | |
442 |
|
441 | |||
443 | action = user_log.action |
|
442 | action = user_log.action | |
444 | action_params = ' ' |
|
443 | action_params = ' ' | |
445 |
|
444 | |||
446 | x = action.split(':') |
|
445 | x = action.split(':') | |
447 |
|
446 | |||
448 | if len(x) > 1: |
|
447 | if len(x) > 1: | |
449 | action, action_params = x |
|
448 | action, action_params = x | |
450 |
|
449 | |||
451 | def get_cs_links(): |
|
450 | def get_cs_links(): | |
452 | revs_limit = 3 # display this amount always |
|
451 | revs_limit = 3 # display this amount always | |
453 | revs_top_limit = 50 # show upto this amount of changesets hidden |
|
452 | revs_top_limit = 50 # show upto this amount of changesets hidden | |
454 | revs_ids = action_params.split(',') |
|
453 | revs_ids = action_params.split(',') | |
455 | deleted = user_log.repository is None |
|
454 | deleted = user_log.repository is None | |
456 | if deleted: |
|
455 | if deleted: | |
457 | return ','.join(revs_ids) |
|
456 | return ','.join(revs_ids) | |
458 |
|
457 | |||
459 | repo_name = user_log.repository.repo_name |
|
458 | repo_name = user_log.repository.repo_name | |
460 |
|
459 | |||
461 | def lnk(rev, repo_name): |
|
460 | def lnk(rev, repo_name): | |
462 | lazy_cs = False |
|
461 | lazy_cs = False | |
463 | title_ = None |
|
462 | title_ = None | |
464 | url_ = '#' |
|
463 | url_ = '#' | |
465 | if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict): |
|
464 | if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict): | |
466 | if rev.op and rev.ref_name: |
|
465 | if rev.op and rev.ref_name: | |
467 | if rev.op == 'delete_branch': |
|
466 | if rev.op == 'delete_branch': | |
468 | lbl = _('Deleted branch: %s') % rev.ref_name |
|
467 | lbl = _('Deleted branch: %s') % rev.ref_name | |
469 | elif rev.op == 'tag': |
|
468 | elif rev.op == 'tag': | |
470 | lbl = _('Created tag: %s') % rev.ref_name |
|
469 | lbl = _('Created tag: %s') % rev.ref_name | |
471 | else: |
|
470 | else: | |
472 | lbl = 'Unknown operation %s' % rev.op |
|
471 | lbl = 'Unknown operation %s' % rev.op | |
473 | else: |
|
472 | else: | |
474 | lazy_cs = True |
|
473 | lazy_cs = True | |
475 | lbl = rev.short_id[:8] |
|
474 | lbl = rev.short_id[:8] | |
476 | url_ = url('changeset_home', repo_name=repo_name, |
|
475 | url_ = url('changeset_home', repo_name=repo_name, | |
477 | revision=rev.raw_id) |
|
476 | revision=rev.raw_id) | |
478 | else: |
|
477 | else: | |
479 | # changeset cannot be found - it might have been stripped or removed |
|
478 | # changeset cannot be found - it might have been stripped or removed | |
480 | lbl = rev[:12] |
|
479 | lbl = rev[:12] | |
481 | title_ = _('Changeset %s not found') % lbl |
|
480 | title_ = _('Changeset %s not found') % lbl | |
482 | if parse_cs: |
|
481 | if parse_cs: | |
483 | return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'}) |
|
482 | return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'}) | |
484 | return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '', |
|
483 | return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '', | |
485 | **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name}) |
|
484 | **{'data-raw_id': rev.raw_id, 'data-repo_name': repo_name}) | |
486 |
|
485 | |||
487 | def _get_op(rev_txt): |
|
486 | def _get_op(rev_txt): | |
488 | _op = None |
|
487 | _op = None | |
489 | _name = rev_txt |
|
488 | _name = rev_txt | |
490 | if len(rev_txt.split('=>')) == 2: |
|
489 | if len(rev_txt.split('=>')) == 2: | |
491 | _op, _name = rev_txt.split('=>') |
|
490 | _op, _name = rev_txt.split('=>') | |
492 | return _op, _name |
|
491 | return _op, _name | |
493 |
|
492 | |||
494 | revs = [] |
|
493 | revs = [] | |
495 | if len([v for v in revs_ids if v != '']) > 0: |
|
494 | if len([v for v in revs_ids if v != '']) > 0: | |
496 | repo = None |
|
495 | repo = None | |
497 | for rev in revs_ids[:revs_top_limit]: |
|
496 | for rev in revs_ids[:revs_top_limit]: | |
498 | _op, _name = _get_op(rev) |
|
497 | _op, _name = _get_op(rev) | |
499 |
|
498 | |||
500 | # we want parsed changesets, or new log store format is bad |
|
499 | # we want parsed changesets, or new log store format is bad | |
501 | if parse_cs: |
|
500 | if parse_cs: | |
502 | try: |
|
501 | try: | |
503 | if repo is None: |
|
502 | if repo is None: | |
504 | repo = user_log.repository.scm_instance |
|
503 | repo = user_log.repository.scm_instance | |
505 | _rev = repo.get_changeset(rev) |
|
504 | _rev = repo.get_changeset(rev) | |
506 | revs.append(_rev) |
|
505 | revs.append(_rev) | |
507 | except ChangesetDoesNotExistError: |
|
506 | except ChangesetDoesNotExistError: | |
508 | log.error('cannot find revision %s in this repo', rev) |
|
507 | log.error('cannot find revision %s in this repo', rev) | |
509 | revs.append(rev) |
|
508 | revs.append(rev) | |
510 | else: |
|
509 | else: | |
511 | _rev = AttributeDict({ |
|
510 | _rev = AttributeDict({ | |
512 | 'short_id': rev[:12], |
|
511 | 'short_id': rev[:12], | |
513 | 'raw_id': rev, |
|
512 | 'raw_id': rev, | |
514 | 'message': '', |
|
513 | 'message': '', | |
515 | 'op': _op, |
|
514 | 'op': _op, | |
516 | 'ref_name': _name |
|
515 | 'ref_name': _name | |
517 | }) |
|
516 | }) | |
518 | revs.append(_rev) |
|
517 | revs.append(_rev) | |
519 | cs_links = [" " + ', '.join( |
|
518 | cs_links = [" " + ', '.join( | |
520 | [lnk(rev, repo_name) for rev in revs[:revs_limit]] |
|
519 | [lnk(rev, repo_name) for rev in revs[:revs_limit]] | |
521 | )] |
|
520 | )] | |
522 | _op1, _name1 = _get_op(revs_ids[0]) |
|
521 | _op1, _name1 = _get_op(revs_ids[0]) | |
523 | _op2, _name2 = _get_op(revs_ids[-1]) |
|
522 | _op2, _name2 = _get_op(revs_ids[-1]) | |
524 |
|
523 | |||
525 | _rev = '%s...%s' % (_name1, _name2) |
|
524 | _rev = '%s...%s' % (_name1, _name2) | |
526 |
|
525 | |||
527 | compare_view = ( |
|
526 | compare_view = ( | |
528 | ' <div class="compare_view" data-toggle="tooltip" title="%s">' |
|
527 | ' <div class="compare_view" data-toggle="tooltip" title="%s">' | |
529 | '<a href="%s">%s</a> </div>' % ( |
|
528 | '<a href="%s">%s</a> </div>' % ( | |
530 | _('Show all combined changesets %s->%s') % ( |
|
529 | _('Show all combined changesets %s->%s') % ( | |
531 | revs_ids[0][:12], revs_ids[-1][:12] |
|
530 | revs_ids[0][:12], revs_ids[-1][:12] | |
532 | ), |
|
531 | ), | |
533 | url('changeset_home', repo_name=repo_name, |
|
532 | url('changeset_home', repo_name=repo_name, | |
534 | revision=_rev |
|
533 | revision=_rev | |
535 | ), |
|
534 | ), | |
536 | _('Compare view') |
|
535 | _('Compare view') | |
537 | ) |
|
536 | ) | |
538 | ) |
|
537 | ) | |
539 |
|
538 | |||
540 | # if we have exactly one more than normally displayed |
|
539 | # if we have exactly one more than normally displayed | |
541 | # just display it, takes less space than displaying |
|
540 | # just display it, takes less space than displaying | |
542 | # "and 1 more revisions" |
|
541 | # "and 1 more revisions" | |
543 | if len(revs_ids) == revs_limit + 1: |
|
542 | if len(revs_ids) == revs_limit + 1: | |
544 | cs_links.append(", " + lnk(revs[revs_limit], repo_name)) |
|
543 | cs_links.append(", " + lnk(revs[revs_limit], repo_name)) | |
545 |
|
544 | |||
546 | # hidden-by-default ones |
|
545 | # hidden-by-default ones | |
547 | if len(revs_ids) > revs_limit + 1: |
|
546 | if len(revs_ids) > revs_limit + 1: | |
548 | uniq_id = revs_ids[0] |
|
547 | uniq_id = revs_ids[0] | |
549 | html_tmpl = ( |
|
548 | html_tmpl = ( | |
550 | '<span> %s <a class="show_more" id="_%s" ' |
|
549 | '<span> %s <a class="show_more" id="_%s" ' | |
551 | 'href="#more">%s</a> %s</span>' |
|
550 | 'href="#more">%s</a> %s</span>' | |
552 | ) |
|
551 | ) | |
553 | if not feed: |
|
552 | if not feed: | |
554 | cs_links.append(html_tmpl % ( |
|
553 | cs_links.append(html_tmpl % ( | |
555 | _('and'), |
|
554 | _('and'), | |
556 | uniq_id, _('%s more') % (len(revs_ids) - revs_limit), |
|
555 | uniq_id, _('%s more') % (len(revs_ids) - revs_limit), | |
557 | _('revisions') |
|
556 | _('revisions') | |
558 | ) |
|
557 | ) | |
559 | ) |
|
558 | ) | |
560 |
|
559 | |||
561 | if not feed: |
|
560 | if not feed: | |
562 | html_tmpl = '<span id="%s" style="display:none">, %s </span>' |
|
561 | html_tmpl = '<span id="%s" style="display:none">, %s </span>' | |
563 | else: |
|
562 | else: | |
564 | html_tmpl = '<span id="%s"> %s </span>' |
|
563 | html_tmpl = '<span id="%s"> %s </span>' | |
565 |
|
564 | |||
566 | morelinks = ', '.join( |
|
565 | morelinks = ', '.join( | |
567 | [lnk(rev, repo_name) for rev in revs[revs_limit:]] |
|
566 | [lnk(rev, repo_name) for rev in revs[revs_limit:]] | |
568 | ) |
|
567 | ) | |
569 |
|
568 | |||
570 | if len(revs_ids) > revs_top_limit: |
|
569 | if len(revs_ids) > revs_top_limit: | |
571 | morelinks += ', ...' |
|
570 | morelinks += ', ...' | |
572 |
|
571 | |||
573 | cs_links.append(html_tmpl % (uniq_id, morelinks)) |
|
572 | cs_links.append(html_tmpl % (uniq_id, morelinks)) | |
574 | if len(revs) > 1: |
|
573 | if len(revs) > 1: | |
575 | cs_links.append(compare_view) |
|
574 | cs_links.append(compare_view) | |
576 | return ''.join(cs_links) |
|
575 | return ''.join(cs_links) | |
577 |
|
576 | |||
578 | def get_fork_name(): |
|
577 | def get_fork_name(): | |
579 | repo_name = action_params |
|
578 | repo_name = action_params | |
580 | url_ = url('summary_home', repo_name=repo_name) |
|
579 | url_ = url('summary_home', repo_name=repo_name) | |
581 | return _('Fork name %s') % link_to(action_params, url_) |
|
580 | return _('Fork name %s') % link_to(action_params, url_) | |
582 |
|
581 | |||
583 | def get_user_name(): |
|
582 | def get_user_name(): | |
584 | user_name = action_params |
|
583 | user_name = action_params | |
585 | return user_name |
|
584 | return user_name | |
586 |
|
585 | |||
587 | def get_users_group(): |
|
586 | def get_users_group(): | |
588 | group_name = action_params |
|
587 | group_name = action_params | |
589 | return group_name |
|
588 | return group_name | |
590 |
|
589 | |||
591 | def get_pull_request(): |
|
590 | def get_pull_request(): | |
592 | pull_request_id = action_params |
|
591 | pull_request_id = action_params | |
593 | nice_id = db.PullRequest.make_nice_id(pull_request_id) |
|
592 | nice_id = db.PullRequest.make_nice_id(pull_request_id) | |
594 |
|
593 | |||
595 | deleted = user_log.repository is None |
|
594 | deleted = user_log.repository is None | |
596 | if deleted: |
|
595 | if deleted: | |
597 | repo_name = user_log.repository_name |
|
596 | repo_name = user_log.repository_name | |
598 | else: |
|
597 | else: | |
599 | repo_name = user_log.repository.repo_name |
|
598 | repo_name = user_log.repository.repo_name | |
600 |
|
599 | |||
601 | return link_to(_('Pull request %s') % nice_id, |
|
600 | return link_to(_('Pull request %s') % nice_id, | |
602 | url('pullrequest_show', repo_name=repo_name, |
|
601 | url('pullrequest_show', repo_name=repo_name, | |
603 | pull_request_id=pull_request_id)) |
|
602 | pull_request_id=pull_request_id)) | |
604 |
|
603 | |||
605 | def get_archive_name(): |
|
604 | def get_archive_name(): | |
606 | archive_name = action_params |
|
605 | archive_name = action_params | |
607 | return archive_name |
|
606 | return archive_name | |
608 |
|
607 | |||
609 | # action : translated str, callback(extractor), icon |
|
608 | # action : translated str, callback(extractor), icon | |
610 | action_map = { |
|
609 | action_map = { | |
611 | 'user_deleted_repo': (_('[deleted] repository'), |
|
610 | 'user_deleted_repo': (_('[deleted] repository'), | |
612 | None, 'icon-trashcan'), |
|
611 | None, 'icon-trashcan'), | |
613 | 'user_created_repo': (_('[created] repository'), |
|
612 | 'user_created_repo': (_('[created] repository'), | |
614 | None, 'icon-plus'), |
|
613 | None, 'icon-plus'), | |
615 | 'user_created_fork': (_('[created] repository as fork'), |
|
614 | 'user_created_fork': (_('[created] repository as fork'), | |
616 | None, 'icon-fork'), |
|
615 | None, 'icon-fork'), | |
617 | 'user_forked_repo': (_('[forked] repository'), |
|
616 | 'user_forked_repo': (_('[forked] repository'), | |
618 | get_fork_name, 'icon-fork'), |
|
617 | get_fork_name, 'icon-fork'), | |
619 | 'user_updated_repo': (_('[updated] repository'), |
|
618 | 'user_updated_repo': (_('[updated] repository'), | |
620 | None, 'icon-pencil'), |
|
619 | None, 'icon-pencil'), | |
621 | 'user_downloaded_archive': (_('[downloaded] archive from repository'), |
|
620 | 'user_downloaded_archive': (_('[downloaded] archive from repository'), | |
622 | get_archive_name, 'icon-download-cloud'), |
|
621 | get_archive_name, 'icon-download-cloud'), | |
623 | 'admin_deleted_repo': (_('[delete] repository'), |
|
622 | 'admin_deleted_repo': (_('[delete] repository'), | |
624 | None, 'icon-trashcan'), |
|
623 | None, 'icon-trashcan'), | |
625 | 'admin_created_repo': (_('[created] repository'), |
|
624 | 'admin_created_repo': (_('[created] repository'), | |
626 | None, 'icon-plus'), |
|
625 | None, 'icon-plus'), | |
627 | 'admin_forked_repo': (_('[forked] repository'), |
|
626 | 'admin_forked_repo': (_('[forked] repository'), | |
628 | None, 'icon-fork'), |
|
627 | None, 'icon-fork'), | |
629 | 'admin_updated_repo': (_('[updated] repository'), |
|
628 | 'admin_updated_repo': (_('[updated] repository'), | |
630 | None, 'icon-pencil'), |
|
629 | None, 'icon-pencil'), | |
631 | 'admin_created_user': (_('[created] user'), |
|
630 | 'admin_created_user': (_('[created] user'), | |
632 | get_user_name, 'icon-user'), |
|
631 | get_user_name, 'icon-user'), | |
633 | 'admin_updated_user': (_('[updated] user'), |
|
632 | 'admin_updated_user': (_('[updated] user'), | |
634 | get_user_name, 'icon-user'), |
|
633 | get_user_name, 'icon-user'), | |
635 | 'admin_created_users_group': (_('[created] user group'), |
|
634 | 'admin_created_users_group': (_('[created] user group'), | |
636 | get_users_group, 'icon-pencil'), |
|
635 | get_users_group, 'icon-pencil'), | |
637 | 'admin_updated_users_group': (_('[updated] user group'), |
|
636 | 'admin_updated_users_group': (_('[updated] user group'), | |
638 | get_users_group, 'icon-pencil'), |
|
637 | get_users_group, 'icon-pencil'), | |
639 | 'user_commented_revision': (_('[commented] on revision in repository'), |
|
638 | 'user_commented_revision': (_('[commented] on revision in repository'), | |
640 | get_cs_links, 'icon-comment'), |
|
639 | get_cs_links, 'icon-comment'), | |
641 | 'user_commented_pull_request': (_('[commented] on pull request for'), |
|
640 | 'user_commented_pull_request': (_('[commented] on pull request for'), | |
642 | get_pull_request, 'icon-comment'), |
|
641 | get_pull_request, 'icon-comment'), | |
643 | 'user_closed_pull_request': (_('[closed] pull request for'), |
|
642 | 'user_closed_pull_request': (_('[closed] pull request for'), | |
644 | get_pull_request, 'icon-ok'), |
|
643 | get_pull_request, 'icon-ok'), | |
645 | 'push': (_('[pushed] into'), |
|
644 | 'push': (_('[pushed] into'), | |
646 | get_cs_links, 'icon-move-up'), |
|
645 | get_cs_links, 'icon-move-up'), | |
647 | 'push_local': (_('[committed via Kallithea] into repository'), |
|
646 | 'push_local': (_('[committed via Kallithea] into repository'), | |
648 | get_cs_links, 'icon-pencil'), |
|
647 | get_cs_links, 'icon-pencil'), | |
649 | 'push_remote': (_('[pulled from remote] into repository'), |
|
648 | 'push_remote': (_('[pulled from remote] into repository'), | |
650 | get_cs_links, 'icon-move-up'), |
|
649 | get_cs_links, 'icon-move-up'), | |
651 | 'pull': (_('[pulled] from'), |
|
650 | 'pull': (_('[pulled] from'), | |
652 | None, 'icon-move-down'), |
|
651 | None, 'icon-move-down'), | |
653 | 'started_following_repo': (_('[started following] repository'), |
|
652 | 'started_following_repo': (_('[started following] repository'), | |
654 | None, 'icon-heart'), |
|
653 | None, 'icon-heart'), | |
655 | 'stopped_following_repo': (_('[stopped following] repository'), |
|
654 | 'stopped_following_repo': (_('[stopped following] repository'), | |
656 | None, 'icon-heart-empty'), |
|
655 | None, 'icon-heart-empty'), | |
657 | } |
|
656 | } | |
658 |
|
657 | |||
659 | action_str = action_map.get(action, action) |
|
658 | action_str = action_map.get(action, action) | |
660 | if feed: |
|
659 | if feed: | |
661 | action = action_str[0].replace('[', '').replace(']', '') |
|
660 | action = action_str[0].replace('[', '').replace(']', '') | |
662 | else: |
|
661 | else: | |
663 | action = action_str[0] \ |
|
662 | action = action_str[0] \ | |
664 | .replace('[', '<b>') \ |
|
663 | .replace('[', '<b>') \ | |
665 | .replace(']', '</b>') |
|
664 | .replace(']', '</b>') | |
666 |
|
665 | |||
667 | action_params_func = action_str[1] if callable(action_str[1]) else (lambda: "") |
|
666 | action_params_func = action_str[1] if callable(action_str[1]) else (lambda: "") | |
668 |
|
667 | |||
669 | def action_parser_icon(): |
|
668 | def action_parser_icon(): | |
670 | action = user_log.action |
|
669 | action = user_log.action | |
671 | action_params = None |
|
670 | action_params = None | |
672 | x = action.split(':') |
|
671 | x = action.split(':') | |
673 |
|
672 | |||
674 | if len(x) > 1: |
|
673 | if len(x) > 1: | |
675 | action, action_params = x |
|
674 | action, action_params = x | |
676 |
|
675 | |||
677 | ico = action_map.get(action, ['', '', ''])[2] |
|
676 | ico = action_map.get(action, ['', '', ''])[2] | |
678 | html = """<i class="%s"></i>""" % ico |
|
677 | html = """<i class="%s"></i>""" % ico | |
679 | return literal(html) |
|
678 | return literal(html) | |
680 |
|
679 | |||
681 | # returned callbacks we need to call to get |
|
680 | # returned callbacks we need to call to get | |
682 | return [lambda: literal(action), action_params_func, action_parser_icon] |
|
681 | return [lambda: literal(action), action_params_func, action_parser_icon] | |
683 |
|
682 | |||
684 |
|
683 | |||
685 | #============================================================================== |
|
684 | #============================================================================== | |
686 | # GRAVATAR URL |
|
685 | # GRAVATAR URL | |
687 | #============================================================================== |
|
686 | #============================================================================== | |
688 | def gravatar_div(email_address, cls='', size=30, **div_attributes): |
|
687 | def gravatar_div(email_address, cls='', size=30, **div_attributes): | |
689 | """Return an html literal with a span around a gravatar if they are enabled. |
|
688 | """Return an html literal with a span around a gravatar if they are enabled. | |
690 | Extra keyword parameters starting with 'div_' will get the prefix removed |
|
689 | Extra keyword parameters starting with 'div_' will get the prefix removed | |
691 | and '_' changed to '-' and be used as attributes on the div. The default |
|
690 | and '_' changed to '-' and be used as attributes on the div. The default | |
692 | class is 'gravatar'. |
|
691 | class is 'gravatar'. | |
693 | """ |
|
692 | """ | |
694 | if not c.visual.use_gravatar: |
|
693 | if not c.visual.use_gravatar: | |
695 | return '' |
|
694 | return '' | |
696 | if 'div_class' not in div_attributes: |
|
695 | if 'div_class' not in div_attributes: | |
697 | div_attributes['div_class'] = "gravatar" |
|
696 | div_attributes['div_class'] = "gravatar" | |
698 | attributes = [] |
|
697 | attributes = [] | |
699 | for k, v in sorted(div_attributes.items()): |
|
698 | for k, v in sorted(div_attributes.items()): | |
700 | assert k.startswith('div_'), k |
|
699 | assert k.startswith('div_'), k | |
701 | attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v))) |
|
700 | attributes.append(' %s="%s"' % (k[4:].replace('_', '-'), escape(v))) | |
702 | return literal("""<span%s>%s</span>""" % |
|
701 | return literal("""<span%s>%s</span>""" % | |
703 | (''.join(attributes), |
|
702 | (''.join(attributes), | |
704 | gravatar(email_address, cls=cls, size=size))) |
|
703 | gravatar(email_address, cls=cls, size=size))) | |
705 |
|
704 | |||
706 |
|
705 | |||
707 | def gravatar(email_address, cls='', size=30): |
|
706 | def gravatar(email_address, cls='', size=30): | |
708 | """return html element of the gravatar |
|
707 | """return html element of the gravatar | |
709 |
|
708 | |||
710 | This method will return an <img> with the resolution double the size (for |
|
709 | This method will return an <img> with the resolution double the size (for | |
711 | retina screens) of the image. If the url returned from gravatar_url is |
|
710 | retina screens) of the image. If the url returned from gravatar_url is | |
712 | empty then we fallback to using an icon. |
|
711 | empty then we fallback to using an icon. | |
713 |
|
712 | |||
714 | """ |
|
713 | """ | |
715 | if not c.visual.use_gravatar: |
|
714 | if not c.visual.use_gravatar: | |
716 | return '' |
|
715 | return '' | |
717 |
|
716 | |||
718 | src = gravatar_url(email_address, size * 2) |
|
717 | src = gravatar_url(email_address, size * 2) | |
719 |
|
718 | |||
720 | if src: |
|
719 | if src: | |
721 | # here it makes sense to use style="width: ..." (instead of, say, a |
|
720 | # here it makes sense to use style="width: ..." (instead of, say, a | |
722 | # stylesheet) because we using this to generate a high-res (retina) size |
|
721 | # stylesheet) because we using this to generate a high-res (retina) size | |
723 | html = ('<i class="icon-gravatar {cls}"' |
|
722 | html = ('<i class="icon-gravatar {cls}"' | |
724 | ' style="font-size: {size}px;background-size: {size}px;background-image: url(\'{src}\')"' |
|
723 | ' style="font-size: {size}px;background-size: {size}px;background-image: url(\'{src}\')"' | |
725 | '></i>').format(cls=cls, size=size, src=src) |
|
724 | '></i>').format(cls=cls, size=size, src=src) | |
726 |
|
725 | |||
727 | else: |
|
726 | else: | |
728 | # if src is empty then there was no gravatar, so we use a font icon |
|
727 | # if src is empty then there was no gravatar, so we use a font icon | |
729 | html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>""" |
|
728 | html = ("""<i class="icon-user {cls}" style="font-size: {size}px;"></i>""" | |
730 | .format(cls=cls, size=size)) |
|
729 | .format(cls=cls, size=size)) | |
731 |
|
730 | |||
732 | return literal(html) |
|
731 | return literal(html) | |
733 |
|
732 | |||
734 |
|
733 | |||
735 | def gravatar_url(email_address, size=30, default=''): |
|
734 | def gravatar_url(email_address, size=30, default=''): | |
736 | if not c.visual.use_gravatar: |
|
735 | if not c.visual.use_gravatar: | |
737 | return "" |
|
736 | return "" | |
738 |
|
737 | |||
739 | _def = 'anonymous@kallithea-scm.org' # default gravatar |
|
738 | _def = 'anonymous@kallithea-scm.org' # default gravatar | |
740 | email_address = email_address or _def |
|
739 | email_address = email_address or _def | |
741 |
|
740 | |||
742 | if email_address == _def: |
|
741 | if email_address == _def: | |
743 | return default |
|
742 | return default | |
744 |
|
743 | |||
745 | parsed_url = urllib.parse.urlparse(url.current(qualified=True)) |
|
744 | parsed_url = urllib.parse.urlparse(url.current(qualified=True)) | |
746 | return (c.visual.gravatar_url or db.User.DEFAULT_GRAVATAR_URL) \ |
|
745 | return (c.visual.gravatar_url or db.User.DEFAULT_GRAVATAR_URL) \ | |
747 | .replace('{email}', email_address) \ |
|
746 | .replace('{email}', email_address) \ | |
748 | .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \ |
|
747 | .replace('{md5email}', hashlib.md5(safe_bytes(email_address).lower()).hexdigest()) \ | |
749 | .replace('{netloc}', parsed_url.netloc) \ |
|
748 | .replace('{netloc}', parsed_url.netloc) \ | |
750 | .replace('{scheme}', parsed_url.scheme) \ |
|
749 | .replace('{scheme}', parsed_url.scheme) \ | |
751 | .replace('{size}', str(size)) |
|
750 | .replace('{size}', str(size)) | |
752 |
|
751 | |||
753 |
|
752 | |||
754 | def changed_tooltip(nodes): |
|
753 | def changed_tooltip(nodes): | |
755 | """ |
|
754 | """ | |
756 | Generates a html string for changed nodes in changeset page. |
|
755 | Generates a html string for changed nodes in changeset page. | |
757 | It limits the output to 30 entries |
|
756 | It limits the output to 30 entries | |
758 |
|
757 | |||
759 | :param nodes: LazyNodesGenerator |
|
758 | :param nodes: LazyNodesGenerator | |
760 | """ |
|
759 | """ | |
761 | if nodes: |
|
760 | if nodes: | |
762 | pref = ': <br/> ' |
|
761 | pref = ': <br/> ' | |
763 | suf = '' |
|
762 | suf = '' | |
764 | if len(nodes) > 30: |
|
763 | if len(nodes) > 30: | |
765 | suf = '<br/>' + _(' and %s more') % (len(nodes) - 30) |
|
764 | suf = '<br/>' + _(' and %s more') % (len(nodes) - 30) | |
766 | return literal(pref + '<br/> '.join([x.path |
|
765 | return literal(pref + '<br/> '.join([x.path | |
767 | for x in nodes[:30]]) + suf) |
|
766 | for x in nodes[:30]]) + suf) | |
768 | else: |
|
767 | else: | |
769 | return ': ' + _('No files') |
|
768 | return ': ' + _('No files') | |
770 |
|
769 | |||
771 |
|
770 | |||
772 | def fancy_file_stats(stats): |
|
771 | def fancy_file_stats(stats): | |
773 | """ |
|
772 | """ | |
774 | Displays a fancy two colored bar for number of added/deleted |
|
773 | Displays a fancy two colored bar for number of added/deleted | |
775 | lines of code on file |
|
774 | lines of code on file | |
776 |
|
775 | |||
777 | :param stats: two element list of added/deleted lines of code |
|
776 | :param stats: two element list of added/deleted lines of code | |
778 | """ |
|
777 | """ | |
779 |
|
778 | |||
780 | a, d = stats['added'], stats['deleted'] |
|
779 | a, d = stats['added'], stats['deleted'] | |
781 | width = 100 |
|
780 | width = 100 | |
782 |
|
781 | |||
783 | if stats['binary']: |
|
782 | if stats['binary']: | |
784 | # binary mode |
|
783 | # binary mode | |
785 | lbl = '' |
|
784 | lbl = '' | |
786 | bin_op = 1 |
|
785 | bin_op = 1 | |
787 |
|
786 | |||
788 | if BIN_FILENODE in stats['ops']: |
|
787 | if BIN_FILENODE in stats['ops']: | |
789 | lbl = 'bin+' |
|
788 | lbl = 'bin+' | |
790 |
|
789 | |||
791 | if NEW_FILENODE in stats['ops']: |
|
790 | if NEW_FILENODE in stats['ops']: | |
792 | lbl += _('new file') |
|
791 | lbl += _('new file') | |
793 | bin_op = NEW_FILENODE |
|
792 | bin_op = NEW_FILENODE | |
794 | elif MOD_FILENODE in stats['ops']: |
|
793 | elif MOD_FILENODE in stats['ops']: | |
795 | lbl += _('mod') |
|
794 | lbl += _('mod') | |
796 | bin_op = MOD_FILENODE |
|
795 | bin_op = MOD_FILENODE | |
797 | elif DEL_FILENODE in stats['ops']: |
|
796 | elif DEL_FILENODE in stats['ops']: | |
798 | lbl += _('del') |
|
797 | lbl += _('del') | |
799 | bin_op = DEL_FILENODE |
|
798 | bin_op = DEL_FILENODE | |
800 | elif RENAMED_FILENODE in stats['ops']: |
|
799 | elif RENAMED_FILENODE in stats['ops']: | |
801 | lbl += _('rename') |
|
800 | lbl += _('rename') | |
802 | bin_op = RENAMED_FILENODE |
|
801 | bin_op = RENAMED_FILENODE | |
803 |
|
802 | |||
804 | # chmod can go with other operations |
|
803 | # chmod can go with other operations | |
805 | if CHMOD_FILENODE in stats['ops']: |
|
804 | if CHMOD_FILENODE in stats['ops']: | |
806 | _org_lbl = _('chmod') |
|
805 | _org_lbl = _('chmod') | |
807 | lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl |
|
806 | lbl += _org_lbl if lbl.endswith('+') else '+%s' % _org_lbl | |
808 |
|
807 | |||
809 | #import ipdb;ipdb.set_trace() |
|
808 | #import ipdb;ipdb.set_trace() | |
810 | b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl) |
|
809 | b_d = '<div class="bin bin%s progress-bar" style="width:100%%">%s</div>' % (bin_op, lbl) | |
811 | b_a = '<div class="bin bin1" style="width:0%"></div>' |
|
810 | b_a = '<div class="bin bin1" style="width:0%"></div>' | |
812 | return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d)) |
|
811 | return literal('<div style="width:%spx" class="progress">%s%s</div>' % (width, b_a, b_d)) | |
813 |
|
812 | |||
814 | t = stats['added'] + stats['deleted'] |
|
813 | t = stats['added'] + stats['deleted'] | |
815 | unit = float(width) / (t or 1) |
|
814 | unit = float(width) / (t or 1) | |
816 |
|
815 | |||
817 | # needs > 9% of width to be visible or 0 to be hidden |
|
816 | # needs > 9% of width to be visible or 0 to be hidden | |
818 | a_p = max(9, unit * a) if a > 0 else 0 |
|
817 | a_p = max(9, unit * a) if a > 0 else 0 | |
819 | d_p = max(9, unit * d) if d > 0 else 0 |
|
818 | d_p = max(9, unit * d) if d > 0 else 0 | |
820 | p_sum = a_p + d_p |
|
819 | p_sum = a_p + d_p | |
821 |
|
820 | |||
822 | if p_sum > width: |
|
821 | if p_sum > width: | |
823 | # adjust the percentage to be == 100% since we adjusted to 9 |
|
822 | # adjust the percentage to be == 100% since we adjusted to 9 | |
824 | if a_p > d_p: |
|
823 | if a_p > d_p: | |
825 | a_p = a_p - (p_sum - width) |
|
824 | a_p = a_p - (p_sum - width) | |
826 | else: |
|
825 | else: | |
827 | d_p = d_p - (p_sum - width) |
|
826 | d_p = d_p - (p_sum - width) | |
828 |
|
827 | |||
829 | a_v = a if a > 0 else '' |
|
828 | a_v = a if a > 0 else '' | |
830 | d_v = d if d > 0 else '' |
|
829 | d_v = d if d > 0 else '' | |
831 |
|
830 | |||
832 | d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % ( |
|
831 | d_a = '<div class="added progress-bar" style="width:%s%%">%s</div>' % ( | |
833 | a_p, a_v |
|
832 | a_p, a_v | |
834 | ) |
|
833 | ) | |
835 | d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % ( |
|
834 | d_d = '<div class="deleted progress-bar" style="width:%s%%">%s</div>' % ( | |
836 | d_p, d_v |
|
835 | d_p, d_v | |
837 | ) |
|
836 | ) | |
838 | return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d)) |
|
837 | return literal('<div class="progress" style="width:%spx">%s%s</div>' % (width, d_a, d_d)) | |
839 |
|
838 | |||
840 |
|
839 | |||
841 | def changeset_status(repo, revision): |
|
840 | def changeset_status(repo, revision): | |
842 | return ChangesetStatusModel().get_status(repo, revision) |
|
841 | return ChangesetStatusModel().get_status(repo, revision) | |
843 |
|
842 | |||
844 |
|
843 | |||
845 | def changeset_status_lbl(changeset_status): |
|
844 | def changeset_status_lbl(changeset_status): | |
846 | return db.ChangesetStatus.get_status_lbl(changeset_status) |
|
845 | return db.ChangesetStatus.get_status_lbl(changeset_status) | |
847 |
|
846 | |||
848 |
|
847 | |||
849 | def get_permission_name(key): |
|
848 | def get_permission_name(key): | |
850 | return dict(db.Permission.PERMS).get(key) |
|
849 | return dict(db.Permission.PERMS).get(key) | |
851 |
|
850 | |||
852 |
|
851 | |||
853 | def journal_filter_help(): |
|
852 | def journal_filter_help(): | |
854 | return _(textwrap.dedent(''' |
|
853 | return _(textwrap.dedent(''' | |
855 | Example filter terms: |
|
854 | Example filter terms: | |
856 | repository:vcs |
|
855 | repository:vcs | |
857 | username:developer |
|
856 | username:developer | |
858 | action:*push* |
|
857 | action:*push* | |
859 | ip:127.0.0.1 |
|
858 | ip:127.0.0.1 | |
860 | date:20120101 |
|
859 | date:20120101 | |
861 | date:[20120101100000 TO 20120102] |
|
860 | date:[20120101100000 TO 20120102] | |
862 |
|
861 | |||
863 | Generate wildcards using '*' character: |
|
862 | Generate wildcards using '*' character: | |
864 | "repository:vcs*" - search everything starting with 'vcs' |
|
863 | "repository:vcs*" - search everything starting with 'vcs' | |
865 | "repository:*vcs*" - search for repository containing 'vcs' |
|
864 | "repository:*vcs*" - search for repository containing 'vcs' | |
866 |
|
865 | |||
867 | Optional AND / OR operators in queries |
|
866 | Optional AND / OR operators in queries | |
868 | "repository:vcs OR repository:test" |
|
867 | "repository:vcs OR repository:test" | |
869 | "username:test AND repository:test*" |
|
868 | "username:test AND repository:test*" | |
870 | ''')) |
|
869 | ''')) | |
871 |
|
870 | |||
872 |
|
871 | |||
873 | def ip_range(ip_addr): |
|
872 | def ip_range(ip_addr): | |
874 | s, e = db.UserIpMap._get_ip_range(ip_addr) |
|
873 | s, e = db.UserIpMap._get_ip_range(ip_addr) | |
875 | return '%s - %s' % (s, e) |
|
874 | return '%s - %s' % (s, e) |
@@ -1,666 +1,545 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.utils2 |
|
15 | kallithea.lib.utils2 | |
16 | ~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Some simple helper functions. |
|
18 | Some simple helper functions. | |
19 | Note: all these functions should be independent of Kallithea classes, i.e. |
|
19 | Note: all these functions should be independent of Kallithea classes, i.e. | |
20 | models, controllers, etc. to prevent import cycles. |
|
20 | models, controllers, etc. to prevent import cycles. | |
21 |
|
21 | |||
22 | This file was forked by the Kallithea project in July 2014. |
|
22 | This file was forked by the Kallithea project in July 2014. | |
23 | Original author and date, and relevant copyright and licensing information is below: |
|
23 | Original author and date, and relevant copyright and licensing information is below: | |
24 | :created_on: Jan 5, 2011 |
|
24 | :created_on: Jan 5, 2011 | |
25 | :author: marcink |
|
25 | :author: marcink | |
26 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
26 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
27 | :license: GPLv3, see LICENSE.md for more details. |
|
27 | :license: GPLv3, see LICENSE.md for more details. | |
28 | """ |
|
28 | """ | |
29 |
|
29 | |||
30 | import binascii |
|
30 | import binascii | |
31 | import datetime |
|
31 | import datetime | |
32 | import hashlib |
|
32 | import hashlib | |
33 | import json |
|
33 | import json | |
34 | import logging |
|
34 | import logging | |
35 | import os |
|
35 | import os | |
36 | import re |
|
36 | import re | |
37 | import string |
|
37 | import string | |
38 | import sys |
|
38 | import sys | |
39 | import time |
|
39 | import time | |
40 | import urllib.parse |
|
40 | import urllib.parse | |
41 | from distutils.version import StrictVersion |
|
41 | from distutils.version import StrictVersion | |
42 |
|
42 | |||
43 | import bcrypt |
|
43 | import bcrypt | |
44 | import urlobject |
|
44 | import urlobject | |
45 | from dateutil import relativedelta |
|
|||
46 | from sqlalchemy.engine import url as sa_url |
|
45 | from sqlalchemy.engine import url as sa_url | |
47 | from sqlalchemy.exc import ArgumentError |
|
46 | from sqlalchemy.exc import ArgumentError | |
48 | from tg import tmpl_context |
|
47 | from tg import tmpl_context | |
49 | from tg.i18n import ugettext as _ |
|
|||
50 | from tg.i18n import ungettext |
|
|||
51 | from tg.support.converters import asbool, aslist |
|
48 | from tg.support.converters import asbool, aslist | |
52 | from webhelpers2.text import collapse, remove_formatting, strip_tags |
|
49 | from webhelpers2.text import collapse, remove_formatting, strip_tags | |
53 |
|
50 | |||
54 | import kallithea |
|
51 | import kallithea | |
55 | from kallithea.lib import webutils |
|
52 | from kallithea.lib import webutils | |
56 | from kallithea.lib.vcs.backends.base import BaseRepository, EmptyChangeset |
|
53 | from kallithea.lib.vcs.backends.base import BaseRepository, EmptyChangeset | |
57 | from kallithea.lib.vcs.backends.git.repository import GitRepository |
|
54 | from kallithea.lib.vcs.backends.git.repository import GitRepository | |
58 | from kallithea.lib.vcs.conf import settings |
|
55 | from kallithea.lib.vcs.conf import settings | |
59 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
56 | from kallithea.lib.vcs.exceptions import RepositoryError | |
60 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export |
|
57 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export | |
61 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
58 | from kallithea.lib.vcs.utils.lazy import LazyProperty | |
62 |
|
59 | |||
63 |
|
60 | |||
64 | try: |
|
61 | try: | |
65 | import pwd |
|
62 | import pwd | |
66 | except ImportError: |
|
63 | except ImportError: | |
67 | pass |
|
64 | pass | |
68 |
|
65 | |||
69 |
|
66 | |||
70 | log = logging.getLogger(__name__) |
|
67 | log = logging.getLogger(__name__) | |
71 |
|
68 | |||
72 |
|
69 | |||
73 | # mute pyflakes "imported but unused" |
|
70 | # mute pyflakes "imported but unused" | |
74 | assert asbool |
|
71 | assert asbool | |
75 | assert aslist |
|
72 | assert aslist | |
76 | assert ascii_bytes |
|
73 | assert ascii_bytes | |
77 | assert ascii_str |
|
74 | assert ascii_str | |
78 | assert safe_bytes |
|
75 | assert safe_bytes | |
79 | assert safe_str |
|
76 | assert safe_str | |
80 | assert LazyProperty |
|
77 | assert LazyProperty | |
81 |
|
78 | |||
82 |
|
79 | |||
83 | # get current umask value without changing it |
|
80 | # get current umask value without changing it | |
84 | umask = os.umask(0) |
|
81 | umask = os.umask(0) | |
85 | os.umask(umask) |
|
82 | os.umask(umask) | |
86 |
|
83 | |||
87 |
|
84 | |||
88 | def convert_line_endings(line, mode): |
|
85 | def convert_line_endings(line, mode): | |
89 | """ |
|
86 | """ | |
90 | Converts a given line "line end" according to given mode |
|
87 | Converts a given line "line end" according to given mode | |
91 |
|
88 | |||
92 | Available modes are:: |
|
89 | Available modes are:: | |
93 | 0 - Unix |
|
90 | 0 - Unix | |
94 | 1 - Mac |
|
91 | 1 - Mac | |
95 | 2 - DOS |
|
92 | 2 - DOS | |
96 |
|
93 | |||
97 | :param line: given line to convert |
|
94 | :param line: given line to convert | |
98 | :param mode: mode to convert to |
|
95 | :param mode: mode to convert to | |
99 | :rtype: str |
|
96 | :rtype: str | |
100 | :return: converted line according to mode |
|
97 | :return: converted line according to mode | |
101 | """ |
|
98 | """ | |
102 | if mode == 0: |
|
99 | if mode == 0: | |
103 | line = line.replace('\r\n', '\n') |
|
100 | line = line.replace('\r\n', '\n') | |
104 | line = line.replace('\r', '\n') |
|
101 | line = line.replace('\r', '\n') | |
105 | elif mode == 1: |
|
102 | elif mode == 1: | |
106 | line = line.replace('\r\n', '\r') |
|
103 | line = line.replace('\r\n', '\r') | |
107 | line = line.replace('\n', '\r') |
|
104 | line = line.replace('\n', '\r') | |
108 | elif mode == 2: |
|
105 | elif mode == 2: | |
109 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) |
|
106 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) | |
110 | return line |
|
107 | return line | |
111 |
|
108 | |||
112 |
|
109 | |||
113 | def detect_mode(line, default): |
|
110 | def detect_mode(line, default): | |
114 | """ |
|
111 | """ | |
115 | Detects line break for given line, if line break couldn't be found |
|
112 | Detects line break for given line, if line break couldn't be found | |
116 | given default value is returned |
|
113 | given default value is returned | |
117 |
|
114 | |||
118 | :param line: str line |
|
115 | :param line: str line | |
119 | :param default: default |
|
116 | :param default: default | |
120 | :rtype: int |
|
117 | :rtype: int | |
121 | :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS |
|
118 | :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS | |
122 | """ |
|
119 | """ | |
123 | if line.endswith('\r\n'): |
|
120 | if line.endswith('\r\n'): | |
124 | return 2 |
|
121 | return 2 | |
125 | elif line.endswith('\n'): |
|
122 | elif line.endswith('\n'): | |
126 | return 0 |
|
123 | return 0 | |
127 | elif line.endswith('\r'): |
|
124 | elif line.endswith('\r'): | |
128 | return 1 |
|
125 | return 1 | |
129 | else: |
|
126 | else: | |
130 | return default |
|
127 | return default | |
131 |
|
128 | |||
132 |
|
129 | |||
133 | def generate_api_key(): |
|
130 | def generate_api_key(): | |
134 | """ |
|
131 | """ | |
135 | Generates a random (presumably unique) API key. |
|
132 | Generates a random (presumably unique) API key. | |
136 |
|
133 | |||
137 | This value is used in URLs and "Bearer" HTTP Authorization headers, |
|
134 | This value is used in URLs and "Bearer" HTTP Authorization headers, | |
138 | which in practice means it should only contain URL-safe characters |
|
135 | which in practice means it should only contain URL-safe characters | |
139 | (RFC 3986): |
|
136 | (RFC 3986): | |
140 |
|
137 | |||
141 | unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" |
|
138 | unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" | |
142 | """ |
|
139 | """ | |
143 | # Hexadecimal certainly qualifies as URL-safe. |
|
140 | # Hexadecimal certainly qualifies as URL-safe. | |
144 | return ascii_str(binascii.hexlify(os.urandom(20))) |
|
141 | return ascii_str(binascii.hexlify(os.urandom(20))) | |
145 |
|
142 | |||
146 |
|
143 | |||
147 | def safe_int(val, default=None): |
|
144 | def safe_int(val, default=None): | |
148 | """ |
|
145 | """ | |
149 | Returns int() of val if val is not convertable to int use default |
|
146 | Returns int() of val if val is not convertable to int use default | |
150 | instead |
|
147 | instead | |
151 |
|
148 | |||
152 | :param val: |
|
149 | :param val: | |
153 | :param default: |
|
150 | :param default: | |
154 | """ |
|
151 | """ | |
155 | try: |
|
152 | try: | |
156 | val = int(val) |
|
153 | val = int(val) | |
157 | except (ValueError, TypeError): |
|
154 | except (ValueError, TypeError): | |
158 | val = default |
|
155 | val = default | |
159 | return val |
|
156 | return val | |
160 |
|
157 | |||
161 |
|
158 | |||
162 | def remove_suffix(s, suffix): |
|
159 | def remove_suffix(s, suffix): | |
163 | if s.endswith(suffix): |
|
160 | if s.endswith(suffix): | |
164 | s = s[:-1 * len(suffix)] |
|
161 | s = s[:-1 * len(suffix)] | |
165 | return s |
|
162 | return s | |
166 |
|
163 | |||
167 |
|
164 | |||
168 | def remove_prefix(s, prefix): |
|
165 | def remove_prefix(s, prefix): | |
169 | if s.startswith(prefix): |
|
166 | if s.startswith(prefix): | |
170 | s = s[len(prefix):] |
|
167 | s = s[len(prefix):] | |
171 | return s |
|
168 | return s | |
172 |
|
169 | |||
173 |
|
170 | |||
174 | def shorter(s, size=20, firstline=False, postfix='...'): |
|
|||
175 | """Truncate s to size, including the postfix string if truncating. |
|
|||
176 | If firstline, truncate at newline. |
|
|||
177 | """ |
|
|||
178 | if firstline: |
|
|||
179 | s = s.split('\n', 1)[0].rstrip() |
|
|||
180 | if len(s) > size: |
|
|||
181 | return s[:size - len(postfix)] + postfix |
|
|||
182 | return s |
|
|||
183 |
|
||||
184 |
|
||||
185 | def age(prevdate, show_short_version=False, now=None): |
|
|||
186 | """ |
|
|||
187 | turns a datetime into an age string. |
|
|||
188 | If show_short_version is True, then it will generate a not so accurate but shorter string, |
|
|||
189 | example: 2days ago, instead of 2 days and 23 hours ago. |
|
|||
190 |
|
||||
191 | :param prevdate: datetime object |
|
|||
192 | :param show_short_version: if it should approximate the date and return a shorter string |
|
|||
193 | :rtype: str |
|
|||
194 | :returns: str words describing age |
|
|||
195 | """ |
|
|||
196 | now = now or datetime.datetime.now() |
|
|||
197 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] |
|
|||
198 | deltas = {} |
|
|||
199 | future = False |
|
|||
200 |
|
||||
201 | if prevdate > now: |
|
|||
202 | now, prevdate = prevdate, now |
|
|||
203 | future = True |
|
|||
204 | if future: |
|
|||
205 | prevdate = prevdate.replace(microsecond=0) |
|
|||
206 | # Get date parts deltas |
|
|||
207 | for part in order: |
|
|||
208 | d = relativedelta.relativedelta(now, prevdate) |
|
|||
209 | deltas[part] = getattr(d, part + 's') |
|
|||
210 |
|
||||
211 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, |
|
|||
212 | # not 1 hour, -59 minutes and -59 seconds) |
|
|||
213 | for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours |
|
|||
214 | part = order[num] |
|
|||
215 | carry_part = order[num - 1] |
|
|||
216 |
|
||||
217 | if deltas[part] < 0: |
|
|||
218 | deltas[part] += length |
|
|||
219 | deltas[carry_part] -= 1 |
|
|||
220 |
|
||||
221 | # Same thing for days except that the increment depends on the (variable) |
|
|||
222 | # number of days in the month |
|
|||
223 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] |
|
|||
224 | if deltas['day'] < 0: |
|
|||
225 | if prevdate.month == 2 and (prevdate.year % 4 == 0 and |
|
|||
226 | (prevdate.year % 100 != 0 or prevdate.year % 400 == 0) |
|
|||
227 | ): |
|
|||
228 | deltas['day'] += 29 |
|
|||
229 | else: |
|
|||
230 | deltas['day'] += month_lengths[prevdate.month - 1] |
|
|||
231 |
|
||||
232 | deltas['month'] -= 1 |
|
|||
233 |
|
||||
234 | if deltas['month'] < 0: |
|
|||
235 | deltas['month'] += 12 |
|
|||
236 | deltas['year'] -= 1 |
|
|||
237 |
|
||||
238 | # In short version, we want nicer handling of ages of more than a year |
|
|||
239 | if show_short_version: |
|
|||
240 | if deltas['year'] == 1: |
|
|||
241 | # ages between 1 and 2 years: show as months |
|
|||
242 | deltas['month'] += 12 |
|
|||
243 | deltas['year'] = 0 |
|
|||
244 | if deltas['year'] >= 2: |
|
|||
245 | # ages 2+ years: round |
|
|||
246 | if deltas['month'] > 6: |
|
|||
247 | deltas['year'] += 1 |
|
|||
248 | deltas['month'] = 0 |
|
|||
249 |
|
||||
250 | # Format the result |
|
|||
251 | fmt_funcs = { |
|
|||
252 | 'year': lambda d: ungettext('%d year', '%d years', d) % d, |
|
|||
253 | 'month': lambda d: ungettext('%d month', '%d months', d) % d, |
|
|||
254 | 'day': lambda d: ungettext('%d day', '%d days', d) % d, |
|
|||
255 | 'hour': lambda d: ungettext('%d hour', '%d hours', d) % d, |
|
|||
256 | 'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d, |
|
|||
257 | 'second': lambda d: ungettext('%d second', '%d seconds', d) % d, |
|
|||
258 | } |
|
|||
259 |
|
||||
260 | for i, part in enumerate(order): |
|
|||
261 | value = deltas[part] |
|
|||
262 | if value == 0: |
|
|||
263 | continue |
|
|||
264 |
|
||||
265 | if i < 5: |
|
|||
266 | sub_part = order[i + 1] |
|
|||
267 | sub_value = deltas[sub_part] |
|
|||
268 | else: |
|
|||
269 | sub_value = 0 |
|
|||
270 |
|
||||
271 | if sub_value == 0 or show_short_version: |
|
|||
272 | if future: |
|
|||
273 | return _('in %s') % fmt_funcs[part](value) |
|
|||
274 | else: |
|
|||
275 | return _('%s ago') % fmt_funcs[part](value) |
|
|||
276 | if future: |
|
|||
277 | return _('in %s and %s') % (fmt_funcs[part](value), |
|
|||
278 | fmt_funcs[sub_part](sub_value)) |
|
|||
279 | else: |
|
|||
280 | return _('%s and %s ago') % (fmt_funcs[part](value), |
|
|||
281 | fmt_funcs[sub_part](sub_value)) |
|
|||
282 |
|
||||
283 | return _('just now') |
|
|||
284 |
|
||||
285 |
|
||||
286 | def fmt_date(date): |
|
|||
287 | if date: |
|
|||
288 | return date.strftime("%Y-%m-%d %H:%M:%S") |
|
|||
289 | return "" |
|
|||
290 |
|
||||
291 |
|
||||
292 | def uri_filter(uri): |
|
171 | def uri_filter(uri): | |
293 | """ |
|
172 | """ | |
294 | Removes user:password from given url string |
|
173 | Removes user:password from given url string | |
295 |
|
174 | |||
296 | :param uri: |
|
175 | :param uri: | |
297 | :rtype: str |
|
176 | :rtype: str | |
298 | :returns: filtered list of strings |
|
177 | :returns: filtered list of strings | |
299 | """ |
|
178 | """ | |
300 | if not uri: |
|
179 | if not uri: | |
301 | return [] |
|
180 | return [] | |
302 |
|
181 | |||
303 | proto = '' |
|
182 | proto = '' | |
304 |
|
183 | |||
305 | for pat in ('https://', 'http://', 'git://'): |
|
184 | for pat in ('https://', 'http://', 'git://'): | |
306 | if uri.startswith(pat): |
|
185 | if uri.startswith(pat): | |
307 | uri = uri[len(pat):] |
|
186 | uri = uri[len(pat):] | |
308 | proto = pat |
|
187 | proto = pat | |
309 | break |
|
188 | break | |
310 |
|
189 | |||
311 | # remove passwords and username |
|
190 | # remove passwords and username | |
312 | uri = uri[uri.find('@') + 1:] |
|
191 | uri = uri[uri.find('@') + 1:] | |
313 |
|
192 | |||
314 | # get the port |
|
193 | # get the port | |
315 | cred_pos = uri.find(':') |
|
194 | cred_pos = uri.find(':') | |
316 | if cred_pos == -1: |
|
195 | if cred_pos == -1: | |
317 | host, port = uri, None |
|
196 | host, port = uri, None | |
318 | else: |
|
197 | else: | |
319 | host, port = uri[:cred_pos], uri[cred_pos + 1:] |
|
198 | host, port = uri[:cred_pos], uri[cred_pos + 1:] | |
320 |
|
199 | |||
321 | return [_f for _f in [proto, host, port] if _f] |
|
200 | return [_f for _f in [proto, host, port] if _f] | |
322 |
|
201 | |||
323 |
|
202 | |||
324 | def credentials_filter(uri): |
|
203 | def credentials_filter(uri): | |
325 | """ |
|
204 | """ | |
326 | Returns a url with removed credentials |
|
205 | Returns a url with removed credentials | |
327 |
|
206 | |||
328 | :param uri: |
|
207 | :param uri: | |
329 | """ |
|
208 | """ | |
330 |
|
209 | |||
331 | uri = uri_filter(uri) |
|
210 | uri = uri_filter(uri) | |
332 | # check if we have port |
|
211 | # check if we have port | |
333 | if len(uri) > 2 and uri[2]: |
|
212 | if len(uri) > 2 and uri[2]: | |
334 | uri[2] = ':' + uri[2] |
|
213 | uri[2] = ':' + uri[2] | |
335 |
|
214 | |||
336 | return ''.join(uri) |
|
215 | return ''.join(uri) | |
337 |
|
216 | |||
338 |
|
217 | |||
339 | def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None): |
|
218 | def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None): | |
340 | parsed_url = urlobject.URLObject(prefix_url) |
|
219 | parsed_url = urlobject.URLObject(prefix_url) | |
341 | prefix = urllib.parse.unquote(parsed_url.path.rstrip('/')) |
|
220 | prefix = urllib.parse.unquote(parsed_url.path.rstrip('/')) | |
342 | try: |
|
221 | try: | |
343 | system_user = pwd.getpwuid(os.getuid()).pw_name |
|
222 | system_user = pwd.getpwuid(os.getuid()).pw_name | |
344 | except NameError: # TODO: support all systems - especially Windows |
|
223 | except NameError: # TODO: support all systems - especially Windows | |
345 | system_user = 'kallithea' # hardcoded default value ... |
|
224 | system_user = 'kallithea' # hardcoded default value ... | |
346 | args = { |
|
225 | args = { | |
347 | 'scheme': parsed_url.scheme, |
|
226 | 'scheme': parsed_url.scheme, | |
348 | 'user': urllib.parse.quote(username or ''), |
|
227 | 'user': urllib.parse.quote(username or ''), | |
349 | 'netloc': parsed_url.netloc + prefix, # like "hostname:port/prefix" (with optional ":port" and "/prefix") |
|
228 | 'netloc': parsed_url.netloc + prefix, # like "hostname:port/prefix" (with optional ":port" and "/prefix") | |
350 | 'prefix': prefix, # undocumented, empty or starting with / |
|
229 | 'prefix': prefix, # undocumented, empty or starting with / | |
351 | 'repo': repo_name, |
|
230 | 'repo': repo_name, | |
352 | 'repoid': str(repo_id), |
|
231 | 'repoid': str(repo_id), | |
353 | 'system_user': system_user, |
|
232 | 'system_user': system_user, | |
354 | 'hostname': parsed_url.hostname, |
|
233 | 'hostname': parsed_url.hostname, | |
355 | } |
|
234 | } | |
356 | url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl) |
|
235 | url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl) | |
357 |
|
236 | |||
358 | # remove leading @ sign if it's present. Case of empty user |
|
237 | # remove leading @ sign if it's present. Case of empty user | |
359 | url_obj = urlobject.URLObject(url) |
|
238 | url_obj = urlobject.URLObject(url) | |
360 | if not url_obj.username: |
|
239 | if not url_obj.username: | |
361 | url_obj = url_obj.with_username(None) |
|
240 | url_obj = url_obj.with_username(None) | |
362 |
|
241 | |||
363 | return str(url_obj) |
|
242 | return str(url_obj) | |
364 |
|
243 | |||
365 |
|
244 | |||
366 | def short_ref_name(ref_type, ref_name): |
|
245 | def short_ref_name(ref_type, ref_name): | |
367 | """Return short description of PR ref - revs will be truncated""" |
|
246 | """Return short description of PR ref - revs will be truncated""" | |
368 | if ref_type == 'rev': |
|
247 | if ref_type == 'rev': | |
369 | return ref_name[:12] |
|
248 | return ref_name[:12] | |
370 | return ref_name |
|
249 | return ref_name | |
371 |
|
250 | |||
372 |
|
251 | |||
373 | def link_to_ref(repo_name, ref_type, ref_name, rev=None): |
|
252 | def link_to_ref(repo_name, ref_type, ref_name, rev=None): | |
374 | """ |
|
253 | """ | |
375 | Return full markup for a PR ref to changeset_home for a changeset. |
|
254 | Return full markup for a PR ref to changeset_home for a changeset. | |
376 | If ref_type is 'branch', it will link to changelog. |
|
255 | If ref_type is 'branch', it will link to changelog. | |
377 | ref_name is shortened if ref_type is 'rev'. |
|
256 | ref_name is shortened if ref_type is 'rev'. | |
378 | if rev is specified, show it too, explicitly linking to that revision. |
|
257 | if rev is specified, show it too, explicitly linking to that revision. | |
379 | """ |
|
258 | """ | |
380 | txt = short_ref_name(ref_type, ref_name) |
|
259 | txt = short_ref_name(ref_type, ref_name) | |
381 | if ref_type == 'branch': |
|
260 | if ref_type == 'branch': | |
382 | u = webutils.url('changelog_home', repo_name=repo_name, branch=ref_name) |
|
261 | u = webutils.url('changelog_home', repo_name=repo_name, branch=ref_name) | |
383 | else: |
|
262 | else: | |
384 | u = webutils.url('changeset_home', repo_name=repo_name, revision=ref_name) |
|
263 | u = webutils.url('changeset_home', repo_name=repo_name, revision=ref_name) | |
385 | l = webutils.link_to(repo_name + '#' + txt, u) |
|
264 | l = webutils.link_to(repo_name + '#' + txt, u) | |
386 | if rev and ref_type != 'rev': |
|
265 | if rev and ref_type != 'rev': | |
387 | l = webutils.literal('%s (%s)' % (l, webutils.link_to(rev[:12], webutils.url('changeset_home', repo_name=repo_name, revision=rev)))) |
|
266 | l = webutils.literal('%s (%s)' % (l, webutils.link_to(rev[:12], webutils.url('changeset_home', repo_name=repo_name, revision=rev)))) | |
388 | return l |
|
267 | return l | |
389 |
|
268 | |||
390 |
|
269 | |||
391 | def get_changeset_safe(repo, rev): |
|
270 | def get_changeset_safe(repo, rev): | |
392 | """ |
|
271 | """ | |
393 | Safe version of get_changeset if this changeset doesn't exists for a |
|
272 | Safe version of get_changeset if this changeset doesn't exists for a | |
394 | repo it returns a Dummy one instead |
|
273 | repo it returns a Dummy one instead | |
395 |
|
274 | |||
396 | :param repo: |
|
275 | :param repo: | |
397 | :param rev: |
|
276 | :param rev: | |
398 | """ |
|
277 | """ | |
399 | if not isinstance(repo, BaseRepository): |
|
278 | if not isinstance(repo, BaseRepository): | |
400 | raise Exception('You must pass an Repository ' |
|
279 | raise Exception('You must pass an Repository ' | |
401 | 'object as first argument got %s' % type(repo)) |
|
280 | 'object as first argument got %s' % type(repo)) | |
402 |
|
281 | |||
403 | try: |
|
282 | try: | |
404 | cs = repo.get_changeset(rev) |
|
283 | cs = repo.get_changeset(rev) | |
405 | except (RepositoryError, LookupError): |
|
284 | except (RepositoryError, LookupError): | |
406 | cs = EmptyChangeset(requested_revision=rev) |
|
285 | cs = EmptyChangeset(requested_revision=rev) | |
407 | return cs |
|
286 | return cs | |
408 |
|
287 | |||
409 |
|
288 | |||
410 | def datetime_to_time(dt): |
|
289 | def datetime_to_time(dt): | |
411 | if dt: |
|
290 | if dt: | |
412 | return time.mktime(dt.timetuple()) |
|
291 | return time.mktime(dt.timetuple()) | |
413 |
|
292 | |||
414 |
|
293 | |||
415 | def time_to_datetime(tm): |
|
294 | def time_to_datetime(tm): | |
416 | if tm: |
|
295 | if tm: | |
417 | if isinstance(tm, str): |
|
296 | if isinstance(tm, str): | |
418 | try: |
|
297 | try: | |
419 | tm = float(tm) |
|
298 | tm = float(tm) | |
420 | except ValueError: |
|
299 | except ValueError: | |
421 | return |
|
300 | return | |
422 | return datetime.datetime.fromtimestamp(tm) |
|
301 | return datetime.datetime.fromtimestamp(tm) | |
423 |
|
302 | |||
424 |
|
303 | |||
425 | class AttributeDict(dict): |
|
304 | class AttributeDict(dict): | |
426 | def __getattr__(self, attr): |
|
305 | def __getattr__(self, attr): | |
427 | return self.get(attr, None) |
|
306 | return self.get(attr, None) | |
428 | __setattr__ = dict.__setitem__ |
|
307 | __setattr__ = dict.__setitem__ | |
429 | __delattr__ = dict.__delitem__ |
|
308 | __delattr__ = dict.__delitem__ | |
430 |
|
309 | |||
431 |
|
310 | |||
432 | def obfuscate_url_pw(engine): |
|
311 | def obfuscate_url_pw(engine): | |
433 | try: |
|
312 | try: | |
434 | _url = sa_url.make_url(engine or '') |
|
313 | _url = sa_url.make_url(engine or '') | |
435 | except ArgumentError: |
|
314 | except ArgumentError: | |
436 | return engine |
|
315 | return engine | |
437 | if _url.password: |
|
316 | if _url.password: | |
438 | _url.password = 'XXXXX' |
|
317 | _url.password = 'XXXXX' | |
439 | return str(_url) |
|
318 | return str(_url) | |
440 |
|
319 | |||
441 |
|
320 | |||
442 | class HookEnvironmentError(Exception): pass |
|
321 | class HookEnvironmentError(Exception): pass | |
443 |
|
322 | |||
444 |
|
323 | |||
445 | def get_hook_environment(): |
|
324 | def get_hook_environment(): | |
446 | """ |
|
325 | """ | |
447 | Get hook context by deserializing the global KALLITHEA_EXTRAS environment |
|
326 | Get hook context by deserializing the global KALLITHEA_EXTRAS environment | |
448 | variable. |
|
327 | variable. | |
449 |
|
328 | |||
450 | Called early in Git out-of-process hooks to get .ini config path so the |
|
329 | Called early in Git out-of-process hooks to get .ini config path so the | |
451 | basic environment can be configured properly. Also used in all hooks to get |
|
330 | basic environment can be configured properly. Also used in all hooks to get | |
452 | information about the action that triggered it. |
|
331 | information about the action that triggered it. | |
453 | """ |
|
332 | """ | |
454 |
|
333 | |||
455 | try: |
|
334 | try: | |
456 | kallithea_extras = os.environ['KALLITHEA_EXTRAS'] |
|
335 | kallithea_extras = os.environ['KALLITHEA_EXTRAS'] | |
457 | except KeyError: |
|
336 | except KeyError: | |
458 | raise HookEnvironmentError("Environment variable KALLITHEA_EXTRAS not found") |
|
337 | raise HookEnvironmentError("Environment variable KALLITHEA_EXTRAS not found") | |
459 |
|
338 | |||
460 | extras = json.loads(kallithea_extras) |
|
339 | extras = json.loads(kallithea_extras) | |
461 | for k in ['username', 'repository', 'scm', 'action', 'ip', 'config']: |
|
340 | for k in ['username', 'repository', 'scm', 'action', 'ip', 'config']: | |
462 | try: |
|
341 | try: | |
463 | extras[k] |
|
342 | extras[k] | |
464 | except KeyError: |
|
343 | except KeyError: | |
465 | raise HookEnvironmentError('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras)) |
|
344 | raise HookEnvironmentError('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras)) | |
466 |
|
345 | |||
467 | return AttributeDict(extras) |
|
346 | return AttributeDict(extras) | |
468 |
|
347 | |||
469 |
|
348 | |||
470 | def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None): |
|
349 | def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None): | |
471 | """Prepare global context for running hooks by serializing data in the |
|
350 | """Prepare global context for running hooks by serializing data in the | |
472 | global KALLITHEA_EXTRAS environment variable. |
|
351 | global KALLITHEA_EXTRAS environment variable. | |
473 |
|
352 | |||
474 | Most importantly, this allow Git hooks to do proper logging and updating of |
|
353 | Most importantly, this allow Git hooks to do proper logging and updating of | |
475 | caches after pushes. |
|
354 | caches after pushes. | |
476 |
|
355 | |||
477 | Must always be called before anything with hooks are invoked. |
|
356 | Must always be called before anything with hooks are invoked. | |
478 | """ |
|
357 | """ | |
479 | extras = { |
|
358 | extras = { | |
480 | 'ip': ip_addr, # used in action_logger |
|
359 | 'ip': ip_addr, # used in action_logger | |
481 | 'username': username, |
|
360 | 'username': username, | |
482 | 'action': action or 'push_local', # used in process_pushed_raw_ids action_logger |
|
361 | 'action': action or 'push_local', # used in process_pushed_raw_ids action_logger | |
483 | 'repository': repo_name, |
|
362 | 'repository': repo_name, | |
484 | 'scm': repo_alias, |
|
363 | 'scm': repo_alias, | |
485 | 'config': kallithea.CONFIG['__file__'], # used by git hook to read config |
|
364 | 'config': kallithea.CONFIG['__file__'], # used by git hook to read config | |
486 | } |
|
365 | } | |
487 | os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras) |
|
366 | os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras) | |
488 |
|
367 | |||
489 |
|
368 | |||
490 | def get_current_authuser(): |
|
369 | def get_current_authuser(): | |
491 | """ |
|
370 | """ | |
492 | Gets kallithea user from threadlocal tmpl_context variable if it's |
|
371 | Gets kallithea user from threadlocal tmpl_context variable if it's | |
493 | defined, else returns None. |
|
372 | defined, else returns None. | |
494 | """ |
|
373 | """ | |
495 | try: |
|
374 | try: | |
496 | return getattr(tmpl_context, 'authuser', None) |
|
375 | return getattr(tmpl_context, 'authuser', None) | |
497 | except TypeError: # No object (name: context) has been registered for this thread |
|
376 | except TypeError: # No object (name: context) has been registered for this thread | |
498 | return None |
|
377 | return None | |
499 |
|
378 | |||
500 |
|
379 | |||
501 | def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub): |
|
380 | def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub): | |
502 | return _cleanstringsub('_', s).rstrip('_') |
|
381 | return _cleanstringsub('_', s).rstrip('_') | |
503 |
|
382 | |||
504 |
|
383 | |||
505 | def recursive_replace(str_, replace=' '): |
|
384 | def recursive_replace(str_, replace=' '): | |
506 | """ |
|
385 | """ | |
507 | Recursive replace of given sign to just one instance |
|
386 | Recursive replace of given sign to just one instance | |
508 |
|
387 | |||
509 | :param str_: given string |
|
388 | :param str_: given string | |
510 | :param replace: char to find and replace multiple instances |
|
389 | :param replace: char to find and replace multiple instances | |
511 |
|
390 | |||
512 | Examples:: |
|
391 | Examples:: | |
513 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
392 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') | |
514 | 'Mighty-Mighty-Bo-sstones' |
|
393 | 'Mighty-Mighty-Bo-sstones' | |
515 | """ |
|
394 | """ | |
516 |
|
395 | |||
517 | if str_.find(replace * 2) == -1: |
|
396 | if str_.find(replace * 2) == -1: | |
518 | return str_ |
|
397 | return str_ | |
519 | else: |
|
398 | else: | |
520 | str_ = str_.replace(replace * 2, replace) |
|
399 | str_ = str_.replace(replace * 2, replace) | |
521 | return recursive_replace(str_, replace) |
|
400 | return recursive_replace(str_, replace) | |
522 |
|
401 | |||
523 |
|
402 | |||
524 | def repo_name_slug(value): |
|
403 | def repo_name_slug(value): | |
525 | """ |
|
404 | """ | |
526 | Return slug of name of repository |
|
405 | Return slug of name of repository | |
527 | This function is called on each creation/modification |
|
406 | This function is called on each creation/modification | |
528 | of repository to prevent bad names in repo |
|
407 | of repository to prevent bad names in repo | |
529 | """ |
|
408 | """ | |
530 |
|
409 | |||
531 | slug = remove_formatting(value) |
|
410 | slug = remove_formatting(value) | |
532 | slug = strip_tags(slug) |
|
411 | slug = strip_tags(slug) | |
533 |
|
412 | |||
534 | for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: |
|
413 | for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: | |
535 | slug = slug.replace(c, '-') |
|
414 | slug = slug.replace(c, '-') | |
536 | slug = recursive_replace(slug, '-') |
|
415 | slug = recursive_replace(slug, '-') | |
537 | slug = collapse(slug, '-') |
|
416 | slug = collapse(slug, '-') | |
538 | return slug |
|
417 | return slug | |
539 |
|
418 | |||
540 |
|
419 | |||
541 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): |
|
420 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): | |
542 | while True: |
|
421 | while True: | |
543 | ok = input(prompt) |
|
422 | ok = input(prompt) | |
544 | if ok in ('y', 'ye', 'yes'): |
|
423 | if ok in ('y', 'ye', 'yes'): | |
545 | return True |
|
424 | return True | |
546 | if ok in ('n', 'no', 'nop', 'nope'): |
|
425 | if ok in ('n', 'no', 'nop', 'nope'): | |
547 | return False |
|
426 | return False | |
548 | retries = retries - 1 |
|
427 | retries = retries - 1 | |
549 | if retries < 0: |
|
428 | if retries < 0: | |
550 | raise IOError |
|
429 | raise IOError | |
551 | print(complaint) |
|
430 | print(complaint) | |
552 |
|
431 | |||
553 |
|
432 | |||
554 | class PasswordGenerator(object): |
|
433 | class PasswordGenerator(object): | |
555 | """ |
|
434 | """ | |
556 | This is a simple class for generating password from different sets of |
|
435 | This is a simple class for generating password from different sets of | |
557 | characters |
|
436 | characters | |
558 | usage:: |
|
437 | usage:: | |
559 |
|
438 | |||
560 | passwd_gen = PasswordGenerator() |
|
439 | passwd_gen = PasswordGenerator() | |
561 | #print 8-letter password containing only big and small letters |
|
440 | #print 8-letter password containing only big and small letters | |
562 | of alphabet |
|
441 | of alphabet | |
563 | passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL) |
|
442 | passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL) | |
564 | """ |
|
443 | """ | |
565 | ALPHABETS_NUM = r'''1234567890''' |
|
444 | ALPHABETS_NUM = r'''1234567890''' | |
566 | ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm''' |
|
445 | ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm''' | |
567 | ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM''' |
|
446 | ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM''' | |
568 | ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?''' |
|
447 | ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?''' | |
569 | ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \ |
|
448 | ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \ | |
570 | + ALPHABETS_NUM + ALPHABETS_SPECIAL |
|
449 | + ALPHABETS_NUM + ALPHABETS_SPECIAL | |
571 | ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM |
|
450 | ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM | |
572 | ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL |
|
451 | ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL | |
573 | ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM |
|
452 | ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM | |
574 | ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM |
|
453 | ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM | |
575 |
|
454 | |||
576 | def gen_password(self, length, alphabet=ALPHABETS_FULL): |
|
455 | def gen_password(self, length, alphabet=ALPHABETS_FULL): | |
577 | assert len(alphabet) <= 256, alphabet |
|
456 | assert len(alphabet) <= 256, alphabet | |
578 | l = [] |
|
457 | l = [] | |
579 | while len(l) < length: |
|
458 | while len(l) < length: | |
580 | i = ord(os.urandom(1)) |
|
459 | i = ord(os.urandom(1)) | |
581 | if i < len(alphabet): |
|
460 | if i < len(alphabet): | |
582 | l.append(alphabet[i]) |
|
461 | l.append(alphabet[i]) | |
583 | return ''.join(l) |
|
462 | return ''.join(l) | |
584 |
|
463 | |||
585 |
|
464 | |||
586 | def get_crypt_password(password): |
|
465 | def get_crypt_password(password): | |
587 | """ |
|
466 | """ | |
588 | Cryptographic function used for bcrypt password hashing. |
|
467 | Cryptographic function used for bcrypt password hashing. | |
589 |
|
468 | |||
590 | :param password: password to hash |
|
469 | :param password: password to hash | |
591 | """ |
|
470 | """ | |
592 | return ascii_str(bcrypt.hashpw(safe_bytes(password), bcrypt.gensalt(10))) |
|
471 | return ascii_str(bcrypt.hashpw(safe_bytes(password), bcrypt.gensalt(10))) | |
593 |
|
472 | |||
594 |
|
473 | |||
595 | def check_password(password, hashed): |
|
474 | def check_password(password, hashed): | |
596 | """ |
|
475 | """ | |
597 | Checks password match the hashed value using bcrypt. |
|
476 | Checks password match the hashed value using bcrypt. | |
598 | Remains backwards compatible and accept plain sha256 hashes which used to |
|
477 | Remains backwards compatible and accept plain sha256 hashes which used to | |
599 | be used on Windows. |
|
478 | be used on Windows. | |
600 |
|
479 | |||
601 | :param password: password |
|
480 | :param password: password | |
602 | :param hashed: password in hashed form |
|
481 | :param hashed: password in hashed form | |
603 | """ |
|
482 | """ | |
604 | # sha256 hashes will always be 64 hex chars |
|
483 | # sha256 hashes will always be 64 hex chars | |
605 | # bcrypt hashes will always contain $ (and be shorter) |
|
484 | # bcrypt hashes will always contain $ (and be shorter) | |
606 | if len(hashed) == 64 and all(x in string.hexdigits for x in hashed): |
|
485 | if len(hashed) == 64 and all(x in string.hexdigits for x in hashed): | |
607 | return hashlib.sha256(password).hexdigest() == hashed |
|
486 | return hashlib.sha256(password).hexdigest() == hashed | |
608 | try: |
|
487 | try: | |
609 | return bcrypt.checkpw(safe_bytes(password), ascii_bytes(hashed)) |
|
488 | return bcrypt.checkpw(safe_bytes(password), ascii_bytes(hashed)) | |
610 | except ValueError as e: |
|
489 | except ValueError as e: | |
611 | # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors |
|
490 | # bcrypt will throw ValueError 'Invalid hashed_password salt' on all password errors | |
612 | log.error('error from bcrypt checking password: %s', e) |
|
491 | log.error('error from bcrypt checking password: %s', e) | |
613 | return False |
|
492 | return False | |
614 | log.error('check_password failed - no method found for hash length %s', len(hashed)) |
|
493 | log.error('check_password failed - no method found for hash length %s', len(hashed)) | |
615 | return False |
|
494 | return False | |
616 |
|
495 | |||
617 |
|
496 | |||
618 | git_req_ver = StrictVersion('1.7.4') |
|
497 | git_req_ver = StrictVersion('1.7.4') | |
619 |
|
498 | |||
620 | def check_git_version(): |
|
499 | def check_git_version(): | |
621 | """ |
|
500 | """ | |
622 | Checks what version of git is installed on the system, and raise a system exit |
|
501 | Checks what version of git is installed on the system, and raise a system exit | |
623 | if it's too old for Kallithea to work properly. |
|
502 | if it's too old for Kallithea to work properly. | |
624 | """ |
|
503 | """ | |
625 | if 'git' not in kallithea.BACKENDS: |
|
504 | if 'git' not in kallithea.BACKENDS: | |
626 | return None |
|
505 | return None | |
627 |
|
506 | |||
628 | if not settings.GIT_EXECUTABLE_PATH: |
|
507 | if not settings.GIT_EXECUTABLE_PATH: | |
629 | log.warning('No git executable configured - check "git_path" in the ini file.') |
|
508 | log.warning('No git executable configured - check "git_path" in the ini file.') | |
630 | return None |
|
509 | return None | |
631 |
|
510 | |||
632 | try: |
|
511 | try: | |
633 | stdout, stderr = GitRepository._run_git_command(['--version']) |
|
512 | stdout, stderr = GitRepository._run_git_command(['--version']) | |
634 | except RepositoryError as e: |
|
513 | except RepositoryError as e: | |
635 | # message will already have been logged as error |
|
514 | # message will already have been logged as error | |
636 | log.warning('No working git executable found - check "git_path" in the ini file.') |
|
515 | log.warning('No working git executable found - check "git_path" in the ini file.') | |
637 | return None |
|
516 | return None | |
638 |
|
517 | |||
639 | if stderr: |
|
518 | if stderr: | |
640 | log.warning('Error/stderr from "%s --version":\n%s', settings.GIT_EXECUTABLE_PATH, safe_str(stderr)) |
|
519 | log.warning('Error/stderr from "%s --version":\n%s', settings.GIT_EXECUTABLE_PATH, safe_str(stderr)) | |
641 |
|
520 | |||
642 | if not stdout: |
|
521 | if not stdout: | |
643 | log.warning('No working git executable found - check "git_path" in the ini file.') |
|
522 | log.warning('No working git executable found - check "git_path" in the ini file.') | |
644 | return None |
|
523 | return None | |
645 |
|
524 | |||
646 | output = safe_str(stdout).strip() |
|
525 | output = safe_str(stdout).strip() | |
647 | m = re.search(r"\d+.\d+.\d+", output) |
|
526 | m = re.search(r"\d+.\d+.\d+", output) | |
648 | if m: |
|
527 | if m: | |
649 | ver = StrictVersion(m.group(0)) |
|
528 | ver = StrictVersion(m.group(0)) | |
650 | log.debug('Git executable: "%s", version %s (parsed from: "%s")', |
|
529 | log.debug('Git executable: "%s", version %s (parsed from: "%s")', | |
651 | settings.GIT_EXECUTABLE_PATH, ver, output) |
|
530 | settings.GIT_EXECUTABLE_PATH, ver, output) | |
652 | if ver < git_req_ver: |
|
531 | if ver < git_req_ver: | |
653 | log.error('Kallithea detected %s version %s, which is too old ' |
|
532 | log.error('Kallithea detected %s version %s, which is too old ' | |
654 | 'for the system to function properly. ' |
|
533 | 'for the system to function properly. ' | |
655 | 'Please upgrade to version %s or later. ' |
|
534 | 'Please upgrade to version %s or later. ' | |
656 | 'If you strictly need Mercurial repositories, you can ' |
|
535 | 'If you strictly need Mercurial repositories, you can ' | |
657 | 'clear the "git_path" setting in the ini file.', |
|
536 | 'clear the "git_path" setting in the ini file.', | |
658 | settings.GIT_EXECUTABLE_PATH, ver, git_req_ver) |
|
537 | settings.GIT_EXECUTABLE_PATH, ver, git_req_ver) | |
659 | log.error("Terminating ...") |
|
538 | log.error("Terminating ...") | |
660 | sys.exit(1) |
|
539 | sys.exit(1) | |
661 | else: |
|
540 | else: | |
662 | ver = StrictVersion('0.0.0') |
|
541 | ver = StrictVersion('0.0.0') | |
663 | log.warning('Error finding version number in "%s --version" stdout:\n%s', |
|
542 | log.warning('Error finding version number in "%s --version" stdout:\n%s', | |
664 | settings.GIT_EXECUTABLE_PATH, output) |
|
543 | settings.GIT_EXECUTABLE_PATH, output) | |
665 |
|
544 | |||
666 | return ver |
|
545 | return ver |
@@ -1,534 +1,668 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.webutils |
|
15 | kallithea.lib.webutils | |
16 | ~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Helper functions that may rely on the current WSGI request, exposed in the TG2 |
|
18 | Helper functions that may rely on the current WSGI request, exposed in the TG2 | |
19 | thread-local "global" variables. It should have few dependencies so it can be |
|
19 | thread-local "global" variables. It should have few dependencies so it can be | |
20 | imported anywhere - just like the global variables can be used everywhere. |
|
20 | imported anywhere - just like the global variables can be used everywhere. | |
21 | """ |
|
21 | """ | |
22 |
|
22 | |||
|
23 | import datetime | |||
23 | import json |
|
24 | import json | |
24 | import logging |
|
25 | import logging | |
25 | import random |
|
26 | import random | |
26 | import re |
|
27 | import re | |
27 |
|
28 | |||
|
29 | from dateutil import relativedelta | |||
28 | from tg import request, session |
|
30 | from tg import request, session | |
|
31 | from tg.i18n import ugettext as _ | |||
|
32 | from tg.i18n import ungettext | |||
29 | from webhelpers2.html import HTML, escape, literal |
|
33 | from webhelpers2.html import HTML, escape, literal | |
30 | from webhelpers2.html.tags import NotGiven, Option, Options, _input |
|
34 | from webhelpers2.html.tags import NotGiven, Option, Options, _input | |
31 | from webhelpers2.html.tags import _make_safe_id_component as safeid |
|
35 | from webhelpers2.html.tags import _make_safe_id_component as safeid | |
32 | from webhelpers2.html.tags import checkbox, end_form |
|
36 | from webhelpers2.html.tags import checkbox, end_form | |
33 | from webhelpers2.html.tags import form as insecure_form |
|
37 | from webhelpers2.html.tags import form as insecure_form | |
34 | from webhelpers2.html.tags import hidden, link_to, password, radio |
|
38 | from webhelpers2.html.tags import hidden, link_to, password, radio | |
35 | from webhelpers2.html.tags import select as webhelpers2_select |
|
39 | from webhelpers2.html.tags import select as webhelpers2_select | |
36 | from webhelpers2.html.tags import submit, text, textarea |
|
40 | from webhelpers2.html.tags import submit, text, textarea | |
37 | from webhelpers2.number import format_byte_size |
|
41 | from webhelpers2.number import format_byte_size | |
38 | from webhelpers2.text import chop_at, truncate, wrap_paragraphs |
|
42 | from webhelpers2.text import chop_at, truncate, wrap_paragraphs | |
39 |
|
43 | |||
40 | import kallithea |
|
44 | import kallithea | |
41 |
|
45 | |||
42 |
|
46 | |||
43 | log = logging.getLogger(__name__) |
|
47 | log = logging.getLogger(__name__) | |
44 |
|
48 | |||
45 |
|
49 | |||
46 | # mute pyflakes "imported but unused" |
|
50 | # mute pyflakes "imported but unused" | |
47 | assert Option |
|
51 | assert Option | |
48 | assert checkbox |
|
52 | assert checkbox | |
49 | assert chop_at |
|
53 | assert chop_at | |
50 | assert end_form |
|
54 | assert end_form | |
51 | assert escape |
|
55 | assert escape | |
52 | assert format_byte_size |
|
56 | assert format_byte_size | |
53 | assert link_to |
|
57 | assert link_to | |
54 | assert literal |
|
58 | assert literal | |
55 | assert password |
|
59 | assert password | |
56 | assert radio |
|
60 | assert radio | |
57 | assert safeid |
|
61 | assert safeid | |
58 | assert submit |
|
62 | assert submit | |
59 | assert text |
|
63 | assert text | |
60 | assert textarea |
|
64 | assert textarea | |
61 | assert truncate |
|
65 | assert truncate | |
62 | assert wrap_paragraphs |
|
66 | assert wrap_paragraphs | |
63 |
|
67 | |||
64 |
|
68 | |||
65 | # |
|
69 | # | |
66 | # General Kallithea URL handling |
|
70 | # General Kallithea URL handling | |
67 | # |
|
71 | # | |
68 |
|
72 | |||
69 | class UrlGenerator(object): |
|
73 | class UrlGenerator(object): | |
70 | """Emulate pylons.url in providing a wrapper around routes.url |
|
74 | """Emulate pylons.url in providing a wrapper around routes.url | |
71 |
|
75 | |||
72 | This code was added during migration from Pylons to Turbogears2. Pylons |
|
76 | This code was added during migration from Pylons to Turbogears2. Pylons | |
73 | already provided a wrapper like this, but Turbogears2 does not. |
|
77 | already provided a wrapper like this, but Turbogears2 does not. | |
74 |
|
78 | |||
75 | When the routing of Kallithea is changed to use less Routes and more |
|
79 | When the routing of Kallithea is changed to use less Routes and more | |
76 | Turbogears2-style routing, this class may disappear or change. |
|
80 | Turbogears2-style routing, this class may disappear or change. | |
77 |
|
81 | |||
78 | url() (the __call__ method) returns the URL based on a route name and |
|
82 | url() (the __call__ method) returns the URL based on a route name and | |
79 | arguments. |
|
83 | arguments. | |
80 | url.current() returns the URL of the current page with arguments applied. |
|
84 | url.current() returns the URL of the current page with arguments applied. | |
81 |
|
85 | |||
82 | Refer to documentation of Routes for details: |
|
86 | Refer to documentation of Routes for details: | |
83 | https://routes.readthedocs.io/en/latest/generating.html#generation |
|
87 | https://routes.readthedocs.io/en/latest/generating.html#generation | |
84 | """ |
|
88 | """ | |
85 | def __call__(self, *args, **kwargs): |
|
89 | def __call__(self, *args, **kwargs): | |
86 | return request.environ['routes.url'](*args, **kwargs) |
|
90 | return request.environ['routes.url'](*args, **kwargs) | |
87 |
|
91 | |||
88 | def current(self, *args, **kwargs): |
|
92 | def current(self, *args, **kwargs): | |
89 | return request.environ['routes.url'].current(*args, **kwargs) |
|
93 | return request.environ['routes.url'].current(*args, **kwargs) | |
90 |
|
94 | |||
91 |
|
95 | |||
92 | url = UrlGenerator() |
|
96 | url = UrlGenerator() | |
93 |
|
97 | |||
94 |
|
98 | |||
95 | def canonical_url(*args, **kargs): |
|
99 | def canonical_url(*args, **kargs): | |
96 | '''Like url(x, qualified=True), but returns url that not only is qualified |
|
100 | '''Like url(x, qualified=True), but returns url that not only is qualified | |
97 | but also canonical, as configured in canonical_url''' |
|
101 | but also canonical, as configured in canonical_url''' | |
98 | try: |
|
102 | try: | |
99 | parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1) |
|
103 | parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1) | |
100 | kargs['host'] = parts[1] |
|
104 | kargs['host'] = parts[1] | |
101 | kargs['protocol'] = parts[0] |
|
105 | kargs['protocol'] = parts[0] | |
102 | except IndexError: |
|
106 | except IndexError: | |
103 | kargs['qualified'] = True |
|
107 | kargs['qualified'] = True | |
104 | return url(*args, **kargs) |
|
108 | return url(*args, **kargs) | |
105 |
|
109 | |||
106 |
|
110 | |||
107 | def canonical_hostname(): |
|
111 | def canonical_hostname(): | |
108 | '''Return canonical hostname of system''' |
|
112 | '''Return canonical hostname of system''' | |
109 | try: |
|
113 | try: | |
110 | parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1) |
|
114 | parts = kallithea.CONFIG.get('canonical_url', '').split('://', 1) | |
111 | return parts[1].split('/', 1)[0] |
|
115 | return parts[1].split('/', 1)[0] | |
112 | except IndexError: |
|
116 | except IndexError: | |
113 | parts = url('home', qualified=True).split('://', 1) |
|
117 | parts = url('home', qualified=True).split('://', 1) | |
114 | return parts[1].split('/', 1)[0] |
|
118 | return parts[1].split('/', 1)[0] | |
115 |
|
119 | |||
116 |
|
120 | |||
117 | # |
|
121 | # | |
118 | # Custom Webhelpers2 stuff |
|
122 | # Custom Webhelpers2 stuff | |
119 | # |
|
123 | # | |
120 |
|
124 | |||
121 | def html_escape(s): |
|
125 | def html_escape(s): | |
122 | """Return string with all html escaped. |
|
126 | """Return string with all html escaped. | |
123 | This is also safe for javascript in html but not necessarily correct. |
|
127 | This is also safe for javascript in html but not necessarily correct. | |
124 | """ |
|
128 | """ | |
125 | return (s |
|
129 | return (s | |
126 | .replace('&', '&') |
|
130 | .replace('&', '&') | |
127 | .replace(">", ">") |
|
131 | .replace(">", ">") | |
128 | .replace("<", "<") |
|
132 | .replace("<", "<") | |
129 | .replace('"', """) |
|
133 | .replace('"', """) | |
130 | .replace("'", "'") # Note: this is HTML5 not HTML4 and might not work in mails |
|
134 | .replace("'", "'") # Note: this is HTML5 not HTML4 and might not work in mails | |
131 | ) |
|
135 | ) | |
132 |
|
136 | |||
133 |
|
137 | |||
134 | def reset(name, value, id=NotGiven, **attrs): |
|
138 | def reset(name, value, id=NotGiven, **attrs): | |
135 | """Create a reset button, similar to webhelpers2.html.tags.submit .""" |
|
139 | """Create a reset button, similar to webhelpers2.html.tags.submit .""" | |
136 | return _input("reset", name, value, id, attrs) |
|
140 | return _input("reset", name, value, id, attrs) | |
137 |
|
141 | |||
138 |
|
142 | |||
139 | def select(name, selected_values, options, id=NotGiven, **attrs): |
|
143 | def select(name, selected_values, options, id=NotGiven, **attrs): | |
140 | """Convenient wrapper of webhelpers2 to let it accept options as a tuple list""" |
|
144 | """Convenient wrapper of webhelpers2 to let it accept options as a tuple list""" | |
141 | if isinstance(options, list): |
|
145 | if isinstance(options, list): | |
142 | option_list = options |
|
146 | option_list = options | |
143 | # Handle old value,label lists ... where value also can be value,label lists |
|
147 | # Handle old value,label lists ... where value also can be value,label lists | |
144 | options = Options() |
|
148 | options = Options() | |
145 | for x in option_list: |
|
149 | for x in option_list: | |
146 | if isinstance(x, tuple) and len(x) == 2: |
|
150 | if isinstance(x, tuple) and len(x) == 2: | |
147 | value, label = x |
|
151 | value, label = x | |
148 | elif isinstance(x, str): |
|
152 | elif isinstance(x, str): | |
149 | value = label = x |
|
153 | value = label = x | |
150 | else: |
|
154 | else: | |
151 | log.error('invalid select option %r', x) |
|
155 | log.error('invalid select option %r', x) | |
152 | raise |
|
156 | raise | |
153 | if isinstance(value, list): |
|
157 | if isinstance(value, list): | |
154 | og = options.add_optgroup(label) |
|
158 | og = options.add_optgroup(label) | |
155 | for x in value: |
|
159 | for x in value: | |
156 | if isinstance(x, tuple) and len(x) == 2: |
|
160 | if isinstance(x, tuple) and len(x) == 2: | |
157 | group_value, group_label = x |
|
161 | group_value, group_label = x | |
158 | elif isinstance(x, str): |
|
162 | elif isinstance(x, str): | |
159 | group_value = group_label = x |
|
163 | group_value = group_label = x | |
160 | else: |
|
164 | else: | |
161 | log.error('invalid select option %r', x) |
|
165 | log.error('invalid select option %r', x) | |
162 | raise |
|
166 | raise | |
163 | og.add_option(group_label, group_value) |
|
167 | og.add_option(group_label, group_value) | |
164 | else: |
|
168 | else: | |
165 | options.add_option(label, value) |
|
169 | options.add_option(label, value) | |
166 | return webhelpers2_select(name, selected_values, options, id=id, **attrs) |
|
170 | return webhelpers2_select(name, selected_values, options, id=id, **attrs) | |
167 |
|
171 | |||
168 |
|
172 | |||
169 | session_csrf_secret_name = "_session_csrf_secret_token" |
|
173 | session_csrf_secret_name = "_session_csrf_secret_token" | |
170 |
|
174 | |||
171 | def session_csrf_secret_token(): |
|
175 | def session_csrf_secret_token(): | |
172 | """Return (and create) the current session's CSRF protection token.""" |
|
176 | """Return (and create) the current session's CSRF protection token.""" | |
173 | if not session_csrf_secret_name in session: |
|
177 | if not session_csrf_secret_name in session: | |
174 | session[session_csrf_secret_name] = str(random.getrandbits(128)) |
|
178 | session[session_csrf_secret_name] = str(random.getrandbits(128)) | |
175 | session.save() |
|
179 | session.save() | |
176 | return session[session_csrf_secret_name] |
|
180 | return session[session_csrf_secret_name] | |
177 |
|
181 | |||
178 | def form(url, method="post", **attrs): |
|
182 | def form(url, method="post", **attrs): | |
179 | """Like webhelpers.html.tags.form , but automatically adding |
|
183 | """Like webhelpers.html.tags.form , but automatically adding | |
180 | session_csrf_secret_token for POST. The secret is thus never leaked in GET |
|
184 | session_csrf_secret_token for POST. The secret is thus never leaked in GET | |
181 | URLs. |
|
185 | URLs. | |
182 | """ |
|
186 | """ | |
183 | form = insecure_form(url, method, **attrs) |
|
187 | form = insecure_form(url, method, **attrs) | |
184 | if method.lower() == 'get': |
|
188 | if method.lower() == 'get': | |
185 | return form |
|
189 | return form | |
186 | return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;") |
|
190 | return form + HTML.div(hidden(session_csrf_secret_name, session_csrf_secret_token()), style="display: none;") | |
187 |
|
191 | |||
188 |
|
192 | |||
189 | # |
|
193 | # | |
190 | # Flash messages, stored in cookie |
|
194 | # Flash messages, stored in cookie | |
191 | # |
|
195 | # | |
192 |
|
196 | |||
193 | class _Message(object): |
|
197 | class _Message(object): | |
194 | """A message returned by ``pop_flash_messages()``. |
|
198 | """A message returned by ``pop_flash_messages()``. | |
195 |
|
199 | |||
196 | Converting the message to a string returns the message text. Instances |
|
200 | Converting the message to a string returns the message text. Instances | |
197 | also have the following attributes: |
|
201 | also have the following attributes: | |
198 |
|
202 | |||
199 | * ``category``: the category specified when the message was created. |
|
203 | * ``category``: the category specified when the message was created. | |
200 | * ``message``: the html-safe message text. |
|
204 | * ``message``: the html-safe message text. | |
201 | """ |
|
205 | """ | |
202 |
|
206 | |||
203 | def __init__(self, category, message): |
|
207 | def __init__(self, category, message): | |
204 | self.category = category |
|
208 | self.category = category | |
205 | self.message = message |
|
209 | self.message = message | |
206 |
|
210 | |||
207 |
|
211 | |||
208 | def _session_flash_messages(append=None, clear=False): |
|
212 | def _session_flash_messages(append=None, clear=False): | |
209 | """Manage a message queue in tg.session: return the current message queue |
|
213 | """Manage a message queue in tg.session: return the current message queue | |
210 | after appending the given message, and possibly clearing the queue.""" |
|
214 | after appending the given message, and possibly clearing the queue.""" | |
211 | key = 'flash' |
|
215 | key = 'flash' | |
212 | if key in session: |
|
216 | if key in session: | |
213 | flash_messages = session[key] |
|
217 | flash_messages = session[key] | |
214 | else: |
|
218 | else: | |
215 | if append is None: # common fast path - also used for clearing empty queue |
|
219 | if append is None: # common fast path - also used for clearing empty queue | |
216 | return [] # don't bother saving |
|
220 | return [] # don't bother saving | |
217 | flash_messages = [] |
|
221 | flash_messages = [] | |
218 | session[key] = flash_messages |
|
222 | session[key] = flash_messages | |
219 | if append is not None and append not in flash_messages: |
|
223 | if append is not None and append not in flash_messages: | |
220 | flash_messages.append(append) |
|
224 | flash_messages.append(append) | |
221 | if clear: |
|
225 | if clear: | |
222 | session.pop(key, None) |
|
226 | session.pop(key, None) | |
223 | session.save() |
|
227 | session.save() | |
224 | return flash_messages |
|
228 | return flash_messages | |
225 |
|
229 | |||
226 |
|
230 | |||
227 | def flash(message, category, logf=None): |
|
231 | def flash(message, category, logf=None): | |
228 | """ |
|
232 | """ | |
229 | Show a message to the user _and_ log it through the specified function |
|
233 | Show a message to the user _and_ log it through the specified function | |
230 |
|
234 | |||
231 | category: notice (default), warning, error, success |
|
235 | category: notice (default), warning, error, success | |
232 | logf: a custom log function - such as log.debug |
|
236 | logf: a custom log function - such as log.debug | |
233 |
|
237 | |||
234 | logf defaults to log.info, unless category equals 'success', in which |
|
238 | logf defaults to log.info, unless category equals 'success', in which | |
235 | case logf defaults to log.debug. |
|
239 | case logf defaults to log.debug. | |
236 | """ |
|
240 | """ | |
237 | assert category in ('error', 'success', 'warning'), category |
|
241 | assert category in ('error', 'success', 'warning'), category | |
238 | if hasattr(message, '__html__'): |
|
242 | if hasattr(message, '__html__'): | |
239 | # render to HTML for storing in cookie |
|
243 | # render to HTML for storing in cookie | |
240 | safe_message = str(message) |
|
244 | safe_message = str(message) | |
241 | else: |
|
245 | else: | |
242 | # Apply str - the message might be an exception with __str__ |
|
246 | # Apply str - the message might be an exception with __str__ | |
243 | # Escape, so we can trust the result without further escaping, without any risk of injection |
|
247 | # Escape, so we can trust the result without further escaping, without any risk of injection | |
244 | safe_message = html_escape(str(message)) |
|
248 | safe_message = html_escape(str(message)) | |
245 | if logf is None: |
|
249 | if logf is None: | |
246 | logf = log.info |
|
250 | logf = log.info | |
247 | if category == 'success': |
|
251 | if category == 'success': | |
248 | logf = log.debug |
|
252 | logf = log.debug | |
249 |
|
253 | |||
250 | logf('Flash %s: %s', category, safe_message) |
|
254 | logf('Flash %s: %s', category, safe_message) | |
251 |
|
255 | |||
252 | _session_flash_messages(append=(category, safe_message)) |
|
256 | _session_flash_messages(append=(category, safe_message)) | |
253 |
|
257 | |||
254 |
|
258 | |||
255 | def pop_flash_messages(): |
|
259 | def pop_flash_messages(): | |
256 | """Return all accumulated messages and delete them from the session. |
|
260 | """Return all accumulated messages and delete them from the session. | |
257 |
|
261 | |||
258 | The return value is a list of ``Message`` objects. |
|
262 | The return value is a list of ``Message`` objects. | |
259 | """ |
|
263 | """ | |
260 | return [_Message(category, message) for category, message in _session_flash_messages(clear=True)] |
|
264 | return [_Message(category, message) for category, message in _session_flash_messages(clear=True)] | |
261 |
|
265 | |||
262 |
|
266 | |||
263 | # |
|
267 | # | |
264 | # Generic-ish formatting and markup |
|
268 | # Generic-ish formatting and markup | |
265 | # |
|
269 | # | |
266 |
|
270 | |||
267 | def js(value): |
|
271 | def js(value): | |
268 | """Convert Python value to the corresponding JavaScript representation. |
|
272 | """Convert Python value to the corresponding JavaScript representation. | |
269 |
|
273 | |||
270 | This is necessary to safely insert arbitrary values into HTML <script> |
|
274 | This is necessary to safely insert arbitrary values into HTML <script> | |
271 | sections e.g. using Mako template expression substitution. |
|
275 | sections e.g. using Mako template expression substitution. | |
272 |
|
276 | |||
273 | Note: Rather than using this function, it's preferable to avoid the |
|
277 | Note: Rather than using this function, it's preferable to avoid the | |
274 | insertion of values into HTML <script> sections altogether. Instead, |
|
278 | insertion of values into HTML <script> sections altogether. Instead, | |
275 | data should (to the extent possible) be passed to JavaScript using |
|
279 | data should (to the extent possible) be passed to JavaScript using | |
276 | data attributes or AJAX calls, eliminating the need for JS specific |
|
280 | data attributes or AJAX calls, eliminating the need for JS specific | |
277 | escaping. |
|
281 | escaping. | |
278 |
|
282 | |||
279 | Note: This is not safe for use in attributes (e.g. onclick), because |
|
283 | Note: This is not safe for use in attributes (e.g. onclick), because | |
280 | quotes are not escaped. |
|
284 | quotes are not escaped. | |
281 |
|
285 | |||
282 | Because the rules for parsing <script> varies between XHTML (where |
|
286 | Because the rules for parsing <script> varies between XHTML (where | |
283 | normal rules apply for any special characters) and HTML (where |
|
287 | normal rules apply for any special characters) and HTML (where | |
284 | entities are not interpreted, but the literal string "</script>" |
|
288 | entities are not interpreted, but the literal string "</script>" | |
285 | is forbidden), the function ensures that the result never contains |
|
289 | is forbidden), the function ensures that the result never contains | |
286 | '&', '<' and '>', thus making it safe in both those contexts (but |
|
290 | '&', '<' and '>', thus making it safe in both those contexts (but | |
287 | not in attributes). |
|
291 | not in attributes). | |
288 | """ |
|
292 | """ | |
289 | return literal( |
|
293 | return literal( | |
290 | ('(' + json.dumps(value) + ')') |
|
294 | ('(' + json.dumps(value) + ')') | |
291 | # In JSON, the following can only appear in string literals. |
|
295 | # In JSON, the following can only appear in string literals. | |
292 | .replace('&', r'\x26') |
|
296 | .replace('&', r'\x26') | |
293 | .replace('<', r'\x3c') |
|
297 | .replace('<', r'\x3c') | |
294 | .replace('>', r'\x3e') |
|
298 | .replace('>', r'\x3e') | |
295 | ) |
|
299 | ) | |
296 |
|
300 | |||
297 |
|
301 | |||
298 | def jshtml(val): |
|
302 | def jshtml(val): | |
299 | """HTML escapes a string value, then converts the resulting string |
|
303 | """HTML escapes a string value, then converts the resulting string | |
300 | to its corresponding JavaScript representation (see `js`). |
|
304 | to its corresponding JavaScript representation (see `js`). | |
301 |
|
305 | |||
302 | This is used when a plain-text string (possibly containing special |
|
306 | This is used when a plain-text string (possibly containing special | |
303 | HTML characters) will be used by a script in an HTML context (e.g. |
|
307 | HTML characters) will be used by a script in an HTML context (e.g. | |
304 | element.innerHTML or jQuery's 'html' method). |
|
308 | element.innerHTML or jQuery's 'html' method). | |
305 |
|
309 | |||
306 | If in doubt, err on the side of using `jshtml` over `js`, since it's |
|
310 | If in doubt, err on the side of using `jshtml` over `js`, since it's | |
307 | better to escape too much than too little. |
|
311 | better to escape too much than too little. | |
308 | """ |
|
312 | """ | |
309 | return js(escape(val)) |
|
313 | return js(escape(val)) | |
310 |
|
314 | |||
311 |
|
315 | |||
312 | url_re = re.compile(r'''\bhttps?://(?:[\da-zA-Z0-9@:.-]+)''' |
|
316 | url_re = re.compile(r'''\bhttps?://(?:[\da-zA-Z0-9@:.-]+)''' | |
313 | r'''(?:[/a-zA-Z0-9_=@#~&+%.,:;?!*()-]*[/a-zA-Z0-9_=@#~])?''') |
|
317 | r'''(?:[/a-zA-Z0-9_=@#~&+%.,:;?!*()-]*[/a-zA-Z0-9_=@#~])?''') | |
314 |
|
318 | |||
315 |
|
319 | |||
316 | # Must match regexp in kallithea/public/js/base.js MentionsAutoComplete() |
|
320 | # Must match regexp in kallithea/public/js/base.js MentionsAutoComplete() | |
317 | # Check char before @ - it must not look like we are in an email addresses. |
|
321 | # Check char before @ - it must not look like we are in an email addresses. | |
318 | # Matching is greedy so we don't have to look beyond the end. |
|
322 | # Matching is greedy so we don't have to look beyond the end. | |
319 | MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])') |
|
323 | MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])') | |
320 |
|
324 | |||
321 |
|
325 | |||
322 | def extract_mentioned_usernames(text): |
|
326 | def extract_mentioned_usernames(text): | |
323 | r""" |
|
327 | r""" | |
324 | Returns list of (possible) usernames @mentioned in given text. |
|
328 | Returns list of (possible) usernames @mentioned in given text. | |
325 |
|
329 | |||
326 | >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,') |
|
330 | >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,') | |
327 | ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz'] |
|
331 | ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz'] | |
328 | """ |
|
332 | """ | |
329 | return MENTIONS_REGEX.findall(text) |
|
333 | return MENTIONS_REGEX.findall(text) | |
330 |
|
334 | |||
331 |
|
335 | |||
332 | _URLIFY_RE = re.compile(r''' |
|
336 | _URLIFY_RE = re.compile(r''' | |
333 | # URL markup |
|
337 | # URL markup | |
334 | (?P<url>%s) | |
|
338 | (?P<url>%s) | | |
335 | # @mention markup |
|
339 | # @mention markup | |
336 | (?P<mention>%s) | |
|
340 | (?P<mention>%s) | | |
337 | # Changeset hash markup |
|
341 | # Changeset hash markup | |
338 | (?<!\w|[-_]) |
|
342 | (?<!\w|[-_]) | |
339 | (?P<hash>[0-9a-f]{12,40}) |
|
343 | (?P<hash>[0-9a-f]{12,40}) | |
340 | (?!\w|[-_]) | |
|
344 | (?!\w|[-_]) | | |
341 | # Markup of *bold text* |
|
345 | # Markup of *bold text* | |
342 | (?: |
|
346 | (?: | |
343 | (?:^|(?<=\s)) |
|
347 | (?:^|(?<=\s)) | |
344 | (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] ) |
|
348 | (?P<bold> [*] (?!\s) [^*\n]* (?<!\s) [*] ) | |
345 | (?![*\w]) |
|
349 | (?![*\w]) | |
346 | ) | |
|
350 | ) | | |
347 | # "Stylize" markup |
|
351 | # "Stylize" markup | |
348 | \[see\ \=>\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] | |
|
352 | \[see\ \=>\ *(?P<seen>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] | | |
349 | \[license\ \=>\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] | |
|
353 | \[license\ \=>\ *(?P<license>[a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\] | | |
350 | \[(?P<tagtype>requires|recommends|conflicts|base)\ \=>\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] | |
|
354 | \[(?P<tagtype>requires|recommends|conflicts|base)\ \=>\ *(?P<tagvalue>[a-zA-Z0-9\-\/]*)\] | | |
351 | \[(?:lang|language)\ \=>\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] | |
|
355 | \[(?:lang|language)\ \=>\ *(?P<lang>[a-zA-Z\-\/\#\+]*)\] | | |
352 | \[(?P<tag>[a-z]+)\] |
|
356 | \[(?P<tag>[a-z]+)\] | |
353 | ''' % (url_re.pattern, MENTIONS_REGEX.pattern), |
|
357 | ''' % (url_re.pattern, MENTIONS_REGEX.pattern), | |
354 | re.VERBOSE | re.MULTILINE | re.IGNORECASE) |
|
358 | re.VERBOSE | re.MULTILINE | re.IGNORECASE) | |
355 |
|
359 | |||
356 |
|
360 | |||
357 | def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate): |
|
361 | def urlify_text(s, repo_name=None, link_=None, truncate=None, stylize=False, truncatef=truncate): | |
358 | """ |
|
362 | """ | |
359 | Parses given text message and make literal html with markup. |
|
363 | Parses given text message and make literal html with markup. | |
360 | The text will be truncated to the specified length. |
|
364 | The text will be truncated to the specified length. | |
361 | Hashes are turned into changeset links to specified repository. |
|
365 | Hashes are turned into changeset links to specified repository. | |
362 | URLs links to what they say. |
|
366 | URLs links to what they say. | |
363 | Issues are linked to given issue-server. |
|
367 | Issues are linked to given issue-server. | |
364 | If link_ is provided, all text not already linking somewhere will link there. |
|
368 | If link_ is provided, all text not already linking somewhere will link there. | |
365 | >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>") |
|
369 | >>> urlify_text("Urlify http://example.com/ and 'https://example.com' *and* <b>markup/b>") | |
366 | literal('Urlify <a href="http://example.com/">http://example.com/</a> and '<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> <b>markup/b>') |
|
370 | literal('Urlify <a href="http://example.com/">http://example.com/</a> and '<a href="https://example.com&apos">https://example.com&apos</a>; <b>*and*</b> <b>markup/b>') | |
367 | """ |
|
371 | """ | |
368 |
|
372 | |||
369 | def _replace(match_obj): |
|
373 | def _replace(match_obj): | |
370 | match_url = match_obj.group('url') |
|
374 | match_url = match_obj.group('url') | |
371 | if match_url is not None: |
|
375 | if match_url is not None: | |
372 | return '<a href="%(url)s">%(url)s</a>' % {'url': match_url} |
|
376 | return '<a href="%(url)s">%(url)s</a>' % {'url': match_url} | |
373 | mention = match_obj.group('mention') |
|
377 | mention = match_obj.group('mention') | |
374 | if mention is not None: |
|
378 | if mention is not None: | |
375 | return '<b>%s</b>' % mention |
|
379 | return '<b>%s</b>' % mention | |
376 | hash_ = match_obj.group('hash') |
|
380 | hash_ = match_obj.group('hash') | |
377 | if hash_ is not None and repo_name is not None: |
|
381 | if hash_ is not None and repo_name is not None: | |
378 | return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % { |
|
382 | return '<a class="changeset_hash" href="%(url)s">%(hash)s</a>' % { | |
379 | 'url': url('changeset_home', repo_name=repo_name, revision=hash_), |
|
383 | 'url': url('changeset_home', repo_name=repo_name, revision=hash_), | |
380 | 'hash': hash_, |
|
384 | 'hash': hash_, | |
381 | } |
|
385 | } | |
382 | bold = match_obj.group('bold') |
|
386 | bold = match_obj.group('bold') | |
383 | if bold is not None: |
|
387 | if bold is not None: | |
384 | return '<b>*%s*</b>' % _urlify(bold[1:-1]) |
|
388 | return '<b>*%s*</b>' % _urlify(bold[1:-1]) | |
385 | if stylize: |
|
389 | if stylize: | |
386 | seen = match_obj.group('seen') |
|
390 | seen = match_obj.group('seen') | |
387 | if seen: |
|
391 | if seen: | |
388 | return '<div class="label label-meta" data-tag="see">see => %s</div>' % seen |
|
392 | return '<div class="label label-meta" data-tag="see">see => %s</div>' % seen | |
389 | license = match_obj.group('license') |
|
393 | license = match_obj.group('license') | |
390 | if license: |
|
394 | if license: | |
391 | return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license) |
|
395 | return '<div class="label label-meta" data-tag="license"><a href="http://www.opensource.org/licenses/%s">%s</a></div>' % (license, license) | |
392 | tagtype = match_obj.group('tagtype') |
|
396 | tagtype = match_obj.group('tagtype') | |
393 | if tagtype: |
|
397 | if tagtype: | |
394 | tagvalue = match_obj.group('tagvalue') |
|
398 | tagvalue = match_obj.group('tagvalue') | |
395 | return '<div class="label label-meta" data-tag="%s">%s => <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue) |
|
399 | return '<div class="label label-meta" data-tag="%s">%s => <a href="/%s">%s</a></div>' % (tagtype, tagtype, tagvalue, tagvalue) | |
396 | lang = match_obj.group('lang') |
|
400 | lang = match_obj.group('lang') | |
397 | if lang: |
|
401 | if lang: | |
398 | return '<div class="label label-meta" data-tag="lang">%s</div>' % lang |
|
402 | return '<div class="label label-meta" data-tag="lang">%s</div>' % lang | |
399 | tag = match_obj.group('tag') |
|
403 | tag = match_obj.group('tag') | |
400 | if tag: |
|
404 | if tag: | |
401 | return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag) |
|
405 | return '<div class="label label-meta" data-tag="%s">%s</div>' % (tag, tag) | |
402 | return match_obj.group(0) |
|
406 | return match_obj.group(0) | |
403 |
|
407 | |||
404 | def _urlify(s): |
|
408 | def _urlify(s): | |
405 | """ |
|
409 | """ | |
406 | Extract urls from text and make html links out of them |
|
410 | Extract urls from text and make html links out of them | |
407 | """ |
|
411 | """ | |
408 | return _URLIFY_RE.sub(_replace, s) |
|
412 | return _URLIFY_RE.sub(_replace, s) | |
409 |
|
413 | |||
410 | if truncate is None: |
|
414 | if truncate is None: | |
411 | s = s.rstrip() |
|
415 | s = s.rstrip() | |
412 | else: |
|
416 | else: | |
413 | s = truncatef(s, truncate, whole_word=True) |
|
417 | s = truncatef(s, truncate, whole_word=True) | |
414 | s = html_escape(s) |
|
418 | s = html_escape(s) | |
415 | s = _urlify(s) |
|
419 | s = _urlify(s) | |
416 | if repo_name is not None: |
|
420 | if repo_name is not None: | |
417 | s = _urlify_issues(s, repo_name) |
|
421 | s = _urlify_issues(s, repo_name) | |
418 | if link_ is not None: |
|
422 | if link_ is not None: | |
419 | # make href around everything that isn't a href already |
|
423 | # make href around everything that isn't a href already | |
420 | s = _linkify_others(s, link_) |
|
424 | s = _linkify_others(s, link_) | |
421 | s = s.replace('\r\n', '<br/>').replace('\n', '<br/>') |
|
425 | s = s.replace('\r\n', '<br/>').replace('\n', '<br/>') | |
422 | # Turn HTML5 into more valid HTML4 as required by some mail readers. |
|
426 | # Turn HTML5 into more valid HTML4 as required by some mail readers. | |
423 | # (This is not done in one step in html_escape, because character codes like |
|
427 | # (This is not done in one step in html_escape, because character codes like | |
424 | # { risk to be seen as an issue reference due to the presence of '#'.) |
|
428 | # { risk to be seen as an issue reference due to the presence of '#'.) | |
425 | s = s.replace("'", "'") |
|
429 | s = s.replace("'", "'") | |
426 | return literal(s) |
|
430 | return literal(s) | |
427 |
|
431 | |||
428 |
|
432 | |||
429 | def _linkify_others(t, l): |
|
433 | def _linkify_others(t, l): | |
430 | """Add a default link to html with links. |
|
434 | """Add a default link to html with links. | |
431 | HTML doesn't allow nesting of links, so the outer link must be broken up |
|
435 | HTML doesn't allow nesting of links, so the outer link must be broken up | |
432 | in pieces and give space for other links. |
|
436 | in pieces and give space for other links. | |
433 | """ |
|
437 | """ | |
434 | urls = re.compile(r'(\<a.*?\<\/a\>)',) |
|
438 | urls = re.compile(r'(\<a.*?\<\/a\>)',) | |
435 | links = [] |
|
439 | links = [] | |
436 | for e in urls.split(t): |
|
440 | for e in urls.split(t): | |
437 | if e.strip() and not urls.match(e): |
|
441 | if e.strip() and not urls.match(e): | |
438 | links.append('<a class="message-link" href="%s">%s</a>' % (l, e)) |
|
442 | links.append('<a class="message-link" href="%s">%s</a>' % (l, e)) | |
439 | else: |
|
443 | else: | |
440 | links.append(e) |
|
444 | links.append(e) | |
441 | return ''.join(links) |
|
445 | return ''.join(links) | |
442 |
|
446 | |||
443 |
|
447 | |||
444 | # Global variable that will hold the actual _urlify_issues function body. |
|
448 | # Global variable that will hold the actual _urlify_issues function body. | |
445 | # Will be set on first use when the global configuration has been read. |
|
449 | # Will be set on first use when the global configuration has been read. | |
446 | _urlify_issues_f = None |
|
450 | _urlify_issues_f = None | |
447 |
|
451 | |||
448 |
|
452 | |||
449 | def _urlify_issues(newtext, repo_name): |
|
453 | def _urlify_issues(newtext, repo_name): | |
450 | """Urlify issue references according to .ini configuration""" |
|
454 | """Urlify issue references according to .ini configuration""" | |
451 | global _urlify_issues_f |
|
455 | global _urlify_issues_f | |
452 | if _urlify_issues_f is None: |
|
456 | if _urlify_issues_f is None: | |
453 | assert kallithea.CONFIG['sqlalchemy.url'] # make sure config has been loaded |
|
457 | assert kallithea.CONFIG['sqlalchemy.url'] # make sure config has been loaded | |
454 |
|
458 | |||
455 | # Build chain of urlify functions, starting with not doing any transformation |
|
459 | # Build chain of urlify functions, starting with not doing any transformation | |
456 | def tmp_urlify_issues_f(s): |
|
460 | def tmp_urlify_issues_f(s): | |
457 | return s |
|
461 | return s | |
458 |
|
462 | |||
459 | issue_pat_re = re.compile(r'issue_pat(.*)') |
|
463 | issue_pat_re = re.compile(r'issue_pat(.*)') | |
460 | for k in kallithea.CONFIG: |
|
464 | for k in kallithea.CONFIG: | |
461 | # Find all issue_pat* settings that also have corresponding server_link and prefix configuration |
|
465 | # Find all issue_pat* settings that also have corresponding server_link and prefix configuration | |
462 | m = issue_pat_re.match(k) |
|
466 | m = issue_pat_re.match(k) | |
463 | if m is None: |
|
467 | if m is None: | |
464 | continue |
|
468 | continue | |
465 | suffix = m.group(1) |
|
469 | suffix = m.group(1) | |
466 | issue_pat = kallithea.CONFIG.get(k) |
|
470 | issue_pat = kallithea.CONFIG.get(k) | |
467 | issue_server_link = kallithea.CONFIG.get('issue_server_link%s' % suffix) |
|
471 | issue_server_link = kallithea.CONFIG.get('issue_server_link%s' % suffix) | |
468 | issue_sub = kallithea.CONFIG.get('issue_sub%s' % suffix) |
|
472 | issue_sub = kallithea.CONFIG.get('issue_sub%s' % suffix) | |
469 | issue_prefix = kallithea.CONFIG.get('issue_prefix%s' % suffix) |
|
473 | issue_prefix = kallithea.CONFIG.get('issue_prefix%s' % suffix) | |
470 | if issue_prefix: |
|
474 | if issue_prefix: | |
471 | log.error('found unsupported issue_prefix%s = %r - use issue_sub%s instead', suffix, issue_prefix, suffix) |
|
475 | log.error('found unsupported issue_prefix%s = %r - use issue_sub%s instead', suffix, issue_prefix, suffix) | |
472 | if not issue_pat: |
|
476 | if not issue_pat: | |
473 | log.error('skipping incomplete issue pattern %r: it needs a regexp', k) |
|
477 | log.error('skipping incomplete issue pattern %r: it needs a regexp', k) | |
474 | continue |
|
478 | continue | |
475 | if not issue_server_link: |
|
479 | if not issue_server_link: | |
476 | log.error('skipping incomplete issue pattern %r: it needs issue_server_link%s', k, suffix) |
|
480 | log.error('skipping incomplete issue pattern %r: it needs issue_server_link%s', k, suffix) | |
477 | continue |
|
481 | continue | |
478 | if issue_sub is None: # issue_sub can be empty but should be present |
|
482 | if issue_sub is None: # issue_sub can be empty but should be present | |
479 | log.error('skipping incomplete issue pattern %r: it needs (a potentially empty) issue_sub%s', k, suffix) |
|
483 | log.error('skipping incomplete issue pattern %r: it needs (a potentially empty) issue_sub%s', k, suffix) | |
480 | continue |
|
484 | continue | |
481 |
|
485 | |||
482 | # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexpes) are bound |
|
486 | # Wrap tmp_urlify_issues_f with substitution of this pattern, while making sure all loop variables (and compiled regexpes) are bound | |
483 | try: |
|
487 | try: | |
484 | issue_re = re.compile(issue_pat) |
|
488 | issue_re = re.compile(issue_pat) | |
485 | except re.error as e: |
|
489 | except re.error as e: | |
486 | log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', k, issue_pat, issue_server_link, issue_sub, str(e)) |
|
490 | log.error('skipping invalid issue pattern %r: %r -> %r %r. Error: %s', k, issue_pat, issue_server_link, issue_sub, str(e)) | |
487 | continue |
|
491 | continue | |
488 |
|
492 | |||
489 | log.debug('issue pattern %r: %r -> %r %r', k, issue_pat, issue_server_link, issue_sub) |
|
493 | log.debug('issue pattern %r: %r -> %r %r', k, issue_pat, issue_server_link, issue_sub) | |
490 |
|
494 | |||
491 | def issues_replace(match_obj, |
|
495 | def issues_replace(match_obj, | |
492 | issue_server_link=issue_server_link, issue_sub=issue_sub): |
|
496 | issue_server_link=issue_server_link, issue_sub=issue_sub): | |
493 | try: |
|
497 | try: | |
494 | issue_url = match_obj.expand(issue_server_link) |
|
498 | issue_url = match_obj.expand(issue_server_link) | |
495 | except (IndexError, re.error) as e: |
|
499 | except (IndexError, re.error) as e: | |
496 | log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e)) |
|
500 | log.error('invalid issue_url setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e)) | |
497 | issue_url = issue_server_link |
|
501 | issue_url = issue_server_link | |
498 | issue_url = issue_url.replace('{repo}', repo_name) |
|
502 | issue_url = issue_url.replace('{repo}', repo_name) | |
499 | issue_url = issue_url.replace('{repo_name}', repo_name.split(kallithea.URL_SEP)[-1]) |
|
503 | issue_url = issue_url.replace('{repo_name}', repo_name.split(kallithea.URL_SEP)[-1]) | |
500 | # if issue_sub is empty use the matched issue reference verbatim |
|
504 | # if issue_sub is empty use the matched issue reference verbatim | |
501 | if not issue_sub: |
|
505 | if not issue_sub: | |
502 | issue_text = match_obj.group() |
|
506 | issue_text = match_obj.group() | |
503 | else: |
|
507 | else: | |
504 | try: |
|
508 | try: | |
505 | issue_text = match_obj.expand(issue_sub) |
|
509 | issue_text = match_obj.expand(issue_sub) | |
506 | except (IndexError, re.error) as e: |
|
510 | except (IndexError, re.error) as e: | |
507 | log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e)) |
|
511 | log.error('invalid issue_sub setting %r -> %r %r. Error: %s', issue_pat, issue_server_link, issue_sub, str(e)) | |
508 | issue_text = match_obj.group() |
|
512 | issue_text = match_obj.group() | |
509 |
|
513 | |||
510 | return ( |
|
514 | return ( | |
511 | '<a class="issue-tracker-link" href="%(url)s">' |
|
515 | '<a class="issue-tracker-link" href="%(url)s">' | |
512 | '%(text)s' |
|
516 | '%(text)s' | |
513 | '</a>' |
|
517 | '</a>' | |
514 | ) % { |
|
518 | ) % { | |
515 | 'url': issue_url, |
|
519 | 'url': issue_url, | |
516 | 'text': issue_text, |
|
520 | 'text': issue_text, | |
517 | } |
|
521 | } | |
518 |
|
522 | |||
519 | def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f): |
|
523 | def tmp_urlify_issues_f(s, issue_re=issue_re, issues_replace=issues_replace, chain_f=tmp_urlify_issues_f): | |
520 | return issue_re.sub(issues_replace, chain_f(s)) |
|
524 | return issue_re.sub(issues_replace, chain_f(s)) | |
521 |
|
525 | |||
522 | # Set tmp function globally - atomically |
|
526 | # Set tmp function globally - atomically | |
523 | _urlify_issues_f = tmp_urlify_issues_f |
|
527 | _urlify_issues_f = tmp_urlify_issues_f | |
524 |
|
528 | |||
525 | return _urlify_issues_f(newtext) |
|
529 | return _urlify_issues_f(newtext) | |
526 |
|
530 | |||
527 |
|
531 | |||
528 | def render_w_mentions(source, repo_name=None): |
|
532 | def render_w_mentions(source, repo_name=None): | |
529 | """ |
|
533 | """ | |
530 | Render plain text with revision hashes and issue references urlified |
|
534 | Render plain text with revision hashes and issue references urlified | |
531 | and with @mention highlighting. |
|
535 | and with @mention highlighting. | |
532 | """ |
|
536 | """ | |
533 | s = urlify_text(source, repo_name=repo_name) |
|
537 | s = urlify_text(source, repo_name=repo_name) | |
534 | return literal('<div class="formatted-fixed">%s</div>' % s) |
|
538 | return literal('<div class="formatted-fixed">%s</div>' % s) | |
|
539 | ||||
|
540 | ||||
|
541 | # | |||
|
542 | # Simple filters | |||
|
543 | # | |||
|
544 | ||||
|
545 | def shorter(s, size=20, firstline=False, postfix='...'): | |||
|
546 | """Truncate s to size, including the postfix string if truncating. | |||
|
547 | If firstline, truncate at newline. | |||
|
548 | """ | |||
|
549 | if firstline: | |||
|
550 | s = s.split('\n', 1)[0].rstrip() | |||
|
551 | if len(s) > size: | |||
|
552 | return s[:size - len(postfix)] + postfix | |||
|
553 | return s | |||
|
554 | ||||
|
555 | ||||
|
556 | def age(prevdate, show_short_version=False, now=None): | |||
|
557 | """ | |||
|
558 | turns a datetime into an age string. | |||
|
559 | If show_short_version is True, then it will generate a not so accurate but shorter string, | |||
|
560 | example: 2days ago, instead of 2 days and 23 hours ago. | |||
|
561 | ||||
|
562 | :param prevdate: datetime object | |||
|
563 | :param show_short_version: if it should approximate the date and return a shorter string | |||
|
564 | :rtype: str | |||
|
565 | :returns: str words describing age | |||
|
566 | """ | |||
|
567 | now = now or datetime.datetime.now() | |||
|
568 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] | |||
|
569 | deltas = {} | |||
|
570 | future = False | |||
|
571 | ||||
|
572 | if prevdate > now: | |||
|
573 | now, prevdate = prevdate, now | |||
|
574 | future = True | |||
|
575 | if future: | |||
|
576 | prevdate = prevdate.replace(microsecond=0) | |||
|
577 | # Get date parts deltas | |||
|
578 | for part in order: | |||
|
579 | d = relativedelta.relativedelta(now, prevdate) | |||
|
580 | deltas[part] = getattr(d, part + 's') | |||
|
581 | ||||
|
582 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, | |||
|
583 | # not 1 hour, -59 minutes and -59 seconds) | |||
|
584 | for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours | |||
|
585 | part = order[num] | |||
|
586 | carry_part = order[num - 1] | |||
|
587 | ||||
|
588 | if deltas[part] < 0: | |||
|
589 | deltas[part] += length | |||
|
590 | deltas[carry_part] -= 1 | |||
|
591 | ||||
|
592 | # Same thing for days except that the increment depends on the (variable) | |||
|
593 | # number of days in the month | |||
|
594 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] | |||
|
595 | if deltas['day'] < 0: | |||
|
596 | if prevdate.month == 2 and (prevdate.year % 4 == 0 and | |||
|
597 | (prevdate.year % 100 != 0 or prevdate.year % 400 == 0) | |||
|
598 | ): | |||
|
599 | deltas['day'] += 29 | |||
|
600 | else: | |||
|
601 | deltas['day'] += month_lengths[prevdate.month - 1] | |||
|
602 | ||||
|
603 | deltas['month'] -= 1 | |||
|
604 | ||||
|
605 | if deltas['month'] < 0: | |||
|
606 | deltas['month'] += 12 | |||
|
607 | deltas['year'] -= 1 | |||
|
608 | ||||
|
609 | # In short version, we want nicer handling of ages of more than a year | |||
|
610 | if show_short_version: | |||
|
611 | if deltas['year'] == 1: | |||
|
612 | # ages between 1 and 2 years: show as months | |||
|
613 | deltas['month'] += 12 | |||
|
614 | deltas['year'] = 0 | |||
|
615 | if deltas['year'] >= 2: | |||
|
616 | # ages 2+ years: round | |||
|
617 | if deltas['month'] > 6: | |||
|
618 | deltas['year'] += 1 | |||
|
619 | deltas['month'] = 0 | |||
|
620 | ||||
|
621 | # Format the result | |||
|
622 | fmt_funcs = { | |||
|
623 | 'year': lambda d: ungettext('%d year', '%d years', d) % d, | |||
|
624 | 'month': lambda d: ungettext('%d month', '%d months', d) % d, | |||
|
625 | 'day': lambda d: ungettext('%d day', '%d days', d) % d, | |||
|
626 | 'hour': lambda d: ungettext('%d hour', '%d hours', d) % d, | |||
|
627 | 'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d, | |||
|
628 | 'second': lambda d: ungettext('%d second', '%d seconds', d) % d, | |||
|
629 | } | |||
|
630 | ||||
|
631 | for i, part in enumerate(order): | |||
|
632 | value = deltas[part] | |||
|
633 | if value == 0: | |||
|
634 | continue | |||
|
635 | ||||
|
636 | if i < 5: | |||
|
637 | sub_part = order[i + 1] | |||
|
638 | sub_value = deltas[sub_part] | |||
|
639 | else: | |||
|
640 | sub_value = 0 | |||
|
641 | ||||
|
642 | if sub_value == 0 or show_short_version: | |||
|
643 | if future: | |||
|
644 | return _('in %s') % fmt_funcs[part](value) | |||
|
645 | else: | |||
|
646 | return _('%s ago') % fmt_funcs[part](value) | |||
|
647 | if future: | |||
|
648 | return _('in %s and %s') % (fmt_funcs[part](value), | |||
|
649 | fmt_funcs[sub_part](sub_value)) | |||
|
650 | else: | |||
|
651 | return _('%s and %s ago') % (fmt_funcs[part](value), | |||
|
652 | fmt_funcs[sub_part](sub_value)) | |||
|
653 | ||||
|
654 | return _('just now') | |||
|
655 | ||||
|
656 | ||||
|
657 | def fmt_date(date): | |||
|
658 | if date: | |||
|
659 | return date.strftime("%Y-%m-%d %H:%M:%S") | |||
|
660 | return "" | |||
|
661 | ||||
|
662 | ||||
|
663 | def capitalize(x): | |||
|
664 | return x.capitalize() | |||
|
665 | ||||
|
666 | ||||
|
667 | def short_id(x): | |||
|
668 | return x[:12] |
@@ -1,270 +1,269 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.model.comment |
|
15 | kallithea.model.comment | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | comments model for Kallithea |
|
18 | comments model for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Nov 11, 2011 |
|
22 | :created_on: Nov 11, 2011 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import logging |
|
28 | import logging | |
29 | from collections import defaultdict |
|
29 | from collections import defaultdict | |
30 |
|
30 | |||
31 | from kallithea.lib import webutils |
|
31 | from kallithea.lib import webutils | |
32 | from kallithea.lib.utils import extract_mentioned_users |
|
32 | from kallithea.lib.utils import extract_mentioned_users | |
33 | from kallithea.lib.utils2 import shorter |
|
|||
34 | from kallithea.model import db, meta, notification |
|
33 | from kallithea.model import db, meta, notification | |
35 |
|
34 | |||
36 |
|
35 | |||
37 | log = logging.getLogger(__name__) |
|
36 | log = logging.getLogger(__name__) | |
38 |
|
37 | |||
39 |
|
38 | |||
40 | def _list_changeset_commenters(revision): |
|
39 | def _list_changeset_commenters(revision): | |
41 | return (meta.Session().query(db.User) |
|
40 | return (meta.Session().query(db.User) | |
42 | .join(db.ChangesetComment.author) |
|
41 | .join(db.ChangesetComment.author) | |
43 | .filter(db.ChangesetComment.revision == revision) |
|
42 | .filter(db.ChangesetComment.revision == revision) | |
44 | .all()) |
|
43 | .all()) | |
45 |
|
44 | |||
46 | def _list_pull_request_commenters(pull_request): |
|
45 | def _list_pull_request_commenters(pull_request): | |
47 | return (meta.Session().query(db.User) |
|
46 | return (meta.Session().query(db.User) | |
48 | .join(db.ChangesetComment.author) |
|
47 | .join(db.ChangesetComment.author) | |
49 | .filter(db.ChangesetComment.pull_request_id == pull_request.pull_request_id) |
|
48 | .filter(db.ChangesetComment.pull_request_id == pull_request.pull_request_id) | |
50 | .all()) |
|
49 | .all()) | |
51 |
|
50 | |||
52 |
|
51 | |||
53 | class ChangesetCommentsModel(object): |
|
52 | class ChangesetCommentsModel(object): | |
54 |
|
53 | |||
55 | def create_notification(self, repo, comment, author, comment_text, |
|
54 | def create_notification(self, repo, comment, author, comment_text, | |
56 | line_no=None, revision=None, pull_request=None, |
|
55 | line_no=None, revision=None, pull_request=None, | |
57 | status_change=None, closing_pr=False): |
|
56 | status_change=None, closing_pr=False): | |
58 |
|
57 | |||
59 | # changeset |
|
58 | # changeset | |
60 | if revision: |
|
59 | if revision: | |
61 | notification_type = notification.NotificationModel.TYPE_CHANGESET_COMMENT |
|
60 | notification_type = notification.NotificationModel.TYPE_CHANGESET_COMMENT | |
62 | cs = repo.scm_instance.get_changeset(revision) |
|
61 | cs = repo.scm_instance.get_changeset(revision) | |
63 |
|
62 | |||
64 | threading = ['%s-rev-%s@%s' % (repo.repo_name, revision, webutils.canonical_hostname())] |
|
63 | threading = ['%s-rev-%s@%s' % (repo.repo_name, revision, webutils.canonical_hostname())] | |
65 | if line_no: # TODO: url to file _and_ line number |
|
64 | if line_no: # TODO: url to file _and_ line number | |
66 | threading.append('%s-rev-%s-line-%s@%s' % (repo.repo_name, revision, line_no, |
|
65 | threading.append('%s-rev-%s-line-%s@%s' % (repo.repo_name, revision, line_no, | |
67 | webutils.canonical_hostname())) |
|
66 | webutils.canonical_hostname())) | |
68 | comment_url = webutils.canonical_url('changeset_home', |
|
67 | comment_url = webutils.canonical_url('changeset_home', | |
69 | repo_name=repo.repo_name, |
|
68 | repo_name=repo.repo_name, | |
70 | revision=revision, |
|
69 | revision=revision, | |
71 | anchor='comment-%s' % comment.comment_id) |
|
70 | anchor='comment-%s' % comment.comment_id) | |
72 | # get the current participants of this changeset |
|
71 | # get the current participants of this changeset | |
73 | recipients = _list_changeset_commenters(revision) |
|
72 | recipients = _list_changeset_commenters(revision) | |
74 | # add changeset author if it's known locally |
|
73 | # add changeset author if it's known locally | |
75 | cs_author = db.User.get_from_cs_author(cs.author) |
|
74 | cs_author = db.User.get_from_cs_author(cs.author) | |
76 | if not cs_author: |
|
75 | if not cs_author: | |
77 | # use repo owner if we cannot extract the author correctly |
|
76 | # use repo owner if we cannot extract the author correctly | |
78 | # FIXME: just use committer name even if not a user |
|
77 | # FIXME: just use committer name even if not a user | |
79 | cs_author = repo.owner |
|
78 | cs_author = repo.owner | |
80 | recipients.append(cs_author) |
|
79 | recipients.append(cs_author) | |
81 |
|
80 | |||
82 | email_kwargs = { |
|
81 | email_kwargs = { | |
83 | 'status_change': status_change, |
|
82 | 'status_change': status_change, | |
84 | 'cs_comment_user': author.full_name_and_username, |
|
83 | 'cs_comment_user': author.full_name_and_username, | |
85 | 'cs_target_repo': webutils.canonical_url('summary_home', repo_name=repo.repo_name), |
|
84 | 'cs_target_repo': webutils.canonical_url('summary_home', repo_name=repo.repo_name), | |
86 | 'cs_comment_url': comment_url, |
|
85 | 'cs_comment_url': comment_url, | |
87 | 'cs_url': webutils.canonical_url('changeset_home', repo_name=repo.repo_name, revision=revision), |
|
86 | 'cs_url': webutils.canonical_url('changeset_home', repo_name=repo.repo_name, revision=revision), | |
88 | 'message': cs.message, |
|
87 | 'message': cs.message, | |
89 | 'message_short': shorter(cs.message, 50, firstline=True), |
|
88 | 'message_short': webutils.shorter(cs.message, 50, firstline=True), | |
90 | 'cs_author': cs_author, |
|
89 | 'cs_author': cs_author, | |
91 | 'cs_author_username': cs_author.username, |
|
90 | 'cs_author_username': cs_author.username, | |
92 | 'repo_name': repo.repo_name, |
|
91 | 'repo_name': repo.repo_name, | |
93 | 'short_id': revision[:12], |
|
92 | 'short_id': revision[:12], | |
94 | 'branch': cs.branch, |
|
93 | 'branch': cs.branch, | |
95 | 'threading': threading, |
|
94 | 'threading': threading, | |
96 | } |
|
95 | } | |
97 | # pull request |
|
96 | # pull request | |
98 | elif pull_request: |
|
97 | elif pull_request: | |
99 | notification_type = notification.NotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
98 | notification_type = notification.NotificationModel.TYPE_PULL_REQUEST_COMMENT | |
100 | _org_ref_type, org_ref_name, _org_rev = comment.pull_request.org_ref.split(':') |
|
99 | _org_ref_type, org_ref_name, _org_rev = comment.pull_request.org_ref.split(':') | |
101 | _other_ref_type, other_ref_name, _other_rev = comment.pull_request.other_ref.split(':') |
|
100 | _other_ref_type, other_ref_name, _other_rev = comment.pull_request.other_ref.split(':') | |
102 | threading = ['%s-pr-%s@%s' % (pull_request.other_repo.repo_name, |
|
101 | threading = ['%s-pr-%s@%s' % (pull_request.other_repo.repo_name, | |
103 | pull_request.pull_request_id, |
|
102 | pull_request.pull_request_id, | |
104 | webutils.canonical_hostname())] |
|
103 | webutils.canonical_hostname())] | |
105 | if line_no: # TODO: url to file _and_ line number |
|
104 | if line_no: # TODO: url to file _and_ line number | |
106 | threading.append('%s-pr-%s-line-%s@%s' % (pull_request.other_repo.repo_name, |
|
105 | threading.append('%s-pr-%s-line-%s@%s' % (pull_request.other_repo.repo_name, | |
107 | pull_request.pull_request_id, line_no, |
|
106 | pull_request.pull_request_id, line_no, | |
108 | webutils.canonical_hostname())) |
|
107 | webutils.canonical_hostname())) | |
109 | comment_url = pull_request.url(canonical=True, |
|
108 | comment_url = pull_request.url(canonical=True, | |
110 | anchor='comment-%s' % comment.comment_id) |
|
109 | anchor='comment-%s' % comment.comment_id) | |
111 | # get the current participants of this pull request |
|
110 | # get the current participants of this pull request | |
112 | recipients = _list_pull_request_commenters(pull_request) |
|
111 | recipients = _list_pull_request_commenters(pull_request) | |
113 | recipients.append(pull_request.owner) |
|
112 | recipients.append(pull_request.owner) | |
114 | recipients += pull_request.get_reviewer_users() |
|
113 | recipients += pull_request.get_reviewer_users() | |
115 |
|
114 | |||
116 | # set some variables for email notification |
|
115 | # set some variables for email notification | |
117 | email_kwargs = { |
|
116 | email_kwargs = { | |
118 | 'pr_title': pull_request.title, |
|
117 | 'pr_title': pull_request.title, | |
119 | 'pr_title_short': shorter(pull_request.title, 50), |
|
118 | 'pr_title_short': webutils.shorter(pull_request.title, 50), | |
120 | 'pr_nice_id': pull_request.nice_id(), |
|
119 | 'pr_nice_id': pull_request.nice_id(), | |
121 | 'status_change': status_change, |
|
120 | 'status_change': status_change, | |
122 | 'closing_pr': closing_pr, |
|
121 | 'closing_pr': closing_pr, | |
123 | 'pr_comment_url': comment_url, |
|
122 | 'pr_comment_url': comment_url, | |
124 | 'pr_url': pull_request.url(canonical=True), |
|
123 | 'pr_url': pull_request.url(canonical=True), | |
125 | 'pr_comment_user': author.full_name_and_username, |
|
124 | 'pr_comment_user': author.full_name_and_username, | |
126 | 'pr_target_repo': webutils.canonical_url('summary_home', |
|
125 | 'pr_target_repo': webutils.canonical_url('summary_home', | |
127 | repo_name=pull_request.other_repo.repo_name), |
|
126 | repo_name=pull_request.other_repo.repo_name), | |
128 | 'pr_target_branch': other_ref_name, |
|
127 | 'pr_target_branch': other_ref_name, | |
129 | 'pr_source_repo': webutils.canonical_url('summary_home', |
|
128 | 'pr_source_repo': webutils.canonical_url('summary_home', | |
130 | repo_name=pull_request.org_repo.repo_name), |
|
129 | repo_name=pull_request.org_repo.repo_name), | |
131 | 'pr_source_branch': org_ref_name, |
|
130 | 'pr_source_branch': org_ref_name, | |
132 | 'pr_owner': pull_request.owner, |
|
131 | 'pr_owner': pull_request.owner, | |
133 | 'pr_owner_username': pull_request.owner.username, |
|
132 | 'pr_owner_username': pull_request.owner.username, | |
134 | 'repo_name': pull_request.other_repo.repo_name, |
|
133 | 'repo_name': pull_request.other_repo.repo_name, | |
135 | 'threading': threading, |
|
134 | 'threading': threading, | |
136 | } |
|
135 | } | |
137 |
|
136 | |||
138 | email_kwargs['is_mention'] = False |
|
137 | email_kwargs['is_mention'] = False | |
139 | # create notification objects, and emails |
|
138 | # create notification objects, and emails | |
140 | notification.NotificationModel().create( |
|
139 | notification.NotificationModel().create( | |
141 | created_by=author, body=comment_text, |
|
140 | created_by=author, body=comment_text, | |
142 | recipients=recipients, type_=notification_type, |
|
141 | recipients=recipients, type_=notification_type, | |
143 | email_kwargs=email_kwargs, |
|
142 | email_kwargs=email_kwargs, | |
144 | ) |
|
143 | ) | |
145 |
|
144 | |||
146 | mention_recipients = extract_mentioned_users(comment_text).difference(recipients) |
|
145 | mention_recipients = extract_mentioned_users(comment_text).difference(recipients) | |
147 | if mention_recipients: |
|
146 | if mention_recipients: | |
148 | email_kwargs['is_mention'] = True |
|
147 | email_kwargs['is_mention'] = True | |
149 | notification.NotificationModel().create( |
|
148 | notification.NotificationModel().create( | |
150 | created_by=author, body=comment_text, |
|
149 | created_by=author, body=comment_text, | |
151 | recipients=mention_recipients, |
|
150 | recipients=mention_recipients, | |
152 | type_=notification_type, |
|
151 | type_=notification_type, | |
153 | email_kwargs=email_kwargs |
|
152 | email_kwargs=email_kwargs | |
154 | ) |
|
153 | ) | |
155 |
|
154 | |||
156 |
|
155 | |||
157 | def create(self, text, repo, author, revision=None, pull_request=None, |
|
156 | def create(self, text, repo, author, revision=None, pull_request=None, | |
158 | f_path=None, line_no=None, status_change=None, closing_pr=False, |
|
157 | f_path=None, line_no=None, status_change=None, closing_pr=False, | |
159 | send_email=True): |
|
158 | send_email=True): | |
160 | """ |
|
159 | """ | |
161 | Creates a new comment for either a changeset or a pull request. |
|
160 | Creates a new comment for either a changeset or a pull request. | |
162 | status_change and closing_pr is only for the optional email. |
|
161 | status_change and closing_pr is only for the optional email. | |
163 |
|
162 | |||
164 | Returns the created comment. |
|
163 | Returns the created comment. | |
165 | """ |
|
164 | """ | |
166 | if not status_change and not text: |
|
165 | if not status_change and not text: | |
167 | log.warning('Missing text for comment, skipping...') |
|
166 | log.warning('Missing text for comment, skipping...') | |
168 | return None |
|
167 | return None | |
169 |
|
168 | |||
170 | repo = db.Repository.guess_instance(repo) |
|
169 | repo = db.Repository.guess_instance(repo) | |
171 | author = db.User.guess_instance(author) |
|
170 | author = db.User.guess_instance(author) | |
172 | comment = db.ChangesetComment() |
|
171 | comment = db.ChangesetComment() | |
173 | comment.repo = repo |
|
172 | comment.repo = repo | |
174 | comment.author = author |
|
173 | comment.author = author | |
175 | comment.text = text |
|
174 | comment.text = text | |
176 | comment.f_path = f_path |
|
175 | comment.f_path = f_path | |
177 | comment.line_no = line_no |
|
176 | comment.line_no = line_no | |
178 |
|
177 | |||
179 | if revision is not None: |
|
178 | if revision is not None: | |
180 | comment.revision = revision |
|
179 | comment.revision = revision | |
181 | elif pull_request is not None: |
|
180 | elif pull_request is not None: | |
182 | pull_request = db.PullRequest.guess_instance(pull_request) |
|
181 | pull_request = db.PullRequest.guess_instance(pull_request) | |
183 | comment.pull_request = pull_request |
|
182 | comment.pull_request = pull_request | |
184 | else: |
|
183 | else: | |
185 | raise Exception('Please specify revision or pull_request_id') |
|
184 | raise Exception('Please specify revision or pull_request_id') | |
186 |
|
185 | |||
187 | meta.Session().add(comment) |
|
186 | meta.Session().add(comment) | |
188 | meta.Session().flush() |
|
187 | meta.Session().flush() | |
189 |
|
188 | |||
190 | if send_email: |
|
189 | if send_email: | |
191 | self.create_notification( |
|
190 | self.create_notification( | |
192 | repo, comment, author, text, line_no, revision, pull_request, |
|
191 | repo, comment, author, text, line_no, revision, pull_request, | |
193 | status_change, closing_pr |
|
192 | status_change, closing_pr | |
194 | ) |
|
193 | ) | |
195 |
|
194 | |||
196 | return comment |
|
195 | return comment | |
197 |
|
196 | |||
198 | def delete(self, comment): |
|
197 | def delete(self, comment): | |
199 | comment = db.ChangesetComment.guess_instance(comment) |
|
198 | comment = db.ChangesetComment.guess_instance(comment) | |
200 | meta.Session().delete(comment) |
|
199 | meta.Session().delete(comment) | |
201 |
|
200 | |||
202 | return comment |
|
201 | return comment | |
203 |
|
202 | |||
204 | def get_comments(self, repo_id, revision=None, pull_request=None): |
|
203 | def get_comments(self, repo_id, revision=None, pull_request=None): | |
205 | """ |
|
204 | """ | |
206 | Gets general comments for either revision or pull_request. |
|
205 | Gets general comments for either revision or pull_request. | |
207 |
|
206 | |||
208 | Returns a list, ordered by creation date. |
|
207 | Returns a list, ordered by creation date. | |
209 | """ |
|
208 | """ | |
210 | return self._get_comments(repo_id, revision=revision, pull_request=pull_request, |
|
209 | return self._get_comments(repo_id, revision=revision, pull_request=pull_request, | |
211 | inline=False) |
|
210 | inline=False) | |
212 |
|
211 | |||
213 | def get_inline_comments(self, repo_id, revision=None, pull_request=None, |
|
212 | def get_inline_comments(self, repo_id, revision=None, pull_request=None, | |
214 | f_path=None, line_no=None): |
|
213 | f_path=None, line_no=None): | |
215 | """ |
|
214 | """ | |
216 | Gets inline comments for either revision or pull_request. |
|
215 | Gets inline comments for either revision or pull_request. | |
217 |
|
216 | |||
218 | Returns a list of tuples with file path and list of comments per line number. |
|
217 | Returns a list of tuples with file path and list of comments per line number. | |
219 | """ |
|
218 | """ | |
220 | comments = self._get_comments(repo_id, revision=revision, pull_request=pull_request, |
|
219 | comments = self._get_comments(repo_id, revision=revision, pull_request=pull_request, | |
221 | inline=True, f_path=f_path, line_no=line_no) |
|
220 | inline=True, f_path=f_path, line_no=line_no) | |
222 |
|
221 | |||
223 | paths = defaultdict(lambda: defaultdict(list)) |
|
222 | paths = defaultdict(lambda: defaultdict(list)) | |
224 | for co in comments: |
|
223 | for co in comments: | |
225 | paths[co.f_path][co.line_no].append(co) |
|
224 | paths[co.f_path][co.line_no].append(co) | |
226 | return sorted(paths.items()) |
|
225 | return sorted(paths.items()) | |
227 |
|
226 | |||
228 | def _get_comments(self, repo_id, revision=None, pull_request=None, |
|
227 | def _get_comments(self, repo_id, revision=None, pull_request=None, | |
229 | inline=False, f_path=None, line_no=None): |
|
228 | inline=False, f_path=None, line_no=None): | |
230 | """ |
|
229 | """ | |
231 | Gets comments for either revision or pull_request_id, either inline or general. |
|
230 | Gets comments for either revision or pull_request_id, either inline or general. | |
232 | If a file path and optionally line number are given, return only the matching inline comments. |
|
231 | If a file path and optionally line number are given, return only the matching inline comments. | |
233 | """ |
|
232 | """ | |
234 | if f_path is None and line_no is not None: |
|
233 | if f_path is None and line_no is not None: | |
235 | raise Exception("line_no only makes sense if f_path is given.") |
|
234 | raise Exception("line_no only makes sense if f_path is given.") | |
236 |
|
235 | |||
237 | if inline is None and f_path is not None: |
|
236 | if inline is None and f_path is not None: | |
238 | raise Exception("f_path only makes sense for inline comments.") |
|
237 | raise Exception("f_path only makes sense for inline comments.") | |
239 |
|
238 | |||
240 | q = meta.Session().query(db.ChangesetComment) |
|
239 | q = meta.Session().query(db.ChangesetComment) | |
241 |
|
240 | |||
242 | if inline: |
|
241 | if inline: | |
243 | if f_path is not None: |
|
242 | if f_path is not None: | |
244 | # inline comments for a given file... |
|
243 | # inline comments for a given file... | |
245 | q = q.filter(db.ChangesetComment.f_path == f_path) |
|
244 | q = q.filter(db.ChangesetComment.f_path == f_path) | |
246 | if line_no is None: |
|
245 | if line_no is None: | |
247 | # ... on any line |
|
246 | # ... on any line | |
248 | q = q.filter(db.ChangesetComment.line_no != None) |
|
247 | q = q.filter(db.ChangesetComment.line_no != None) | |
249 | else: |
|
248 | else: | |
250 | # ... on specific line |
|
249 | # ... on specific line | |
251 | q = q.filter(db.ChangesetComment.line_no == line_no) |
|
250 | q = q.filter(db.ChangesetComment.line_no == line_no) | |
252 | else: |
|
251 | else: | |
253 | # all inline comments |
|
252 | # all inline comments | |
254 | q = q.filter(db.ChangesetComment.line_no != None) \ |
|
253 | q = q.filter(db.ChangesetComment.line_no != None) \ | |
255 | .filter(db.ChangesetComment.f_path != None) |
|
254 | .filter(db.ChangesetComment.f_path != None) | |
256 | else: |
|
255 | else: | |
257 | # all general comments |
|
256 | # all general comments | |
258 | q = q.filter(db.ChangesetComment.line_no == None) \ |
|
257 | q = q.filter(db.ChangesetComment.line_no == None) \ | |
259 | .filter(db.ChangesetComment.f_path == None) |
|
258 | .filter(db.ChangesetComment.f_path == None) | |
260 |
|
259 | |||
261 | if revision is not None: |
|
260 | if revision is not None: | |
262 | q = q.filter(db.ChangesetComment.revision == revision) \ |
|
261 | q = q.filter(db.ChangesetComment.revision == revision) \ | |
263 | .filter(db.ChangesetComment.repo_id == repo_id) |
|
262 | .filter(db.ChangesetComment.repo_id == repo_id) | |
264 | elif pull_request is not None: |
|
263 | elif pull_request is not None: | |
265 | pull_request = db.PullRequest.guess_instance(pull_request) |
|
264 | pull_request = db.PullRequest.guess_instance(pull_request) | |
266 | q = q.filter(db.ChangesetComment.pull_request == pull_request) |
|
265 | q = q.filter(db.ChangesetComment.pull_request == pull_request) | |
267 | else: |
|
266 | else: | |
268 | raise Exception('Please specify either revision or pull_request') |
|
267 | raise Exception('Please specify either revision or pull_request') | |
269 |
|
268 | |||
270 | return q.order_by(db.ChangesetComment.created_on).all() |
|
269 | return q.order_by(db.ChangesetComment.created_on).all() |
@@ -1,233 +1,232 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.model.notification |
|
15 | kallithea.model.notification | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Model for notifications |
|
18 | Model for notifications | |
19 |
|
19 | |||
20 |
|
20 | |||
21 | This file was forked by the Kallithea project in July 2014. |
|
21 | This file was forked by the Kallithea project in July 2014. | |
22 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | Original author and date, and relevant copyright and licensing information is below: | |
23 | :created_on: Nov 20, 2011 |
|
23 | :created_on: Nov 20, 2011 | |
24 | :author: marcink |
|
24 | :author: marcink | |
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
26 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | :license: GPLv3, see LICENSE.md for more details. | |
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 | import datetime |
|
29 | import datetime | |
30 | import logging |
|
30 | import logging | |
31 |
|
31 | |||
32 | from tg import app_globals |
|
32 | from tg import app_globals | |
33 | from tg import tmpl_context as c |
|
33 | from tg import tmpl_context as c | |
34 | from tg.i18n import ugettext as _ |
|
34 | from tg.i18n import ugettext as _ | |
35 |
|
35 | |||
36 | from kallithea.lib import webutils |
|
36 | from kallithea.lib import webutils | |
37 | from kallithea.lib.utils2 import fmt_date |
|
|||
38 | from kallithea.model import async_tasks, db |
|
37 | from kallithea.model import async_tasks, db | |
39 |
|
38 | |||
40 |
|
39 | |||
41 | log = logging.getLogger(__name__) |
|
40 | log = logging.getLogger(__name__) | |
42 |
|
41 | |||
43 |
|
42 | |||
44 | class NotificationModel(object): |
|
43 | class NotificationModel(object): | |
45 |
|
44 | |||
46 | TYPE_CHANGESET_COMMENT = 'cs_comment' |
|
45 | TYPE_CHANGESET_COMMENT = 'cs_comment' | |
47 | TYPE_MESSAGE = 'message' |
|
46 | TYPE_MESSAGE = 'message' | |
48 | TYPE_MENTION = 'mention' # not used |
|
47 | TYPE_MENTION = 'mention' # not used | |
49 | TYPE_REGISTRATION = 'registration' |
|
48 | TYPE_REGISTRATION = 'registration' | |
50 | TYPE_PULL_REQUEST = 'pull_request' |
|
49 | TYPE_PULL_REQUEST = 'pull_request' | |
51 | TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment' |
|
50 | TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment' | |
52 |
|
51 | |||
53 | def create(self, created_by, body, recipients=None, |
|
52 | def create(self, created_by, body, recipients=None, | |
54 | type_=TYPE_MESSAGE, with_email=True, |
|
53 | type_=TYPE_MESSAGE, with_email=True, | |
55 | email_kwargs=None, repo_name=None): |
|
54 | email_kwargs=None, repo_name=None): | |
56 | """ |
|
55 | """ | |
57 |
|
56 | |||
58 | Creates notification of given type |
|
57 | Creates notification of given type | |
59 |
|
58 | |||
60 | :param created_by: int, str or User instance. User who created this |
|
59 | :param created_by: int, str or User instance. User who created this | |
61 | notification |
|
60 | notification | |
62 | :param body: |
|
61 | :param body: | |
63 | :param recipients: list of int, str or User objects, when None |
|
62 | :param recipients: list of int, str or User objects, when None | |
64 | is given send to all admins |
|
63 | is given send to all admins | |
65 | :param type_: type of notification |
|
64 | :param type_: type of notification | |
66 | :param with_email: send email with this notification |
|
65 | :param with_email: send email with this notification | |
67 | :param email_kwargs: additional dict to pass as args to email template |
|
66 | :param email_kwargs: additional dict to pass as args to email template | |
68 | """ |
|
67 | """ | |
69 | email_kwargs = email_kwargs or {} |
|
68 | email_kwargs = email_kwargs or {} | |
70 | if recipients and not getattr(recipients, '__iter__', False): |
|
69 | if recipients and not getattr(recipients, '__iter__', False): | |
71 | raise Exception('recipients must be a list or iterable') |
|
70 | raise Exception('recipients must be a list or iterable') | |
72 |
|
71 | |||
73 | created_by_obj = db.User.guess_instance(created_by) |
|
72 | created_by_obj = db.User.guess_instance(created_by) | |
74 |
|
73 | |||
75 | recipients_objs = set() |
|
74 | recipients_objs = set() | |
76 | if recipients: |
|
75 | if recipients: | |
77 | for u in recipients: |
|
76 | for u in recipients: | |
78 | obj = db.User.guess_instance(u) |
|
77 | obj = db.User.guess_instance(u) | |
79 | if obj is not None: |
|
78 | if obj is not None: | |
80 | recipients_objs.add(obj) |
|
79 | recipients_objs.add(obj) | |
81 | else: |
|
80 | else: | |
82 | # TODO: inform user that requested operation couldn't be completed |
|
81 | # TODO: inform user that requested operation couldn't be completed | |
83 | log.error('cannot email unknown user %r', u) |
|
82 | log.error('cannot email unknown user %r', u) | |
84 | log.debug('sending notifications %s to %s', |
|
83 | log.debug('sending notifications %s to %s', | |
85 | type_, recipients_objs |
|
84 | type_, recipients_objs | |
86 | ) |
|
85 | ) | |
87 | elif recipients is None: |
|
86 | elif recipients is None: | |
88 | # empty recipients means to all admins |
|
87 | # empty recipients means to all admins | |
89 | recipients_objs = db.User.query().filter(db.User.admin == True).all() |
|
88 | recipients_objs = db.User.query().filter(db.User.admin == True).all() | |
90 | log.debug('sending notifications %s to admins: %s', |
|
89 | log.debug('sending notifications %s to admins: %s', | |
91 | type_, recipients_objs |
|
90 | type_, recipients_objs | |
92 | ) |
|
91 | ) | |
93 | #else: silently skip notification mails? |
|
92 | #else: silently skip notification mails? | |
94 |
|
93 | |||
95 | if not with_email: |
|
94 | if not with_email: | |
96 | return |
|
95 | return | |
97 |
|
96 | |||
98 | headers = {} |
|
97 | headers = {} | |
99 | headers['X-Kallithea-Notification-Type'] = type_ |
|
98 | headers['X-Kallithea-Notification-Type'] = type_ | |
100 | if 'threading' in email_kwargs: |
|
99 | if 'threading' in email_kwargs: | |
101 | headers['References'] = ' '.join('<%s>' % x for x in email_kwargs['threading']) |
|
100 | headers['References'] = ' '.join('<%s>' % x for x in email_kwargs['threading']) | |
102 |
|
101 | |||
103 | # this is passed into template |
|
102 | # this is passed into template | |
104 | created_on = fmt_date(datetime.datetime.now()) |
|
103 | created_on = webutils.fmt_date(datetime.datetime.now()) | |
105 | html_kwargs = { |
|
104 | html_kwargs = { | |
106 | 'body': None if body is None else webutils.render_w_mentions(body, repo_name), |
|
105 | 'body': None if body is None else webutils.render_w_mentions(body, repo_name), | |
107 | 'when': created_on, |
|
106 | 'when': created_on, | |
108 | 'user': created_by_obj.username, |
|
107 | 'user': created_by_obj.username, | |
109 | } |
|
108 | } | |
110 |
|
109 | |||
111 | txt_kwargs = { |
|
110 | txt_kwargs = { | |
112 | 'body': body, |
|
111 | 'body': body, | |
113 | 'when': created_on, |
|
112 | 'when': created_on, | |
114 | 'user': created_by_obj.username, |
|
113 | 'user': created_by_obj.username, | |
115 | } |
|
114 | } | |
116 |
|
115 | |||
117 | html_kwargs.update(email_kwargs) |
|
116 | html_kwargs.update(email_kwargs) | |
118 | txt_kwargs.update(email_kwargs) |
|
117 | txt_kwargs.update(email_kwargs) | |
119 | email_subject = EmailNotificationModel() \ |
|
118 | email_subject = EmailNotificationModel() \ | |
120 | .get_email_description(type_, **txt_kwargs) |
|
119 | .get_email_description(type_, **txt_kwargs) | |
121 | email_txt_body = EmailNotificationModel() \ |
|
120 | email_txt_body = EmailNotificationModel() \ | |
122 | .get_email_tmpl(type_, 'txt', **txt_kwargs) |
|
121 | .get_email_tmpl(type_, 'txt', **txt_kwargs) | |
123 | email_html_body = EmailNotificationModel() \ |
|
122 | email_html_body = EmailNotificationModel() \ | |
124 | .get_email_tmpl(type_, 'html', **html_kwargs) |
|
123 | .get_email_tmpl(type_, 'html', **html_kwargs) | |
125 |
|
124 | |||
126 | # don't send email to the person who caused the notification, except for |
|
125 | # don't send email to the person who caused the notification, except for | |
127 | # notifications about new pull requests where the author is explicitly |
|
126 | # notifications about new pull requests where the author is explicitly | |
128 | # added. |
|
127 | # added. | |
129 | rec_mails = set(obj.email for obj in recipients_objs) |
|
128 | rec_mails = set(obj.email for obj in recipients_objs) | |
130 | if type_ == NotificationModel.TYPE_PULL_REQUEST: |
|
129 | if type_ == NotificationModel.TYPE_PULL_REQUEST: | |
131 | rec_mails.add(created_by_obj.email) |
|
130 | rec_mails.add(created_by_obj.email) | |
132 | else: |
|
131 | else: | |
133 | rec_mails.discard(created_by_obj.email) |
|
132 | rec_mails.discard(created_by_obj.email) | |
134 |
|
133 | |||
135 | # send email with notification to participants |
|
134 | # send email with notification to participants | |
136 | for rec_mail in sorted(rec_mails): |
|
135 | for rec_mail in sorted(rec_mails): | |
137 | async_tasks.send_email([rec_mail], email_subject, email_txt_body, |
|
136 | async_tasks.send_email([rec_mail], email_subject, email_txt_body, | |
138 | email_html_body, headers, |
|
137 | email_html_body, headers, | |
139 | from_name=created_by_obj.full_name_or_username) |
|
138 | from_name=created_by_obj.full_name_or_username) | |
140 |
|
139 | |||
141 |
|
140 | |||
142 | class EmailNotificationModel(object): |
|
141 | class EmailNotificationModel(object): | |
143 |
|
142 | |||
144 | TYPE_CHANGESET_COMMENT = NotificationModel.TYPE_CHANGESET_COMMENT |
|
143 | TYPE_CHANGESET_COMMENT = NotificationModel.TYPE_CHANGESET_COMMENT | |
145 | TYPE_MESSAGE = NotificationModel.TYPE_MESSAGE # only used for testing |
|
144 | TYPE_MESSAGE = NotificationModel.TYPE_MESSAGE # only used for testing | |
146 | # NotificationModel.TYPE_MENTION is not used |
|
145 | # NotificationModel.TYPE_MENTION is not used | |
147 | TYPE_PASSWORD_RESET = 'password_link' |
|
146 | TYPE_PASSWORD_RESET = 'password_link' | |
148 | TYPE_REGISTRATION = NotificationModel.TYPE_REGISTRATION |
|
147 | TYPE_REGISTRATION = NotificationModel.TYPE_REGISTRATION | |
149 | TYPE_PULL_REQUEST = NotificationModel.TYPE_PULL_REQUEST |
|
148 | TYPE_PULL_REQUEST = NotificationModel.TYPE_PULL_REQUEST | |
150 | TYPE_PULL_REQUEST_COMMENT = NotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
149 | TYPE_PULL_REQUEST_COMMENT = NotificationModel.TYPE_PULL_REQUEST_COMMENT | |
151 | TYPE_DEFAULT = 'default' |
|
150 | TYPE_DEFAULT = 'default' | |
152 |
|
151 | |||
153 | def __init__(self): |
|
152 | def __init__(self): | |
154 | super(EmailNotificationModel, self).__init__() |
|
153 | super(EmailNotificationModel, self).__init__() | |
155 | self._tmpl_lookup = app_globals.mako_lookup |
|
154 | self._tmpl_lookup = app_globals.mako_lookup | |
156 | self.email_types = { |
|
155 | self.email_types = { | |
157 | self.TYPE_CHANGESET_COMMENT: 'changeset_comment', |
|
156 | self.TYPE_CHANGESET_COMMENT: 'changeset_comment', | |
158 | self.TYPE_PASSWORD_RESET: 'password_reset', |
|
157 | self.TYPE_PASSWORD_RESET: 'password_reset', | |
159 | self.TYPE_REGISTRATION: 'registration', |
|
158 | self.TYPE_REGISTRATION: 'registration', | |
160 | self.TYPE_DEFAULT: 'default', |
|
159 | self.TYPE_DEFAULT: 'default', | |
161 | self.TYPE_PULL_REQUEST: 'pull_request', |
|
160 | self.TYPE_PULL_REQUEST: 'pull_request', | |
162 | self.TYPE_PULL_REQUEST_COMMENT: 'pull_request_comment', |
|
161 | self.TYPE_PULL_REQUEST_COMMENT: 'pull_request_comment', | |
163 | } |
|
162 | } | |
164 | self._subj_map = { |
|
163 | self._subj_map = { | |
165 | self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s by %(cs_author_username)s'), |
|
164 | self.TYPE_CHANGESET_COMMENT: _('[Comment] %(repo_name)s changeset %(short_id)s "%(message_short)s" on %(branch)s by %(cs_author_username)s'), | |
166 | self.TYPE_MESSAGE: 'Test Message', |
|
165 | self.TYPE_MESSAGE: 'Test Message', | |
167 | # self.TYPE_PASSWORD_RESET |
|
166 | # self.TYPE_PASSWORD_RESET | |
168 | self.TYPE_REGISTRATION: _('New user %(new_username)s registered'), |
|
167 | self.TYPE_REGISTRATION: _('New user %(new_username)s registered'), | |
169 | # self.TYPE_DEFAULT |
|
168 | # self.TYPE_DEFAULT | |
170 | self.TYPE_PULL_REQUEST: _('[Review] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'), |
|
169 | self.TYPE_PULL_REQUEST: _('[Review] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'), | |
171 | self.TYPE_PULL_REQUEST_COMMENT: _('[Comment] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'), |
|
170 | self.TYPE_PULL_REQUEST_COMMENT: _('[Comment] %(repo_name)s PR %(pr_nice_id)s "%(pr_title_short)s" from %(pr_source_branch)s by %(pr_owner_username)s'), | |
172 | } |
|
171 | } | |
173 |
|
172 | |||
174 | def get_email_description(self, type_, **kwargs): |
|
173 | def get_email_description(self, type_, **kwargs): | |
175 | """ |
|
174 | """ | |
176 | return subject for email based on given type |
|
175 | return subject for email based on given type | |
177 | """ |
|
176 | """ | |
178 | tmpl = self._subj_map[type_] |
|
177 | tmpl = self._subj_map[type_] | |
179 | try: |
|
178 | try: | |
180 | subj = tmpl % kwargs |
|
179 | subj = tmpl % kwargs | |
181 | except KeyError as e: |
|
180 | except KeyError as e: | |
182 | log.error('error generating email subject for %r from %s: %s', type_, ', '.join(self._subj_map), e) |
|
181 | log.error('error generating email subject for %r from %s: %s', type_, ', '.join(self._subj_map), e) | |
183 | raise |
|
182 | raise | |
184 | # gmail doesn't do proper threading but will ignore leading square |
|
183 | # gmail doesn't do proper threading but will ignore leading square | |
185 | # bracket content ... so that is where we put status info |
|
184 | # bracket content ... so that is where we put status info | |
186 | bracket_tags = [] |
|
185 | bracket_tags = [] | |
187 | status_change = kwargs.get('status_change') |
|
186 | status_change = kwargs.get('status_change') | |
188 | if status_change: |
|
187 | if status_change: | |
189 | bracket_tags.append(str(status_change)) # apply str to evaluate LazyString before .join |
|
188 | bracket_tags.append(str(status_change)) # apply str to evaluate LazyString before .join | |
190 | if kwargs.get('closing_pr'): |
|
189 | if kwargs.get('closing_pr'): | |
191 | bracket_tags.append(_('Closing')) |
|
190 | bracket_tags.append(_('Closing')) | |
192 | if bracket_tags: |
|
191 | if bracket_tags: | |
193 | if subj.startswith('['): |
|
192 | if subj.startswith('['): | |
194 | subj = '[' + ', '.join(bracket_tags) + ': ' + subj[1:] |
|
193 | subj = '[' + ', '.join(bracket_tags) + ': ' + subj[1:] | |
195 | else: |
|
194 | else: | |
196 | subj = '[' + ', '.join(bracket_tags) + '] ' + subj |
|
195 | subj = '[' + ', '.join(bracket_tags) + '] ' + subj | |
197 | return subj |
|
196 | return subj | |
198 |
|
197 | |||
199 | def get_email_tmpl(self, type_, content_type, **kwargs): |
|
198 | def get_email_tmpl(self, type_, content_type, **kwargs): | |
200 | """ |
|
199 | """ | |
201 | return generated template for email based on given type |
|
200 | return generated template for email based on given type | |
202 | """ |
|
201 | """ | |
203 | import kallithea.lib.helpers as h |
|
202 | import kallithea.lib.helpers as h | |
204 |
|
203 | |||
205 | base = 'email/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' + content_type |
|
204 | base = 'email/' + self.email_types.get(type_, self.email_types[self.TYPE_DEFAULT]) + '.' + content_type | |
206 | email_template = self._tmpl_lookup.get_template(base) |
|
205 | email_template = self._tmpl_lookup.get_template(base) | |
207 | # translator and helpers inject |
|
206 | # translator and helpers inject | |
208 | _kwargs = {'_': _, |
|
207 | _kwargs = {'_': _, | |
209 | 'h': h, |
|
208 | 'h': h, | |
210 | 'c': c} |
|
209 | 'c': c} | |
211 | _kwargs.update(kwargs) |
|
210 | _kwargs.update(kwargs) | |
212 | if content_type == 'html': |
|
211 | if content_type == 'html': | |
213 | _kwargs.update({ |
|
212 | _kwargs.update({ | |
214 | "color_text": "#202020", |
|
213 | "color_text": "#202020", | |
215 | "color_emph": "#395fa0", |
|
214 | "color_emph": "#395fa0", | |
216 | "color_link": "#395fa0", |
|
215 | "color_link": "#395fa0", | |
217 | "color_border": "#ddd", |
|
216 | "color_border": "#ddd", | |
218 | "color_background_grey": "#f9f9f9", |
|
217 | "color_background_grey": "#f9f9f9", | |
219 | "color_button": "#395fa0", |
|
218 | "color_button": "#395fa0", | |
220 | "monospace_style": "font-family:Lucida Console,Consolas,Monaco,Inconsolata,Liberation Mono,monospace", |
|
219 | "monospace_style": "font-family:Lucida Console,Consolas,Monaco,Inconsolata,Liberation Mono,monospace", | |
221 | "sans_style": "font-family:Helvetica,Arial,sans-serif", |
|
220 | "sans_style": "font-family:Helvetica,Arial,sans-serif", | |
222 | }) |
|
221 | }) | |
223 | _kwargs.update({ |
|
222 | _kwargs.update({ | |
224 | "default_style": "%(sans_style)s;font-weight:200;font-size:14px;line-height:17px;color:%(color_text)s" % _kwargs, |
|
223 | "default_style": "%(sans_style)s;font-weight:200;font-size:14px;line-height:17px;color:%(color_text)s" % _kwargs, | |
225 | "comment_style": "%(monospace_style)s;white-space:pre-wrap" % _kwargs, |
|
224 | "comment_style": "%(monospace_style)s;white-space:pre-wrap" % _kwargs, | |
226 | "data_style": "border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs, |
|
225 | "data_style": "border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs, | |
227 | "emph_style": "font-weight:600;color:%(color_emph)s" % _kwargs, |
|
226 | "emph_style": "font-weight:600;color:%(color_emph)s" % _kwargs, | |
228 | "link_style": "color:%(color_link)s;text-decoration:none" % _kwargs, |
|
227 | "link_style": "color:%(color_link)s;text-decoration:none" % _kwargs, | |
229 | "link_text_style": "color:%(color_text)s;text-decoration:none;border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs, |
|
228 | "link_text_style": "color:%(color_text)s;text-decoration:none;border:%(color_border)s 1px solid;background:%(color_background_grey)s" % _kwargs, | |
230 | }) |
|
229 | }) | |
231 |
|
230 | |||
232 | log.debug('rendering tmpl %s with kwargs %s', base, _kwargs) |
|
231 | log.debug('rendering tmpl %s with kwargs %s', base, _kwargs) | |
233 | return email_template.render_unicode(**_kwargs) |
|
232 | return email_template.render_unicode(**_kwargs) |
@@ -1,391 +1,391 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.model.pull_request |
|
15 | kallithea.model.pull_request | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | pull request model for Kallithea |
|
18 | pull request model for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Jun 6, 2012 |
|
22 | :created_on: Jun 6, 2012 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import datetime |
|
28 | import datetime | |
29 | import logging |
|
29 | import logging | |
30 | import re |
|
30 | import re | |
31 |
|
31 | |||
32 | from tg import request |
|
32 | from tg import request | |
33 | from tg.i18n import ugettext as _ |
|
33 | from tg.i18n import ugettext as _ | |
34 |
|
34 | |||
35 | from kallithea.lib import auth, hooks, webutils |
|
35 | from kallithea.lib import auth, hooks, webutils | |
36 | from kallithea.lib.utils import extract_mentioned_users |
|
36 | from kallithea.lib.utils import extract_mentioned_users | |
37 |
from kallithea.lib.utils2 import ascii_bytes, short_ref_name |
|
37 | from kallithea.lib.utils2 import ascii_bytes, short_ref_name | |
38 | from kallithea.model import changeset_status, comment, db, meta, notification |
|
38 | from kallithea.model import changeset_status, comment, db, meta, notification | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | log = logging.getLogger(__name__) |
|
41 | log = logging.getLogger(__name__) | |
42 |
|
42 | |||
43 |
|
43 | |||
44 | def _assert_valid_reviewers(seq): |
|
44 | def _assert_valid_reviewers(seq): | |
45 | """Sanity check: elements are actual User objects, and not the default user.""" |
|
45 | """Sanity check: elements are actual User objects, and not the default user.""" | |
46 | assert not any(user.is_default_user for user in seq) |
|
46 | assert not any(user.is_default_user for user in seq) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | class PullRequestModel(object): |
|
49 | class PullRequestModel(object): | |
50 |
|
50 | |||
51 | def add_reviewers(self, user, pr, reviewers, mention_recipients=None): |
|
51 | def add_reviewers(self, user, pr, reviewers, mention_recipients=None): | |
52 | """Add reviewer and send notification to them. |
|
52 | """Add reviewer and send notification to them. | |
53 | """ |
|
53 | """ | |
54 | reviewers = set(reviewers) |
|
54 | reviewers = set(reviewers) | |
55 | _assert_valid_reviewers(reviewers) |
|
55 | _assert_valid_reviewers(reviewers) | |
56 | if mention_recipients is not None: |
|
56 | if mention_recipients is not None: | |
57 | mention_recipients = set(mention_recipients) - reviewers |
|
57 | mention_recipients = set(mention_recipients) - reviewers | |
58 | _assert_valid_reviewers(mention_recipients) |
|
58 | _assert_valid_reviewers(mention_recipients) | |
59 |
|
59 | |||
60 | redundant_reviewers = set(db.User.query() \ |
|
60 | redundant_reviewers = set(db.User.query() \ | |
61 | .join(db.PullRequestReviewer) \ |
|
61 | .join(db.PullRequestReviewer) \ | |
62 | .filter(db.PullRequestReviewer.pull_request == pr) \ |
|
62 | .filter(db.PullRequestReviewer.pull_request == pr) \ | |
63 | .filter(db.PullRequestReviewer.user_id.in_(r.user_id for r in reviewers)) |
|
63 | .filter(db.PullRequestReviewer.user_id.in_(r.user_id for r in reviewers)) | |
64 | .all()) |
|
64 | .all()) | |
65 |
|
65 | |||
66 | if redundant_reviewers: |
|
66 | if redundant_reviewers: | |
67 | log.debug('Following reviewers were already part of pull request %s: %s', pr.pull_request_id, redundant_reviewers) |
|
67 | log.debug('Following reviewers were already part of pull request %s: %s', pr.pull_request_id, redundant_reviewers) | |
68 |
|
68 | |||
69 | reviewers -= redundant_reviewers |
|
69 | reviewers -= redundant_reviewers | |
70 |
|
70 | |||
71 | log.debug('Adding reviewers to pull request %s: %s', pr.pull_request_id, reviewers) |
|
71 | log.debug('Adding reviewers to pull request %s: %s', pr.pull_request_id, reviewers) | |
72 | for reviewer in reviewers: |
|
72 | for reviewer in reviewers: | |
73 | prr = db.PullRequestReviewer(reviewer, pr) |
|
73 | prr = db.PullRequestReviewer(reviewer, pr) | |
74 | meta.Session().add(prr) |
|
74 | meta.Session().add(prr) | |
75 |
|
75 | |||
76 | # notification to reviewers |
|
76 | # notification to reviewers | |
77 | pr_url = pr.url(canonical=True) |
|
77 | pr_url = pr.url(canonical=True) | |
78 | threading = ['%s-pr-%s@%s' % (pr.other_repo.repo_name, |
|
78 | threading = ['%s-pr-%s@%s' % (pr.other_repo.repo_name, | |
79 | pr.pull_request_id, |
|
79 | pr.pull_request_id, | |
80 | webutils.canonical_hostname())] |
|
80 | webutils.canonical_hostname())] | |
81 | body = pr.description |
|
81 | body = pr.description | |
82 | _org_ref_type, org_ref_name, _org_rev = pr.org_ref.split(':') |
|
82 | _org_ref_type, org_ref_name, _org_rev = pr.org_ref.split(':') | |
83 | _other_ref_type, other_ref_name, _other_rev = pr.other_ref.split(':') |
|
83 | _other_ref_type, other_ref_name, _other_rev = pr.other_ref.split(':') | |
84 | revision_data = [(x.raw_id, x.message) |
|
84 | revision_data = [(x.raw_id, x.message) | |
85 | for x in map(pr.org_repo.get_changeset, pr.revisions)] |
|
85 | for x in map(pr.org_repo.get_changeset, pr.revisions)] | |
86 | email_kwargs = { |
|
86 | email_kwargs = { | |
87 | 'pr_title': pr.title, |
|
87 | 'pr_title': pr.title, | |
88 | 'pr_title_short': shorter(pr.title, 50), |
|
88 | 'pr_title_short': webutils.shorter(pr.title, 50), | |
89 | 'pr_user_created': user.full_name_and_username, |
|
89 | 'pr_user_created': user.full_name_and_username, | |
90 | 'pr_repo_url': webutils.canonical_url('summary_home', repo_name=pr.other_repo.repo_name), |
|
90 | 'pr_repo_url': webutils.canonical_url('summary_home', repo_name=pr.other_repo.repo_name), | |
91 | 'pr_url': pr_url, |
|
91 | 'pr_url': pr_url, | |
92 | 'pr_revisions': revision_data, |
|
92 | 'pr_revisions': revision_data, | |
93 | 'repo_name': pr.other_repo.repo_name, |
|
93 | 'repo_name': pr.other_repo.repo_name, | |
94 | 'org_repo_name': pr.org_repo.repo_name, |
|
94 | 'org_repo_name': pr.org_repo.repo_name, | |
95 | 'pr_nice_id': pr.nice_id(), |
|
95 | 'pr_nice_id': pr.nice_id(), | |
96 | 'pr_target_repo': webutils.canonical_url('summary_home', |
|
96 | 'pr_target_repo': webutils.canonical_url('summary_home', | |
97 | repo_name=pr.other_repo.repo_name), |
|
97 | repo_name=pr.other_repo.repo_name), | |
98 | 'pr_target_branch': other_ref_name, |
|
98 | 'pr_target_branch': other_ref_name, | |
99 | 'pr_source_repo': webutils.canonical_url('summary_home', |
|
99 | 'pr_source_repo': webutils.canonical_url('summary_home', | |
100 | repo_name=pr.org_repo.repo_name), |
|
100 | repo_name=pr.org_repo.repo_name), | |
101 | 'pr_source_branch': org_ref_name, |
|
101 | 'pr_source_branch': org_ref_name, | |
102 | 'pr_owner': pr.owner, |
|
102 | 'pr_owner': pr.owner, | |
103 | 'pr_owner_username': pr.owner.username, |
|
103 | 'pr_owner_username': pr.owner.username, | |
104 | 'pr_username': user.username, |
|
104 | 'pr_username': user.username, | |
105 | 'threading': threading, |
|
105 | 'threading': threading, | |
106 | 'is_mention': False, |
|
106 | 'is_mention': False, | |
107 | } |
|
107 | } | |
108 | if reviewers: |
|
108 | if reviewers: | |
109 | notification.NotificationModel().create(created_by=user, body=body, |
|
109 | notification.NotificationModel().create(created_by=user, body=body, | |
110 | recipients=reviewers, |
|
110 | recipients=reviewers, | |
111 | type_=notification.NotificationModel.TYPE_PULL_REQUEST, |
|
111 | type_=notification.NotificationModel.TYPE_PULL_REQUEST, | |
112 | email_kwargs=email_kwargs) |
|
112 | email_kwargs=email_kwargs) | |
113 |
|
113 | |||
114 | if mention_recipients: |
|
114 | if mention_recipients: | |
115 | email_kwargs['is_mention'] = True |
|
115 | email_kwargs['is_mention'] = True | |
116 | notification.NotificationModel().create(created_by=user, body=body, |
|
116 | notification.NotificationModel().create(created_by=user, body=body, | |
117 | recipients=mention_recipients, |
|
117 | recipients=mention_recipients, | |
118 | type_=notification.NotificationModel.TYPE_PULL_REQUEST, |
|
118 | type_=notification.NotificationModel.TYPE_PULL_REQUEST, | |
119 | email_kwargs=email_kwargs) |
|
119 | email_kwargs=email_kwargs) | |
120 |
|
120 | |||
121 | return reviewers, redundant_reviewers |
|
121 | return reviewers, redundant_reviewers | |
122 |
|
122 | |||
123 | def mention_from_description(self, user, pr, old_description=''): |
|
123 | def mention_from_description(self, user, pr, old_description=''): | |
124 | mention_recipients = (extract_mentioned_users(pr.description) - |
|
124 | mention_recipients = (extract_mentioned_users(pr.description) - | |
125 | extract_mentioned_users(old_description)) |
|
125 | extract_mentioned_users(old_description)) | |
126 |
|
126 | |||
127 | log.debug("Mentioning %s", mention_recipients) |
|
127 | log.debug("Mentioning %s", mention_recipients) | |
128 | self.add_reviewers(user, pr, set(), mention_recipients) |
|
128 | self.add_reviewers(user, pr, set(), mention_recipients) | |
129 |
|
129 | |||
130 | def remove_reviewers(self, user, pull_request, reviewers): |
|
130 | def remove_reviewers(self, user, pull_request, reviewers): | |
131 | """Remove specified users from being reviewers of the PR.""" |
|
131 | """Remove specified users from being reviewers of the PR.""" | |
132 | if not reviewers: |
|
132 | if not reviewers: | |
133 | return # avoid SQLAlchemy warning about empty sequence for IN-predicate |
|
133 | return # avoid SQLAlchemy warning about empty sequence for IN-predicate | |
134 |
|
134 | |||
135 | db.PullRequestReviewer.query() \ |
|
135 | db.PullRequestReviewer.query() \ | |
136 | .filter_by(pull_request=pull_request) \ |
|
136 | .filter_by(pull_request=pull_request) \ | |
137 | .filter(db.PullRequestReviewer.user_id.in_(r.user_id for r in reviewers)) \ |
|
137 | .filter(db.PullRequestReviewer.user_id.in_(r.user_id for r in reviewers)) \ | |
138 | .delete(synchronize_session='fetch') # the default of 'evaluate' is not available |
|
138 | .delete(synchronize_session='fetch') # the default of 'evaluate' is not available | |
139 |
|
139 | |||
140 | def delete(self, pull_request): |
|
140 | def delete(self, pull_request): | |
141 | pull_request = db.PullRequest.guess_instance(pull_request) |
|
141 | pull_request = db.PullRequest.guess_instance(pull_request) | |
142 | meta.Session().delete(pull_request) |
|
142 | meta.Session().delete(pull_request) | |
143 | if pull_request.org_repo.scm_instance.alias == 'git': |
|
143 | if pull_request.org_repo.scm_instance.alias == 'git': | |
144 | # remove a ref under refs/pull/ so that commits can be garbage-collected |
|
144 | # remove a ref under refs/pull/ so that commits can be garbage-collected | |
145 | try: |
|
145 | try: | |
146 | del pull_request.org_repo.scm_instance._repo[b"refs/pull/%d/head" % pull_request.pull_request_id] |
|
146 | del pull_request.org_repo.scm_instance._repo[b"refs/pull/%d/head" % pull_request.pull_request_id] | |
147 | except KeyError: |
|
147 | except KeyError: | |
148 | pass |
|
148 | pass | |
149 |
|
149 | |||
150 | def close_pull_request(self, pull_request): |
|
150 | def close_pull_request(self, pull_request): | |
151 | pull_request = db.PullRequest.guess_instance(pull_request) |
|
151 | pull_request = db.PullRequest.guess_instance(pull_request) | |
152 | pull_request.status = db.PullRequest.STATUS_CLOSED |
|
152 | pull_request.status = db.PullRequest.STATUS_CLOSED | |
153 | pull_request.updated_on = datetime.datetime.now() |
|
153 | pull_request.updated_on = datetime.datetime.now() | |
154 |
|
154 | |||
155 |
|
155 | |||
156 | class CreatePullRequestAction(object): |
|
156 | class CreatePullRequestAction(object): | |
157 |
|
157 | |||
158 | class ValidationError(Exception): |
|
158 | class ValidationError(Exception): | |
159 | pass |
|
159 | pass | |
160 |
|
160 | |||
161 | class Empty(ValidationError): |
|
161 | class Empty(ValidationError): | |
162 | pass |
|
162 | pass | |
163 |
|
163 | |||
164 | class AmbiguousAncestor(ValidationError): |
|
164 | class AmbiguousAncestor(ValidationError): | |
165 | pass |
|
165 | pass | |
166 |
|
166 | |||
167 | class Unauthorized(ValidationError): |
|
167 | class Unauthorized(ValidationError): | |
168 | pass |
|
168 | pass | |
169 |
|
169 | |||
170 | @staticmethod |
|
170 | @staticmethod | |
171 | def is_user_authorized(org_repo, other_repo): |
|
171 | def is_user_authorized(org_repo, other_repo): | |
172 | """Performs authorization check with only the minimum amount of |
|
172 | """Performs authorization check with only the minimum amount of | |
173 | information needed for such a check, rather than a full command |
|
173 | information needed for such a check, rather than a full command | |
174 | object. |
|
174 | object. | |
175 | """ |
|
175 | """ | |
176 | if (auth.HasRepoPermissionLevel('read')(org_repo.repo_name) and |
|
176 | if (auth.HasRepoPermissionLevel('read')(org_repo.repo_name) and | |
177 | auth.HasRepoPermissionLevel('read')(other_repo.repo_name) |
|
177 | auth.HasRepoPermissionLevel('read')(other_repo.repo_name) | |
178 | ): |
|
178 | ): | |
179 | return True |
|
179 | return True | |
180 |
|
180 | |||
181 | return False |
|
181 | return False | |
182 |
|
182 | |||
183 | def __init__(self, org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers): |
|
183 | def __init__(self, org_repo, other_repo, org_ref, other_ref, title, description, owner, reviewers): | |
184 | reviewers = set(reviewers) |
|
184 | reviewers = set(reviewers) | |
185 | _assert_valid_reviewers(reviewers) |
|
185 | _assert_valid_reviewers(reviewers) | |
186 |
|
186 | |||
187 | (org_ref_type, |
|
187 | (org_ref_type, | |
188 | org_ref_name, |
|
188 | org_ref_name, | |
189 | org_rev) = org_ref.split(':') |
|
189 | org_rev) = org_ref.split(':') | |
190 | org_display = short_ref_name(org_ref_type, org_ref_name) |
|
190 | org_display = short_ref_name(org_ref_type, org_ref_name) | |
191 | if org_ref_type == 'rev': |
|
191 | if org_ref_type == 'rev': | |
192 | cs = org_repo.scm_instance.get_changeset(org_rev) |
|
192 | cs = org_repo.scm_instance.get_changeset(org_rev) | |
193 | org_ref = 'branch:%s:%s' % (cs.branch, cs.raw_id) |
|
193 | org_ref = 'branch:%s:%s' % (cs.branch, cs.raw_id) | |
194 |
|
194 | |||
195 | (other_ref_type, |
|
195 | (other_ref_type, | |
196 | other_ref_name, |
|
196 | other_ref_name, | |
197 | other_rev) = other_ref.split(':') |
|
197 | other_rev) = other_ref.split(':') | |
198 | if other_ref_type == 'rev': |
|
198 | if other_ref_type == 'rev': | |
199 | cs = other_repo.scm_instance.get_changeset(other_rev) |
|
199 | cs = other_repo.scm_instance.get_changeset(other_rev) | |
200 | other_ref_name = cs.raw_id[:12] |
|
200 | other_ref_name = cs.raw_id[:12] | |
201 | other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, cs.raw_id) |
|
201 | other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, cs.raw_id) | |
202 | other_display = short_ref_name(other_ref_type, other_ref_name) |
|
202 | other_display = short_ref_name(other_ref_type, other_ref_name) | |
203 |
|
203 | |||
204 | cs_ranges, _cs_ranges_not, ancestor_revs = \ |
|
204 | cs_ranges, _cs_ranges_not, ancestor_revs = \ | |
205 | org_repo.scm_instance.get_diff_changesets(other_rev, org_repo.scm_instance, org_rev) # org and other "swapped" |
|
205 | org_repo.scm_instance.get_diff_changesets(other_rev, org_repo.scm_instance, org_rev) # org and other "swapped" | |
206 | if not cs_ranges: |
|
206 | if not cs_ranges: | |
207 | raise self.Empty(_('Cannot create empty pull request')) |
|
207 | raise self.Empty(_('Cannot create empty pull request')) | |
208 |
|
208 | |||
209 | if not ancestor_revs: |
|
209 | if not ancestor_revs: | |
210 | ancestor_rev = org_repo.scm_instance.EMPTY_CHANGESET |
|
210 | ancestor_rev = org_repo.scm_instance.EMPTY_CHANGESET | |
211 | elif len(ancestor_revs) == 1: |
|
211 | elif len(ancestor_revs) == 1: | |
212 | ancestor_rev = ancestor_revs[0] |
|
212 | ancestor_rev = ancestor_revs[0] | |
213 | else: |
|
213 | else: | |
214 | raise self.AmbiguousAncestor( |
|
214 | raise self.AmbiguousAncestor( | |
215 | _('Cannot create pull request - criss cross merge detected, please merge a later %s revision to %s') |
|
215 | _('Cannot create pull request - criss cross merge detected, please merge a later %s revision to %s') | |
216 | % (other_ref_name, org_ref_name)) |
|
216 | % (other_ref_name, org_ref_name)) | |
217 |
|
217 | |||
218 | self.revisions = [cs_.raw_id for cs_ in cs_ranges] |
|
218 | self.revisions = [cs_.raw_id for cs_ in cs_ranges] | |
219 |
|
219 | |||
220 | # hack: ancestor_rev is not an other_rev but we want to show the |
|
220 | # hack: ancestor_rev is not an other_rev but we want to show the | |
221 | # requested destination and have the exact ancestor |
|
221 | # requested destination and have the exact ancestor | |
222 | other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev) |
|
222 | other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev) | |
223 |
|
223 | |||
224 | if not title: |
|
224 | if not title: | |
225 | if org_repo == other_repo: |
|
225 | if org_repo == other_repo: | |
226 | title = '%s to %s' % (org_display, other_display) |
|
226 | title = '%s to %s' % (org_display, other_display) | |
227 | else: |
|
227 | else: | |
228 | title = '%s#%s to %s#%s' % (org_repo.repo_name, org_display, |
|
228 | title = '%s#%s to %s#%s' % (org_repo.repo_name, org_display, | |
229 | other_repo.repo_name, other_display) |
|
229 | other_repo.repo_name, other_display) | |
230 | description = description or _('No description') |
|
230 | description = description or _('No description') | |
231 |
|
231 | |||
232 | self.org_repo = org_repo |
|
232 | self.org_repo = org_repo | |
233 | self.other_repo = other_repo |
|
233 | self.other_repo = other_repo | |
234 | self.org_ref = org_ref |
|
234 | self.org_ref = org_ref | |
235 | self.org_rev = org_rev |
|
235 | self.org_rev = org_rev | |
236 | self.other_ref = other_ref |
|
236 | self.other_ref = other_ref | |
237 | self.title = title |
|
237 | self.title = title | |
238 | self.description = description |
|
238 | self.description = description | |
239 | self.owner = owner |
|
239 | self.owner = owner | |
240 | self.reviewers = reviewers |
|
240 | self.reviewers = reviewers | |
241 |
|
241 | |||
242 | if not CreatePullRequestAction.is_user_authorized(self.org_repo, self.other_repo): |
|
242 | if not CreatePullRequestAction.is_user_authorized(self.org_repo, self.other_repo): | |
243 | raise self.Unauthorized(_('You are not authorized to create the pull request')) |
|
243 | raise self.Unauthorized(_('You are not authorized to create the pull request')) | |
244 |
|
244 | |||
245 | def execute(self): |
|
245 | def execute(self): | |
246 | created_by = db.User.get(request.authuser.user_id) |
|
246 | created_by = db.User.get(request.authuser.user_id) | |
247 |
|
247 | |||
248 | pr = db.PullRequest() |
|
248 | pr = db.PullRequest() | |
249 | pr.org_repo = self.org_repo |
|
249 | pr.org_repo = self.org_repo | |
250 | pr.org_ref = self.org_ref |
|
250 | pr.org_ref = self.org_ref | |
251 | pr.other_repo = self.other_repo |
|
251 | pr.other_repo = self.other_repo | |
252 | pr.other_ref = self.other_ref |
|
252 | pr.other_ref = self.other_ref | |
253 | pr.revisions = self.revisions |
|
253 | pr.revisions = self.revisions | |
254 | pr.title = self.title |
|
254 | pr.title = self.title | |
255 | pr.description = self.description |
|
255 | pr.description = self.description | |
256 | pr.owner = self.owner |
|
256 | pr.owner = self.owner | |
257 | meta.Session().add(pr) |
|
257 | meta.Session().add(pr) | |
258 | meta.Session().flush() # make database assign pull_request_id |
|
258 | meta.Session().flush() # make database assign pull_request_id | |
259 |
|
259 | |||
260 | if self.org_repo.scm_instance.alias == 'git': |
|
260 | if self.org_repo.scm_instance.alias == 'git': | |
261 | # create a ref under refs/pull/ so that commits don't get garbage-collected |
|
261 | # create a ref under refs/pull/ so that commits don't get garbage-collected | |
262 | self.org_repo.scm_instance._repo[b"refs/pull/%d/head" % pr.pull_request_id] = ascii_bytes(self.org_rev) |
|
262 | self.org_repo.scm_instance._repo[b"refs/pull/%d/head" % pr.pull_request_id] = ascii_bytes(self.org_rev) | |
263 |
|
263 | |||
264 | # reset state to under-review |
|
264 | # reset state to under-review | |
265 | new_comment = comment.ChangesetCommentsModel().create( |
|
265 | new_comment = comment.ChangesetCommentsModel().create( | |
266 | text='', |
|
266 | text='', | |
267 | repo=self.org_repo, |
|
267 | repo=self.org_repo, | |
268 | author=created_by, |
|
268 | author=created_by, | |
269 | pull_request=pr, |
|
269 | pull_request=pr, | |
270 | send_email=False, |
|
270 | send_email=False, | |
271 | status_change=db.ChangesetStatus.STATUS_UNDER_REVIEW, |
|
271 | status_change=db.ChangesetStatus.STATUS_UNDER_REVIEW, | |
272 | ) |
|
272 | ) | |
273 | changeset_status.ChangesetStatusModel().set_status( |
|
273 | changeset_status.ChangesetStatusModel().set_status( | |
274 | self.org_repo, |
|
274 | self.org_repo, | |
275 | db.ChangesetStatus.STATUS_UNDER_REVIEW, |
|
275 | db.ChangesetStatus.STATUS_UNDER_REVIEW, | |
276 | created_by, |
|
276 | created_by, | |
277 | new_comment, |
|
277 | new_comment, | |
278 | pull_request=pr, |
|
278 | pull_request=pr, | |
279 | ) |
|
279 | ) | |
280 |
|
280 | |||
281 | mention_recipients = extract_mentioned_users(self.description) |
|
281 | mention_recipients = extract_mentioned_users(self.description) | |
282 | PullRequestModel().add_reviewers(created_by, pr, self.reviewers, mention_recipients) |
|
282 | PullRequestModel().add_reviewers(created_by, pr, self.reviewers, mention_recipients) | |
283 |
|
283 | |||
284 | hooks.log_create_pullrequest(pr.get_dict(), created_by) |
|
284 | hooks.log_create_pullrequest(pr.get_dict(), created_by) | |
285 |
|
285 | |||
286 | return pr |
|
286 | return pr | |
287 |
|
287 | |||
288 |
|
288 | |||
289 | class CreatePullRequestIterationAction(object): |
|
289 | class CreatePullRequestIterationAction(object): | |
290 | @staticmethod |
|
290 | @staticmethod | |
291 | def is_user_authorized(old_pull_request): |
|
291 | def is_user_authorized(old_pull_request): | |
292 | """Performs authorization check with only the minimum amount of |
|
292 | """Performs authorization check with only the minimum amount of | |
293 | information needed for such a check, rather than a full command |
|
293 | information needed for such a check, rather than a full command | |
294 | object. |
|
294 | object. | |
295 | """ |
|
295 | """ | |
296 | if auth.HasPermissionAny('hg.admin')(): |
|
296 | if auth.HasPermissionAny('hg.admin')(): | |
297 | return True |
|
297 | return True | |
298 |
|
298 | |||
299 | # Authorized to edit the old PR? |
|
299 | # Authorized to edit the old PR? | |
300 | if request.authuser.user_id != old_pull_request.owner_id: |
|
300 | if request.authuser.user_id != old_pull_request.owner_id: | |
301 | return False |
|
301 | return False | |
302 |
|
302 | |||
303 | # Authorized to create a new PR? |
|
303 | # Authorized to create a new PR? | |
304 | if not CreatePullRequestAction.is_user_authorized(old_pull_request.org_repo, old_pull_request.other_repo): |
|
304 | if not CreatePullRequestAction.is_user_authorized(old_pull_request.org_repo, old_pull_request.other_repo): | |
305 | return False |
|
305 | return False | |
306 |
|
306 | |||
307 | return True |
|
307 | return True | |
308 |
|
308 | |||
309 | def __init__(self, old_pull_request, new_org_rev, new_other_rev, title, description, owner, reviewers): |
|
309 | def __init__(self, old_pull_request, new_org_rev, new_other_rev, title, description, owner, reviewers): | |
310 | self.old_pull_request = old_pull_request |
|
310 | self.old_pull_request = old_pull_request | |
311 |
|
311 | |||
312 | org_repo = old_pull_request.org_repo |
|
312 | org_repo = old_pull_request.org_repo | |
313 | org_ref_type, org_ref_name, org_rev = old_pull_request.org_ref.split(':') |
|
313 | org_ref_type, org_ref_name, org_rev = old_pull_request.org_ref.split(':') | |
314 |
|
314 | |||
315 | other_repo = old_pull_request.other_repo |
|
315 | other_repo = old_pull_request.other_repo | |
316 | other_ref_type, other_ref_name, other_rev = old_pull_request.other_ref.split(':') # other_rev is ancestor |
|
316 | other_ref_type, other_ref_name, other_rev = old_pull_request.other_ref.split(':') # other_rev is ancestor | |
317 | #assert other_ref_type == 'branch', other_ref_type # TODO: what if not? |
|
317 | #assert other_ref_type == 'branch', other_ref_type # TODO: what if not? | |
318 |
|
318 | |||
319 | new_org_ref = '%s:%s:%s' % (org_ref_type, org_ref_name, new_org_rev) |
|
319 | new_org_ref = '%s:%s:%s' % (org_ref_type, org_ref_name, new_org_rev) | |
320 | new_other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, new_other_rev) |
|
320 | new_other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, new_other_rev) | |
321 |
|
321 | |||
322 | self.create_action = CreatePullRequestAction(org_repo, other_repo, new_org_ref, new_other_ref, None, None, owner, reviewers) |
|
322 | self.create_action = CreatePullRequestAction(org_repo, other_repo, new_org_ref, new_other_ref, None, None, owner, reviewers) | |
323 |
|
323 | |||
324 | # Generate complete title/description |
|
324 | # Generate complete title/description | |
325 |
|
325 | |||
326 | old_revisions = set(old_pull_request.revisions) |
|
326 | old_revisions = set(old_pull_request.revisions) | |
327 | revisions = self.create_action.revisions |
|
327 | revisions = self.create_action.revisions | |
328 | new_revisions = [r for r in revisions if r not in old_revisions] |
|
328 | new_revisions = [r for r in revisions if r not in old_revisions] | |
329 | lost = old_revisions.difference(revisions) |
|
329 | lost = old_revisions.difference(revisions) | |
330 |
|
330 | |||
331 | infos = ['This is a new iteration of %s "%s".' % |
|
331 | infos = ['This is a new iteration of %s "%s".' % | |
332 | (webutils.canonical_url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name, |
|
332 | (webutils.canonical_url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name, | |
333 | pull_request_id=old_pull_request.pull_request_id), |
|
333 | pull_request_id=old_pull_request.pull_request_id), | |
334 | old_pull_request.title)] |
|
334 | old_pull_request.title)] | |
335 |
|
335 | |||
336 | if lost: |
|
336 | if lost: | |
337 | infos.append(_('Missing changesets since the previous iteration:')) |
|
337 | infos.append(_('Missing changesets since the previous iteration:')) | |
338 | for r in old_pull_request.revisions: |
|
338 | for r in old_pull_request.revisions: | |
339 | if r in lost: |
|
339 | if r in lost: | |
340 | rev_desc = org_repo.get_changeset(r).message.split('\n')[0] |
|
340 | rev_desc = org_repo.get_changeset(r).message.split('\n')[0] | |
341 | infos.append(' %s %s' % (r[:12], rev_desc)) |
|
341 | infos.append(' %s %s' % (r[:12], rev_desc)) | |
342 |
|
342 | |||
343 | if new_revisions: |
|
343 | if new_revisions: | |
344 | infos.append(_('New changesets on %s %s since the previous iteration:') % (org_ref_type, org_ref_name)) |
|
344 | infos.append(_('New changesets on %s %s since the previous iteration:') % (org_ref_type, org_ref_name)) | |
345 | for r in reversed(revisions): |
|
345 | for r in reversed(revisions): | |
346 | if r in new_revisions: |
|
346 | if r in new_revisions: | |
347 | rev_desc = org_repo.get_changeset(r).message.split('\n')[0] |
|
347 | rev_desc = org_repo.get_changeset(r).message.split('\n')[0] | |
348 | infos.append(' %s %s' % (r[:12], shorter(rev_desc, 80))) |
|
348 | infos.append(' %s %s' % (r[:12], webutils.shorter(rev_desc, 80))) | |
349 |
|
349 | |||
350 | if self.create_action.other_ref == old_pull_request.other_ref: |
|
350 | if self.create_action.other_ref == old_pull_request.other_ref: | |
351 | infos.append(_("Ancestor didn't change - diff since previous iteration:")) |
|
351 | infos.append(_("Ancestor didn't change - diff since previous iteration:")) | |
352 | infos.append(webutils.canonical_url('compare_url', |
|
352 | infos.append(webutils.canonical_url('compare_url', | |
353 | repo_name=org_repo.repo_name, # other_repo is always same as repo_name |
|
353 | repo_name=org_repo.repo_name, # other_repo is always same as repo_name | |
354 | org_ref_type='rev', org_ref_name=org_rev[:12], # use old org_rev as base |
|
354 | org_ref_type='rev', org_ref_name=org_rev[:12], # use old org_rev as base | |
355 | other_ref_type='rev', other_ref_name=new_org_rev[:12], |
|
355 | other_ref_type='rev', other_ref_name=new_org_rev[:12], | |
356 | )) # note: linear diff, merge or not doesn't matter |
|
356 | )) # note: linear diff, merge or not doesn't matter | |
357 | else: |
|
357 | else: | |
358 | infos.append(_('This iteration is based on another %s revision and there is no simple diff.') % other_ref_name) |
|
358 | infos.append(_('This iteration is based on another %s revision and there is no simple diff.') % other_ref_name) | |
359 | else: |
|
359 | else: | |
360 | infos.append(_('No changes found on %s %s since previous iteration.') % (org_ref_type, org_ref_name)) |
|
360 | infos.append(_('No changes found on %s %s since previous iteration.') % (org_ref_type, org_ref_name)) | |
361 | # TODO: fail? |
|
361 | # TODO: fail? | |
362 |
|
362 | |||
363 | v = 2 |
|
363 | v = 2 | |
364 | m = re.match(r'(.*)\(v(\d+)\)\s*$', title) |
|
364 | m = re.match(r'(.*)\(v(\d+)\)\s*$', title) | |
365 | if m is not None: |
|
365 | if m is not None: | |
366 | title = m.group(1) |
|
366 | title = m.group(1) | |
367 | v = int(m.group(2)) + 1 |
|
367 | v = int(m.group(2)) + 1 | |
368 | self.create_action.title = '%s (v%s)' % (title.strip(), v) |
|
368 | self.create_action.title = '%s (v%s)' % (title.strip(), v) | |
369 |
|
369 | |||
370 | # using a mail-like separator, insert new iteration info in description with latest first |
|
370 | # using a mail-like separator, insert new iteration info in description with latest first | |
371 | descriptions = description.replace('\r\n', '\n').split('\n-- \n', 1) |
|
371 | descriptions = description.replace('\r\n', '\n').split('\n-- \n', 1) | |
372 | description = descriptions[0].strip() + '\n\n-- \n' + '\n'.join(infos) |
|
372 | description = descriptions[0].strip() + '\n\n-- \n' + '\n'.join(infos) | |
373 | if len(descriptions) > 1: |
|
373 | if len(descriptions) > 1: | |
374 | description += '\n\n' + descriptions[1].strip() |
|
374 | description += '\n\n' + descriptions[1].strip() | |
375 | self.create_action.description = description |
|
375 | self.create_action.description = description | |
376 |
|
376 | |||
377 | if not CreatePullRequestIterationAction.is_user_authorized(self.old_pull_request): |
|
377 | if not CreatePullRequestIterationAction.is_user_authorized(self.old_pull_request): | |
378 | raise CreatePullRequestAction.Unauthorized(_('You are not authorized to create the pull request')) |
|
378 | raise CreatePullRequestAction.Unauthorized(_('You are not authorized to create the pull request')) | |
379 |
|
379 | |||
380 | def execute(self): |
|
380 | def execute(self): | |
381 | pull_request = self.create_action.execute() |
|
381 | pull_request = self.create_action.execute() | |
382 |
|
382 | |||
383 | # Close old iteration |
|
383 | # Close old iteration | |
384 | comment.ChangesetCommentsModel().create( |
|
384 | comment.ChangesetCommentsModel().create( | |
385 | text=_('Closed, next iteration: %s .') % pull_request.url(canonical=True), |
|
385 | text=_('Closed, next iteration: %s .') % pull_request.url(canonical=True), | |
386 | repo=self.old_pull_request.other_repo_id, |
|
386 | repo=self.old_pull_request.other_repo_id, | |
387 | author=request.authuser.user_id, |
|
387 | author=request.authuser.user_id, | |
388 | pull_request=self.old_pull_request.pull_request_id, |
|
388 | pull_request=self.old_pull_request.pull_request_id, | |
389 | closing_pr=True) |
|
389 | closing_pr=True) | |
390 | PullRequestModel().close_pull_request(self.old_pull_request.pull_request_id) |
|
390 | PullRequestModel().close_pull_request(self.old_pull_request.pull_request_id) | |
391 | return pull_request |
|
391 | return pull_request |
@@ -1,579 +1,579 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.tests.other.test_libs |
|
15 | kallithea.tests.other.test_libs | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Package for testing various lib/helper functions in kallithea |
|
18 | Package for testing various lib/helper functions in kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Jun 9, 2011 |
|
22 | :created_on: Jun 9, 2011 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import datetime |
|
28 | import datetime | |
29 | import hashlib |
|
29 | import hashlib | |
30 | import re |
|
30 | import re | |
31 |
|
31 | |||
32 | import mock |
|
32 | import mock | |
33 | import routes |
|
33 | import routes | |
34 | from dateutil import relativedelta |
|
34 | from dateutil import relativedelta | |
35 | from tg import request |
|
35 | from tg import request | |
36 | from tg.util.webtest import test_context |
|
36 | from tg.util.webtest import test_context | |
37 |
|
37 | |||
38 | import kallithea.lib.helpers as h |
|
38 | import kallithea.lib.helpers as h | |
39 | from kallithea.lib import webutils |
|
39 | from kallithea.lib import webutils | |
40 | from kallithea.lib.utils2 import AttributeDict, get_clone_url, safe_bytes |
|
40 | from kallithea.lib.utils2 import AttributeDict, get_clone_url, safe_bytes | |
41 | from kallithea.model import db |
|
41 | from kallithea.model import db | |
42 | from kallithea.tests import base |
|
42 | from kallithea.tests import base | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | proto = 'http' |
|
45 | proto = 'http' | |
46 | TEST_URLS = [ |
|
46 | TEST_URLS = [ | |
47 | ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
47 | ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], | |
48 | '%s://127.0.0.1' % proto), |
|
48 | '%s://127.0.0.1' % proto), | |
49 | ('%s://username@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
49 | ('%s://username@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], | |
50 | '%s://127.0.0.1' % proto), |
|
50 | '%s://127.0.0.1' % proto), | |
51 | ('%s://username:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
51 | ('%s://username:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], | |
52 | '%s://127.0.0.1' % proto), |
|
52 | '%s://127.0.0.1' % proto), | |
53 | ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'], |
|
53 | ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'], | |
54 | '%s://127.0.0.1:8080' % proto), |
|
54 | '%s://127.0.0.1:8080' % proto), | |
55 | ('%s://example.com' % proto, ['%s://' % proto, 'example.com'], |
|
55 | ('%s://example.com' % proto, ['%s://' % proto, 'example.com'], | |
56 | '%s://example.com' % proto), |
|
56 | '%s://example.com' % proto), | |
57 | ('%s://user:pass@example.com:8080' % proto, ['%s://' % proto, 'example.com', |
|
57 | ('%s://user:pass@example.com:8080' % proto, ['%s://' % proto, 'example.com', | |
58 | '8080'], |
|
58 | '8080'], | |
59 | '%s://example.com:8080' % proto), |
|
59 | '%s://example.com:8080' % proto), | |
60 | ] |
|
60 | ] | |
61 |
|
61 | |||
62 | proto = 'https' |
|
62 | proto = 'https' | |
63 | TEST_URLS += [ |
|
63 | TEST_URLS += [ | |
64 | ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
64 | ('%s://127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], | |
65 | '%s://127.0.0.1' % proto), |
|
65 | '%s://127.0.0.1' % proto), | |
66 | ('%s://username@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
66 | ('%s://username@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], | |
67 | '%s://127.0.0.1' % proto), |
|
67 | '%s://127.0.0.1' % proto), | |
68 | ('%s://username:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], |
|
68 | ('%s://username:pass@127.0.0.1' % proto, ['%s://' % proto, '127.0.0.1'], | |
69 | '%s://127.0.0.1' % proto), |
|
69 | '%s://127.0.0.1' % proto), | |
70 | ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'], |
|
70 | ('%s://127.0.0.1:8080' % proto, ['%s://' % proto, '127.0.0.1', '8080'], | |
71 | '%s://127.0.0.1:8080' % proto), |
|
71 | '%s://127.0.0.1:8080' % proto), | |
72 | ('%s://example.com' % proto, ['%s://' % proto, 'example.com'], |
|
72 | ('%s://example.com' % proto, ['%s://' % proto, 'example.com'], | |
73 | '%s://example.com' % proto), |
|
73 | '%s://example.com' % proto), | |
74 | ('%s://user:pass@example.com:8080' % proto, ['%s://' % proto, 'example.com', |
|
74 | ('%s://user:pass@example.com:8080' % proto, ['%s://' % proto, 'example.com', | |
75 | '8080'], |
|
75 | '8080'], | |
76 | '%s://example.com:8080' % proto), |
|
76 | '%s://example.com:8080' % proto), | |
77 | ] |
|
77 | ] | |
78 |
|
78 | |||
79 |
|
79 | |||
80 | class TestLibs(base.TestController): |
|
80 | class TestLibs(base.TestController): | |
81 |
|
81 | |||
82 | @base.parametrize('test_url,expected,expected_creds', TEST_URLS) |
|
82 | @base.parametrize('test_url,expected,expected_creds', TEST_URLS) | |
83 | def test_uri_filter(self, test_url, expected, expected_creds): |
|
83 | def test_uri_filter(self, test_url, expected, expected_creds): | |
84 | from kallithea.lib.utils2 import uri_filter |
|
84 | from kallithea.lib.utils2 import uri_filter | |
85 | assert uri_filter(test_url) == expected |
|
85 | assert uri_filter(test_url) == expected | |
86 |
|
86 | |||
87 | @base.parametrize('test_url,expected,expected_creds', TEST_URLS) |
|
87 | @base.parametrize('test_url,expected,expected_creds', TEST_URLS) | |
88 | def test_credentials_filter(self, test_url, expected, expected_creds): |
|
88 | def test_credentials_filter(self, test_url, expected, expected_creds): | |
89 | from kallithea.lib.utils2 import credentials_filter |
|
89 | from kallithea.lib.utils2 import credentials_filter | |
90 | assert credentials_filter(test_url) == expected_creds |
|
90 | assert credentials_filter(test_url) == expected_creds | |
91 |
|
91 | |||
92 | @base.parametrize('str_bool,expected', [ |
|
92 | @base.parametrize('str_bool,expected', [ | |
93 | ('t', True), |
|
93 | ('t', True), | |
94 | ('true', True), |
|
94 | ('true', True), | |
95 | ('y', True), |
|
95 | ('y', True), | |
96 | ('yes', True), |
|
96 | ('yes', True), | |
97 | ('on', True), |
|
97 | ('on', True), | |
98 | ('1', True), |
|
98 | ('1', True), | |
99 | ('Y', True), |
|
99 | ('Y', True), | |
100 | ('yeS', True), |
|
100 | ('yeS', True), | |
101 | ('Y', True), |
|
101 | ('Y', True), | |
102 | ('TRUE', True), |
|
102 | ('TRUE', True), | |
103 | ('T', True), |
|
103 | ('T', True), | |
104 | ('False', False), |
|
104 | ('False', False), | |
105 | ('F', False), |
|
105 | ('F', False), | |
106 | ('FALSE', False), |
|
106 | ('FALSE', False), | |
107 | ('0', False), |
|
107 | ('0', False), | |
108 | ]) |
|
108 | ]) | |
109 | def test_asbool(self, str_bool, expected): |
|
109 | def test_asbool(self, str_bool, expected): | |
110 | from kallithea.lib.utils2 import asbool |
|
110 | from kallithea.lib.utils2 import asbool | |
111 | assert asbool(str_bool) == expected |
|
111 | assert asbool(str_bool) == expected | |
112 |
|
112 | |||
113 | def test_mention_extractor(self): |
|
113 | def test_mention_extractor(self): | |
114 | sample = ( |
|
114 | sample = ( | |
115 | "@first hi there @world here's my email username@example.com " |
|
115 | "@first hi there @world here's my email username@example.com " | |
116 | "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three " |
|
116 | "@lukaszb check @one_more22 it pls @ ttwelve @D[] @one@two@three " | |
117 | "@UPPER @cAmEL @2one_more22 @john please see this http://org.pl " |
|
117 | "@UPPER @cAmEL @2one_more22 @john please see this http://org.pl " | |
118 | "@marian.user just do it @marco-polo and next extract @marco_polo " |
|
118 | "@marian.user just do it @marco-polo and next extract @marco_polo " | |
119 | "user.dot hej ! not-needed maril@example.com" |
|
119 | "user.dot hej ! not-needed maril@example.com" | |
120 | ) |
|
120 | ) | |
121 |
|
121 | |||
122 | expected = set([ |
|
122 | expected = set([ | |
123 | '2one_more22', 'first', 'lukaszb', 'one', 'one_more22', 'UPPER', 'cAmEL', 'john', |
|
123 | '2one_more22', 'first', 'lukaszb', 'one', 'one_more22', 'UPPER', 'cAmEL', 'john', | |
124 | 'marian.user', 'marco-polo', 'marco_polo', 'world']) |
|
124 | 'marian.user', 'marco-polo', 'marco_polo', 'world']) | |
125 | assert expected == set(webutils.extract_mentioned_usernames(sample)) |
|
125 | assert expected == set(webutils.extract_mentioned_usernames(sample)) | |
126 |
|
126 | |||
127 | @base.parametrize('age_args,expected', [ |
|
127 | @base.parametrize('age_args,expected', [ | |
128 | (dict(), 'just now'), |
|
128 | (dict(), 'just now'), | |
129 | (dict(seconds= -1), '1 second ago'), |
|
129 | (dict(seconds= -1), '1 second ago'), | |
130 | (dict(seconds= -60 * 2), '2 minutes ago'), |
|
130 | (dict(seconds= -60 * 2), '2 minutes ago'), | |
131 | (dict(hours= -1), '1 hour ago'), |
|
131 | (dict(hours= -1), '1 hour ago'), | |
132 | (dict(hours= -24), '1 day ago'), |
|
132 | (dict(hours= -24), '1 day ago'), | |
133 | (dict(hours= -24 * 5), '5 days ago'), |
|
133 | (dict(hours= -24 * 5), '5 days ago'), | |
134 | (dict(months= -1), '1 month ago'), |
|
134 | (dict(months= -1), '1 month ago'), | |
135 | (dict(months= -1, days= -2), '1 month and 2 days ago'), |
|
135 | (dict(months= -1, days= -2), '1 month and 2 days ago'), | |
136 | (dict(months= -1, days= -20), '1 month and 19 days ago'), |
|
136 | (dict(months= -1, days= -20), '1 month and 19 days ago'), | |
137 | (dict(years= -1, months= -1), '1 year and 1 month ago'), |
|
137 | (dict(years= -1, months= -1), '1 year and 1 month ago'), | |
138 | (dict(years= -1, months= -10), '1 year and 10 months ago'), |
|
138 | (dict(years= -1, months= -10), '1 year and 10 months ago'), | |
139 | (dict(years= -2, months= -4), '2 years and 4 months ago'), |
|
139 | (dict(years= -2, months= -4), '2 years and 4 months ago'), | |
140 | (dict(years= -2, months= -11), '2 years and 11 months ago'), |
|
140 | (dict(years= -2, months= -11), '2 years and 11 months ago'), | |
141 | (dict(years= -3, months= -2), '3 years and 2 months ago'), |
|
141 | (dict(years= -3, months= -2), '3 years and 2 months ago'), | |
142 | ]) |
|
142 | ]) | |
143 | def test_age(self, age_args, expected): |
|
143 | def test_age(self, age_args, expected): | |
144 |
from kallithea.lib.utils |
|
144 | from kallithea.lib.webutils import age | |
145 | with test_context(self.app): |
|
145 | with test_context(self.app): | |
146 | n = datetime.datetime(year=2012, month=5, day=17) |
|
146 | n = datetime.datetime(year=2012, month=5, day=17) | |
147 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) |
|
147 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) | |
148 | assert age(n + delt(**age_args), now=n) == expected |
|
148 | assert age(n + delt(**age_args), now=n) == expected | |
149 |
|
149 | |||
150 | @base.parametrize('age_args,expected', [ |
|
150 | @base.parametrize('age_args,expected', [ | |
151 | (dict(), 'just now'), |
|
151 | (dict(), 'just now'), | |
152 | (dict(seconds= -1), '1 second ago'), |
|
152 | (dict(seconds= -1), '1 second ago'), | |
153 | (dict(seconds= -60 * 2), '2 minutes ago'), |
|
153 | (dict(seconds= -60 * 2), '2 minutes ago'), | |
154 | (dict(hours= -1), '1 hour ago'), |
|
154 | (dict(hours= -1), '1 hour ago'), | |
155 | (dict(hours= -24), '1 day ago'), |
|
155 | (dict(hours= -24), '1 day ago'), | |
156 | (dict(hours= -24 * 5), '5 days ago'), |
|
156 | (dict(hours= -24 * 5), '5 days ago'), | |
157 | (dict(months= -1), '1 month ago'), |
|
157 | (dict(months= -1), '1 month ago'), | |
158 | (dict(months= -1, days= -2), '1 month ago'), |
|
158 | (dict(months= -1, days= -2), '1 month ago'), | |
159 | (dict(months= -1, days= -20), '1 month ago'), |
|
159 | (dict(months= -1, days= -20), '1 month ago'), | |
160 | (dict(years= -1, months= -1), '13 months ago'), |
|
160 | (dict(years= -1, months= -1), '13 months ago'), | |
161 | (dict(years= -1, months= -10), '22 months ago'), |
|
161 | (dict(years= -1, months= -10), '22 months ago'), | |
162 | (dict(years= -2, months= -4), '2 years ago'), |
|
162 | (dict(years= -2, months= -4), '2 years ago'), | |
163 | (dict(years= -2, months= -11), '3 years ago'), |
|
163 | (dict(years= -2, months= -11), '3 years ago'), | |
164 | (dict(years= -3, months= -2), '3 years ago'), |
|
164 | (dict(years= -3, months= -2), '3 years ago'), | |
165 | (dict(years= -4, months= -8), '5 years ago'), |
|
165 | (dict(years= -4, months= -8), '5 years ago'), | |
166 | ]) |
|
166 | ]) | |
167 | def test_age_short(self, age_args, expected): |
|
167 | def test_age_short(self, age_args, expected): | |
168 |
from kallithea.lib.utils |
|
168 | from kallithea.lib.webutils import age | |
169 | with test_context(self.app): |
|
169 | with test_context(self.app): | |
170 | n = datetime.datetime(year=2012, month=5, day=17) |
|
170 | n = datetime.datetime(year=2012, month=5, day=17) | |
171 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) |
|
171 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) | |
172 | assert age(n + delt(**age_args), show_short_version=True, now=n) == expected |
|
172 | assert age(n + delt(**age_args), show_short_version=True, now=n) == expected | |
173 |
|
173 | |||
174 | @base.parametrize('age_args,expected', [ |
|
174 | @base.parametrize('age_args,expected', [ | |
175 | (dict(), 'just now'), |
|
175 | (dict(), 'just now'), | |
176 | (dict(seconds=1), 'in 1 second'), |
|
176 | (dict(seconds=1), 'in 1 second'), | |
177 | (dict(seconds=60 * 2), 'in 2 minutes'), |
|
177 | (dict(seconds=60 * 2), 'in 2 minutes'), | |
178 | (dict(hours=1), 'in 1 hour'), |
|
178 | (dict(hours=1), 'in 1 hour'), | |
179 | (dict(hours=24), 'in 1 day'), |
|
179 | (dict(hours=24), 'in 1 day'), | |
180 | (dict(hours=24 * 5), 'in 5 days'), |
|
180 | (dict(hours=24 * 5), 'in 5 days'), | |
181 | (dict(months=1), 'in 1 month'), |
|
181 | (dict(months=1), 'in 1 month'), | |
182 | (dict(months=1, days=1), 'in 1 month and 1 day'), |
|
182 | (dict(months=1, days=1), 'in 1 month and 1 day'), | |
183 | (dict(years=1, months=1), 'in 1 year and 1 month') |
|
183 | (dict(years=1, months=1), 'in 1 year and 1 month') | |
184 | ]) |
|
184 | ]) | |
185 | def test_age_in_future(self, age_args, expected): |
|
185 | def test_age_in_future(self, age_args, expected): | |
186 |
from kallithea.lib.utils |
|
186 | from kallithea.lib.webutils import age | |
187 | with test_context(self.app): |
|
187 | with test_context(self.app): | |
188 | n = datetime.datetime(year=2012, month=5, day=17) |
|
188 | n = datetime.datetime(year=2012, month=5, day=17) | |
189 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) |
|
189 | delt = lambda *args, **kwargs: relativedelta.relativedelta(*args, **kwargs) | |
190 | assert age(n + delt(**age_args), now=n) == expected |
|
190 | assert age(n + delt(**age_args), now=n) == expected | |
191 |
|
191 | |||
192 | def test_tag_extractor(self): |
|
192 | def test_tag_extractor(self): | |
193 | sample = ( |
|
193 | sample = ( | |
194 | "hello pta[tag] gog [[]] [[] sda ero[or]d [me =>>< sa]" |
|
194 | "hello pta[tag] gog [[]] [[] sda ero[or]d [me =>>< sa]" | |
195 | "[requires] [stale] [see<>=>] [see => http://example.com]" |
|
195 | "[requires] [stale] [see<>=>] [see => http://example.com]" | |
196 | "[requires => url] [lang => python] [just a tag]" |
|
196 | "[requires => url] [lang => python] [just a tag]" | |
197 | "[,d] [ => ULR ] [obsolete] [desc]]" |
|
197 | "[,d] [ => ULR ] [obsolete] [desc]]" | |
198 | ) |
|
198 | ) | |
199 | res = webutils.urlify_text(sample, stylize=True) |
|
199 | res = webutils.urlify_text(sample, stylize=True) | |
200 | assert '<div class="label label-meta" data-tag="tag">tag</div>' in res |
|
200 | assert '<div class="label label-meta" data-tag="tag">tag</div>' in res | |
201 | assert '<div class="label label-meta" data-tag="obsolete">obsolete</div>' in res |
|
201 | assert '<div class="label label-meta" data-tag="obsolete">obsolete</div>' in res | |
202 | assert '<div class="label label-meta" data-tag="stale">stale</div>' in res |
|
202 | assert '<div class="label label-meta" data-tag="stale">stale</div>' in res | |
203 | assert '<div class="label label-meta" data-tag="lang">python</div>' in res |
|
203 | assert '<div class="label label-meta" data-tag="lang">python</div>' in res | |
204 | assert '<div class="label label-meta" data-tag="requires">requires => <a href="/url">url</a></div>' in res |
|
204 | assert '<div class="label label-meta" data-tag="requires">requires => <a href="/url">url</a></div>' in res | |
205 | assert '<div class="label label-meta" data-tag="tag">tag</div>' in res |
|
205 | assert '<div class="label label-meta" data-tag="tag">tag</div>' in res | |
206 |
|
206 | |||
207 | def test_alternative_gravatar(self): |
|
207 | def test_alternative_gravatar(self): | |
208 | _md5 = lambda s: hashlib.md5(safe_bytes(s)).hexdigest() |
|
208 | _md5 = lambda s: hashlib.md5(safe_bytes(s)).hexdigest() | |
209 |
|
209 | |||
210 | # mock tg.tmpl_context |
|
210 | # mock tg.tmpl_context | |
211 | def fake_tmpl_context(_url): |
|
211 | def fake_tmpl_context(_url): | |
212 | _c = AttributeDict() |
|
212 | _c = AttributeDict() | |
213 | _c.visual = AttributeDict() |
|
213 | _c.visual = AttributeDict() | |
214 | _c.visual.use_gravatar = True |
|
214 | _c.visual.use_gravatar = True | |
215 | _c.visual.gravatar_url = _url |
|
215 | _c.visual.gravatar_url = _url | |
216 |
|
216 | |||
217 | return _c |
|
217 | return _c | |
218 |
|
218 | |||
219 | with mock.patch('kallithea.lib.webutils.url.current', lambda *a, **b: 'https://example.com'): |
|
219 | with mock.patch('kallithea.lib.webutils.url.current', lambda *a, **b: 'https://example.com'): | |
220 | fake = fake_tmpl_context(_url='http://example.com/{email}') |
|
220 | fake = fake_tmpl_context(_url='http://example.com/{email}') | |
221 | with mock.patch('kallithea.lib.helpers.c', fake): |
|
221 | with mock.patch('kallithea.lib.helpers.c', fake): | |
222 | assert webutils.url.current() == 'https://example.com' |
|
222 | assert webutils.url.current() == 'https://example.com' | |
223 | grav = h.gravatar_url(email_address='test@example.com', size=24) |
|
223 | grav = h.gravatar_url(email_address='test@example.com', size=24) | |
224 | assert grav == 'http://example.com/test@example.com' |
|
224 | assert grav == 'http://example.com/test@example.com' | |
225 |
|
225 | |||
226 | fake = fake_tmpl_context(_url='http://example.com/{email}') |
|
226 | fake = fake_tmpl_context(_url='http://example.com/{email}') | |
227 | with mock.patch('kallithea.lib.helpers.c', fake): |
|
227 | with mock.patch('kallithea.lib.helpers.c', fake): | |
228 | grav = h.gravatar_url(email_address='test@example.com', size=24) |
|
228 | grav = h.gravatar_url(email_address='test@example.com', size=24) | |
229 | assert grav == 'http://example.com/test@example.com' |
|
229 | assert grav == 'http://example.com/test@example.com' | |
230 |
|
230 | |||
231 | fake = fake_tmpl_context(_url='http://example.com/{md5email}') |
|
231 | fake = fake_tmpl_context(_url='http://example.com/{md5email}') | |
232 | with mock.patch('kallithea.lib.helpers.c', fake): |
|
232 | with mock.patch('kallithea.lib.helpers.c', fake): | |
233 | em = 'test@example.com' |
|
233 | em = 'test@example.com' | |
234 | grav = h.gravatar_url(email_address=em, size=24) |
|
234 | grav = h.gravatar_url(email_address=em, size=24) | |
235 | assert grav == 'http://example.com/%s' % (_md5(em)) |
|
235 | assert grav == 'http://example.com/%s' % (_md5(em)) | |
236 |
|
236 | |||
237 | fake = fake_tmpl_context(_url='http://example.com/{md5email}/{size}') |
|
237 | fake = fake_tmpl_context(_url='http://example.com/{md5email}/{size}') | |
238 | with mock.patch('kallithea.lib.helpers.c', fake): |
|
238 | with mock.patch('kallithea.lib.helpers.c', fake): | |
239 | em = 'test@example.com' |
|
239 | em = 'test@example.com' | |
240 | grav = h.gravatar_url(email_address=em, size=24) |
|
240 | grav = h.gravatar_url(email_address=em, size=24) | |
241 | assert grav == 'http://example.com/%s/%s' % (_md5(em), 24) |
|
241 | assert grav == 'http://example.com/%s/%s' % (_md5(em), 24) | |
242 |
|
242 | |||
243 | fake = fake_tmpl_context(_url='{scheme}://{netloc}/{md5email}/{size}') |
|
243 | fake = fake_tmpl_context(_url='{scheme}://{netloc}/{md5email}/{size}') | |
244 | with mock.patch('kallithea.lib.helpers.c', fake): |
|
244 | with mock.patch('kallithea.lib.helpers.c', fake): | |
245 | em = 'test@example.com' |
|
245 | em = 'test@example.com' | |
246 | grav = h.gravatar_url(email_address=em, size=24) |
|
246 | grav = h.gravatar_url(email_address=em, size=24) | |
247 | assert grav == 'https://example.com/%s/%s' % (_md5(em), 24) |
|
247 | assert grav == 'https://example.com/%s/%s' % (_md5(em), 24) | |
248 |
|
248 | |||
249 | @base.parametrize('clone_uri_tmpl,repo_name,username,prefix,expected', [ |
|
249 | @base.parametrize('clone_uri_tmpl,repo_name,username,prefix,expected', [ | |
250 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', None, '', 'http://vps1:8000/group/repo1'), |
|
250 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', None, '', 'http://vps1:8000/group/repo1'), | |
251 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '', 'http://username@vps1:8000/group/repo1'), |
|
251 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '', 'http://username@vps1:8000/group/repo1'), | |
252 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', None, '/prefix', 'http://vps1:8000/prefix/group/repo1'), |
|
252 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', None, '/prefix', 'http://vps1:8000/prefix/group/repo1'), | |
253 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'user', '/prefix', 'http://user@vps1:8000/prefix/group/repo1'), |
|
253 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'user', '/prefix', 'http://user@vps1:8000/prefix/group/repo1'), | |
254 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '/prefix', 'http://username@vps1:8000/prefix/group/repo1'), |
|
254 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '/prefix', 'http://username@vps1:8000/prefix/group/repo1'), | |
255 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'user', '/prefix/', 'http://user@vps1:8000/prefix/group/repo1'), |
|
255 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'user', '/prefix/', 'http://user@vps1:8000/prefix/group/repo1'), | |
256 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '/prefix/', 'http://username@vps1:8000/prefix/group/repo1'), |
|
256 | (db.Repository.DEFAULT_CLONE_URI, 'group/repo1', 'username', '/prefix/', 'http://username@vps1:8000/prefix/group/repo1'), | |
257 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', None, '', 'http://vps1:8000/_23'), |
|
257 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', None, '', 'http://vps1:8000/_23'), | |
258 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', 'username', '', 'http://username@vps1:8000/_23'), |
|
258 | ('{scheme}://{user}@{netloc}/_{repoid}', 'group/repo1', 'username', '', 'http://username@vps1:8000/_23'), | |
259 | ('http://{user}@{netloc}/_{repoid}', 'group/repo1', 'username', '', 'http://username@vps1:8000/_23'), |
|
259 | ('http://{user}@{netloc}/_{repoid}', 'group/repo1', 'username', '', 'http://username@vps1:8000/_23'), | |
260 | ('http://{netloc}/_{repoid}', 'group/repo1', 'username', '', 'http://vps1:8000/_23'), |
|
260 | ('http://{netloc}/_{repoid}', 'group/repo1', 'username', '', 'http://vps1:8000/_23'), | |
261 | ('https://{user}@proxy1.example.com/{repo}', 'group/repo1', 'username', '', 'https://username@proxy1.example.com/group/repo1'), |
|
261 | ('https://{user}@proxy1.example.com/{repo}', 'group/repo1', 'username', '', 'https://username@proxy1.example.com/group/repo1'), | |
262 | ('https://{user}@proxy1.example.com/{repo}', 'group/repo1', None, '', 'https://proxy1.example.com/group/repo1'), |
|
262 | ('https://{user}@proxy1.example.com/{repo}', 'group/repo1', None, '', 'https://proxy1.example.com/group/repo1'), | |
263 | ('https://proxy1.example.com/{user}/{repo}', 'group/repo1', 'username', '', 'https://proxy1.example.com/username/group/repo1'), |
|
263 | ('https://proxy1.example.com/{user}/{repo}', 'group/repo1', 'username', '', 'https://proxy1.example.com/username/group/repo1'), | |
264 | ]) |
|
264 | ]) | |
265 | def test_clone_url_generator(self, clone_uri_tmpl, repo_name, username, prefix, expected): |
|
265 | def test_clone_url_generator(self, clone_uri_tmpl, repo_name, username, prefix, expected): | |
266 | clone_url = get_clone_url(clone_uri_tmpl=clone_uri_tmpl, prefix_url='http://vps1:8000' + prefix, |
|
266 | clone_url = get_clone_url(clone_uri_tmpl=clone_uri_tmpl, prefix_url='http://vps1:8000' + prefix, | |
267 | repo_name=repo_name, repo_id=23, username=username) |
|
267 | repo_name=repo_name, repo_id=23, username=username) | |
268 | assert clone_url == expected |
|
268 | assert clone_url == expected | |
269 |
|
269 | |||
270 | def _quick_url(self, text, tmpl="""<a class="changeset_hash" href="%s">%s</a>""", url_=None): |
|
270 | def _quick_url(self, text, tmpl="""<a class="changeset_hash" href="%s">%s</a>""", url_=None): | |
271 | """ |
|
271 | """ | |
272 | Changes `some text url[foo]` => `some text <a href="/">foo</a> |
|
272 | Changes `some text url[foo]` => `some text <a href="/">foo</a> | |
273 |
|
273 | |||
274 | :param text: |
|
274 | :param text: | |
275 | """ |
|
275 | """ | |
276 | # quickly change expected url[] into a link |
|
276 | # quickly change expected url[] into a link | |
277 | url_pattern = re.compile(r'(?:url\[)(.+?)(?:\])') |
|
277 | url_pattern = re.compile(r'(?:url\[)(.+?)(?:\])') | |
278 |
|
278 | |||
279 | def url_func(match_obj): |
|
279 | def url_func(match_obj): | |
280 | _url = match_obj.groups()[0] |
|
280 | _url = match_obj.groups()[0] | |
281 | return tmpl % (url_ or '/repo_name/changeset/%s' % _url, _url) |
|
281 | return tmpl % (url_ or '/repo_name/changeset/%s' % _url, _url) | |
282 | return url_pattern.sub(url_func, text) |
|
282 | return url_pattern.sub(url_func, text) | |
283 |
|
283 | |||
284 | @base.parametrize('sample,expected', [ |
|
284 | @base.parametrize('sample,expected', [ | |
285 | ("", |
|
285 | ("", | |
286 | ""), |
|
286 | ""), | |
287 | ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68", |
|
287 | ("git-svn-id: https://svn.apache.org/repos/asf/libcloud/trunk@1441655 13f79535-47bb-0310-9956-ffa450edef68", | |
288 | """git-svn-id: <a href="https://svn.apache.org/repos/asf/libcloud/trunk@1441655">https://svn.apache.org/repos/asf/libcloud/trunk@1441655</a> 13f79535-47bb-0310-9956-ffa450edef68"""), |
|
288 | """git-svn-id: <a href="https://svn.apache.org/repos/asf/libcloud/trunk@1441655">https://svn.apache.org/repos/asf/libcloud/trunk@1441655</a> 13f79535-47bb-0310-9956-ffa450edef68"""), | |
289 | ("from rev 000000000000", |
|
289 | ("from rev 000000000000", | |
290 | """from rev url[000000000000]"""), |
|
290 | """from rev url[000000000000]"""), | |
291 | ("from rev 000000000000123123 also rev 000000000000", |
|
291 | ("from rev 000000000000123123 also rev 000000000000", | |
292 | """from rev url[000000000000123123] also rev url[000000000000]"""), |
|
292 | """from rev url[000000000000123123] also rev url[000000000000]"""), | |
293 | ("this should-000 00", |
|
293 | ("this should-000 00", | |
294 | """this should-000 00"""), |
|
294 | """this should-000 00"""), | |
295 | ("longtextffffffffff rev 123123123123", |
|
295 | ("longtextffffffffff rev 123123123123", | |
296 | """longtextffffffffff rev url[123123123123]"""), |
|
296 | """longtextffffffffff rev url[123123123123]"""), | |
297 | ("rev ffffffffffffffffffffffffffffffffffffffffffffffffff", |
|
297 | ("rev ffffffffffffffffffffffffffffffffffffffffffffffffff", | |
298 | """rev ffffffffffffffffffffffffffffffffffffffffffffffffff"""), |
|
298 | """rev ffffffffffffffffffffffffffffffffffffffffffffffffff"""), | |
299 | ("ffffffffffff some text traalaa", |
|
299 | ("ffffffffffff some text traalaa", | |
300 | """url[ffffffffffff] some text traalaa"""), |
|
300 | """url[ffffffffffff] some text traalaa"""), | |
301 | ("""Multi line |
|
301 | ("""Multi line | |
302 | 123123123123 |
|
302 | 123123123123 | |
303 | some text 123123123123 |
|
303 | some text 123123123123 | |
304 | sometimes ! |
|
304 | sometimes ! | |
305 | """, |
|
305 | """, | |
306 | """Multi line<br/>""" |
|
306 | """Multi line<br/>""" | |
307 | """ url[123123123123]<br/>""" |
|
307 | """ url[123123123123]<br/>""" | |
308 | """ some text url[123123123123]<br/>""" |
|
308 | """ some text url[123123123123]<br/>""" | |
309 | """ sometimes !"""), |
|
309 | """ sometimes !"""), | |
310 | ]) |
|
310 | ]) | |
311 | def test_urlify_text(self, sample, expected): |
|
311 | def test_urlify_text(self, sample, expected): | |
312 | expected = self._quick_url(expected) |
|
312 | expected = self._quick_url(expected) | |
313 |
|
313 | |||
314 | with mock.patch('kallithea.lib.webutils.UrlGenerator.__call__', |
|
314 | with mock.patch('kallithea.lib.webutils.UrlGenerator.__call__', | |
315 | lambda self, name, **kwargs: dict(changeset_home='/%(repo_name)s/changeset/%(revision)s')[name] % kwargs, |
|
315 | lambda self, name, **kwargs: dict(changeset_home='/%(repo_name)s/changeset/%(revision)s')[name] % kwargs, | |
316 | ): |
|
316 | ): | |
317 | assert webutils.urlify_text(sample, 'repo_name') == expected |
|
317 | assert webutils.urlify_text(sample, 'repo_name') == expected | |
318 |
|
318 | |||
319 | @base.parametrize('sample,expected,url_', [ |
|
319 | @base.parametrize('sample,expected,url_', [ | |
320 | ("", |
|
320 | ("", | |
321 | "", |
|
321 | "", | |
322 | ""), |
|
322 | ""), | |
323 | ("https://svn.apache.org/repos", |
|
323 | ("https://svn.apache.org/repos", | |
324 | """url[https://svn.apache.org/repos]""", |
|
324 | """url[https://svn.apache.org/repos]""", | |
325 | "https://svn.apache.org/repos"), |
|
325 | "https://svn.apache.org/repos"), | |
326 | ("http://svn.apache.org/repos", |
|
326 | ("http://svn.apache.org/repos", | |
327 | """url[http://svn.apache.org/repos]""", |
|
327 | """url[http://svn.apache.org/repos]""", | |
328 | "http://svn.apache.org/repos"), |
|
328 | "http://svn.apache.org/repos"), | |
329 | ("from rev a also rev http://google.com", |
|
329 | ("from rev a also rev http://google.com", | |
330 | """from rev a also rev url[http://google.com]""", |
|
330 | """from rev a also rev url[http://google.com]""", | |
331 | "http://google.com"), |
|
331 | "http://google.com"), | |
332 | ("http://imgur.com/foo.gif inline http://imgur.com/foo.gif ending http://imgur.com/foo.gif", |
|
332 | ("http://imgur.com/foo.gif inline http://imgur.com/foo.gif ending http://imgur.com/foo.gif", | |
333 | """url[http://imgur.com/foo.gif] inline url[http://imgur.com/foo.gif] ending url[http://imgur.com/foo.gif]""", |
|
333 | """url[http://imgur.com/foo.gif] inline url[http://imgur.com/foo.gif] ending url[http://imgur.com/foo.gif]""", | |
334 | "http://imgur.com/foo.gif"), |
|
334 | "http://imgur.com/foo.gif"), | |
335 | ("""Multi line |
|
335 | ("""Multi line | |
336 | https://foo.bar.example.com |
|
336 | https://foo.bar.example.com | |
337 | some text lalala""", |
|
337 | some text lalala""", | |
338 | """Multi line<br/>""" |
|
338 | """Multi line<br/>""" | |
339 | """ url[https://foo.bar.example.com]<br/>""" |
|
339 | """ url[https://foo.bar.example.com]<br/>""" | |
340 | """ some text lalala""", |
|
340 | """ some text lalala""", | |
341 | "https://foo.bar.example.com"), |
|
341 | "https://foo.bar.example.com"), | |
342 | ("@mention @someone", |
|
342 | ("@mention @someone", | |
343 | """<b>@mention</b> <b>@someone</b>""", |
|
343 | """<b>@mention</b> <b>@someone</b>""", | |
344 | ""), |
|
344 | ""), | |
345 | ("deadbeefcafe 123412341234", |
|
345 | ("deadbeefcafe 123412341234", | |
346 | """<a class="changeset_hash" href="/repo_name/changeset/deadbeefcafe">deadbeefcafe</a> <a class="changeset_hash" href="/repo_name/changeset/123412341234">123412341234</a>""", |
|
346 | """<a class="changeset_hash" href="/repo_name/changeset/deadbeefcafe">deadbeefcafe</a> <a class="changeset_hash" href="/repo_name/changeset/123412341234">123412341234</a>""", | |
347 | ""), |
|
347 | ""), | |
348 | ("We support * markup for *bold* markup of *single or multiple* words, " |
|
348 | ("We support * markup for *bold* markup of *single or multiple* words, " | |
349 | "*a bit @like http://slack.com*. " |
|
349 | "*a bit @like http://slack.com*. " | |
350 | "The first * must come after whitespace and not be followed by whitespace, " |
|
350 | "The first * must come after whitespace and not be followed by whitespace, " | |
351 | "contain anything but * and newline until the next *, " |
|
351 | "contain anything but * and newline until the next *, " | |
352 | "which must not come after whitespace " |
|
352 | "which must not come after whitespace " | |
353 | "and not be followed by * or alphanumerical *characters*.", |
|
353 | "and not be followed by * or alphanumerical *characters*.", | |
354 | """We support * markup for <b>*bold*</b> markup of <b>*single or multiple*</b> words, """ |
|
354 | """We support * markup for <b>*bold*</b> markup of <b>*single or multiple*</b> words, """ | |
355 | """<b>*a bit <b>@like</b> <a href="http://slack.com">http://slack.com</a>*</b>. """ |
|
355 | """<b>*a bit <b>@like</b> <a href="http://slack.com">http://slack.com</a>*</b>. """ | |
356 | """The first * must come after whitespace and not be followed by whitespace, """ |
|
356 | """The first * must come after whitespace and not be followed by whitespace, """ | |
357 | """contain anything but * and newline until the next *, """ |
|
357 | """contain anything but * and newline until the next *, """ | |
358 | """which must not come after whitespace """ |
|
358 | """which must not come after whitespace """ | |
359 | """and not be followed by * or alphanumerical <b>*characters*</b>.""", |
|
359 | """and not be followed by * or alphanumerical <b>*characters*</b>.""", | |
360 | "-"), |
|
360 | "-"), | |
361 | ("HTML escaping: <abc> 'single' \"double\" &pointer", |
|
361 | ("HTML escaping: <abc> 'single' \"double\" &pointer", | |
362 | "HTML escaping: <abc> 'single' "double" &pointer", |
|
362 | "HTML escaping: <abc> 'single' "double" &pointer", | |
363 | "-"), |
|
363 | "-"), | |
364 | # tags are covered by test_tag_extractor |
|
364 | # tags are covered by test_tag_extractor | |
365 | ]) |
|
365 | ]) | |
366 | def test_urlify_test(self, sample, expected, url_): |
|
366 | def test_urlify_test(self, sample, expected, url_): | |
367 | expected = self._quick_url(expected, |
|
367 | expected = self._quick_url(expected, | |
368 | tmpl="""<a href="%s">%s</a>""", url_=url_) |
|
368 | tmpl="""<a href="%s">%s</a>""", url_=url_) | |
369 | with mock.patch('kallithea.lib.webutils.UrlGenerator.__call__', |
|
369 | with mock.patch('kallithea.lib.webutils.UrlGenerator.__call__', | |
370 | lambda self, name, **kwargs: dict(changeset_home='/%(repo_name)s/changeset/%(revision)s')[name] % kwargs, |
|
370 | lambda self, name, **kwargs: dict(changeset_home='/%(repo_name)s/changeset/%(revision)s')[name] % kwargs, | |
371 | ): |
|
371 | ): | |
372 | assert webutils.urlify_text(sample, 'repo_name', stylize=True) == expected |
|
372 | assert webutils.urlify_text(sample, 'repo_name', stylize=True) == expected | |
373 |
|
373 | |||
374 | @base.parametrize('sample,expected', [ |
|
374 | @base.parametrize('sample,expected', [ | |
375 | ("deadbeefcafe @mention, and http://foo.bar/ yo", |
|
375 | ("deadbeefcafe @mention, and http://foo.bar/ yo", | |
376 | """<a class="changeset_hash" href="/repo_name/changeset/deadbeefcafe">deadbeefcafe</a>""" |
|
376 | """<a class="changeset_hash" href="/repo_name/changeset/deadbeefcafe">deadbeefcafe</a>""" | |
377 | """<a class="message-link" href="#the-link"> <b>@mention</b>, and </a>""" |
|
377 | """<a class="message-link" href="#the-link"> <b>@mention</b>, and </a>""" | |
378 | """<a href="http://foo.bar/">http://foo.bar/</a>""" |
|
378 | """<a href="http://foo.bar/">http://foo.bar/</a>""" | |
379 | """<a class="message-link" href="#the-link"> yo</a>"""), |
|
379 | """<a class="message-link" href="#the-link"> yo</a>"""), | |
380 | ]) |
|
380 | ]) | |
381 | def test_urlify_link(self, sample, expected): |
|
381 | def test_urlify_link(self, sample, expected): | |
382 | with mock.patch('kallithea.lib.webutils.UrlGenerator.__call__', |
|
382 | with mock.patch('kallithea.lib.webutils.UrlGenerator.__call__', | |
383 | lambda self, name, **kwargs: dict(changeset_home='/%(repo_name)s/changeset/%(revision)s')[name] % kwargs, |
|
383 | lambda self, name, **kwargs: dict(changeset_home='/%(repo_name)s/changeset/%(revision)s')[name] % kwargs, | |
384 | ): |
|
384 | ): | |
385 | assert webutils.urlify_text(sample, 'repo_name', link_='#the-link') == expected |
|
385 | assert webutils.urlify_text(sample, 'repo_name', link_='#the-link') == expected | |
386 |
|
386 | |||
387 | @base.parametrize('issue_pat,issue_server,issue_sub,sample,expected', [ |
|
387 | @base.parametrize('issue_pat,issue_server,issue_sub,sample,expected', [ | |
388 | (r'#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', |
|
388 | (r'#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', | |
389 | 'issue #123 and issue#456', |
|
389 | 'issue #123 and issue#456', | |
390 | """issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """ |
|
390 | """issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """ | |
391 | """issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/456">#456</a>"""), |
|
391 | """issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/456">#456</a>"""), | |
392 | (r'(?:\s*#)(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', |
|
392 | (r'(?:\s*#)(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', | |
393 | 'issue #123 and issue#456', |
|
393 | 'issue #123 and issue#456', | |
394 | """issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """ |
|
394 | """issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """ | |
395 | """issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/456">#456</a>"""), |
|
395 | """issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/456">#456</a>"""), | |
396 | # to require whitespace before the issue reference, one may be tempted to use \b... |
|
396 | # to require whitespace before the issue reference, one may be tempted to use \b... | |
397 | (r'\bPR(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', |
|
397 | (r'\bPR(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', | |
398 | 'issue PR123 and issuePR456', |
|
398 | 'issue PR123 and issuePR456', | |
399 | """issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """ |
|
399 | """issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """ | |
400 | """issuePR456"""), |
|
400 | """issuePR456"""), | |
401 | # ... but it turns out that \b does not work well in combination with '#': the expectations |
|
401 | # ... but it turns out that \b does not work well in combination with '#': the expectations | |
402 | # are reversed from what is actually happening. |
|
402 | # are reversed from what is actually happening. | |
403 | (r'\b#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', |
|
403 | (r'\b#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', | |
404 | 'issue #123 and issue#456', |
|
404 | 'issue #123 and issue#456', | |
405 | """issue #123 and """ |
|
405 | """issue #123 and """ | |
406 | """issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/456">#456</a>"""), |
|
406 | """issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/456">#456</a>"""), | |
407 | # ... so maybe try to be explicit? Unfortunately the whitespace before the issue |
|
407 | # ... so maybe try to be explicit? Unfortunately the whitespace before the issue | |
408 | # reference is not retained, again, because it is part of the pattern. |
|
408 | # reference is not retained, again, because it is part of the pattern. | |
409 | (r'(?:^|\s)#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', |
|
409 | (r'(?:^|\s)#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', | |
410 | '#15 and issue #123 and issue#456', |
|
410 | '#15 and issue #123 and issue#456', | |
411 | """<a class="issue-tracker-link" href="http://foo/repo_name/issue/15">#15</a> and """ |
|
411 | """<a class="issue-tracker-link" href="http://foo/repo_name/issue/15">#15</a> and """ | |
412 | """issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """ |
|
412 | """issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """ | |
413 | """issue#456"""), |
|
413 | """issue#456"""), | |
414 | # ... instead, use lookbehind assertions. |
|
414 | # ... instead, use lookbehind assertions. | |
415 | (r'(?:^|(?<=\s))#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', |
|
415 | (r'(?:^|(?<=\s))#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', | |
416 | '#15 and issue #123 and issue#456', |
|
416 | '#15 and issue #123 and issue#456', | |
417 | """<a class="issue-tracker-link" href="http://foo/repo_name/issue/15">#15</a> and """ |
|
417 | """<a class="issue-tracker-link" href="http://foo/repo_name/issue/15">#15</a> and """ | |
418 | """issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """ |
|
418 | """issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> and """ | |
419 | """issue#456"""), |
|
419 | """issue#456"""), | |
420 | (r'(?:pullrequest|pull request|PR|pr) ?#?(\d+)', 'http://foo/{repo}/issue/\\1', 'PR#\\1', |
|
420 | (r'(?:pullrequest|pull request|PR|pr) ?#?(\d+)', 'http://foo/{repo}/issue/\\1', 'PR#\\1', | |
421 | 'fixed with pullrequest #1, pull request#2, PR 3, pr4', |
|
421 | 'fixed with pullrequest #1, pull request#2, PR 3, pr4', | |
422 | """fixed with <a class="issue-tracker-link" href="http://foo/repo_name/issue/1">PR#1</a>, """ |
|
422 | """fixed with <a class="issue-tracker-link" href="http://foo/repo_name/issue/1">PR#1</a>, """ | |
423 | """<a class="issue-tracker-link" href="http://foo/repo_name/issue/2">PR#2</a>, """ |
|
423 | """<a class="issue-tracker-link" href="http://foo/repo_name/issue/2">PR#2</a>, """ | |
424 | """<a class="issue-tracker-link" href="http://foo/repo_name/issue/3">PR#3</a>, """ |
|
424 | """<a class="issue-tracker-link" href="http://foo/repo_name/issue/3">PR#3</a>, """ | |
425 | """<a class="issue-tracker-link" href="http://foo/repo_name/issue/4">PR#4</a>"""), |
|
425 | """<a class="issue-tracker-link" href="http://foo/repo_name/issue/4">PR#4</a>"""), | |
426 | (r'#(\d+)', 'http://foo/{repo}/issue/\\1', 'PR\\1', |
|
426 | (r'#(\d+)', 'http://foo/{repo}/issue/\\1', 'PR\\1', | |
427 | 'interesting issue #123', |
|
427 | 'interesting issue #123', | |
428 | """interesting issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">PR123</a>"""), |
|
428 | """interesting issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">PR123</a>"""), | |
429 | (r'BUG\d{5}', 'https://bar/{repo}/\\1', '\\1', |
|
429 | (r'BUG\d{5}', 'https://bar/{repo}/\\1', '\\1', | |
430 | 'silly me, I did not parenthesize the id, BUG12345.', |
|
430 | 'silly me, I did not parenthesize the id, BUG12345.', | |
431 | """silly me, I did not parenthesize the id, <a class="issue-tracker-link" href="https://bar/repo_name/\\1">BUG12345</a>."""), |
|
431 | """silly me, I did not parenthesize the id, <a class="issue-tracker-link" href="https://bar/repo_name/\\1">BUG12345</a>."""), | |
432 | (r'BUG(\d{5})', 'https://bar/{repo}/', 'BUG\\1', |
|
432 | (r'BUG(\d{5})', 'https://bar/{repo}/', 'BUG\\1', | |
433 | 'silly me, the URL does not contain id, BUG12345.', |
|
433 | 'silly me, the URL does not contain id, BUG12345.', | |
434 | """silly me, the URL does not contain id, <a class="issue-tracker-link" href="https://bar/repo_name/">BUG12345</a>."""), |
|
434 | """silly me, the URL does not contain id, <a class="issue-tracker-link" href="https://bar/repo_name/">BUG12345</a>."""), | |
435 | (r'(PR-\d+)', 'http://foo/{repo}/issue/\\1', '', |
|
435 | (r'(PR-\d+)', 'http://foo/{repo}/issue/\\1', '', | |
436 | 'interesting issue #123, err PR-56', |
|
436 | 'interesting issue #123, err PR-56', | |
437 | """interesting issue #123, err <a class="issue-tracker-link" href="http://foo/repo_name/issue/PR-56">PR-56</a>"""), |
|
437 | """interesting issue #123, err <a class="issue-tracker-link" href="http://foo/repo_name/issue/PR-56">PR-56</a>"""), | |
438 | (r'#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', |
|
438 | (r'#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', | |
439 | "some 'standard' text with apostrophes", |
|
439 | "some 'standard' text with apostrophes", | |
440 | """some 'standard' text with apostrophes"""), |
|
440 | """some 'standard' text with apostrophes"""), | |
441 | (r'#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', |
|
441 | (r'#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', | |
442 | "some 'standard' issue #123", |
|
442 | "some 'standard' issue #123", | |
443 | """some 'standard' issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a>"""), |
|
443 | """some 'standard' issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a>"""), | |
444 | (r'#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', |
|
444 | (r'#(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', | |
445 | 'an issue #123 with extra whitespace', |
|
445 | 'an issue #123 with extra whitespace', | |
446 | """an issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> with extra whitespace"""), |
|
446 | """an issue <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> with extra whitespace"""), | |
447 | (r'(?:\s*#)(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', |
|
447 | (r'(?:\s*#)(\d+)', 'http://foo/{repo}/issue/\\1', '#\\1', | |
448 | 'an issue #123 with extra whitespace', |
|
448 | 'an issue #123 with extra whitespace', | |
449 | """an issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> with extra whitespace"""), |
|
449 | """an issue<a class="issue-tracker-link" href="http://foo/repo_name/issue/123">#123</a> with extra whitespace"""), | |
450 | # invalid issue pattern |
|
450 | # invalid issue pattern | |
451 | (r'(PR\d+', 'http://foo/{repo}/issue/{id}', '', |
|
451 | (r'(PR\d+', 'http://foo/{repo}/issue/{id}', '', | |
452 | 'PR135', |
|
452 | 'PR135', | |
453 | """PR135"""), |
|
453 | """PR135"""), | |
454 | # other character than # |
|
454 | # other character than # | |
455 | (r'(?:^|(?<=\s))\$(\d+)', 'http://foo/{repo}/issue/\\1', '', |
|
455 | (r'(?:^|(?<=\s))\$(\d+)', 'http://foo/{repo}/issue/\\1', '', | |
456 | 'empty issue_sub $123 and issue$456', |
|
456 | 'empty issue_sub $123 and issue$456', | |
457 | """empty issue_sub <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">$123</a> and """ |
|
457 | """empty issue_sub <a class="issue-tracker-link" href="http://foo/repo_name/issue/123">$123</a> and """ | |
458 | """issue$456"""), |
|
458 | """issue$456"""), | |
459 | # named groups |
|
459 | # named groups | |
460 | (r'(PR|pullrequest|pull request) ?(?P<sitecode>BRU|CPH|BER)-(?P<id>\d+)', r'http://foo/\g<sitecode>/pullrequest/\g<id>/', r'PR-\g<sitecode>-\g<id>', |
|
460 | (r'(PR|pullrequest|pull request) ?(?P<sitecode>BRU|CPH|BER)-(?P<id>\d+)', r'http://foo/\g<sitecode>/pullrequest/\g<id>/', r'PR-\g<sitecode>-\g<id>', | |
461 | 'pullrequest CPH-789 is similar to PRBRU-747', |
|
461 | 'pullrequest CPH-789 is similar to PRBRU-747', | |
462 | """<a class="issue-tracker-link" href="http://foo/CPH/pullrequest/789/">PR-CPH-789</a> is similar to """ |
|
462 | """<a class="issue-tracker-link" href="http://foo/CPH/pullrequest/789/">PR-CPH-789</a> is similar to """ | |
463 | """<a class="issue-tracker-link" href="http://foo/BRU/pullrequest/747/">PR-BRU-747</a>"""), |
|
463 | """<a class="issue-tracker-link" href="http://foo/BRU/pullrequest/747/">PR-BRU-747</a>"""), | |
464 | ]) |
|
464 | ]) | |
465 | def test_urlify_issues(self, issue_pat, issue_server, issue_sub, sample, expected): |
|
465 | def test_urlify_issues(self, issue_pat, issue_server, issue_sub, sample, expected): | |
466 | config_stub = { |
|
466 | config_stub = { | |
467 | 'sqlalchemy.url': 'foo', |
|
467 | 'sqlalchemy.url': 'foo', | |
468 | 'issue_pat': issue_pat, |
|
468 | 'issue_pat': issue_pat, | |
469 | 'issue_server_link': issue_server, |
|
469 | 'issue_server_link': issue_server, | |
470 | 'issue_sub': issue_sub, |
|
470 | 'issue_sub': issue_sub, | |
471 | } |
|
471 | } | |
472 | # force recreation of lazy function |
|
472 | # force recreation of lazy function | |
473 | with mock.patch('kallithea.lib.webutils._urlify_issues_f', None): |
|
473 | with mock.patch('kallithea.lib.webutils._urlify_issues_f', None): | |
474 | with mock.patch('kallithea.CONFIG', config_stub): |
|
474 | with mock.patch('kallithea.CONFIG', config_stub): | |
475 | assert webutils.urlify_text(sample, 'repo_name') == expected |
|
475 | assert webutils.urlify_text(sample, 'repo_name') == expected | |
476 |
|
476 | |||
477 | @base.parametrize('sample,expected', [ |
|
477 | @base.parametrize('sample,expected', [ | |
478 | ('abc X5', 'abc <a class="issue-tracker-link" href="http://main/repo_name/main/5/">#5</a>'), |
|
478 | ('abc X5', 'abc <a class="issue-tracker-link" href="http://main/repo_name/main/5/">#5</a>'), | |
479 | ('abc pullrequest #6 xyz', 'abc <a class="issue-tracker-link" href="http://pr/repo_name/pr/6">PR#6</a> xyz'), |
|
479 | ('abc pullrequest #6 xyz', 'abc <a class="issue-tracker-link" href="http://pr/repo_name/pr/6">PR#6</a> xyz'), | |
480 | ('pull request7 #', '<a class="issue-tracker-link" href="http://pr/repo_name/pr/7">PR#7</a> #'), |
|
480 | ('pull request7 #', '<a class="issue-tracker-link" href="http://pr/repo_name/pr/7">PR#7</a> #'), | |
481 | ('look PR9 and pr #11', 'look <a class="issue-tracker-link" href="http://pr/repo_name/pr/9">PR#9</a> and <a class="issue-tracker-link" href="http://pr/repo_name/pr/11">PR#11</a>'), |
|
481 | ('look PR9 and pr #11', 'look <a class="issue-tracker-link" href="http://pr/repo_name/pr/9">PR#9</a> and <a class="issue-tracker-link" href="http://pr/repo_name/pr/11">PR#11</a>'), | |
482 | ('pullrequest#10 solves issue 9', '<a class="issue-tracker-link" href="http://pr/repo_name/pr/10">PR#10</a> solves <a class="issue-tracker-link" href="http://bug/repo_name/bug/9">bug#9</a>'), |
|
482 | ('pullrequest#10 solves issue 9', '<a class="issue-tracker-link" href="http://pr/repo_name/pr/10">PR#10</a> solves <a class="issue-tracker-link" href="http://bug/repo_name/bug/9">bug#9</a>'), | |
483 | ('issue FAIL67', 'issue <a class="issue-tracker-link" href="http://fail/repo_name/67">FAIL67</a>'), |
|
483 | ('issue FAIL67', 'issue <a class="issue-tracker-link" href="http://fail/repo_name/67">FAIL67</a>'), | |
484 | ('issue FAILMORE89', 'issue FAILMORE89'), # no match because absent prefix |
|
484 | ('issue FAILMORE89', 'issue FAILMORE89'), # no match because absent prefix | |
485 | ]) |
|
485 | ]) | |
486 | def test_urlify_issues_multiple_issue_patterns(self, sample, expected): |
|
486 | def test_urlify_issues_multiple_issue_patterns(self, sample, expected): | |
487 | config_stub = { |
|
487 | config_stub = { | |
488 | 'sqlalchemy.url': r'foo', |
|
488 | 'sqlalchemy.url': r'foo', | |
489 | 'issue_pat': r'X(\d+)', |
|
489 | 'issue_pat': r'X(\d+)', | |
490 | 'issue_server_link': r'http://main/{repo}/main/\1/', |
|
490 | 'issue_server_link': r'http://main/{repo}/main/\1/', | |
491 | 'issue_sub': r'#\1', |
|
491 | 'issue_sub': r'#\1', | |
492 | 'issue_pat_pr': r'(?:pullrequest|pull request|PR|pr) ?#?(\d+)', |
|
492 | 'issue_pat_pr': r'(?:pullrequest|pull request|PR|pr) ?#?(\d+)', | |
493 | 'issue_server_link_pr': r'http://pr/{repo}/pr/\1', |
|
493 | 'issue_server_link_pr': r'http://pr/{repo}/pr/\1', | |
494 | 'issue_sub_pr': r'PR#\1', |
|
494 | 'issue_sub_pr': r'PR#\1', | |
495 | 'issue_pat_bug': r'(?:BUG|bug|issue) ?#?(\d+)', |
|
495 | 'issue_pat_bug': r'(?:BUG|bug|issue) ?#?(\d+)', | |
496 | 'issue_server_link_bug': r'http://bug/{repo}/bug/\1', |
|
496 | 'issue_server_link_bug': r'http://bug/{repo}/bug/\1', | |
497 | 'issue_sub_bug': r'bug#\1', |
|
497 | 'issue_sub_bug': r'bug#\1', | |
498 | 'issue_pat_empty_prefix': r'FAIL(\d+)', |
|
498 | 'issue_pat_empty_prefix': r'FAIL(\d+)', | |
499 | 'issue_server_link_empty_prefix': r'http://fail/{repo}/\1', |
|
499 | 'issue_server_link_empty_prefix': r'http://fail/{repo}/\1', | |
500 | 'issue_sub_empty_prefix': r'', |
|
500 | 'issue_sub_empty_prefix': r'', | |
501 | 'issue_pat_absent_prefix': r'FAILMORE(\d+)', |
|
501 | 'issue_pat_absent_prefix': r'FAILMORE(\d+)', | |
502 | 'issue_server_link_absent_prefix': r'http://failmore/{repo}/\1', |
|
502 | 'issue_server_link_absent_prefix': r'http://failmore/{repo}/\1', | |
503 | } |
|
503 | } | |
504 | # force recreation of lazy function |
|
504 | # force recreation of lazy function | |
505 | with mock.patch('kallithea.lib.webutils._urlify_issues_f', None): |
|
505 | with mock.patch('kallithea.lib.webutils._urlify_issues_f', None): | |
506 | with mock.patch('kallithea.CONFIG', config_stub): |
|
506 | with mock.patch('kallithea.CONFIG', config_stub): | |
507 | assert webutils.urlify_text(sample, 'repo_name') == expected |
|
507 | assert webutils.urlify_text(sample, 'repo_name') == expected | |
508 |
|
508 | |||
509 | @base.parametrize('test,expected', [ |
|
509 | @base.parametrize('test,expected', [ | |
510 | ("", None), |
|
510 | ("", None), | |
511 | ("/_2", None), |
|
511 | ("/_2", None), | |
512 | ("_2", 2), |
|
512 | ("_2", 2), | |
513 | ("_2/", None), |
|
513 | ("_2/", None), | |
514 | ]) |
|
514 | ]) | |
515 | def test_get_permanent_id(self, test, expected): |
|
515 | def test_get_permanent_id(self, test, expected): | |
516 | from kallithea.lib.utils import _get_permanent_id |
|
516 | from kallithea.lib.utils import _get_permanent_id | |
517 | extracted = _get_permanent_id(test) |
|
517 | extracted = _get_permanent_id(test) | |
518 | assert extracted == expected, 'url:%s, got:`%s` expected: `%s`' % (test, base._test, expected) |
|
518 | assert extracted == expected, 'url:%s, got:`%s` expected: `%s`' % (test, base._test, expected) | |
519 |
|
519 | |||
520 | @base.parametrize('test,expected', [ |
|
520 | @base.parametrize('test,expected', [ | |
521 | ("", ""), |
|
521 | ("", ""), | |
522 | ("/", "/"), |
|
522 | ("/", "/"), | |
523 | ("/_ID", '/_ID'), |
|
523 | ("/_ID", '/_ID'), | |
524 | ("ID", "ID"), |
|
524 | ("ID", "ID"), | |
525 | ("_ID", 'NAME'), |
|
525 | ("_ID", 'NAME'), | |
526 | ("_ID/", 'NAME/'), |
|
526 | ("_ID/", 'NAME/'), | |
527 | ("_ID/1/2", 'NAME/1/2'), |
|
527 | ("_ID/1/2", 'NAME/1/2'), | |
528 | ("_IDa", '_IDa'), |
|
528 | ("_IDa", '_IDa'), | |
529 | ]) |
|
529 | ]) | |
530 | def test_fix_repo_id_name(self, test, expected): |
|
530 | def test_fix_repo_id_name(self, test, expected): | |
531 | repo = db.Repository.get_by_repo_name(base.HG_REPO) |
|
531 | repo = db.Repository.get_by_repo_name(base.HG_REPO) | |
532 | test = test.replace('ID', str(repo.repo_id)) |
|
532 | test = test.replace('ID', str(repo.repo_id)) | |
533 | expected = expected.replace('NAME', repo.repo_name).replace('ID', str(repo.repo_id)) |
|
533 | expected = expected.replace('NAME', repo.repo_name).replace('ID', str(repo.repo_id)) | |
534 | from kallithea.lib.utils import fix_repo_id_name |
|
534 | from kallithea.lib.utils import fix_repo_id_name | |
535 | replaced = fix_repo_id_name(test) |
|
535 | replaced = fix_repo_id_name(test) | |
536 | assert replaced == expected, 'url:%s, got:`%s` expected: `%s`' % (test, replaced, expected) |
|
536 | assert replaced == expected, 'url:%s, got:`%s` expected: `%s`' % (test, replaced, expected) | |
537 |
|
537 | |||
538 | @base.parametrize('canonical,test,expected', [ |
|
538 | @base.parametrize('canonical,test,expected', [ | |
539 | ('http://www.example.org/', '/abc/xyz', 'http://www.example.org/abc/xyz'), |
|
539 | ('http://www.example.org/', '/abc/xyz', 'http://www.example.org/abc/xyz'), | |
540 | ('http://www.example.org', '/abc/xyz', 'http://www.example.org/abc/xyz'), |
|
540 | ('http://www.example.org', '/abc/xyz', 'http://www.example.org/abc/xyz'), | |
541 | ('http://www.example.org', '/abc/xyz/', 'http://www.example.org/abc/xyz/'), |
|
541 | ('http://www.example.org', '/abc/xyz/', 'http://www.example.org/abc/xyz/'), | |
542 | ('http://www.example.org', 'abc/xyz/', 'http://www.example.org/abc/xyz/'), |
|
542 | ('http://www.example.org', 'abc/xyz/', 'http://www.example.org/abc/xyz/'), | |
543 | ('http://www.example.org', 'about', 'http://www.example.org/about-page'), |
|
543 | ('http://www.example.org', 'about', 'http://www.example.org/about-page'), | |
544 | ('http://www.example.org/repos/', 'abc/xyz/', 'http://www.example.org/repos/abc/xyz/'), |
|
544 | ('http://www.example.org/repos/', 'abc/xyz/', 'http://www.example.org/repos/abc/xyz/'), | |
545 | ('http://www.example.org/kallithea/repos/', 'abc/xyz/', 'http://www.example.org/kallithea/repos/abc/xyz/'), |
|
545 | ('http://www.example.org/kallithea/repos/', 'abc/xyz/', 'http://www.example.org/kallithea/repos/abc/xyz/'), | |
546 | ]) |
|
546 | ]) | |
547 | def test_canonical_url(self, canonical, test, expected): |
|
547 | def test_canonical_url(self, canonical, test, expected): | |
548 | # setup url(), used by canonical_url |
|
548 | # setup url(), used by canonical_url | |
549 | m = routes.Mapper() |
|
549 | m = routes.Mapper() | |
550 | m.connect('about', '/about-page') |
|
550 | m.connect('about', '/about-page') | |
551 | url = routes.URLGenerator(m, {'HTTP_HOST': 'http_host.example.org'}) |
|
551 | url = routes.URLGenerator(m, {'HTTP_HOST': 'http_host.example.org'}) | |
552 |
|
552 | |||
553 | config_mock = { |
|
553 | config_mock = { | |
554 | 'canonical_url': canonical, |
|
554 | 'canonical_url': canonical, | |
555 | } |
|
555 | } | |
556 |
|
556 | |||
557 | with test_context(self.app): |
|
557 | with test_context(self.app): | |
558 | request.environ['routes.url'] = url |
|
558 | request.environ['routes.url'] = url | |
559 | with mock.patch('kallithea.CONFIG', config_mock): |
|
559 | with mock.patch('kallithea.CONFIG', config_mock): | |
560 | assert webutils.canonical_url(test) == expected |
|
560 | assert webutils.canonical_url(test) == expected | |
561 |
|
561 | |||
562 | @base.parametrize('canonical,expected', [ |
|
562 | @base.parametrize('canonical,expected', [ | |
563 | ('http://www.example.org', 'www.example.org'), |
|
563 | ('http://www.example.org', 'www.example.org'), | |
564 | ('http://www.example.org/repos/', 'www.example.org'), |
|
564 | ('http://www.example.org/repos/', 'www.example.org'), | |
565 | ('http://www.example.org/kallithea/repos/', 'www.example.org'), |
|
565 | ('http://www.example.org/kallithea/repos/', 'www.example.org'), | |
566 | ]) |
|
566 | ]) | |
567 | def test_canonical_hostname(self, canonical, expected): |
|
567 | def test_canonical_hostname(self, canonical, expected): | |
568 | # setup url(), used by canonical_hostname |
|
568 | # setup url(), used by canonical_hostname | |
569 | m = routes.Mapper() |
|
569 | m = routes.Mapper() | |
570 | url = routes.URLGenerator(m, {'HTTP_HOST': 'http_host.example.org'}) |
|
570 | url = routes.URLGenerator(m, {'HTTP_HOST': 'http_host.example.org'}) | |
571 |
|
571 | |||
572 | config_mock = { |
|
572 | config_mock = { | |
573 | 'canonical_url': canonical, |
|
573 | 'canonical_url': canonical, | |
574 | } |
|
574 | } | |
575 |
|
575 | |||
576 | with test_context(self.app): |
|
576 | with test_context(self.app): | |
577 | request.environ['routes.url'] = url |
|
577 | request.environ['routes.url'] = url | |
578 | with mock.patch('kallithea.CONFIG', config_mock): |
|
578 | with mock.patch('kallithea.CONFIG', config_mock): | |
579 | assert webutils.canonical_hostname() == expected |
|
579 | assert webutils.canonical_hostname() == expected |
General Comments 0
You need to be logged in to leave comments.
Login now