The requested changes are too big and content was truncated.
@@ -1,308 +1,308 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import datetime |
|
23 | 23 | |
|
24 | 24 | from pyramid.httpexceptions import HTTPFound |
|
25 | 25 | from pyramid.view import view_config |
|
26 | 26 | from sqlalchemy.sql.functions import coalesce |
|
27 | 27 | |
|
28 | 28 | from rhodecode.lib.helpers import Page |
|
29 | 29 | from rhodecode.lib.ext_json import json |
|
30 | 30 | |
|
31 | 31 | from rhodecode.apps._base import BaseAppView, DataGridAppView |
|
32 | 32 | from rhodecode.lib.auth import ( |
|
33 | 33 | LoginRequired, HasPermissionAllDecorator, CSRFRequired) |
|
34 | 34 | from rhodecode.lib import helpers as h |
|
35 | 35 | from rhodecode.lib.utils import PartialRenderer |
|
36 | 36 | from rhodecode.lib.utils2 import safe_int, safe_unicode |
|
37 | 37 | from rhodecode.model.auth_token import AuthTokenModel |
|
38 | 38 | from rhodecode.model.user import UserModel |
|
39 | 39 | from rhodecode.model.user_group import UserGroupModel |
|
40 | 40 | from rhodecode.model.db import User, or_ |
|
41 | 41 | from rhodecode.model.meta import Session |
|
42 | 42 | |
|
43 | 43 | log = logging.getLogger(__name__) |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class AdminUsersView(BaseAppView, DataGridAppView): |
|
47 | 47 | ALLOW_SCOPED_TOKENS = False |
|
48 | 48 | """ |
|
49 | 49 | This view has alternative version inside EE, if modified please take a look |
|
50 | 50 | in there as well. |
|
51 | 51 | """ |
|
52 | 52 | |
|
53 | 53 | def load_default_context(self): |
|
54 | 54 | c = self._get_local_tmpl_context() |
|
55 | 55 | c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS |
|
56 | 56 | self._register_global_c(c) |
|
57 | 57 | return c |
|
58 | 58 | |
|
59 | 59 | def _redirect_for_default_user(self, username): |
|
60 | 60 | _ = self.request.translate |
|
61 | 61 | if username == User.DEFAULT_USER: |
|
62 | 62 | h.flash(_("You can't edit this user"), category='warning') |
|
63 | 63 | # TODO(marcink): redirect to 'users' admin panel once this |
|
64 | 64 | # is a pyramid view |
|
65 | 65 | raise HTTPFound('/') |
|
66 | 66 | |
|
67 | 67 | @HasPermissionAllDecorator('hg.admin') |
|
68 | 68 | @view_config( |
|
69 | 69 | route_name='users', request_method='GET', |
|
70 | 70 | renderer='rhodecode:templates/admin/users/users.mako') |
|
71 | 71 | def users_list(self): |
|
72 | 72 | c = self.load_default_context() |
|
73 | 73 | return self._get_template_context(c) |
|
74 | 74 | |
|
75 | 75 | @HasPermissionAllDecorator('hg.admin') |
|
76 | 76 | @view_config( |
|
77 | 77 | # renderer defined below |
|
78 | 78 | route_name='users_data', request_method='GET', |
|
79 | 79 | renderer='json_ext', xhr=True) |
|
80 | 80 | def users_list_data(self): |
|
81 | 81 | draw, start, limit = self._extract_chunk(self.request) |
|
82 | 82 | search_q, order_by, order_dir = self._extract_ordering(self.request) |
|
83 | 83 | |
|
84 | 84 | _render = PartialRenderer('data_table/_dt_elements.mako') |
|
85 | 85 | |
|
86 | 86 | def user_actions(user_id, username): |
|
87 | 87 | return _render("user_actions", user_id, username) |
|
88 | 88 | |
|
89 | 89 | users_data_total_count = User.query()\ |
|
90 | 90 | .filter(User.username != User.DEFAULT_USER) \ |
|
91 | 91 | .count() |
|
92 | 92 | |
|
93 | 93 | # json generate |
|
94 | 94 | base_q = User.query().filter(User.username != User.DEFAULT_USER) |
|
95 | 95 | |
|
96 | 96 | if search_q: |
|
97 | 97 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
98 | 98 | base_q = base_q.filter(or_( |
|
99 | 99 | User.username.ilike(like_expression), |
|
100 | 100 | User._email.ilike(like_expression), |
|
101 | 101 | User.name.ilike(like_expression), |
|
102 | 102 | User.lastname.ilike(like_expression), |
|
103 | 103 | )) |
|
104 | 104 | |
|
105 | 105 | users_data_total_filtered_count = base_q.count() |
|
106 | 106 | |
|
107 | 107 | sort_col = getattr(User, order_by, None) |
|
108 | 108 | if sort_col: |
|
109 | 109 | if order_dir == 'asc': |
|
110 | 110 | # handle null values properly to order by NULL last |
|
111 | 111 | if order_by in ['last_activity']: |
|
112 | 112 | sort_col = coalesce(sort_col, datetime.date.max) |
|
113 | 113 | sort_col = sort_col.asc() |
|
114 | 114 | else: |
|
115 | 115 | # handle null values properly to order by NULL last |
|
116 | 116 | if order_by in ['last_activity']: |
|
117 | 117 | sort_col = coalesce(sort_col, datetime.date.min) |
|
118 | 118 | sort_col = sort_col.desc() |
|
119 | 119 | |
|
120 | 120 | base_q = base_q.order_by(sort_col) |
|
121 | 121 | base_q = base_q.offset(start).limit(limit) |
|
122 | 122 | |
|
123 | 123 | users_list = base_q.all() |
|
124 | 124 | |
|
125 | 125 | users_data = [] |
|
126 | 126 | for user in users_list: |
|
127 | 127 | users_data.append({ |
|
128 | 128 | "username": h.gravatar_with_user(user.username), |
|
129 | 129 | "email": user.email, |
|
130 | "first_name": user.name, |
|
131 | "last_name": user.lastname, |
|
130 | "first_name": user.first_name, |
|
131 | "last_name": user.last_name, |
|
132 | 132 | "last_login": h.format_date(user.last_login), |
|
133 | 133 | "last_activity": h.format_date(user.last_activity), |
|
134 | 134 | "active": h.bool2icon(user.active), |
|
135 | 135 | "active_raw": user.active, |
|
136 | 136 | "admin": h.bool2icon(user.admin), |
|
137 | 137 | "extern_type": user.extern_type, |
|
138 | 138 | "extern_name": user.extern_name, |
|
139 | 139 | "action": user_actions(user.user_id, user.username), |
|
140 | 140 | }) |
|
141 | 141 | |
|
142 | 142 | data = ({ |
|
143 | 143 | 'draw': draw, |
|
144 | 144 | 'data': users_data, |
|
145 | 145 | 'recordsTotal': users_data_total_count, |
|
146 | 146 | 'recordsFiltered': users_data_total_filtered_count, |
|
147 | 147 | }) |
|
148 | 148 | |
|
149 | 149 | return data |
|
150 | 150 | |
|
151 | 151 | @LoginRequired() |
|
152 | 152 | @HasPermissionAllDecorator('hg.admin') |
|
153 | 153 | @view_config( |
|
154 | 154 | route_name='edit_user_auth_tokens', request_method='GET', |
|
155 | 155 | renderer='rhodecode:templates/admin/users/user_edit.mako') |
|
156 | 156 | def auth_tokens(self): |
|
157 | 157 | _ = self.request.translate |
|
158 | 158 | c = self.load_default_context() |
|
159 | 159 | |
|
160 | 160 | user_id = self.request.matchdict.get('user_id') |
|
161 | 161 | c.user = User.get_or_404(user_id, pyramid_exc=True) |
|
162 | 162 | self._redirect_for_default_user(c.user.username) |
|
163 | 163 | |
|
164 | 164 | c.active = 'auth_tokens' |
|
165 | 165 | |
|
166 | 166 | c.lifetime_values = [ |
|
167 | 167 | (str(-1), _('forever')), |
|
168 | 168 | (str(5), _('5 minutes')), |
|
169 | 169 | (str(60), _('1 hour')), |
|
170 | 170 | (str(60 * 24), _('1 day')), |
|
171 | 171 | (str(60 * 24 * 30), _('1 month')), |
|
172 | 172 | ] |
|
173 | 173 | c.lifetime_options = [(c.lifetime_values, _("Lifetime"))] |
|
174 | 174 | c.role_values = [ |
|
175 | 175 | (x, AuthTokenModel.cls._get_role_name(x)) |
|
176 | 176 | for x in AuthTokenModel.cls.ROLES] |
|
177 | 177 | c.role_options = [(c.role_values, _("Role"))] |
|
178 | 178 | c.user_auth_tokens = AuthTokenModel().get_auth_tokens( |
|
179 | 179 | c.user.user_id, show_expired=True) |
|
180 | 180 | return self._get_template_context(c) |
|
181 | 181 | |
|
182 | 182 | def maybe_attach_token_scope(self, token): |
|
183 | 183 | # implemented in EE edition |
|
184 | 184 | pass |
|
185 | 185 | |
|
186 | 186 | @LoginRequired() |
|
187 | 187 | @HasPermissionAllDecorator('hg.admin') |
|
188 | 188 | @CSRFRequired() |
|
189 | 189 | @view_config( |
|
190 | 190 | route_name='edit_user_auth_tokens_add', request_method='POST') |
|
191 | 191 | def auth_tokens_add(self): |
|
192 | 192 | _ = self.request.translate |
|
193 | 193 | c = self.load_default_context() |
|
194 | 194 | |
|
195 | 195 | user_id = self.request.matchdict.get('user_id') |
|
196 | 196 | c.user = User.get_or_404(user_id, pyramid_exc=True) |
|
197 | 197 | self._redirect_for_default_user(c.user.username) |
|
198 | 198 | |
|
199 | 199 | lifetime = safe_int(self.request.POST.get('lifetime'), -1) |
|
200 | 200 | description = self.request.POST.get('description') |
|
201 | 201 | role = self.request.POST.get('role') |
|
202 | 202 | |
|
203 | 203 | token = AuthTokenModel().create( |
|
204 | 204 | c.user.user_id, description, lifetime, role) |
|
205 | 205 | self.maybe_attach_token_scope(token) |
|
206 | 206 | Session().commit() |
|
207 | 207 | |
|
208 | 208 | h.flash(_("Auth token successfully created"), category='success') |
|
209 | 209 | return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id)) |
|
210 | 210 | |
|
211 | 211 | @LoginRequired() |
|
212 | 212 | @HasPermissionAllDecorator('hg.admin') |
|
213 | 213 | @CSRFRequired() |
|
214 | 214 | @view_config( |
|
215 | 215 | route_name='edit_user_auth_tokens_delete', request_method='POST') |
|
216 | 216 | def auth_tokens_delete(self): |
|
217 | 217 | _ = self.request.translate |
|
218 | 218 | c = self.load_default_context() |
|
219 | 219 | |
|
220 | 220 | user_id = self.request.matchdict.get('user_id') |
|
221 | 221 | c.user = User.get_or_404(user_id, pyramid_exc=True) |
|
222 | 222 | self._redirect_for_default_user(c.user.username) |
|
223 | 223 | |
|
224 | 224 | del_auth_token = self.request.POST.get('del_auth_token') |
|
225 | 225 | |
|
226 | 226 | if del_auth_token: |
|
227 | 227 | AuthTokenModel().delete(del_auth_token, c.user.user_id) |
|
228 | 228 | Session().commit() |
|
229 | 229 | h.flash(_("Auth token successfully deleted"), category='success') |
|
230 | 230 | |
|
231 | 231 | return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id)) |
|
232 | 232 | |
|
233 | 233 | @LoginRequired() |
|
234 | 234 | @HasPermissionAllDecorator('hg.admin') |
|
235 | 235 | @view_config( |
|
236 | 236 | route_name='edit_user_groups_management', request_method='GET', |
|
237 | 237 | renderer='rhodecode:templates/admin/users/user_edit.mako') |
|
238 | 238 | def groups_management(self): |
|
239 | 239 | c = self.load_default_context() |
|
240 | 240 | |
|
241 | 241 | user_id = self.request.matchdict.get('user_id') |
|
242 | 242 | c.user = User.get_or_404(user_id, pyramid_exc=True) |
|
243 | 243 | c.data = c.user.group_member |
|
244 | 244 | self._redirect_for_default_user(c.user.username) |
|
245 | 245 | groups = [UserGroupModel.get_user_groups_as_dict(group.users_group) |
|
246 | 246 | for group in c.user.group_member] |
|
247 | 247 | c.groups = json.dumps(groups) |
|
248 | 248 | c.active = 'groups' |
|
249 | 249 | |
|
250 | 250 | return self._get_template_context(c) |
|
251 | 251 | |
|
252 | 252 | @LoginRequired() |
|
253 | 253 | @HasPermissionAllDecorator('hg.admin') |
|
254 | 254 | @CSRFRequired() |
|
255 | 255 | @view_config( |
|
256 | 256 | route_name='edit_user_groups_management_updates', request_method='POST') |
|
257 | 257 | def groups_management_updates(self): |
|
258 | 258 | _ = self.request.translate |
|
259 | 259 | c = self.load_default_context() |
|
260 | 260 | |
|
261 | 261 | user_id = self.request.matchdict.get('user_id') |
|
262 | 262 | c.user = User.get_or_404(user_id, pyramid_exc=True) |
|
263 | 263 | self._redirect_for_default_user(c.user.username) |
|
264 | 264 | |
|
265 | 265 | users_groups = set(self.request.POST.getall('users_group_id')) |
|
266 | 266 | users_groups_model = [] |
|
267 | 267 | |
|
268 | 268 | for ugid in users_groups: |
|
269 | 269 | users_groups_model.append(UserGroupModel().get_group(safe_int(ugid))) |
|
270 | 270 | user_group_model = UserGroupModel() |
|
271 | 271 | user_group_model.change_groups(c.user, users_groups_model) |
|
272 | 272 | |
|
273 | 273 | Session().commit() |
|
274 | 274 | c.active = 'user_groups_management' |
|
275 | 275 | h.flash(_("Groups successfully changed"), category='success') |
|
276 | 276 | |
|
277 | 277 | return HTTPFound(h.route_path( |
|
278 | 278 | 'edit_user_groups_management', user_id=user_id)) |
|
279 | 279 | |
|
280 | 280 | @LoginRequired() |
|
281 | 281 | @HasPermissionAllDecorator('hg.admin') |
|
282 | 282 | @view_config( |
|
283 | 283 | route_name='edit_user_audit_logs', request_method='GET', |
|
284 | 284 | renderer='rhodecode:templates/admin/users/user_edit.mako') |
|
285 | 285 | def user_audit_logs(self): |
|
286 | 286 | _ = self.request.translate |
|
287 | 287 | c = self.load_default_context() |
|
288 | 288 | |
|
289 | 289 | user_id = self.request.matchdict.get('user_id') |
|
290 | 290 | c.user = User.get_or_404(user_id, pyramid_exc=True) |
|
291 | 291 | self._redirect_for_default_user(c.user.username) |
|
292 | 292 | c.active = 'audit' |
|
293 | 293 | |
|
294 | 294 | p = safe_int(self.request.GET.get('page', 1), 1) |
|
295 | 295 | |
|
296 | 296 | filter_term = self.request.GET.get('filter') |
|
297 | 297 | user_log = UserModel().get_user_log(c.user, filter_term) |
|
298 | 298 | |
|
299 | 299 | def url_generator(**kw): |
|
300 | 300 | if filter_term: |
|
301 | 301 | kw['filter'] = filter_term |
|
302 | 302 | return self.request.current_route_path(_query=kw) |
|
303 | 303 | |
|
304 | 304 | c.audit_logs = Page(user_log, page=p, items_per_page=10, |
|
305 | 305 | url=url_generator) |
|
306 | 306 | c.filter_term = filter_term |
|
307 | 307 | return self._get_template_context(c) |
|
308 | 308 |
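A side note on the ordering hunk in `users_list_data()` above: `coalesce(sort_col, datetime.date.max)` substitutes a sentinel for NULL `last_activity` values so that users who never logged in sort last in both directions. Below is a minimal, self-contained sketch of the same technique; the toy `User` model and in-memory sqlite engine are illustrative assumptions, not RhodeCode code:

```python
import datetime

from sqlalchemy import Column, DateTime, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql.functions import coalesce

Base = declarative_base()


class User(Base):
    __tablename__ = 'users'
    user_id = Column(Integer, primary_key=True)
    last_activity = Column(DateTime, nullable=True)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add_all([
    User(last_activity=None),  # never active; should sort last
    User(last_activity=datetime.datetime(2017, 1, 1)),
    User(last_activity=datetime.datetime(2016, 6, 1)),
])
session.commit()

# ascending: replace NULL with datetime.max so empty values sink to the end
asc_col = coalesce(User.last_activity, datetime.datetime.max).asc()
print([u.last_activity for u in session.query(User).order_by(asc_col)])

# descending: replace NULL with datetime.min so NULLs again come last
desc_col = coalesce(User.last_activity, datetime.datetime.min).desc()
print([u.last_activity for u in session.query(User).order_by(desc_col)])
```

Without the `coalesce()` wrapper, NULL placement is database-dependent (PostgreSQL, for example, treats NULL as largest and so puts NULLs first on a descending sort), which is exactly what the sentinel avoids.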
@@ -1,134 +1,134 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import pytest |
|
23 | 23 | |
|
24 | 24 | import rhodecode |
|
25 | 25 | from rhodecode.model.db import Repository |
|
26 | 26 | from rhodecode.model.meta import Session |
|
27 | 27 | from rhodecode.model.repo import RepoModel |
|
28 | 28 | from rhodecode.model.repo_group import RepoGroupModel |
|
29 | 29 | from rhodecode.model.settings import SettingsModel |
|
30 | 30 | from rhodecode.tests import TestController |
|
31 | 31 | from rhodecode.tests.fixture import Fixture |
|
32 | 32 | from rhodecode.lib import helpers as h |
|
33 | 33 | |
|
34 | 34 | fixture = Fixture() |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def route_path(name, **kwargs): |
|
38 | 38 | return { |
|
39 | 39 | 'home': '/', |
|
40 | 40 | 'repo_group_home': '/{repo_group_name}' |
|
41 | 41 | }[name].format(**kwargs) |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | class TestHomeController(TestController): |
|
45 | 45 | |
|
46 | 46 | def test_index(self): |
|
47 | 47 | self.log_user() |
|
48 | 48 | response = self.app.get(route_path('home')) |
|
49 | 49 | # if global permission is set |
|
50 | 50 | response.mustcontain('Add Repository') |
|
51 | 51 | |
|
52 | 52 | # search for objects inside the JavaScript JSON |
|
53 | 53 | for repo in Repository.getAll(): |
|
54 | 54 | response.mustcontain('"name_raw": "%s"' % repo.repo_name) |
|
55 | 55 | |
|
56 | 56 | def test_index_contains_statics_with_ver(self): |
|
57 | 57 | from pylons import tmpl_context as c |
|
58 | 58 | |
|
59 | 59 | self.log_user() |
|
60 | 60 | response = self.app.get(route_path('home')) |
|
61 | 61 | |
|
62 | 62 | rhodecode_version_hash = c.rhodecode_version_hash |
|
63 | 63 | response.mustcontain('style.css?ver={0}'.format(rhodecode_version_hash)) |
|
64 | 64 | response.mustcontain('rhodecode-components.js?ver={0}'.format(rhodecode_version_hash)) |
|
65 | 65 | |
|
66 | 66 | def test_index_contains_backend_specific_details(self, backend): |
|
67 | 67 | self.log_user() |
|
68 | 68 | response = self.app.get(route_path('home')) |
|
69 | 69 | tip = backend.repo.get_commit().raw_id |
|
70 | 70 | |
|
71 | 71 | # html in javascript variable: |
|
72 | 72 | response.mustcontain(r'<i class=\"icon-%s\"' % (backend.alias, )) |
|
73 | 73 | response.mustcontain(r'href=\"/%s\"' % (backend.repo_name, )) |
|
74 | 74 | |
|
75 | 75 | response.mustcontain("""/%s/changeset/%s""" % (backend.repo_name, tip)) |
|
76 | 76 | response.mustcontain("""Added a symlink""") |
|
77 | 77 | |
|
78 | 78 | def test_index_with_anonymous_access_disabled(self): |
|
79 | 79 | with fixture.anon_access(False): |
|
80 | 80 | response = self.app.get(route_path('home'), status=302) |
|
81 | 81 | assert 'login' in response.location |
|
82 | 82 | |
|
83 | 83 | def test_index_page_on_groups(self, autologin_user, repo_group): |
|
84 | 84 | response = self.app.get(route_path('repo_group_home', repo_group_name='gr1')) |
|
85 | 85 | response.mustcontain("gr1/repo_in_group") |
|
86 | 86 | |
|
87 | 87 | def test_index_page_on_group_with_trailing_slash( |
|
88 | 88 | self, autologin_user, repo_group): |
|
89 | 89 | response = self.app.get(route_path('repo_group_home', repo_group_name='gr1') + '/') |
|
90 | 90 | response.mustcontain("gr1/repo_in_group") |
|
91 | 91 | |
|
92 | 92 | @pytest.fixture(scope='class') |
|
93 | 93 | def repo_group(self, request): |
|
94 | 94 | gr = fixture.create_repo_group('gr1') |
|
95 | 95 | fixture.create_repo(name='gr1/repo_in_group', repo_group=gr) |
|
96 | 96 | |
|
97 | 97 | @request.addfinalizer |
|
98 | 98 | def cleanup(): |
|
99 | 99 | RepoModel().delete('gr1/repo_in_group') |
|
100 | 100 | RepoGroupModel().delete(repo_group='gr1', force_delete=True) |
|
101 | 101 | Session().commit() |
|
102 | 102 | |
|
103 | 103 | def test_index_with_name_with_tags(self, user_util, autologin_user): |
|
104 | 104 | user = user_util.create_user() |
|
105 | 105 | username = user.username |
|
106 | 106 | user.name = '<img src="/image1" onload="alert(\'Hello, World!\');">' |
|
107 | 107 | user.lastname = '#"><img src=x onerror=prompt(document.cookie);>' |
|
108 | 108 | |
|
109 | 109 | Session().add(user) |
|
110 | 110 | Session().commit() |
|
111 | 111 | user_util.create_repo(owner=username) |
|
112 | 112 | |
|
113 | 113 | response = self.app.get(route_path('home')) |
|
114 | response.mustcontain(h.html_escape(user.firstname)) |
|
115 | response.mustcontain(h.html_escape(user.lastname)) |
|
114 | response.mustcontain(h.html_escape(user.first_name)) |
|
115 | response.mustcontain(h.html_escape(user.last_name)) |
|
116 | 116 | |
|
117 | 117 | @pytest.mark.parametrize("name, state", [ |
|
118 | 118 | ('Disabled', False), |
|
119 | 119 | ('Enabled', True), |
|
120 | 120 | ]) |
|
121 | 121 | def test_index_show_version(self, autologin_user, name, state): |
|
122 | 122 | version_string = 'RhodeCode Enterprise %s' % rhodecode.__version__ |
|
123 | 123 | |
|
124 | 124 | sett = SettingsModel().create_or_update_setting( |
|
125 | 125 | 'show_version', state, 'bool') |
|
126 | 126 | Session().add(sett) |
|
127 | 127 | Session().commit() |
|
128 | 128 | SettingsModel().invalidate_settings_cache() |
|
129 | 129 | |
|
130 | 130 | response = self.app.get(route_path('home')) |
|
131 | 131 | if state is True: |
|
132 | 132 | response.mustcontain(version_string) |
|
133 | 133 | if state is False: |
|
134 | 134 | response.mustcontain(no=[version_string]) |
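The `test_index_with_name_with_tags` case above works by storing script-injection payloads in the user's name fields and asserting that only the HTML-escaped form reaches the page. A quick stand-alone illustration of that escaping contract, using the stdlib `html.escape` as a stand-in for RhodeCode's `h.html_escape` (the exact escape sequences shown are an assumption based on Python 3's default quoting):

```python
import html

payload = '<img src="/image1" onload="alert(\'Hello, World!\');">'
escaped = html.escape(payload)

# the rendered page may contain the escaped form, never the raw tag
assert '<img' not in escaped
assert escaped.startswith('&lt;img')
print(escaped)
# &lt;img src=&quot;/image1&quot; onload=&quot;alert(&#x27;Hello, World!&#x27;);&quot;&gt;
```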
@@ -1,76 +1,76 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from rhodecode.lib import helpers as h |
|
22 | 22 | from rhodecode.lib.utils2 import safe_int |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | def reviewer_as_json(user, reasons=None, mandatory=False): |
|
26 | 26 | """ |
|
27 | 27 | Returns json struct of a reviewer for frontend |
|
28 | 28 | |
|
29 | 29 | :param user: the reviewer |
|
30 | 30 | :param reasons: list of strings of why they are reviewers |
|
31 | 31 | :param mandatory: bool, to set user as mandatory |
|
32 | 32 | """ |
|
33 | 33 | |
|
34 | 34 | return { |
|
35 | 35 | 'user_id': user.user_id, |
|
36 | 36 | 'reasons': reasons or [], |
|
37 | 37 | 'mandatory': mandatory, |
|
38 | 38 | 'username': user.username, |
|
39 | 'firstname': user.firstname, | |
|
40 | 'lastname': user.lastname, | |
|
39 | 'first_name': user.first_name, | |
|
40 | 'last_name': user.last_name, | |
|
41 | 41 | 'gravatar_link': h.gravatar_url(user.email, 14), |
|
42 | 42 | } |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | def get_default_reviewers_data( |
|
46 | 46 | current_user, source_repo, source_commit, target_repo, target_commit): |
|
47 | 47 | |
|
48 | 48 | """ Return json for default reviewers of a repository """ |
|
49 | 49 | |
|
50 | 50 | reasons = ['Default reviewer', 'Repository owner'] |
|
51 | 51 | default = reviewer_as_json( |
|
52 | 52 | user=current_user, reasons=reasons, mandatory=False) |
|
53 | 53 | |
|
54 | 54 | return { |
|
55 | 55 | 'api_ver': 'v1', # define version for later possible schema upgrade |
|
56 | 56 | 'reviewers': [default], |
|
57 | 57 | 'rules': {}, |
|
58 | 58 | 'rules_data': {}, |
|
59 | 59 | } |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | def validate_default_reviewers(review_members, reviewer_rules): |
|
63 | 63 | """ |
|
64 | 64 | Function to validate submitted reviewers against the saved rules |
|
65 | 65 | |
|
66 | 66 | """ |
|
67 | 67 | reviewers = [] |
|
68 | 68 | reviewer_by_id = {} |
|
69 | 69 | for r in review_members: |
|
70 | 70 | reviewer_user_id = safe_int(r['user_id']) |
|
71 | 71 | entry = (reviewer_user_id, r['reasons'], r['mandatory']) |
|
72 | 72 | |
|
73 | 73 | reviewer_by_id[reviewer_user_id] = entry |
|
74 | 74 | reviewers.append(entry) |
|
75 | 75 | |
|
76 | 76 | return reviewers |
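The `reviewer_as_json` hunk above is part of the same mechanical rename (`firstname`/`lastname` to `first_name`/`last_name`) applied across this changeset. When such a rename has to land gradually, one hypothetical way to keep old call sites alive is a deprecated alias property on the model; this is only a sketch of the pattern, not what RhodeCode ships:

```python
import warnings


class User(object):
    """Toy model illustrating a staged attribute rename."""

    def __init__(self, first_name, last_name):
        self.first_name = first_name
        self.last_name = last_name

    @property
    def firstname(self):
        # the old spelling still works, but nudges callers to the new name
        warnings.warn('User.firstname is deprecated; use User.first_name',
                      DeprecationWarning, stacklevel=2)
        return self.first_name

    @property
    def lastname(self):
        warnings.warn('User.lastname is deprecated; use User.last_name',
                      DeprecationWarning, stacklevel=2)
        return self.last_name


user = User('Ada', 'Lovelace')
assert user.firstname == user.first_name  # un-migrated callers keep working
```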
@@ -1,510 +1,510 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | User Groups crud controller for pylons |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import formencode |
|
27 | 27 | |
|
28 | 28 | import peppercorn |
|
29 | 29 | from formencode import htmlfill |
|
30 | 30 | from pylons import request, tmpl_context as c, url, config |
|
31 | 31 | from pylons.controllers.util import redirect |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | |
|
34 | 34 | from sqlalchemy.orm import joinedload |
|
35 | 35 | |
|
36 | 36 | from rhodecode.lib import auth |
|
37 | 37 | from rhodecode.lib import helpers as h |
|
38 | 38 | from rhodecode.lib import audit_logger |
|
39 | 39 | from rhodecode.lib.ext_json import json |
|
40 | 40 | from rhodecode.lib.exceptions import UserGroupAssignedException,\ |
|
41 | 41 | RepoGroupAssignmentError |
|
42 | 42 | from rhodecode.lib.utils import jsonify |
|
43 | 43 | from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int |
|
44 | 44 | from rhodecode.lib.auth import ( |
|
45 | 45 | LoginRequired, NotAnonymous, HasUserGroupPermissionAnyDecorator, |
|
46 | 46 | HasPermissionAnyDecorator, XHRRequired) |
|
47 | 47 | from rhodecode.lib.base import BaseController, render |
|
48 | 48 | from rhodecode.model.permission import PermissionModel |
|
49 | 49 | from rhodecode.model.scm import UserGroupList |
|
50 | 50 | from rhodecode.model.user_group import UserGroupModel |
|
51 | 51 | from rhodecode.model.db import ( |
|
52 | 52 | User, UserGroup, UserGroupRepoToPerm, UserGroupRepoGroupToPerm) |
|
53 | 53 | from rhodecode.model.forms import ( |
|
54 | 54 | UserGroupForm, UserGroupPermsForm, UserIndividualPermissionsForm, |
|
55 | 55 | UserPermissionsForm) |
|
56 | 56 | from rhodecode.model.meta import Session |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | log = logging.getLogger(__name__) |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | class UserGroupsController(BaseController): |
|
63 | 63 | """REST Controller styled on the Atom Publishing Protocol""" |
|
64 | 64 | |
|
65 | 65 | @LoginRequired() |
|
66 | 66 | def __before__(self): |
|
67 | 67 | super(UserGroupsController, self).__before__() |
|
68 | 68 | c.available_permissions = config['available_permissions'] |
|
69 | 69 | PermissionModel().set_global_permission_choices(c, gettext_translator=_) |
|
70 | 70 | |
|
71 | 71 | def __load_data(self, user_group_id): |
|
72 | 72 | c.group_members_obj = [x.user for x in c.user_group.members] |
|
73 | 73 | c.group_members_obj.sort(key=lambda u: u.username.lower()) |
|
74 | 74 | c.group_members = [(x.user_id, x.username) for x in c.group_members_obj] |
|
75 | 75 | |
|
76 | 76 | def __load_defaults(self, user_group_id): |
|
77 | 77 | """ |
|
78 | 78 | Load defaults settings for edit, and update |
|
79 | 79 | |
|
80 | 80 | :param user_group_id: |
|
81 | 81 | """ |
|
82 | 82 | user_group = UserGroup.get_or_404(user_group_id) |
|
83 | 83 | data = user_group.get_dict() |
|
84 | 84 | # fill owner |
|
85 | 85 | if user_group.user: |
|
86 | 86 | data.update({'user': user_group.user.username}) |
|
87 | 87 | else: |
|
88 | 88 | replacement_user = User.get_first_super_admin().username |
|
89 | 89 | data.update({'user': replacement_user}) |
|
90 | 90 | return data |
|
91 | 91 | |
|
92 | 92 | def _revoke_perms_on_yourself(self, form_result): |
|
93 | 93 | _updates = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), |
|
94 | 94 | form_result['perm_updates']) |
|
95 | 95 | _additions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), |
|
96 | 96 | form_result['perm_additions']) |
|
97 | 97 | _deletions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]), |
|
98 | 98 | form_result['perm_deletions']) |
|
99 | 99 | admin_perm = 'usergroup.admin' |
|
100 | 100 | if _updates and _updates[0][1] != admin_perm or \ |
|
101 | 101 | _additions and _additions[0][1] != admin_perm or \ |
|
102 | 102 | _deletions and _deletions[0][1] != admin_perm: |
|
103 | 103 | return True |
|
104 | 104 | return False |
|
105 | 105 | |
|
106 | 106 | # permission check inside |
|
107 | 107 | @NotAnonymous() |
|
108 | 108 | def index(self): |
|
109 | 109 | |
|
110 | 110 | from rhodecode.lib.utils import PartialRenderer |
|
111 | 111 | _render = PartialRenderer('data_table/_dt_elements.mako') |
|
112 | 112 | |
|
113 | 113 | def user_group_name(user_group_id, user_group_name): |
|
114 | 114 | return _render("user_group_name", user_group_id, user_group_name) |
|
115 | 115 | |
|
116 | 116 | def user_group_actions(user_group_id, user_group_name): |
|
117 | 117 | return _render("user_group_actions", user_group_id, user_group_name) |
|
118 | 118 | |
|
119 | 119 | # json generate |
|
120 | 120 | group_iter = UserGroupList(UserGroup.query().all(), |
|
121 | 121 | perm_set=['usergroup.admin']) |
|
122 | 122 | |
|
123 | 123 | user_groups_data = [] |
|
124 | 124 | for user_gr in group_iter: |
|
125 | 125 | user_groups_data.append({ |
|
126 | 126 | "group_name": user_group_name( |
|
127 | 127 | user_gr.users_group_id, h.escape(user_gr.users_group_name)), |
|
128 | 128 | "group_name_raw": user_gr.users_group_name, |
|
129 | 129 | "desc": h.escape(user_gr.user_group_description), |
|
130 | 130 | "members": len(user_gr.members), |
|
131 | 131 | "sync": user_gr.group_data.get('extern_type'), |
|
132 | 132 | "active": h.bool2icon(user_gr.users_group_active), |
|
133 | 133 | "owner": h.escape(h.link_to_user(user_gr.user.username)), |
|
134 | 134 | "action": user_group_actions( |
|
135 | 135 | user_gr.users_group_id, user_gr.users_group_name) |
|
136 | 136 | }) |
|
137 | 137 | |
|
138 | 138 | c.data = json.dumps(user_groups_data) |
|
139 | 139 | return render('admin/user_groups/user_groups.mako') |
|
140 | 140 | |
|
141 | 141 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') |
|
142 | 142 | @auth.CSRFRequired() |
|
143 | 143 | def create(self): |
|
144 | 144 | |
|
145 | 145 | users_group_form = UserGroupForm()() |
|
146 | 146 | try: |
|
147 | 147 | form_result = users_group_form.to_python(dict(request.POST)) |
|
148 | 148 | user_group = UserGroupModel().create( |
|
149 | 149 | name=form_result['users_group_name'], |
|
150 | 150 | description=form_result['user_group_description'], |
|
151 | 151 | owner=c.rhodecode_user.user_id, |
|
152 | 152 | active=form_result['users_group_active']) |
|
153 | 153 | Session().flush() |
|
154 | 154 | creation_data = user_group.get_api_data() |
|
155 | 155 | user_group_name = form_result['users_group_name'] |
|
156 | 156 | |
|
157 | 157 | audit_logger.store_web( |
|
158 | 158 | 'user_group.create', action_data={'data': creation_data}, |
|
159 | 159 | user=c.rhodecode_user) |
|
160 | 160 | |
|
161 | 161 | user_group_link = h.link_to( |
|
162 | 162 | h.escape(user_group_name), |
|
163 | 163 | url('edit_users_group', user_group_id=user_group.users_group_id)) |
|
164 | 164 | h.flash(h.literal(_('Created user group %(user_group_link)s') |
|
165 | 165 | % {'user_group_link': user_group_link}), |
|
166 | 166 | category='success') |
|
167 | 167 | Session().commit() |
|
168 | 168 | except formencode.Invalid as errors: |
|
169 | 169 | return htmlfill.render( |
|
170 | 170 | render('admin/user_groups/user_group_add.mako'), |
|
171 | 171 | defaults=errors.value, |
|
172 | 172 | errors=errors.error_dict or {}, |
|
173 | 173 | prefix_error=False, |
|
174 | 174 | encoding="UTF-8", |
|
175 | 175 | force_defaults=False) |
|
176 | 176 | except Exception: |
|
177 | 177 | log.exception("Exception creating user group") |
|
178 | 178 | h.flash(_('Error occurred during creation of user group %s') \ |
|
179 | 179 | % request.POST.get('users_group_name'), category='error') |
|
180 | 180 | |
|
181 | 181 | return redirect( |
|
182 | 182 | url('edit_users_group', user_group_id=user_group.users_group_id)) |
|
183 | 183 | |
|
184 | 184 | @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true') |
|
185 | 185 | def new(self): |
|
186 | 186 | """GET /user_groups/new: Form to create a new item""" |
|
187 | 187 | # url('new_users_group') |
|
188 | 188 | return render('admin/user_groups/user_group_add.mako') |
|
189 | 189 | |
|
190 | 190 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
191 | 191 | @auth.CSRFRequired() |
|
192 | 192 | def update(self, user_group_id): |
|
193 | 193 | |
|
194 | 194 | user_group_id = safe_int(user_group_id) |
|
195 | 195 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
196 | 196 | c.active = 'settings' |
|
197 | 197 | self.__load_data(user_group_id) |
|
198 | 198 | |
|
199 | 199 | users_group_form = UserGroupForm( |
|
200 | 200 | edit=True, old_data=c.user_group.get_dict(), allow_disabled=True)() |
|
201 | 201 | |
|
202 | 202 | old_values = c.user_group.get_api_data() |
|
203 | 203 | try: |
|
204 | 204 | form_result = users_group_form.to_python(request.POST) |
|
205 | 205 | pstruct = peppercorn.parse(request.POST.items()) |
|
206 | 206 | form_result['users_group_members'] = pstruct['user_group_members'] |
|
207 | 207 | |
|
208 | 208 | UserGroupModel().update(c.user_group, form_result) |
|
209 | 209 | updated_user_group = form_result['users_group_name'] |
|
210 | 210 | |
|
211 | 211 | audit_logger.store_web( |
|
212 | 212 | 'user_group.edit', action_data={'old_data': old_values}, |
|
213 | 213 | user=c.rhodecode_user) |
|
214 | 214 | |
|
215 | 215 | h.flash(_('Updated user group %s') % updated_user_group, |
|
216 | 216 | category='success') |
|
217 | 217 | Session().commit() |
|
218 | 218 | except formencode.Invalid as errors: |
|
219 | 219 | defaults = errors.value |
|
220 | 220 | e = errors.error_dict or {} |
|
221 | 221 | |
|
222 | 222 | return htmlfill.render( |
|
223 | 223 | render('admin/user_groups/user_group_edit.mako'), |
|
224 | 224 | defaults=defaults, |
|
225 | 225 | errors=e, |
|
226 | 226 | prefix_error=False, |
|
227 | 227 | encoding="UTF-8", |
|
228 | 228 | force_defaults=False) |
|
229 | 229 | except Exception: |
|
230 | 230 | log.exception("Exception during update of user group") |
|
231 | 231 | h.flash(_('Error occurred during update of user group %s') |
|
232 | 232 | % request.POST.get('users_group_name'), category='error') |
|
233 | 233 | |
|
234 | 234 | return redirect(url('edit_users_group', user_group_id=user_group_id)) |
|
235 | 235 | |
|
236 | 236 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
237 | 237 | @auth.CSRFRequired() |
|
238 | 238 | def delete(self, user_group_id): |
|
239 | 239 | user_group_id = safe_int(user_group_id) |
|
240 | 240 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
241 | 241 | force = str2bool(request.POST.get('force')) |
|
242 | 242 | |
|
243 | 243 | old_values = c.user_group.get_api_data() |
|
244 | 244 | try: |
|
245 | 245 | UserGroupModel().delete(c.user_group, force=force) |
|
246 | 246 | audit_logger.store_web( |
|
247 | 247 | 'user.delete', action_data={'old_data': old_values}, |
|
248 | 248 | user=c.rhodecode_user) |
|
249 | 249 | Session().commit() |
|
250 | 250 | h.flash(_('Successfully deleted user group'), category='success') |
|
251 | 251 | except UserGroupAssignedException as e: |
|
252 | 252 | h.flash(str(e), category='error') |
|
253 | 253 | except Exception: |
|
254 | 254 | log.exception("Exception during deletion of user group") |
|
255 | 255 | h.flash(_('An error occurred during deletion of user group'), |
|
256 | 256 | category='error') |
|
257 | 257 | return redirect(url('users_groups')) |
|
258 | 258 | |
|
259 | 259 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
260 | 260 | def edit(self, user_group_id): |
|
261 | 261 | """GET /user_groups/user_group_id/edit: Form to edit an existing item""" |
|
262 | 262 | # url('edit_users_group', user_group_id=ID) |
|
263 | 263 | |
|
264 | 264 | user_group_id = safe_int(user_group_id) |
|
265 | 265 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
266 | 266 | c.active = 'settings' |
|
267 | 267 | self.__load_data(user_group_id) |
|
268 | 268 | |
|
269 | 269 | defaults = self.__load_defaults(user_group_id) |
|
270 | 270 | |
|
271 | 271 | return htmlfill.render( |
|
272 | 272 | render('admin/user_groups/user_group_edit.mako'), |
|
273 | 273 | defaults=defaults, |
|
274 | 274 | encoding="UTF-8", |
|
275 | 275 | force_defaults=False |
|
276 | 276 | ) |
|
277 | 277 | |
|
278 | 278 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
279 | 279 | def edit_perms(self, user_group_id): |
|
280 | 280 | user_group_id = safe_int(user_group_id) |
|
281 | 281 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
282 | 282 | c.active = 'perms' |
|
283 | 283 | |
|
284 | 284 | defaults = {} |
|
285 | 285 | # fill user group users |
|
286 | 286 | for p in c.user_group.user_user_group_to_perm: |
|
287 | 287 | defaults.update({'u_perm_%s' % p.user.user_id: |
|
288 | 288 | p.permission.permission_name}) |
|
289 | 289 | |
|
290 | 290 | for p in c.user_group.user_group_user_group_to_perm: |
|
291 | 291 | defaults.update({'g_perm_%s' % p.user_group.users_group_id: |
|
292 | 292 | p.permission.permission_name}) |
|
293 | 293 | |
|
294 | 294 | return htmlfill.render( |
|
295 | 295 | render('admin/user_groups/user_group_edit.mako'), |
|
296 | 296 | defaults=defaults, |
|
297 | 297 | encoding="UTF-8", |
|
298 | 298 | force_defaults=False |
|
299 | 299 | ) |
|
300 | 300 | |
|
301 | 301 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
302 | 302 | @auth.CSRFRequired() |
|
303 | 303 | def update_perms(self, user_group_id): |
|
304 | 304 | """ |
|
305 | 305 | grant permission for given usergroup |
|
306 | 306 | |
|
307 | 307 | :param user_group_id: |
|
308 | 308 | """ |
|
309 | 309 | user_group_id = safe_int(user_group_id) |
|
310 | 310 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
311 | 311 | form = UserGroupPermsForm()().to_python(request.POST) |
|
312 | 312 | |
|
313 | 313 | if not c.rhodecode_user.is_admin: |
|
314 | 314 | if self._revoke_perms_on_yourself(form): |
|
315 | 315 | msg = _('Cannot change permission for yourself as admin') |
|
316 | 316 | h.flash(msg, category='warning') |
|
317 | 317 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) |
|
318 | 318 | |
|
319 | 319 | try: |
|
320 | 320 | UserGroupModel().update_permissions(user_group_id, |
|
321 | 321 | form['perm_additions'], form['perm_updates'], form['perm_deletions']) |
|
322 | 322 | except RepoGroupAssignmentError: |
|
323 | 323 | h.flash(_('Target group cannot be the same'), category='error') |
|
324 | 324 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) |
|
325 | 325 | |
|
326 | 326 | # TODO(marcink): implement global permissions |
|
327 | 327 | # audit_log.store_web('user_group.edit.permissions') |
|
328 | 328 | Session().commit() |
|
329 | 329 | h.flash(_('User Group permissions updated'), category='success') |
|
330 | 330 | return redirect(url('edit_user_group_perms', user_group_id=user_group_id)) |
|
331 | 331 | |
|
332 | 332 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
333 | 333 | def edit_perms_summary(self, user_group_id): |
|
334 | 334 | user_group_id = safe_int(user_group_id) |
|
335 | 335 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
336 | 336 | c.active = 'perms_summary' |
|
337 | 337 | permissions = { |
|
338 | 338 | 'repositories': {}, |
|
339 | 339 | 'repositories_groups': {}, |
|
340 | 340 | } |
|
341 | 341 | ugroup_repo_perms = UserGroupRepoToPerm.query()\ |
|
342 | 342 | .options(joinedload(UserGroupRepoToPerm.permission))\ |
|
343 | 343 | .options(joinedload(UserGroupRepoToPerm.repository))\ |
|
344 | 344 | .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\ |
|
345 | 345 | .all() |
|
346 | 346 | |
|
347 | 347 | for gr in ugroup_repo_perms: |
|
348 | 348 | permissions['repositories'][gr.repository.repo_name] \ |
|
349 | 349 | = gr.permission.permission_name |
|
350 | 350 | |
|
351 | 351 | ugroup_group_perms = UserGroupRepoGroupToPerm.query()\ |
|
352 | 352 | .options(joinedload(UserGroupRepoGroupToPerm.permission))\ |
|
353 | 353 | .options(joinedload(UserGroupRepoGroupToPerm.group))\ |
|
354 | 354 | .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\ |
|
355 | 355 | .all() |
|
356 | 356 | |
|
357 | 357 | for gr in ugroup_group_perms: |
|
358 | 358 | permissions['repositories_groups'][gr.group.group_name] \ |
|
359 | 359 | = gr.permission.permission_name |
|
360 | 360 | c.permissions = permissions |
|
361 | 361 | return render('admin/user_groups/user_group_edit.mako') |
|
362 | 362 | |
|
363 | 363 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
364 | 364 | def edit_global_perms(self, user_group_id): |
|
365 | 365 | user_group_id = safe_int(user_group_id) |
|
366 | 366 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
367 | 367 | c.active = 'global_perms' |
|
368 | 368 | |
|
369 | 369 | c.default_user = User.get_default_user() |
|
370 | 370 | defaults = c.user_group.get_dict() |
|
371 | 371 | defaults.update(c.default_user.get_default_perms(suffix='_inherited')) |
|
372 | 372 | defaults.update(c.user_group.get_default_perms()) |
|
373 | 373 | |
|
374 | 374 | return htmlfill.render( |
|
375 | 375 | render('admin/user_groups/user_group_edit.mako'), |
|
376 | 376 | defaults=defaults, |
|
377 | 377 | encoding="UTF-8", |
|
378 | 378 | force_defaults=False |
|
379 | 379 | ) |
|
380 | 380 | |
|
381 | 381 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
382 | 382 | @auth.CSRFRequired() |
|
383 | 383 | def update_global_perms(self, user_group_id): |
|
384 | 384 | user_group_id = safe_int(user_group_id) |
|
385 | 385 | user_group = UserGroup.get_or_404(user_group_id) |
|
386 | 386 | c.active = 'global_perms' |
|
387 | 387 | |
|
388 | 388 | try: |
|
389 | 389 | # first stage that verifies the checkbox |
|
390 | 390 | _form = UserIndividualPermissionsForm() |
|
391 | 391 | form_result = _form.to_python(dict(request.POST)) |
|
392 | 392 | inherit_perms = form_result['inherit_default_permissions'] |
|
393 | 393 | user_group.inherit_default_permissions = inherit_perms |
|
394 | 394 | Session().add(user_group) |
|
395 | 395 | |
|
396 | 396 | if not inherit_perms: |
|
397 | 397 | # only update the individual ones if we un check the flag |
|
398 | 398 | _form = UserPermissionsForm( |
|
399 | 399 | [x[0] for x in c.repo_create_choices], |
|
400 | 400 | [x[0] for x in c.repo_create_on_write_choices], |
|
401 | 401 | [x[0] for x in c.repo_group_create_choices], |
|
402 | 402 | [x[0] for x in c.user_group_create_choices], |
|
403 | 403 | [x[0] for x in c.fork_choices], |
|
404 | 404 | [x[0] for x in c.inherit_default_permission_choices])() |
|
405 | 405 | |
|
406 | 406 | form_result = _form.to_python(dict(request.POST)) |
|
407 | 407 | form_result.update({'perm_user_group_id': user_group.users_group_id}) |
|
408 | 408 | |
|
409 | 409 | PermissionModel().update_user_group_permissions(form_result) |
|
410 | 410 | |
|
411 | 411 | Session().commit() |
|
412 | 412 | h.flash(_('User Group global permissions updated successfully'), |
|
413 | 413 | category='success') |
|
414 | 414 | |
|
415 | 415 | except formencode.Invalid as errors: |
|
416 | 416 | defaults = errors.value |
|
417 | 417 | c.user_group = user_group |
|
418 | 418 | return htmlfill.render( |
|
419 | 419 | render('admin/user_groups/user_group_edit.mako'), |
|
420 | 420 | defaults=defaults, |
|
421 | 421 | errors=errors.error_dict or {}, |
|
422 | 422 | prefix_error=False, |
|
423 | 423 | encoding="UTF-8", |
|
424 | 424 | force_defaults=False) |
|
425 | 425 | except Exception: |
|
426 | 426 | log.exception("Exception during permissions saving") |
|
427 | 427 | h.flash(_('An error occurred during permissions saving'), |
|
428 | 428 | category='error') |
|
429 | 429 | |
|
430 | 430 | return redirect(url('edit_user_group_global_perms', user_group_id=user_group_id)) |
|
431 | 431 | |
|
432 | 432 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
433 | 433 | def edit_advanced(self, user_group_id): |
|
434 | 434 | user_group_id = safe_int(user_group_id) |
|
435 | 435 | c.user_group = UserGroup.get_or_404(user_group_id) |
|
436 | 436 | c.active = 'advanced' |
|
437 | 437 | c.group_members_obj = sorted( |
|
438 | 438 | (x.user for x in c.user_group.members), |
|
439 | 439 | key=lambda u: u.username.lower()) |
|
440 | 440 | |
|
441 | 441 | c.group_to_repos = sorted( |
|
442 | 442 | (x.repository for x in c.user_group.users_group_repo_to_perm), |
|
443 | 443 | key=lambda u: u.repo_name.lower()) |
|
444 | 444 | |
|
445 | 445 | c.group_to_repo_groups = sorted( |
|
446 | 446 | (x.group for x in c.user_group.users_group_repo_group_to_perm), |
|
447 | 447 | key=lambda u: u.group_name.lower()) |
|
448 | 448 | |
|
449 | 449 | return render('admin/user_groups/user_group_edit.mako') |
|
450 | 450 | |
|
451 | 451 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
452 | 452 | def edit_advanced_set_synchronization(self, user_group_id): |
|
453 | 453 | user_group_id = safe_int(user_group_id) |
|
454 | 454 | user_group = UserGroup.get_or_404(user_group_id) |
|
455 | 455 | |
|
456 | 456 | existing = user_group.group_data.get('extern_type') |
|
457 | 457 | |
|
458 | 458 | if existing: |
|
459 | 459 | new_state = user_group.group_data |
|
460 | 460 | new_state['extern_type'] = None |
|
461 | 461 | else: |
|
462 | 462 | new_state = user_group.group_data |
|
463 | 463 | new_state['extern_type'] = 'manual' |
|
464 | 464 | new_state['extern_type_set_by'] = c.rhodecode_user.username |
|
465 | 465 | |
|
466 | 466 | try: |
|
467 | 467 | user_group.group_data = new_state |
|
468 | 468 | Session().add(user_group) |
|
469 | 469 | Session().commit() |
|
470 | 470 | |
|
471 | 471 | h.flash(_('User Group synchronization updated successfully'), |
|
472 | 472 | category='success') |
|
473 | 473 | except Exception: |
|
474 | 474 | log.exception("Exception during sync settings saving") |
|
475 | 475 | h.flash(_('An error occurred during synchronization update'), |
|
476 | 476 | category='error') |
|
477 | 477 | |
|
478 | 478 | return redirect( |
|
479 | 479 | url('edit_user_group_advanced', user_group_id=user_group_id)) |
|
480 | 480 | |
|
481 | 481 | @HasUserGroupPermissionAnyDecorator('usergroup.admin') |
|
482 | 482 | @XHRRequired() |
|
483 | 483 | @jsonify |
|
484 | 484 | def user_group_members(self, user_group_id): |
|
485 | 485 | """ |
|
486 | 486 | Return members of given user group |
|
487 | 487 | """ |
|
488 | 488 | user_group_id = safe_int(user_group_id) |
|
489 | 489 | user_group = UserGroup.get_or_404(user_group_id) |
|
490 | 490 | group_members_obj = sorted((x.user for x in user_group.members), |
|
491 | 491 | key=lambda u: u.username.lower()) |
|
492 | 492 | |
|
493 | 493 | group_members = [ |
|
494 | 494 | { |
|
495 | 495 | 'id': user.user_id, |
|
496 | 'first_name': user.name, | |
|
497 | 'last_name': user.lastname, | |
|
496 | 'first_name': user.first_name, | |
|
497 | 'last_name': user.last_name, | |
|
498 | 498 | 'username': user.username, |
|
499 | 499 | 'icon_link': h.gravatar_url(user.email, 30), |
|
500 | 500 | 'value_display': h.person(user.email), |
|
501 | 501 | 'value': user.username, |
|
502 | 502 | 'value_type': 'user', |
|
503 | 503 | 'active': user.active, |
|
504 | 504 | } |
|
505 | 505 | for user in group_members_obj |
|
506 | 506 | ] |
|
507 | 507 | |
|
508 | 508 | return { |
|
509 | 509 | 'members': group_members |
|
510 | 510 | } |
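A note on `_revoke_perms_on_yourself()` in the controller above: it inspects the submitted permission additions, updates, and deletions for entries that target the logged-in user and reports when any of them would leave that user without `usergroup.admin`, so that `update_perms()` can refuse the change. Here is a simplified stand-alone sketch of the guard; the flat `(user_id, permission)` tuples and sample values are illustrative assumptions:

```python
ADMIN_PERM = 'usergroup.admin'


def revokes_own_admin(current_user_id, perm_changes):
    """True if any submitted change strips ADMIN_PERM from the current user."""
    own_changes = [perm for user_id, perm in perm_changes
                   if int(user_id) == current_user_id]
    return any(perm != ADMIN_PERM for perm in own_changes)


submitted = [('5', 'usergroup.read'), ('7', 'usergroup.admin')]
print(revokes_own_admin(5, submitted))  # True  -> block with a warning flash
print(revokes_own_admin(7, submitted))  # False -> change is allowed
```

Super-admins bypass the guard entirely (`if not c.rhodecode_user.is_admin:`), since they keep access to the user group either way.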
@@ -1,1012 +1,1011 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | pull requests controller for rhodecode for initializing pull requests |
|
23 | 23 | """ |
|
24 | import types | |
|
25 | ||
|
26 | 24 | import peppercorn |
|
27 | 25 | import formencode |
|
28 | 26 | import logging |
|
29 | 27 | import collections |
|
30 | 28 | |
|
31 | 29 | from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest |
|
32 | 30 | from pylons import request, tmpl_context as c, url |
|
33 | 31 | from pylons.controllers.util import redirect |
|
34 | 32 | from pylons.i18n.translation import _ |
|
35 | 33 | from pyramid.threadlocal import get_current_registry |
|
34 | from pyramid.httpexceptions import HTTPFound | |
|
36 | 35 | from sqlalchemy.sql import func |
|
37 | 36 | from sqlalchemy.sql.expression import or_ |
|
38 | 37 | |
|
39 | 38 | from rhodecode import events |
|
40 | 39 | from rhodecode.lib import auth, diffs, helpers as h, codeblocks |
|
41 | 40 | from rhodecode.lib.ext_json import json |
|
42 | 41 | from rhodecode.lib.base import ( |
|
43 | 42 | BaseRepoController, render, vcs_operation_context) |
|
44 | 43 | from rhodecode.lib.auth import ( |
|
45 | 44 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, |
|
46 | 45 | HasAcceptedRepoType, XHRRequired) |
|
47 | 46 | from rhodecode.lib.channelstream import channelstream_request |
|
48 | 47 | from rhodecode.lib.utils import jsonify |
|
49 | 48 | from rhodecode.lib.utils2 import ( |
|
50 | 49 | safe_int, safe_str, str2bool, safe_unicode) |
|
51 | 50 | from rhodecode.lib.vcs.backends.base import ( |
|
52 | 51 | EmptyCommit, UpdateFailureReason, EmptyRepository) |
|
53 | 52 | from rhodecode.lib.vcs.exceptions import ( |
|
54 | 53 | EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError, |
|
55 | 54 | NodeDoesNotExistError) |
|
56 | 55 | |
|
57 | 56 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
58 | 57 | from rhodecode.model.comment import CommentsModel |
|
59 | 58 | from rhodecode.model.db import (PullRequest, ChangesetStatus, ChangesetComment, |
|
60 | 59 | Repository, PullRequestVersion) |
|
61 | 60 | from rhodecode.model.forms import PullRequestForm |
|
62 | 61 | from rhodecode.model.meta import Session |
|
63 | 62 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck |
|
64 | 63 | |
|
65 | 64 | log = logging.getLogger(__name__) |
|
66 | 65 | |
|
67 | 66 | |
|
68 | 67 | class PullrequestsController(BaseRepoController): |
|
69 | 68 | |
|
70 | 69 | def __before__(self): |
|
71 | 70 | super(PullrequestsController, self).__before__() |
|
72 | 71 | c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED |
|
73 | 72 | c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED |
|
74 | 73 | |
|
75 | 74 | @LoginRequired() |
|
76 | 75 | @NotAnonymous() |
|
77 | 76 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
78 | 77 | 'repository.admin') |
|
79 | 78 | @HasAcceptedRepoType('git', 'hg') |
|
80 | 79 | def index(self): |
|
81 | 80 | source_repo = c.rhodecode_db_repo |
|
82 | 81 | |
|
83 | 82 | try: |
|
84 | 83 | source_repo.scm_instance().get_commit() |
|
85 | 84 | except EmptyRepositoryError: |
|
86 | 85 | h.flash(h.literal(_('There are no commits yet')), |
|
87 | 86 | category='warning') |
|
88 | 87 | redirect(h.route_path('repo_summary', repo_name=source_repo.repo_name)) |
|
89 | 88 | |
|
90 | 89 | commit_id = request.GET.get('commit') |
|
91 | 90 | branch_ref = request.GET.get('branch') |
|
92 | 91 | bookmark_ref = request.GET.get('bookmark') |
|
93 | 92 | |
|
94 | 93 | try: |
|
95 | 94 | source_repo_data = PullRequestModel().generate_repo_data( |
|
96 | 95 | source_repo, commit_id=commit_id, |
|
97 | 96 | branch=branch_ref, bookmark=bookmark_ref) |
|
98 | 97 | except CommitDoesNotExistError as e: |
|
99 | 98 | log.exception(e) |
|
100 | 99 | h.flash(_('Commit does not exist'), 'error') |
|
101 | 100 | redirect(url('pullrequest_home', repo_name=source_repo.repo_name)) |
|
102 | 101 | |
|
103 | 102 | default_target_repo = source_repo |
|
104 | 103 | |
|
105 | 104 | if source_repo.parent: |
|
106 | 105 | parent_vcs_obj = source_repo.parent.scm_instance() |
|
107 | 106 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
108 | 107 | # change default if we have a parent repo |
|
109 | 108 | default_target_repo = source_repo.parent |
|
110 | 109 | |
|
111 | 110 | target_repo_data = PullRequestModel().generate_repo_data( |
|
112 | 111 | default_target_repo) |
|
113 | 112 | |
|
114 | 113 | selected_source_ref = source_repo_data['refs']['selected_ref'] |
|
115 | 114 | |
|
116 | 115 | title_source_ref = selected_source_ref.split(':', 2)[1] |
|
117 | 116 | c.default_title = PullRequestModel().generate_pullrequest_title( |
|
118 | 117 | source=source_repo.repo_name, |
|
119 | 118 | source_ref=title_source_ref, |
|
120 | 119 | target=default_target_repo.repo_name |
|
121 | 120 | ) |
|
122 | 121 | |
|
123 | 122 | c.default_repo_data = { |
|
124 | 123 | 'source_repo_name': source_repo.repo_name, |
|
125 | 124 | 'source_refs_json': json.dumps(source_repo_data), |
|
126 | 125 | 'target_repo_name': default_target_repo.repo_name, |
|
127 | 126 | 'target_refs_json': json.dumps(target_repo_data), |
|
128 | 127 | } |
|
129 | 128 | c.default_source_ref = selected_source_ref |
|
130 | 129 | |
|
131 | 130 | return render('/pullrequests/pullrequest.mako') |
|
132 | 131 | |
|
133 | 132 | @LoginRequired() |
|
134 | 133 | @NotAnonymous() |
|
135 | 134 | @XHRRequired() |
|
136 | 135 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
137 | 136 | 'repository.admin') |
|
138 | 137 | @jsonify |
|
139 | 138 | def get_repo_refs(self, repo_name, target_repo_name): |
|
140 | 139 | repo = Repository.get_by_repo_name(target_repo_name) |
|
141 | 140 | if not repo: |
|
142 | 141 | raise HTTPNotFound |
|
143 | 142 | return PullRequestModel().generate_repo_data(repo) |
|
144 | 143 | |
|
145 | 144 | @LoginRequired() |
|
146 | 145 | @NotAnonymous() |
|
147 | 146 | @XHRRequired() |
|
148 | 147 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
149 | 148 | 'repository.admin') |
|
150 | 149 | @jsonify |
|
151 | 150 | def get_repo_destinations(self, repo_name): |
|
152 | 151 | repo = Repository.get_by_repo_name(repo_name) |
|
153 | 152 | if not repo: |
|
154 | 153 | raise HTTPNotFound |
|
155 | 154 | filter_query = request.GET.get('query') |
|
156 | 155 | |
|
157 | 156 | query = Repository.query() \ |
|
158 | 157 | .order_by(func.length(Repository.repo_name)) \ |
|
159 | 158 | .filter(or_( |
|
160 | 159 | Repository.repo_name == repo.repo_name, |
|
161 | 160 | Repository.fork_id == repo.repo_id)) |
|
162 | 161 | |
|
163 | 162 | if filter_query: |
|
164 | 163 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
165 | 164 | query = query.filter( |
|
166 | 165 | Repository.repo_name.ilike(ilike_expression)) |
|
167 | 166 | |
|
168 | 167 | add_parent = False |
|
169 | 168 | if repo.parent: |
|
170 | 169 | if filter_query in repo.parent.repo_name: |
|
171 | 170 | parent_vcs_obj = repo.parent.scm_instance() |
|
172 | 171 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
173 | 172 | add_parent = True |
|
174 | 173 | |
|
175 | 174 | limit = 20 - 1 if add_parent else 20 |
|
176 | 175 | all_repos = query.limit(limit).all() |
|
177 | 176 | if add_parent: |
|
178 | 177 | all_repos += [repo.parent] |
|
179 | 178 | |
|
180 | 179 | repos = [] |
|
181 | 180 | for obj in self.scm_model.get_repos(all_repos): |
|
182 | 181 | repos.append({ |
|
183 | 182 | 'id': obj['name'], |
|
184 | 183 | 'text': obj['name'], |
|
185 | 184 | 'type': 'repo', |
|
186 | 185 | 'obj': obj['dbrepo'] |
|
187 | 186 | }) |
|
188 | 187 | |
|
189 | 188 | data = { |
|
190 | 189 | 'more': False, |
|
191 | 190 | 'results': [{ |
|
192 | 191 | 'text': _('Repositories'), |
|
193 | 192 | 'children': repos |
|
194 | 193 | }] if repos else [] |
|
195 | 194 | } |
|
196 | 195 | return data |
|
197 | 196 | |
|
198 | 197 | @LoginRequired() |
|
199 | 198 | @NotAnonymous() |
|
200 | 199 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
201 | 200 | 'repository.admin') |
|
202 | 201 | @HasAcceptedRepoType('git', 'hg') |
|
203 | 202 | @auth.CSRFRequired() |
|
204 | 203 | def create(self, repo_name): |
|
205 | 204 | repo = Repository.get_by_repo_name(repo_name) |
|
206 | 205 | if not repo: |
|
207 | 206 | raise HTTPNotFound |
|
208 | 207 | |
|
209 | 208 | controls = peppercorn.parse(request.POST.items()) |
|
210 | 209 | |
|
211 | 210 | try: |
|
212 | 211 | _form = PullRequestForm(repo.repo_id)().to_python(controls) |
|
213 | 212 | except formencode.Invalid as errors: |
|
214 | 213 | if errors.error_dict.get('revisions'): |
|
215 | 214 | msg = 'Revisions: %s' % errors.error_dict['revisions'] |
|
216 | 215 | elif errors.error_dict.get('pullrequest_title'): |
|
217 | 216 | msg = _('Pull request requires a title with min. 3 chars') |
|
218 | 217 | else: |
|
219 | 218 | msg = _('Error creating pull request: {}').format(errors) |
|
220 | 219 | log.exception(msg) |
|
221 | 220 | h.flash(msg, 'error') |
|
222 | 221 | |
|
223 | 222 | # would rather just go back to form ... |
|
224 | 223 | return redirect(url('pullrequest_home', repo_name=repo_name)) |
|
225 | 224 | |
|
226 | 225 | source_repo = _form['source_repo'] |
|
227 | 226 | source_ref = _form['source_ref'] |
|
228 | 227 | target_repo = _form['target_repo'] |
|
229 | 228 | target_ref = _form['target_ref'] |
|
230 | 229 | commit_ids = _form['revisions'][::-1] |
|
231 | 230 | |
|
232 | 231 | # find the ancestor for this pr |
|
233 | 232 | source_db_repo = Repository.get_by_repo_name(_form['source_repo']) |
|
234 | 233 | target_db_repo = Repository.get_by_repo_name(_form['target_repo']) |
|
235 | 234 | |
|
236 | 235 | source_scm = source_db_repo.scm_instance() |
|
237 | 236 | target_scm = target_db_repo.scm_instance() |
|
238 | 237 | |
|
239 | 238 | source_commit = source_scm.get_commit(source_ref.split(':')[-1]) |
|
240 | 239 | target_commit = target_scm.get_commit(target_ref.split(':')[-1]) |
|
241 | 240 | |
|
242 | 241 | ancestor = source_scm.get_common_ancestor( |
|
243 | 242 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
244 | 243 | |
|
245 | 244 | target_ref_type, target_ref_name, __ = _form['target_ref'].split(':') |
|
246 | 245 | target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) |
|
247 | 246 | |
|
248 | 247 | pullrequest_title = _form['pullrequest_title'] |
|
249 | 248 | title_source_ref = source_ref.split(':', 2)[1] |
|
250 | 249 | if not pullrequest_title: |
|
251 | 250 | pullrequest_title = PullRequestModel().generate_pullrequest_title( |
|
252 | 251 | source=source_repo, |
|
253 | 252 | source_ref=title_source_ref, |
|
254 | 253 | target=target_repo |
|
255 | 254 | ) |
|
256 | 255 | |
|
257 | 256 | description = _form['pullrequest_desc'] |
|
258 | 257 | |
|
259 | 258 | get_default_reviewers_data, validate_default_reviewers = \ |
|
260 | 259 | PullRequestModel().get_reviewer_functions() |
|
261 | 260 | |
|
262 | 261 | # recalculate the reviewer logic, to make sure we can validate this
|
263 | 262 | reviewer_rules = get_default_reviewers_data( |
|
264 | 263 | c.rhodecode_user.get_instance(), source_db_repo, |
|
265 | 264 | source_commit, target_db_repo, target_commit) |
|
266 | 265 | |
|
267 | 266 | given_reviewers = _form['review_members'] |
|
268 | 267 | reviewers = validate_default_reviewers(given_reviewers, reviewer_rules) |
|
269 | 268 | |
|
270 | 269 | try: |
|
271 | 270 | pull_request = PullRequestModel().create( |
|
272 | 271 | c.rhodecode_user.user_id, source_repo, source_ref, target_repo, |
|
273 | 272 | target_ref, commit_ids, reviewers, pullrequest_title, |
|
274 | 273 | description, reviewer_rules |
|
275 | 274 | ) |
|
276 | 275 | Session().commit() |
|
277 | 276 | h.flash(_('Successfully opened new pull request'), |
|
278 | 277 | category='success') |
|
279 | 278 | except Exception as e: |
|
280 | 279 | msg = _('Error occurred during creation of this pull request.') |
|
281 | 280 | log.exception(msg) |
|
282 | 281 | h.flash(msg, category='error') |
|
283 | 282 | return redirect(url('pullrequest_home', repo_name=repo_name)) |
|
284 | 283 | |
|
285 | 284 | raise HTTPFound( |
|
286 | 285 | h.route_path('pullrequest_show', repo_name=target_repo, |
|
287 | 286 | pull_request_id=pull_request.pull_request_id)) |
|
288 | 287 | |
|
289 | 288 | @LoginRequired() |
|
290 | 289 | @NotAnonymous() |
|
291 | 290 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
292 | 291 | 'repository.admin') |
|
293 | 292 | @auth.CSRFRequired() |
|
294 | 293 | @jsonify |
|
295 | 294 | def update(self, repo_name, pull_request_id): |
|
296 | 295 | pull_request_id = safe_int(pull_request_id) |
|
297 | 296 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
298 | 297 | # only owner or admin can update it |
|
299 | 298 | allowed_to_update = PullRequestModel().check_user_update( |
|
300 | 299 | pull_request, c.rhodecode_user) |
|
301 | 300 | if allowed_to_update: |
|
302 | 301 | controls = peppercorn.parse(request.POST.items()) |
|
303 | 302 | |
|
304 | 303 | if 'review_members' in controls: |
|
305 | 304 | self._update_reviewers( |
|
306 | 305 | pull_request_id, controls['review_members'], |
|
307 | 306 | pull_request.reviewer_data) |
|
308 | 307 | elif str2bool(request.POST.get('update_commits', 'false')): |
|
309 | 308 | self._update_commits(pull_request) |
|
310 | 309 | elif str2bool(request.POST.get('edit_pull_request', 'false')): |
|
311 | 310 | self._edit_pull_request(pull_request) |
|
312 | 311 | else: |
|
313 | 312 | raise HTTPBadRequest() |
|
314 | 313 | return True |
|
315 | 314 | raise HTTPForbidden() |
|
316 | 315 | |
|
317 | 316 | def _edit_pull_request(self, pull_request): |
|
318 | 317 | try: |
|
319 | 318 | PullRequestModel().edit( |
|
320 | 319 | pull_request, request.POST.get('title'), |
|
321 | 320 | request.POST.get('description'), c.rhodecode_user) |
|
322 | 321 | except ValueError: |
|
323 | 322 | msg = _(u'Cannot update closed pull requests.') |
|
324 | 323 | h.flash(msg, category='error') |
|
325 | 324 | return |
|
326 | 325 | else: |
|
327 | 326 | Session().commit() |
|
328 | 327 | |
|
329 | 328 | msg = _(u'Pull request title & description updated.') |
|
330 | 329 | h.flash(msg, category='success') |
|
331 | 330 | return |
|
332 | 331 | |
|
333 | 332 | def _update_commits(self, pull_request): |
|
334 | 333 | resp = PullRequestModel().update_commits(pull_request) |
|
335 | 334 | |
|
336 | 335 | if resp.executed: |
|
337 | 336 | |
|
338 | 337 | if resp.target_changed and resp.source_changed: |
|
339 | 338 | changed = 'target and source repositories' |
|
340 | 339 | elif resp.target_changed and not resp.source_changed: |
|
341 | 340 | changed = 'target repository' |
|
342 | 341 | elif not resp.target_changed and resp.source_changed: |
|
343 | 342 | changed = 'source repository' |
|
344 | 343 | else: |
|
345 | 344 | changed = 'nothing' |
|
346 | 345 | |
|
347 | 346 | msg = _( |
|
348 | 347 | u'Pull request updated to "{source_commit_id}" with ' |
|
349 | 348 | u'{count_added} added, {count_removed} removed commits. ' |
|
350 | 349 | u'Source of changes: {change_source}') |
|
351 | 350 | msg = msg.format( |
|
352 | 351 | source_commit_id=pull_request.source_ref_parts.commit_id, |
|
353 | 352 | count_added=len(resp.changes.added), |
|
354 | 353 | count_removed=len(resp.changes.removed), |
|
355 | 354 | change_source=changed) |
|
356 | 355 | h.flash(msg, category='success') |
|
357 | 356 | |
|
358 | 357 | registry = get_current_registry() |
|
359 | 358 | rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {}) |
|
360 | 359 | channelstream_config = rhodecode_plugins.get('channelstream', {}) |
|
361 | 360 | if channelstream_config.get('enabled'): |
|
362 | 361 | message = msg + ( |
|
363 | 362 | ' - <a onclick="window.location.reload()">' |
|
364 | 363 | '<strong>{}</strong></a>'.format(_('Reload page'))) |
|
365 | 364 | channel = '/repo${}$/pr/{}'.format( |
|
366 | 365 | pull_request.target_repo.repo_name, |
|
367 | 366 | pull_request.pull_request_id |
|
368 | 367 | ) |
|
369 | 368 | payload = { |
|
370 | 369 | 'type': 'message', |
|
371 | 370 | 'user': 'system', |
|
372 | 371 | 'exclude_users': [request.user.username], |
|
373 | 372 | 'channel': channel, |
|
374 | 373 | 'message': { |
|
375 | 374 | 'message': message, |
|
376 | 375 | 'level': 'success', |
|
377 | 376 | 'topic': '/notifications' |
|
378 | 377 | } |
|
379 | 378 | } |
|
380 | 379 | channelstream_request( |
|
381 | 380 | channelstream_config, [payload], '/message', |
|
382 | 381 | raise_exc=False) |
|
383 | 382 | else: |
|
384 | 383 | msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] |
|
385 | 384 | warning_reasons = [ |
|
386 | 385 | UpdateFailureReason.NO_CHANGE, |
|
387 | 386 | UpdateFailureReason.WRONG_REF_TYPE, |
|
388 | 387 | ] |
|
389 | 388 | category = 'warning' if resp.reason in warning_reasons else 'error' |
|
390 | 389 | h.flash(msg, category=category) |
|
391 | 390 | |
|
392 | 391 | @auth.CSRFRequired() |
|
393 | 392 | @LoginRequired() |
|
394 | 393 | @NotAnonymous() |
|
395 | 394 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
396 | 395 | 'repository.admin') |
|
397 | 396 | def merge(self, repo_name, pull_request_id): |
|
398 | 397 | """ |
|
399 | 398 | POST /{repo_name}/pull-request/{pull_request_id} |
|
400 | 399 | |
|
401 | 400 | Merge will perform a server-side merge of the specified |
|
402 | 401 | pull request, if the pull request is approved and mergeable. |
|
403 | 402 | After successful merging, the pull request is automatically |
|
404 | 403 | closed, with a relevant comment. |
|
405 | 404 | """ |
|
406 | 405 | pull_request_id = safe_int(pull_request_id) |
|
407 | 406 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
408 | 407 | user = c.rhodecode_user |
|
409 | 408 | |
|
410 | 409 | check = MergeCheck.validate(pull_request, user) |
|
411 | 410 | merge_possible = not check.failed |
|
412 | 411 | |
|
413 | 412 | for err_type, error_msg in check.errors: |
|
414 | 413 | h.flash(error_msg, category=err_type) |
|
415 | 414 | |
|
416 | 415 | if merge_possible: |
|
417 | 416 | log.debug("Pre-conditions checked, trying to merge.") |
|
418 | 417 | extras = vcs_operation_context( |
|
419 | 418 | request.environ, repo_name=pull_request.target_repo.repo_name, |
|
420 | 419 | username=user.username, action='push', |
|
421 | 420 | scm=pull_request.target_repo.repo_type) |
|
422 | 421 | self._merge_pull_request(pull_request, user, extras) |
|
423 | 422 | |
|
424 | 423 | raise HTTPFound( |
|
425 | 424 | h.route_path('pullrequest_show', |
|
426 | 425 | repo_name=pull_request.target_repo.repo_name, |
|
427 | 426 | pull_request_id=pull_request.pull_request_id)) |
|
428 | 427 | |
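A minimal client-side sketch of driving the merge endpoint documented above (the host, repo name, and pull request id are hypothetical; assumes an already-authenticated requests session, with the CSRF token scraped from a rendered form, matching the csrf_token key used by this codebase):

    import requests

    session = requests.Session()
    # assumes the session was authenticated beforehand and the CSRF token
    # was read from a rendered form (hypothetical value below)
    csrf_token = 'token-from-rendered-form'
    session.post(
        'https://code.example.com/myrepo/pull-request/42',
        data={'csrf_token': csrf_token})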
|
429 | 428 | def _merge_pull_request(self, pull_request, user, extras): |
|
430 | 429 | merge_resp = PullRequestModel().merge( |
|
431 | 430 | pull_request, user, extras=extras) |
|
432 | 431 | |
|
433 | 432 | if merge_resp.executed: |
|
434 | 433 | log.debug("The merge was successful, closing the pull request.") |
|
435 | 434 | PullRequestModel().close_pull_request( |
|
436 | 435 | pull_request.pull_request_id, user) |
|
437 | 436 | Session().commit() |
|
438 | 437 | msg = _('Pull request was successfully merged and closed.') |
|
439 | 438 | h.flash(msg, category='success') |
|
440 | 439 | else: |
|
441 | 440 | log.debug( |
|
442 | 441 | "The merge was not successful. Merge response: %s", |
|
443 | 442 | merge_resp) |
|
444 | 443 | msg = PullRequestModel().merge_status_message( |
|
445 | 444 | merge_resp.failure_reason) |
|
446 | 445 | h.flash(msg, category='error') |
|
447 | 446 | |
|
448 | 447 | def _update_reviewers(self, pull_request_id, review_members, reviewer_rules): |
|
449 | 448 | |
|
450 | 449 | get_default_reviewers_data, validate_default_reviewers = \ |
|
451 | 450 | PullRequestModel().get_reviewer_functions() |
|
452 | 451 | |
|
453 | 452 | try: |
|
454 | 453 | reviewers = validate_default_reviewers(review_members, reviewer_rules) |
|
455 | 454 | except ValueError as e: |
|
456 | 455 | log.error('Reviewers Validation: {}'.format(e)) |
|
457 | 456 | h.flash(e, category='error') |
|
458 | 457 | return |
|
459 | 458 | |
|
460 | 459 | PullRequestModel().update_reviewers( |
|
461 | 460 | pull_request_id, reviewers, c.rhodecode_user) |
|
462 | 461 | h.flash(_('Pull request reviewers updated.'), category='success') |
|
463 | 462 | Session().commit() |
|
464 | 463 | |
|
465 | 464 | @LoginRequired() |
|
466 | 465 | @NotAnonymous() |
|
467 | 466 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
468 | 467 | 'repository.admin') |
|
469 | 468 | @auth.CSRFRequired() |
|
470 | 469 | @jsonify |
|
471 | 470 | def delete(self, repo_name, pull_request_id): |
|
472 | 471 | pull_request_id = safe_int(pull_request_id) |
|
473 | 472 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
474 | 473 | |
|
475 | 474 | pr_closed = pull_request.is_closed() |
|
476 | 475 | allowed_to_delete = PullRequestModel().check_user_delete( |
|
477 | 476 | pull_request, c.rhodecode_user) and not pr_closed |
|
478 | 477 | |
|
479 | 478 | # only the owner can delete it!
|
480 | 479 | if allowed_to_delete: |
|
481 | 480 | PullRequestModel().delete(pull_request, c.rhodecode_user) |
|
482 | 481 | Session().commit() |
|
483 | 482 | h.flash(_('Successfully deleted pull request'), |
|
484 | 483 | category='success') |
|
485 | 484 | return redirect(url('my_account_pullrequests')) |
|
486 | 485 | |
|
487 | 486 | h.flash(_('You are not allowed to delete this pull request'),
|
488 | 487 | category='error') |
|
489 | 488 | raise HTTPForbidden() |
|
490 | 489 | |
|
491 | 490 | def _get_pr_version(self, pull_request_id, version=None): |
|
492 | 491 | pull_request_id = safe_int(pull_request_id) |
|
493 | 492 | at_version = None |
|
494 | 493 | |
|
495 | 494 | if version and version == 'latest': |
|
496 | 495 | pull_request_ver = PullRequest.get(pull_request_id) |
|
497 | 496 | pull_request_obj = pull_request_ver |
|
498 | 497 | _org_pull_request_obj = pull_request_obj |
|
499 | 498 | at_version = 'latest' |
|
500 | 499 | elif version: |
|
501 | 500 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
502 | 501 | pull_request_obj = pull_request_ver |
|
503 | 502 | _org_pull_request_obj = pull_request_ver.pull_request |
|
504 | 503 | at_version = pull_request_ver.pull_request_version_id |
|
505 | 504 | else: |
|
506 | 505 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
507 | 506 | pull_request_id) |
|
508 | 507 | |
|
509 | 508 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
510 | 509 | pull_request_obj, _org_pull_request_obj) |
|
511 | 510 | |
|
512 | 511 | return _org_pull_request_obj, pull_request_obj, \ |
|
513 | 512 | pull_request_display_obj, at_version |
|
514 | 513 | |
|
515 | 514 | def _get_diffset( |
|
516 | 515 | self, source_repo, source_ref_id, target_ref_id, target_commit, |
|
517 | 516 | source_commit, diff_limit, file_limit, display_inline_comments): |
|
518 | 517 | vcs_diff = PullRequestModel().get_diff( |
|
519 | 518 | source_repo, source_ref_id, target_ref_id) |
|
520 | 519 | |
|
521 | 520 | diff_processor = diffs.DiffProcessor( |
|
522 | 521 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
523 | 522 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
524 | 523 | |
|
525 | 524 | _parsed = diff_processor.prepare() |
|
526 | 525 | |
|
527 | 526 | def _node_getter(commit): |
|
528 | 527 | def get_node(fname): |
|
529 | 528 | try: |
|
530 | 529 | return commit.get_node(fname) |
|
531 | 530 | except NodeDoesNotExistError: |
|
532 | 531 | return None |
|
533 | 532 | |
|
534 | 533 | return get_node |
|
535 | 534 | |
|
536 | 535 | diffset = codeblocks.DiffSet( |
|
537 | 536 | repo_name=c.repo_name, |
|
538 | 537 | source_repo_name=c.source_repo.repo_name, |
|
539 | 538 | source_node_getter=_node_getter(target_commit), |
|
540 | 539 | target_node_getter=_node_getter(source_commit), |
|
541 | 540 | comments=display_inline_comments |
|
542 | 541 | ) |
|
543 | 542 | diffset = diffset.render_patchset( |
|
544 | 543 | _parsed, target_commit.raw_id, source_commit.raw_id) |
|
545 | 544 | |
|
546 | 545 | return diffset |
|
547 | 546 | |
|
548 | 547 | @LoginRequired() |
|
549 | 548 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
550 | 549 | 'repository.admin') |
|
551 | 550 | def show(self, repo_name, pull_request_id): |
|
552 | 551 | pull_request_id = safe_int(pull_request_id) |
|
553 | 552 | version = request.GET.get('version') |
|
554 | 553 | from_version = request.GET.get('from_version') or version |
|
555 | 554 | merge_checks = request.GET.get('merge_checks') |
|
556 | 555 | c.fulldiff = str2bool(request.GET.get('fulldiff')) |
|
557 | 556 | |
|
558 | 557 | (pull_request_latest, |
|
559 | 558 | pull_request_at_ver, |
|
560 | 559 | pull_request_display_obj, |
|
561 | 560 | at_version) = self._get_pr_version( |
|
562 | 561 | pull_request_id, version=version) |
|
563 | 562 | pr_closed = pull_request_latest.is_closed() |
|
564 | 563 | |
|
565 | 564 | if pr_closed and (version or from_version): |
|
566 | 565 | # do not allow browsing versions
|
567 | 566 | return redirect(h.url('pullrequest_show', repo_name=repo_name, |
|
568 | 567 | pull_request_id=pull_request_id)) |
|
569 | 568 | |
|
570 | 569 | versions = pull_request_display_obj.versions() |
|
571 | 570 | |
|
572 | 571 | c.at_version = at_version |
|
573 | 572 | c.at_version_num = (at_version |
|
574 | 573 | if at_version and at_version != 'latest' |
|
575 | 574 | else None) |
|
576 | 575 | c.at_version_pos = ChangesetComment.get_index_from_version( |
|
577 | 576 | c.at_version_num, versions) |
|
578 | 577 | |
|
579 | 578 | (prev_pull_request_latest, |
|
580 | 579 | prev_pull_request_at_ver, |
|
581 | 580 | prev_pull_request_display_obj, |
|
582 | 581 | prev_at_version) = self._get_pr_version( |
|
583 | 582 | pull_request_id, version=from_version) |
|
584 | 583 | |
|
585 | 584 | c.from_version = prev_at_version |
|
586 | 585 | c.from_version_num = (prev_at_version |
|
587 | 586 | if prev_at_version and prev_at_version != 'latest' |
|
588 | 587 | else None) |
|
589 | 588 | c.from_version_pos = ChangesetComment.get_index_from_version( |
|
590 | 589 | c.from_version_num, versions) |
|
591 | 590 | |
|
592 | 591 | # define if we're in COMPARE mode or VIEW at version mode |
|
593 | 592 | compare = at_version != prev_at_version |
|
594 | 593 | |
|
595 | 594 | # the repo_name this pull request was opened against,

596 | 595 | # i.e. the target_repo must match
|
597 | 596 | if repo_name != pull_request_at_ver.target_repo.repo_name: |
|
598 | 597 | raise HTTPNotFound |
|
599 | 598 | |
|
600 | 599 | c.shadow_clone_url = PullRequestModel().get_shadow_clone_url( |
|
601 | 600 | pull_request_at_ver) |
|
602 | 601 | |
|
603 | 602 | c.pull_request = pull_request_display_obj |
|
604 | 603 | c.pull_request_latest = pull_request_latest |
|
605 | 604 | |
|
606 | 605 | if compare or (at_version and not at_version == 'latest'): |
|
607 | 606 | c.allowed_to_change_status = False |
|
608 | 607 | c.allowed_to_update = False |
|
609 | 608 | c.allowed_to_merge = False |
|
610 | 609 | c.allowed_to_delete = False |
|
611 | 610 | c.allowed_to_comment = False |
|
612 | 611 | c.allowed_to_close = False |
|
613 | 612 | else: |
|
614 | 613 | can_change_status = PullRequestModel().check_user_change_status( |
|
615 | 614 | pull_request_at_ver, c.rhodecode_user) |
|
616 | 615 | c.allowed_to_change_status = can_change_status and not pr_closed |
|
617 | 616 | |
|
618 | 617 | c.allowed_to_update = PullRequestModel().check_user_update( |
|
619 | 618 | pull_request_latest, c.rhodecode_user) and not pr_closed |
|
620 | 619 | c.allowed_to_merge = PullRequestModel().check_user_merge( |
|
621 | 620 | pull_request_latest, c.rhodecode_user) and not pr_closed |
|
622 | 621 | c.allowed_to_delete = PullRequestModel().check_user_delete( |
|
623 | 622 | pull_request_latest, c.rhodecode_user) and not pr_closed |
|
624 | 623 | c.allowed_to_comment = not pr_closed |
|
625 | 624 | c.allowed_to_close = c.allowed_to_merge and not pr_closed |
|
626 | 625 | |
|
627 | 626 | c.forbid_adding_reviewers = False |
|
628 | 627 | c.forbid_author_to_review = False |
|
629 | 628 | c.forbid_commit_author_to_review = False |
|
630 | 629 | |
|
631 | 630 | if pull_request_latest.reviewer_data and \ |
|
632 | 631 | 'rules' in pull_request_latest.reviewer_data: |
|
633 | 632 | rules = pull_request_latest.reviewer_data['rules'] or {} |
|
634 | 633 | try: |
|
635 | 634 | c.forbid_adding_reviewers = rules.get( |
|
636 | 635 | 'forbid_adding_reviewers') |
|
637 | 636 | c.forbid_author_to_review = rules.get( |
|
638 | 637 | 'forbid_author_to_review') |
|
639 | 638 | c.forbid_commit_author_to_review = rules.get( |
|
640 | 639 | 'forbid_commit_author_to_review') |
|
641 | 640 | except Exception: |
|
642 | 641 | pass |
|
643 | 642 | |
|
644 | 643 | # check merge capabilities |
|
645 | 644 | _merge_check = MergeCheck.validate( |
|
646 | 645 | pull_request_latest, user=c.rhodecode_user) |
|
647 | 646 | c.pr_merge_errors = _merge_check.error_details |
|
648 | 647 | c.pr_merge_possible = not _merge_check.failed |
|
649 | 648 | c.pr_merge_message = _merge_check.merge_msg |
|
650 | 649 | |
|
651 | 650 | c.pull_request_review_status = _merge_check.review_status |
|
652 | 651 | if merge_checks: |
|
653 | 652 | return render('/pullrequests/pullrequest_merge_checks.mako') |
|
654 | 653 | |
|
655 | 654 | comments_model = CommentsModel() |
|
656 | 655 | |
|
657 | 656 | # reviewers and statuses |
|
658 | 657 | c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses() |
|
659 | 658 | allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers] |
|
660 | 659 | |
|
661 | 660 | # GENERAL COMMENTS with versions # |
|
662 | 661 | q = comments_model._all_general_comments_of_pull_request(pull_request_latest) |
|
663 | 662 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
664 | 663 | general_comments = q |
|
665 | 664 | |
|
666 | 665 | # pick comments we want to render at current version |
|
667 | 666 | c.comment_versions = comments_model.aggregate_comments( |
|
668 | 667 | general_comments, versions, c.at_version_num) |
|
669 | 668 | c.comments = c.comment_versions[c.at_version_num]['until'] |
|
670 | 669 | |
|
671 | 670 | # INLINE COMMENTS with versions # |
|
672 | 671 | q = comments_model._all_inline_comments_of_pull_request(pull_request_latest) |
|
673 | 672 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
674 | 673 | inline_comments = q |
|
675 | 674 | |
|
676 | 675 | c.inline_versions = comments_model.aggregate_comments( |
|
677 | 676 | inline_comments, versions, c.at_version_num, inline=True) |
|
678 | 677 | |
|
679 | 678 | # inject latest version |
|
680 | 679 | latest_ver = PullRequest.get_pr_display_object( |
|
681 | 680 | pull_request_latest, pull_request_latest) |
|
682 | 681 | |
|
683 | 682 | c.versions = versions + [latest_ver] |
|
684 | 683 | |
|
685 | 684 | # if we view at a version, then do not show comments later

686 | 685 | # than the current version
|
687 | 686 | display_inline_comments = collections.defaultdict( |
|
688 | 687 | lambda: collections.defaultdict(list)) |
|
689 | 688 | for co in inline_comments: |
|
690 | 689 | if c.at_version_num: |
|
691 | 690 | # pick comments created up to (and including) the given version,

692 | 691 | # so we don't render comments for a higher version
|
693 | 692 | should_render = co.pull_request_version_id and \ |
|
694 | 693 | co.pull_request_version_id <= c.at_version_num |
|
695 | 694 | else: |
|
696 | 695 | # showing all, for 'latest' |
|
697 | 696 | should_render = True |
|
698 | 697 | |
|
699 | 698 | if should_render: |
|
700 | 699 | display_inline_comments[co.f_path][co.line_no].append(co) |
|
701 | 700 | |
|
702 | 701 | # load diff data into the template context; if we use compare mode,

703 | 702 | # the diff is calculated based on changes between versions of the PR
|
704 | 703 | |
|
705 | 704 | source_repo = pull_request_at_ver.source_repo |
|
706 | 705 | source_ref_id = pull_request_at_ver.source_ref_parts.commit_id |
|
707 | 706 | |
|
708 | 707 | target_repo = pull_request_at_ver.target_repo |
|
709 | 708 | target_ref_id = pull_request_at_ver.target_ref_parts.commit_id |
|
710 | 709 | |
|
711 | 710 | if compare: |
|
712 | 711 | # in compare mode switch the diff base to the latest commit of the prev version
|
713 | 712 | target_ref_id = prev_pull_request_display_obj.revisions[0] |
|
714 | 713 | |
|
715 | 714 | # despite opening commits via bookmarks/branches/tags, we always

716 | 715 | # convert these to a rev, to prevent changes after a bookmark or branch moves
|
717 | 716 | c.source_ref_type = 'rev' |
|
718 | 717 | c.source_ref = source_ref_id |
|
719 | 718 | |
|
720 | 719 | c.target_ref_type = 'rev' |
|
721 | 720 | c.target_ref = target_ref_id |
|
722 | 721 | |
|
723 | 722 | c.source_repo = source_repo |
|
724 | 723 | c.target_repo = target_repo |
|
725 | 724 | |
|
726 | 725 | # diff_limit is the old behavior: it will cut off the whole diff

727 | 726 | # if the limit is applied; otherwise it will just hide the

728 | 727 | # big files from the front-end
|
729 | 728 | diff_limit = self.cut_off_limit_diff |
|
730 | 729 | file_limit = self.cut_off_limit_file |
|
731 | 730 | |
|
732 | 731 | c.commit_ranges = [] |
|
733 | 732 | source_commit = EmptyCommit() |
|
734 | 733 | target_commit = EmptyCommit() |
|
735 | 734 | c.missing_requirements = False |
|
736 | 735 | |
|
737 | 736 | source_scm = source_repo.scm_instance() |
|
738 | 737 | target_scm = target_repo.scm_instance() |
|
739 | 738 | |
|
740 | 739 | # try the shadow repo first, fall back to the regular repo
|
741 | 740 | try: |
|
742 | 741 | commits_source_repo = pull_request_latest.get_shadow_repo() |
|
743 | 742 | except Exception: |
|
744 | 743 | log.debug('Failed to get shadow repo', exc_info=True) |
|
745 | 744 | commits_source_repo = source_scm |
|
746 | 745 | |
|
747 | 746 | c.commits_source_repo = commits_source_repo |
|
748 | 747 | commit_cache = {} |
|
749 | 748 | try: |
|
750 | 749 | pre_load = ["author", "branch", "date", "message"] |
|
751 | 750 | show_revs = pull_request_at_ver.revisions |
|
752 | 751 | for rev in show_revs: |
|
753 | 752 | comm = commits_source_repo.get_commit( |
|
754 | 753 | commit_id=rev, pre_load=pre_load) |
|
755 | 754 | c.commit_ranges.append(comm) |
|
756 | 755 | commit_cache[comm.raw_id] = comm |
|
757 | 756 | |
|
758 | 757 | # Order here matters, we first need to get target, and then |
|
759 | 758 | # the source |
|
760 | 759 | target_commit = commits_source_repo.get_commit( |
|
761 | 760 | commit_id=safe_str(target_ref_id)) |
|
762 | 761 | |
|
763 | 762 | source_commit = commits_source_repo.get_commit( |
|
764 | 763 | commit_id=safe_str(source_ref_id)) |
|
765 | 764 | |
|
766 | 765 | except CommitDoesNotExistError: |
|
767 | 766 | log.warning( |
|
768 | 767 | 'Failed to get commit from `{}` repo'.format( |
|
769 | 768 | commits_source_repo), exc_info=True) |
|
770 | 769 | except RepositoryRequirementError: |
|
771 | 770 | log.warning( |
|
772 | 771 | 'Failed to get all required data from repo', exc_info=True) |
|
773 | 772 | c.missing_requirements = True |
|
774 | 773 | |
|
775 | 774 | c.ancestor = None # set it to None, to hide it from PR view |
|
776 | 775 | |
|
777 | 776 | try: |
|
778 | 777 | ancestor_id = source_scm.get_common_ancestor( |
|
779 | 778 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
780 | 779 | c.ancestor_commit = source_scm.get_commit(ancestor_id) |
|
781 | 780 | except Exception: |
|
782 | 781 | c.ancestor_commit = None |
|
783 | 782 | |
|
784 | 783 | c.statuses = source_repo.statuses( |
|
785 | 784 | [x.raw_id for x in c.commit_ranges]) |
|
786 | 785 | |
|
787 | 786 | # auto collapse if we have more commits than the limit
|
788 | 787 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
789 | 788 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
790 | 789 | c.compare_mode = compare |
|
791 | 790 | |
|
792 | 791 | c.missing_commits = False |
|
793 | 792 | if (c.missing_requirements or isinstance(source_commit, EmptyCommit) |
|
794 | 793 | or source_commit == target_commit): |
|
795 | 794 | |
|
796 | 795 | c.missing_commits = True |
|
797 | 796 | else: |
|
798 | 797 | |
|
799 | 798 | c.diffset = self._get_diffset( |
|
800 | 799 | commits_source_repo, source_ref_id, target_ref_id, |
|
801 | 800 | target_commit, source_commit, |
|
802 | 801 | diff_limit, file_limit, display_inline_comments) |
|
803 | 802 | |
|
804 | 803 | c.limited_diff = c.diffset.limited_diff |
|
805 | 804 | |
|
806 | 805 | # calculate removed files that are bound to comments |
|
807 | 806 | comment_deleted_files = [ |
|
808 | 807 | fname for fname in display_inline_comments |
|
809 | 808 | if fname not in c.diffset.file_stats] |
|
810 | 809 | |
|
811 | 810 | c.deleted_files_comments = collections.defaultdict(dict) |
|
812 | 811 | for fname, per_line_comments in display_inline_comments.items(): |
|
813 | 812 | if fname in comment_deleted_files: |
|
814 | 813 | c.deleted_files_comments[fname]['stats'] = 0 |
|
815 | 814 | c.deleted_files_comments[fname]['comments'] = list() |
|
816 | 815 | for lno, comments in per_line_comments.items(): |
|
817 | 816 | c.deleted_files_comments[fname]['comments'].extend( |
|
818 | 817 | comments) |
|
819 | 818 | |
|
820 | 819 | # this is a hack to properly display links: when creating a PR, the

821 | 820 | # compare view and others use a different notation, and

822 | 821 | # compare_commits.mako renders links based on the target_repo.

823 | 822 | # We need to swap that here to generate it properly on the HTML side
|
824 | 823 | c.target_repo = c.source_repo |
|
825 | 824 | |
|
826 | 825 | c.commit_statuses = ChangesetStatus.STATUSES |
|
827 | 826 | |
|
828 | 827 | c.show_version_changes = not pr_closed |
|
829 | 828 | if c.show_version_changes: |
|
830 | 829 | cur_obj = pull_request_at_ver |
|
831 | 830 | prev_obj = prev_pull_request_at_ver |
|
832 | 831 | |
|
833 | 832 | old_commit_ids = prev_obj.revisions |
|
834 | 833 | new_commit_ids = cur_obj.revisions |
|
835 | 834 | commit_changes = PullRequestModel()._calculate_commit_id_changes( |
|
836 | 835 | old_commit_ids, new_commit_ids) |
|
837 | 836 | c.commit_changes_summary = commit_changes |
|
838 | 837 | |
|
839 | 838 | # calculate the diff for commits between versions |
|
840 | 839 | c.commit_changes = [] |
|
841 | 840 | mark = lambda cs, fw: list( |
|
842 | 841 | h.itertools.izip_longest([], cs, fillvalue=fw)) |
|
843 | 842 | for c_type, raw_id in mark(commit_changes.added, 'a') \ |
|
844 | 843 | + mark(commit_changes.removed, 'r') \ |
|
845 | 844 | + mark(commit_changes.common, 'c'): |
|
846 | 845 | |
|
847 | 846 | if raw_id in commit_cache: |
|
848 | 847 | commit = commit_cache[raw_id] |
|
849 | 848 | else: |
|
850 | 849 | try: |
|
851 | 850 | commit = commits_source_repo.get_commit(raw_id) |
|
852 | 851 | except CommitDoesNotExistError: |
|
853 | 852 | # in case extraction fails, still use a "dummy" commit

854 | 853 | # for display in the commit diff
|
855 | 854 | commit = h.AttributeDict( |
|
856 | 855 | {'raw_id': raw_id, |
|
857 | 856 | 'message': 'EMPTY or MISSING COMMIT'}) |
|
858 | 857 | c.commit_changes.append([c_type, commit]) |
|
859 | 858 | |
|
860 | 859 | # current user review statuses for each version |
|
861 | 860 | c.review_versions = {} |
|
862 | 861 | if c.rhodecode_user.user_id in allowed_reviewers: |
|
863 | 862 | for co in general_comments: |
|
864 | 863 | if co.author.user_id == c.rhodecode_user.user_id: |
|
865 | 864 | # a comment may carry a status change
|
866 | 865 | status = co.status_change |
|
867 | 866 | if status: |
|
868 | 867 | _ver_pr = status[0].comment.pull_request_version_id |
|
869 | 868 | c.review_versions[_ver_pr] = status[0] |
|
870 | 869 | |
|
871 | 870 | return render('/pullrequests/pullrequest_show.mako') |
|
872 | 871 | |
|
873 | 872 | @LoginRequired() |
|
874 | 873 | @NotAnonymous() |
|
875 | 874 | @HasRepoPermissionAnyDecorator( |
|
876 | 875 | 'repository.read', 'repository.write', 'repository.admin') |
|
877 | 876 | @auth.CSRFRequired() |
|
878 | 877 | @jsonify |
|
879 | 878 | def comment(self, repo_name, pull_request_id): |
|
880 | 879 | pull_request_id = safe_int(pull_request_id) |
|
881 | 880 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
882 | 881 | if pull_request.is_closed(): |
|
883 | 882 | log.debug('comment: forbidden because pull request is closed') |
|
884 | 883 | raise HTTPForbidden() |
|
885 | 884 | |
|
886 | 885 | status = request.POST.get('changeset_status', None) |
|
887 | 886 | text = request.POST.get('text') |
|
888 | 887 | comment_type = request.POST.get('comment_type') |
|
889 | 888 | resolves_comment_id = request.POST.get('resolves_comment_id', None) |
|
890 | 889 | close_pull_request = request.POST.get('close_pull_request') |
|
891 | 890 | |
|
892 | 891 | # the logic here works as follows: if we submit a close-PR

893 | 892 | # comment, use the close_pull_request_with_comment function,

894 | 893 | # else handle the regular comment logic
|
895 | 894 | user = c.rhodecode_user |
|
896 | 895 | repo = c.rhodecode_db_repo |
|
897 | 896 | |
|
898 | 897 | if close_pull_request: |
|
899 | 898 | # only the owner, an admin, or a person with write permissions
|
900 | 899 | allowed_to_close = PullRequestModel().check_user_update( |
|
901 | 900 | pull_request, c.rhodecode_user) |
|
902 | 901 | if not allowed_to_close: |
|
903 | 902 | log.debug('comment: forbidden because not allowed to close ' |
|
904 | 903 | 'pull request %s', pull_request_id) |
|
905 | 904 | raise HTTPForbidden() |
|
906 | 905 | comment, status = PullRequestModel().close_pull_request_with_comment( |
|
907 | 906 | pull_request, user, repo, message=text) |
|
908 | 907 | Session().flush() |
|
909 | 908 | events.trigger( |
|
910 | 909 | events.PullRequestCommentEvent(pull_request, comment)) |
|
911 | 910 | |
|
912 | 911 | else: |
|
913 | 912 | # regular comment case: could be inline, or one with a status.

914 | 913 | # for that one we also check permissions
|
915 | 914 | |
|
916 | 915 | allowed_to_change_status = PullRequestModel().check_user_change_status( |
|
917 | 916 | pull_request, c.rhodecode_user) |
|
918 | 917 | |
|
919 | 918 | if status and allowed_to_change_status: |
|
920 | 919 | message = (_('Status change %(transition_icon)s %(status)s') |
|
921 | 920 | % {'transition_icon': '>', |
|
922 | 921 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
923 | 922 | text = text or message |
|
924 | 923 | |
|
925 | 924 | comment = CommentsModel().create( |
|
926 | 925 | text=text, |
|
927 | 926 | repo=c.rhodecode_db_repo.repo_id, |
|
928 | 927 | user=c.rhodecode_user.user_id, |
|
929 | 928 | pull_request=pull_request_id, |
|
930 | 929 | f_path=request.POST.get('f_path'), |
|
931 | 930 | line_no=request.POST.get('line'), |
|
932 | 931 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
933 | 932 | if status and allowed_to_change_status else None), |
|
934 | 933 | status_change_type=(status |
|
935 | 934 | if status and allowed_to_change_status else None), |
|
936 | 935 | comment_type=comment_type, |
|
937 | 936 | resolves_comment_id=resolves_comment_id |
|
938 | 937 | ) |
|
939 | 938 | |
|
940 | 939 | if allowed_to_change_status: |
|
941 | 940 | # calculate old status before we change it |
|
942 | 941 | old_calculated_status = pull_request.calculated_review_status() |
|
943 | 942 | |
|
944 | 943 | # get status if set ! |
|
945 | 944 | if status: |
|
946 | 945 | ChangesetStatusModel().set_status( |
|
947 | 946 | c.rhodecode_db_repo.repo_id, |
|
948 | 947 | status, |
|
949 | 948 | c.rhodecode_user.user_id, |
|
950 | 949 | comment, |
|
951 | 950 | pull_request=pull_request_id |
|
952 | 951 | ) |
|
953 | 952 | |
|
954 | 953 | Session().flush() |
|
955 | 954 | events.trigger( |
|
956 | 955 | events.PullRequestCommentEvent(pull_request, comment)) |
|
957 | 956 | |
|
958 | 957 | # we now calculate the status of the pull request, and based on

959 | 958 | # that calculation we set the commit status
|
960 | 959 | calculated_status = pull_request.calculated_review_status() |
|
961 | 960 | if old_calculated_status != calculated_status: |
|
962 | 961 | PullRequestModel()._trigger_pull_request_hook( |
|
963 | 962 | pull_request, c.rhodecode_user, 'review_status_change') |
|
964 | 963 | |
|
965 | 964 | Session().commit() |
|
966 | 965 | |
|
967 | 966 | if not request.is_xhr: |
|
968 | 967 | raise HTTPFound( |
|
969 | 968 | h.route_path('pullrequest_show', |
|
970 | 969 | repo_name=repo_name, |
|
971 | 970 | pull_request_id=pull_request_id)) |
|
972 | 971 | |
|
973 | 972 | data = { |
|
974 | 973 | 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))), |
|
975 | 974 | } |
|
976 | 975 | if comment: |
|
977 | 976 | c.co = comment |
|
978 | 977 | rendered_comment = render('changeset/changeset_comment_block.mako') |
|
979 | 978 | data.update(comment.get_dict()) |
|
980 | 979 | data.update({'rendered_text': rendered_comment}) |
|
981 | 980 | |
|
982 | 981 | return data |
|
983 | 982 | |
|
984 | 983 | @LoginRequired() |
|
985 | 984 | @NotAnonymous() |
|
986 | 985 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
987 | 986 | 'repository.admin') |
|
988 | 987 | @auth.CSRFRequired() |
|
989 | 988 | @jsonify |
|
990 | 989 | def delete_comment(self, repo_name, comment_id): |
|
991 | 990 | return self._delete_comment(comment_id) |
|
992 | 991 | |
|
993 | 992 | def _delete_comment(self, comment_id): |
|
994 | 993 | comment_id = safe_int(comment_id) |
|
995 | 994 | co = ChangesetComment.get_or_404(comment_id) |
|
996 | 995 | if co.pull_request.is_closed(): |
|
997 | 996 | # don't allow deleting comments on a closed pull request
|
998 | 997 | raise HTTPForbidden() |
|
999 | 998 | |
|
1000 | 999 | is_owner = co.author.user_id == c.rhodecode_user.user_id |
|
1001 | 1000 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name) |
|
1002 | 1001 | if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner: |
|
1003 | 1002 | old_calculated_status = co.pull_request.calculated_review_status() |
|
1004 | 1003 | CommentsModel().delete(comment=co, user=c.rhodecode_user) |
|
1005 | 1004 | Session().commit() |
|
1006 | 1005 | calculated_status = co.pull_request.calculated_review_status() |
|
1007 | 1006 | if old_calculated_status != calculated_status: |
|
1008 | 1007 | PullRequestModel()._trigger_pull_request_hook( |
|
1009 | 1008 | co.pull_request, c.rhodecode_user, 'review_status_change') |
|
1010 | 1009 | return True |
|
1011 | 1010 | else: |
|
1012 | 1011 | raise HTTPForbidden() |
@@ -1,2021 +1,2023 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | authentication and permission libraries |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os |
|
26 | 26 | import inspect |
|
27 | 27 | import collections |
|
28 | 28 | import fnmatch |
|
29 | 29 | import hashlib |
|
30 | 30 | import itertools |
|
31 | 31 | import logging |
|
32 | 32 | import random |
|
33 | 33 | import traceback |
|
34 | 34 | from functools import wraps |
|
35 | 35 | |
|
36 | 36 | import ipaddress |
|
37 | 37 | from pyramid.httpexceptions import HTTPForbidden, HTTPFound |
|
38 | 38 | from pylons.i18n.translation import _ |
|
39 | 39 | # NOTE(marcink): this has to be removed only after pyramid migration, |
|
40 | 40 | # replace with _ = request.translate |
|
41 | 41 | from sqlalchemy.orm.exc import ObjectDeletedError |
|
42 | 42 | from sqlalchemy.orm import joinedload |
|
43 | 43 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
44 | 44 | |
|
45 | 45 | import rhodecode |
|
46 | 46 | from rhodecode.model import meta |
|
47 | 47 | from rhodecode.model.meta import Session |
|
48 | 48 | from rhodecode.model.user import UserModel |
|
49 | 49 | from rhodecode.model.db import ( |
|
50 | 50 | User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember, |
|
51 | 51 | UserIpMap, UserApiKeys, RepoGroup) |
|
52 | 52 | from rhodecode.lib import caches |
|
53 | 53 | from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5 |
|
54 | 54 | from rhodecode.lib.utils import ( |
|
55 | 55 | get_repo_slug, get_repo_group_slug, get_user_group_slug) |
|
56 | 56 | from rhodecode.lib.caching_query import FromCache |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | if rhodecode.is_unix: |
|
60 | 60 | import bcrypt |
|
61 | 61 | |
|
62 | 62 | log = logging.getLogger(__name__) |
|
63 | 63 | |
|
64 | 64 | csrf_token_key = "csrf_token" |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | class PasswordGenerator(object): |
|
68 | 68 | """ |
|
69 | 69 | This is a simple class for generating password from different sets of |
|
70 | 70 | characters |
|
71 | 71 | usage:: |
|
72 | 72 | |
|
73 | 73 | passwd_gen = PasswordGenerator() |
|
74 | 74 | #print 8-letter password containing only big and small letters |
|
75 | 75 | of alphabet |
|
76 | 76 | passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL) |
|
77 | 77 | """ |
|
78 | 78 | ALPHABETS_NUM = r'''1234567890''' |
|
79 | 79 | ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm''' |
|
80 | 80 | ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM''' |
|
81 | 81 | ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?''' |
|
82 | 82 | ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \ |
|
83 | 83 | + ALPHABETS_NUM + ALPHABETS_SPECIAL |
|
84 | 84 | ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM |
|
85 | 85 | ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL |
|
86 | 86 | ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM |
|
87 | 87 | ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM |
|
88 | 88 | |
|
89 | 89 | def __init__(self, passwd=''): |
|
90 | 90 | self.passwd = passwd |
|
91 | 91 | |
|
92 | 92 | def gen_password(self, length, type_=None): |
|
93 | 93 | if type_ is None: |
|
94 | 94 | type_ = self.ALPHABETS_FULL |
|
95 | 95 | self.passwd = ''.join([random.choice(type_) for _ in xrange(length)]) |
|
96 | 96 | return self.passwd |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | class _RhodeCodeCryptoBase(object): |
|
100 | 100 | ENC_PREF = None |
|
101 | 101 | |
|
102 | 102 | def hash_create(self, str_): |
|
103 | 103 | """ |
|
104 | 104 | hash the string using |
|
105 | 105 | |
|
106 | 106 | :param str_: password to hash |
|
107 | 107 | """ |
|
108 | 108 | raise NotImplementedError |
|
109 | 109 | |
|
110 | 110 | def hash_check_with_upgrade(self, password, hashed): |
|
111 | 111 | """ |
|
112 | 112 | Returns tuple in which first element is boolean that states that |
|
113 | 113 | given password matches it's hashed version, and the second is new hash |
|
114 | 114 | of the password, in case this password should be migrated to new |
|
115 | 115 | cipher. |
|
116 | 116 | """ |
|
117 | 117 | checked_hash = self.hash_check(password, hashed) |
|
118 | 118 | return checked_hash, None |
|
119 | 119 | |
|
120 | 120 | def hash_check(self, password, hashed): |
|
121 | 121 | """ |
|
122 | 122 | Checks matching password with it's hashed value. |
|
123 | 123 | |
|
124 | 124 | :param password: password |
|
125 | 125 | :param hashed: password in hashed form |
|
126 | 126 | """ |
|
127 | 127 | raise NotImplementedError |
|
128 | 128 | |
|
129 | 129 | def _assert_bytes(self, value): |
|
130 | 130 | """ |
|
131 | 131 | Passing in an `unicode` object can lead to hard to detect issues |
|
132 | 132 | if passwords contain non-ascii characters. Doing a type check |
|
133 | 133 | during runtime, so that such mistakes are detected early on. |
|
134 | 134 | """ |
|
135 | 135 | if not isinstance(value, str): |
|
136 | 136 | raise TypeError( |
|
137 | 137 | "Bytestring required as input, got %r." % (value, )) |
|
138 | 138 | |
|
139 | 139 | |
|
140 | 140 | class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase): |
|
141 | 141 | ENC_PREF = ('$2a$10', '$2b$10') |
|
142 | 142 | |
|
143 | 143 | def hash_create(self, str_): |
|
144 | 144 | self._assert_bytes(str_) |
|
145 | 145 | return bcrypt.hashpw(str_, bcrypt.gensalt(10)) |
|
146 | 146 | |
|
147 | 147 | def hash_check_with_upgrade(self, password, hashed): |
|
148 | 148 | """ |
|
149 | 149 | Returns tuple in which first element is boolean that states that |
|
150 | 150 | given password matches it's hashed version, and the second is new hash |
|
151 | 151 | of the password, in case this password should be migrated to new |
|
152 | 152 | cipher. |
|
153 | 153 | |
|
154 | 154 | This implements special upgrade logic which works like that: |
|
155 | 155 | - check if the given password == bcrypted hash, if yes then we |
|
156 | 156 | properly used password and it was already in bcrypt. Proceed |
|
157 | 157 | without any changes |
|
158 | 158 | - if bcrypt hash check is not working try with sha256. If hash compare |
|
159 | 159 | is ok, it means we using correct but old hashed password. indicate |
|
160 | 160 | hash change and proceed |
|
161 | 161 | """ |
|
162 | 162 | |
|
163 | 163 | new_hash = None |
|
164 | 164 | |
|
165 | 165 | # regular pw check |
|
166 | 166 | password_match_bcrypt = self.hash_check(password, hashed) |
|
167 | 167 | |
|
168 | 168 | # now we want to know if the password was maybe from sha256 |
|
169 | 169 | # basically calling _RhodeCodeCryptoSha256().hash_check() |
|
170 | 170 | if not password_match_bcrypt: |
|
171 | 171 | if _RhodeCodeCryptoSha256().hash_check(password, hashed): |
|
172 | 172 | new_hash = self.hash_create(password) # make new bcrypt hash |
|
173 | 173 | password_match_bcrypt = True |
|
174 | 174 | |
|
175 | 175 | return password_match_bcrypt, new_hash |
|
176 | 176 | |
|
177 | 177 | def hash_check(self, password, hashed): |
|
178 | 178 | """ |
|
179 | 179 | Checks matching password with it's hashed value. |
|
180 | 180 | |
|
181 | 181 | :param password: password |
|
182 | 182 | :param hashed: password in hashed form |
|
183 | 183 | """ |
|
184 | 184 | self._assert_bytes(password) |
|
185 | 185 | try: |
|
186 | 186 | return bcrypt.hashpw(password, hashed) == hashed |
|
187 | 187 | except ValueError as e: |
|
188 | 188 | # we're having a invalid salt here probably, we should not crash |
|
189 | 189 | # just return with False as it would be a wrong password. |
|
190 | 190 | log.debug('Failed to check password hash using bcrypt %s', |
|
191 | 191 | safe_str(e)) |
|
192 | 192 | |
|
193 | 193 | return False |
|
194 | 194 | |
|
195 | 195 | |
|
196 | 196 | class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase): |
|
197 | 197 | ENC_PREF = '_' |
|
198 | 198 | |
|
199 | 199 | def hash_create(self, str_): |
|
200 | 200 | self._assert_bytes(str_) |
|
201 | 201 | return hashlib.sha256(str_).hexdigest() |
|
202 | 202 | |
|
203 | 203 | def hash_check(self, password, hashed): |
|
204 | 204 | """ |
|
205 | 205 | Checks matching password with it's hashed value. |
|
206 | 206 | |
|
207 | 207 | :param password: password |
|
208 | 208 | :param hashed: password in hashed form |
|
209 | 209 | """ |
|
210 | 210 | self._assert_bytes(password) |
|
211 | 211 | return hashlib.sha256(password).hexdigest() == hashed |
|
212 | 212 | |
|
213 | 213 | |
|
214 | 214 | class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase): |
|
215 | 215 | ENC_PREF = '_' |
|
216 | 216 | |
|
217 | 217 | def hash_create(self, str_): |
|
218 | 218 | self._assert_bytes(str_) |
|
219 | 219 | return hashlib.md5(str_).hexdigest() |
|
220 | 220 | |
|
221 | 221 | def hash_check(self, password, hashed): |
|
222 | 222 | """ |
|
223 | 223 | Checks matching password with it's hashed value. |
|
224 | 224 | |
|
225 | 225 | :param password: password |
|
226 | 226 | :param hashed: password in hashed form |
|
227 | 227 | """ |
|
228 | 228 | self._assert_bytes(password) |
|
229 | 229 | return hashlib.md5(password).hexdigest() == hashed |
|
230 | 230 | |
|
231 | 231 | |
|
232 | 232 | def crypto_backend(): |
|
233 | 233 | """ |
|
234 | 234 | Return the matching crypto backend. |
|
235 | 235 | |
|
236 | 236 | Selection is based on if we run tests or not, we pick md5 backend to run |
|
237 | 237 | tests faster since BCRYPT is expensive to calculate |
|
238 | 238 | """ |
|
239 | 239 | if rhodecode.is_test: |
|
240 | 240 | RhodeCodeCrypto = _RhodeCodeCryptoMd5() |
|
241 | 241 | else: |
|
242 | 242 | RhodeCodeCrypto = _RhodeCodeCryptoBCrypt() |
|
243 | 243 | |
|
244 | 244 | return RhodeCodeCrypto |
|
245 | 245 | |
|
246 | 246 | |
|
247 | 247 | def get_crypt_password(password): |
|
248 | 248 | """ |
|
249 | 249 | Create the hash of `password` with the active crypto backend. |
|
250 | 250 | |
|
251 | 251 | :param password: The cleartext password. |
|
252 | 252 | :type password: unicode |
|
253 | 253 | """ |
|
254 | 254 | password = safe_str(password) |
|
255 | 255 | return crypto_backend().hash_create(password) |
|
256 | 256 | |
|
257 | 257 | |
|
258 | 258 | def check_password(password, hashed): |
|
259 | 259 | """ |
|
260 | 260 | Check if the value in `password` matches the hash in `hashed`. |
|
261 | 261 | |
|
262 | 262 | :param password: The cleartext password. |
|
263 | 263 | :type password: unicode |
|
264 | 264 | |
|
265 | 265 | :param hashed: The expected hashed version of the password. |
|
266 | 266 | :type hashed: The hash has to be passed in in text representation. |
|
267 | 267 | """ |
|
268 | 268 | password = safe_str(password) |
|
269 | 269 | return crypto_backend().hash_check(password, hashed) |
|
270 | 270 | |
|
271 | 271 | |
|
272 | 272 | def generate_auth_token(data, salt=None): |
|
273 | 273 | """ |
|
274 | 274 | Generates API KEY from given string |
|
275 | 275 | """ |
|
276 | 276 | |
|
277 | 277 | if salt is None: |
|
278 | 278 | salt = os.urandom(16) |
|
279 | 279 | return hashlib.sha1(safe_str(data) + salt).hexdigest() |
|
280 | 280 | |
|
281 | 281 | |
|
282 | 282 | class CookieStoreWrapper(object): |
|
283 | 283 | |
|
284 | 284 | def __init__(self, cookie_store): |
|
285 | 285 | self.cookie_store = cookie_store |
|
286 | 286 | |
|
287 | 287 | def __repr__(self): |
|
288 | 288 | return 'CookieStore<%s>' % (self.cookie_store) |
|
289 | 289 | |
|
290 | 290 | def get(self, key, other=None): |
|
291 | 291 | if isinstance(self.cookie_store, dict): |
|
292 | 292 | return self.cookie_store.get(key, other) |
|
293 | 293 | elif isinstance(self.cookie_store, AuthUser): |
|
294 | 294 | return self.cookie_store.__dict__.get(key, other) |
|
295 | 295 | |
|
296 | 296 | |
|
297 | 297 | def _cached_perms_data(user_id, scope, user_is_admin, |
|
298 | 298 | user_inherit_default_permissions, explicit, algo): |
|
299 | 299 | |
|
300 | 300 | permissions = PermissionCalculator( |
|
301 | 301 | user_id, scope, user_is_admin, user_inherit_default_permissions, |
|
302 | 302 | explicit, algo) |
|
303 | 303 | return permissions.calculate() |
|
304 | 304 | |
|
305 | 305 | |
|
306 | 306 | class PermOrigin(object): |
|
307 | 307 | ADMIN = 'superadmin' |
|
308 | 308 | |
|
309 | 309 | REPO_USER = 'user:%s' |
|
310 | 310 | REPO_USERGROUP = 'usergroup:%s' |
|
311 | 311 | REPO_OWNER = 'repo.owner' |
|
312 | 312 | REPO_DEFAULT = 'repo.default' |
|
313 | 313 | REPO_PRIVATE = 'repo.private' |
|
314 | 314 | |
|
315 | 315 | REPOGROUP_USER = 'user:%s' |
|
316 | 316 | REPOGROUP_USERGROUP = 'usergroup:%s' |
|
317 | 317 | REPOGROUP_OWNER = 'group.owner' |
|
318 | 318 | REPOGROUP_DEFAULT = 'group.default' |
|
319 | 319 | |
|
320 | 320 | USERGROUP_USER = 'user:%s' |
|
321 | 321 | USERGROUP_USERGROUP = 'usergroup:%s' |
|
322 | 322 | USERGROUP_OWNER = 'usergroup.owner' |
|
323 | 323 | USERGROUP_DEFAULT = 'usergroup.default' |
|
324 | 324 | |
|
325 | 325 | |
|
326 | 326 | class PermOriginDict(dict): |
|
327 | 327 | """ |
|
328 | 328 | A special dict used for tracking permissions along with their origins. |
|
329 | 329 | |
|
330 | 330 | `__setitem__` has been overridden to expect a tuple(perm, origin) |
|
331 | 331 | `__getitem__` will return only the perm |
|
332 | 332 | `.perm_origin_stack` will return the stack of (perm, origin) set per key |
|
333 | 333 | |
|
334 | 334 | >>> perms = PermOriginDict() |
|
335 | 335 | >>> perms['resource'] = 'read', 'default' |
|
336 | 336 | >>> perms['resource'] |
|
337 | 337 | 'read' |
|
338 | 338 | >>> perms['resource'] = 'write', 'admin' |
|
339 | 339 | >>> perms['resource'] |
|
340 | 340 | 'write' |
|
341 | 341 | >>> perms.perm_origin_stack |
|
342 | 342 | {'resource': [('read', 'default'), ('write', 'admin')]} |
|
343 | 343 | """ |
|
344 | 344 | |
|
345 | 345 | def __init__(self, *args, **kw): |
|
346 | 346 | dict.__init__(self, *args, **kw) |
|
347 | 347 | self.perm_origin_stack = {} |
|
348 | 348 | |
|
349 | 349 | def __setitem__(self, key, (perm, origin)): |
|
350 | 350 | self.perm_origin_stack.setdefault(key, []).append((perm, origin)) |
|
351 | 351 | dict.__setitem__(self, key, perm) |
|
352 | 352 | |
|
353 | 353 | |
|
354 | 354 | class PermissionCalculator(object): |
|
355 | 355 | |
|
356 | 356 | def __init__( |
|
357 | 357 | self, user_id, scope, user_is_admin, |
|
358 | 358 | user_inherit_default_permissions, explicit, algo): |
|
359 | 359 | self.user_id = user_id |
|
360 | 360 | self.user_is_admin = user_is_admin |
|
361 | 361 | self.inherit_default_permissions = user_inherit_default_permissions |
|
362 | 362 | self.explicit = explicit |
|
363 | 363 | self.algo = algo |
|
364 | 364 | |
|
365 | 365 | scope = scope or {} |
|
366 | 366 | self.scope_repo_id = scope.get('repo_id') |
|
367 | 367 | self.scope_repo_group_id = scope.get('repo_group_id') |
|
368 | 368 | self.scope_user_group_id = scope.get('user_group_id') |
|
369 | 369 | |
|
370 | 370 | self.default_user_id = User.get_default_user(cache=True).user_id |
|
371 | 371 | |
|
372 | 372 | self.permissions_repositories = PermOriginDict() |
|
373 | 373 | self.permissions_repository_groups = PermOriginDict() |
|
374 | 374 | self.permissions_user_groups = PermOriginDict() |
|
375 | 375 | self.permissions_global = set() |
|
376 | 376 | |
|
377 | 377 | self.default_repo_perms = Permission.get_default_repo_perms( |
|
378 | 378 | self.default_user_id, self.scope_repo_id) |
|
379 | 379 | self.default_repo_groups_perms = Permission.get_default_group_perms( |
|
380 | 380 | self.default_user_id, self.scope_repo_group_id) |
|
381 | 381 | self.default_user_group_perms = \ |
|
382 | 382 | Permission.get_default_user_group_perms( |
|
383 | 383 | self.default_user_id, self.scope_user_group_id) |
|
384 | 384 | |
|
385 | 385 | def calculate(self): |
|
386 | 386 | if self.user_is_admin: |
|
387 | 387 | return self._admin_permissions() |
|
388 | 388 | |
|
389 | 389 | self._calculate_global_default_permissions() |
|
390 | 390 | self._calculate_global_permissions() |
|
391 | 391 | self._calculate_default_permissions() |
|
392 | 392 | self._calculate_repository_permissions() |
|
393 | 393 | self._calculate_repository_group_permissions() |
|
394 | 394 | self._calculate_user_group_permissions() |
|
395 | 395 | return self._permission_structure() |
|
396 | 396 | |
|
397 | 397 | def _admin_permissions(self): |
|
398 | 398 | """ |
|
399 | 399 | admin users have all default rights for repositories,

400 | 400 | repository groups and user groups set to admin
|
401 | 401 | """ |
|
402 | 402 | self.permissions_global.add('hg.admin') |
|
403 | 403 | self.permissions_global.add('hg.create.write_on_repogroup.true') |
|
404 | 404 | |
|
405 | 405 | # repositories |
|
406 | 406 | for perm in self.default_repo_perms: |
|
407 | 407 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
408 | 408 | p = 'repository.admin' |
|
409 | 409 | self.permissions_repositories[r_k] = p, PermOrigin.ADMIN |
|
410 | 410 | |
|
411 | 411 | # repository groups |
|
412 | 412 | for perm in self.default_repo_groups_perms: |
|
413 | 413 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
414 | 414 | p = 'group.admin' |
|
415 | 415 | self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN |
|
416 | 416 | |
|
417 | 417 | # user groups |
|
418 | 418 | for perm in self.default_user_group_perms: |
|
419 | 419 | u_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
420 | 420 | p = 'usergroup.admin' |
|
421 | 421 | self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN |
|
422 | 422 | |
|
423 | 423 | return self._permission_structure() |
|
424 | 424 | |
|
425 | 425 | def _calculate_global_default_permissions(self): |
|
426 | 426 | """ |
|
427 | 427 | global permissions taken from the default user |
|
428 | 428 | """ |
|
429 | 429 | default_global_perms = UserToPerm.query()\ |
|
430 | 430 | .filter(UserToPerm.user_id == self.default_user_id)\ |
|
431 | 431 | .options(joinedload(UserToPerm.permission)) |
|
432 | 432 | |
|
433 | 433 | for perm in default_global_perms: |
|
434 | 434 | self.permissions_global.add(perm.permission.permission_name) |
|
435 | 435 | |
|
436 | 436 | def _calculate_global_permissions(self): |
|
437 | 437 | """ |
|
438 | 438 | Set global system permissions with user permissions or permissions |
|
439 | 439 | taken from the user groups of the current user. |
|
440 | 440 | |
|
441 | 441 | The permissions include repo creating, repo group creating, forking |
|
442 | 442 | etc. |
|
443 | 443 | """ |
|
444 | 444 | |
|
445 | 445 | # now we read the defined permissions and overwrite what we have set |
|
446 | 446 | # before; those can be configured from groups or users explicitly.
|
447 | 447 | |
|
448 | 448 | # TODO: johbo: This seems to be out of sync, find out the reason |
|
449 | 449 | # for the comment below and update it. |
|
450 | 450 | |
|
451 | 451 | # In case we want to extend this list we should be always in sync with |
|
452 | 452 | # User.DEFAULT_USER_PERMISSIONS definitions |
|
453 | 453 | _configurable = frozenset([ |
|
454 | 454 | 'hg.fork.none', 'hg.fork.repository', |
|
455 | 455 | 'hg.create.none', 'hg.create.repository', |
|
456 | 456 | 'hg.usergroup.create.false', 'hg.usergroup.create.true', |
|
457 | 457 | 'hg.repogroup.create.false', 'hg.repogroup.create.true', |
|
458 | 458 | 'hg.create.write_on_repogroup.false', |
|
459 | 459 | 'hg.create.write_on_repogroup.true', |
|
460 | 460 | 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true' |
|
461 | 461 | ]) |
|
462 | 462 | |
|
463 | 463 | # USER GROUPS come first: collect user group global permissions
|
464 | 464 | user_perms_from_users_groups = Session().query(UserGroupToPerm)\ |
|
465 | 465 | .options(joinedload(UserGroupToPerm.permission))\ |
|
466 | 466 | .join((UserGroupMember, UserGroupToPerm.users_group_id == |
|
467 | 467 | UserGroupMember.users_group_id))\ |
|
468 | 468 | .filter(UserGroupMember.user_id == self.user_id)\ |
|
469 | 469 | .order_by(UserGroupToPerm.users_group_id)\ |
|
470 | 470 | .all() |
|
471 | 471 | |
|
472 | 472 | # need to group here by groups since user can be in more than |
|
473 | 473 | # one group, so we get all groups |
|
474 | 474 | _explicit_grouped_perms = [ |
|
475 | 475 | [x, list(y)] for x, y in |
|
476 | 476 | itertools.groupby(user_perms_from_users_groups, |
|
477 | 477 | lambda _x: _x.users_group)] |
|
478 | 478 | |
|
479 | 479 | for gr, perms in _explicit_grouped_perms: |
|
480 | 480 | # since user can be in multiple groups iterate over them and |
|
481 | 481 | # select the lowest permissions first (more explicit) |
|
482 | 482 | # TODO: marcink: do this^^ |
|
483 | 483 | |
|
484 | 484 | # group doesn't inherit default permissions so we actually set them |
|
485 | 485 | if not gr.inherit_default_permissions: |
|
486 | 486 | # NEED TO IGNORE all previously set configurable permissions |
|
487 | 487 | # and replace them with explicitly set from this user |
|
488 | 488 | # group permissions |
|
489 | 489 | self.permissions_global = self.permissions_global.difference( |
|
490 | 490 | _configurable) |
|
491 | 491 | for perm in perms: |
|
492 | 492 | self.permissions_global.add(perm.permission.permission_name) |
|
493 | 493 | |
|
494 | 494 | # user explicit global permissions |
|
495 | 495 | user_perms = Session().query(UserToPerm)\ |
|
496 | 496 | .options(joinedload(UserToPerm.permission))\ |
|
497 | 497 | .filter(UserToPerm.user_id == self.user_id).all() |
|
498 | 498 | |
|
499 | 499 | if not self.inherit_default_permissions: |
|
500 | 500 | # NEED TO IGNORE all configurable permissions and |
|
501 | 501 | # replace them with explicitly set from this user permissions |
|
502 | 502 | self.permissions_global = self.permissions_global.difference( |
|
503 | 503 | _configurable) |
|
504 | 504 | for perm in user_perms: |
|
505 | 505 | self.permissions_global.add(perm.permission.permission_name) |
|
506 | 506 | |
|
507 | 507 | def _calculate_default_permissions(self): |
|
508 | 508 | """ |
|
509 | 509 | Set default user permissions for repositories, repository groups |
|
510 | 510 | taken from the default user. |
|
511 | 511 | |
|
512 | 512 | Calculate inheritance of object permissions based on what we have now |
|
513 | 513 | in GLOBAL permissions. We check if .false is in GLOBAL since this is |
|
514 | 514 | explicitly set. Inherit is the opposite of .false being there. |
|
515 | 515 | |
|
516 | 516 | .. note:: |
|
517 | 517 | |
|
518 | 518 | the syntax is a little bit odd, but what we need to check here is

519 | 519 | the opposite of the .false permission being in the list, so even in an

520 | 520 | inconsistent state when both .true/.false are there,

521 | 521 | .false is more important
|
522 | 522 | |
|
523 | 523 | """ |
|
524 | 524 | user_inherit_object_permissions = (

525 | 525 | 'hg.inherit_default_perms.false' not in self.permissions_global)
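# e.g. with an inconsistent global set holding both
# 'hg.inherit_default_perms.true' and 'hg.inherit_default_perms.false',
# the expression above yields False: .false wins, as the note explains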
|
526 | 526 | |
|
527 | 527 | # defaults for repositories, taken from `default` user permissions |
|
528 | 528 | # on given repo |
|
529 | 529 | for perm in self.default_repo_perms: |
|
530 | 530 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
531 | 531 | o = PermOrigin.REPO_DEFAULT |
|
532 | 532 | if perm.Repository.private and not ( |
|
533 | 533 | perm.Repository.user_id == self.user_id): |
|
534 | 534 | # disable defaults for private repos, |
|
535 | 535 | p = 'repository.none' |
|
536 | 536 | o = PermOrigin.REPO_PRIVATE |
|
537 | 537 | elif perm.Repository.user_id == self.user_id: |
|
538 | 538 | # set admin if owner |
|
539 | 539 | p = 'repository.admin' |
|
540 | 540 | o = PermOrigin.REPO_OWNER |
|
541 | 541 | else: |
|
542 | 542 | p = perm.Permission.permission_name |
|
543 | 543 | # if we decide this user isn't inheriting permissions from |
|
544 | 544 | # default user we set him to .none so only explicit |
|
545 | 545 | # permissions work |
|
546 | 546 | if not user_inherit_object_permissions: |
|
547 | 547 | p = 'repository.none' |
|
548 | 548 | self.permissions_repositories[r_k] = p, o |
|
549 | 549 | |
|
550 | 550 | # defaults for repository groups taken from `default` user permission |
|
551 | 551 | # on given group |
|
552 | 552 | for perm in self.default_repo_groups_perms: |
|
553 | 553 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
554 | 554 | o = PermOrigin.REPOGROUP_DEFAULT |
|
555 | 555 | if perm.RepoGroup.user_id == self.user_id: |
|
556 | 556 | # set admin if owner |
|
557 | 557 | p = 'group.admin' |
|
558 | 558 | o = PermOrigin.REPOGROUP_OWNER |
|
559 | 559 | else: |
|
560 | 560 | p = perm.Permission.permission_name |
|
561 | 561 | |
|
562 | 562 | # if we decide this user isn't inheriting permissions from default |
|
563 | 563 | # user we set him to .none so only explicit permissions work |
|
564 | 564 | if not user_inherit_object_permissions: |
|
565 | 565 | p = 'group.none' |
|
566 | 566 | self.permissions_repository_groups[rg_k] = p, o |
|
567 | 567 | |
|
568 | 568 | # defaults for user groups taken from `default` user permission |
|
569 | 569 | # on given user group |
|
570 | 570 | for perm in self.default_user_group_perms: |
|
571 | 571 | u_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
572 | 572 | o = PermOrigin.USERGROUP_DEFAULT |
|
573 | 573 | if perm.UserGroup.user_id == self.user_id: |
|
574 | 574 | # set admin if owner |
|
575 | 575 | p = 'usergroup.admin' |
|
576 | 576 | o = PermOrigin.USERGROUP_OWNER |
|
577 | 577 | else: |
|
578 | 578 | p = perm.Permission.permission_name |
|
579 | 579 | |
|
580 | 580 | # if we decide this user isn't inheriting permissions from default |
|
581 | 581 | # user we set him to .none so only explicit permissions work |
|
582 | 582 | if not user_inherit_object_permissions: |
|
583 | 583 | p = 'usergroup.none' |
|
584 | 584 | self.permissions_user_groups[u_k] = p, o |
|
585 | 585 | |
|
586 | 586 | def _calculate_repository_permissions(self): |
|
587 | 587 | """ |
|
588 | 588 | Repository permissions for the current user. |
|
589 | 589 | |
|
590 | 590 | Check if the user is part of user groups for this repository and |
|
591 | 591 | fill in the permission from it. `_choose_permission` decides which

592 | 592 | permission should be selected based on the selected method.
|
593 | 593 | """ |
|
594 | 594 | |
|
595 | 595 | # user group for repositories permissions |
|
596 | 596 | user_repo_perms_from_user_group = Permission\ |
|
597 | 597 | .get_default_repo_perms_from_user_group( |
|
598 | 598 | self.user_id, self.scope_repo_id) |
|
599 | 599 | |
|
600 | 600 | multiple_counter = collections.defaultdict(int) |
|
601 | 601 | for perm in user_repo_perms_from_user_group: |
|
602 | 602 | r_k = perm.UserGroupRepoToPerm.repository.repo_name |
|
603 | 603 | ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name |
|
604 | 604 | multiple_counter[r_k] += 1 |
|
605 | 605 | p = perm.Permission.permission_name |
|
606 | 606 | o = PermOrigin.REPO_USERGROUP % ug_k |
|
607 | 607 | |
|
608 | 608 | if perm.Repository.user_id == self.user_id: |
|
609 | 609 | # set admin if owner |
|
610 | 610 | p = 'repository.admin' |
|
611 | 611 | o = PermOrigin.REPO_OWNER |
|
612 | 612 | else: |
|
613 | 613 | if multiple_counter[r_k] > 1: |
|
614 | 614 | cur_perm = self.permissions_repositories[r_k] |
|
615 | 615 | p = self._choose_permission(p, cur_perm) |
|
616 | 616 | self.permissions_repositories[r_k] = p, o |
|
617 | 617 | |
|
618 | 618 | # user explicit permissions for repositories, overrides any specified |
|
619 | 619 | # by the group permission |
|
620 | 620 | user_repo_perms = Permission.get_default_repo_perms( |
|
621 | 621 | self.user_id, self.scope_repo_id) |
|
622 | 622 | for perm in user_repo_perms: |
|
623 | 623 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
624 | 624 | o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username |
|
625 | 625 | # set admin if owner |
|
626 | 626 | if perm.Repository.user_id == self.user_id: |
|
627 | 627 | p = 'repository.admin' |
|
628 | 628 | o = PermOrigin.REPO_OWNER |
|
629 | 629 | else: |
|
630 | 630 | p = perm.Permission.permission_name |
|
631 | 631 | if not self.explicit: |
|
632 | 632 | cur_perm = self.permissions_repositories.get( |
|
633 | 633 | r_k, 'repository.none') |
|
634 | 634 | p = self._choose_permission(p, cur_perm) |
|
635 | 635 | self.permissions_repositories[r_k] = p, o |
|
636 | 636 | |
|
637 | 637 | def _calculate_repository_group_permissions(self): |
|
638 | 638 | """ |
|
639 | 639 | Repository group permissions for the current user. |
|
640 | 640 | |
|
641 | 641 | Check if the user is part of user groups for repository groups and |
|
642 | 642 | fill in the permissions from it. `_choose_permission` decides which

643 | 643 | permission should be selected based on the selected method.
|
644 | 644 | """ |
|
645 | 645 | # user group for repo groups permissions |
|
646 | 646 | user_repo_group_perms_from_user_group = Permission\ |
|
647 | 647 | .get_default_group_perms_from_user_group( |
|
648 | 648 | self.user_id, self.scope_repo_group_id) |
|
649 | 649 | |
|
650 | 650 | multiple_counter = collections.defaultdict(int) |
|
651 | 651 | for perm in user_repo_group_perms_from_user_group: |
|
652 | 652 | g_k = perm.UserGroupRepoGroupToPerm.group.group_name |
|
653 | 653 | ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name |
|
654 | 654 | o = PermOrigin.REPOGROUP_USERGROUP % ug_k |
|
655 | 655 | multiple_counter[g_k] += 1 |
|
656 | 656 | p = perm.Permission.permission_name |
|
657 | 657 | if perm.RepoGroup.user_id == self.user_id: |
|
658 | 658 | # set admin if owner, even for member of other user group |
|
659 | 659 | p = 'group.admin' |
|
660 | 660 | o = PermOrigin.REPOGROUP_OWNER |
|
661 | 661 | else: |
|
662 | 662 | if multiple_counter[g_k] > 1: |
|
663 | 663 | cur_perm = self.permissions_repository_groups[g_k] |
|
664 | 664 | p = self._choose_permission(p, cur_perm) |
|
665 | 665 | self.permissions_repository_groups[g_k] = p, o |
|
666 | 666 | |
|
667 | 667 | # user explicit permissions for repository groups |
|
668 | 668 | user_repo_groups_perms = Permission.get_default_group_perms( |
|
669 | 669 | self.user_id, self.scope_repo_group_id) |
|
670 | 670 | for perm in user_repo_groups_perms: |
|
671 | 671 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
672 | 672 | u_k = perm.UserRepoGroupToPerm.user.username |
|
673 | 673 | o = PermOrigin.REPOGROUP_USER % u_k |
|
674 | 674 | |
|
675 | 675 | if perm.RepoGroup.user_id == self.user_id: |
|
676 | 676 | # set admin if owner |
|
677 | 677 | p = 'group.admin' |
|
678 | 678 | o = PermOrigin.REPOGROUP_OWNER |
|
679 | 679 | else: |
|
680 | 680 | p = perm.Permission.permission_name |
|
681 | 681 | if not self.explicit: |
|
682 | 682 | cur_perm = self.permissions_repository_groups.get( |
|
683 | 683 | rg_k, 'group.none') |
|
684 | 684 | p = self._choose_permission(p, cur_perm) |
|
685 | 685 | self.permissions_repository_groups[rg_k] = p, o |
|
686 | 686 | |
|
687 | 687 | def _calculate_user_group_permissions(self): |
|
688 | 688 | """ |
|
689 | 689 | User group permissions for the current user. |
|
690 | 690 | """ |
|
691 | 691 | # user group for user group permissions |
|
692 | 692 | user_group_from_user_group = Permission\ |
|
693 | 693 | .get_default_user_group_perms_from_user_group( |
|
694 | 694 | self.user_id, self.scope_user_group_id) |
|
695 | 695 | |
|
696 | 696 | multiple_counter = collections.defaultdict(int) |
|
697 | 697 | for perm in user_group_from_user_group: |
|
698 | 698 | g_k = perm.UserGroupUserGroupToPerm\ |
|
699 | 699 | .target_user_group.users_group_name |
|
700 | 700 | u_k = perm.UserGroupUserGroupToPerm\ |
|
701 | 701 | .user_group.users_group_name |
|
702 | 702 | o = PermOrigin.USERGROUP_USERGROUP % u_k |
|
703 | 703 | multiple_counter[g_k] += 1 |
|
704 | 704 | p = perm.Permission.permission_name |
|
705 | 705 | |
|
706 | 706 | if perm.UserGroup.user_id == self.user_id: |
|
707 | 707 | # set admin if owner, even for member of other user group |
|
708 | 708 | p = 'usergroup.admin' |
|
709 | 709 | o = PermOrigin.USERGROUP_OWNER |
|
710 | 710 | else: |
|
711 | 711 | if multiple_counter[g_k] > 1: |
|
712 | 712 | cur_perm = self.permissions_user_groups[g_k] |
|
713 | 713 | p = self._choose_permission(p, cur_perm) |
|
714 | 714 | self.permissions_user_groups[g_k] = p, o |
|
715 | 715 | |
|
716 | 716 | # user explicit permission for user groups |
|
717 | 717 | user_user_groups_perms = Permission.get_default_user_group_perms( |
|
718 | 718 | self.user_id, self.scope_user_group_id) |
|
719 | 719 | for perm in user_user_groups_perms: |
|
720 | 720 | ug_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
721 | 721 | u_k = perm.UserUserGroupToPerm.user.username |
|
722 | 722 | o = PermOrigin.USERGROUP_USER % u_k |
|
723 | 723 | |
|
724 | 724 | if perm.UserGroup.user_id == self.user_id: |
|
725 | 725 | # set admin if owner |
|
726 | 726 | p = 'usergroup.admin' |
|
727 | 727 | o = PermOrigin.USERGROUP_OWNER |
|
728 | 728 | else: |
|
729 | 729 | p = perm.Permission.permission_name |
|
730 | 730 | if not self.explicit: |
|
731 | 731 | cur_perm = self.permissions_user_groups.get( |
|
732 | 732 | ug_k, 'usergroup.none') |
|
733 | 733 | p = self._choose_permission(p, cur_perm) |
|
734 | 734 | self.permissions_user_groups[ug_k] = p, o |
|
735 | 735 | |
|
736 | 736 | def _choose_permission(self, new_perm, cur_perm): |
|
737 | 737 | new_perm_val = Permission.PERM_WEIGHTS[new_perm] |
|
738 | 738 | cur_perm_val = Permission.PERM_WEIGHTS[cur_perm] |
|
739 | 739 | if self.algo == 'higherwin': |
|
740 | 740 | if new_perm_val > cur_perm_val: |
|
741 | 741 | return new_perm |
|
742 | 742 | return cur_perm |
|
743 | 743 | elif self.algo == 'lowerwin': |
|
744 | 744 | if new_perm_val < cur_perm_val: |
|
745 | 745 | return new_perm |
|
746 | 746 | return cur_perm |
|
747 | 747 | |
|
748 | 748 | def _permission_structure(self): |
|
749 | 749 | return { |
|
750 | 750 | 'global': self.permissions_global, |
|
751 | 751 | 'repositories': self.permissions_repositories, |
|
752 | 752 | 'repositories_groups': self.permissions_repository_groups, |
|
753 | 753 | 'user_groups': self.permissions_user_groups, |
|
754 | 754 | } |
|
755 | 755 | |
|
756 | 756 | |
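# A minimal usage sketch for the calculator above (hypothetical helper,
# not part of the original module); the scope keys mirror the ones read
# in __init__, and explicit/algo mirror the get_perms defaults below.
def _example_permission_calculation(user_id, repo_id):
    calculator = PermissionCalculator(
        user_id, {'repo_id': repo_id}, user_is_admin=False,
        user_inherit_default_permissions=True, explicit=True,
        algo='higherwin')
    permissions = calculator.calculate()
    # PermOriginDict of repo_name -> permission; origins are kept on
    # .perm_origin_stack for auditing where each permission came from
    return permissions['repositories']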
|
757 | 757 | def allowed_auth_token_access(controller_name, whitelist=None, auth_token=None): |
|
758 | 758 | """ |
|
759 | 759 | Check if given controller_name is in whitelist of auth token access |
|
760 | 760 | """ |
|
761 | 761 | if not whitelist: |
|
762 | 762 | from rhodecode import CONFIG |
|
763 | 763 | whitelist = aslist( |
|
764 | 764 | CONFIG.get('api_access_controllers_whitelist'), sep=',') |
|
765 | 765 | log.debug( |
|
766 | 766 | 'Allowed controllers for AUTH TOKEN access: %s' % (whitelist,)) |
|
767 | 767 | |
|
768 | 768 | auth_token_access_valid = False |
|
769 | 769 | for entry in whitelist: |
|
770 | 770 | if fnmatch.fnmatch(controller_name, entry): |
|
771 | 771 | auth_token_access_valid = True |
|
772 | 772 | break |
|
773 | 773 | |
|
774 | 774 | if auth_token_access_valid: |
|
775 | 775 | log.debug('controller:%s matches entry in whitelist' |
|
776 | 776 | % (controller_name,)) |
|
777 | 777 | else: |
|
778 | 778 | msg = ('controller: %s does *NOT* match any entry in whitelist' |
|
779 | 779 | % (controller_name,)) |
|
780 | 780 | if auth_token: |
|
781 | 781 | # if we use auth token key and don't have access it's a warning |
|
782 | 782 | log.warning(msg) |
|
783 | 783 | else: |
|
784 | 784 | log.debug(msg) |
|
785 | 785 | |
|
786 | 786 | return auth_token_access_valid |
|
787 | 787 | |
|
788 | 788 | |
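# Self-contained sketch of the fnmatch matching used above; the controller
# names and whitelist patterns are made up for illustration.
def _example_whitelist_match():
    import fnmatch
    whitelist = ['ChangesetController:changeset_patch', 'FilesController:*']
    for controller in ['FilesController:raw', 'SummaryController:index']:
        allowed = any(
            fnmatch.fnmatch(controller, entry) for entry in whitelist)
        print('%s -> %s' % (controller, 'allowed' if allowed else 'denied'))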
|
789 | 789 | class AuthUser(object): |
|
790 | 790 | """ |
|
791 | 791 | A simple object that handles all attributes of user in RhodeCode |
|
792 | 792 | |
|
793 | 793 | It does a lookup based on API key, given user, or user present in session.

794 | 794 | Then it fills in all required information for such a user. It also checks if

795 | 795 | anonymous access is enabled and, if so, returns the default user as logged in.
|
796 | 796 | """ |
|
797 | 797 | GLOBAL_PERMS = [x[0] for x in Permission.PERMS] |
|
798 | 798 | |
|
799 | 799 | def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None): |
|
800 | 800 | |
|
801 | 801 | self.user_id = user_id |
|
802 | 802 | self._api_key = api_key |
|
803 | 803 | |
|
804 | 804 | self.api_key = None |
|
805 | 805 | self.feed_token = '' |
|
806 | 806 | self.username = username |
|
807 | 807 | self.ip_addr = ip_addr |
|
808 | 808 | self.name = '' |
|
809 | 809 | self.lastname = '' |
|
810 | self.first_name = '' | |
|
811 | self.last_name = '' | |
|
810 | 812 | self.email = '' |
|
811 | 813 | self.is_authenticated = False |
|
812 | 814 | self.admin = False |
|
813 | 815 | self.inherit_default_permissions = False |
|
814 | 816 | self.password = '' |
|
815 | 817 | |
|
816 | 818 | self.anonymous_user = None # propagated on propagate_data |
|
817 | 819 | self.propagate_data() |
|
818 | 820 | self._instance = None |
|
819 | 821 | self._permissions_scoped_cache = {} # used to bind scoped calculation |
|
820 | 822 | |
|
821 | 823 | @LazyProperty |
|
822 | 824 | def permissions(self): |
|
823 | 825 | return self.get_perms(user=self, cache=False) |
|
824 | 826 | |
|
825 | 827 | def permissions_with_scope(self, scope): |
|
826 | 828 | """ |
|
827 | 829 | Call the get_perms function with scoped data. The scope in that function |
|
828 | 830 | narrows the SQL calls to the given IDs of objects, resulting in fetching

829 | 831 | just the particular permissions we want to obtain. If scope is an empty dict
|
830 | 832 | then it basically narrows the scope to GLOBAL permissions only. |
|
831 | 833 | |
|
832 | 834 | :param scope: dict |
|
833 | 835 | """ |
|
834 | 836 | if 'repo_name' in scope: |
|
835 | 837 | obj = Repository.get_by_repo_name(scope['repo_name']) |
|
836 | 838 | if obj: |
|
837 | 839 | scope['repo_id'] = obj.repo_id |
|
838 | 840 | _scope = { |
|
839 | 841 | 'repo_id': -1, |
|
840 | 842 | 'user_group_id': -1, |
|
841 | 843 | 'repo_group_id': -1, |
|
842 | 844 | } |
|
843 | 845 | _scope.update(scope) |
|
844 | 846 | cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b, |
|
845 | 847 | _scope.items()))) |
|
846 | 848 | if cache_key not in self._permissions_scoped_cache: |
|
847 | 849 | # store in cache to mimic how the @LazyProperty works, |
|
848 | 850 | # the difference here is that we use the unique key calculated |
|
849 | 851 | # from params and values |
|
850 | 852 | res = self.get_perms(user=self, cache=False, scope=_scope) |
|
851 | 853 | self._permissions_scoped_cache[cache_key] = res |
|
852 | 854 | return self._permissions_scoped_cache[cache_key] |
|
853 | 855 | |
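# Illustrative scoped call (the repo name is hypothetical); this narrows
# the SQL permission queries to a single repository instead of computing
# the full tree, and memoizes the result under the per-scope cache key:
#
#   auth_user.permissions_with_scope({'repo_name': 'some/repo'})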
|
854 | 856 | def get_instance(self): |
|
855 | 857 | return User.get(self.user_id) |
|
856 | 858 | |
|
857 | 859 | def update_lastactivity(self): |
|
858 | 860 | if self.user_id: |
|
859 | 861 | User.get(self.user_id).update_lastactivity() |
|
860 | 862 | |
|
861 | 863 | def propagate_data(self): |
|
862 | 864 | """ |
|
863 | 865 | Fills in user data and propagates values to this instance. Maps fetched |
|
864 | 866 | user attributes to this class instance attributes |
|
865 | 867 | """ |
|
866 | 868 | log.debug('starting data propagation for new potential AuthUser') |
|
867 | 869 | user_model = UserModel() |
|
868 | 870 | anon_user = self.anonymous_user = User.get_default_user(cache=True) |
|
869 | 871 | is_user_loaded = False |
|
870 | 872 | |
|
871 | 873 | # lookup by userid |
|
872 | 874 | if self.user_id is not None and self.user_id != anon_user.user_id: |
|
873 | 875 | log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id) |
|
874 | 876 | is_user_loaded = user_model.fill_data(self, user_id=self.user_id) |
|
875 | 877 | |
|
876 | 878 | # try to get user by api key
|
877 | 879 | elif self._api_key and self._api_key != anon_user.api_key: |
|
878 | 880 | log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key) |
|
879 | 881 | is_user_loaded = user_model.fill_data(self, api_key=self._api_key) |
|
880 | 882 | |
|
881 | 883 | # lookup by username |
|
882 | 884 | elif self.username: |
|
883 | 885 | log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username) |
|
884 | 886 | is_user_loaded = user_model.fill_data(self, username=self.username) |
|
885 | 887 | else: |
|
886 | 888 | log.debug('No data in %s that could be used to log in' % self)
|
887 | 889 | |
|
888 | 890 | if not is_user_loaded: |
|
889 | 891 | log.debug('Failed to load user. Fallback to default user') |
|
890 | 892 | # if we cannot authenticate user try anonymous |
|
891 | 893 | if anon_user.active: |
|
892 | 894 | user_model.fill_data(self, user_id=anon_user.user_id) |
|
893 | 895 | # then we set this user is logged in |
|
894 | 896 | self.is_authenticated = True |
|
895 | 897 | else: |
|
896 | 898 | # in case of disabled anonymous user we reset some of the |
|
897 | 899 | # parameters so such user is "corrupted", skipping the fill_data |
|
898 | 900 | for attr in ['user_id', 'username', 'admin', 'active']: |
|
899 | 901 | setattr(self, attr, None) |
|
900 | 902 | self.is_authenticated = False |
|
901 | 903 | |
|
902 | 904 | if not self.username: |
|
903 | 905 | self.username = 'None' |
|
904 | 906 | |
|
905 | 907 | log.debug('Auth User is now %s' % self) |
|
906 | 908 | |
|
907 | 909 | def get_perms(self, user, scope=None, explicit=True, algo='higherwin', |
|
908 | 910 | cache=False): |
|
909 | 911 | """ |
|
910 | 912 | Fills user permission attribute with permissions taken from database |
|
911 | 913 | works for permissions given for repositories, and for permissions that |
|
912 | 914 | are granted to groups |
|
913 | 915 | |
|
914 | 916 | :param user: instance of User object from database |
|
915 | 917 | :param explicit: In case there are permissions both for user and a group |
|
916 | 918 | that user is part of, the explicit flag will define if the user will
|
917 | 919 | explicitly override permissions from group, if it's False it will |
|
918 | 920 | make decision based on the algo |
|
919 | 921 | :param algo: algorithm to decide which permission should be chosen if

920 | 922 | multiple are defined, e.g. user in two different groups. It also

921 | 923 | decides, if the explicit flag is turned off, how to pick the permission

922 | 924 | for the case when the user is in a group and also has a separate permission
|
923 | 925 | """ |
|
924 | 926 | user_id = user.user_id |
|
925 | 927 | user_is_admin = user.is_admin |
|
926 | 928 | |
|
927 | 929 | # inheritance of global permissions like create repo/fork repo etc |
|
928 | 930 | user_inherit_default_permissions = user.inherit_default_permissions |
|
929 | 931 | |
|
930 | 932 | log.debug('Computing PERMISSION tree for scope %s' % (scope, )) |
|
931 | 933 | compute = caches.conditional_cache( |
|
932 | 934 | 'short_term', 'cache_desc', |
|
933 | 935 | condition=cache, func=_cached_perms_data) |
|
934 | 936 | result = compute(user_id, scope, user_is_admin, |
|
935 | 937 | user_inherit_default_permissions, explicit, algo) |
|
936 | 938 | |
|
937 | 939 | result_repr = [] |
|
938 | 940 | for k in result: |
|
939 | 941 | result_repr.append((k, len(result[k]))) |
|
940 | 942 | |
|
941 | 943 | log.debug('PERMISSION tree computed %s' % (result_repr,)) |
|
942 | 944 | return result |
|
943 | 945 | |
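# Example of the explicit/algo interplay documented above (permission
# names real, user/group setup hypothetical): if a user holds
# 'repository.read' directly and 'repository.write' via a user group,
# explicit=True keeps the direct 'repository.read'; with explicit=False,
# algo='higherwin' yields 'repository.write', algo='lowerwin' yields
# 'repository.read'.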
|
944 | 946 | @property |
|
945 | 947 | def is_default(self): |
|
946 | 948 | return self.username == User.DEFAULT_USER |
|
947 | 949 | |
|
948 | 950 | @property |
|
949 | 951 | def is_admin(self): |
|
950 | 952 | return self.admin |
|
951 | 953 | |
|
952 | 954 | @property |
|
953 | 955 | def is_user_object(self): |
|
954 | 956 | return self.user_id is not None |
|
955 | 957 | |
|
956 | 958 | @property |
|
957 | 959 | def repositories_admin(self): |
|
958 | 960 | """ |
|
959 | 961 | Returns list of repositories you're an admin of |
|
960 | 962 | """ |
|
961 | 963 | return [ |
|
962 | 964 | x[0] for x in self.permissions['repositories'].iteritems() |
|
963 | 965 | if x[1] == 'repository.admin'] |
|
964 | 966 | |
|
965 | 967 | @property |
|
966 | 968 | def repository_groups_admin(self): |
|
967 | 969 | """ |
|
968 | 970 | Returns list of repository groups you're an admin of |
|
969 | 971 | """ |
|
970 | 972 | return [ |
|
971 | 973 | x[0] for x in self.permissions['repositories_groups'].iteritems() |
|
972 | 974 | if x[1] == 'group.admin'] |
|
973 | 975 | |
|
974 | 976 | @property |
|
975 | 977 | def user_groups_admin(self): |
|
976 | 978 | """ |
|
977 | 979 | Returns list of user groups you're an admin of |
|
978 | 980 | """ |
|
979 | 981 | return [ |
|
980 | 982 | x[0] for x in self.permissions['user_groups'].iteritems() |
|
981 | 983 | if x[1] == 'usergroup.admin'] |
|
982 | 984 | |
|
983 | 985 | @property |
|
984 | 986 | def ip_allowed(self): |
|
985 | 987 | """ |
|
986 | 988 | Checks if ip_addr used in constructor is allowed from defined list of |
|
987 | 989 | allowed ip_addresses for user |
|
988 | 990 | |
|
989 | 991 | :returns: boolean, True if ip is in allowed ip range |
|
990 | 992 | """ |
|
991 | 993 | # check IP |
|
992 | 994 | inherit = self.inherit_default_permissions |
|
993 | 995 | return AuthUser.check_ip_allowed(self.user_id, self.ip_addr, |
|
994 | 996 | inherit_from_default=inherit) |
|
995 | 997 | @property |
|
996 | 998 | def personal_repo_group(self): |
|
997 | 999 | return RepoGroup.get_user_personal_repo_group(self.user_id) |
|
998 | 1000 | |
|
999 | 1001 | @classmethod |
|
1000 | 1002 | def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default): |
|
1001 | 1003 | allowed_ips = AuthUser.get_allowed_ips( |
|
1002 | 1004 | user_id, cache=True, inherit_from_default=inherit_from_default) |
|
1003 | 1005 | if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips): |
|
1004 | 1006 | log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips)) |
|
1005 | 1007 | return True |
|
1006 | 1008 | else: |
|
1007 | 1009 | log.info('Access for IP:%s forbidden, ' |
|
1008 | 1010 | 'not in %s' % (ip_addr, allowed_ips)) |
|
1009 | 1011 | return False |
|
1010 | 1012 | |
|
1011 | 1013 | def __repr__(self): |
|
1012 | 1014 | return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\ |
|
1013 | 1015 | % (self.user_id, self.username, self.ip_addr, self.is_authenticated) |
|
1014 | 1016 | |
|
1015 | 1017 | def set_authenticated(self, authenticated=True): |
|
1016 | 1018 | if self.user_id != self.anonymous_user.user_id: |
|
1017 | 1019 | self.is_authenticated = authenticated |
|
1018 | 1020 | |
|
1019 | 1021 | def get_cookie_store(self): |
|
1020 | 1022 | return { |
|
1021 | 1023 | 'username': self.username, |
|
1022 | 1024 | 'password': md5(self.password), |
|
1023 | 1025 | 'user_id': self.user_id, |
|
1024 | 1026 | 'is_authenticated': self.is_authenticated |
|
1025 | 1027 | } |
|
1026 | 1028 | |
|
1027 | 1029 | @classmethod |
|
1028 | 1030 | def from_cookie_store(cls, cookie_store): |
|
1029 | 1031 | """ |
|
1030 | 1032 | Creates AuthUser from a cookie store |
|
1031 | 1033 | |
|
1032 | 1034 | :param cls: |
|
1033 | 1035 | :param cookie_store: |
|
1034 | 1036 | """ |
|
1035 | 1037 | user_id = cookie_store.get('user_id') |
|
1036 | 1038 | username = cookie_store.get('username') |
|
1037 | 1039 | api_key = cookie_store.get('api_key') |
|
1038 | 1040 | return AuthUser(user_id, api_key, username) |
|
1039 | 1041 | |
|
1040 | 1042 | @classmethod |
|
1041 | 1043 | def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False): |
|
1042 | 1044 | _set = set() |
|
1043 | 1045 | |
|
1044 | 1046 | if inherit_from_default: |
|
1045 | 1047 | default_ips = UserIpMap.query().filter( |
|
1046 | 1048 | UserIpMap.user == User.get_default_user(cache=True)) |
|
1047 | 1049 | if cache: |
|
1048 | 1050 | default_ips = default_ips.options( |
|
1049 | 1051 | FromCache("sql_cache_short", "get_user_ips_default")) |
|
1050 | 1052 | |
|
1051 | 1053 | # populate from default user |
|
1052 | 1054 | for ip in default_ips: |
|
1053 | 1055 | try: |
|
1054 | 1056 | _set.add(ip.ip_addr) |
|
1055 | 1057 | except ObjectDeletedError: |
|
1056 | 1058 | # since we use heavy caching sometimes it happens that |
|
1057 | 1059 | # we get deleted objects here, we just skip them |
|
1058 | 1060 | pass |
|
1059 | 1061 | |
|
1060 | 1062 | user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id) |
|
1061 | 1063 | if cache: |
|
1062 | 1064 | user_ips = user_ips.options( |
|
1063 | 1065 | FromCache("sql_cache_short", "get_user_ips_%s" % user_id)) |
|
1064 | 1066 | |
|
1065 | 1067 | for ip in user_ips: |
|
1066 | 1068 | try: |
|
1067 | 1069 | _set.add(ip.ip_addr) |
|
1068 | 1070 | except ObjectDeletedError: |
|
1069 | 1071 | # since we use heavy caching sometimes it happens that we get |
|
1070 | 1072 | # deleted objects here, we just skip them |
|
1071 | 1073 | pass |
|
1072 | 1074 | return _set or set(['0.0.0.0/0', '::/0']) |
|
1073 | 1075 | |
|
1074 | 1076 | |
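# `check_ip_access` used above is defined elsewhere in this module; a
# rough, self-contained equivalent of a CIDR range check, assuming the
# `ipaddress` backport is installed (it expects unicode input on Python 2):
def _example_ip_in_ranges(source_ip, allowed_ips):
    import ipaddress
    ip = ipaddress.ip_address(unicode(source_ip))
    for cidr in allowed_ips:
        if ip in ipaddress.ip_network(unicode(cidr), strict=False):
            return True
    return False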
|
1075 | 1077 | def set_available_permissions(config): |
|
1076 | 1078 | """ |
|
1077 | 1079 | This function will populate pylons globals with all available

1078 | 1080 | permissions defined in the db. We don't want to check the db each time for

1079 | 1081 | new permissions, since adding a new permission also requires an application

1080 | 1082 | restart, i.e. to decorate new views with the newly created permission
|
1081 | 1083 | |
|
1082 | 1084 | :param config: current pylons config instance |
|
1083 | 1085 | |
|
1084 | 1086 | """ |
|
1085 | 1087 | log.info('getting information about all available permissions') |
|
1086 | 1088 | try: |
|
1087 | 1089 | sa = meta.Session |
|
1088 | 1090 | all_perms = sa.query(Permission).all() |
|
1089 | 1091 | config['available_permissions'] = [x.permission_name for x in all_perms] |
|
1090 | 1092 | except Exception: |
|
1091 | 1093 | log.error(traceback.format_exc()) |
|
1092 | 1094 | finally: |
|
1093 | 1095 | meta.Session.remove() |
|
1094 | 1096 | |
|
1095 | 1097 | |
|
1096 | 1098 | def get_csrf_token(session=None, force_new=False, save_if_missing=True): |
|
1097 | 1099 | """ |
|
1098 | 1100 | Return the current authentication token, creating one if one doesn't |
|
1099 | 1101 | already exist and the save_if_missing flag is present. |
|
1100 | 1102 | |
|
1101 | 1103 | :param session: pass in the pylons session, else we use the global ones |
|
1102 | 1104 | :param force_new: force to re-generate the token and store it in session |
|
1103 | 1105 | :param save_if_missing: save the newly generated token if it's missing in |
|
1104 | 1106 | session |
|
1105 | 1107 | """ |
|
1106 | 1108 | if not session: |
|
1107 | 1109 | from pylons import session |
|
1108 | 1110 | |
|
1109 | 1111 | if (csrf_token_key not in session and save_if_missing) or force_new: |
|
1110 | 1112 | token = hashlib.sha1(str(random.getrandbits(128))).hexdigest() |
|
1111 | 1113 | session[csrf_token_key] = token |
|
1112 | 1114 | if hasattr(session, 'save'): |
|
1113 | 1115 | session.save() |
|
1114 | 1116 | return session.get(csrf_token_key) |
|
1115 | 1117 | |
|
1116 | 1118 | |
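# Illustrative round-trip for the helper above, with a plain dict standing
# in for the Pylons session object (hypothetical, for demonstration only):
def _example_csrf_roundtrip(fake_session):
    token = get_csrf_token(session=fake_session)          # generates + stores
    assert get_csrf_token(session=fake_session) == token  # reuses stored one
    return get_csrf_token(session=fake_session, force_new=True)  # rotates it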
|
1117 | 1119 | def get_request(perm_class): |
|
1118 | 1120 | from pyramid.threadlocal import get_current_request |
|
1119 | 1121 | pyramid_request = get_current_request() |
|
1120 | 1122 | if not pyramid_request: |
|
1121 | 1123 | # return global request of pylons in case pyramid isn't available |
|
1122 | 1124 | # NOTE(marcink): this should be removed after migration to pyramid |
|
1123 | 1125 | from pylons import request |
|
1124 | 1126 | return request |
|
1125 | 1127 | return pyramid_request |
|
1126 | 1128 | |
|
1127 | 1129 | |
|
1128 | 1130 | # CHECK DECORATORS |
|
1129 | 1131 | class CSRFRequired(object): |
|
1130 | 1132 | """ |
|
1131 | 1133 | Decorator for authenticating a form |
|
1132 | 1134 | |
|
1133 | 1135 | This decorator uses an authorization token stored in the client's |
|
1134 | 1136 | session for prevention of certain Cross-site request forgery (CSRF) |
|
1135 | 1137 | attacks (See |
|
1136 | 1138 | http://en.wikipedia.org/wiki/Cross-site_request_forgery for more |
|
1137 | 1139 | information). |
|
1138 | 1140 | |
|
1139 | 1141 | For use with the ``webhelpers.secure_form`` helper functions. |
|
1140 | 1142 | |
|
1141 | 1143 | """ |
|
1142 | 1144 | def __init__(self, token=csrf_token_key, header='X-CSRF-Token', |
|
1143 | 1145 | except_methods=None): |
|
1144 | 1146 | self.token = token |
|
1145 | 1147 | self.header = header |
|
1146 | 1148 | self.except_methods = except_methods or [] |
|
1147 | 1149 | |
|
1148 | 1150 | def __call__(self, func): |
|
1149 | 1151 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1150 | 1152 | |
|
1151 | 1153 | def _get_csrf(self, _request): |
|
1152 | 1154 | return _request.POST.get(self.token, _request.headers.get(self.header)) |
|
1153 | 1155 | |
|
1154 | 1156 | def check_csrf(self, _request, cur_token): |
|
1155 | 1157 | supplied_token = self._get_csrf(_request) |
|
1156 | 1158 | return supplied_token and supplied_token == cur_token |
|
1157 | 1159 | |
|
1158 | 1160 | def _get_request(self): |
|
1159 | 1161 | return get_request(self) |
|
1160 | 1162 | |
|
1161 | 1163 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1162 | 1164 | request = self._get_request() |
|
1163 | 1165 | |
|
1164 | 1166 | if request.method in self.except_methods: |
|
1165 | 1167 | return func(*fargs, **fkwargs) |
|
1166 | 1168 | |
|
1167 | 1169 | cur_token = get_csrf_token(save_if_missing=False) |
|
1168 | 1170 | if self.check_csrf(request, cur_token): |
|
1169 | 1171 | if request.POST.get(self.token): |
|
1170 | 1172 | del request.POST[self.token] |
|
1171 | 1173 | return func(*fargs, **fkwargs) |
|
1172 | 1174 | else: |
|
1173 | 1175 | reason = 'token-missing' |
|
1174 | 1176 | supplied_token = self._get_csrf(request) |
|
1175 | 1177 | if supplied_token and cur_token != supplied_token: |
|
1176 | 1178 | reason = 'token-mismatch [%s:%s]' % ( |
|
1177 | 1179 | (cur_token or '')[:6], (supplied_token or '')[:6])
|
1178 | 1180 | |
|
1179 | 1181 | csrf_message = \ |
|
1180 | 1182 | ("Cross-site request forgery detected, request denied. See " |
|
1181 | 1183 | "http://en.wikipedia.org/wiki/Cross-site_request_forgery for " |
|
1182 | 1184 | "more information.") |
|
1183 | 1185 | log.warn('Cross-site request forgery detected, request %r DENIED: %s ' |
|
1184 | 1186 | 'REMOTE_ADDR:%s, HEADERS:%s' % ( |
|
1185 | 1187 | request, reason, request.remote_addr, request.headers)) |
|
1186 | 1188 | |
|
1187 | 1189 | raise HTTPForbidden(explanation=csrf_message) |
|
1188 | 1190 | |
|
1189 | 1191 | |
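# Hypothetical usage of the decorator above on a controller method; GET
# requests bypass the token check via except_methods:
class _ExampleController(object):  # illustrative stand-in
    @CSRFRequired(except_methods=['GET'])
    def delete_entry(self):
        return 'deleted'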
|
1190 | 1192 | class LoginRequired(object): |
|
1191 | 1193 | """ |
|
1192 | 1194 | Must be logged in to execute this function else |
|
1193 | 1195 | redirect to login page |
|
1194 | 1196 | |
|
1195 | 1197 | :param auth_token_access: if enabled this checks only for a valid auth

1196 | 1198 | token and grants access based on that token
|
1197 | 1199 | """ |
|
1198 | 1200 | def __init__(self, auth_token_access=None): |
|
1199 | 1201 | self.auth_token_access = auth_token_access |
|
1200 | 1202 | |
|
1201 | 1203 | def __call__(self, func): |
|
1202 | 1204 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1203 | 1205 | |
|
1204 | 1206 | def _get_request(self): |
|
1205 | 1207 | return get_request(self) |
|
1206 | 1208 | |
|
1207 | 1209 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1208 | 1210 | from rhodecode.lib import helpers as h |
|
1209 | 1211 | cls = fargs[0] |
|
1210 | 1212 | user = cls._rhodecode_user |
|
1211 | 1213 | request = self._get_request() |
|
1212 | 1214 | |
|
1213 | 1215 | loc = "%s:%s" % (cls.__class__.__name__, func.__name__) |
|
1214 | 1216 | log.debug('Starting login restriction checks for user: %s' % (user,)) |
|
1215 | 1217 | # check if our IP is allowed |
|
1216 | 1218 | ip_access_valid = True |
|
1217 | 1219 | if not user.ip_allowed: |
|
1218 | 1220 | h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))), |
|
1219 | 1221 | category='warning') |
|
1220 | 1222 | ip_access_valid = False |
|
1221 | 1223 | |
|
1222 | 1224 | # check if we used an APIKEY and it's a valid one |
|
1223 | 1225 | # defined white-list of controllers which API access will be enabled |
|
1224 | 1226 | _auth_token = request.GET.get( |
|
1225 | 1227 | 'auth_token', '') or request.GET.get('api_key', '') |
|
1226 | 1228 | auth_token_access_valid = allowed_auth_token_access( |
|
1227 | 1229 | loc, auth_token=_auth_token) |
|
1228 | 1230 | |
|
1229 | 1231 | # explicit controller is enabled or API is in our whitelist |
|
1230 | 1232 | if self.auth_token_access or auth_token_access_valid: |
|
1231 | 1233 | log.debug('Checking AUTH TOKEN access for %s' % (cls,)) |
|
1232 | 1234 | db_user = user.get_instance() |
|
1233 | 1235 | |
|
1234 | 1236 | if db_user: |
|
1235 | 1237 | if self.auth_token_access: |
|
1236 | 1238 | roles = self.auth_token_access |
|
1237 | 1239 | else: |
|
1238 | 1240 | roles = [UserApiKeys.ROLE_HTTP] |
|
1239 | 1241 | token_match = db_user.authenticate_by_token( |
|
1240 | 1242 | _auth_token, roles=roles) |
|
1241 | 1243 | else: |
|
1242 | 1244 | log.debug('Unable to fetch db instance for auth user: %s', user) |
|
1243 | 1245 | token_match = False |
|
1244 | 1246 | |
|
1245 | 1247 | if _auth_token and token_match: |
|
1246 | 1248 | auth_token_access_valid = True |
|
1247 | 1249 | log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],)) |
|
1248 | 1250 | else: |
|
1249 | 1251 | auth_token_access_valid = False |
|
1250 | 1252 | if not _auth_token: |
|
1251 | 1253 | log.debug("AUTH TOKEN *NOT* present in request") |
|
1252 | 1254 | else: |
|
1253 | 1255 | log.warning( |
|
1254 | 1256 | "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:]) |
|
1255 | 1257 | |
|
1256 | 1258 | log.debug('Checking if %s is authenticated @ %s' % (user.username, loc)) |
|
1257 | 1259 | reason = 'RHODECODE_AUTH' if user.is_authenticated \ |
|
1258 | 1260 | else 'AUTH_TOKEN_AUTH' |
|
1259 | 1261 | |
|
1260 | 1262 | if ip_access_valid and ( |
|
1261 | 1263 | user.is_authenticated or auth_token_access_valid): |
|
1262 | 1264 | log.info( |
|
1263 | 1265 | 'user %s authenticating with:%s IS authenticated on func %s' |
|
1264 | 1266 | % (user, reason, loc)) |
|
1265 | 1267 | |
|
1266 | 1268 | # update user data to check last activity |
|
1267 | 1269 | user.update_lastactivity() |
|
1268 | 1270 | Session().commit() |
|
1269 | 1271 | return func(*fargs, **fkwargs) |
|
1270 | 1272 | else: |
|
1271 | 1273 | log.warning( |
|
1272 | 1274 | 'user %s authenticating with:%s NOT authenticated on ' |
|
1273 | 1275 | 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s' |
|
1274 | 1276 | % (user, reason, loc, ip_access_valid, |
|
1275 | 1277 | auth_token_access_valid)) |
|
1276 | 1278 | # we preserve the get PARAM |
|
1277 | 1279 | came_from = request.path_qs |
|
1278 | 1280 | log.debug('redirecting to login page with %s' % (came_from,)) |
|
1279 | 1281 | raise HTTPFound( |
|
1280 | 1282 | h.route_path('login', _query={'came_from': came_from})) |
|
1281 | 1283 | |
|
1282 | 1284 | |
|
1283 | 1285 | class NotAnonymous(object): |
|
1284 | 1286 | """ |
|
1285 | 1287 | Must be logged in to execute this function else |
|
1286 | 1288 | redirect to login page |
|
1287 | 1289 | """ |
|
1288 | 1290 | |
|
1289 | 1291 | def __call__(self, func): |
|
1290 | 1292 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1291 | 1293 | |
|
1292 | 1294 | def _get_request(self): |
|
1293 | 1295 | return get_request(self) |
|
1294 | 1296 | |
|
1295 | 1297 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1296 | 1298 | import rhodecode.lib.helpers as h |
|
1297 | 1299 | cls = fargs[0] |
|
1298 | 1300 | self.user = cls._rhodecode_user |
|
1299 | 1301 | request = self._get_request() |
|
1300 | 1302 | |
|
1301 | 1303 | log.debug('Checking if user is not anonymous @%s' % cls) |
|
1302 | 1304 | |
|
1303 | 1305 | anonymous = self.user.username == User.DEFAULT_USER |
|
1304 | 1306 | |
|
1305 | 1307 | if anonymous: |
|
1306 | 1308 | came_from = request.path_qs |
|
1307 | 1309 | h.flash(_('You need to be a registered user to ' |
|
1308 | 1310 | 'perform this action'), |
|
1309 | 1311 | category='warning') |
|
1310 | 1312 | raise HTTPFound( |
|
1311 | 1313 | h.route_path('login', _query={'came_from': came_from})) |
|
1312 | 1314 | else: |
|
1313 | 1315 | return func(*fargs, **fkwargs) |
|
1314 | 1316 | |
|
1315 | 1317 | |
|
1316 | 1318 | class XHRRequired(object): |
|
1317 | 1319 | # TODO(marcink): remove this in favor of the predicates in pyramid routes |
|
1318 | 1320 | |
|
1319 | 1321 | def __call__(self, func): |
|
1320 | 1322 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1321 | 1323 | |
|
1322 | 1324 | def _get_request(self): |
|
1323 | 1325 | return get_request(self) |
|
1324 | 1326 | |
|
1325 | 1327 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1326 | 1328 | from pylons.controllers.util import abort |
|
1327 | 1329 | request = self._get_request() |
|
1328 | 1330 | |
|
1329 | 1331 | log.debug('Checking if request is XMLHttpRequest (XHR)') |
|
1330 | 1332 | xhr_message = 'This is not a valid XMLHttpRequest (XHR) request' |
|
1331 | 1333 | |
|
1332 | 1334 | if not request.is_xhr: |
|
1333 | 1335 | abort(400, detail=xhr_message) |
|
1334 | 1336 | |
|
1335 | 1337 | return func(*fargs, **fkwargs) |
|
1336 | 1338 | |
|
1337 | 1339 | |
|
1338 | 1340 | class HasAcceptedRepoType(object): |
|
1339 | 1341 | """ |
|
1340 | 1342 | Check if requested repo is within given repo type aliases |
|
1341 | 1343 | """ |
|
1342 | 1344 | |
|
1343 | 1345 | # TODO(marcink): remove this in favor of the predicates in pyramid routes |
|
1344 | 1346 | |
|
1345 | 1347 | def __init__(self, *repo_type_list): |
|
1346 | 1348 | self.repo_type_list = set(repo_type_list) |
|
1347 | 1349 | |
|
1348 | 1350 | def __call__(self, func): |
|
1349 | 1351 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1350 | 1352 | |
|
1351 | 1353 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1352 | 1354 | import rhodecode.lib.helpers as h |
|
1353 | 1355 | cls = fargs[0] |
|
1354 | 1356 | rhodecode_repo = cls.rhodecode_repo |
|
1355 | 1357 | |
|
1356 | 1358 | log.debug('%s checking repo type for %s in %s', |
|
1357 | 1359 | self.__class__.__name__, |
|
1358 | 1360 | rhodecode_repo.alias, self.repo_type_list) |
|
1359 | 1361 | |
|
1360 | 1362 | if rhodecode_repo.alias in self.repo_type_list: |
|
1361 | 1363 | return func(*fargs, **fkwargs) |
|
1362 | 1364 | else: |
|
1363 | 1365 | h.flash(h.literal( |
|
1364 | 1366 | _('Action not supported for %s.' % rhodecode_repo.alias)), |
|
1365 | 1367 | category='warning') |
|
1366 | 1368 | raise HTTPFound( |
|
1367 | 1369 | h.route_path('repo_summary', |
|
1368 | 1370 | repo_name=cls.rhodecode_db_repo.repo_name)) |
|
1369 | 1371 | |
|
1370 | 1372 | |
|
1371 | 1373 | class PermsDecorator(object): |
|
1372 | 1374 | """ |
|
1373 | 1375 | Base class for controller decorators, we extract the current user from |
|
1374 | 1376 | the class itself, which has it stored in base controllers |
|
1375 | 1377 | """ |
|
1376 | 1378 | |
|
1377 | 1379 | def __init__(self, *required_perms): |
|
1378 | 1380 | self.required_perms = set(required_perms) |
|
1379 | 1381 | |
|
1380 | 1382 | def __call__(self, func): |
|
1381 | 1383 | return get_cython_compat_decorator(self.__wrapper, func) |
|
1382 | 1384 | |
|
1383 | 1385 | def _get_request(self): |
|
1384 | 1386 | return get_request(self) |
|
1385 | 1387 | |
|
1386 | 1388 | def _get_came_from(self): |
|
1387 | 1389 | _request = self._get_request() |
|
1388 | 1390 | |
|
1389 | 1391 | # both pylons/pyramid have this attribute
|
1390 | 1392 | return _request.path_qs |
|
1391 | 1393 | |
|
1392 | 1394 | def __wrapper(self, func, *fargs, **fkwargs): |
|
1393 | 1395 | import rhodecode.lib.helpers as h |
|
1394 | 1396 | cls = fargs[0] |
|
1395 | 1397 | _user = cls._rhodecode_user |
|
1396 | 1398 | |
|
1397 | 1399 | log.debug('checking %s permissions %s for %s %s', |
|
1398 | 1400 | self.__class__.__name__, self.required_perms, cls, _user) |
|
1399 | 1401 | |
|
1400 | 1402 | if self.check_permissions(_user): |
|
1401 | 1403 | log.debug('Permission granted for %s %s', cls, _user) |
|
1402 | 1404 | return func(*fargs, **fkwargs) |
|
1403 | 1405 | |
|
1404 | 1406 | else: |
|
1405 | 1407 | log.debug('Permission denied for %s %s', cls, _user) |
|
1406 | 1408 | anonymous = _user.username == User.DEFAULT_USER |
|
1407 | 1409 | |
|
1408 | 1410 | if anonymous: |
|
1409 | 1411 | came_from = self._get_came_from() |
|
1410 | 1412 | h.flash(_('You need to be signed in to view this page'), |
|
1411 | 1413 | category='warning') |
|
1412 | 1414 | raise HTTPFound( |
|
1413 | 1415 | h.route_path('login', _query={'came_from': came_from})) |
|
1414 | 1416 | |
|
1415 | 1417 | else: |
|
1416 | 1418 | # redirect with forbidden ret code |
|
1417 | 1419 | raise HTTPForbidden() |
|
1418 | 1420 | |
|
1419 | 1421 | def check_permissions(self, user): |
|
1420 | 1422 | """Dummy function for overriding""" |
|
1421 | 1423 | raise NotImplementedError( |
|
1422 | 1424 | 'You have to write this function in child class') |
|
1423 | 1425 | |
|
1424 | 1426 | |
|
1425 | 1427 | class HasPermissionAllDecorator(PermsDecorator): |
|
1426 | 1428 | """ |
|
1427 | 1429 | Checks for access permission for all given predicates. All of them |
|
1428 | 1430 | have to be met in order to fulfill the request
|
1429 | 1431 | """ |
|
1430 | 1432 | |
|
1431 | 1433 | def check_permissions(self, user): |
|
1432 | 1434 | perms = user.permissions_with_scope({}) |
|
1433 | 1435 | if self.required_perms.issubset(perms['global']): |
|
1434 | 1436 | return True |
|
1435 | 1437 | return False |
|
1436 | 1438 | |
|
1437 | 1439 | |
|
1438 | 1440 | class HasPermissionAnyDecorator(PermsDecorator): |
|
1439 | 1441 | """ |
|
1440 | 1442 | Checks for access permission for any of given predicates. In order to |
|
1441 | 1443 | fulfill the request any of the predicates must be met
|
1442 | 1444 | """ |
|
1443 | 1445 | |
|
1444 | 1446 | def check_permissions(self, user): |
|
1445 | 1447 | perms = user.permissions_with_scope({}) |
|
1446 | 1448 | if self.required_perms.intersection(perms['global']): |
|
1447 | 1449 | return True |
|
1448 | 1450 | return False |
|
1449 | 1451 | |
|
1450 | 1452 | |
|
1451 | 1453 | class HasRepoPermissionAllDecorator(PermsDecorator): |
|
1452 | 1454 | """ |
|
1453 | 1455 | Checks for access permission for all given predicates for specific |
|
1454 | 1456 | repository. All of them have to be met in order to fulfill the request
|
1455 | 1457 | """ |
|
1456 | 1458 | def _get_repo_name(self): |
|
1457 | 1459 | _request = self._get_request() |
|
1458 | 1460 | return get_repo_slug(_request) |
|
1459 | 1461 | |
|
1460 | 1462 | def check_permissions(self, user): |
|
1461 | 1463 | perms = user.permissions |
|
1462 | 1464 | repo_name = self._get_repo_name() |
|
1463 | 1465 | |
|
1464 | 1466 | try: |
|
1465 | 1467 | user_perms = set([perms['repositories'][repo_name]]) |
|
1466 | 1468 | except KeyError: |
|
1467 | 1469 | log.debug('cannot locate repo with name: `%s` in permissions defs', |
|
1468 | 1470 | repo_name) |
|
1469 | 1471 | return False |
|
1470 | 1472 | |
|
1471 | 1473 | log.debug('checking `%s` permissions for repo `%s`', |
|
1472 | 1474 | user_perms, repo_name) |
|
1473 | 1475 | if self.required_perms.issubset(user_perms): |
|
1474 | 1476 | return True |
|
1475 | 1477 | return False |
|
1476 | 1478 | |
|
1477 | 1479 | |
|
1478 | 1480 | class HasRepoPermissionAnyDecorator(PermsDecorator): |
|
1479 | 1481 | """ |
|
1480 | 1482 | Checks for access permission for any of given predicates for specific |
|
1481 | 1483 | repository. In order to fulfill the request any of the predicates must be met
|
1482 | 1484 | """ |
|
1483 | 1485 | def _get_repo_name(self): |
|
1484 | 1486 | _request = self._get_request() |
|
1485 | 1487 | return get_repo_slug(_request) |
|
1486 | 1488 | |
|
1487 | 1489 | def check_permissions(self, user): |
|
1488 | 1490 | perms = user.permissions |
|
1489 | 1491 | repo_name = self._get_repo_name() |
|
1490 | 1492 | |
|
1491 | 1493 | try: |
|
1492 | 1494 | user_perms = set([perms['repositories'][repo_name]]) |
|
1493 | 1495 | except KeyError: |
|
1494 | 1496 | log.debug('cannot locate repo with name: `%s` in permissions defs', |
|
1495 | 1497 | repo_name) |
|
1496 | 1498 | return False |
|
1497 | 1499 | |
|
1498 | 1500 | log.debug('checking `%s` permissions for repo `%s`', |
|
1499 | 1501 | user_perms, repo_name) |
|
1500 | 1502 | if self.required_perms.intersection(user_perms): |
|
1501 | 1503 | return True |
|
1502 | 1504 | return False |
|
1503 | 1505 | |
|
1504 | 1506 | |
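# Hypothetical usage of the repo-scoped decorators above; the permission
# names are the `repository.*` levels used throughout this module:
class _ExampleRepoController(object):  # illustrative stand-in
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def index(self):
        return 'repo summary'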
|
1505 | 1507 | class HasRepoGroupPermissionAllDecorator(PermsDecorator): |
|
1506 | 1508 | """ |
|
1507 | 1509 | Checks for access permission for all given predicates for specific |
|
1508 | 1510 | repository group. All of them have to be met in order to
|
1509 | 1511 | fulfill the request |
|
1510 | 1512 | """ |
|
1511 | 1513 | def _get_repo_group_name(self): |
|
1512 | 1514 | _request = self._get_request() |
|
1513 | 1515 | return get_repo_group_slug(_request) |
|
1514 | 1516 | |
|
1515 | 1517 | def check_permissions(self, user): |
|
1516 | 1518 | perms = user.permissions |
|
1517 | 1519 | group_name = self._get_repo_group_name() |
|
1518 | 1520 | try: |
|
1519 | 1521 | user_perms = set([perms['repositories_groups'][group_name]]) |
|
1520 | 1522 | except KeyError: |
|
1521 | 1523 | log.debug('cannot locate repo group with name: `%s` in permissions defs', |
|
1522 | 1524 | group_name) |
|
1523 | 1525 | return False |
|
1524 | 1526 | |
|
1525 | 1527 | log.debug('checking `%s` permissions for repo group `%s`', |
|
1526 | 1528 | user_perms, group_name) |
|
1527 | 1529 | if self.required_perms.issubset(user_perms): |
|
1528 | 1530 | return True |
|
1529 | 1531 | return False |
|
1530 | 1532 | |
|
1531 | 1533 | |
|
1532 | 1534 | class HasRepoGroupPermissionAnyDecorator(PermsDecorator): |
|
1533 | 1535 | """ |
|
1534 | 1536 | Checks for access permission for any of given predicates for specific |
|
1535 | 1537 | repository group. In order to fulfill the request any |
|
1536 | 1538 | of predicates must be met |
|
1537 | 1539 | """ |
|
1538 | 1540 | def _get_repo_group_name(self): |
|
1539 | 1541 | _request = self._get_request() |
|
1540 | 1542 | return get_repo_group_slug(_request) |
|
1541 | 1543 | |
|
1542 | 1544 | def check_permissions(self, user): |
|
1543 | 1545 | perms = user.permissions |
|
1544 | 1546 | group_name = self._get_repo_group_name() |
|
1545 | 1547 | |
|
1546 | 1548 | try: |
|
1547 | 1549 | user_perms = set([perms['repositories_groups'][group_name]]) |
|
1548 | 1550 | except KeyError: |
|
1549 | 1551 | log.debug('cannot locate repo group with name: `%s` in permissions defs', |
|
1550 | 1552 | group_name) |
|
1551 | 1553 | return False |
|
1552 | 1554 | |
|
1553 | 1555 | log.debug('checking `%s` permissions for repo group `%s`', |
|
1554 | 1556 | user_perms, group_name) |
|
1555 | 1557 | if self.required_perms.intersection(user_perms): |
|
1556 | 1558 | return True |
|
1557 | 1559 | return False |
|
1558 | 1560 | |
|
1559 | 1561 | |
|
1560 | 1562 | class HasUserGroupPermissionAllDecorator(PermsDecorator): |
|
1561 | 1563 | """ |
|
1562 | 1564 | Checks for access permission for all given predicates for specific |
|
1563 | 1565 | user group. All of them have to be met in order to fulfill the request
|
1564 | 1566 | """ |
|
1565 | 1567 | def _get_user_group_name(self): |
|
1566 | 1568 | _request = self._get_request() |
|
1567 | 1569 | return get_user_group_slug(_request) |
|
1568 | 1570 | |
|
1569 | 1571 | def check_permissions(self, user): |
|
1570 | 1572 | perms = user.permissions |
|
1571 | 1573 | group_name = self._get_user_group_name() |
|
1572 | 1574 | try: |
|
1573 | 1575 | user_perms = set([perms['user_groups'][group_name]]) |
|
1574 | 1576 | except KeyError: |
|
1575 | 1577 | return False |
|
1576 | 1578 | |
|
1577 | 1579 | if self.required_perms.issubset(user_perms): |
|
1578 | 1580 | return True |
|
1579 | 1581 | return False |
|
1580 | 1582 | |
|
1581 | 1583 | |
|
1582 | 1584 | class HasUserGroupPermissionAnyDecorator(PermsDecorator): |
|
1583 | 1585 | """ |
|
1584 | 1586 | Checks for access permission for any of given predicates for specific |
|
1585 | 1587 | user group. In order to fulfill the request any of the predicates must be met
|
1586 | 1588 | """ |
|
1587 | 1589 | def _get_user_group_name(self): |
|
1588 | 1590 | _request = self._get_request() |
|
1589 | 1591 | return get_user_group_slug(_request) |
|
1590 | 1592 | |
|
1591 | 1593 | def check_permissions(self, user): |
|
1592 | 1594 | perms = user.permissions |
|
1593 | 1595 | group_name = self._get_user_group_name() |
|
1594 | 1596 | try: |
|
1595 | 1597 | user_perms = set([perms['user_groups'][group_name]]) |
|
1596 | 1598 | except KeyError: |
|
1597 | 1599 | return False |
|
1598 | 1600 | |
|
1599 | 1601 | if self.required_perms.intersection(user_perms): |
|
1600 | 1602 | return True |
|
1601 | 1603 | return False |
|
1602 | 1604 | |
|
1603 | 1605 | |
|
1604 | 1606 | # CHECK FUNCTIONS |
|
1605 | 1607 | class PermsFunction(object): |
|
1606 | 1608 | """Base function for other check functions""" |
|
1607 | 1609 | |
|
1608 | 1610 | def __init__(self, *perms): |
|
1609 | 1611 | self.required_perms = set(perms) |
|
1610 | 1612 | self.repo_name = None |
|
1611 | 1613 | self.repo_group_name = None |
|
1612 | 1614 | self.user_group_name = None |
|
1613 | 1615 | |
|
1614 | 1616 | def __bool__(self): |
|
1615 | 1617 | frame = inspect.currentframe() |
|
1616 | 1618 | stack_trace = traceback.format_stack(frame) |
|
1617 | 1619 | log.error('Checking bool value on a class instance of perm ' |
|
1618 | 1620 | 'function is not allowed: %s' % ''.join(stack_trace)) |
|
1619 | 1621 | # rather than raising an error, we always return False here, so if

1620 | 1622 | # someone accidentally checks the truth of a bare instance it will

1621 | 1623 | # always end up returning False
|
1622 | 1624 | return False |
|
1623 | 1625 | __nonzero__ = __bool__ |
|
1624 | 1626 | |
|
1625 | 1627 | def __call__(self, check_location='', user=None): |
|
1626 | 1628 | if not user: |
|
1627 | 1629 | log.debug('Using user attribute from global request') |
|
1628 | 1630 | # TODO: remove this someday, pass user in as an attribute here
|
1629 | 1631 | request = self._get_request() |
|
1630 | 1632 | user = request.user |
|
1631 | 1633 | |
|
1632 | 1634 | # init auth user if not already given |
|
1633 | 1635 | if not isinstance(user, AuthUser): |
|
1634 | 1636 | log.debug('Wrapping user %s into AuthUser', user) |
|
1635 | 1637 | user = AuthUser(user.user_id) |
|
1636 | 1638 | |
|
1637 | 1639 | cls_name = self.__class__.__name__ |
|
1638 | 1640 | check_scope = self._get_check_scope(cls_name) |
|
1639 | 1641 | check_location = check_location or 'unspecified location' |
|
1640 | 1642 | |
|
1641 | 1643 | log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name, |
|
1642 | 1644 | self.required_perms, user, check_scope, check_location) |
|
1643 | 1645 | if not user: |
|
1644 | 1646 | log.warning('Empty user given for permission check') |
|
1645 | 1647 | return False |
|
1646 | 1648 | |
|
1647 | 1649 | if self.check_permissions(user): |
|
1648 | 1650 | log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s', |
|
1649 | 1651 | check_scope, user, check_location) |
|
1650 | 1652 | return True |
|
1651 | 1653 | |
|
1652 | 1654 | else: |
|
1653 | 1655 | log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s', |
|
1654 | 1656 | check_scope, user, check_location) |
|
1655 | 1657 | return False |
|
1656 | 1658 | |
|
1657 | 1659 | def _get_request(self): |
|
1658 | 1660 | return get_request(self) |
|
1659 | 1661 | |
|
1660 | 1662 | def _get_check_scope(self, cls_name): |
|
1661 | 1663 | return { |
|
1662 | 1664 | 'HasPermissionAll': 'GLOBAL', |
|
1663 | 1665 | 'HasPermissionAny': 'GLOBAL', |
|
1664 | 1666 | 'HasRepoPermissionAll': 'repo:%s' % self.repo_name, |
|
1665 | 1667 | 'HasRepoPermissionAny': 'repo:%s' % self.repo_name, |
|
1666 | 1668 | 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name, |
|
1667 | 1669 | 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name, |
|
1668 | 1670 | 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name, |
|
1669 | 1671 | 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name, |
|
1670 | 1672 | }.get(cls_name, '?:%s' % cls_name) |
|
1671 | 1673 | |
|
1672 | 1674 | def check_permissions(self, user): |
|
1673 | 1675 | """Dummy function for overriding""" |
|
1674 | 1676 | raise Exception('You have to override this function in a child class')
|
1675 | 1677 | |
|
1676 | 1678 | |
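For illustration, a hedged sketch of how these check functions are meant to be used: instantiate with the required permissions, then call the instance (the perm and repo names below are assumed; truth-testing the bare instance is deliberately neutered by __bool__ above):

can_read = HasRepoPermissionAny('repository.read', 'repository.admin')
if can_read(repo_name='some/repo', check_location='docs-example'):
    pass  # permission granted for request.user
# `if can_read:` (without calling) would log an error and evaluate False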
|
1677 | 1679 | class HasPermissionAll(PermsFunction): |
|
1678 | 1680 | def check_permissions(self, user): |
|
1679 | 1681 | perms = user.permissions_with_scope({}) |
|
1680 | 1682 | if self.required_perms.issubset(perms.get('global')): |
|
1681 | 1683 | return True |
|
1682 | 1684 | return False |
|
1683 | 1685 | |
|
1684 | 1686 | |
|
1685 | 1687 | class HasPermissionAny(PermsFunction): |
|
1686 | 1688 | def check_permissions(self, user): |
|
1687 | 1689 | perms = user.permissions_with_scope({}) |
|
1688 | 1690 | if self.required_perms.intersection(perms.get('global')): |
|
1689 | 1691 | return True |
|
1690 | 1692 | return False |
|
1691 | 1693 | |
|
1692 | 1694 | |
|
1693 | 1695 | class HasRepoPermissionAll(PermsFunction): |
|
1694 | 1696 | def __call__(self, repo_name=None, check_location='', user=None): |
|
1695 | 1697 | self.repo_name = repo_name |
|
1696 | 1698 | return super(HasRepoPermissionAll, self).__call__(check_location, user) |
|
1697 | 1699 | |
|
1698 | 1700 | def _get_repo_name(self): |
|
1699 | 1701 | if not self.repo_name: |
|
1700 | 1702 | _request = self._get_request() |
|
1701 | 1703 | self.repo_name = get_repo_slug(_request) |
|
1702 | 1704 | return self.repo_name |
|
1703 | 1705 | |
|
1704 | 1706 | def check_permissions(self, user): |
|
1705 | 1707 | self.repo_name = self._get_repo_name() |
|
1706 | 1708 | perms = user.permissions |
|
1707 | 1709 | try: |
|
1708 | 1710 | user_perms = set([perms['repositories'][self.repo_name]]) |
|
1709 | 1711 | except KeyError: |
|
1710 | 1712 | return False |
|
1711 | 1713 | if self.required_perms.issubset(user_perms): |
|
1712 | 1714 | return True |
|
1713 | 1715 | return False |
|
1714 | 1716 | |
|
1715 | 1717 | |
|
1716 | 1718 | class HasRepoPermissionAny(PermsFunction): |
|
1717 | 1719 | def __call__(self, repo_name=None, check_location='', user=None): |
|
1718 | 1720 | self.repo_name = repo_name |
|
1719 | 1721 | return super(HasRepoPermissionAny, self).__call__(check_location, user) |
|
1720 | 1722 | |
|
1721 | 1723 | def _get_repo_name(self): |
|
1722 | 1724 | if not self.repo_name: |
|
1723 | 1725 | _request = self._get_request() |
|
1724 | 1726 | self.repo_name = get_repo_slug(_request) |
|
1725 | 1727 | return self.repo_name |
|
1726 | 1728 | |
|
1727 | 1729 | def check_permissions(self, user): |
|
1728 | 1730 | self.repo_name = self._get_repo_name() |
|
1729 | 1731 | perms = user.permissions |
|
1730 | 1732 | try: |
|
1731 | 1733 | user_perms = set([perms['repositories'][self.repo_name]]) |
|
1732 | 1734 | except KeyError: |
|
1733 | 1735 | return False |
|
1734 | 1736 | if self.required_perms.intersection(user_perms): |
|
1735 | 1737 | return True |
|
1736 | 1738 | return False |
|
1737 | 1739 | |
|
1738 | 1740 | |
|
1739 | 1741 | class HasRepoGroupPermissionAny(PermsFunction): |
|
1740 | 1742 | def __call__(self, group_name=None, check_location='', user=None): |
|
1741 | 1743 | self.repo_group_name = group_name |
|
1742 | 1744 | return super(HasRepoGroupPermissionAny, self).__call__( |
|
1743 | 1745 | check_location, user) |
|
1744 | 1746 | |
|
1745 | 1747 | def check_permissions(self, user): |
|
1746 | 1748 | perms = user.permissions |
|
1747 | 1749 | try: |
|
1748 | 1750 | user_perms = set( |
|
1749 | 1751 | [perms['repositories_groups'][self.repo_group_name]]) |
|
1750 | 1752 | except KeyError: |
|
1751 | 1753 | return False |
|
1752 | 1754 | if self.required_perms.intersection(user_perms): |
|
1753 | 1755 | return True |
|
1754 | 1756 | return False |
|
1755 | 1757 | |
|
1756 | 1758 | |
|
1757 | 1759 | class HasRepoGroupPermissionAll(PermsFunction): |
|
1758 | 1760 | def __call__(self, group_name=None, check_location='', user=None): |
|
1759 | 1761 | self.repo_group_name = group_name |
|
1760 | 1762 | return super(HasRepoGroupPermissionAll, self).__call__( |
|
1761 | 1763 | check_location, user) |
|
1762 | 1764 | |
|
1763 | 1765 | def check_permissions(self, user): |
|
1764 | 1766 | perms = user.permissions |
|
1765 | 1767 | try: |
|
1766 | 1768 | user_perms = set( |
|
1767 | 1769 | [perms['repositories_groups'][self.repo_group_name]]) |
|
1768 | 1770 | except KeyError: |
|
1769 | 1771 | return False |
|
1770 | 1772 | if self.required_perms.issubset(user_perms): |
|
1771 | 1773 | return True |
|
1772 | 1774 | return False |
|
1773 | 1775 | |
|
1774 | 1776 | |
|
1775 | 1777 | class HasUserGroupPermissionAny(PermsFunction): |
|
1776 | 1778 | def __call__(self, user_group_name=None, check_location='', user=None): |
|
1777 | 1779 | self.user_group_name = user_group_name |
|
1778 | 1780 | return super(HasUserGroupPermissionAny, self).__call__( |
|
1779 | 1781 | check_location, user) |
|
1780 | 1782 | |
|
1781 | 1783 | def check_permissions(self, user): |
|
1782 | 1784 | perms = user.permissions |
|
1783 | 1785 | try: |
|
1784 | 1786 | user_perms = set([perms['user_groups'][self.user_group_name]]) |
|
1785 | 1787 | except KeyError: |
|
1786 | 1788 | return False |
|
1787 | 1789 | if self.required_perms.intersection(user_perms): |
|
1788 | 1790 | return True |
|
1789 | 1791 | return False |
|
1790 | 1792 | |
|
1791 | 1793 | |
|
1792 | 1794 | class HasUserGroupPermissionAll(PermsFunction): |
|
1793 | 1795 | def __call__(self, user_group_name=None, check_location='', user=None): |
|
1794 | 1796 | self.user_group_name = user_group_name |
|
1795 | 1797 | return super(HasUserGroupPermissionAll, self).__call__( |
|
1796 | 1798 | check_location, user) |
|
1797 | 1799 | |
|
1798 | 1800 | def check_permissions(self, user): |
|
1799 | 1801 | perms = user.permissions |
|
1800 | 1802 | try: |
|
1801 | 1803 | user_perms = set([perms['user_groups'][self.user_group_name]]) |
|
1802 | 1804 | except KeyError: |
|
1803 | 1805 | return False |
|
1804 | 1806 | if self.required_perms.issubset(user_perms): |
|
1805 | 1807 | return True |
|
1806 | 1808 | return False |
|
1807 | 1809 | |
|
1808 | 1810 | |
|
1809 | 1811 | # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH |
|
1810 | 1812 | class HasPermissionAnyMiddleware(object): |
|
1811 | 1813 | def __init__(self, *perms): |
|
1812 | 1814 | self.required_perms = set(perms) |
|
1813 | 1815 | |
|
1814 | 1816 | def __call__(self, user, repo_name): |
|
1815 | 1817 | # repo_name MUST be unicode, since we handle keys in permission |
|
1816 | 1818 | # dict by unicode |
|
1817 | 1819 | repo_name = safe_unicode(repo_name) |
|
1818 | 1820 | user = AuthUser(user.user_id) |
|
1819 | 1821 | log.debug( |
|
1820 | 1822 | 'Checking VCS protocol permissions %s for user:%s repo:`%s`', |
|
1821 | 1823 | self.required_perms, user, repo_name) |
|
1822 | 1824 | |
|
1823 | 1825 | if self.check_permissions(user, repo_name): |
|
1824 | 1826 | log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s', |
|
1825 | 1827 | repo_name, user, 'PermissionMiddleware') |
|
1826 | 1828 | return True |
|
1827 | 1829 | |
|
1828 | 1830 | else: |
|
1829 | 1831 | log.debug('Permission to repo:`%s` DENIED for user:%s @ %s', |
|
1830 | 1832 | repo_name, user, 'PermissionMiddleware') |
|
1831 | 1833 | return False |
|
1832 | 1834 | |
|
1833 | 1835 | def check_permissions(self, user, repo_name): |
|
1834 | 1836 | perms = user.permissions_with_scope({'repo_name': repo_name}) |
|
1835 | 1837 | |
|
1836 | 1838 | try: |
|
1837 | 1839 | user_perms = set([perms['repositories'][repo_name]]) |
|
1838 | 1840 | except Exception: |
|
1839 | 1841 | log.exception('Error while accessing user permissions') |
|
1840 | 1842 | return False |
|
1841 | 1843 | |
|
1842 | 1844 | if self.required_perms.intersection(user_perms): |
|
1843 | 1845 | return True |
|
1844 | 1846 | return False |
|
1845 | 1847 | |
|
1846 | 1848 | |
|
1847 | 1849 | # SPECIAL VERSION TO HANDLE API AUTH |
|
1848 | 1850 | class _BaseApiPerm(object): |
|
1849 | 1851 | def __init__(self, *perms): |
|
1850 | 1852 | self.required_perms = set(perms) |
|
1851 | 1853 | |
|
1852 | 1854 | def __call__(self, check_location=None, user=None, repo_name=None, |
|
1853 | 1855 | group_name=None, user_group_name=None): |
|
1854 | 1856 | cls_name = self.__class__.__name__ |
|
1855 | 1857 | check_scope = 'global:%s' % (self.required_perms,) |
|
1856 | 1858 | if repo_name: |
|
1857 | 1859 | check_scope += ', repo_name:%s' % (repo_name,) |
|
1858 | 1860 | |
|
1859 | 1861 | if group_name: |
|
1860 | 1862 | check_scope += ', repo_group_name:%s' % (group_name,) |
|
1861 | 1863 | |
|
1862 | 1864 | if user_group_name: |
|
1863 | 1865 | check_scope += ', user_group_name:%s' % (user_group_name,) |
|
1864 | 1866 | |
|
1865 | 1867 | log.debug( |
|
1866 | 1868 | 'checking cls:%s %s %s @ %s' |
|
1867 | 1869 | % (cls_name, self.required_perms, check_scope, check_location)) |
|
1868 | 1870 | if not user: |
|
1869 | 1871 | log.debug('Empty User passed into arguments') |
|
1870 | 1872 | return False |
|
1871 | 1873 | |
|
1872 | 1874 | # process user |
|
1873 | 1875 | if not isinstance(user, AuthUser): |
|
1874 | 1876 | user = AuthUser(user.user_id) |
|
1875 | 1877 | if not check_location: |
|
1876 | 1878 | check_location = 'unspecified' |
|
1877 | 1879 | if self.check_permissions(user.permissions, repo_name, group_name, |
|
1878 | 1880 | user_group_name): |
|
1879 | 1881 | log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s', |
|
1880 | 1882 | check_scope, user, check_location) |
|
1881 | 1883 | return True |
|
1882 | 1884 | |
|
1883 | 1885 | else: |
|
1884 | 1886 | log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s', |
|
1885 | 1887 | check_scope, user, check_location) |
|
1886 | 1888 | return False |
|
1887 | 1889 | |
|
1888 | 1890 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1889 | 1891 | user_group_name=None): |
|
1890 | 1892 | """ |
|
1891 | 1893 | Implement in a child class; should return True if permissions are OK,

1892 | 1894 | False otherwise
|
1893 | 1895 | |
|
1894 | 1896 | :param perm_defs: dict with permission definitions |
|
1895 | 1897 | :param repo_name: repo name |
|
1896 | 1898 | """ |
|
1897 | 1899 | raise NotImplementedError() |
|
1898 | 1900 | |
|
1899 | 1901 | |
|
1900 | 1902 | class HasPermissionAllApi(_BaseApiPerm): |
|
1901 | 1903 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1902 | 1904 | user_group_name=None): |
|
1903 | 1905 | if self.required_perms.issubset(perm_defs.get('global')): |
|
1904 | 1906 | return True |
|
1905 | 1907 | return False |
|
1906 | 1908 | |
|
1907 | 1909 | |
|
1908 | 1910 | class HasPermissionAnyApi(_BaseApiPerm): |
|
1909 | 1911 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1910 | 1912 | user_group_name=None): |
|
1911 | 1913 | if self.required_perms.intersection(perm_defs.get('global')): |
|
1912 | 1914 | return True |
|
1913 | 1915 | return False |
|
1914 | 1916 | |
|
1915 | 1917 | |
|
1916 | 1918 | class HasRepoPermissionAllApi(_BaseApiPerm): |
|
1917 | 1919 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1918 | 1920 | user_group_name=None): |
|
1919 | 1921 | try: |
|
1920 | 1922 | _user_perms = set([perm_defs['repositories'][repo_name]]) |
|
1921 | 1923 | except KeyError: |
|
1922 | 1924 | log.warning(traceback.format_exc()) |
|
1923 | 1925 | return False |
|
1924 | 1926 | if self.required_perms.issubset(_user_perms): |
|
1925 | 1927 | return True |
|
1926 | 1928 | return False |
|
1927 | 1929 | |
|
1928 | 1930 | |
|
1929 | 1931 | class HasRepoPermissionAnyApi(_BaseApiPerm): |
|
1930 | 1932 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1931 | 1933 | user_group_name=None): |
|
1932 | 1934 | try: |
|
1933 | 1935 | _user_perms = set([perm_defs['repositories'][repo_name]]) |
|
1934 | 1936 | except KeyError: |
|
1935 | 1937 | log.warning(traceback.format_exc()) |
|
1936 | 1938 | return False |
|
1937 | 1939 | if self.required_perms.intersection(_user_perms): |
|
1938 | 1940 | return True |
|
1939 | 1941 | return False |
|
1940 | 1942 | |
|
1941 | 1943 | |
|
1942 | 1944 | class HasRepoGroupPermissionAnyApi(_BaseApiPerm): |
|
1943 | 1945 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1944 | 1946 | user_group_name=None): |
|
1945 | 1947 | try: |
|
1946 | 1948 | _user_perms = set([perm_defs['repositories_groups'][group_name]]) |
|
1947 | 1949 | except KeyError: |
|
1948 | 1950 | log.warning(traceback.format_exc()) |
|
1949 | 1951 | return False |
|
1950 | 1952 | if self.required_perms.intersection(_user_perms): |
|
1951 | 1953 | return True |
|
1952 | 1954 | return False |
|
1953 | 1955 | |
|
1954 | 1956 | |
|
1955 | 1957 | class HasRepoGroupPermissionAllApi(_BaseApiPerm): |
|
1956 | 1958 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1957 | 1959 | user_group_name=None): |
|
1958 | 1960 | try: |
|
1959 | 1961 | _user_perms = set([perm_defs['repositories_groups'][group_name]]) |
|
1960 | 1962 | except KeyError: |
|
1961 | 1963 | log.warning(traceback.format_exc()) |
|
1962 | 1964 | return False |
|
1963 | 1965 | if self.required_perms.issubset(_user_perms): |
|
1964 | 1966 | return True |
|
1965 | 1967 | return False |
|
1966 | 1968 | |
|
1967 | 1969 | |
|
1968 | 1970 | class HasUserGroupPermissionAnyApi(_BaseApiPerm): |
|
1969 | 1971 | def check_permissions(self, perm_defs, repo_name=None, group_name=None, |
|
1970 | 1972 | user_group_name=None): |
|
1971 | 1973 | try: |
|
1972 | 1974 | _user_perms = set([perm_defs['user_groups'][user_group_name]]) |
|
1973 | 1975 | except KeyError: |
|
1974 | 1976 | log.warning(traceback.format_exc()) |
|
1975 | 1977 | return False |
|
1976 | 1978 | if self.required_perms.intersection(_user_perms): |
|
1977 | 1979 | return True |
|
1978 | 1980 | return False |
|
1979 | 1981 | |
|
1980 | 1982 | |
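A sketch of calling the API permission checks above; unlike the template check functions, the scope and the user are passed explicitly as keyword arguments (apiuser and the names below are assumed placeholders):

has_repo_read = HasRepoPermissionAnyApi('repository.read', 'repository.admin')
granted = has_repo_read(
    user=apiuser,               # assumed: a User/AuthUser resolved by the API layer
    repo_name='some/repo',
    check_location='api-docs-example')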
|
1981 | 1983 | def check_ip_access(source_ip, allowed_ips=None): |
|
1982 | 1984 | """ |
|
1983 | 1985 | Checks if source_ip belongs to any of the allowed_ips networks.
|
1984 | 1986 | |
|
1985 | 1987 | :param source_ip: |
|
1986 | 1988 | :param allowed_ips: list of allowed ips together with mask |
|
1987 | 1989 | """ |
|
1988 | 1990 | log.debug('checking if ip:%s is a member of %s' % (source_ip, allowed_ips))
|
1989 | 1991 | source_ip_address = ipaddress.ip_address(source_ip) |
|
1990 | 1992 | if isinstance(allowed_ips, (tuple, list, set)): |
|
1991 | 1993 | for ip in allowed_ips: |
|
1992 | 1994 | try: |
|
1993 | 1995 | network_address = ipaddress.ip_network(ip, strict=False) |
|
1994 | 1996 | if source_ip_address in network_address: |
|
1995 | 1997 | log.debug('IP %s is in network %s' %
|
1996 | 1998 | (source_ip_address, network_address)) |
|
1997 | 1999 | return True |
|
1998 | 2000 | # in case we cannot determine the IP, don't crash, just

1999 | 2001 | # skip it and log it as an error; we still want to respond

2000 | 2002 | # with forbidden when a bad IP is sent
|
2001 | 2003 | except Exception: |
|
2002 | 2004 | log.error(traceback.format_exc()) |
|
2003 | 2005 | continue |
|
2004 | 2006 | return False |
|
2005 | 2007 | |
|
2006 | 2008 | |
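The containment test above can be reproduced standalone with the ipaddress module; the addresses are examples (unicode literals, as the py2 ipaddress backport expects text):

import ipaddress

allowed = [u'192.168.1.0/24', u'10.0.0.0/8']
source = ipaddress.ip_address(u'192.168.1.42')
print(any(source in ipaddress.ip_network(net, strict=False)
          for net in allowed))  # True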
|
2007 | 2009 | def get_cython_compat_decorator(wrapper, func): |
|
2008 | 2010 | """ |
|
2009 | 2011 | Creates a cython compatible decorator. The previously used |
|
2010 | 2012 | decorator.decorator() function seems to be incompatible with cython. |
|
2011 | 2013 | |
|
2012 | 2014 | :param wrapper: __wrapper method of the decorator class |
|
2013 | 2015 | :param func: decorated function |
|
2014 | 2016 | """ |
|
2015 | 2017 | @wraps(func) |
|
2016 | 2018 | def local_wrapper(*args, **kwds): |
|
2017 | 2019 | return wrapper(func, *args, **kwds) |
|
2018 | 2020 | local_wrapper.__wrapped__ = func |
|
2019 | 2021 | return local_wrapper |
|
2020 | 2022 | |
|
2021 | 2023 |
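A hypothetical decorator class wired through the helper, to show the intended shape: the class passes its wrapper method together with the decorated function and gets back a functools.wraps-preserving callable (Timed and its timing logic are assumptions, not RhodeCode code):

import time

class Timed(object):
    def __call__(self, func):
        # hand our private wrapper plus the target function to the helper
        return get_cython_compat_decorator(self.__wrapper, func)

    def __wrapper(self, func, *args, **kwds):
        start = time.time()
        try:
            return func(*args, **kwds)
        finally:
            log.debug('%s took %.3fs', func.__name__, time.time() - start)

@Timed()
def expensive():
    return 42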
@@ -1,220 +1,220 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import hashlib |
|
22 | 22 | import itsdangerous |
|
23 | 23 | import logging |
|
24 | 24 | import os |
|
25 | 25 | import requests |
|
26 | 26 | from dogpile.core import ReadWriteMutex |
|
27 | 27 | |
|
28 | 28 | import rhodecode.lib.helpers as h |
|
29 | 29 | from rhodecode.lib.auth import HasRepoPermissionAny |
|
30 | 30 | from rhodecode.lib.ext_json import json |
|
31 | 31 | from rhodecode.model.db import User |
|
32 | 32 | |
|
33 | 33 | log = logging.getLogger(__name__) |
|
34 | 34 | |
|
35 | 35 | LOCK = ReadWriteMutex() |
|
36 | 36 | |
|
37 | 37 | STATE_PUBLIC_KEYS = ['id', 'username', 'first_name', 'last_name', |
|
38 | 38 | 'icon_link', 'display_name', 'display_link'] |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | class ChannelstreamException(Exception): |
|
42 | 42 | pass |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | class ChannelstreamConnectionException(ChannelstreamException): |
|
46 | 46 | pass |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class ChannelstreamPermissionException(ChannelstreamException): |
|
50 | 50 | pass |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | def channelstream_request(config, payload, endpoint, raise_exc=True): |
|
54 | 54 | signer = itsdangerous.TimestampSigner(config['secret']) |
|
55 | 55 | sig_for_server = signer.sign(endpoint) |
|
56 | 56 | secret_headers = {'x-channelstream-secret': sig_for_server, |
|
57 | 57 | 'x-channelstream-endpoint': endpoint, |
|
58 | 58 | 'Content-Type': 'application/json'} |
|
59 | 59 | req_url = 'http://{}{}'.format(config['server'], endpoint) |
|
60 | 60 | response = None |
|
61 | 61 | try: |
|
62 | 62 | response = requests.post(req_url, data=json.dumps(payload), |
|
63 | 63 | headers=secret_headers).json() |
|
64 | 64 | except requests.ConnectionError: |
|
65 | 65 | log.exception('ConnectionError happened') |
|
66 | 66 | if raise_exc: |
|
67 | 67 | raise ChannelstreamConnectionException() |
|
68 | 68 | except Exception: |
|
69 | 69 | log.exception('Exception related to channelstream happened') |
|
70 | 70 | if raise_exc: |
|
71 | 71 | raise ChannelstreamConnectionException() |
|
72 | 72 | return response |
|
73 | 73 | |
|
74 | 74 | |
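A sketch of pushing a notification through the helper above; the config keys mirror what the function reads ('secret' and 'server'), the '/message' endpoint name follows channelstream's API, and all concrete values are placeholders:

config = {'secret': 'shared-secret', 'server': '127.0.0.1:8000'}
payload = [{
    'type': 'message',
    'user': 'system',
    'channel': 'broadcast',
    'message': {'message': 'maintenance in 10 minutes'},
}]
# raise_exc=False returns None instead of raising on connection problems
response = channelstream_request(config, payload, '/message', raise_exc=False)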
|
75 | 75 | def get_user_data(user_id): |
|
76 | 76 | user = User.get(user_id) |
|
77 | 77 | return { |
|
78 | 78 | 'id': user.user_id, |
|
79 | 79 | 'username': user.username, |
|
80 | 'first_name': user.name, | |
|
81 | 'last_name': user.lastname, | |
|
80 | 'first_name': user.first_name, | |
|
81 | 'last_name': user.last_name, | |
|
82 | 82 | 'icon_link': h.gravatar_url(user.email, 60), |
|
83 | 83 | 'display_name': h.person(user, 'username_or_name_or_email'), |
|
84 | 84 | 'display_link': h.link_to_user(user), |
|
85 | 85 | 'notifications': user.user_data.get('notification_status', True) |
|
86 | 86 | } |
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | def broadcast_validator(channel_name): |
|
90 | 90 | """ checks if user can access the broadcast channel """ |
|
91 | 91 | if channel_name == 'broadcast': |
|
92 | 92 | return True |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | def repo_validator(channel_name): |
|
96 | 96 | """ checks if user can access the broadcast channel """ |
|
97 | 97 | channel_prefix = '/repo$' |
|
98 | 98 | if channel_name.startswith(channel_prefix): |
|
99 | 99 | elements = channel_name[len(channel_prefix):].split('$') |
|
100 | 100 | repo_name = elements[0] |
|
101 | 101 | can_access = HasRepoPermissionAny( |
|
102 | 102 | 'repository.read', |
|
103 | 103 | 'repository.write', |
|
104 | 104 | 'repository.admin')(repo_name) |
|
105 | 105 | log.debug('permission check for {} channel ' |
|
106 | 106 | 'resulted in {}'.format(repo_name, can_access)) |
|
107 | 107 | if can_access: |
|
108 | 108 | return True |
|
109 | 109 | return False |
|
110 | 110 | |
|
111 | 111 | |
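The channel naming scheme the validator expects is '/repo$' followed by the repo name, with optional '$'-separated extras; a quick standalone illustration (the channel name is an example):

channel_name = '/repo$some/group/repo$chat'
prefix = '/repo$'
elements = channel_name[len(prefix):].split('$')
print(elements[0])  # 'some/group/repo' -> checked against repo permissions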
|
112 | 112 | def check_channel_permissions(channels, plugin_validators, should_raise=True): |
|
113 | 113 | valid_channels = [] |
|
114 | 114 | |
|
115 | 115 | validators = [broadcast_validator, repo_validator] |
|
116 | 116 | if plugin_validators: |
|
117 | 117 | validators.extend(plugin_validators) |
|
118 | 118 | for channel_name in channels: |
|
119 | 119 | is_valid = False |
|
120 | 120 | for validator in validators: |
|
121 | 121 | if validator(channel_name): |
|
122 | 122 | is_valid = True |
|
123 | 123 | break |
|
124 | 124 | if is_valid: |
|
125 | 125 | valid_channels.append(channel_name) |
|
126 | 126 | else: |
|
127 | 127 | if should_raise: |
|
128 | 128 | raise ChannelstreamPermissionException() |
|
129 | 129 | return valid_channels |
|
130 | 130 | |
|
131 | 131 | |
|
132 | 132 | def get_channels_info(self, channels): |
|
133 | 133 | payload = {'channels': channels} |
|
134 | 134 | # gather persistence info |
|
135 | 135 | return channelstream_request(self._config(), payload, '/info') |
|
136 | 136 | |
|
137 | 137 | |
|
138 | 138 | def parse_channels_info(info_result, include_channel_info=None): |
|
139 | 139 | """ |
|
140 | 140 | Returns data that contains only secure information that can be |
|
141 | 141 | presented to clients |
|
142 | 142 | """ |
|
143 | 143 | include_channel_info = include_channel_info or [] |
|
144 | 144 | |
|
145 | 145 | user_state_dict = {} |
|
146 | 146 | for userinfo in info_result['users']: |
|
147 | 147 | user_state_dict[userinfo['user']] = { |
|
148 | 148 | k: v for k, v in userinfo['state'].items() |
|
149 | 149 | if k in STATE_PUBLIC_KEYS |
|
150 | 150 | } |
|
151 | 151 | |
|
152 | 152 | channels_info = {} |
|
153 | 153 | |
|
154 | 154 | for c_name, c_info in info_result['channels'].items(): |
|
155 | 155 | if c_name not in include_channel_info: |
|
156 | 156 | continue |
|
157 | 157 | connected_list = [] |
|
158 | 158 | for userinfo in c_info['users']: |
|
159 | 159 | connected_list.append({ |
|
160 | 160 | 'user': userinfo['user'], |
|
161 | 161 | 'state': user_state_dict[userinfo['user']] |
|
162 | 162 | }) |
|
163 | 163 | channels_info[c_name] = {'users': connected_list, |
|
164 | 164 | 'history': c_info['history']} |
|
165 | 165 | |
|
166 | 166 | return channels_info |
|
167 | 167 | |
|
168 | 168 | |
|
169 | 169 | def log_filepath(history_location, channel_name): |
|
170 | 170 | hasher = hashlib.sha256() |
|
171 | 171 | hasher.update(channel_name.encode('utf8')) |
|
172 | 172 | filename = '{}.log'.format(hasher.hexdigest()) |
|
173 | 173 | filepath = os.path.join(history_location, filename) |
|
174 | 174 | return filepath |
|
175 | 175 | |
|
176 | 176 | |
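History files are thus keyed by the sha256 hex digest of the channel name; for example (the history directory below is an assumption):

import hashlib
import os

channel = u'broadcast'
digest = hashlib.sha256(channel.encode('utf8')).hexdigest()
print(os.path.join('/var/rhodecode/history', '{}.log'.format(digest)))
# -> /var/rhodecode/history/<64-hex-chars>.log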
|
177 | 177 | def read_history(history_location, channel_name): |
|
178 | 178 | filepath = log_filepath(history_location, channel_name) |
|
179 | 179 | if not os.path.exists(filepath): |
|
180 | 180 | return [] |
|
181 | 181 | history_lines_limit = -100 |
|
182 | 182 | history = [] |
|
183 | 183 | with open(filepath, 'rb') as f: |
|
184 | 184 | for line in f.readlines()[history_lines_limit:]: |
|
185 | 185 | try: |
|
186 | 186 | history.append(json.loads(line)) |
|
187 | 187 | except Exception: |
|
188 | 188 | log.exception('Failed to load history') |
|
189 | 189 | return history |
|
190 | 190 | |
|
191 | 191 | |
|
192 | 192 | def update_history_from_logs(config, channels, payload): |
|
193 | 193 | history_location = config.get('history.location') |
|
194 | 194 | for channel in channels: |
|
195 | 195 | history = read_history(history_location, channel) |
|
196 | 196 | payload['channels_info'][channel]['history'] = history |
|
197 | 197 | |
|
198 | 198 | |
|
199 | 199 | def write_history(config, message): |
|
200 | 200 | """ writes a messge to a base64encoded filename """ |
|
201 | 201 | history_location = config.get('history.location') |
|
202 | 202 | if not os.path.exists(history_location): |
|
203 | 203 | return |
|
204 | 204 | try: |
|
205 | 205 | LOCK.acquire_write_lock() |
|
206 | 206 | filepath = log_filepath(history_location, message['channel']) |
|
207 | 207 | with open(filepath, 'ab') as f: |
|
208 | 208 | json.dump(message, f) |
|
209 | 209 | f.write('\n') |
|
210 | 210 | finally: |
|
211 | 211 | LOCK.release_write_lock() |
|
212 | 212 | |
|
213 | 213 | |
|
214 | 214 | def get_connection_validators(registry): |
|
215 | 215 | validators = [] |
|
216 | 216 | for k, config in registry.rhodecode_plugins.iteritems(): |
|
217 | 217 | validator = config.get('channelstream', {}).get('connect_validator') |
|
218 | 218 | if validator: |
|
219 | 219 | validators.append(validator) |
|
220 | 220 | return validators |
@@ -1,2035 +1,2035 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Helper functions |
|
23 | 23 | |
|
24 | 24 | Consists of functions typically used within templates, but also
|
25 | 25 | available to Controllers. This module is available to both as 'h'. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import random |
|
29 | 29 | import hashlib |
|
30 | 30 | import StringIO |
|
31 | 31 | import urllib |
|
32 | 32 | import math |
|
33 | 33 | import logging |
|
34 | 34 | import re |
|
35 | 35 | import urlparse |
|
36 | 36 | import time |
|
37 | 37 | import string |
|
38 | 38 | import hashlib |
|
39 | 39 | from collections import OrderedDict |
|
40 | 40 | |
|
41 | 41 | import pygments |
|
42 | 42 | import itertools |
|
43 | 43 | import fnmatch |
|
44 | 44 | |
|
45 | 45 | from datetime import datetime |
|
46 | 46 | from functools import partial |
|
47 | 47 | from pygments.formatters.html import HtmlFormatter |
|
48 | 48 | from pygments import highlight as code_highlight |
|
49 | 49 | from pygments.lexers import ( |
|
50 | 50 | get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype) |
|
51 | 51 | from pylons import url as pylons_url |
|
52 | 52 | from pylons.i18n.translation import _, ungettext |
|
53 | 53 | from pyramid.threadlocal import get_current_request |
|
54 | 54 | |
|
55 | 55 | from webhelpers.html import literal, HTML, escape |
|
56 | 56 | from webhelpers.html.tools import * |
|
57 | 57 | from webhelpers.html.builder import make_tag |
|
58 | 58 | from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \ |
|
59 | 59 | end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \ |
|
60 | 60 | link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \ |
|
61 | 61 | submit, text, password, textarea, title, ul, xml_declaration, radio |
|
62 | 62 | from webhelpers.html.tools import auto_link, button_to, highlight, \ |
|
63 | 63 | js_obfuscate, mail_to, strip_links, strip_tags, tag_re |
|
64 | 64 | from webhelpers.pylonslib import Flash as _Flash |
|
65 | 65 | from webhelpers.text import chop_at, collapse, convert_accented_entities, \ |
|
66 | 66 | convert_misc_entities, lchop, plural, rchop, remove_formatting, \ |
|
67 | 67 | replace_whitespace, urlify, truncate, wrap_paragraphs |
|
68 | 68 | from webhelpers.date import time_ago_in_words |
|
69 | 69 | from webhelpers.paginate import Page as _Page |
|
70 | 70 | from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \ |
|
71 | 71 | convert_boolean_attrs, NotGiven, _make_safe_id_component |
|
72 | 72 | from webhelpers2.number import format_byte_size |
|
73 | 73 | |
|
74 | 74 | from rhodecode.lib.action_parser import action_parser |
|
75 | 75 | from rhodecode.lib.ext_json import json |
|
76 | 76 | from rhodecode.lib.utils import repo_name_slug, get_custom_lexer |
|
77 | 77 | from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \ |
|
78 | 78 | get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \ |
|
79 | 79 | AttributeDict, safe_int, md5, md5_safe |
|
80 | 80 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links |
|
81 | 81 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
82 | 82 | from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit |
|
83 | 83 | from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT |
|
84 | 84 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
85 | 85 | from rhodecode.model.db import Permission, User, Repository |
|
86 | 86 | from rhodecode.model.repo_group import RepoGroupModel |
|
87 | 87 | from rhodecode.model.settings import IssueTrackerSettingsModel |
|
88 | 88 | |
|
89 | 89 | log = logging.getLogger(__name__) |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | DEFAULT_USER = User.DEFAULT_USER |
|
93 | 93 | DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL |
|
94 | 94 | |
|
95 | 95 | |
|
96 | 96 | def url(*args, **kw): |
|
97 | 97 | return pylons_url(*args, **kw) |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | def pylons_url_current(*args, **kw): |
|
101 | 101 | """ |
|
102 | 102 | This function overrides pylons.url.current() which returns the current |
|
103 | 103 | path so that it will also work from a pyramid only context. This |
|
104 | 104 | should be removed once port to pyramid is complete. |
|
105 | 105 | """ |
|
106 | 106 | if not args and not kw: |
|
107 | 107 | request = get_current_request() |
|
108 | 108 | return request.path |
|
109 | 109 | return pylons_url.current(*args, **kw) |
|
110 | 110 | |
|
111 | 111 | url.current = pylons_url_current |
|
112 | 112 | |
|
113 | 113 | |
|
114 | 114 | def url_replace(**qargs): |
|
115 | 115 | """ Returns the current request url while replacing query string args """ |
|
116 | 116 | |
|
117 | 117 | request = get_current_request() |
|
118 | 118 | new_args = request.GET.mixed() |
|
119 | 119 | new_args.update(qargs) |
|
120 | 120 | return url('', **new_args) |
|
121 | 121 | |
|
122 | 122 | |
|
123 | 123 | def asset(path, ver=None, **kwargs): |
|
124 | 124 | """ |
|
125 | 125 | Helper to generate a static asset file path for rhodecode assets |
|
126 | 126 | |
|
127 | 127 | eg. h.asset('images/image.png', ver='3923') |
|
128 | 128 | |
|
129 | 129 | :param path: path of asset |
|
130 | 130 | :param ver: optional version query param to append as ?ver= |
|
131 | 131 | """ |
|
132 | 132 | request = get_current_request() |
|
133 | 133 | query = {} |
|
134 | 134 | query.update(kwargs) |
|
135 | 135 | if ver: |
|
136 | 136 | query = {'ver': ver} |
|
137 | 137 | return request.static_path( |
|
138 | 138 | 'rhodecode:public/{}'.format(path), _query=query) |
|
139 | 139 | |
|
140 | 140 | |
|
141 | 141 | default_html_escape_table = { |
|
142 | 142 | ord('&'): u'&', |
|
143 | 143 | ord('<'): u'<', |
|
144 | 144 | ord('>'): u'>', |
|
145 | 145 | ord('"'): u'"', |
|
146 | 146 | ord("'"): u''', |
|
147 | 147 | } |
|
148 | 148 | |
|
149 | 149 | |
|
150 | 150 | def html_escape(text, html_escape_table=default_html_escape_table): |
|
151 | 151 | """Produce entities within text.""" |
|
152 | 152 | return text.translate(html_escape_table) |
|
153 | 153 | |
|
154 | 154 | |
|
155 | 155 | def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None): |
|
156 | 156 | """ |
|
157 | 157 | Truncate string ``s`` at the first occurrence of ``sub``. |
|
158 | 158 | |
|
159 | 159 | If ``inclusive`` is true, truncate just after ``sub`` rather than at it. |
|
160 | 160 | """ |
|
161 | 161 | suffix_if_chopped = suffix_if_chopped or '' |
|
162 | 162 | pos = s.find(sub) |
|
163 | 163 | if pos == -1: |
|
164 | 164 | return s |
|
165 | 165 | |
|
166 | 166 | if inclusive: |
|
167 | 167 | pos += len(sub) |
|
168 | 168 | |
|
169 | 169 | chopped = s[:pos] |
|
170 | 170 | left = s[pos:].strip() |
|
171 | 171 | |
|
172 | 172 | if left and suffix_if_chopped: |
|
173 | 173 | chopped += suffix_if_chopped |
|
174 | 174 | |
|
175 | 175 | return chopped |
|
176 | 176 | |
|
177 | 177 | |
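For example, chopping at the first occurrence of a substring, with and without keeping the matched text (a sketch of the two modes):

print(chop_at_smart('this is a story', 'is a', suffix_if_chopped='...'))
# -> 'this ...'
print(chop_at_smart('this is a story', 'is a', inclusive=True))
# -> 'this is a'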
|
178 | 178 | def shorter(text, size=20): |
|
179 | 179 | postfix = '...' |
|
180 | 180 | if len(text) > size: |
|
181 | 181 | return text[:size - len(postfix)] + postfix |
|
182 | 182 | return text |
|
183 | 183 | |
|
184 | 184 | |
|
185 | 185 | def _reset(name, value=None, id=NotGiven, type="reset", **attrs): |
|
186 | 186 | """ |
|
187 | 187 | Reset button |
|
188 | 188 | """ |
|
189 | 189 | _set_input_attrs(attrs, type, name, value) |
|
190 | 190 | _set_id_attr(attrs, id, name) |
|
191 | 191 | convert_boolean_attrs(attrs, ["disabled"]) |
|
192 | 192 | return HTML.input(**attrs) |
|
193 | 193 | |
|
194 | 194 | reset = _reset |
|
195 | 195 | safeid = _make_safe_id_component |
|
196 | 196 | |
|
197 | 197 | |
|
198 | 198 | def branding(name, length=40): |
|
199 | 199 | return truncate(name, length, indicator="") |
|
200 | 200 | |
|
201 | 201 | |
|
202 | 202 | def FID(raw_id, path): |
|
203 | 203 | """ |
|
204 | 204 | Creates a unique ID for a filenode based on a hash of its path and commit;

205 | 205 | it's safe to use in urls
|
206 | 206 | |
|
207 | 207 | :param raw_id: |
|
208 | 208 | :param path: |
|
209 | 209 | """ |
|
210 | 210 | |
|
211 | 211 | return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12]) |
|
212 | 212 | |
|
213 | 213 | |
|
214 | 214 | class _GetError(object): |
|
215 | 215 | """Get error from form_errors, and represent it as span wrapped error |
|
216 | 216 | message |
|
217 | 217 | |
|
218 | 218 | :param field_name: field to fetch errors for |
|
219 | 219 | :param form_errors: form errors dict |
|
220 | 220 | """ |
|
221 | 221 | |
|
222 | 222 | def __call__(self, field_name, form_errors): |
|
223 | 223 | tmpl = """<span class="error_msg">%s</span>""" |
|
224 | 224 | if form_errors and field_name in form_errors: |
|
225 | 225 | return literal(tmpl % form_errors.get(field_name)) |
|
226 | 226 | |
|
227 | 227 | get_error = _GetError() |
|
228 | 228 | |
|
229 | 229 | |
|
230 | 230 | class _ToolTip(object): |
|
231 | 231 | |
|
232 | 232 | def __call__(self, tooltip_title, trim_at=50): |
|
233 | 233 | """ |
|
234 | 234 | Special function to wrap tooltip text into nicely formatted,

235 | 235 | auto-wrapped text
|
236 | 236 | |
|
237 | 237 | :param tooltip_title: |
|
238 | 238 | """ |
|
239 | 239 | tooltip_title = escape(tooltip_title) |
|
240 | 240 | tooltip_title = tooltip_title.replace('<', '<').replace('>', '>') |
|
241 | 241 | return tooltip_title |
|
242 | 242 | tooltip = _ToolTip() |
|
243 | 243 | |
|
244 | 244 | |
|
245 | 245 | def files_breadcrumbs(repo_name, commit_id, file_path): |
|
246 | 246 | if isinstance(file_path, str): |
|
247 | 247 | file_path = safe_unicode(file_path) |
|
248 | 248 | |
|
249 | 249 | # TODO: johbo: Is this always a url like path, or is this operating |
|
250 | 250 | # system dependent? |
|
251 | 251 | path_segments = file_path.split('/') |
|
252 | 252 | |
|
253 | 253 | repo_name_html = escape(repo_name) |
|
254 | 254 | if len(path_segments) == 1 and path_segments[0] == '': |
|
255 | 255 | url_segments = [repo_name_html] |
|
256 | 256 | else: |
|
257 | 257 | url_segments = [ |
|
258 | 258 | link_to( |
|
259 | 259 | repo_name_html, |
|
260 | 260 | url('files_home', |
|
261 | 261 | repo_name=repo_name, |
|
262 | 262 | revision=commit_id, |
|
263 | 263 | f_path=''), |
|
264 | 264 | class_='pjax-link')] |
|
265 | 265 | |
|
266 | 266 | last_cnt = len(path_segments) - 1 |
|
267 | 267 | for cnt, segment in enumerate(path_segments): |
|
268 | 268 | if not segment: |
|
269 | 269 | continue |
|
270 | 270 | segment_html = escape(segment) |
|
271 | 271 | |
|
272 | 272 | if cnt != last_cnt: |
|
273 | 273 | url_segments.append( |
|
274 | 274 | link_to( |
|
275 | 275 | segment_html, |
|
276 | 276 | url('files_home', |
|
277 | 277 | repo_name=repo_name, |
|
278 | 278 | revision=commit_id, |
|
279 | 279 | f_path='/'.join(path_segments[:cnt + 1])), |
|
280 | 280 | class_='pjax-link')) |
|
281 | 281 | else: |
|
282 | 282 | url_segments.append(segment_html) |
|
283 | 283 | |
|
284 | 284 | return literal('/'.join(url_segments)) |
|
285 | 285 | |
|
286 | 286 | |
|
287 | 287 | class CodeHtmlFormatter(HtmlFormatter): |
|
288 | 288 | """ |
|
289 | 289 | Custom HTML formatter for source code
|
290 | 290 | """ |
|
291 | 291 | |
|
292 | 292 | def wrap(self, source, outfile): |
|
293 | 293 | return self._wrap_div(self._wrap_pre(self._wrap_code(source))) |
|
294 | 294 | |
|
295 | 295 | def _wrap_code(self, source): |
|
296 | 296 | for cnt, it in enumerate(source): |
|
297 | 297 | i, t = it |
|
298 | 298 | t = '<div id="L%s">%s</div>' % (cnt + 1, t) |
|
299 | 299 | yield i, t |
|
300 | 300 | |
|
301 | 301 | def _wrap_tablelinenos(self, inner): |
|
302 | 302 | dummyoutfile = StringIO.StringIO() |
|
303 | 303 | lncount = 0 |
|
304 | 304 | for t, line in inner: |
|
305 | 305 | if t: |
|
306 | 306 | lncount += 1 |
|
307 | 307 | dummyoutfile.write(line) |
|
308 | 308 | |
|
309 | 309 | fl = self.linenostart |
|
310 | 310 | mw = len(str(lncount + fl - 1)) |
|
311 | 311 | sp = self.linenospecial |
|
312 | 312 | st = self.linenostep |
|
313 | 313 | la = self.lineanchors |
|
314 | 314 | aln = self.anchorlinenos |
|
315 | 315 | nocls = self.noclasses |
|
316 | 316 | if sp: |
|
317 | 317 | lines = [] |
|
318 | 318 | |
|
319 | 319 | for i in range(fl, fl + lncount): |
|
320 | 320 | if i % st == 0: |
|
321 | 321 | if i % sp == 0: |
|
322 | 322 | if aln: |
|
323 | 323 | lines.append('<a href="#%s%d" class="special">%*d</a>' % |
|
324 | 324 | (la, i, mw, i)) |
|
325 | 325 | else: |
|
326 | 326 | lines.append('<span class="special">%*d</span>' % (mw, i)) |
|
327 | 327 | else: |
|
328 | 328 | if aln: |
|
329 | 329 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
330 | 330 | else: |
|
331 | 331 | lines.append('%*d' % (mw, i)) |
|
332 | 332 | else: |
|
333 | 333 | lines.append('') |
|
334 | 334 | ls = '\n'.join(lines) |
|
335 | 335 | else: |
|
336 | 336 | lines = [] |
|
337 | 337 | for i in range(fl, fl + lncount): |
|
338 | 338 | if i % st == 0: |
|
339 | 339 | if aln: |
|
340 | 340 | lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i)) |
|
341 | 341 | else: |
|
342 | 342 | lines.append('%*d' % (mw, i)) |
|
343 | 343 | else: |
|
344 | 344 | lines.append('') |
|
345 | 345 | ls = '\n'.join(lines) |
|
346 | 346 | |
|
347 | 347 | # in case you wonder about the seemingly redundant <div> here: since the |
|
348 | 348 | # content in the other cell also is wrapped in a div, some browsers in |
|
349 | 349 | # some configurations seem to mess up the formatting... |
|
350 | 350 | if nocls: |
|
351 | 351 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
352 | 352 | '<tr><td><div class="linenodiv" ' |
|
353 | 353 | 'style="background-color: #f0f0f0; padding-right: 10px">' |
|
354 | 354 | '<pre style="line-height: 125%">' + |
|
355 | 355 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
356 | 356 | else: |
|
357 | 357 | yield 0, ('<table class="%stable">' % self.cssclass + |
|
358 | 358 | '<tr><td class="linenos"><div class="linenodiv"><pre>' + |
|
359 | 359 | ls + '</pre></div></td><td id="hlcode" class="code">') |
|
360 | 360 | yield 0, dummyoutfile.getvalue() |
|
361 | 361 | yield 0, '</td></tr></table>' |
|
362 | 362 | |
|
363 | 363 | |
|
364 | 364 | class SearchContentCodeHtmlFormatter(CodeHtmlFormatter): |
|
365 | 365 | def __init__(self, **kw): |
|
366 | 366 | # only show these line numbers if set |
|
367 | 367 | self.only_lines = kw.pop('only_line_numbers', []) |
|
368 | 368 | self.query_terms = kw.pop('query_terms', []) |
|
369 | 369 | self.max_lines = kw.pop('max_lines', 5) |
|
370 | 370 | self.line_context = kw.pop('line_context', 3) |
|
371 | 371 | self.url = kw.pop('url', None) |
|
372 | 372 | |
|
373 | 373 | super(CodeHtmlFormatter, self).__init__(**kw) |
|
374 | 374 | |
|
375 | 375 | def _wrap_code(self, source): |
|
376 | 376 | for cnt, it in enumerate(source): |
|
377 | 377 | i, t = it |
|
378 | 378 | t = '<pre>%s</pre>' % t |
|
379 | 379 | yield i, t |
|
380 | 380 | |
|
381 | 381 | def _wrap_tablelinenos(self, inner): |
|
382 | 382 | yield 0, '<table class="code-highlight %stable">' % self.cssclass |
|
383 | 383 | |
|
384 | 384 | last_shown_line_number = 0 |
|
385 | 385 | current_line_number = 1 |
|
386 | 386 | |
|
387 | 387 | for t, line in inner: |
|
388 | 388 | if not t: |
|
389 | 389 | yield t, line |
|
390 | 390 | continue |
|
391 | 391 | |
|
392 | 392 | if current_line_number in self.only_lines: |
|
393 | 393 | if last_shown_line_number + 1 != current_line_number: |
|
394 | 394 | yield 0, '<tr>' |
|
395 | 395 | yield 0, '<td class="line">...</td>' |
|
396 | 396 | yield 0, '<td id="hlcode" class="code"></td>' |
|
397 | 397 | yield 0, '</tr>' |
|
398 | 398 | |
|
399 | 399 | yield 0, '<tr>' |
|
400 | 400 | if self.url: |
|
401 | 401 | yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % ( |
|
402 | 402 | self.url, current_line_number, current_line_number) |
|
403 | 403 | else: |
|
404 | 404 | yield 0, '<td class="line"><a href="">%i</a></td>' % ( |
|
405 | 405 | current_line_number) |
|
406 | 406 | yield 0, '<td id="hlcode" class="code">' + line + '</td>' |
|
407 | 407 | yield 0, '</tr>' |
|
408 | 408 | |
|
409 | 409 | last_shown_line_number = current_line_number |
|
410 | 410 | |
|
411 | 411 | current_line_number += 1 |
|
412 | 412 | |
|
413 | 413 | |
|
414 | 414 | yield 0, '</table>' |
|
415 | 415 | |
|
416 | 416 | |
|
417 | 417 | def extract_phrases(text_query): |
|
418 | 418 | """ |
|
419 | 419 | Extracts phrases from a search term string, making sure phrases

420 | 420 | contained in double quotes are kept together, and discarding empty

421 | 421 | or whitespace-only values, eg.
|
422 | 422 | |
|
423 | 423 | 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more'] |
|
424 | 424 | |
|
425 | 425 | """ |
|
426 | 426 | |
|
427 | 427 | in_phrase = False |
|
428 | 428 | buf = '' |
|
429 | 429 | phrases = [] |
|
430 | 430 | for char in text_query: |
|
431 | 431 | if in_phrase: |
|
432 | 432 | if char == '"': # end phrase |
|
433 | 433 | phrases.append(buf) |
|
434 | 434 | buf = '' |
|
435 | 435 | in_phrase = False |
|
436 | 436 | continue |
|
437 | 437 | else: |
|
438 | 438 | buf += char |
|
439 | 439 | continue |
|
440 | 440 | else: |
|
441 | 441 | if char == '"': # start phrase |
|
442 | 442 | in_phrase = True |
|
443 | 443 | phrases.append(buf) |
|
444 | 444 | buf = '' |
|
445 | 445 | continue |
|
446 | 446 | elif char == ' ': |
|
447 | 447 | phrases.append(buf) |
|
448 | 448 | buf = '' |
|
449 | 449 | continue |
|
450 | 450 | else: |
|
451 | 451 | buf += char |
|
452 | 452 | |
|
453 | 453 | phrases.append(buf) |
|
454 | 454 | phrases = [phrase.strip() for phrase in phrases if phrase.strip()] |
|
455 | 455 | return phrases |
|
456 | 456 | |
|
457 | 457 | |
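For example, quoted phrases survive intact while stray whitespace is dropped:

print(extract_phrases('some text "a phrase" more'))
# -> ['some', 'text', 'a phrase', 'more']
print(extract_phrases('  "only phrase"  '))
# -> ['only phrase']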
|
458 | 458 | def get_matching_offsets(text, phrases): |
|
459 | 459 | """ |
|
460 | 460 | Returns a list of string offsets in `text` that the list of `phrases` match
|
461 | 461 | |
|
462 | 462 | >>> get_matching_offsets('some text here', ['some', 'here']) |
|
463 | 463 | [(0, 4), (10, 14)] |
|
464 | 464 | |
|
465 | 465 | """ |
|
466 | 466 | offsets = [] |
|
467 | 467 | for phrase in phrases: |
|
468 | 468 | for match in re.finditer(phrase, text): |
|
469 | 469 | offsets.append((match.start(), match.end())) |
|
470 | 470 | |
|
471 | 471 | return offsets |
|
472 | 472 | |
|
473 | 473 | |
|
474 | 474 | def normalize_text_for_matching(x): |
|
475 | 475 | """ |
|
476 | 476 | Replaces all non alnum characters to spaces and lower cases the string, |
|
477 | 477 | useful for comparing two text strings without punctuation |
|
478 | 478 | """ |
|
479 | 479 | return re.sub(r'[^\w]', ' ', x.lower()) |
|
480 | 480 | |
|
481 | 481 | |
|
482 | 482 | def get_matching_line_offsets(lines, terms): |
|
483 | 483 | """ Return a set of `lines` indices (starting from 1) matching a |
|
484 | 484 | text search query, along with `context` lines above/below matching lines |
|
485 | 485 | |
|
486 | 486 | :param lines: list of strings representing lines |
|
487 | 487 | :param terms: search term string to match in lines eg. 'some text' |
|
488 | 488 | (the `context` / `max_lines` handling happens in the formatter that

489 | 489 | consumes this result, e.g. SearchContentCodeHtmlFormatter, not here)
|
490 | 490 | eg. |
|
491 | 491 | |
|
492 | 492 | text = ''' |
|
493 | 493 | words words words |
|
494 | 494 | words words words |
|
495 | 495 | some text some |
|
496 | 496 | words words words |
|
497 | 497 | words words words |
|
498 | 498 | text here what |
|
499 | 499 | ''' |
|
500 | 500 | get_matching_line_offsets(text.splitlines(), 'text')

501 | 501 | {4: [(5, 9)], 7: [(0, 4)]}
|
502 | 502 | |
|
503 | 503 | """ |
|
504 | 504 | matching_lines = {} |
|
505 | 505 | phrases = [normalize_text_for_matching(phrase) |
|
506 | 506 | for phrase in extract_phrases(terms)] |
|
507 | 507 | |
|
508 | 508 | for line_index, line in enumerate(lines, start=1): |
|
509 | 509 | match_offsets = get_matching_offsets( |
|
510 | 510 | normalize_text_for_matching(line), phrases) |
|
511 | 511 | if match_offsets: |
|
512 | 512 | matching_lines[line_index] = match_offsets |
|
513 | 513 | |
|
514 | 514 | return matching_lines |
|
515 | 515 | |
|
516 | 516 | |
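Putting the helpers together: both the lines and the terms go through normalize_text_for_matching, so punctuation does not break matching (the input below is an example):

lines = ['words words words',
         'some text some',
         'text here what']
print(get_matching_line_offsets(lines, 'text'))
# -> {2: [(5, 9)], 3: [(0, 4)]}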
|
517 | 517 | def hsv_to_rgb(h, s, v): |
|
518 | 518 | """ Convert hsv color values to rgb """ |
|
519 | 519 | |
|
520 | 520 | if s == 0.0: |
|
521 | 521 | return v, v, v |
|
522 | 522 | i = int(h * 6.0) # XXX assume int() truncates! |
|
523 | 523 | f = (h * 6.0) - i |
|
524 | 524 | p = v * (1.0 - s) |
|
525 | 525 | q = v * (1.0 - s * f) |
|
526 | 526 | t = v * (1.0 - s * (1.0 - f)) |
|
527 | 527 | i = i % 6 |
|
528 | 528 | if i == 0: |
|
529 | 529 | return v, t, p |
|
530 | 530 | if i == 1: |
|
531 | 531 | return q, v, p |
|
532 | 532 | if i == 2: |
|
533 | 533 | return p, v, t |
|
534 | 534 | if i == 3: |
|
535 | 535 | return p, q, v |
|
536 | 536 | if i == 4: |
|
537 | 537 | return t, p, v |
|
538 | 538 | if i == 5: |
|
539 | 539 | return v, p, q |
|
540 | 540 | |
|
541 | 541 | |
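A few spot checks of the conversion; all inputs and outputs are in the [0, 1] range:

print(hsv_to_rgb(0.0, 1.0, 1.0))   # (1.0, 0.0, 0.0) -> pure red
print(hsv_to_rgb(0.5, 1.0, 1.0))   # (0.0, 1.0, 1.0) -> cyan
print(hsv_to_rgb(0.5, 0.0, 0.75))  # (0.75, 0.75, 0.75), s == 0 gives grey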
|
542 | 542 | def unique_color_generator(n=10000, saturation=0.10, lightness=0.95): |
|
543 | 543 | """ |
|
544 | 544 | Generator for getting n evenly distributed colors using

545 | 545 | HSV color and the golden ratio. It always returns the same order of colors
|
546 | 546 | |
|
547 | 547 | :param n: number of colors to generate |
|
548 | 548 | :param saturation: saturation of returned colors |
|
549 | 549 | :param lightness: lightness of returned colors |
|
550 | 550 | :returns: RGB tuple |
|
551 | 551 | """ |
|
552 | 552 | |
|
553 | 553 | golden_ratio = 0.618033988749895 |
|
554 | 554 | h = 0.22717784590367374 |
|
555 | 555 | |
|
556 | 556 | for _ in xrange(n): |
|
557 | 557 | h += golden_ratio |
|
558 | 558 | h %= 1 |
|
559 | 559 | HSV_tuple = [h, saturation, lightness] |
|
560 | 560 | RGB_tuple = hsv_to_rgb(*HSV_tuple) |
|
561 | 561 | yield map(lambda x: str(int(x * 256)), RGB_tuple) |
|
562 | 562 | |
|
563 | 563 | |
|
564 | 564 | def color_hasher(n=10000, saturation=0.10, lightness=0.95): |
|
565 | 565 | """ |
|
566 | 566 | Returns a function which, when called with an argument, returns a unique
|
567 | 567 | color for that argument, eg. |
|
568 | 568 | |
|
569 | 569 | :param n: number of colors to generate |
|
570 | 570 | :param saturation: saturation of returned colors |
|
571 | 571 | :param lightness: lightness of returned colors |
|
572 | 572 | :returns: css RGB string |
|
573 | 573 | |
|
574 | 574 | >>> color_hash = color_hasher() |
|
575 | 575 | >>> color_hash('hello') |
|
576 | 576 | 'rgb(34, 12, 59)' |
|
577 | 577 | >>> color_hash('hello') |
|
578 | 578 | 'rgb(34, 12, 59)' |
|
579 | 579 | >>> color_hash('other') |
|
580 | 580 | 'rgb(90, 224, 159)' |
|
581 | 581 | """ |
|
582 | 582 | |
|
583 | 583 | color_dict = {} |
|
584 | 584 | cgenerator = unique_color_generator( |
|
585 | 585 | saturation=saturation, lightness=lightness) |
|
586 | 586 | |
|
587 | 587 | def get_color_string(thing): |
|
588 | 588 | if thing in color_dict: |
|
589 | 589 | col = color_dict[thing] |
|
590 | 590 | else: |
|
591 | 591 | col = color_dict[thing] = cgenerator.next() |
|
592 | 592 | return "rgb(%s)" % (', '.join(col)) |
|
593 | 593 | |
|
594 | 594 | return get_color_string |
|
595 | 595 | |
|
596 | 596 | |
|
597 | 597 | def get_lexer_safe(mimetype=None, filepath=None): |
|
598 | 598 | """ |
|
599 | 599 | Tries to return a relevant pygments lexer using mimetype/filepath name, |
|
600 | 600 | defaulting to plain text if none could be found |
|
601 | 601 | """ |
|
602 | 602 | lexer = None |
|
603 | 603 | try: |
|
604 | 604 | if mimetype: |
|
605 | 605 | lexer = get_lexer_for_mimetype(mimetype) |
|
606 | 606 | if not lexer: |
|
607 | 607 | lexer = get_lexer_for_filename(filepath) |
|
608 | 608 | except pygments.util.ClassNotFound: |
|
609 | 609 | pass |
|
610 | 610 | |
|
611 | 611 | if not lexer: |
|
612 | 612 | lexer = get_lexer_by_name('text') |
|
613 | 613 | |
|
614 | 614 | return lexer |
|
615 | 615 | |
|
616 | 616 | |
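For example, resolving lexers via the fallback chain (the output names reflect pygments' stock lexers and may vary across versions):

print(get_lexer_safe(mimetype='text/x-python').name)  # 'Python'
print(get_lexer_safe(filepath='setup.cfg').name)      # 'INI'
print(get_lexer_safe(filepath='unknown.zzz').name)    # 'Text only' fallback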
|
617 | 617 | def get_lexer_for_filenode(filenode): |
|
618 | 618 | lexer = get_custom_lexer(filenode.extension) or filenode.lexer |
|
619 | 619 | return lexer |
|
620 | 620 | |
|
621 | 621 | |
|
622 | 622 | def pygmentize(filenode, **kwargs): |
|
623 | 623 | """ |
|
624 | 624 | pygmentize function using pygments |
|
625 | 625 | |
|
626 | 626 | :param filenode: |
|
627 | 627 | """ |
|
628 | 628 | lexer = get_lexer_for_filenode(filenode) |
|
629 | 629 | return literal(code_highlight(filenode.content, lexer, |
|
630 | 630 | CodeHtmlFormatter(**kwargs))) |
|
631 | 631 | |
|
632 | 632 | |
|
633 | 633 | def is_following_repo(repo_name, user_id): |
|
634 | 634 | from rhodecode.model.scm import ScmModel |
|
635 | 635 | return ScmModel().is_following_repo(repo_name, user_id) |
|
636 | 636 | |
|
637 | 637 | |
|
638 | 638 | class _Message(object): |
|
639 | 639 | """A message returned by ``Flash.pop_messages()``. |
|
640 | 640 | |
|
641 | 641 | Converting the message to a string returns the message text. Instances |
|
642 | 642 | also have the following attributes: |
|
643 | 643 | |
|
644 | 644 | * ``message``: the message text. |
|
645 | 645 | * ``category``: the category specified when the message was created. |
|
646 | 646 | """ |
|
647 | 647 | |
|
648 | 648 | def __init__(self, category, message): |
|
649 | 649 | self.category = category |
|
650 | 650 | self.message = message |
|
651 | 651 | |
|
652 | 652 | def __str__(self): |
|
653 | 653 | return self.message |
|
654 | 654 | |
|
655 | 655 | __unicode__ = __str__ |
|
656 | 656 | |
|
657 | 657 | def __html__(self): |
|
658 | 658 | return escape(safe_unicode(self.message)) |
|
659 | 659 | |
|
660 | 660 | |
|
661 | 661 | class Flash(_Flash): |
|
662 | 662 | |
|
663 | 663 | def pop_messages(self): |
|
664 | 664 | """Return all accumulated messages and delete them from the session. |
|
665 | 665 | |
|
666 | 666 | The return value is a list of ``Message`` objects. |
|
667 | 667 | """ |
|
668 | 668 | from pylons import session |
|
669 | 669 | |
|
670 | 670 | messages = [] |
|
671 | 671 | |
|
672 | 672 | # Pop the 'old' pylons flash messages. They are tuples of the form |
|
673 | 673 | # (category, message) |
|
674 | 674 | for cat, msg in session.pop(self.session_key, []): |
|
675 | 675 | messages.append(_Message(cat, msg)) |
|
676 | 676 | |
|
677 | 677 | # Pop the 'new' pyramid flash messages for each category as list |
|
678 | 678 | # of strings. |
|
679 | 679 | for cat in self.categories: |
|
680 | 680 | for msg in session.pop_flash(queue=cat): |
|
681 | 681 | messages.append(_Message(cat, msg)) |
|
682 | 682 | # Map messages from the default queue to the 'notice' category. |
|
683 | 683 | for msg in session.pop_flash(): |
|
684 | 684 | messages.append(_Message('notice', msg)) |
|
685 | 685 | |
|
686 | 686 | session.save() |
|
687 | 687 | return messages |
|
688 | 688 | |
|
689 | 689 | def json_alerts(self): |
|
690 | 690 | payloads = [] |
|
691 | 691 | messages = flash.pop_messages() |
|
692 | 692 | if messages: |
|
693 | 693 | for message in messages: |
|
694 | 694 | subdata = {} |
|
695 | 695 | if hasattr(message.message, 'rsplit'): |
|
696 | 696 | flash_data = message.message.rsplit('|DELIM|', 1) |
|
697 | 697 | org_message = flash_data[0] |
|
698 | 698 | if len(flash_data) > 1: |
|
699 | 699 | subdata = json.loads(flash_data[1]) |
|
700 | 700 | else: |
|
701 | 701 | org_message = message.message |
|
702 | 702 | payloads.append({ |
|
703 | 703 | 'message': { |
|
704 | 704 | 'message': u'{}'.format(org_message), |
|
705 | 705 | 'level': message.category, |
|
706 | 706 | 'force': True, |
|
707 | 707 | 'subdata': subdata |
|
708 | 708 | } |
|
709 | 709 | }) |
|
710 | 710 | return json.dumps(payloads) |
|
711 | 711 | |
|
712 | 712 | flash = Flash() |
|
713 | 713 | |
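A sketch of the message convention that `json_alerts()` parses: either a plain string, or a string with JSON subdata appended after the `|DELIM|` marker (the payloads below are made up):

    # plain message; json_alerts() leaves subdata as {}
    flash(u'Repository created', category='success')
    # message carrying JSON subdata after the |DELIM| marker
    flash(u'Task scheduled|DELIM|{"task_id": 42}', category='notice')
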
|
714 | 714 | #============================================================================== |
|
715 | 715 | # SCM FILTERS available via h. |
|
716 | 716 | #============================================================================== |
|
717 | 717 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
718 | 718 | from rhodecode.lib.utils2 import credentials_filter, age as _age |
|
719 | 719 | from rhodecode.model.db import User, ChangesetStatus |
|
720 | 720 | |
|
721 | 721 | age = _age |
|
722 | 722 | capitalize = lambda x: x.capitalize() |
|
723 | 723 | email = author_email |
|
724 | 724 | short_id = lambda x: x[:12] |
|
725 | 725 | hide_credentials = lambda x: ''.join(credentials_filter(x)) |
|
726 | 726 | |
|
727 | 727 | |
|
728 | 728 | def age_component(datetime_iso, value=None, time_is_local=False): |
|
729 | 729 | title = value or format_date(datetime_iso) |
|
730 | 730 | tzinfo = '+00:00' |
|
731 | 731 | |
|
732 | 732 | # detect if we have a timezone info, otherwise, add it |
|
733 | 733 | if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo: |
|
734 | 734 | if time_is_local: |
|
735 | 735 | tzinfo = time.strftime("+%H:%M", |
|
736 | 736 | time.gmtime( |
|
737 | 737 | (datetime.now() - datetime.utcnow()).seconds + 1 |
|
738 | 738 | ) |
|
739 | 739 | ) |
|
740 | 740 | |
|
741 | 741 | return literal( |
|
742 | 742 | '<time class="timeago tooltip" ' |
|
743 | 743 | 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format( |
|
744 | 744 | datetime_iso, title, tzinfo)) |
|
745 | 745 | |
|
746 | 746 | |
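For illustration, with a naive datetime and `time_is_local=False` the component falls back to the '+00:00' offset; the title comes from `format_date` below:

    from datetime import datetime
    age_component(datetime(2017, 1, 15, 10, 0))
    # -> <time class="timeago tooltip"
    #          title="Sun, 15 Jan 2017 10:00:00+00:00"
    #          datetime="2017-01-15 10:00:00+00:00">Sun, 15 Jan 2017 10:00:00</time>
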
|
747 | 747 | def _shorten_commit_id(commit_id): |
|
748 | 748 | from rhodecode import CONFIG |
|
749 | 749 | def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12)) |
|
750 | 750 | return commit_id[:def_len] |
|
751 | 751 | |
|
752 | 752 | |
|
753 | 753 | def show_id(commit): |
|
754 | 754 | """ |
|
755 | 755 | Configurable function that shows ID;

756 | 756 | by default it's r123:fffeeefffeee
|
757 | 757 | |
|
758 | 758 | :param commit: commit instance |
|
759 | 759 | """ |
|
760 | 760 | from rhodecode import CONFIG |
|
761 | 761 | show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True)) |
|
762 | 762 | |
|
763 | 763 | raw_id = _shorten_commit_id(commit.raw_id) |
|
764 | 764 | if show_idx: |
|
765 | 765 | return 'r%s:%s' % (commit.idx, raw_id) |
|
766 | 766 | else: |
|
767 | 767 | return '%s' % (raw_id, ) |
|
768 | 768 | |
|
769 | 769 | |
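A sketch of the two output shapes `show_id` can produce, driven by the config keys it reads (commit values are made up):

    # rhodecode_show_revision_number = true (the default), sha length 12
    show_id(commit)   # -> 'r123:fffeeefffeee'
    # rhodecode_show_revision_number = false
    show_id(commit)   # -> 'fffeeefffeee'
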
|
770 | 770 | def format_date(date): |
|
771 | 771 | """ |
|
772 | 772 | use a standardized formatting for dates used in RhodeCode |
|
773 | 773 | |
|
774 | 774 | :param date: date/datetime object |
|
775 | 775 | :return: formatted date |
|
776 | 776 | """ |
|
777 | 777 | |
|
778 | 778 | if date: |
|
779 | 779 | _fmt = "%a, %d %b %Y %H:%M:%S" |
|
780 | 780 | return safe_unicode(date.strftime(_fmt)) |
|
781 | 781 | |
|
782 | 782 | return u"" |
|
783 | 783 | |
|
784 | 784 | |
|
785 | 785 | class _RepoChecker(object): |
|
786 | 786 | |
|
787 | 787 | def __init__(self, backend_alias): |
|
788 | 788 | self._backend_alias = backend_alias |
|
789 | 789 | |
|
790 | 790 | def __call__(self, repository): |
|
791 | 791 | if hasattr(repository, 'alias'): |
|
792 | 792 | _type = repository.alias |
|
793 | 793 | elif hasattr(repository, 'repo_type'): |
|
794 | 794 | _type = repository.repo_type |
|
795 | 795 | else: |
|
796 | 796 | _type = repository |
|
797 | 797 | return _type == self._backend_alias |
|
798 | 798 | |
|
799 | 799 | is_git = _RepoChecker('git') |
|
800 | 800 | is_hg = _RepoChecker('hg') |
|
801 | 801 | is_svn = _RepoChecker('svn') |
|
802 | 802 | |
|
803 | 803 | |
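The checkers accept either a Repository-like instance (via its `alias` or `repo_type` attribute) or a plain backend string, e.g.:

    is_hg('hg')    # True: the plain string is compared to the backend alias
    is_git(repo)   # inspects repo.alias / repo.repo_type (`repo` is any Repository)
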
|
804 | 804 | def get_repo_type_by_name(repo_name): |
|
805 | 805 | repo = Repository.get_by_repo_name(repo_name) |
|
806 | 806 | return repo.repo_type |
|
807 | 807 | |
|
808 | 808 | |
|
809 | 809 | def is_svn_without_proxy(repository): |
|
810 | 810 | if is_svn(repository): |
|
811 | 811 | from rhodecode.model.settings import VcsSettingsModel |
|
812 | 812 | conf = VcsSettingsModel().get_ui_settings_as_config_obj() |
|
813 | 813 | return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled')) |
|
814 | 814 | return False |
|
815 | 815 | |
|
816 | 816 | |
|
817 | 817 | def discover_user(author): |
|
818 | 818 | """ |
|
819 | 819 | Tries to discover RhodeCode User based on the author string. The author string
|
820 | 820 | is typically `FirstName LastName <email@address.com>` |
|
821 | 821 | """ |
|
822 | 822 | |
|
823 | 823 | # if author is already an instance use it for extraction |
|
824 | 824 | if isinstance(author, User): |
|
825 | 825 | return author |
|
826 | 826 | |
|
827 | 827 | # Valid email in the attribute passed, see if they're in the system |
|
828 | 828 | _email = author_email(author) |
|
829 | 829 | if _email != '': |
|
830 | 830 | user = User.get_by_email(_email, case_insensitive=True, cache=True) |
|
831 | 831 | if user is not None: |
|
832 | 832 | return user |
|
833 | 833 | |
|
834 | 834 | # Maybe it's a username, we try to extract it and fetch by username ? |
|
835 | 835 | _author = author_name(author) |
|
836 | 836 | user = User.get_by_username(_author, case_insensitive=True, cache=True) |
|
837 | 837 | if user is not None: |
|
838 | 838 | return user |
|
839 | 839 | |
|
840 | 840 | return None |
|
841 | 841 | |
|
842 | 842 | |
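A sketch of the lookup order for a typical author string (the address is made up):

    discover_user('Marcin Kuzminski <marcin@example.com>')
    # 1. a User instance passed in is returned unchanged
    # 2. otherwise match by the extracted email (case-insensitive, cached)
    # 3. otherwise try the extracted name part as a username
    # 4. None when nothing matches
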
|
843 | 843 | def email_or_none(author): |
|
844 | 844 | # extract email from the commit string |
|
845 | 845 | _email = author_email(author) |
|
846 | 846 | |
|
847 | 847 | # If we have an email, use it, otherwise |
|
848 | 848 | # see if it contains a username we can get an email from |
|
849 | 849 | if _email != '': |
|
850 | 850 | return _email |
|
851 | 851 | else: |
|
852 | 852 | user = User.get_by_username( |
|
853 | 853 | author_name(author), case_insensitive=True, cache=True) |
|
854 | 854 | |
|
855 | 855 | if user is not None: |
|
856 | 856 | return user.email |
|
857 | 857 | |
|
858 | 858 | # No valid email, not a valid user in the system, none! |
|
859 | 859 | return None |
|
860 | 860 | |
|
861 | 861 | |
|
862 | 862 | def link_to_user(author, length=0, **kwargs): |
|
863 | 863 | user = discover_user(author) |
|
864 | 864 | # user can be None, but if we have it already it means we can re-use it

865 | 865 | # in the person() function, so we save 1 intensive query
|
866 | 866 | if user: |
|
867 | 867 | author = user |
|
868 | 868 | |
|
869 | 869 | display_person = person(author, 'username_or_name_or_email') |
|
870 | 870 | if length: |
|
871 | 871 | display_person = shorter(display_person, length) |
|
872 | 872 | |
|
873 | 873 | if user: |
|
874 | 874 | return link_to( |
|
875 | 875 | escape(display_person), |
|
876 | 876 | route_path('user_profile', username=user.username), |
|
877 | 877 | **kwargs) |
|
878 | 878 | else: |
|
879 | 879 | return escape(display_person) |
|
880 | 880 | |
|
881 | 881 | |
|
882 | 882 | def person(author, show_attr="username_and_name"): |
|
883 | 883 | user = discover_user(author) |
|
884 | 884 | if user: |
|
885 | 885 | return getattr(user, show_attr) |
|
886 | 886 | else: |
|
887 | 887 | _author = author_name(author) |
|
888 | 888 | _email = email(author) |
|
889 | 889 | return _author or _email |
|
890 | 890 | |
|
891 | 891 | |
|
892 | 892 | def author_string(email): |
|
893 | 893 | if email: |
|
894 | 894 | user = User.get_by_email(email, case_insensitive=True, cache=True) |
|
895 | 895 | if user: |
|
896 | if user.firstname or user.lastname: | |
|
896 | if user.first_name or user.last_name: | |
|
897 | 897 | return '%s %s <%s>' % ( |
|
898 | user.firstname, user.lastname, email) | 

898 | user.first_name, user.last_name, email) | 
|
899 | 899 | else: |
|
900 | 900 | return email |
|
901 | 901 | else: |
|
902 | 902 | return email |
|
903 | 903 | else: |
|
904 | 904 | return None |
|
905 | 905 | |
|
906 | 906 | |
|
907 | 907 | def person_by_id(id_, show_attr="username_and_name"): |
|
908 | 908 | # attr to return from fetched user |
|
909 | 909 | person_getter = lambda usr: getattr(usr, show_attr) |
|
910 | 910 | |
|
911 | 911 | # maybe it's an ID?
|
912 | 912 | if str(id_).isdigit() or isinstance(id_, int): |
|
913 | 913 | id_ = int(id_) |
|
914 | 914 | user = User.get(id_) |
|
915 | 915 | if user is not None: |
|
916 | 916 | return person_getter(user) |
|
917 | 917 | return id_ |
|
918 | 918 | |
|
919 | 919 | |
|
920 | 920 | def gravatar_with_user(author, show_disabled=False): |
|
921 | 921 | from rhodecode.lib.utils import PartialRenderer |
|
922 | 922 | _render = PartialRenderer('base/base.mako') |
|
923 | 923 | return _render('gravatar_with_user', author, show_disabled=show_disabled) |
|
924 | 924 | |
|
925 | 925 | |
|
926 | 926 | def desc_stylize(value): |
|
927 | 927 | """ |
|
928 | 928 | converts tags from value into html equivalent |
|
929 | 929 | |
|
930 | 930 | :param value: |
|
931 | 931 | """ |
|
932 | 932 | if not value: |
|
933 | 933 | return '' |
|
934 | 934 | |
|
935 | 935 | value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
936 | 936 | '<div class="metatag" tag="see">see => \\1 </div>', value) |
|
937 | 937 | value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
938 | 938 | '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value) |
|
939 | 939 | value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]', |
|
940 | 940 | '<div class="metatag" tag="\\1">\\1 => <a href="/\\2">\\2</a></div>', value) |
|
941 | 941 | value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]', |
|
942 | 942 | '<div class="metatag" tag="lang">\\2</div>', value) |
|
943 | 943 | value = re.sub(r'\[([a-z]+)\]', |
|
944 | 944 | '<div class="metatag" tag="\\1">\\1</div>', value) |
|
945 | 945 | |
|
946 | 946 | return value |
|
947 | 947 | |
|
948 | 948 | |
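An illustrative input/output pair for the tag patterns above (the tags are made up, output trimmed):

    desc_stylize('[license => MIT] [lang => python] [stable]')
    # -> '<div class="metatag" tag="license"><a href="...MIT">MIT</a></div> '
    #    '<div class="metatag" tag="lang">python</div> '
    #    '<div class="metatag" tag="stable">stable</div>'
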
|
949 | 949 | def escaped_stylize(value): |
|
950 | 950 | """ |
|
951 | 951 | converts tags from value into html equivalent, but escaping its value first |
|
952 | 952 | """ |
|
953 | 953 | if not value: |
|
954 | 954 | return '' |
|
955 | 955 | |
|
956 | 956 | # Using default webhelper escape method, but has to force it as a |
|
957 | 957 | # plain unicode instead of a markup tag to be used in regex expressions |
|
958 | 958 | value = unicode(escape(safe_unicode(value))) |
|
959 | 959 | |
|
960 | 960 | value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
961 | 961 | '<div class="metatag" tag="see">see => \\1 </div>', value) |
|
962 | 962 | value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]', |
|
963 | 963 | '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value) |
|
964 | 964 | value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]', |
|
965 | 965 | '<div class="metatag" tag="\\1">\\1 => <a href="/\\2">\\2</a></div>', value) |
|
966 | 966 | value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]', |
|
967 | 967 | '<div class="metatag" tag="lang">\\2</div>', value) |
|
968 | 968 | value = re.sub(r'\[([a-z]+)\]', |
|
969 | 969 | '<div class="metatag" tag="\\1">\\1</div>', value) |
|
970 | 970 | |
|
971 | 971 | return value |
|
972 | 972 | |
|
973 | 973 | |
|
974 | 974 | def bool2icon(value): |
|
975 | 975 | """ |
|
976 | 976 | Returns the boolean value of a given value, represented as an html element

977 | 977 | with classes that represent icons
|
978 | 978 | |
|
979 | 979 | :param value: given value to convert to html node |
|
980 | 980 | """ |
|
981 | 981 | |
|
982 | 982 | if value: # does bool conversion |
|
983 | 983 | return HTML.tag('i', class_="icon-true") |
|
984 | 984 | else: # not true as bool |
|
985 | 985 | return HTML.tag('i', class_="icon-false") |
|
986 | 986 | |
|
987 | 987 | |
|
988 | 988 | #============================================================================== |
|
989 | 989 | # PERMS |
|
990 | 990 | #============================================================================== |
|
991 | 991 | from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \ |
|
992 | 992 | HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \ |
|
993 | 993 | HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \ |
|
994 | 994 | csrf_token_key |
|
995 | 995 | |
|
996 | 996 | |
|
997 | 997 | #============================================================================== |
|
998 | 998 | # GRAVATAR URL |
|
999 | 999 | #============================================================================== |
|
1000 | 1000 | class InitialsGravatar(object): |
|
1001 | 1001 | def __init__(self, email_address, first_name, last_name, size=30, |
|
1002 | 1002 | background=None, text_color='#fff'): |
|
1003 | 1003 | self.size = size |
|
1004 | 1004 | self.first_name = first_name |
|
1005 | 1005 | self.last_name = last_name |
|
1006 | 1006 | self.email_address = email_address |
|
1007 | 1007 | self.background = background or self.str2color(email_address) |
|
1008 | 1008 | self.text_color = text_color |
|
1009 | 1009 | |
|
1010 | 1010 | def get_color_bank(self): |
|
1011 | 1011 | """ |
|
1012 | 1012 | returns a predefined list of colors that gravatars can use. |
|
1013 | 1013 | Those are randomized distinct colors that guarantee readability and |
|
1014 | 1014 | uniqueness. |
|
1015 | 1015 | |
|
1016 | 1016 | generated with: http://phrogz.net/css/distinct-colors.html |
|
1017 | 1017 | """ |
|
1018 | 1018 | return [ |
|
1019 | 1019 | '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000', |
|
1020 | 1020 | '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320', |
|
1021 | 1021 | '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300', |
|
1022 | 1022 | '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140', |
|
1023 | 1023 | '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c', |
|
1024 | 1024 | '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020', |
|
1025 | 1025 | '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039', |
|
1026 | 1026 | '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f', |
|
1027 | 1027 | '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340', |
|
1028 | 1028 | '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98', |
|
1029 | 1029 | '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c', |
|
1030 | 1030 | '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200', |
|
1031 | 1031 | '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a', |
|
1032 | 1032 | '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959', |
|
1033 | 1033 | '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3', |
|
1034 | 1034 | '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626', |
|
1035 | 1035 | '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000', |
|
1036 | 1036 | '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362', |
|
1037 | 1037 | '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3', |
|
1038 | 1038 | '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a', |
|
1039 | 1039 | '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939', |
|
1040 | 1040 | '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39', |
|
1041 | 1041 | '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953', |
|
1042 | 1042 | '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9', |
|
1043 | 1043 | '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1', |
|
1044 | 1044 | '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900', |
|
1045 | 1045 | '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00', |
|
1046 | 1046 | '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3', |
|
1047 | 1047 | '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59', |
|
1048 | 1048 | '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079', |
|
1049 | 1049 | '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700', |
|
1050 | 1050 | '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d', |
|
1051 | 1051 | '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2', |
|
1052 | 1052 | '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff', |
|
1053 | 1053 | '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20', |
|
1054 | 1054 | '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626', |
|
1055 | 1055 | '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23', |
|
1056 | 1056 | '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff', |
|
1057 | 1057 | '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6', |
|
1058 | 1058 | '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a', |
|
1059 | 1059 | '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c', |
|
1060 | 1060 | '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600', |
|
1061 | 1061 | '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff', |
|
1062 | 1062 | '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539', |
|
1063 | 1063 | '#4f8c46', '#368dd9', '#5c0073' |
|
1064 | 1064 | ] |
|
1065 | 1065 | |
|
1066 | 1066 | def rgb_to_hex_color(self, rgb_tuple): |
|
1067 | 1067 | """ |
|
1068 | 1068 | Converts a passed rgb_tuple to a hex color.
|
1069 | 1069 | |
|
1070 | 1070 | :param rgb_tuple: tuple with 3 ints represents rgb color space |
|
1071 | 1071 | """ |
|
1072 | 1072 | return '#' + ("".join(map(chr, rgb_tuple)).encode('hex')) |
|
1073 | 1073 | |
|
1074 | 1074 | def email_to_int_list(self, email_str): |
|
1075 | 1075 | """ |
|
1076 | 1076 | Get every byte of the hex digest value of email and turn it into an integer.

1077 | 1077 | It will always be between 0-255
|
1078 | 1078 | """ |
|
1079 | 1079 | digest = md5_safe(email_str.lower()) |
|
1080 | 1080 | return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)] |
|
1081 | 1081 | |
|
1082 | 1082 | def pick_color_bank_index(self, email_str, color_bank): |
|
1083 | 1083 | return self.email_to_int_list(email_str)[0] % len(color_bank) |
|
1084 | 1084 | |
|
1085 | 1085 | def str2color(self, email_str): |
|
1086 | 1086 | """ |
|
1087 | 1087 | Tries to map in a stable algorithm an email to color |
|
1088 | 1088 | |
|
1089 | 1089 | :param email_str: |
|
1090 | 1090 | """ |
|
1091 | 1091 | color_bank = self.get_color_bank() |
|
1092 | 1092 | # pick position (modulo its length) so we always find it in the

1093 | 1093 | # bank even if it's smaller than 256 values
|
1094 | 1094 | pos = self.pick_color_bank_index(email_str, color_bank) |
|
1095 | 1095 | return color_bank[pos] |
|
1096 | 1096 | |
|
1097 | 1097 | def normalize_email(self, email_address): |
|
1098 | 1098 | import unicodedata |
|
1099 | 1099 | # default host used to fill in the fake/missing email |
|
1100 | 1100 | default_host = u'localhost' |
|
1101 | 1101 | |
|
1102 | 1102 | if not email_address: |
|
1103 | 1103 | email_address = u'%s@%s' % (User.DEFAULT_USER, default_host) |
|
1104 | 1104 | |
|
1105 | 1105 | email_address = safe_unicode(email_address) |
|
1106 | 1106 | |
|
1107 | 1107 | if u'@' not in email_address: |
|
1108 | 1108 | email_address = u'%s@%s' % (email_address, default_host) |
|
1109 | 1109 | |
|
1110 | 1110 | if email_address.endswith(u'@'): |
|
1111 | 1111 | email_address = u'%s%s' % (email_address, default_host) |
|
1112 | 1112 | |
|
1113 | 1113 | email_address = unicodedata.normalize('NFKD', email_address)\ |
|
1114 | 1114 | .encode('ascii', 'ignore') |
|
1115 | 1115 | return email_address |
|
1116 | 1116 | |
|
1117 | 1117 | def get_initials(self): |
|
1118 | 1118 | """ |
|
1119 | 1119 | Returns 2 letter initials calculated based on the input. |
|
1120 | 1120 | The algorithm picks the first given email address, takes the first letter

1121 | 1121 | of the part before @, and then the first letter of the server name. In case

1122 | 1122 | the part before @ is in the format `somestring.somestring2`, it replaces

1123 | 1123 | the server letter with the first letter of somestring2.

1124 | 1124 | 

1125 | 1125 | In case the function was initialized with both first and last name, this

1126 | 1126 | overrides the extraction from email with the first letters of the first

1127 | 1127 | and last name. We add special logic to that: in case the full name

1128 | 1128 | is compound, like Guido Von Rossum, we use the last part of the last name

1129 | 1129 | (Von Rossum), picking `R`.

1130 | 1130 | 

1131 | 1131 | The function also normalizes non-ascii characters to their ascii

1132 | 1132 | representation, eg Ą => A
|
1133 | 1133 | """ |
|
1134 | 1134 | import unicodedata |
|
1135 | 1135 | # replace non-ascii to ascii |
|
1136 | 1136 | first_name = unicodedata.normalize( |
|
1137 | 1137 | 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore') |
|
1138 | 1138 | last_name = unicodedata.normalize( |
|
1139 | 1139 | 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore') |
|
1140 | 1140 | |
|
1141 | 1141 | # do NFKD encoding, and also make sure email has proper format |
|
1142 | 1142 | email_address = self.normalize_email(self.email_address) |
|
1143 | 1143 | |
|
1144 | 1144 | # first push the email initials |
|
1145 | 1145 | prefix, server = email_address.split('@', 1) |
|
1146 | 1146 | |
|
1147 | # check if prefix is maybe a 'firstname.lastname' syntax | |
|
1147 | # check if prefix is maybe a 'first_name.last_name' syntax | |
|
1148 | 1148 | _dot_split = prefix.rsplit('.', 1) |
|
1149 | 1149 | if len(_dot_split) == 2: |
|
1150 | 1150 | initials = [_dot_split[0][0], _dot_split[1][0]] |
|
1151 | 1151 | else: |
|
1152 | 1152 | initials = [prefix[0], server[0]] |
|
1153 | 1153 | |
|
1154 | # then try to replace either firtname or lastname | |
|
1154 | # then try to replace either first_name or last_name | |
|
1155 | 1155 | fn_letter = (first_name or " ")[0].strip() |
|
1156 | 1156 | ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip() |
|
1157 | 1157 | |
|
1158 | 1158 | if fn_letter: |
|
1159 | 1159 | initials[0] = fn_letter |
|
1160 | 1160 | |
|
1161 | 1161 | if ln_letter: |
|
1162 | 1162 | initials[1] = ln_letter |
|
1163 | 1163 | |
|
1164 | 1164 | return ''.join(initials).upper() |
|
1165 | 1165 | |
|
1166 | 1166 | def get_img_data_by_type(self, font_family, img_type): |
|
1167 | 1167 | default_user = """ |
|
1168 | 1168 | <svg xmlns="http://www.w3.org/2000/svg" |
|
1169 | 1169 | version="1.1" x="0px" y="0px" width="{size}" height="{size}" |
|
1170 | 1170 | viewBox="-15 -10 439.165 429.164" |
|
1171 | 1171 | |
|
1172 | 1172 | xml:space="preserve" |
|
1173 | 1173 | style="background:{background};" > |
|
1174 | 1174 | |
|
1175 | 1175 | <path d="M204.583,216.671c50.664,0,91.74-48.075, |
|
1176 | 1176 | 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377 |
|
1177 | 1177 | c-50.668,0-91.74,25.14-91.74,107.377C112.844, |
|
1178 | 1178 | 168.596,153.916,216.671, |
|
1179 | 1179 | 204.583,216.671z" fill="{text_color}"/> |
|
1180 | 1180 | <path d="M407.164,374.717L360.88, |
|
1181 | 1181 | 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392 |
|
1182 | 1182 | c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316, |
|
1183 | 1183 | 15.366-44.203,23.488-69.076,23.488c-24.877, |
|
1184 | 1184 | 0-48.762-8.122-69.078-23.488 |
|
1185 | 1185 | c-1.428-1.078-3.346-1.238-4.93-0.415L58.75, |
|
1186 | 1186 | 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717 |
|
1187 | 1187 | c-3.191,7.188-2.537,15.412,1.75,22.005c4.285, |
|
1188 | 1188 | 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936, |
|
1189 | 1189 | 19.402-10.527 C409.699,390.129, |
|
1190 | 1190 | 410.355,381.902,407.164,374.717z" fill="{text_color}"/> |
|
1191 | 1191 | </svg>""".format( |
|
1192 | 1192 | size=self.size, |
|
1193 | 1193 | background='#979797', # @grey4 |
|
1194 | 1194 | text_color=self.text_color, |
|
1195 | 1195 | font_family=font_family) |
|
1196 | 1196 | |
|
1197 | 1197 | return { |
|
1198 | 1198 | "default_user": default_user |
|
1199 | 1199 | }[img_type] |
|
1200 | 1200 | |
|
1201 | 1201 | def get_img_data(self, svg_type=None): |
|
1202 | 1202 | """ |
|
1203 | 1203 | generates the svg metadata for image |
|
1204 | 1204 | """ |
|
1205 | 1205 | |
|
1206 | 1206 | font_family = ','.join([ |
|
1207 | 1207 | 'proximanovaregular', |
|
1208 | 1208 | 'Proxima Nova Regular', |
|
1209 | 1209 | 'Proxima Nova', |
|
1210 | 1210 | 'Arial', |
|
1211 | 1211 | 'Lucida Grande', |
|
1212 | 1212 | 'sans-serif' |
|
1213 | 1213 | ]) |
|
1214 | 1214 | if svg_type: |
|
1215 | 1215 | return self.get_img_data_by_type(font_family, svg_type) |
|
1216 | 1216 | |
|
1217 | 1217 | initials = self.get_initials() |
|
1218 | 1218 | img_data = """ |
|
1219 | 1219 | <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none" |
|
1220 | 1220 | width="{size}" height="{size}" |
|
1221 | 1221 | style="width: 100%; height: 100%; background-color: {background}" |
|
1222 | 1222 | viewBox="0 0 {size} {size}"> |
|
1223 | 1223 | <text text-anchor="middle" y="50%" x="50%" dy="0.35em" |
|
1224 | 1224 | pointer-events="auto" fill="{text_color}" |
|
1225 | 1225 | font-family="{font_family}" |
|
1226 | 1226 | style="font-weight: 400; font-size: {f_size}px;">{text} |
|
1227 | 1227 | </text> |
|
1228 | 1228 | </svg>""".format( |
|
1229 | 1229 | size=self.size, |
|
1230 | 1230 | f_size=self.size/1.85, # scale the text inside the box nicely |
|
1231 | 1231 | background=self.background, |
|
1232 | 1232 | text_color=self.text_color, |
|
1233 | 1233 | text=initials.upper(), |
|
1234 | 1234 | font_family=font_family) |
|
1235 | 1235 | |
|
1236 | 1236 | return img_data |
|
1237 | 1237 | |
|
1238 | 1238 | def generate_svg(self, svg_type=None): |
|
1239 | 1239 | img_data = self.get_img_data(svg_type) |
|
1240 | 1240 | return "data:image/svg+xml;base64,%s" % img_data.encode('base64') |
|
1241 | 1241 | |
|
1242 | 1242 | |
|
1243 | 1243 | def initials_gravatar(email_address, first_name, last_name, size=30): |
|
1244 | 1244 | svg_type = None |
|
1245 | 1245 | if email_address == User.DEFAULT_USER_EMAIL: |
|
1246 | 1246 | svg_type = 'default_user' |
|
1247 | 1247 | klass = InitialsGravatar(email_address, first_name, last_name, size) |
|
1248 | 1248 | return klass.generate_svg(svg_type=svg_type) |
|
1249 | 1249 | |
|
1250 | 1250 | |
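A usage sketch tying the class together (email and names are made up): the background color is a stable function of the md5 of the email, and the initials follow the rules documented in `get_initials`:

    initials_gravatar('jane.doe@example.com', u'Jane', u'Doe', size=40)
    # -> 'data:image/svg+xml;base64,...' rendering "JD" on a color picked
    #    deterministically from the color bank above
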
|
1251 | 1251 | def gravatar_url(email_address, size=30, request=None): |
|
1252 | 1252 | request = get_current_request() |
|
1253 | 1253 | if request and hasattr(request, 'call_context'): |
|
1254 | 1254 | _use_gravatar = request.call_context.visual.use_gravatar |
|
1255 | 1255 | _gravatar_url = request.call_context.visual.gravatar_url |
|
1256 | 1256 | else: |
|
1257 | 1257 | # doh, we need to re-import those to mock it later |
|
1258 | 1258 | from pylons import tmpl_context as c |
|
1259 | 1259 | |
|
1260 | 1260 | _use_gravatar = c.visual.use_gravatar |
|
1261 | 1261 | _gravatar_url = c.visual.gravatar_url |
|
1262 | 1262 | |
|
1263 | 1263 | _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL |
|
1264 | 1264 | |
|
1265 | 1265 | email_address = email_address or User.DEFAULT_USER_EMAIL |
|
1266 | 1266 | if isinstance(email_address, unicode): |
|
1267 | 1267 | # hashlib crashes on unicode items |
|
1268 | 1268 | email_address = safe_str(email_address) |
|
1269 | 1269 | |
|
1270 | 1270 | # empty email or default user |
|
1271 | 1271 | if not email_address or email_address == User.DEFAULT_USER_EMAIL: |
|
1272 | 1272 | return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size) |
|
1273 | 1273 | |
|
1274 | 1274 | if _use_gravatar: |
|
1275 | 1275 | # TODO: Disuse pyramid thread locals. Think about another solution to |
|
1276 | 1276 | # get the host and schema here. |
|
1277 | 1277 | request = get_current_request() |
|
1278 | 1278 | tmpl = safe_str(_gravatar_url) |
|
1279 | 1279 | tmpl = tmpl.replace('{email}', email_address)\ |
|
1280 | 1280 | .replace('{md5email}', md5_safe(email_address.lower())) \ |
|
1281 | 1281 | .replace('{netloc}', request.host)\ |
|
1282 | 1282 | .replace('{scheme}', request.scheme)\ |
|
1283 | 1283 | .replace('{size}', safe_str(size)) |
|
1284 | 1284 | return tmpl |
|
1285 | 1285 | else: |
|
1286 | 1286 | return initials_gravatar(email_address, '', '', size=size) |
|
1287 | 1287 | |
|
1288 | 1288 | |
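A sketch of the template-expansion branch (the template below is an assumed example of a configured gravatar_url, not a shipped default):

    # with use_gravatar on and e.g.
    #   gravatar_url = '//gravatar.com/avatar/{md5email}?s={size}'
    gravatar_url('marcin@example.com', size=30)
    # -> '//gravatar.com/avatar/<md5 of the lowercased email>?s=30'
    # with use_gravatar off it falls back to initials_gravatar()
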
|
1289 | 1289 | class Page(_Page): |
|
1290 | 1290 | """ |
|
1291 | 1291 | Custom pager to match rendering style with paginator |
|
1292 | 1292 | """ |
|
1293 | 1293 | |
|
1294 | 1294 | def _get_pos(self, cur_page, max_page, items): |
|
1295 | 1295 | edge = (items / 2) + 1 |
|
1296 | 1296 | if (cur_page <= edge): |
|
1297 | 1297 | radius = max(items / 2, items - cur_page) |
|
1298 | 1298 | elif (max_page - cur_page) < edge: |
|
1299 | 1299 | radius = (items - 1) - (max_page - cur_page) |
|
1300 | 1300 | else: |
|
1301 | 1301 | radius = items / 2 |
|
1302 | 1302 | |
|
1303 | 1303 | left = max(1, (cur_page - (radius))) |
|
1304 | 1304 | right = min(max_page, cur_page + (radius)) |
|
1305 | 1305 | return left, cur_page, right |
|
1306 | 1306 | |
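A worked example of `_get_pos`, matching the '1 .. 5 6 [7] 8 9 .. 12' case illustrated in `_range` below:

    # _get_pos(cur_page=7, max_page=12, items=5)   # items == radius * 2 + 1
    #   edge   = 5 / 2 + 1 = 3     (integer division)
    #   radius = 5 / 2     = 2     (middle branch: far from both ends)
    #   left   = max(1, 7 - 2)  = 5
    #   right  = min(12, 7 + 2) = 9
    #   -> (5, 7, 9)
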
|
1307 | 1307 | def _range(self, regexp_match): |
|
1308 | 1308 | """ |
|
1309 | 1309 | Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8'). |
|
1310 | 1310 | |
|
1311 | 1311 | Arguments: |
|
1312 | 1312 | |
|
1313 | 1313 | regexp_match |
|
1314 | 1314 | A "re" (regular expressions) match object containing the |
|
1315 | 1315 | radius of linked pages around the current page in |
|
1316 | 1316 | regexp_match.group(1) as a string |
|
1317 | 1317 | |
|
1318 | 1318 | This function is supposed to be called as a callable in |
|
1319 | 1319 | re.sub. |
|
1320 | 1320 | |
|
1321 | 1321 | """ |
|
1322 | 1322 | radius = int(regexp_match.group(1)) |
|
1323 | 1323 | |
|
1324 | 1324 | # Compute the first and last page number within the radius |
|
1325 | 1325 | # e.g. '1 .. 5 6 [7] 8 9 .. 12' |
|
1326 | 1326 | # -> leftmost_page = 5 |
|
1327 | 1327 | # -> rightmost_page = 9 |
|
1328 | 1328 | leftmost_page, _cur, rightmost_page = self._get_pos(self.page, |
|
1329 | 1329 | self.last_page, |
|
1330 | 1330 | (radius * 2) + 1) |
|
1331 | 1331 | nav_items = [] |
|
1332 | 1332 | |
|
1333 | 1333 | # Create a link to the first page (unless we are on the first page |
|
1334 | 1334 | # or there would be no need to insert '..' spacers) |
|
1335 | 1335 | if self.page != self.first_page and self.first_page < leftmost_page: |
|
1336 | 1336 | nav_items.append(self._pagerlink(self.first_page, self.first_page)) |
|
1337 | 1337 | |
|
1338 | 1338 | # Insert dots if there are pages between the first page |
|
1339 | 1339 | # and the currently displayed page range |
|
1340 | 1340 | if leftmost_page - self.first_page > 1: |
|
1341 | 1341 | # Wrap in a SPAN tag if nolink_attr is set |
|
1342 | 1342 | text = '..' |
|
1343 | 1343 | if self.dotdot_attr: |
|
1344 | 1344 | text = HTML.span(c=text, **self.dotdot_attr) |
|
1345 | 1345 | nav_items.append(text) |
|
1346 | 1346 | |
|
1347 | 1347 | for thispage in xrange(leftmost_page, rightmost_page + 1): |
|
1348 | 1348 | # Highlight the current page number and do not use a link
|
1349 | 1349 | if thispage == self.page: |
|
1350 | 1350 | text = '%s' % (thispage,) |
|
1351 | 1351 | # Wrap in a SPAN tag if nolink_attr is set |
|
1352 | 1352 | if self.curpage_attr: |
|
1353 | 1353 | text = HTML.span(c=text, **self.curpage_attr) |
|
1354 | 1354 | nav_items.append(text) |
|
1355 | 1355 | # Otherwise create just a link to that page |
|
1356 | 1356 | else: |
|
1357 | 1357 | text = '%s' % (thispage,) |
|
1358 | 1358 | nav_items.append(self._pagerlink(thispage, text)) |
|
1359 | 1359 | |
|
1360 | 1360 | # Insert dots if there are pages between the displayed |
|
1361 | 1361 | # page numbers and the end of the page range |
|
1362 | 1362 | if self.last_page - rightmost_page > 1: |
|
1363 | 1363 | text = '..' |
|
1364 | 1364 | # Wrap in a SPAN tag if nolink_attr is set |
|
1365 | 1365 | if self.dotdot_attr: |
|
1366 | 1366 | text = HTML.span(c=text, **self.dotdot_attr) |
|
1367 | 1367 | nav_items.append(text) |
|
1368 | 1368 | |
|
1369 | 1369 | # Create a link to the very last page (unless we are on the last |
|
1370 | 1370 | # page or there would be no need to insert '..' spacers) |
|
1371 | 1371 | if self.page != self.last_page and rightmost_page < self.last_page: |
|
1372 | 1372 | nav_items.append(self._pagerlink(self.last_page, self.last_page)) |
|
1373 | 1373 | |
|
1374 | 1374 | ## prerender links |
|
1375 | 1375 | #_page_link = url.current() |
|
1376 | 1376 | #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1)))) |
|
1377 | 1377 | #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1)))) |
|
1378 | 1378 | return self.separator.join(nav_items) |
|
1379 | 1379 | |
|
1380 | 1380 | def pager(self, format='~2~', page_param='page', partial_param='partial', |
|
1381 | 1381 | show_if_single_page=False, separator=' ', onclick=None, |
|
1382 | 1382 | symbol_first='<<', symbol_last='>>', |
|
1383 | 1383 | symbol_previous='<', symbol_next='>', |
|
1384 | 1384 | link_attr={'class': 'pager_link', 'rel': 'prerender'}, |
|
1385 | 1385 | curpage_attr={'class': 'pager_curpage'}, |
|
1386 | 1386 | dotdot_attr={'class': 'pager_dotdot'}, **kwargs): |
|
1387 | 1387 | |
|
1388 | 1388 | self.curpage_attr = curpage_attr |
|
1389 | 1389 | self.separator = separator |
|
1390 | 1390 | self.pager_kwargs = kwargs |
|
1391 | 1391 | self.page_param = page_param |
|
1392 | 1392 | self.partial_param = partial_param |
|
1393 | 1393 | self.onclick = onclick |
|
1394 | 1394 | self.link_attr = link_attr |
|
1395 | 1395 | self.dotdot_attr = dotdot_attr |
|
1396 | 1396 | |
|
1397 | 1397 | # Don't show navigator if there is no more than one page |
|
1398 | 1398 | if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page): |
|
1399 | 1399 | return '' |
|
1400 | 1400 | |
|
1401 | 1401 | from string import Template |
|
1402 | 1402 | # Replace ~...~ in token format by range of pages |
|
1403 | 1403 | result = re.sub(r'~(\d+)~', self._range, format) |
|
1404 | 1404 | |
|
1405 | 1405 | # Interpolate '%' variables |
|
1406 | 1406 | result = Template(result).safe_substitute({ |
|
1407 | 1407 | 'first_page': self.first_page, |
|
1408 | 1408 | 'last_page': self.last_page, |
|
1409 | 1409 | 'page': self.page, |
|
1410 | 1410 | 'page_count': self.page_count, |
|
1411 | 1411 | 'items_per_page': self.items_per_page, |
|
1412 | 1412 | 'first_item': self.first_item, |
|
1413 | 1413 | 'last_item': self.last_item, |
|
1414 | 1414 | 'item_count': self.item_count, |
|
1415 | 1415 | 'link_first': self.page > self.first_page and \ |
|
1416 | 1416 | self._pagerlink(self.first_page, symbol_first) or '', |
|
1417 | 1417 | 'link_last': self.page < self.last_page and \ |
|
1418 | 1418 | self._pagerlink(self.last_page, symbol_last) or '', |
|
1419 | 1419 | 'link_previous': self.previous_page and \ |
|
1420 | 1420 | self._pagerlink(self.previous_page, symbol_previous) \ |
|
1421 | 1421 | or HTML.span(symbol_previous, class_="pg-previous disabled"), |
|
1422 | 1422 | 'link_next': self.next_page and \ |
|
1423 | 1423 | self._pagerlink(self.next_page, symbol_next) \ |
|
1424 | 1424 | or HTML.span(symbol_next, class_="pg-next disabled") |
|
1425 | 1425 | }) |
|
1426 | 1426 | |
|
1427 | 1427 | return literal(result) |
|
1428 | 1428 | |
|
1429 | 1429 | |
|
1430 | 1430 | #============================================================================== |
|
1431 | 1431 | # REPO PAGER, PAGER FOR REPOSITORY |
|
1432 | 1432 | #============================================================================== |
|
1433 | 1433 | class RepoPage(Page): |
|
1434 | 1434 | |
|
1435 | 1435 | def __init__(self, collection, page=1, items_per_page=20, |
|
1436 | 1436 | item_count=None, url=None, **kwargs): |
|
1437 | 1437 | |
|
1438 | 1438 | """Create a "RepoPage" instance. special pager for paging |
|
1439 | 1439 | repository |
|
1440 | 1440 | """ |
|
1441 | 1441 | self._url_generator = url |
|
1442 | 1442 | |
|
1443 | 1443 | # Save the kwargs class-wide so they can be used in the pager() method
|
1444 | 1444 | self.kwargs = kwargs |
|
1445 | 1445 | |
|
1446 | 1446 | # Save a reference to the collection |
|
1447 | 1447 | self.original_collection = collection |
|
1448 | 1448 | |
|
1449 | 1449 | self.collection = collection |
|
1450 | 1450 | |
|
1451 | 1451 | # The self.page is the number of the current page. |
|
1452 | 1452 | # The first page has the number 1! |
|
1453 | 1453 | try: |
|
1454 | 1454 | self.page = int(page) # make it int() if we get it as a string |
|
1455 | 1455 | except (ValueError, TypeError): |
|
1456 | 1456 | self.page = 1 |
|
1457 | 1457 | |
|
1458 | 1458 | self.items_per_page = items_per_page |
|
1459 | 1459 | |
|
1460 | 1460 | # Unless the user tells us how many items the collections has |
|
1461 | 1461 | # we calculate that ourselves. |
|
1462 | 1462 | if item_count is not None: |
|
1463 | 1463 | self.item_count = item_count |
|
1464 | 1464 | else: |
|
1465 | 1465 | self.item_count = len(self.collection) |
|
1466 | 1466 | |
|
1467 | 1467 | # Compute the number of the first and last available page |
|
1468 | 1468 | if self.item_count > 0: |
|
1469 | 1469 | self.first_page = 1 |
|
1470 | 1470 | self.page_count = int(math.ceil(float(self.item_count) / |
|
1471 | 1471 | self.items_per_page)) |
|
1472 | 1472 | self.last_page = self.first_page + self.page_count - 1 |
|
1473 | 1473 | |
|
1474 | 1474 | # Make sure that the requested page number is in the range of

1475 | 1475 | # valid pages
|
1476 | 1476 | if self.page > self.last_page: |
|
1477 | 1477 | self.page = self.last_page |
|
1478 | 1478 | elif self.page < self.first_page: |
|
1479 | 1479 | self.page = self.first_page |
|
1480 | 1480 | |
|
1481 | 1481 | # Note: the number of items on this page can be less than |
|
1482 | 1482 | # items_per_page if the last page is not full |
|
1483 | 1483 | self.first_item = max(0, (self.item_count) - (self.page * |
|
1484 | 1484 | items_per_page)) |
|
1485 | 1485 | self.last_item = ((self.item_count - 1) - items_per_page * |
|
1486 | 1486 | (self.page - 1)) |
|
1487 | 1487 | |
|
1488 | 1488 | self.items = list(self.collection[self.first_item:self.last_item + 1]) |
|
1489 | 1489 | |
|
1490 | 1490 | # Links to previous and next page |
|
1491 | 1491 | if self.page > self.first_page: |
|
1492 | 1492 | self.previous_page = self.page - 1 |
|
1493 | 1493 | else: |
|
1494 | 1494 | self.previous_page = None |
|
1495 | 1495 | |
|
1496 | 1496 | if self.page < self.last_page: |
|
1497 | 1497 | self.next_page = self.page + 1 |
|
1498 | 1498 | else: |
|
1499 | 1499 | self.next_page = None |
|
1500 | 1500 | |
|
1501 | 1501 | # No items available |
|
1502 | 1502 | else: |
|
1503 | 1503 | self.first_page = None |
|
1504 | 1504 | self.page_count = 0 |
|
1505 | 1505 | self.last_page = None |
|
1506 | 1506 | self.first_item = None |
|
1507 | 1507 | self.last_item = None |
|
1508 | 1508 | self.previous_page = None |
|
1509 | 1509 | self.next_page = None |
|
1510 | 1510 | self.items = [] |
|
1511 | 1511 | |
|
1512 | 1512 | # This is a subclass of the 'list' type. Initialise the list now. |
|
1513 | 1513 | list.__init__(self, reversed(self.items)) |
|
1514 | 1514 | |
|
1515 | 1515 | |
|
1516 | 1516 | def changed_tooltip(nodes): |
|
1517 | 1517 | """ |
|
1518 | 1518 | Generates an html string for changed nodes in the commit page.
|
1519 | 1519 | It limits the output to 30 entries |
|
1520 | 1520 | |
|
1521 | 1521 | :param nodes: LazyNodesGenerator |
|
1522 | 1522 | """ |
|
1523 | 1523 | if nodes: |
|
1524 | 1524 | pref = ': <br/> ' |
|
1525 | 1525 | suf = '' |
|
1526 | 1526 | if len(nodes) > 30: |
|
1527 | 1527 | suf = '<br/>' + _(' and %s more') % (len(nodes) - 30) |
|
1528 | 1528 | return literal(pref + '<br/> '.join([safe_unicode(x.path) |
|
1529 | 1529 | for x in nodes[:30]]) + suf) |
|
1530 | 1530 | else: |
|
1531 | 1531 | return ': ' + _('No Files') |
|
1532 | 1532 | |
|
1533 | 1533 | |
|
1534 | 1534 | def breadcrumb_repo_link(repo): |
|
1535 | 1535 | """ |
|
1536 | 1536 | Makes a breadcrumbs path link to repo |
|
1537 | 1537 | |
|
1538 | 1538 | ex:: |
|
1539 | 1539 | group >> subgroup >> repo |
|
1540 | 1540 | |
|
1541 | 1541 | :param repo: a Repository instance |
|
1542 | 1542 | """ |
|
1543 | 1543 | |
|
1544 | 1544 | path = [ |
|
1545 | 1545 | link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name)) |
|
1546 | 1546 | for group in repo.groups_with_parents |
|
1547 | 1547 | ] + [ |
|
1548 | 1548 | link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name)) |
|
1549 | 1549 | ] |
|
1550 | 1550 | |
|
1551 | 1551 | return literal(' » '.join(path)) |
|
1552 | 1552 | |
|
1553 | 1553 | |
|
1554 | 1554 | def format_byte_size_binary(file_size): |
|
1555 | 1555 | """ |
|
1556 | 1556 | Formats file/folder sizes to standard. |
|
1557 | 1557 | """ |
|
1558 | 1558 | formatted_size = format_byte_size(file_size, binary=True) |
|
1559 | 1559 | return formatted_size |
|
1560 | 1560 | |
|
1561 | 1561 | |
|
1562 | 1562 | def urlify_text(text_, safe=True): |
|
1563 | 1563 | """ |
|
1564 | 1564 | Extract urls from text and make html links out of them
|
1565 | 1565 | |
|
1566 | 1566 | :param text_: |
|
1567 | 1567 | """ |
|
1568 | 1568 | |
|
1569 | 1569 | url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]''' |
|
1570 | 1570 | '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''') |
|
1571 | 1571 | |
|
1572 | 1572 | def url_func(match_obj): |
|
1573 | 1573 | url_full = match_obj.groups()[0] |
|
1574 | 1574 | return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full}) |
|
1575 | 1575 | _newtext = url_pat.sub(url_func, text_) |
|
1576 | 1576 | if safe: |
|
1577 | 1577 | return literal(_newtext) |
|
1578 | 1578 | return _newtext |
|
1579 | 1579 | |
|
1580 | 1580 | |
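For illustration (the URL is made up):

    urlify_text('docs: http://docs.example.com/start')
    # -> literal('docs: <a href="http://docs.example.com/start">'
    #            'http://docs.example.com/start</a>')
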
|
1581 | 1581 | def urlify_commits(text_, repository): |
|
1582 | 1582 | """ |
|
1583 | 1583 | Extract commit ids from text and make link from them |
|
1584 | 1584 | |
|
1585 | 1585 | :param text_: |
|
1586 | 1586 | :param repository: repo name to build the URL with |
|
1587 | 1587 | """ |
|
1588 | 1588 | from pylons import url # doh, we need to re-import url to mock it later |
|
1589 | 1589 | URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)') |
|
1590 | 1590 | |
|
1591 | 1591 | def url_func(match_obj): |
|
1592 | 1592 | commit_id = match_obj.groups()[1] |
|
1593 | 1593 | pref = match_obj.groups()[0] |
|
1594 | 1594 | suf = match_obj.groups()[2] |
|
1595 | 1595 | |
|
1596 | 1596 | tmpl = ( |
|
1597 | 1597 | '%(pref)s<a class="%(cls)s" href="%(url)s">' |
|
1598 | 1598 | '%(commit_id)s</a>%(suf)s' |
|
1599 | 1599 | ) |
|
1600 | 1600 | return tmpl % { |
|
1601 | 1601 | 'pref': pref, |
|
1602 | 1602 | 'cls': 'revision-link', |
|
1603 | 1603 | 'url': url('changeset_home', repo_name=repository, |
|
1604 | 1604 | revision=commit_id, qualified=True), |
|
1605 | 1605 | 'commit_id': commit_id, |
|
1606 | 1606 | 'suf': suf |
|
1607 | 1607 | } |
|
1608 | 1608 | |
|
1609 | 1609 | newtext = URL_PAT.sub(url_func, text_) |
|
1610 | 1610 | |
|
1611 | 1611 | return newtext |
|
1612 | 1612 | |
|
1613 | 1613 | |
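An input/output sketch (repo and sha are made up); any standalone run of 12-40 hex characters is turned into a changeset link:

    urlify_commits('fixed in deadbeefcafe', 'my-repo')
    # -> 'fixed in <a class="revision-link"
    #      href="<changeset_home url for my-repo/deadbeefcafe>">deadbeefcafe</a>'
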
|
1614 | 1614 | def _process_url_func(match_obj, repo_name, uid, entry, |
|
1615 | 1615 | return_raw_data=False, link_format='html'): |
|
1616 | 1616 | pref = '' |
|
1617 | 1617 | if match_obj.group().startswith(' '): |
|
1618 | 1618 | pref = ' ' |
|
1619 | 1619 | |
|
1620 | 1620 | issue_id = ''.join(match_obj.groups()) |
|
1621 | 1621 | |
|
1622 | 1622 | if link_format == 'html': |
|
1623 | 1623 | tmpl = ( |
|
1624 | 1624 | '%(pref)s<a class="%(cls)s" href="%(url)s">' |
|
1625 | 1625 | '%(issue-prefix)s%(id-repr)s' |
|
1626 | 1626 | '</a>') |
|
1627 | 1627 | elif link_format == 'rst': |
|
1628 | 1628 | tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_' |
|
1629 | 1629 | elif link_format == 'markdown': |
|
1630 | 1630 | tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)' |
|
1631 | 1631 | else: |
|
1632 | 1632 | raise ValueError('Bad link_format:{}'.format(link_format)) |
|
1633 | 1633 | |
|
1634 | 1634 | (repo_name_cleaned, |
|
1635 | 1635 | parent_group_name) = RepoGroupModel().\ |
|
1636 | 1636 | _get_group_name_and_parent(repo_name) |
|
1637 | 1637 | |
|
1638 | 1638 | # variables replacement |
|
1639 | 1639 | named_vars = { |
|
1640 | 1640 | 'id': issue_id, |
|
1641 | 1641 | 'repo': repo_name, |
|
1642 | 1642 | 'repo_name': repo_name_cleaned, |
|
1643 | 1643 | 'group_name': parent_group_name |
|
1644 | 1644 | } |
|
1645 | 1645 | # named regex variables |
|
1646 | 1646 | named_vars.update(match_obj.groupdict()) |
|
1647 | 1647 | _url = string.Template(entry['url']).safe_substitute(**named_vars) |
|
1648 | 1648 | |
|
1649 | 1649 | data = { |
|
1650 | 1650 | 'pref': pref, |
|
1651 | 1651 | 'cls': 'issue-tracker-link', |
|
1652 | 1652 | 'url': _url, |
|
1653 | 1653 | 'id-repr': issue_id, |
|
1654 | 1654 | 'issue-prefix': entry['pref'], |
|
1655 | 1655 | 'serv': entry['url'], |
|
1656 | 1656 | } |
|
1657 | 1657 | if return_raw_data: |
|
1658 | 1658 | return { |
|
1659 | 1659 | 'id': issue_id, |
|
1660 | 1660 | 'url': _url |
|
1661 | 1661 | } |
|
1662 | 1662 | return tmpl % data |
|
1663 | 1663 | |
|
1664 | 1664 | |
|
1665 | 1665 | def process_patterns(text_string, repo_name, link_format='html'): |
|
1666 | 1666 | allowed_formats = ['html', 'rst', 'markdown'] |
|
1667 | 1667 | if link_format not in allowed_formats: |
|
1668 | 1668 | raise ValueError('Link format can be only one of:{} got {}'.format( |
|
1669 | 1669 | allowed_formats, link_format)) |
|
1670 | 1670 | |
|
1671 | 1671 | repo = None |
|
1672 | 1672 | if repo_name: |
|
1673 | 1673 | # Retrieving repo_name to avoid invalid repo_name to explode on |
|
1674 | 1674 | # IssueTrackerSettingsModel but still passing invalid name further down |
|
1675 | 1675 | repo = Repository.get_by_repo_name(repo_name, cache=True) |
|
1676 | 1676 | |
|
1677 | 1677 | settings_model = IssueTrackerSettingsModel(repo=repo) |
|
1678 | 1678 | active_entries = settings_model.get_settings(cache=True) |
|
1679 | 1679 | |
|
1680 | 1680 | issues_data = [] |
|
1681 | 1681 | newtext = text_string |
|
1682 | 1682 | |
|
1683 | 1683 | for uid, entry in active_entries.items(): |
|
1684 | 1684 | log.debug('found issue tracker entry with uid %s' % (uid,)) |
|
1685 | 1685 | |
|
1686 | 1686 | if not (entry['pat'] and entry['url']): |
|
1687 | 1687 | log.debug('skipping due to missing data') |
|
1688 | 1688 | continue |
|
1689 | 1689 | |
|
1690 | 1690 | log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s' |
|
1691 | 1691 | % (uid, entry['pat'], entry['url'], entry['pref'])) |
|
1692 | 1692 | |
|
1693 | 1693 | try: |
|
1694 | 1694 | pattern = re.compile(r'%s' % entry['pat']) |
|
1695 | 1695 | except re.error: |
|
1696 | 1696 | log.exception( |
|
1697 | 1697 | 'issue tracker pattern: `%s` failed to compile', |
|
1698 | 1698 | entry['pat']) |
|
1699 | 1699 | continue |
|
1700 | 1700 | |
|
1701 | 1701 | data_func = partial( |
|
1702 | 1702 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1703 | 1703 | return_raw_data=True) |
|
1704 | 1704 | |
|
1705 | 1705 | for match_obj in pattern.finditer(text_string): |
|
1706 | 1706 | issues_data.append(data_func(match_obj)) |
|
1707 | 1707 | |
|
1708 | 1708 | url_func = partial( |
|
1709 | 1709 | _process_url_func, repo_name=repo_name, entry=entry, uid=uid, |
|
1710 | 1710 | link_format=link_format) |
|
1711 | 1711 | |
|
1712 | 1712 | newtext = pattern.sub(url_func, newtext) |
|
1713 | 1713 | log.debug('processed prefix:uid `%s`' % (uid,)) |
|
1714 | 1714 | |
|
1715 | 1715 | return newtext, issues_data |
|
1716 | 1716 | |
|
1717 | 1717 | |
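A sketch under an assumed issue-tracker entry (the pattern, url template and prefix below are hypothetical configuration, not defaults):

    # entry: pat='#(?P<issue_id>\d+)'  url='https://tracker/${repo}/issue/${id}'  pref='#'
    newtext, issues = process_patterns('fixes #42', 'my-repo')
    # newtext -> 'fixes <a class="issue-tracker-link"
    #              href="https://tracker/my-repo/issue/42">#42</a>'
    # issues  -> [{'id': '42', 'url': 'https://tracker/my-repo/issue/42'}]
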
|
1718 | 1718 | def urlify_commit_message(commit_text, repository=None): |
|
1719 | 1719 | """ |
|
1720 | 1720 | Parses the given text message and makes proper links.

1721 | 1721 | Issues are linked to the given issue-server; the rest become commit links
|
1722 | 1722 | |
|
1723 | 1723 | :param commit_text: |
|
1724 | 1724 | :param repository: |
|
1725 | 1725 | """ |
|
1726 | 1726 | from pylons import url # doh, we need to re-import url to mock it later |
|
1727 | 1727 | |
|
1728 | 1728 | def escaper(string): |
|
1729 | 1729 | return string.replace('<', '&lt;').replace('>', '&gt;')
|
1730 | 1730 | |
|
1731 | 1731 | newtext = escaper(commit_text) |
|
1732 | 1732 | |
|
1733 | 1733 | # extract http/https links and make them real urls |
|
1734 | 1734 | newtext = urlify_text(newtext, safe=False) |
|
1735 | 1735 | |
|
1736 | 1736 | # urlify commits - extract commit ids and make link out of them, if we have |
|
1737 | 1737 | # the scope of repository present. |
|
1738 | 1738 | if repository: |
|
1739 | 1739 | newtext = urlify_commits(newtext, repository) |
|
1740 | 1740 | |
|
1741 | 1741 | # process issue tracker patterns |
|
1742 | 1742 | newtext, issues = process_patterns(newtext, repository or '') |
|
1743 | 1743 | |
|
1744 | 1744 | return literal(newtext) |
|
1745 | 1745 | |
|
1746 | 1746 | |
|
1747 | 1747 | def render_binary(repo_name, file_obj): |
|
1748 | 1748 | """ |
|
1749 | 1749 | Choose how to render a binary file |
|
1750 | 1750 | """ |
|
1751 | 1751 | filename = file_obj.name |
|
1752 | 1752 | |
|
1753 | 1753 | # images |
|
1754 | 1754 | for ext in ['*.png', '*.jpg', '*.ico', '*.gif']: |
|
1755 | 1755 | if fnmatch.fnmatch(filename, pat=ext): |
|
1756 | 1756 | alt = filename |
|
1757 | 1757 | src = url('files_raw_home', repo_name=repo_name, |
|
1758 | 1758 | revision=file_obj.commit.raw_id, f_path=file_obj.path) |
|
1759 | 1759 | return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src)) |
|
1760 | 1760 | |
|
1761 | 1761 | |
|
1762 | 1762 | def renderer_from_filename(filename, exclude=None): |
|
1763 | 1763 | """ |
|
1764 | 1764 | Choose a renderer based on filename; this works only for text-based files
|
1765 | 1765 | """ |
|
1766 | 1766 | |
|
1767 | 1767 | # ipython |
|
1768 | 1768 | for ext in ['*.ipynb']: |
|
1769 | 1769 | if fnmatch.fnmatch(filename, pat=ext): |
|
1770 | 1770 | return 'jupyter' |
|
1771 | 1771 | |
|
1772 | 1772 | is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude) |
|
1773 | 1773 | if is_markup: |
|
1774 | 1774 | return is_markup |
|
1775 | 1775 | return None |
|
1776 | 1776 | |
|
1777 | 1777 | |
|
1778 | 1778 | def render(source, renderer='rst', mentions=False, relative_url=None, |
|
1779 | 1779 | repo_name=None): |
|
1780 | 1780 | |
|
1781 | 1781 | def maybe_convert_relative_links(html_source): |
|
1782 | 1782 | if relative_url: |
|
1783 | 1783 | return relative_links(html_source, relative_url) |
|
1784 | 1784 | return html_source |
|
1785 | 1785 | |
|
1786 | 1786 | if renderer == 'rst': |
|
1787 | 1787 | if repo_name: |
|
1788 | 1788 | # process patterns on comments if we pass in repo name |
|
1789 | 1789 | source, issues = process_patterns( |
|
1790 | 1790 | source, repo_name, link_format='rst') |
|
1791 | 1791 | |
|
1792 | 1792 | return literal( |
|
1793 | 1793 | '<div class="rst-block">%s</div>' % |
|
1794 | 1794 | maybe_convert_relative_links( |
|
1795 | 1795 | MarkupRenderer.rst(source, mentions=mentions))) |
|
1796 | 1796 | elif renderer == 'markdown': |
|
1797 | 1797 | if repo_name: |
|
1798 | 1798 | # process patterns on comments if we pass in repo name |
|
1799 | 1799 | source, issues = process_patterns( |
|
1800 | 1800 | source, repo_name, link_format='markdown') |
|
1801 | 1801 | |
|
1802 | 1802 | return literal( |
|
1803 | 1803 | '<div class="markdown-block">%s</div>' % |
|
1804 | 1804 | maybe_convert_relative_links( |
|
1805 | 1805 | MarkupRenderer.markdown(source, flavored=True, |
|
1806 | 1806 | mentions=mentions))) |
|
1807 | 1807 | elif renderer == 'jupyter': |
|
1808 | 1808 | return literal( |
|
1809 | 1809 | '<div class="ipynb">%s</div>' % |
|
1810 | 1810 | maybe_convert_relative_links( |
|
1811 | 1811 | MarkupRenderer.jupyter(source))) |
|
1812 | 1812 | |
|
1813 | 1813 | # None means just show the file-source |
|
1814 | 1814 | return None |
|
1815 | 1815 | |
|
1816 | 1816 | |
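A usage sketch (repo name made up): passing `repo_name` expands any matching issue-tracker patterns in the renderer's own link format before the markup is rendered:

    render('fixes #42', renderer='markdown', repo_name='my-repo')
    # -> literal('<div class="markdown-block">...</div>')
    # an unknown/None renderer returns None, i.e. show the raw file source
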
|
1817 | 1817 | def commit_status(repo, commit_id): |
|
1818 | 1818 | return ChangesetStatusModel().get_status(repo, commit_id) |
|
1819 | 1819 | |
|
1820 | 1820 | |
|
1821 | 1821 | def commit_status_lbl(commit_status): |
|
1822 | 1822 | return dict(ChangesetStatus.STATUSES).get(commit_status) |
|
1823 | 1823 | |
|
1824 | 1824 | |
|
1825 | 1825 | def commit_time(repo_name, commit_id): |
|
1826 | 1826 | repo = Repository.get_by_repo_name(repo_name) |
|
1827 | 1827 | commit = repo.get_commit(commit_id=commit_id) |
|
1828 | 1828 | return commit.date |
|
1829 | 1829 | |
|
1830 | 1830 | |
|
1831 | 1831 | def get_permission_name(key): |
|
1832 | 1832 | return dict(Permission.PERMS).get(key) |
|
1833 | 1833 | |
|
1834 | 1834 | |
|
1835 | 1835 | def journal_filter_help(): |
|
1836 | 1836 | return _( |
|
1837 | 1837 | 'Example filter terms:\n' + |
|
1838 | 1838 | ' repository:vcs\n' + |
|
1839 | 1839 | ' username:marcin\n' + |
|
1840 | 1840 | ' action:*push*\n' + |
|
1841 | 1841 | ' ip:127.0.0.1\n' + |
|
1842 | 1842 | ' date:20120101\n' + |
|
1843 | 1843 | ' date:[20120101100000 TO 20120102]\n' + |
|
1844 | 1844 | '\n' + |
|
1845 | 1845 | 'Generate wildcards using \'*\' character:\n' + |
|
1846 | 1846 | ' "repository:vcs*" - search everything starting with \'vcs\'\n' + |
|
1847 | 1847 | ' "repository:*vcs*" - search for repository containing \'vcs\'\n' + |
|
1848 | 1848 | '\n' + |
|
1849 | 1849 | 'Optional AND / OR operators in queries\n' + |
|
1850 | 1850 | ' "repository:vcs OR repository:test"\n' + |
|
1851 | 1851 | ' "username:test AND repository:test*"\n' |
|
1852 | 1852 | ) |
|
1853 | 1853 | |
|
1854 | 1854 | |
|
1855 | 1855 | def search_filter_help(searcher): |
|
1856 | 1856 | |
|
1857 | 1857 | terms = '' |
|
1858 | 1858 | return _( |
|
1859 | 1859 | 'Example filter terms for `{searcher}` search:\n' + |
|
1860 | 1860 | '{terms}\n' + |
|
1861 | 1861 | 'Generate wildcards using \'*\' character:\n' + |
|
1862 | 1862 | ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' + |
|
1863 | 1863 | ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' + |
|
1864 | 1864 | '\n' + |
|
1865 | 1865 | 'Optional AND / OR operators in queries\n' + |
|
1866 | 1866 | ' "repo_name:vcs OR repo_name:test"\n' + |
|
1867 | 1867 | ' "owner:test AND repo_name:test*"\n' + |
|
1868 | 1868 | 'More: {search_doc}' |
|
1869 | 1869 | ).format(searcher=searcher.name, |
|
1870 | 1870 | terms=terms, search_doc=searcher.query_lang_doc) |
|
1871 | 1871 | |
|
1872 | 1872 | |
|
1873 | 1873 | def not_mapped_error(repo_name): |
|
1874 | 1874 | flash(_('%s repository is not mapped to db, perhaps'

1875 | 1875 | ' it was created or renamed from the filesystem;'

1876 | 1876 | ' please run the application again'

1877 | 1877 | ' in order to rescan repositories') % repo_name, category='error')
|
1878 | 1878 | |
|
1879 | 1879 | |
|
1880 | 1880 | def ip_range(ip_addr): |
|
1881 | 1881 | from rhodecode.model.db import UserIpMap |
|
1882 | 1882 | s, e = UserIpMap._get_ip_range(ip_addr) |
|
1883 | 1883 | return '%s - %s' % (s, e) |
|
1884 | 1884 | |
|
1885 | 1885 | |
|
1886 | 1886 | def form(url, method='post', needs_csrf_token=True, **attrs): |
|
1887 | 1887 | """Wrapper around webhelpers.tags.form to prevent CSRF attacks.""" |
|
1888 | 1888 | if method.lower() != 'get' and needs_csrf_token: |
|
1889 | 1889 | raise Exception( |
|
1890 | 1890 | 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' + |
|
1891 | 1891 | 'CSRF token. If the endpoint does not require such token you can ' + |
|
1892 | 1892 | 'explicitly set the parameter needs_csrf_token to false.') |
|
1893 | 1893 | |
|
1894 | 1894 | return wh_form(url, method=method, **attrs) |
|
1895 | 1895 | |
|
1896 | 1896 | |
|
1897 | 1897 | def secure_form(url, method="POST", multipart=False, **attrs): |
|
1898 | 1898 | """Start a form tag that points the action to an url. This |
|
1899 | 1899 | form tag will also include the hidden field containing |
|
1900 | 1900 | the auth token. |
|
1901 | 1901 | |
|
1902 | 1902 | The url options should be given either as a string, or as a |
|
1903 | 1903 | ``url()`` function. The method for the form defaults to POST. |
|
1904 | 1904 | |
|
1905 | 1905 | Options: |
|
1906 | 1906 | |
|
1907 | 1907 | ``multipart`` |
|
1908 | 1908 | If set to True, the enctype is set to "multipart/form-data". |
|
1909 | 1909 | ``method`` |
|
1910 | 1910 | The method to use when submitting the form, usually either |
|
1911 | 1911 | "GET" or "POST". If "PUT", "DELETE", or another verb is used, a |
|
1912 | 1912 | hidden input with name _method is added to simulate the verb |
|
1913 | 1913 | over POST. |
|
1914 | 1914 | |
|
1915 | 1915 | """ |
|
1916 | 1916 | from webhelpers.pylonslib.secure_form import insecure_form |
|
1917 | 1917 | form = insecure_form(url, method, multipart, **attrs) |
|
1918 | 1918 | token = csrf_input() |
|
1919 | 1919 | return literal("%s\n%s" % (form, token)) |
|
1920 | 1920 | |
|
1921 | 1921 | def csrf_input(): |
|
1922 | 1922 | return literal( |
|
1923 | 1923 | '<input type="hidden" id="{}" name="{}" value="{}">'.format( |
|
1924 | 1924 | csrf_token_key, csrf_token_key, get_csrf_token())) |
|
1925 | 1925 | |
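Assuming `csrf_token_key` resolves to the conventional 'csrf_token' (an assumption; it is defined elsewhere in this module), the rendered field looks roughly like:

    <input type="hidden" id="csrf_token" name="csrf_token" value="<session token>">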
|
1926 | 1926 | def dropdownmenu(name, selected, options, enable_filter=False, **attrs): |
|
1927 | 1927 | select_html = select(name, selected, options, **attrs) |
|
1928 | 1928 | select2 = """ |
|
1929 | 1929 | <script> |
|
1930 | 1930 | $(document).ready(function() { |
|
1931 | 1931 | $('#%s').select2({ |
|
1932 | 1932 | containerCssClass: 'drop-menu', |
|
1933 | 1933 | dropdownCssClass: 'drop-menu-dropdown', |
|
1934 | 1934 | dropdownAutoWidth: true%s |
|
1935 | 1935 | }); |
|
1936 | 1936 | }); |
|
1937 | 1937 | </script> |
|
1938 | 1938 | """ |
|
1939 | 1939 | filter_option = """, |
|
1940 | 1940 | minimumResultsForSearch: -1 |
|
1941 | 1941 | """ |
|
1942 | 1942 | input_id = attrs.get('id') or name |
|
1943 | 1943 | filter_enabled = "" if enable_filter else filter_option |
|
1944 | 1944 | select_script = literal(select2 % (input_id, filter_enabled)) |
|
1945 | 1945 | |
|
1946 | 1946 | return literal(select_html + select_script) |
|
1947 | 1947 | |
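Typical usage, as a sketch; `select` comes from webhelpers and options are (value, label) pairs (names illustrative):

    dropdownmenu('diffmode', 'sideside',
                 [('sideside', 'Side by Side'), ('unified', 'Unified')])

This renders the <select> plus a select2 init script; note that `enable_filter=True` keeps select2's search box, while the default appends `minimumResultsForSearch: -1`, which hides it.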
|
1948 | 1948 | |
|
1949 | 1949 | def get_visual_attr(tmpl_context_var, attr_name): |
|
1950 | 1950 | """ |
|
1951 | 1951 | A safe way to get a variable from visual variable of template context |
|
1952 | 1952 | |
|
1953 | 1953 | :param tmpl_context_var: instance of tmpl_context, usually present as `c` |
|
1954 | 1954 | :param attr_name: name of the attribute we fetch from the c.visual |
|
1955 | 1955 | """ |
|
1956 | 1956 | visual = getattr(tmpl_context_var, 'visual', None) |
|
1957 | 1957 | if not visual: |
|
1958 | 1958 | return |
|
1959 | 1959 | else: |
|
1960 | 1960 | return getattr(visual, attr_name, None) |
|
1961 | 1961 | |
|
1962 | 1962 | |
|
1963 | 1963 | def get_last_path_part(file_node): |
|
1964 | 1964 | if not file_node.path: |
|
1965 | 1965 | return u'' |
|
1966 | 1966 | |
|
1967 | 1967 | path = safe_unicode(file_node.path.split('/')[-1]) |
|
1968 | 1968 | return u'../' + path |
|
1969 | 1969 | |
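Following the code above (values illustrative):

    # get_last_path_part(file_node)  with file_node.path == 'docs/index.rst'
    # -> u'../index.rst';  an empty path yields u''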
|
1970 | 1970 | |
|
1971 | 1971 | def route_url(*args, **kwargs): |
|
1972 | 1972 | """ |
|
1973 | 1973 | Wrapper around Pyramid's `route_url` (fully qualified url) function. |

1974 | 1974 | It is used to generate URLs from within pylons views or templates. |

1975 | 1975 | This will be removed when the pyramid migration is finished. |
|
1976 | 1976 | """ |
|
1977 | 1977 | req = get_current_request() |
|
1978 | 1978 | return req.route_url(*args, **kwargs) |
|
1979 | 1979 | |
|
1980 | 1980 | |
|
1981 | 1981 | def route_path(*args, **kwargs): |
|
1982 | 1982 | """ |
|
1983 | 1983 | Wrapper around Pyramid's `route_path` function. It is used to generate |

1984 | 1984 | URLs from within pylons views or templates. This will be removed when |

1985 | 1985 | the pyramid migration is finished. |
|
1986 | 1986 | """ |
|
1987 | 1987 | req = get_current_request() |
|
1988 | 1988 | return req.route_path(*args, **kwargs) |
|
1989 | 1989 | |
|
1990 | 1990 | |
|
1991 | 1991 | def route_path_or_none(*args, **kwargs): |
|
1992 | 1992 | try: |
|
1993 | 1993 | return route_path(*args, **kwargs) |
|
1994 | 1994 | except KeyError: |
|
1995 | 1995 | return None |
|
1996 | 1996 | |
|
1997 | 1997 | |
|
1998 | 1998 | def static_url(*args, **kwds): |
|
1999 | 1999 | """ |
|
2000 | 2000 | Wrapper around Pyramid's `static_url` function. It is used to generate |

2001 | 2001 | URLs from within pylons views or templates. This will be removed when |

2002 | 2002 | the pyramid migration is finished. |
|
2003 | 2003 | """ |
|
2004 | 2004 | req = get_current_request() |
|
2005 | 2005 | return req.static_url(*args, **kwds) |
|
2006 | 2006 | |
|
2007 | 2007 | |
|
2008 | 2008 | def resource_path(*args, **kwds): |
|
2009 | 2009 | """ |
|
2010 | 2010 | Wrapper around Pyramid's `resource_path` function. It is used to generate |

2011 | 2011 | URLs from within pylons views or templates. This will be removed when |

2012 | 2012 | the pyramid migration is finished. |
|
2013 | 2013 | """ |
|
2014 | 2014 | req = get_current_request() |
|
2015 | 2015 | return req.resource_path(*args, **kwds) |
|
2016 | 2016 | |
|
2017 | 2017 | |
|
2018 | 2018 | def api_call_example(method, args): |
|
2019 | 2019 | """ |
|
2020 | 2020 | Generates an API call example via CURL |
|
2021 | 2021 | """ |
|
2022 | 2022 | args_json = json.dumps(OrderedDict([ |
|
2023 | 2023 | ('id', 1), |
|
2024 | 2024 | ('auth_token', 'SECRET'), |
|
2025 | 2025 | ('method', method), |
|
2026 | 2026 | ('args', args) |
|
2027 | 2027 | ])) |
|
2028 | 2028 | return literal( |
|
2029 | 2029 | "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'" |
|
2030 | 2030 | "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, " |
|
2031 | 2031 | "and needs to be of `api calls` role." |
|
2032 | 2032 | .format( |
|
2033 | 2033 | api_url=route_url('apiv2'), |
|
2034 | 2034 | token_url=route_url('my_account_auth_tokens'), |
|
2035 | 2035 | data=args_json)) |
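For illustration, `api_call_example('get_repo', {'repoid': 'vcs'})` would render along these lines (host and token are placeholders; the real URLs come from `route_url`):

    curl https://code.example.com/_admin/api -X POST -H 'content-type:text/plain' \
      --data-binary '{"id": 1, "auth_token": "SECRET", "method": "get_repo", "args": {"repoid": "vcs"}}'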
|
@@ -1,1551 +1,1551 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | pull request model for RhodeCode |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | from collections import namedtuple |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import datetime |
|
30 | 30 | import urllib |
|
31 | 31 | |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | from pylons.i18n.translation import lazy_ugettext |
|
34 | 34 | from pyramid.threadlocal import get_current_request |
|
35 | 35 | from sqlalchemy import or_ |
|
36 | 36 | |
|
37 | 37 | from rhodecode import events |
|
38 | 38 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
39 | 39 | from rhodecode.lib import audit_logger |
|
40 | 40 | from rhodecode.lib.compat import OrderedDict |
|
41 | 41 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
42 | 42 | from rhodecode.lib.markup_renderer import ( |
|
43 | 43 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
44 | 44 | from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe |
|
45 | 45 | from rhodecode.lib.vcs.backends.base import ( |
|
46 | 46 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason) |
|
47 | 47 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
48 | 48 | from rhodecode.lib.vcs.exceptions import ( |
|
49 | 49 | CommitDoesNotExistError, EmptyRepositoryError) |
|
50 | 50 | from rhodecode.model import BaseModel |
|
51 | 51 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
52 | 52 | from rhodecode.model.comment import CommentsModel |
|
53 | 53 | from rhodecode.model.db import ( |
|
54 | 54 | PullRequest, PullRequestReviewers, ChangesetStatus, |
|
55 | 55 | PullRequestVersion, ChangesetComment, Repository) |
|
56 | 56 | from rhodecode.model.meta import Session |
|
57 | 57 | from rhodecode.model.notification import NotificationModel, \ |
|
58 | 58 | EmailNotificationModel |
|
59 | 59 | from rhodecode.model.scm import ScmModel |
|
60 | 60 | from rhodecode.model.settings import VcsSettingsModel |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | log = logging.getLogger(__name__) |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | # Data structure to hold the response data when updating commits during a pull |
|
67 | 67 | # request update. |
|
68 | 68 | UpdateResponse = namedtuple('UpdateResponse', [ |
|
69 | 69 | 'executed', 'reason', 'new', 'old', 'changes', |
|
70 | 70 | 'source_changed', 'target_changed']) |
|
71 | 71 | |
|
72 | 72 | |
|
73 | 73 | class PullRequestModel(BaseModel): |
|
74 | 74 | |
|
75 | 75 | cls = PullRequest |
|
76 | 76 | |
|
77 | 77 | DIFF_CONTEXT = 3 |
|
78 | 78 | |
|
79 | 79 | MERGE_STATUS_MESSAGES = { |
|
80 | 80 | MergeFailureReason.NONE: lazy_ugettext( |
|
81 | 81 | 'This pull request can be automatically merged.'), |
|
82 | 82 | MergeFailureReason.UNKNOWN: lazy_ugettext( |
|
83 | 83 | 'This pull request cannot be merged because of an unhandled' |
|
84 | 84 | ' exception.'), |
|
85 | 85 | MergeFailureReason.MERGE_FAILED: lazy_ugettext( |
|
86 | 86 | 'This pull request cannot be merged because of merge conflicts.'), |
|
87 | 87 | MergeFailureReason.PUSH_FAILED: lazy_ugettext( |
|
88 | 88 | 'This pull request could not be merged because push to target' |
|
89 | 89 | ' failed.'), |
|
90 | 90 | MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext( |
|
91 | 91 | 'This pull request cannot be merged because the target is not a' |
|
92 | 92 | ' head.'), |
|
93 | 93 | MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext( |
|
94 | 94 | 'This pull request cannot be merged because the source contains' |
|
95 | 95 | ' more branches than the target.'), |
|
96 | 96 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext( |
|
97 | 97 | 'This pull request cannot be merged because the target has' |
|
98 | 98 | ' multiple heads.'), |
|
99 | 99 | MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext( |
|
100 | 100 | 'This pull request cannot be merged because the target repository' |
|
101 | 101 | ' is locked.'), |
|
102 | 102 | MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext( |
|
103 | 103 | 'This pull request cannot be merged because the target or the ' |
|
104 | 104 | 'source reference is missing.'), |
|
105 | 105 | MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
106 | 106 | 'This pull request cannot be merged because the target ' |
|
107 | 107 | 'reference is missing.'), |
|
108 | 108 | MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
109 | 109 | 'This pull request cannot be merged because the source ' |
|
110 | 110 | 'reference is missing.'), |
|
111 | 111 | MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext( |
|
112 | 112 | 'This pull request cannot be merged because of conflicts related ' |
|
113 | 113 | 'to sub repositories.'), |
|
114 | 114 | } |
|
115 | 115 | |
|
116 | 116 | UPDATE_STATUS_MESSAGES = { |
|
117 | 117 | UpdateFailureReason.NONE: lazy_ugettext( |
|
118 | 118 | 'Pull request update successful.'), |
|
119 | 119 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
120 | 120 | 'Pull request update failed because of an unknown error.'), |
|
121 | 121 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
122 | 122 | 'No update needed because the source and target have not changed.'), |
|
123 | 123 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
124 | 124 | 'Pull request cannot be updated because the reference type is ' |
|
125 | 125 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
126 | 126 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
127 | 127 | 'This pull request cannot be updated because the target ' |
|
128 | 128 | 'reference is missing.'), |
|
129 | 129 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
130 | 130 | 'This pull request cannot be updated because the source ' |
|
131 | 131 | 'reference is missing.'), |
|
132 | 132 | } |
|
133 | 133 | |
|
134 | 134 | def __get_pull_request(self, pull_request): |
|
135 | 135 | return self._get_instance(( |
|
136 | 136 | PullRequest, PullRequestVersion), pull_request) |
|
137 | 137 | |
|
138 | 138 | def _check_perms(self, perms, pull_request, user, api=False): |
|
139 | 139 | if not api: |
|
140 | 140 | return h.HasRepoPermissionAny(*perms)( |
|
141 | 141 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
142 | 142 | else: |
|
143 | 143 | return h.HasRepoPermissionAnyApi(*perms)( |
|
144 | 144 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
145 | 145 | |
|
146 | 146 | def check_user_read(self, pull_request, user, api=False): |
|
147 | 147 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
148 | 148 | return self._check_perms(_perms, pull_request, user, api) |
|
149 | 149 | |
|
150 | 150 | def check_user_merge(self, pull_request, user, api=False): |
|
151 | 151 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
152 | 152 | return self._check_perms(_perms, pull_request, user, api) |
|
153 | 153 | |
|
154 | 154 | def check_user_update(self, pull_request, user, api=False): |
|
155 | 155 | owner = user.user_id == pull_request.user_id |
|
156 | 156 | return self.check_user_merge(pull_request, user, api) or owner |
|
157 | 157 | |
|
158 | 158 | def check_user_delete(self, pull_request, user): |
|
159 | 159 | owner = user.user_id == pull_request.user_id |
|
160 | 160 | _perms = ('repository.admin',) |
|
161 | 161 | return self._check_perms(_perms, pull_request, user) or owner |
|
162 | 162 | |
|
163 | 163 | def check_user_change_status(self, pull_request, user, api=False): |
|
164 | 164 | reviewer = user.user_id in [x.user_id for x in |
|
165 | 165 | pull_request.reviewers] |
|
166 | 166 | return self.check_user_update(pull_request, user, api) or reviewer |
|
167 | 167 | |
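The four checks above compose into a cascade; a condensed view, read off the code:

    # read:          repository.admin | repository.write | repository.read
    # merge:         repository.admin | repository.write | hg.admin
    # update:        merge rights, or being the pull request author
    # delete:        repository.admin, or being the author
    # change status: update rights, or being listed as a reviewer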
|
168 | 168 | def get(self, pull_request): |
|
169 | 169 | return self.__get_pull_request(pull_request) |
|
170 | 170 | |
|
171 | 171 | def _prepare_get_all_query(self, repo_name, source=False, statuses=None, |
|
172 | 172 | opened_by=None, order_by=None, |
|
173 | 173 | order_dir='desc'): |
|
174 | 174 | repo = None |
|
175 | 175 | if repo_name: |
|
176 | 176 | repo = self._get_repo(repo_name) |
|
177 | 177 | |
|
178 | 178 | q = PullRequest.query() |
|
179 | 179 | |
|
180 | 180 | # source or target |
|
181 | 181 | if repo and source: |
|
182 | 182 | q = q.filter(PullRequest.source_repo == repo) |
|
183 | 183 | elif repo: |
|
184 | 184 | q = q.filter(PullRequest.target_repo == repo) |
|
185 | 185 | |
|
186 | 186 | # closed,opened |
|
187 | 187 | if statuses: |
|
188 | 188 | q = q.filter(PullRequest.status.in_(statuses)) |
|
189 | 189 | |
|
190 | 190 | # opened by filter |
|
191 | 191 | if opened_by: |
|
192 | 192 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
193 | 193 | |
|
194 | 194 | if order_by: |
|
195 | 195 | order_map = { |
|
196 | 196 | 'name_raw': PullRequest.pull_request_id, |
|
197 | 197 | 'title': PullRequest.title, |
|
198 | 198 | 'updated_on_raw': PullRequest.updated_on, |
|
199 | 199 | 'target_repo': PullRequest.target_repo_id |
|
200 | 200 | } |
|
201 | 201 | if order_dir == 'asc': |
|
202 | 202 | q = q.order_by(order_map[order_by].asc()) |
|
203 | 203 | else: |
|
204 | 204 | q = q.order_by(order_map[order_by].desc()) |
|
205 | 205 | |
|
206 | 206 | return q |
|
207 | 207 | |
|
208 | 208 | def count_all(self, repo_name, source=False, statuses=None, |
|
209 | 209 | opened_by=None): |
|
210 | 210 | """ |
|
211 | 211 | Count the number of pull requests for a specific repository. |
|
212 | 212 | |
|
213 | 213 | :param repo_name: target or source repo |
|
214 | 214 | :param source: boolean flag to specify if repo_name refers to source |
|
215 | 215 | :param statuses: list of pull request statuses |
|
216 | 216 | :param opened_by: author user of the pull request |
|
217 | 217 | :returns: int number of pull requests |
|
218 | 218 | """ |
|
219 | 219 | q = self._prepare_get_all_query( |
|
220 | 220 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
221 | 221 | |
|
222 | 222 | return q.count() |
|
223 | 223 | |
|
224 | 224 | def get_all(self, repo_name, source=False, statuses=None, opened_by=None, |
|
225 | 225 | offset=0, length=None, order_by=None, order_dir='desc'): |
|
226 | 226 | """ |
|
227 | 227 | Get all pull requests for a specific repository. |
|
228 | 228 | |
|
229 | 229 | :param repo_name: target or source repo |
|
230 | 230 | :param source: boolean flag to specify if repo_name refers to source |
|
231 | 231 | :param statuses: list of pull request statuses |
|
232 | 232 | :param opened_by: author user of the pull request |
|
233 | 233 | :param offset: pagination offset |
|
234 | 234 | :param length: length of returned list |
|
235 | 235 | :param order_by: order of the returned list |
|
236 | 236 | :param order_dir: 'asc' or 'desc' ordering direction |
|
237 | 237 | :returns: list of pull requests |
|
238 | 238 | """ |
|
239 | 239 | q = self._prepare_get_all_query( |
|
240 | 240 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
241 | 241 | order_by=order_by, order_dir=order_dir) |
|
242 | 242 | |
|
243 | 243 | if length: |
|
244 | 244 | pull_requests = q.limit(length).offset(offset).all() |
|
245 | 245 | else: |
|
246 | 246 | pull_requests = q.all() |
|
247 | 247 | |
|
248 | 248 | return pull_requests |
|
249 | 249 | |
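A typical call, assuming status constants as defined on the `PullRequest` db model (e.g. `PullRequest.STATUS_NEW`) and an illustrative repo name:

    prs = PullRequestModel().get_all(
        'myproject/backend', statuses=[PullRequest.STATUS_NEW],
        offset=0, length=20, order_by='updated_on_raw', order_dir='desc')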
|
250 | 250 | def count_awaiting_review(self, repo_name, source=False, statuses=None, |
|
251 | 251 | opened_by=None): |
|
252 | 252 | """ |
|
253 | 253 | Count the number of pull requests for a specific repository that are |
|
254 | 254 | awaiting review. |
|
255 | 255 | |
|
256 | 256 | :param repo_name: target or source repo |
|
257 | 257 | :param source: boolean flag to specify if repo_name refers to source |
|
258 | 258 | :param statuses: list of pull request statuses |
|
259 | 259 | :param opened_by: author user of the pull request |
|
260 | 260 | :returns: int number of pull requests |
|
261 | 261 | """ |
|
262 | 262 | pull_requests = self.get_awaiting_review( |
|
263 | 263 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
264 | 264 | |
|
265 | 265 | return len(pull_requests) |
|
266 | 266 | |
|
267 | 267 | def get_awaiting_review(self, repo_name, source=False, statuses=None, |
|
268 | 268 | opened_by=None, offset=0, length=None, |
|
269 | 269 | order_by=None, order_dir='desc'): |
|
270 | 270 | """ |
|
271 | 271 | Get all pull requests for a specific repository that are awaiting |
|
272 | 272 | review. |
|
273 | 273 | |
|
274 | 274 | :param repo_name: target or source repo |
|
275 | 275 | :param source: boolean flag to specify if repo_name refers to source |
|
276 | 276 | :param statuses: list of pull request statuses |
|
277 | 277 | :param opened_by: author user of the pull request |
|
278 | 278 | :param offset: pagination offset |
|
279 | 279 | :param length: length of returned list |
|
280 | 280 | :param order_by: order of the returned list |
|
281 | 281 | :param order_dir: 'asc' or 'desc' ordering direction |
|
282 | 282 | :returns: list of pull requests |
|
283 | 283 | """ |
|
284 | 284 | pull_requests = self.get_all( |
|
285 | 285 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
286 | 286 | order_by=order_by, order_dir=order_dir) |
|
287 | 287 | |
|
288 | 288 | _filtered_pull_requests = [] |
|
289 | 289 | for pr in pull_requests: |
|
290 | 290 | status = pr.calculated_review_status() |
|
291 | 291 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
292 | 292 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
293 | 293 | _filtered_pull_requests.append(pr) |
|
294 | 294 | if length: |
|
295 | 295 | return _filtered_pull_requests[offset:offset+length] |
|
296 | 296 | else: |
|
297 | 297 | return _filtered_pull_requests |
|
298 | 298 | |
|
299 | 299 | def count_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
300 | 300 | opened_by=None, user_id=None): |
|
301 | 301 | """ |
|
302 | 302 | Count the number of pull requests for a specific repository that are |
|
303 | 303 | awaiting review from a specific user. |
|
304 | 304 | |
|
305 | 305 | :param repo_name: target or source repo |
|
306 | 306 | :param source: boolean flag to specify if repo_name refers to source |
|
307 | 307 | :param statuses: list of pull request statuses |
|
308 | 308 | :param opened_by: author user of the pull request |
|
309 | 309 | :param user_id: reviewer user of the pull request |
|
310 | 310 | :returns: int number of pull requests |
|
311 | 311 | """ |
|
312 | 312 | pull_requests = self.get_awaiting_my_review( |
|
313 | 313 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
314 | 314 | user_id=user_id) |
|
315 | 315 | |
|
316 | 316 | return len(pull_requests) |
|
317 | 317 | |
|
318 | 318 | def get_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
319 | 319 | opened_by=None, user_id=None, offset=0, |
|
320 | 320 | length=None, order_by=None, order_dir='desc'): |
|
321 | 321 | """ |
|
322 | 322 | Get all pull requests for a specific repository that are awaiting |
|
323 | 323 | review from a specific user. |
|
324 | 324 | |
|
325 | 325 | :param repo_name: target or source repo |
|
326 | 326 | :param source: boolean flag to specify if repo_name refers to source |
|
327 | 327 | :param statuses: list of pull request statuses |
|
328 | 328 | :param opened_by: author user of the pull request |
|
329 | 329 | :param user_id: reviewer user of the pull request |
|
330 | 330 | :param offset: pagination offset |
|
331 | 331 | :param length: length of returned list |
|
332 | 332 | :param order_by: order of the returned list |
|
333 | 333 | :param order_dir: 'asc' or 'desc' ordering direction |
|
334 | 334 | :returns: list of pull requests |
|
335 | 335 | """ |
|
336 | 336 | pull_requests = self.get_all( |
|
337 | 337 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
338 | 338 | order_by=order_by, order_dir=order_dir) |
|
339 | 339 | |
|
340 | 340 | _my = PullRequestModel().get_not_reviewed(user_id) |
|
341 | 341 | my_participation = [] |
|
342 | 342 | for pr in pull_requests: |
|
343 | 343 | if pr in _my: |
|
344 | 344 | my_participation.append(pr) |
|
345 | 345 | _filtered_pull_requests = my_participation |
|
346 | 346 | if length: |
|
347 | 347 | return _filtered_pull_requests[offset:offset+length] |
|
348 | 348 | else: |
|
349 | 349 | return _filtered_pull_requests |
|
350 | 350 | |
|
351 | 351 | def get_not_reviewed(self, user_id): |
|
352 | 352 | return [ |
|
353 | 353 | x.pull_request for x in PullRequestReviewers.query().filter( |
|
354 | 354 | PullRequestReviewers.user_id == user_id).all() |
|
355 | 355 | ] |
|
356 | 356 | |
|
357 | 357 | def _prepare_participating_query(self, user_id=None, statuses=None, |
|
358 | 358 | order_by=None, order_dir='desc'): |
|
359 | 359 | q = PullRequest.query() |
|
360 | 360 | if user_id: |
|
361 | 361 | reviewers_subquery = Session().query( |
|
362 | 362 | PullRequestReviewers.pull_request_id).filter( |
|
363 | 363 | PullRequestReviewers.user_id == user_id).subquery() |
|
364 | 364 | user_filter = or_( |
|
365 | 365 | PullRequest.user_id == user_id, |
|
366 | 366 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
367 | 367 | ) |
|
368 | 368 | q = PullRequest.query().filter(user_filter) |
|
369 | 369 | |
|
370 | 370 | # closed,opened |
|
371 | 371 | if statuses: |
|
372 | 372 | q = q.filter(PullRequest.status.in_(statuses)) |
|
373 | 373 | |
|
374 | 374 | if order_by: |
|
375 | 375 | order_map = { |
|
376 | 376 | 'name_raw': PullRequest.pull_request_id, |
|
377 | 377 | 'title': PullRequest.title, |
|
378 | 378 | 'updated_on_raw': PullRequest.updated_on, |
|
379 | 379 | 'target_repo': PullRequest.target_repo_id |
|
380 | 380 | } |
|
381 | 381 | if order_dir == 'asc': |
|
382 | 382 | q = q.order_by(order_map[order_by].asc()) |
|
383 | 383 | else: |
|
384 | 384 | q = q.order_by(order_map[order_by].desc()) |
|
385 | 385 | |
|
386 | 386 | return q |
|
387 | 387 | |
|
388 | 388 | def count_im_participating_in(self, user_id=None, statuses=None): |
|
389 | 389 | q = self._prepare_participating_query(user_id, statuses=statuses) |
|
390 | 390 | return q.count() |
|
391 | 391 | |
|
392 | 392 | def get_im_participating_in( |
|
393 | 393 | self, user_id=None, statuses=None, offset=0, |
|
394 | 394 | length=None, order_by=None, order_dir='desc'): |
|
395 | 395 | """ |
|
396 | 396 | Get all pull requests that I'm participating in, or I have opened |
|
397 | 397 | """ |
|
398 | 398 | |
|
399 | 399 | q = self._prepare_participating_query( |
|
400 | 400 | user_id, statuses=statuses, order_by=order_by, |
|
401 | 401 | order_dir=order_dir) |
|
402 | 402 | |
|
403 | 403 | if length: |
|
404 | 404 | pull_requests = q.limit(length).offset(offset).all() |
|
405 | 405 | else: |
|
406 | 406 | pull_requests = q.all() |
|
407 | 407 | |
|
408 | 408 | return pull_requests |
|
409 | 409 | |
|
410 | 410 | def get_versions(self, pull_request): |
|
411 | 411 | """ |
|
412 | 412 | returns versions of the pull request sorted by ID ascending |
|
413 | 413 | """ |
|
414 | 414 | return PullRequestVersion.query()\ |
|
415 | 415 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
416 | 416 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
417 | 417 | .all() |
|
418 | 418 | |
|
419 | 419 | def create(self, created_by, source_repo, source_ref, target_repo, |
|
420 | 420 | target_ref, revisions, reviewers, title, description=None, |
|
421 | 421 | reviewer_data=None): |
|
422 | 422 | |
|
423 | 423 | created_by_user = self._get_user(created_by) |
|
424 | 424 | source_repo = self._get_repo(source_repo) |
|
425 | 425 | target_repo = self._get_repo(target_repo) |
|
426 | 426 | |
|
427 | 427 | pull_request = PullRequest() |
|
428 | 428 | pull_request.source_repo = source_repo |
|
429 | 429 | pull_request.source_ref = source_ref |
|
430 | 430 | pull_request.target_repo = target_repo |
|
431 | 431 | pull_request.target_ref = target_ref |
|
432 | 432 | pull_request.revisions = revisions |
|
433 | 433 | pull_request.title = title |
|
434 | 434 | pull_request.description = description |
|
435 | 435 | pull_request.author = created_by_user |
|
436 | 436 | pull_request.reviewer_data = reviewer_data |
|
437 | 437 | |
|
438 | 438 | Session().add(pull_request) |
|
439 | 439 | Session().flush() |
|
440 | 440 | |
|
441 | 441 | reviewer_ids = set() |
|
442 | 442 | # members / reviewers |
|
443 | 443 | for reviewer_object in reviewers: |
|
444 | 444 | user_id, reasons, mandatory = reviewer_object |
|
445 | 445 | user = self._get_user(user_id) |
|
446 | 446 | |
|
447 | 447 | # skip duplicates |
|
448 | 448 | if user.user_id in reviewer_ids: |
|
449 | 449 | continue |
|
450 | 450 | |
|
451 | 451 | reviewer_ids.add(user.user_id) |
|
452 | 452 | |
|
453 | 453 | reviewer = PullRequestReviewers() |
|
454 | 454 | reviewer.user = user |
|
455 | 455 | reviewer.pull_request = pull_request |
|
456 | 456 | reviewer.reasons = reasons |
|
457 | 457 | reviewer.mandatory = mandatory |
|
458 | 458 | Session().add(reviewer) |
|
459 | 459 | |
|
460 | 460 | # Set approval status to "Under Review" for all commits which are |
|
461 | 461 | # part of this pull request. |
|
462 | 462 | ChangesetStatusModel().set_status( |
|
463 | 463 | repo=target_repo, |
|
464 | 464 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
465 | 465 | user=created_by_user, |
|
466 | 466 | pull_request=pull_request |
|
467 | 467 | ) |
|
468 | 468 | |
|
469 | 469 | self.notify_reviewers(pull_request, reviewer_ids) |
|
470 | 470 | self._trigger_pull_request_hook( |
|
471 | 471 | pull_request, created_by_user, 'create') |
|
472 | 472 | |
|
473 | 473 | creation_data = pull_request.get_api_data(with_merge_state=False) |
|
474 | 474 | self._log_audit_action( |
|
475 | 475 | 'repo.pull_request.create', {'data': creation_data}, |
|
476 | 476 | created_by_user, pull_request) |
|
477 | 477 | |
|
478 | 478 | return pull_request |
|
479 | 479 | |
|
480 | 480 | def _trigger_pull_request_hook(self, pull_request, user, action): |
|
481 | 481 | pull_request = self.__get_pull_request(pull_request) |
|
482 | 482 | target_scm = pull_request.target_repo.scm_instance() |
|
483 | 483 | if action == 'create': |
|
484 | 484 | trigger_hook = hooks_utils.trigger_log_create_pull_request_hook |
|
485 | 485 | elif action == 'merge': |
|
486 | 486 | trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook |
|
487 | 487 | elif action == 'close': |
|
488 | 488 | trigger_hook = hooks_utils.trigger_log_close_pull_request_hook |
|
489 | 489 | elif action == 'review_status_change': |
|
490 | 490 | trigger_hook = hooks_utils.trigger_log_review_pull_request_hook |
|
491 | 491 | elif action == 'update': |
|
492 | 492 | trigger_hook = hooks_utils.trigger_log_update_pull_request_hook |
|
493 | 493 | else: |
|
494 | 494 | return |
|
495 | 495 | |
|
496 | 496 | trigger_hook( |
|
497 | 497 | username=user.username, |
|
498 | 498 | repo_name=pull_request.target_repo.repo_name, |
|
499 | 499 | repo_alias=target_scm.alias, |
|
500 | 500 | pull_request=pull_request) |
|
501 | 501 | |
|
502 | 502 | def _get_commit_ids(self, pull_request): |
|
503 | 503 | """ |
|
504 | 504 | Return the commit ids of the merged pull request. |
|
505 | 505 | |
|
506 | 506 | This method does not yet deal correctly with the lack of autoupdates |

507 | 507 | nor with implicit target updates. |

508 | 508 | For example: if a commit in the source repo is already in the target, it |

509 | 509 | will be reported anyway. |
|
510 | 510 | """ |
|
511 | 511 | merge_rev = pull_request.merge_rev |
|
512 | 512 | if merge_rev is None: |
|
513 | 513 | raise ValueError('This pull request was not merged yet') |
|
514 | 514 | |
|
515 | 515 | commit_ids = list(pull_request.revisions) |
|
516 | 516 | if merge_rev not in commit_ids: |
|
517 | 517 | commit_ids.append(merge_rev) |
|
518 | 518 | |
|
519 | 519 | return commit_ids |
|
520 | 520 | |
|
521 | 521 | def merge(self, pull_request, user, extras): |
|
522 | 522 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
523 | 523 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
524 | 524 | if merge_state.executed: |
|
525 | 525 | log.debug( |
|
526 | 526 | "Merge was successful, updating the pull request comments.") |
|
527 | 527 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
528 | 528 | |
|
529 | 529 | self._log_audit_action( |
|
530 | 530 | 'repo.pull_request.merge', |
|
531 | 531 | {'merge_state': merge_state.__dict__}, |
|
532 | 532 | user, pull_request) |
|
533 | 533 | |
|
534 | 534 | else: |
|
535 | 535 | log.warn("Merge failed, not updating the pull request.") |
|
536 | 536 | return merge_state |
|
537 | 537 | |
|
538 | 538 | def _merge_pull_request(self, pull_request, user, extras): |
|
539 | 539 | target_vcs = pull_request.target_repo.scm_instance() |
|
540 | 540 | source_vcs = pull_request.source_repo.scm_instance() |
|
541 | 541 | target_ref = self._refresh_reference( |
|
542 | 542 | pull_request.target_ref_parts, target_vcs) |
|
543 | 543 | |
|
544 | 544 | message = _( |
|
545 | 545 | 'Merge pull request #%(pr_id)s from ' |
|
546 | 546 | '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % { |
|
547 | 547 | 'pr_id': pull_request.pull_request_id, |
|
548 | 548 | 'source_repo': source_vcs.name, |
|
549 | 549 | 'source_ref_name': pull_request.source_ref_parts.name, |
|
550 | 550 | 'pr_title': pull_request.title |
|
551 | 551 | } |
|
552 | 552 | |
|
553 | 553 | workspace_id = self._workspace_id(pull_request) |
|
554 | 554 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
555 | 555 | |
|
556 | 556 | callback_daemon, extras = prepare_callback_daemon( |
|
557 | 557 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
558 | 558 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
559 | 559 | |
|
560 | 560 | with callback_daemon: |
|
561 | 561 | # TODO: johbo: Implement a clean way to run a config_override |
|
562 | 562 | # for a single call. |
|
563 | 563 | target_vcs.config.set( |
|
564 | 564 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
565 | 565 | merge_state = target_vcs.merge( |
|
566 | 566 | target_ref, source_vcs, pull_request.source_ref_parts, |
|
567 | 567 | workspace_id, user_name=user.username, |
|
568 | 568 | user_email=user.email, message=message, use_rebase=use_rebase) |
|
569 | 569 | return merge_state |
|
570 | 570 | |
|
571 | 571 | def _comment_and_close_pr(self, pull_request, user, merge_state): |
|
572 | 572 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
573 | 573 | pull_request.updated_on = datetime.datetime.now() |
|
574 | 574 | |
|
575 | 575 | CommentsModel().create( |
|
576 | 576 | text=unicode(_('Pull request merged and closed')), |
|
577 | 577 | repo=pull_request.target_repo.repo_id, |
|
578 | 578 | user=user.user_id, |
|
579 | 579 | pull_request=pull_request.pull_request_id, |
|
580 | 580 | f_path=None, |
|
581 | 581 | line_no=None, |
|
582 | 582 | closing_pr=True |
|
583 | 583 | ) |
|
584 | 584 | |
|
585 | 585 | Session().add(pull_request) |
|
586 | 586 | Session().flush() |
|
587 | 587 | # TODO: paris: replace invalidation with less radical solution |
|
588 | 588 | ScmModel().mark_for_invalidation( |
|
589 | 589 | pull_request.target_repo.repo_name) |
|
590 | 590 | self._trigger_pull_request_hook(pull_request, user, 'merge') |
|
591 | 591 | |
|
592 | 592 | def has_valid_update_type(self, pull_request): |
|
593 | 593 | source_ref_type = pull_request.source_ref_parts.type |
|
594 | 594 | return source_ref_type in ['book', 'branch', 'tag'] |
|
595 | 595 | |
|
596 | 596 | def update_commits(self, pull_request): |
|
597 | 597 | """ |
|
598 | 598 | Get the updated list of commits for the pull request |
|
599 | 599 | and return the new pull request version and the list |
|
600 | 600 | of commits processed by this update action |
|
601 | 601 | """ |
|
602 | 602 | pull_request = self.__get_pull_request(pull_request) |
|
603 | 603 | source_ref_type = pull_request.source_ref_parts.type |
|
604 | 604 | source_ref_name = pull_request.source_ref_parts.name |
|
605 | 605 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
606 | 606 | |
|
607 | 607 | target_ref_type = pull_request.target_ref_parts.type |
|
608 | 608 | target_ref_name = pull_request.target_ref_parts.name |
|
609 | 609 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
610 | 610 | |
|
611 | 611 | if not self.has_valid_update_type(pull_request): |
|
612 | 612 | log.debug( |
|
613 | 613 | "Skipping update of pull request %s due to ref type: %s", |
|
614 | 614 | pull_request, source_ref_type) |
|
615 | 615 | return UpdateResponse( |
|
616 | 616 | executed=False, |
|
617 | 617 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
618 | 618 | old=pull_request, new=None, changes=None, |
|
619 | 619 | source_changed=False, target_changed=False) |
|
620 | 620 | |
|
621 | 621 | # source repo |
|
622 | 622 | source_repo = pull_request.source_repo.scm_instance() |
|
623 | 623 | try: |
|
624 | 624 | source_commit = source_repo.get_commit(commit_id=source_ref_name) |
|
625 | 625 | except CommitDoesNotExistError: |
|
626 | 626 | return UpdateResponse( |
|
627 | 627 | executed=False, |
|
628 | 628 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
629 | 629 | old=pull_request, new=None, changes=None, |
|
630 | 630 | source_changed=False, target_changed=False) |
|
631 | 631 | |
|
632 | 632 | source_changed = source_ref_id != source_commit.raw_id |
|
633 | 633 | |
|
634 | 634 | # target repo |
|
635 | 635 | target_repo = pull_request.target_repo.scm_instance() |
|
636 | 636 | try: |
|
637 | 637 | target_commit = target_repo.get_commit(commit_id=target_ref_name) |
|
638 | 638 | except CommitDoesNotExistError: |
|
639 | 639 | return UpdateResponse( |
|
640 | 640 | executed=False, |
|
641 | 641 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
642 | 642 | old=pull_request, new=None, changes=None, |
|
643 | 643 | source_changed=False, target_changed=False) |
|
644 | 644 | target_changed = target_ref_id != target_commit.raw_id |
|
645 | 645 | |
|
646 | 646 | if not (source_changed or target_changed): |
|
647 | 647 | log.debug("Nothing changed in pull request %s", pull_request) |
|
648 | 648 | return UpdateResponse( |
|
649 | 649 | executed=False, |
|
650 | 650 | reason=UpdateFailureReason.NO_CHANGE, |
|
651 | 651 | old=pull_request, new=None, changes=None, |
|
652 | 652 | source_changed=target_changed, target_changed=source_changed) |
|
653 | 653 | |
|
654 | 654 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
655 | 655 | log.debug('Updating pull request because of change in %s detected', |
|
656 | 656 | change_in_found) |
|
657 | 657 | |
|
658 | 658 | # Finally there is a need for an update, in case of source change |
|
659 | 659 | # we create a new version, else just an update |
|
660 | 660 | if source_changed: |
|
661 | 661 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
662 | 662 | self._link_comments_to_version(pull_request_version) |
|
663 | 663 | else: |
|
664 | 664 | try: |
|
665 | 665 | ver = pull_request.versions[-1] |
|
666 | 666 | except IndexError: |
|
667 | 667 | ver = None |
|
668 | 668 | |
|
669 | 669 | pull_request.pull_request_version_id = \ |
|
670 | 670 | ver.pull_request_version_id if ver else None |
|
671 | 671 | pull_request_version = pull_request |
|
672 | 672 | |
|
673 | 673 | try: |
|
674 | 674 | if target_ref_type in ('tag', 'branch', 'book'): |
|
675 | 675 | target_commit = target_repo.get_commit(target_ref_name) |
|
676 | 676 | else: |
|
677 | 677 | target_commit = target_repo.get_commit(target_ref_id) |
|
678 | 678 | except CommitDoesNotExistError: |
|
679 | 679 | return UpdateResponse( |
|
680 | 680 | executed=False, |
|
681 | 681 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
682 | 682 | old=pull_request, new=None, changes=None, |
|
683 | 683 | source_changed=source_changed, target_changed=target_changed) |
|
684 | 684 | |
|
685 | 685 | # re-compute commit ids |
|
686 | 686 | old_commit_ids = pull_request.revisions |
|
687 | 687 | pre_load = ["author", "branch", "date", "message"] |
|
688 | 688 | commit_ranges = target_repo.compare( |
|
689 | 689 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
690 | 690 | pre_load=pre_load) |
|
691 | 691 | |
|
692 | 692 | ancestor = target_repo.get_common_ancestor( |
|
693 | 693 | target_commit.raw_id, source_commit.raw_id, source_repo) |
|
694 | 694 | |
|
695 | 695 | pull_request.source_ref = '%s:%s:%s' % ( |
|
696 | 696 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
697 | 697 | pull_request.target_ref = '%s:%s:%s' % ( |
|
698 | 698 | target_ref_type, target_ref_name, ancestor) |
|
699 | 699 | |
|
700 | 700 | pull_request.revisions = [ |
|
701 | 701 | commit.raw_id for commit in reversed(commit_ranges)] |
|
702 | 702 | pull_request.updated_on = datetime.datetime.now() |
|
703 | 703 | Session().add(pull_request) |
|
704 | 704 | new_commit_ids = pull_request.revisions |
|
705 | 705 | |
|
706 | 706 | old_diff_data, new_diff_data = self._generate_update_diffs( |
|
707 | 707 | pull_request, pull_request_version) |
|
708 | 708 | |
|
709 | 709 | # calculate commit and file changes |
|
710 | 710 | changes = self._calculate_commit_id_changes( |
|
711 | 711 | old_commit_ids, new_commit_ids) |
|
712 | 712 | file_changes = self._calculate_file_changes( |
|
713 | 713 | old_diff_data, new_diff_data) |
|
714 | 714 | |
|
715 | 715 | # set comments as outdated if DIFFS changed |
|
716 | 716 | CommentsModel().outdate_comments( |
|
717 | 717 | pull_request, old_diff_data=old_diff_data, |
|
718 | 718 | new_diff_data=new_diff_data) |
|
719 | 719 | |
|
720 | 720 | commit_changes = (changes.added or changes.removed) |
|
721 | 721 | file_node_changes = ( |
|
722 | 722 | file_changes.added or file_changes.modified or file_changes.removed) |
|
723 | 723 | pr_has_changes = commit_changes or file_node_changes |
|
724 | 724 | |
|
725 | 725 | # Add an automatic comment to the pull request, in case |
|
726 | 726 | # anything has changed |
|
727 | 727 | if pr_has_changes: |
|
728 | 728 | update_comment = CommentsModel().create( |
|
729 | 729 | text=self._render_update_message(changes, file_changes), |
|
730 | 730 | repo=pull_request.target_repo, |
|
731 | 731 | user=pull_request.author, |
|
732 | 732 | pull_request=pull_request, |
|
733 | 733 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) |
|
734 | 734 | |
|
735 | 735 | # Update status to "Under Review" for added commits |
|
736 | 736 | for commit_id in changes.added: |
|
737 | 737 | ChangesetStatusModel().set_status( |
|
738 | 738 | repo=pull_request.source_repo, |
|
739 | 739 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
740 | 740 | comment=update_comment, |
|
741 | 741 | user=pull_request.author, |
|
742 | 742 | pull_request=pull_request, |
|
743 | 743 | revision=commit_id) |
|
744 | 744 | |
|
745 | 745 | log.debug( |
|
746 | 746 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' |
|
747 | 747 | 'removed_ids: %s', pull_request.pull_request_id, |
|
748 | 748 | changes.added, changes.common, changes.removed) |
|
749 | 749 | log.debug( |
|
750 | 750 | 'Updated pull request with the following file changes: %s', |
|
751 | 751 | file_changes) |
|
752 | 752 | |
|
753 | 753 | log.info( |
|
754 | 754 | "Updated pull request %s from commit %s to commit %s, " |
|
755 | 755 | "stored new version %s of this pull request.", |
|
756 | 756 | pull_request.pull_request_id, source_ref_id, |
|
757 | 757 | pull_request.source_ref_parts.commit_id, |
|
758 | 758 | pull_request_version.pull_request_version_id) |
|
759 | 759 | Session().commit() |
|
760 | 760 | self._trigger_pull_request_hook( |
|
761 | 761 | pull_request, pull_request.author, 'update') |
|
762 | 762 | |
|
763 | 763 | return UpdateResponse( |
|
764 | 764 | executed=True, reason=UpdateFailureReason.NONE, |
|
765 | 765 | old=pull_request, new=pull_request_version, changes=changes, |
|
766 | 766 | source_changed=source_changed, target_changed=target_changed) |
|
767 | 767 | |
|
768 | 768 | def _create_version_from_snapshot(self, pull_request): |
|
769 | 769 | version = PullRequestVersion() |
|
770 | 770 | version.title = pull_request.title |
|
771 | 771 | version.description = pull_request.description |
|
772 | 772 | version.status = pull_request.status |
|
773 | 773 | version.created_on = datetime.datetime.now() |
|
774 | 774 | version.updated_on = pull_request.updated_on |
|
775 | 775 | version.user_id = pull_request.user_id |
|
776 | 776 | version.source_repo = pull_request.source_repo |
|
777 | 777 | version.source_ref = pull_request.source_ref |
|
778 | 778 | version.target_repo = pull_request.target_repo |
|
779 | 779 | version.target_ref = pull_request.target_ref |
|
780 | 780 | |
|
781 | 781 | version._last_merge_source_rev = pull_request._last_merge_source_rev |
|
782 | 782 | version._last_merge_target_rev = pull_request._last_merge_target_rev |
|
783 | 783 | version._last_merge_status = pull_request._last_merge_status |
|
784 | 784 | version.shadow_merge_ref = pull_request.shadow_merge_ref |
|
785 | 785 | version.merge_rev = pull_request.merge_rev |
|
786 | 786 | version.reviewer_data = pull_request.reviewer_data |
|
787 | 787 | |
|
788 | 788 | version.revisions = pull_request.revisions |
|
789 | 789 | version.pull_request = pull_request |
|
790 | 790 | Session().add(version) |
|
791 | 791 | Session().flush() |
|
792 | 792 | |
|
793 | 793 | return version |
|
794 | 794 | |
|
795 | 795 | def _generate_update_diffs(self, pull_request, pull_request_version): |
|
796 | 796 | |
|
797 | 797 | diff_context = ( |
|
798 | 798 | self.DIFF_CONTEXT + |
|
799 | 799 | CommentsModel.needed_extra_diff_context()) |
|
800 | 800 | |
|
801 | 801 | source_repo = pull_request_version.source_repo |
|
802 | 802 | source_ref_id = pull_request_version.source_ref_parts.commit_id |
|
803 | 803 | target_ref_id = pull_request_version.target_ref_parts.commit_id |
|
804 | 804 | old_diff = self._get_diff_from_pr_or_version( |
|
805 | 805 | source_repo, source_ref_id, target_ref_id, context=diff_context) |
|
806 | 806 | |
|
807 | 807 | source_repo = pull_request.source_repo |
|
808 | 808 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
809 | 809 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
810 | 810 | |
|
811 | 811 | new_diff = self._get_diff_from_pr_or_version( |
|
812 | 812 | source_repo, source_ref_id, target_ref_id, context=diff_context) |
|
813 | 813 | |
|
814 | 814 | old_diff_data = diffs.DiffProcessor(old_diff) |
|
815 | 815 | old_diff_data.prepare() |
|
816 | 816 | new_diff_data = diffs.DiffProcessor(new_diff) |
|
817 | 817 | new_diff_data.prepare() |
|
818 | 818 | |
|
819 | 819 | return old_diff_data, new_diff_data |
|
820 | 820 | |
|
821 | 821 | def _link_comments_to_version(self, pull_request_version): |
|
822 | 822 | """ |
|
823 | 823 | Link all unlinked comments of this pull request to the given version. |
|
824 | 824 | |
|
825 | 825 | :param pull_request_version: The `PullRequestVersion` to which |
|
826 | 826 | the comments shall be linked. |
|
827 | 827 | |
|
828 | 828 | """ |
|
829 | 829 | pull_request = pull_request_version.pull_request |
|
830 | 830 | comments = ChangesetComment.query()\ |
|
831 | 831 | .filter( |
|
832 | 832 | # TODO: johbo: Should we query for the repo at all here? |
|
833 | 833 | # Pending decision on how comments of PRs are to be related |
|
834 | 834 | # to either the source repo, the target repo or no repo at all. |
|
835 | 835 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, |
|
836 | 836 | ChangesetComment.pull_request == pull_request, |
|
837 | 837 | ChangesetComment.pull_request_version == None)\ |
|
838 | 838 | .order_by(ChangesetComment.comment_id.asc()) |
|
839 | 839 | |
|
840 | 840 | # TODO: johbo: Find out why this breaks if it is done in a bulk |
|
841 | 841 | # operation. |
|
842 | 842 | for comment in comments: |
|
843 | 843 | comment.pull_request_version_id = ( |
|
844 | 844 | pull_request_version.pull_request_version_id) |
|
845 | 845 | Session().add(comment) |
|
846 | 846 | |
|
847 | 847 | def _calculate_commit_id_changes(self, old_ids, new_ids): |
|
848 | 848 | added = [x for x in new_ids if x not in old_ids] |
|
849 | 849 | common = [x for x in new_ids if x in old_ids] |
|
850 | 850 | removed = [x for x in old_ids if x not in new_ids] |
|
851 | 851 | total = new_ids |
|
852 | 852 | return ChangeTuple(added, common, removed, total) |
|
853 | 853 | |
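A worked example of the list arithmetic above (`ChangeTuple` is a namedtuple defined elsewhere in this module):

    old_ids = ['a1', 'b2', 'c3']
    new_ids = ['b2', 'c3', 'd4']
    # added   -> ['d4']         (in new, not in old)
    # common  -> ['b2', 'c3']   (in both)
    # removed -> ['a1']         (in old, not in new)
    # total   -> new_ids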
|
854 | 854 | def _calculate_file_changes(self, old_diff_data, new_diff_data): |
|
855 | 855 | |
|
856 | 856 | old_files = OrderedDict() |
|
857 | 857 | for diff_data in old_diff_data.parsed_diff: |
|
858 | 858 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) |
|
859 | 859 | |
|
860 | 860 | added_files = [] |
|
861 | 861 | modified_files = [] |
|
862 | 862 | removed_files = [] |
|
863 | 863 | for diff_data in new_diff_data.parsed_diff: |
|
864 | 864 | new_filename = diff_data['filename'] |
|
865 | 865 | new_hash = md5_safe(diff_data['raw_diff']) |
|
866 | 866 | |
|
867 | 867 | old_hash = old_files.get(new_filename) |
|
868 | 868 | if not old_hash: |
|
869 | 869 | # file is not present in old diff, means it's added |
|
870 | 870 | added_files.append(new_filename) |
|
871 | 871 | else: |
|
872 | 872 | if new_hash != old_hash: |
|
873 | 873 | modified_files.append(new_filename) |
|
874 | 874 | # now remove a file from old, since we have seen it already |
|
875 | 875 | del old_files[new_filename] |
|
876 | 876 | |
|
877 | 877 | # removed files are those present in old, but not in NEW; |

878 | 878 | # since we remove old files that are present in the new diff, any |

879 | 879 | # left-overs are the removed files |
|
880 | 880 | removed_files.extend(old_files.keys()) |
|
881 | 881 | |
|
882 | 882 | return FileChangeTuple(added_files, modified_files, removed_files) |
|
883 | 883 | |
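The same idea applied per file, as a worked example (hashes illustrative): each file's raw diff is md5-hashed, and comparing hashes classifies the file:

    # old diff hashes: {'setup.py': 'h1', 'README.rst': 'h2', 'legacy.py': 'h3'}
    # new diff:        setup.py -> 'h1'   (same hash: unchanged, dropped from old)
    #                  README.rst -> 'h9' (hash differs: modified)
    #                  docs/api.rst       (not in old: added)
    # left-over in old afterwards: legacy.py -> removed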
|
884 | 884 | def _render_update_message(self, changes, file_changes): |
|
885 | 885 | """ |
|
886 | 886 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), |

887 | 887 | so it always looks the same regardless of which default |

888 | 888 | renderer the system is using. |
|
889 | 889 | |
|
890 | 890 | :param changes: changes named tuple |
|
891 | 891 | :param file_changes: file changes named tuple |
|
892 | 892 | |
|
893 | 893 | """ |
|
894 | 894 | new_status = ChangesetStatus.get_status_lbl( |
|
895 | 895 | ChangesetStatus.STATUS_UNDER_REVIEW) |
|
896 | 896 | |
|
897 | 897 | changed_files = ( |
|
898 | 898 | file_changes.added + file_changes.modified + file_changes.removed) |
|
899 | 899 | |
|
900 | 900 | params = { |
|
901 | 901 | 'under_review_label': new_status, |
|
902 | 902 | 'added_commits': changes.added, |
|
903 | 903 | 'removed_commits': changes.removed, |
|
904 | 904 | 'changed_files': changed_files, |
|
905 | 905 | 'added_files': file_changes.added, |
|
906 | 906 | 'modified_files': file_changes.modified, |
|
907 | 907 | 'removed_files': file_changes.removed, |
|
908 | 908 | } |
|
909 | 909 | renderer = RstTemplateRenderer() |
|
910 | 910 | return renderer.render('pull_request_update.mako', **params) |
|
911 | 911 | |
|
912 | 912 | def edit(self, pull_request, title, description, user): |
|
913 | 913 | pull_request = self.__get_pull_request(pull_request) |
|
914 | 914 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
915 | 915 | if pull_request.is_closed(): |
|
916 | 916 | raise ValueError('This pull request is closed') |
|
917 | 917 | if title: |
|
918 | 918 | pull_request.title = title |
|
919 | 919 | pull_request.description = description |
|
920 | 920 | pull_request.updated_on = datetime.datetime.now() |
|
921 | 921 | Session().add(pull_request) |
|
922 | 922 | self._log_audit_action( |
|
923 | 923 | 'repo.pull_request.edit', {'old_data': old_data}, |
|
924 | 924 | user, pull_request) |
|
925 | 925 | |
|
926 | 926 | def update_reviewers(self, pull_request, reviewer_data, user): |
|
927 | 927 | """ |
|
928 | 928 | Update the reviewers in the pull request |
|
929 | 929 | |
|
930 | 930 | :param pull_request: the pr to update |
|
931 | 931 | :param reviewer_data: list of tuples |
|
932 | 932 | [(user, ['reason1', 'reason2'], mandatory_flag)] |
|
933 | 933 | """ |
|
934 | 934 | |
|
935 | 935 | reviewers = {} |
|
936 | 936 | for user_id, reasons, mandatory in reviewer_data: |
|
937 | 937 | if isinstance(user_id, (int, basestring)): |
|
938 | 938 | user_id = self._get_user(user_id).user_id |
|
939 | 939 | reviewers[user_id] = { |
|
940 | 940 | 'reasons': reasons, 'mandatory': mandatory} |
|
941 | 941 | |
|
942 | 942 | reviewers_ids = set(reviewers.keys()) |
|
943 | 943 | pull_request = self.__get_pull_request(pull_request) |
|
944 | 944 | current_reviewers = PullRequestReviewers.query()\ |
|
945 | 945 | .filter(PullRequestReviewers.pull_request == |
|
946 | 946 | pull_request).all() |
|
947 | 947 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) |
|
948 | 948 | |
|
949 | 949 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) |
|
950 | 950 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) |
|
951 | 951 | |
|
952 | 952 | log.debug("Adding %s reviewers", ids_to_add) |
|
953 | 953 | log.debug("Removing %s reviewers", ids_to_remove) |
|
954 | 954 | changed = False |
|
955 | 955 | for uid in ids_to_add: |
|
956 | 956 | changed = True |
|
957 | 957 | _usr = self._get_user(uid) |
|
958 | 958 | reviewer = PullRequestReviewers() |
|
959 | 959 | reviewer.user = _usr |
|
960 | 960 | reviewer.pull_request = pull_request |
|
961 | 961 | reviewer.reasons = reviewers[uid]['reasons'] |
|
962 | 962 | # NOTE(marcink): mandatory shouldn't be changed now |
|
963 | 963 | # reviewer.mandatory = reviewers[uid]['reasons'] |
|
964 | 964 | Session().add(reviewer) |
|
965 | 965 | self._log_audit_action( |
|
966 | 966 | 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()}, |
|
967 | 967 | user, pull_request) |
|
968 | 968 | |
|
969 | 969 | for uid in ids_to_remove: |
|
970 | 970 | changed = True |
|
971 | 971 | reviewers = PullRequestReviewers.query()\ |
|
972 | 972 | .filter(PullRequestReviewers.user_id == uid, |
|
973 | 973 | PullRequestReviewers.pull_request == pull_request)\ |
|
974 | 974 | .all() |
|
975 | 975 | # use .all() in case we accidentally added the same person twice |
|
976 | 976 | # this CAN happen due to the lack of DB checks |
|
977 | 977 | for obj in reviewers: |
|
978 | 978 | old_data = obj.get_dict() |
|
979 | 979 | Session().delete(obj) |
|
980 | 980 | self._log_audit_action( |
|
981 | 981 | 'repo.pull_request.reviewer.delete', |
|
982 | 982 | {'old_data': old_data}, user, pull_request) |
|
983 | 983 | |
|
984 | 984 | if changed: |
|
985 | 985 | pull_request.updated_on = datetime.datetime.now() |
|
986 | 986 | Session().add(pull_request) |
|
987 | 987 | |
|
988 | 988 | self.notify_reviewers(pull_request, ids_to_add) |
|
989 | 989 | return ids_to_add, ids_to_remove |
|
990 | 990 | |
|
991 | 991 | def get_url(self, pull_request, request=None, permalink=False): |
|
992 | 992 | if not request: |
|
993 | 993 | request = get_current_request() |
|
994 | 994 | |
|
995 | 995 | if permalink: |
|
996 | 996 | return request.route_url( |
|
997 | 997 | 'pull_requests_global', |
|
998 | 998 | pull_request_id=pull_request.pull_request_id,) |
|
999 | 999 | else: |
|
1000 | 1000 | return request.route_url('pullrequest_show', |
|
1001 | 1001 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
1002 | 1002 | pull_request_id=pull_request.pull_request_id,) |
|
1003 | 1003 | |
|
1004 | 1004 | def get_shadow_clone_url(self, pull_request): |
|
1005 | 1005 | """ |
|
1006 | 1006 | Returns qualified url pointing to the shadow repository. If this pull |
|
1007 | 1007 | request is closed there is no shadow repository and ``None`` will be |
|
1008 | 1008 | returned. |
|
1009 | 1009 | """ |
|
1010 | 1010 | if pull_request.is_closed(): |
|
1011 | 1011 | return None |
|
1012 | 1012 | else: |
|
1013 | 1013 | pr_url = urllib.unquote(self.get_url(pull_request)) |
|
1014 | 1014 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) |
|
1015 | 1015 | |
|
1016 | 1016 | def notify_reviewers(self, pull_request, reviewers_ids): |
|
1017 | 1017 | # notification to reviewers |
|
1018 | 1018 | if not reviewers_ids: |
|
1019 | 1019 | return |
|
1020 | 1020 | |
|
1021 | 1021 | pull_request_obj = pull_request |
|
1022 | 1022 | # get the current participants of this pull request |
|
1023 | 1023 | recipients = reviewers_ids |
|
1024 | 1024 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST |
|
1025 | 1025 | |
|
1026 | 1026 | pr_source_repo = pull_request_obj.source_repo |
|
1027 | 1027 | pr_target_repo = pull_request_obj.target_repo |
|
1028 | 1028 | |
|
1029 | 1029 | pr_url = h.route_url('pullrequest_show', |
|
1030 | 1030 | repo_name=pr_target_repo.repo_name, |
|
1031 | 1031 | pull_request_id=pull_request_obj.pull_request_id,) |
|
1032 | 1032 | |
|
1033 | 1033 | # set some variables for email notification |
|
1034 | 1034 | pr_target_repo_url = h.route_url( |
|
1035 | 1035 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1036 | 1036 | |
|
1037 | 1037 | pr_source_repo_url = h.route_url( |
|
1038 | 1038 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1039 | 1039 | |
|
1040 | 1040 | # pull request specifics |
|
1041 | 1041 | pull_request_commits = [ |
|
1042 | 1042 | (x.raw_id, x.message) |
|
1043 | 1043 | for x in map(pr_source_repo.get_commit, pull_request.revisions)] |
|
1044 | 1044 | |
|
1045 | 1045 | kwargs = { |
|
1046 | 1046 | 'user': pull_request.author, |
|
1047 | 1047 | 'pull_request': pull_request_obj, |
|
1048 | 1048 | 'pull_request_commits': pull_request_commits, |
|
1049 | 1049 | |
|
1050 | 1050 | 'pull_request_target_repo': pr_target_repo, |
|
1051 | 1051 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1052 | 1052 | |
|
1053 | 1053 | 'pull_request_source_repo': pr_source_repo, |
|
1054 | 1054 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1055 | 1055 | |
|
1056 | 1056 | 'pull_request_url': pr_url, |
|
1057 | 1057 | } |
|
1058 | 1058 | |
|
1059 | 1059 | # pre-generate the subject for notification itself |
|
1060 | 1060 | (subject, |
|
1061 | 1061 | _h, _e, # we don't care about those |
|
1062 | 1062 | body_plaintext) = EmailNotificationModel().render_email( |
|
1063 | 1063 | notification_type, **kwargs) |
|
1064 | 1064 | |
|
1065 | 1065 | # create notification objects, and emails |
|
1066 | 1066 | NotificationModel().create( |
|
1067 | 1067 | created_by=pull_request.author, |
|
1068 | 1068 | notification_subject=subject, |
|
1069 | 1069 | notification_body=body_plaintext, |
|
1070 | 1070 | notification_type=notification_type, |
|
1071 | 1071 | recipients=recipients, |
|
1072 | 1072 | email_kwargs=kwargs, |
|
1073 | 1073 | ) |
|
1074 | 1074 | |
|
1075 | 1075 | def delete(self, pull_request, user): |
|
1076 | 1076 | pull_request = self.__get_pull_request(pull_request) |
|
1077 | 1077 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
1078 | 1078 | self._cleanup_merge_workspace(pull_request) |
|
1079 | 1079 | self._log_audit_action( |
|
1080 | 1080 | 'repo.pull_request.delete', {'old_data': old_data}, |
|
1081 | 1081 | user, pull_request) |
|
1082 | 1082 | Session().delete(pull_request) |
|
1083 | 1083 | |
|
1084 | 1084 | def close_pull_request(self, pull_request, user): |
|
1085 | 1085 | pull_request = self.__get_pull_request(pull_request) |
|
1086 | 1086 | self._cleanup_merge_workspace(pull_request) |
|
1087 | 1087 | pull_request.status = PullRequest.STATUS_CLOSED |
|
1088 | 1088 | pull_request.updated_on = datetime.datetime.now() |
|
1089 | 1089 | Session().add(pull_request) |
|
1090 | 1090 | self._trigger_pull_request_hook( |
|
1091 | 1091 | pull_request, pull_request.author, 'close') |
|
1092 | 1092 | self._log_audit_action( |
|
1093 | 1093 | 'repo.pull_request.close', {}, user, pull_request) |
|
1094 | 1094 | |
|
1095 | 1095 | def close_pull_request_with_comment( |
|
1096 | 1096 | self, pull_request, user, repo, message=None): |
|
1097 | 1097 | |
|
1098 | 1098 | pull_request_review_status = pull_request.calculated_review_status() |
|
1099 | 1099 | |
|
1100 | 1100 | if pull_request_review_status == ChangesetStatus.STATUS_APPROVED: |
|
1101 | 1101 | # approved only if we have voting consent |
|
1102 | 1102 | status = ChangesetStatus.STATUS_APPROVED |
|
1103 | 1103 | else: |
|
1104 | 1104 | status = ChangesetStatus.STATUS_REJECTED |
|
1105 | 1105 | status_lbl = ChangesetStatus.get_status_lbl(status) |
|
1106 | 1106 | |
|
1107 | 1107 | default_message = ( |
|
1108 | 1108 | _('Closing with status change {transition_icon} {status}.') |
|
1109 | 1109 | ).format(transition_icon='>', status=status_lbl) |
|
1110 | 1110 | text = message or default_message |
|
1111 | 1111 | |
|
1112 | 1112 | # create a comment, and link it to new status |
|
1113 | 1113 | comment = CommentsModel().create( |
|
1114 | 1114 | text=text, |
|
1115 | 1115 | repo=repo.repo_id, |
|
1116 | 1116 | user=user.user_id, |
|
1117 | 1117 | pull_request=pull_request.pull_request_id, |
|
1118 | 1118 | status_change=status_lbl, |
|
1119 | 1119 | status_change_type=status, |
|
1120 | 1120 | closing_pr=True |
|
1121 | 1121 | ) |
|
1122 | 1122 | |
|
1123 | 1123 | # calculate old status before we change it |
|
1124 | 1124 | old_calculated_status = pull_request.calculated_review_status() |
|
1125 | 1125 | ChangesetStatusModel().set_status( |
|
1126 | 1126 | repo.repo_id, |
|
1127 | 1127 | status, |
|
1128 | 1128 | user.user_id, |
|
1129 | 1129 | comment=comment, |
|
1130 | 1130 | pull_request=pull_request.pull_request_id |
|
1131 | 1131 | ) |
|
1132 | 1132 | |
|
1133 | 1133 | Session().flush() |
|
1134 | 1134 | events.trigger(events.PullRequestCommentEvent(pull_request, comment)) |
|
1135 | 1135 | # we now calculate the status of pull request again, and based on that |
|
1136 | 1136 | # calculation trigger a status change. This can happen when a 

1137 | 1137 | # non-reviewer admin closes a pr: their vote doesn't change the 

1138 | 1138 | # status, while a reviewer's vote might change it. 
|
1139 | 1139 | calculated_status = pull_request.calculated_review_status() |
|
1140 | 1140 | if old_calculated_status != calculated_status: |
|
1141 | 1141 | self._trigger_pull_request_hook( |
|
1142 | 1142 | pull_request, user, 'review_status_change') |
|
1143 | 1143 | |
|
1144 | 1144 | # finally close the PR |
|
1145 | 1145 | PullRequestModel().close_pull_request( |
|
1146 | 1146 | pull_request.pull_request_id, user) |
|
1147 | 1147 | |
|
1148 | 1148 | return comment, status |
|
1149 | 1149 | |
|
1150 | 1150 | def merge_status(self, pull_request): |
|
1151 | 1151 | if not self._is_merge_enabled(pull_request): |
|
1152 | 1152 | return False, _('Server-side pull request merging is disabled.') |
|
1153 | 1153 | if pull_request.is_closed(): |
|
1154 | 1154 | return False, _('This pull request is closed.') |
|
1155 | 1155 | merge_possible, msg = self._check_repo_requirements( |
|
1156 | 1156 | target=pull_request.target_repo, source=pull_request.source_repo) |
|
1157 | 1157 | if not merge_possible: |
|
1158 | 1158 | return merge_possible, msg |
|
1159 | 1159 | |
|
1160 | 1160 | try: |
|
1161 | 1161 | resp = self._try_merge(pull_request) |
|
1162 | 1162 | log.debug("Merge response: %s", resp) |
|
1163 | 1163 | status = resp.possible, self.merge_status_message( |
|
1164 | 1164 | resp.failure_reason) |
|
1165 | 1165 | except NotImplementedError: |
|
1166 | 1166 | status = False, _('Pull request merging is not supported.') |
|
1167 | 1167 | |
|
1168 | 1168 | return status |
|
1169 | 1169 | |
|
1170 | 1170 | def _check_repo_requirements(self, target, source): |
|
1171 | 1171 | """ |
|
1172 | 1172 | Check if `target` and `source` have compatible requirements. |
|
1173 | 1173 | |
|
1174 | 1174 | Currently this is just checking for largefiles. |
|
1175 | 1175 | """ |
|
1176 | 1176 | target_has_largefiles = self._has_largefiles(target) |
|
1177 | 1177 | source_has_largefiles = self._has_largefiles(source) |
|
1178 | 1178 | merge_possible = True |
|
1179 | 1179 | message = u'' |
|
1180 | 1180 | |
|
1181 | 1181 | if target_has_largefiles != source_has_largefiles: |
|
1182 | 1182 | merge_possible = False |
|
1183 | 1183 | if source_has_largefiles: |
|
1184 | 1184 | message = _( |
|
1185 | 1185 | 'Target repository large files support is disabled.') |
|
1186 | 1186 | else: |
|
1187 | 1187 | message = _( |
|
1188 | 1188 | 'Source repository large files support is disabled.') |
|
1189 | 1189 | |
|
1190 | 1190 | return merge_possible, message |
|
1191 | 1191 | |
|
1192 | 1192 | def _has_largefiles(self, repo): |
|
1193 | 1193 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( |
|
1194 | 1194 | 'extensions', 'largefiles') |
|
1195 | 1195 | return largefiles_ui and largefiles_ui[0].active |
|
1196 | 1196 | |
|
1197 | 1197 | def _try_merge(self, pull_request): |
|
1198 | 1198 | """ |
|
1199 | 1199 | Try to merge the pull request and return the merge status. |
|
1200 | 1200 | """ |
|
1201 | 1201 | log.debug( |
|
1202 | 1202 | "Trying out if the pull request %s can be merged.", |
|
1203 | 1203 | pull_request.pull_request_id) |
|
1204 | 1204 | target_vcs = pull_request.target_repo.scm_instance() |
|
1205 | 1205 | |
|
1206 | 1206 | # Refresh the target reference. |
|
1207 | 1207 | try: |
|
1208 | 1208 | target_ref = self._refresh_reference( |
|
1209 | 1209 | pull_request.target_ref_parts, target_vcs) |
|
1210 | 1210 | except CommitDoesNotExistError: |
|
1211 | 1211 | merge_state = MergeResponse( |
|
1212 | 1212 | False, False, None, MergeFailureReason.MISSING_TARGET_REF) |
|
1213 | 1213 | return merge_state |
|
1214 | 1214 | |
|
1215 | 1215 | target_locked = pull_request.target_repo.locked |
|
1216 | 1216 | if target_locked and target_locked[0]: |
|
1217 | 1217 | log.debug("The target repository is locked.") |
|
1218 | 1218 | merge_state = MergeResponse( |
|
1219 | 1219 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED) |
|
1220 | 1220 | elif self._needs_merge_state_refresh(pull_request, target_ref): |
|
1221 | 1221 | log.debug("Refreshing the merge status of the repository.") |
|
1222 | 1222 | merge_state = self._refresh_merge_state( |
|
1223 | 1223 | pull_request, target_vcs, target_ref) |
|
1224 | 1224 | else: |
|
1225 | 1225 | possible = pull_request.\ |
|
1226 | 1226 | _last_merge_status == MergeFailureReason.NONE |
|
1227 | 1227 | merge_state = MergeResponse( |
|
1228 | 1228 | possible, False, None, pull_request._last_merge_status) |
|
1229 | 1229 | |
|
1230 | 1230 | return merge_state |
|
1231 | 1231 | |
|
1232 | 1232 | def _refresh_reference(self, reference, vcs_repository): |
|
1233 | 1233 | if reference.type in ('branch', 'book'): |
|
1234 | 1234 | name_or_id = reference.name |
|
1235 | 1235 | else: |
|
1236 | 1236 | name_or_id = reference.commit_id |
|
1237 | 1237 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1238 | 1238 | refreshed_reference = Reference( |
|
1239 | 1239 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1240 | 1240 | return refreshed_reference |
|
1241 | 1241 | |
|
1242 | 1242 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1243 | 1243 | return not( |
|
1244 | 1244 | pull_request.revisions and |
|
1245 | 1245 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1246 | 1246 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1247 | 1247 | |
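
The test above decides whether the cached dry-run merge state can be reused: it is fresh only while the newest source revision and the target tip both match what was recorded after the last dry-run merge. A standalone sketch of the same check, with illustrative names:

    def merge_state_is_fresh(revisions, last_source_rev,
                             target_commit_id, last_target_rev):
        # fresh only if revisions exist, the newest source revision is
        # unchanged, and the target reference has not moved since the
        # last dry-run merge
        return bool(
            revisions and
            revisions[0] == last_source_rev and
            target_commit_id == last_target_rev)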
|
1248 | 1248 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1249 | 1249 | workspace_id = self._workspace_id(pull_request) |
|
1250 | 1250 | source_vcs = pull_request.source_repo.scm_instance() |
|
1251 | 1251 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1252 | 1252 | merge_state = target_vcs.merge( |
|
1253 | 1253 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1254 | 1254 | workspace_id, dry_run=True, use_rebase=use_rebase) |
|
1255 | 1255 | |
|
1256 | 1256 | # Do not store the response if there was an unknown error. |
|
1257 | 1257 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1258 | 1258 | pull_request._last_merge_source_rev = \ |
|
1259 | 1259 | pull_request.source_ref_parts.commit_id |
|
1260 | 1260 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1261 | 1261 | pull_request._last_merge_status = merge_state.failure_reason |
|
1262 | 1262 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1263 | 1263 | Session().add(pull_request) |
|
1264 | 1264 | Session().commit() |
|
1265 | 1265 | |
|
1266 | 1266 | return merge_state |
|
1267 | 1267 | |
|
1268 | 1268 | def _workspace_id(self, pull_request): |
|
1269 | 1269 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1270 | 1270 | return workspace_id |
|
1271 | 1271 | |
|
1272 | 1272 | def merge_status_message(self, status_code): |
|
1273 | 1273 | """ |
|
1274 | 1274 | Return a human friendly error message for the given merge status code. |
|
1275 | 1275 | """ |
|
1276 | 1276 | return self.MERGE_STATUS_MESSAGES[status_code] |
|
1277 | 1277 | |
|
1278 | 1278 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1279 | 1279 | bookmark=None): |
|
1280 | 1280 | all_refs, selected_ref = \ |
|
1281 | 1281 | self._get_repo_pullrequest_sources( |
|
1282 | 1282 | repo.scm_instance(), commit_id=commit_id, |
|
1283 | 1283 | branch=branch, bookmark=bookmark) |
|
1284 | 1284 | |
|
1285 | 1285 | refs_select2 = [] |
|
1286 | 1286 | for element in all_refs: |
|
1287 | 1287 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1288 | 1288 | refs_select2.append({'text': element[1], 'children': children}) |
|
1289 | 1289 | |
|
1290 | 1290 | return { |
|
1291 | 1291 | 'user': { |
|
1292 | 1292 | 'user_id': repo.user.user_id, |
|
1293 | 1293 | 'username': repo.user.username, |
|
1294 | 'firstname': repo.user.firstname, | |
|
1295 | 'lastname': repo.user.lastname, | |
|
1294 | 'firstname': repo.user.first_name, | |
|
1295 | 'lastname': repo.user.last_name, | |
|
1296 | 1296 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1297 | 1297 | }, |
|
1298 | 1298 | 'description': h.chop_at_smart(repo.description, '\n'), |
|
1299 | 1299 | 'refs': { |
|
1300 | 1300 | 'all_refs': all_refs, |
|
1301 | 1301 | 'selected_ref': selected_ref, |
|
1302 | 1302 | 'select2_refs': refs_select2 |
|
1303 | 1303 | } |
|
1304 | 1304 | } |
|
1305 | 1305 | |
|
1306 | 1306 | def generate_pullrequest_title(self, source, source_ref, target): |
|
1307 | 1307 | return u'{source}#{at_ref} to {target}'.format( |
|
1308 | 1308 | source=source, |
|
1309 | 1309 | at_ref=source_ref, |
|
1310 | 1310 | target=target, |
|
1311 | 1311 | ) |
|
1312 | 1312 | |
|
1313 | 1313 | def _cleanup_merge_workspace(self, pull_request): |
|
1314 | 1314 | # Merging related cleanup |
|
1315 | 1315 | target_scm = pull_request.target_repo.scm_instance() |
|
1316 | 1316 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1317 | 1317 | |
|
1318 | 1318 | try: |
|
1319 | 1319 | target_scm.cleanup_merge_workspace(workspace_id) |
|
1320 | 1320 | except NotImplementedError: |
|
1321 | 1321 | pass |
|
1322 | 1322 | |
|
1323 | 1323 | def _get_repo_pullrequest_sources( |
|
1324 | 1324 | self, repo, commit_id=None, branch=None, bookmark=None): |
|
1325 | 1325 | """ |
|
1326 | 1326 | Return a structure with repo's interesting commits, suitable for |
|
1327 | 1327 | the selectors in pullrequest controller |
|
1328 | 1328 | |
|
1329 | 1329 | :param commit_id: a commit that must be present in the list 

1330 | 1330 | and selected by default 

1331 | 1331 | :param branch: a branch that must be in the list and selected 

1332 | 1332 | by default - even if closed 

1333 | 1333 | :param bookmark: a bookmark that must be in the list and selected by default 
|
1334 | 1334 | """ |
|
1335 | 1335 | |
|
1336 | 1336 | commit_id = safe_str(commit_id) if commit_id else None |
|
1337 | 1337 | branch = safe_str(branch) if branch else None |
|
1338 | 1338 | bookmark = safe_str(bookmark) if bookmark else None |
|
1339 | 1339 | |
|
1340 | 1340 | selected = None |
|
1341 | 1341 | |
|
1342 | 1342 | # order matters: first source that has commit_id in it will be selected |
|
1343 | 1343 | sources = [] |
|
1344 | 1344 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
1345 | 1345 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
1346 | 1346 | |
|
1347 | 1347 | if commit_id: |
|
1348 | 1348 | ref_commit = (h.short_id(commit_id), commit_id) |
|
1349 | 1349 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
1350 | 1350 | |
|
1351 | 1351 | sources.append( |
|
1352 | 1352 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
1353 | 1353 | ) |
|
1354 | 1354 | |
|
1355 | 1355 | groups = [] |
|
1356 | 1356 | for group_key, ref_list, group_name, match in sources: |
|
1357 | 1357 | group_refs = [] |
|
1358 | 1358 | for ref_name, ref_id in ref_list: |
|
1359 | 1359 | ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id) |
|
1360 | 1360 | group_refs.append((ref_key, ref_name)) |
|
1361 | 1361 | |
|
1362 | 1362 | if not selected: |
|
1363 | 1363 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
1364 | 1364 | selected = ref_key |
|
1365 | 1365 | |
|
1366 | 1366 | if group_refs: |
|
1367 | 1367 | groups.append((group_refs, group_name)) |
|
1368 | 1368 | |
|
1369 | 1369 | if not selected: |
|
1370 | 1370 | ref = commit_id or branch or bookmark |
|
1371 | 1371 | if ref: |
|
1372 | 1372 | raise CommitDoesNotExistError( |
|
1373 | 1373 | 'No commit refs could be found matching: %s' % ref) |
|
1374 | 1374 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
1375 | 1375 | selected = 'branch:%s:%s' % ( |
|
1376 | 1376 | repo.DEFAULT_BRANCH_NAME, |
|
1377 | 1377 | repo.branches[repo.DEFAULT_BRANCH_NAME] |
|
1378 | 1378 | ) |
|
1379 | 1379 | elif repo.commit_ids: |
|
1380 | 1380 | rev = repo.commit_ids[0] |
|
1381 | 1381 | selected = 'rev:%s:%s' % (rev, rev) |
|
1382 | 1382 | else: |
|
1383 | 1383 | raise EmptyRepositoryError() |
|
1384 | 1384 | return groups, selected |
|
1385 | 1385 | |
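
Each selector entry produced above is keyed in the 'type:name:commit_id' form that the loop builds; for example (values made up):

    ref_key = '%s:%s:%s' % ('branch', 'default', '1f2e3d4c5b6a')
    # -> 'branch:default:1f2e3d4c5b6a', paired with the display name 'default'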
|
1386 | 1386 | def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT): |
|
1387 | 1387 | return self._get_diff_from_pr_or_version( |
|
1388 | 1388 | source_repo, source_ref_id, target_ref_id, context=context) |
|
1389 | 1389 | |
|
1390 | 1390 | def _get_diff_from_pr_or_version( |
|
1391 | 1391 | self, source_repo, source_ref_id, target_ref_id, context): |
|
1392 | 1392 | target_commit = source_repo.get_commit( |
|
1393 | 1393 | commit_id=safe_str(target_ref_id)) |
|
1394 | 1394 | source_commit = source_repo.get_commit( |
|
1395 | 1395 | commit_id=safe_str(source_ref_id)) |
|
1396 | 1396 | if isinstance(source_repo, Repository): |
|
1397 | 1397 | vcs_repo = source_repo.scm_instance() |
|
1398 | 1398 | else: |
|
1399 | 1399 | vcs_repo = source_repo |
|
1400 | 1400 | |
|
1401 | 1401 | # TODO: johbo: In the context of an update, we cannot reach |
|
1402 | 1402 | # the old commit anymore with our normal mechanisms. It needs |
|
1403 | 1403 | # some sort of special support in the vcs layer to avoid this |
|
1404 | 1404 | # workaround. |
|
1405 | 1405 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
1406 | 1406 | vcs_repo.alias == 'git'): |
|
1407 | 1407 | source_commit.raw_id = safe_str(source_ref_id) |
|
1408 | 1408 | |
|
1409 | 1409 | log.debug('calculating diff between ' |
|
1410 | 1410 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
1411 | 1411 | target_ref_id, source_ref_id, |
|
1412 | 1412 | safe_unicode(vcs_repo.path)) |
|
1413 | 1413 | |
|
1414 | 1414 | vcs_diff = vcs_repo.get_diff( |
|
1415 | 1415 | commit1=target_commit, commit2=source_commit, context=context) |
|
1416 | 1416 | return vcs_diff |
|
1417 | 1417 | |
|
1418 | 1418 | def _is_merge_enabled(self, pull_request): |
|
1419 | 1419 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1420 | 1420 | settings = settings_model.get_general_settings() |
|
1421 | 1421 | return settings.get('rhodecode_pr_merge_enabled', False) |
|
1422 | 1422 | |
|
1423 | 1423 | def _use_rebase_for_merging(self, pull_request): |
|
1424 | 1424 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1425 | 1425 | settings = settings_model.get_general_settings() |
|
1426 | 1426 | return settings.get('rhodecode_hg_use_rebase_for_merging', False) |
|
1427 | 1427 | |
|
1428 | 1428 | def _log_audit_action(self, action, action_data, user, pull_request): |
|
1429 | 1429 | audit_logger.store( |
|
1430 | 1430 | action=action, |
|
1431 | 1431 | action_data=action_data, |
|
1432 | 1432 | user=user, |
|
1433 | 1433 | repo=pull_request.target_repo) |
|
1434 | 1434 | |
|
1435 | 1435 | def get_reviewer_functions(self): |
|
1436 | 1436 | """ |
|
1437 | 1437 | Fetches functions for validation and fetching default reviewers. |
|
1438 | 1438 | If available we use the EE package, else we fall back to CE 
|
1439 | 1439 | package functions |
|
1440 | 1440 | """ |
|
1441 | 1441 | try: |
|
1442 | 1442 | from rc_reviewers.utils import get_default_reviewers_data |
|
1443 | 1443 | from rc_reviewers.utils import validate_default_reviewers |
|
1444 | 1444 | except ImportError: |
|
1445 | 1445 | from rhodecode.apps.repository.utils import \ |
|
1446 | 1446 | get_default_reviewers_data |
|
1447 | 1447 | from rhodecode.apps.repository.utils import \ |
|
1448 | 1448 | validate_default_reviewers |
|
1449 | 1449 | |
|
1450 | 1450 | return get_default_reviewers_data, validate_default_reviewers |
|
1451 | 1451 | |
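
The helper above relies on the EE-first import-with-fallback idiom; a generic sketch of the pattern (the module names here are placeholders, not real packages):

    try:
        # prefer the Enterprise Edition implementation when installed
        from ee_package.utils import feature_function
    except ImportError:
        # otherwise fall back to the Community Edition implementation
        from ce_package.utils import feature_function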
|
1452 | 1452 | |
|
1453 | 1453 | class MergeCheck(object): |
|
1454 | 1454 | """ |
|
1455 | 1455 | Performs merge checks and returns a check object which stores information 

1456 | 1456 | about merge errors and merge conditions 
|
1457 | 1457 | """ |
|
1458 | 1458 | TODO_CHECK = 'todo' |
|
1459 | 1459 | PERM_CHECK = 'perm' |
|
1460 | 1460 | REVIEW_CHECK = 'review' |
|
1461 | 1461 | MERGE_CHECK = 'merge' |
|
1462 | 1462 | |
|
1463 | 1463 | def __init__(self): |
|
1464 | 1464 | self.review_status = None |
|
1465 | 1465 | self.merge_possible = None |
|
1466 | 1466 | self.merge_msg = '' |
|
1467 | 1467 | self.failed = None |
|
1468 | 1468 | self.errors = [] |
|
1469 | 1469 | self.error_details = OrderedDict() |
|
1470 | 1470 | |
|
1471 | 1471 | def push_error(self, error_type, message, error_key, details): |
|
1472 | 1472 | self.failed = True |
|
1473 | 1473 | self.errors.append([error_type, message]) |
|
1474 | 1474 | self.error_details[error_key] = dict( |
|
1475 | 1475 | details=details, |
|
1476 | 1476 | error_type=error_type, |
|
1477 | 1477 | message=message |
|
1478 | 1478 | ) |
|
1479 | 1479 | |
|
1480 | 1480 | @classmethod |
|
1481 | 1481 | def validate(cls, pull_request, user, fail_early=False, translator=None): |
|
1482 | 1482 | # if migrated to pyramid... |
|
1483 | 1483 | # _ = lambda: translator or _ # use passed in translator if any |
|
1484 | 1484 | |
|
1485 | 1485 | merge_check = cls() |
|
1486 | 1486 | |
|
1487 | 1487 | # permissions to merge |
|
1488 | 1488 | user_allowed_to_merge = PullRequestModel().check_user_merge( |
|
1489 | 1489 | pull_request, user) |
|
1490 | 1490 | if not user_allowed_to_merge: |
|
1491 | 1491 | log.debug("MergeCheck: cannot merge, user not allowed to merge.") 
|
1492 | 1492 | |
|
1493 | 1493 | msg = _('User `{}` not allowed to perform merge.').format(user.username) |
|
1494 | 1494 | merge_check.push_error('error', msg, cls.PERM_CHECK, user.username) |
|
1495 | 1495 | if fail_early: |
|
1496 | 1496 | return merge_check |
|
1497 | 1497 | |
|
1498 | 1498 | # review status, must be always present |
|
1499 | 1499 | review_status = pull_request.calculated_review_status() |
|
1500 | 1500 | merge_check.review_status = review_status |
|
1501 | 1501 | |
|
1502 | 1502 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED |
|
1503 | 1503 | if not status_approved: |
|
1504 | 1504 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
1505 | 1505 | |
|
1506 | 1506 | msg = _('Pull request reviewer approval is pending.') |
|
1507 | 1507 | |
|
1508 | 1508 | merge_check.push_error( |
|
1509 | 1509 | 'warning', msg, cls.REVIEW_CHECK, review_status) |
|
1510 | 1510 | |
|
1511 | 1511 | if fail_early: |
|
1512 | 1512 | return merge_check |
|
1513 | 1513 | |
|
1514 | 1514 | # left over TODOs |
|
1515 | 1515 | todos = CommentsModel().get_unresolved_todos(pull_request) |
|
1516 | 1516 | if todos: |
|
1517 | 1517 | log.debug("MergeCheck: cannot merge, {} " |
|
1518 | 1518 | "unresolved todos left.".format(len(todos))) |
|
1519 | 1519 | |
|
1520 | 1520 | if len(todos) == 1: |
|
1521 | 1521 | msg = _('Cannot merge, {} TODO still not resolved.').format( |
|
1522 | 1522 | len(todos)) |
|
1523 | 1523 | else: |
|
1524 | 1524 | msg = _('Cannot merge, {} TODOs still not resolved.').format( |
|
1525 | 1525 | len(todos)) |
|
1526 | 1526 | |
|
1527 | 1527 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) |
|
1528 | 1528 | |
|
1529 | 1529 | if fail_early: |
|
1530 | 1530 | return merge_check |
|
1531 | 1531 | |
|
1532 | 1532 | # merge possible |
|
1533 | 1533 | merge_status, msg = PullRequestModel().merge_status(pull_request) |
|
1534 | 1534 | merge_check.merge_possible = merge_status |
|
1535 | 1535 | merge_check.merge_msg = msg |
|
1536 | 1536 | if not merge_status: |
|
1537 | 1537 | log.debug( |
|
1538 | 1538 | "MergeCheck: cannot merge, pull request merge not possible.") |
|
1539 | 1539 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) |
|
1540 | 1540 | |
|
1541 | 1541 | if fail_early: |
|
1542 | 1542 | return merge_check |
|
1543 | 1543 | |
|
1544 | 1544 | return merge_check |
|
1545 | 1545 | |
|
1546 | 1546 | |
|
1547 | 1547 | ChangeTuple = namedtuple('ChangeTuple', |
|
1548 | 1548 | ['added', 'common', 'removed', 'total']) |
|
1549 | 1549 | |
|
1550 | 1550 | FileChangeTuple = namedtuple('FileChangeTuple', |
|
1551 | 1551 | ['added', 'modified', 'removed']) |
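
A hedged usage sketch for the MergeCheck class defined above: a caller runs all checks, then inspects the accumulated (error_type, message) pairs; pull_request and user are assumed to be existing model objects.

    merge_check = MergeCheck.validate(pull_request, user, fail_early=False)
    if merge_check.failed:
        for error_type, message in merge_check.errors:
            # e.g. ('warning', 'Pull request reviewer approval is pending.')
            log.info('merge blocked - %s: %s', error_type, message)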
@@ -1,902 +1,907 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | users model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import traceback |
|
27 | 27 | |
|
28 | 28 | import datetime |
|
29 | 29 | from pylons.i18n.translation import _ |
|
30 | 30 | |
|
31 | 31 | import ipaddress |
|
32 | 32 | from sqlalchemy.exc import DatabaseError |
|
33 | 33 | |
|
34 | 34 | from rhodecode import events |
|
35 | 35 | from rhodecode.lib.user_log_filter import user_log_filter |
|
36 | 36 | from rhodecode.lib.utils2 import ( |
|
37 | 37 | safe_unicode, get_current_rhodecode_user, action_logger_generic, |
|
38 | 38 | AttributeDict, str2bool) |
|
39 | 39 | from rhodecode.lib.exceptions import ( |
|
40 | 40 | DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException, |
|
41 | 41 | UserOwnsUserGroupsException, NotAllowedToCreateUserError) |
|
42 | 42 | from rhodecode.lib.caching_query import FromCache |
|
43 | 43 | from rhodecode.model import BaseModel |
|
44 | 44 | from rhodecode.model.auth_token import AuthTokenModel |
|
45 | 45 | from rhodecode.model.db import ( |
|
46 | 46 | _hash_key, true, false, or_, joinedload, User, UserToPerm, |
|
47 | 47 | UserEmailMap, UserIpMap, UserLog) |
|
48 | 48 | from rhodecode.model.meta import Session |
|
49 | 49 | from rhodecode.model.repo_group import RepoGroupModel |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | log = logging.getLogger(__name__) |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | class UserModel(BaseModel): |
|
56 | 56 | cls = User |
|
57 | 57 | |
|
58 | 58 | def get(self, user_id, cache=False): |
|
59 | 59 | user = self.sa.query(User) |
|
60 | 60 | if cache: |
|
61 | 61 | user = user.options( |
|
62 | 62 | FromCache("sql_cache_short", "get_user_%s" % user_id)) |
|
63 | 63 | return user.get(user_id) |
|
64 | 64 | |
|
65 | 65 | def get_user(self, user): |
|
66 | 66 | return self._get_user(user) |
|
67 | 67 | |
|
68 | 68 | def _serialize_user(self, user): |
|
69 | 69 | import rhodecode.lib.helpers as h |
|
70 | 70 | |
|
71 | 71 | return { |
|
72 | 72 | 'id': user.user_id, |
|
73 | 'first_name': | 

74 | 'last_name': | 

73 | 'first_name': user.first_name, | 

74 | 'last_name': user.last_name, | 

75 | 75 | 'username': user.username, |
|
76 | 76 | 'email': user.email, |
|
77 | 77 | 'icon_link': h.gravatar_url(user.email, 30), |
|
78 | 78 | 'value_display': h.escape(h.person(user)), |
|
79 | 79 | 'value': user.username, |
|
80 | 80 | 'value_type': 'user', |
|
81 | 81 | 'active': user.active, |
|
82 | 82 | } |
|
83 | 83 | |
|
84 | 84 | def get_users(self, name_contains=None, limit=20, only_active=True): |
|
85 | 85 | |
|
86 | 86 | query = self.sa.query(User) |
|
87 | 87 | if only_active: |
|
88 | 88 | query = query.filter(User.active == true()) |
|
89 | 89 | |
|
90 | 90 | if name_contains: |
|
91 | 91 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
92 | 92 | query = query.filter( |
|
93 | 93 | or_( |
|
94 | 94 | User.name.ilike(ilike_expression), |
|
95 | 95 | User.lastname.ilike(ilike_expression), |
|
96 | 96 | User.username.ilike(ilike_expression) |
|
97 | 97 | ) |
|
98 | 98 | ) |
|
99 | 99 | query = query.limit(limit) |
|
100 | 100 | users = query.all() |
|
101 | 101 | |
|
102 | 102 | _users = [ |
|
103 | 103 | self._serialize_user(user) for user in users |
|
104 | 104 | ] |
|
105 | 105 | return _users |
|
106 | 106 | |
|
107 | 107 | def get_by_username(self, username, cache=False, case_insensitive=False): |
|
108 | 108 | |
|
109 | 109 | if case_insensitive: |
|
110 | 110 | user = self.sa.query(User).filter(User.username.ilike(username)) |
|
111 | 111 | else: |
|
112 | 112 | user = self.sa.query(User)\ |
|
113 | 113 | .filter(User.username == username) |
|
114 | 114 | if cache: |
|
115 | 115 | name_key = _hash_key(username) |
|
116 | 116 | user = user.options( |
|
117 | 117 | FromCache("sql_cache_short", "get_user_%s" % name_key)) |
|
118 | 118 | return user.scalar() |
|
119 | 119 | |
|
120 | 120 | def get_by_email(self, email, cache=False, case_insensitive=False): |
|
121 | 121 | return User.get_by_email(email, case_insensitive, cache) |
|
122 | 122 | |
|
123 | 123 | def get_by_auth_token(self, auth_token, cache=False): |
|
124 | 124 | return User.get_by_auth_token(auth_token, cache) |
|
125 | 125 | |
|
126 | 126 | def get_active_user_count(self, cache=False): |
|
127 | 127 | return User.query().filter( |
|
128 | 128 | User.active == True).filter( |
|
129 | 129 | User.username != User.DEFAULT_USER).count() |
|
130 | 130 | |
|
131 | 131 | def create(self, form_data, cur_user=None): |
|
132 | 132 | if not cur_user: |
|
133 | 133 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
134 | 134 | |
|
135 | 135 | user_data = { |
|
136 | 136 | 'username': form_data['username'], |
|
137 | 137 | 'password': form_data['password'], |
|
138 | 138 | 'email': form_data['email'], |
|
139 | 139 | 'firstname': form_data['firstname'], |
|
140 | 140 | 'lastname': form_data['lastname'], |
|
141 | 141 | 'active': form_data['active'], |
|
142 | 142 | 'extern_type': form_data['extern_type'], |
|
143 | 143 | 'extern_name': form_data['extern_name'], |
|
144 | 144 | 'admin': False, |
|
145 | 145 | 'cur_user': cur_user |
|
146 | 146 | } |
|
147 | 147 | |
|
148 | 148 | if 'create_repo_group' in form_data: |
|
149 | 149 | user_data['create_repo_group'] = str2bool( |
|
150 | 150 | form_data.get('create_repo_group')) |
|
151 | 151 | |
|
152 | 152 | try: |
|
153 | 153 | if form_data.get('password_change'): |
|
154 | 154 | user_data['force_password_change'] = True |
|
155 | 155 | return UserModel().create_or_update(**user_data) |
|
156 | 156 | except Exception: |
|
157 | 157 | log.error(traceback.format_exc()) |
|
158 | 158 | raise |
|
159 | 159 | |
|
160 | 160 | def update_user(self, user, skip_attrs=None, **kwargs): |
|
161 | 161 | from rhodecode.lib.auth import get_crypt_password |
|
162 | 162 | |
|
163 | 163 | user = self._get_user(user) |
|
164 | 164 | if user.username == User.DEFAULT_USER: |
|
165 | 165 | raise DefaultUserException( |
|
166 | 166 | _("You can't edit this user since it's" 

167 | 167 | " crucial for the entire application")) 
|
168 | 168 | |
|
169 | 169 | # first store only defaults |
|
170 | 170 | user_attrs = { |
|
171 | 171 | 'updating_user_id': user.user_id, |
|
172 | 172 | 'username': user.username, |
|
173 | 173 | 'password': user.password, |
|
174 | 174 | 'email': user.email, |
|
175 | 175 | 'firstname': user.name, |
|
176 | 176 | 'lastname': user.lastname, |
|
177 | 177 | 'active': user.active, |
|
178 | 178 | 'admin': user.admin, |
|
179 | 179 | 'extern_name': user.extern_name, |
|
180 | 180 | 'extern_type': user.extern_type, |
|
181 | 181 | 'language': user.user_data.get('language') |
|
182 | 182 | } |
|
183 | 183 | |
|
184 | 184 | # in case there's new_password, that comes from form, use it to |
|
185 | 185 | # store password |
|
186 | 186 | if kwargs.get('new_password'): |
|
187 | 187 | kwargs['password'] = kwargs['new_password'] |
|
188 | 188 | |
|
189 | 189 | # cleanups, my_account password change form |
|
190 | 190 | kwargs.pop('current_password', None) |
|
191 | 191 | kwargs.pop('new_password', None) |
|
192 | 192 | |
|
193 | 193 | # cleanups, user edit password change form |
|
194 | 194 | kwargs.pop('password_confirmation', None) |
|
195 | 195 | kwargs.pop('password_change', None) |
|
196 | 196 | |
|
197 | 197 | # create repo group on user creation |
|
198 | 198 | kwargs.pop('create_repo_group', None) |
|
199 | 199 | |
|
200 | 200 | # legacy forms send name, which is the firstname |
|
201 | 201 | firstname = kwargs.pop('name', None) |
|
202 | 202 | if firstname: |
|
203 | 203 | kwargs['firstname'] = firstname |
|
204 | 204 | |
|
205 | 205 | for k, v in kwargs.items(): |
|
206 | 206 | # skip if we don't want to update this |
|
207 | 207 | if skip_attrs and k in skip_attrs: |
|
208 | 208 | continue |
|
209 | 209 | |
|
210 | 210 | user_attrs[k] = v |
|
211 | 211 | |
|
212 | 212 | try: |
|
213 | 213 | return self.create_or_update(**user_attrs) |
|
214 | 214 | except Exception: |
|
215 | 215 | log.error(traceback.format_exc()) |
|
216 | 216 | raise |
|
217 | 217 | |
|
218 | 218 | def create_or_update( |
|
219 | 219 | self, username, password, email, firstname='', lastname='', |
|
220 | 220 | active=True, admin=False, extern_type=None, extern_name=None, |
|
221 | 221 | cur_user=None, plugin=None, force_password_change=False, |
|
222 | 222 | allow_to_create_user=True, create_repo_group=None, |
|
223 | 223 | updating_user_id=None, language=None, strict_creation_check=True): |
|
224 | 224 | """ |
|
225 | 225 | Creates a new instance if not found, or updates current one |
|
226 | 226 | |
|
227 | 227 | :param username: |
|
228 | 228 | :param password: |
|
229 | 229 | :param email: |
|
230 | 230 | :param firstname: |
|
231 | 231 | :param lastname: |
|
232 | 232 | :param active: |
|
233 | 233 | :param admin: |
|
234 | 234 | :param extern_type: |
|
235 | 235 | :param extern_name: |
|
236 | 236 | :param cur_user: |
|
237 | 237 | :param plugin: optional plugin this method was called from |
|
238 | 238 | :param force_password_change: toggles new or existing user flag |
|
239 | 239 | for password change |
|
240 | 240 | :param allow_to_create_user: Defines if the method can actually create |
|
241 | 241 | new users |
|
242 | 242 | :param create_repo_group: Defines if the method should also |
|
243 | 243 | create a repo group named after the user, and set them as owner 
|
244 | 244 | :param updating_user_id: if we set it up this is the user we want to |
|
245 | 245 | update; this allows editing of the username. 
|
246 | 246 | :param language: language of user from interface. |
|
247 | 247 | |
|
248 | 248 | :returns: new User object with injected `is_new_user` attribute. |
|
249 | 249 | """ |
|
250 | 250 | if not cur_user: |
|
251 | 251 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
252 | 252 | |
|
253 | 253 | from rhodecode.lib.auth import ( |
|
254 | 254 | get_crypt_password, check_password, generate_auth_token) |
|
255 | 255 | from rhodecode.lib.hooks_base import ( |
|
256 | 256 | log_create_user, check_allowed_create_user) |
|
257 | 257 | |
|
258 | 258 | def _password_change(new_user, password): |
|
259 | 259 | # empty password |
|
260 | 260 | if not new_user.password: |
|
261 | 261 | return False |
|
262 | 262 | |
|
263 | 263 | # password check is only needed for RhodeCode internal auth calls |
|
264 | 264 | # in case it's a plugin we don't care |
|
265 | 265 | if not plugin: |
|
266 | 266 | |
|
267 | 267 | # first check if we gave crypted password back, and if it |
|
268 | 268 | # matches it's not password change |
|
269 | 269 | if new_user.password == password: |
|
270 | 270 | return False |
|
271 | 271 | |
|
272 | 272 | password_match = check_password(password, new_user.password) |
|
273 | 273 | if not password_match: |
|
274 | 274 | return True |
|
275 | 275 | |
|
276 | 276 | return False |
|
277 | 277 | |
|
278 | 278 | # read settings on default personal repo group creation |
|
279 | 279 | if create_repo_group is None: |
|
280 | 280 | default_create_repo_group = RepoGroupModel()\ |
|
281 | 281 | .get_default_create_personal_repo_group() |
|
282 | 282 | create_repo_group = default_create_repo_group |
|
283 | 283 | |
|
284 | 284 | user_data = { |
|
285 | 285 | 'username': username, |
|
286 | 286 | 'password': password, |
|
287 | 287 | 'email': email, |
|
288 | 288 | 'firstname': firstname, |
|
289 | 289 | 'lastname': lastname, |
|
290 | 290 | 'active': active, |
|
291 | 291 | 'admin': admin |
|
292 | 292 | } |
|
293 | 293 | |
|
294 | 294 | if updating_user_id: |
|
295 | 295 | log.debug('Checking for existing account in RhodeCode ' |
|
296 | 296 | 'database with user_id `%s` ' % (updating_user_id,)) |
|
297 | 297 | user = User.get(updating_user_id) |
|
298 | 298 | else: |
|
299 | 299 | log.debug('Checking for existing account in RhodeCode ' |
|
300 | 300 | 'database with username `%s` ' % (username,)) |
|
301 | 301 | user = User.get_by_username(username, case_insensitive=True) |
|
302 | 302 | |
|
303 | 303 | if user is None: |
|
304 | 304 | # we check internal flag if this method is actually allowed to |
|
305 | 305 | # create new user |
|
306 | 306 | if not allow_to_create_user: |
|
307 | 307 | msg = ('Method wants to create new user, but it is not ' |
|
308 | 308 | 'allowed to do so') |
|
309 | 309 | log.warning(msg) |
|
310 | 310 | raise NotAllowedToCreateUserError(msg) |
|
311 | 311 | |
|
312 | 312 | log.debug('Creating new user %s', username) |
|
313 | 313 | |
|
314 | 314 | # only if we create user that is active |
|
315 | 315 | new_active_user = active |
|
316 | 316 | if new_active_user and strict_creation_check: |
|
317 | 317 | # raises UserCreationError if it's not allowed for any reason to |
|
318 | 318 | # create new active user, this also executes pre-create hooks |
|
319 | 319 | check_allowed_create_user(user_data, cur_user, strict_check=True) |
|
320 | 320 | events.trigger(events.UserPreCreate(user_data)) |
|
321 | 321 | new_user = User() |
|
322 | 322 | edit = False |
|
323 | 323 | else: |
|
324 | 324 | log.debug('updating user %s', username) |
|
325 | 325 | events.trigger(events.UserPreUpdate(user, user_data)) |
|
326 | 326 | new_user = user |
|
327 | 327 | edit = True |
|
328 | 328 | |
|
329 | 329 | # we're not allowed to edit default user |
|
330 | 330 | if user.username == User.DEFAULT_USER: |
|
331 | 331 | raise DefaultUserException( |
|
332 | 332 | _("You can't edit this user (`%(username)s`) since it's " |
|
333 | 333 | "crucial for entire application") % {'username': user.username}) |
|
334 | 334 | |
|
335 | 335 | # inject special attribute that will tell us if User is new or old |
|
336 | 336 | new_user.is_new_user = not edit |
|
337 | 337 | # for users that didn't specify auth type, we use RhodeCode built in 
|
338 | 338 | from rhodecode.authentication.plugins import auth_rhodecode |
|
339 | 339 | extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name |
|
340 | 340 | extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name |
|
341 | 341 | |
|
342 | 342 | try: |
|
343 | 343 | new_user.username = username |
|
344 | 344 | new_user.admin = admin |
|
345 | 345 | new_user.email = email |
|
346 | 346 | new_user.active = active |
|
347 | 347 | new_user.extern_name = safe_unicode(extern_name) |
|
348 | 348 | new_user.extern_type = safe_unicode(extern_type) |
|
349 | 349 | new_user.name = firstname |
|
350 | 350 | new_user.lastname = lastname |
|
351 | 351 | |
|
352 | 352 | # set password only if creating a user or the password is changed 
|
353 | 353 | if not edit or _password_change(new_user, password): |
|
354 | 354 | reason = 'new password' if edit else 'new user' |
|
355 | 355 | log.debug('Updating password reason=>%s', reason) |
|
356 | 356 | new_user.password = get_crypt_password(password) if password else None |
|
357 | 357 | |
|
358 | 358 | if force_password_change: |
|
359 | 359 | new_user.update_userdata(force_password_change=True) |
|
360 | 360 | if language: |
|
361 | 361 | new_user.update_userdata(language=language) |
|
362 | 362 | new_user.update_userdata(notification_status=True) |
|
363 | 363 | |
|
364 | 364 | self.sa.add(new_user) |
|
365 | 365 | |
|
366 | 366 | if not edit and create_repo_group: |
|
367 | 367 | RepoGroupModel().create_personal_repo_group( |
|
368 | 368 | new_user, commit_early=False) |
|
369 | 369 | |
|
370 | 370 | if not edit: |
|
371 | 371 | # add the RSS token |
|
372 | 372 | AuthTokenModel().create(username, |
|
373 | 373 | description='Generated feed token', |
|
374 | 374 | role=AuthTokenModel.cls.ROLE_FEED) |
|
375 | 375 | log_create_user(created_by=cur_user, **new_user.get_dict()) |
|
376 | 376 | events.trigger(events.UserPostCreate(user_data)) |
|
377 | 377 | return new_user |
|
378 | 378 | except (DatabaseError,): |
|
379 | 379 | log.error(traceback.format_exc()) |
|
380 | 380 | raise |
|
381 | 381 | |
|
382 | 382 | def create_registration(self, form_data): |
|
383 | 383 | from rhodecode.model.notification import NotificationModel |
|
384 | 384 | from rhodecode.model.notification import EmailNotificationModel |
|
385 | 385 | |
|
386 | 386 | try: |
|
387 | 387 | form_data['admin'] = False |
|
388 | 388 | form_data['extern_name'] = 'rhodecode' |
|
389 | 389 | form_data['extern_type'] = 'rhodecode' |
|
390 | 390 | new_user = self.create(form_data) |
|
391 | 391 | |
|
392 | 392 | self.sa.add(new_user) |
|
393 | 393 | self.sa.flush() |
|
394 | 394 | |
|
395 | 395 | user_data = new_user.get_dict() |
|
396 | 396 | kwargs = { |
|
397 | 397 | # use SQLALCHEMY safe dump of user data |
|
398 | 398 | 'user': AttributeDict(user_data), |
|
399 | 399 | 'date': datetime.datetime.now() |
|
400 | 400 | } |
|
401 | 401 | notification_type = EmailNotificationModel.TYPE_REGISTRATION |
|
402 | 402 | # pre-generate the subject for notification itself |
|
403 | 403 | (subject, |
|
404 | 404 | _h, _e, # we don't care about those |
|
405 | 405 | body_plaintext) = EmailNotificationModel().render_email( |
|
406 | 406 | notification_type, **kwargs) |
|
407 | 407 | |
|
408 | 408 | # create notification objects, and emails |
|
409 | 409 | NotificationModel().create( |
|
410 | 410 | created_by=new_user, |
|
411 | 411 | notification_subject=subject, |
|
412 | 412 | notification_body=body_plaintext, |
|
413 | 413 | notification_type=notification_type, |
|
414 | 414 | recipients=None, # all admins |
|
415 | 415 | email_kwargs=kwargs, |
|
416 | 416 | ) |
|
417 | 417 | |
|
418 | 418 | return new_user |
|
419 | 419 | except Exception: |
|
420 | 420 | log.error(traceback.format_exc()) |
|
421 | 421 | raise |
|
422 | 422 | |
|
423 | 423 | def _handle_user_repos(self, username, repositories, handle_mode=None): |
|
424 | 424 | _superadmin = self.cls.get_first_super_admin() |
|
425 | 425 | left_overs = True |
|
426 | 426 | |
|
427 | 427 | from rhodecode.model.repo import RepoModel |
|
428 | 428 | |
|
429 | 429 | if handle_mode == 'detach': |
|
430 | 430 | for obj in repositories: |
|
431 | 431 | obj.user = _superadmin |
|
432 | 432 | # set description so we know why the super admin now owns 

433 | 433 | # additional repositories that were orphaned! 
|
434 | 434 | obj.description += ' \n::detached repository from deleted user: %s' % (username,) |
|
435 | 435 | self.sa.add(obj) |
|
436 | 436 | left_overs = False |
|
437 | 437 | elif handle_mode == 'delete': |
|
438 | 438 | for obj in repositories: |
|
439 | 439 | RepoModel().delete(obj, forks='detach') |
|
440 | 440 | left_overs = False |
|
441 | 441 | |
|
442 | 442 | # if nothing is done we have left overs left |
|
443 | 443 | return left_overs |
|
444 | 444 | |
|
445 | 445 | def _handle_user_repo_groups(self, username, repository_groups, |
|
446 | 446 | handle_mode=None): |
|
447 | 447 | _superadmin = self.cls.get_first_super_admin() |
|
448 | 448 | left_overs = True |
|
449 | 449 | |
|
450 | 450 | from rhodecode.model.repo_group import RepoGroupModel |
|
451 | 451 | |
|
452 | 452 | if handle_mode == 'detach': |
|
453 | 453 | for r in repository_groups: |
|
454 | 454 | r.user = _superadmin |
|
455 | 455 | # set description so we know why the super admin now owns 

456 | 456 | # additional repository groups that were orphaned! 
|
457 | 457 | r.group_description += ' \n::detached repository group from deleted user: %s' % (username,) |
|
458 | 458 | self.sa.add(r) |
|
459 | 459 | left_overs = False |
|
460 | 460 | elif handle_mode == 'delete': |
|
461 | 461 | for r in repository_groups: |
|
462 | 462 | RepoGroupModel().delete(r) |
|
463 | 463 | left_overs = False |
|
464 | 464 | |
|
465 | 465 | # if nothing is done we have left overs left |
|
466 | 466 | return left_overs |
|
467 | 467 | |
|
468 | 468 | def _handle_user_user_groups(self, username, user_groups, handle_mode=None): |
|
469 | 469 | _superadmin = self.cls.get_first_super_admin() |
|
470 | 470 | left_overs = True |
|
471 | 471 | |
|
472 | 472 | from rhodecode.model.user_group import UserGroupModel |
|
473 | 473 | |
|
474 | 474 | if handle_mode == 'detach': |
|
475 | 475 | for r in user_groups: |
|
476 | 476 | for user_user_group_to_perm in r.user_user_group_to_perm: |
|
477 | 477 | if user_user_group_to_perm.user.username == username: |
|
478 | 478 | user_user_group_to_perm.user = _superadmin |
|
479 | 479 | r.user = _superadmin |
|
480 | 480 | # set description so we know why the super admin now owns 

481 | 481 | # additional user groups that were orphaned! 
|
482 | 482 | r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,) |
|
483 | 483 | self.sa.add(r) |
|
484 | 484 | left_overs = False |
|
485 | 485 | elif handle_mode == 'delete': |
|
486 | 486 | for r in user_groups: |
|
487 | 487 | UserGroupModel().delete(r) |
|
488 | 488 | left_overs = False |
|
489 | 489 | |
|
490 | 490 | # if nothing is done we have left overs left |
|
491 | 491 | return left_overs |
|
492 | 492 | |
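
The three _handle_user_* helpers above share one dispatch shape: 'detach' re-owns the objects to the first super-admin, 'delete' removes them, anything else leaves them for the caller to report. A minimal standalone sketch with illustrative names:

    def handle_owned_objects(objects, new_owner, handle_mode):
        left_overs = True
        if handle_mode == 'detach':
            for obj in objects:
                obj.user = new_owner  # re-own instead of deleting
            left_overs = False
        elif handle_mode == 'delete':
            for obj in objects:
                obj.delete()  # hypothetical delete hook
            left_overs = False
        # True means nothing was handled; the caller must raise
        return left_overs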
|
493 | 493 | def delete(self, user, cur_user=None, handle_repos=None, |
|
494 | 494 | handle_repo_groups=None, handle_user_groups=None): |
|
495 | 495 | if not cur_user: |
|
496 | 496 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
497 | 497 | user = self._get_user(user) |
|
498 | 498 | |
|
499 | 499 | try: |
|
500 | 500 | if user.username == User.DEFAULT_USER: |
|
501 | 501 | raise DefaultUserException( |
|
502 | 502 | _(u"You can't remove this user since it's" 

503 | 503 | u" crucial for the entire application")) 
|
504 | 504 | |
|
505 | 505 | left_overs = self._handle_user_repos( |
|
506 | 506 | user.username, user.repositories, handle_repos) |
|
507 | 507 | if left_overs and user.repositories: |
|
508 | 508 | repos = [x.repo_name for x in user.repositories] |
|
509 | 509 | raise UserOwnsReposException( |
|
510 | 510 | _(u'user "%s" still owns %s repositories and cannot be ' |
|
511 | 511 | u'removed. Switch owners or remove those repositories:%s') |
|
512 | 512 | % (user.username, len(repos), ', '.join(repos))) |
|
513 | 513 | |
|
514 | 514 | left_overs = self._handle_user_repo_groups( |
|
515 | 515 | user.username, user.repository_groups, handle_repo_groups) |
|
516 | 516 | if left_overs and user.repository_groups: |
|
517 | 517 | repo_groups = [x.group_name for x in user.repository_groups] |
|
518 | 518 | raise UserOwnsRepoGroupsException( |
|
519 | 519 | _(u'user "%s" still owns %s repository groups and cannot be ' |
|
520 | 520 | u'removed. Switch owners or remove those repository groups:%s') |
|
521 | 521 | % (user.username, len(repo_groups), ', '.join(repo_groups))) |
|
522 | 522 | |
|
523 | 523 | left_overs = self._handle_user_user_groups( |
|
524 | 524 | user.username, user.user_groups, handle_user_groups) |
|
525 | 525 | if left_overs and user.user_groups: |
|
526 | 526 | user_groups = [x.users_group_name for x in user.user_groups] |
|
527 | 527 | raise UserOwnsUserGroupsException( |
|
528 | 528 | _(u'user "%s" still owns %s user groups and cannot be ' |
|
529 | 529 | u'removed. Switch owners or remove those user groups:%s') |
|
530 | 530 | % (user.username, len(user_groups), ', '.join(user_groups))) |
|
531 | 531 | |
|
532 | 532 | # we might change the user data with detach/delete, make sure 

533 | 533 | # the object is marked as expired before actually deleting! 
|
534 | 534 | self.sa.expire(user) |
|
535 | 535 | self.sa.delete(user) |
|
536 | 536 | from rhodecode.lib.hooks_base import log_delete_user |
|
537 | 537 | log_delete_user(deleted_by=cur_user, **user.get_dict()) |
|
538 | 538 | except Exception: |
|
539 | 539 | log.error(traceback.format_exc()) |
|
540 | 540 | raise |
|
541 | 541 | |
|
542 | 542 | def reset_password_link(self, data, pwd_reset_url): |
|
543 | 543 | from rhodecode.lib.celerylib import tasks, run_task |
|
544 | 544 | from rhodecode.model.notification import EmailNotificationModel |
|
545 | 545 | user_email = data['email'] |
|
546 | 546 | try: |
|
547 | 547 | user = User.get_by_email(user_email) |
|
548 | 548 | if user: |
|
549 | 549 | log.debug('password reset user found %s', user) |
|
550 | 550 | |
|
551 | 551 | email_kwargs = { |
|
552 | 552 | 'password_reset_url': pwd_reset_url, |
|
553 | 553 | 'user': user, |
|
554 | 554 | 'email': user_email, |
|
555 | 555 | 'date': datetime.datetime.now() |
|
556 | 556 | } |
|
557 | 557 | |
|
558 | 558 | (subject, headers, email_body, |
|
559 | 559 | email_body_plaintext) = EmailNotificationModel().render_email( |
|
560 | 560 | EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs) |
|
561 | 561 | |
|
562 | 562 | recipients = [user_email] |
|
563 | 563 | |
|
564 | 564 | action_logger_generic( |
|
565 | 565 | 'sending password reset email to user: {}'.format( |
|
566 | 566 | user), namespace='security.password_reset') |
|
567 | 567 | |
|
568 | 568 | run_task(tasks.send_email, recipients, subject, |
|
569 | 569 | email_body_plaintext, email_body) |
|
570 | 570 | |
|
571 | 571 | else: |
|
572 | 572 | log.debug("password reset email %s not found", user_email) |
|
573 | 573 | except Exception: |
|
574 | 574 | log.error(traceback.format_exc()) |
|
575 | 575 | return False |
|
576 | 576 | |
|
577 | 577 | return True |
|
578 | 578 | |
|
579 | 579 | def reset_password(self, data): |
|
580 | 580 | from rhodecode.lib.celerylib import tasks, run_task |
|
581 | 581 | from rhodecode.model.notification import EmailNotificationModel |
|
582 | 582 | from rhodecode.lib import auth |
|
583 | 583 | user_email = data['email'] |
|
584 | 584 | pre_db = True |
|
585 | 585 | try: |
|
586 | 586 | user = User.get_by_email(user_email) |
|
587 | 587 | new_passwd = auth.PasswordGenerator().gen_password( |
|
588 | 588 | 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL) |
|
589 | 589 | if user: |
|
590 | 590 | user.password = auth.get_crypt_password(new_passwd) |
|
591 | 591 | # also force this user to reset his password ! |
|
592 | 592 | user.update_userdata(force_password_change=True) |
|
593 | 593 | |
|
594 | 594 | Session().add(user) |
|
595 | 595 | |
|
596 | 596 | # now delete the token in question |
|
597 | 597 | UserApiKeys = AuthTokenModel.cls |
|
598 | 598 | UserApiKeys().query().filter( |
|
599 | 599 | UserApiKeys.api_key == data['token']).delete() |
|
600 | 600 | |
|
601 | 601 | Session().commit() |
|
602 | 602 | log.info('successfully reset password for `%s`', user_email) |
|
603 | 603 | |
|
604 | 604 | if new_passwd is None: |
|
605 | 605 | raise Exception('unable to generate new password') |
|
606 | 606 | |
|
607 | 607 | pre_db = False |
|
608 | 608 | |
|
609 | 609 | email_kwargs = { |
|
610 | 610 | 'new_password': new_passwd, |
|
611 | 611 | 'user': user, |
|
612 | 612 | 'email': user_email, |
|
613 | 613 | 'date': datetime.datetime.now() |
|
614 | 614 | } |
|
615 | 615 | |
|
616 | 616 | (subject, headers, email_body, |
|
617 | 617 | email_body_plaintext) = EmailNotificationModel().render_email( |
|
618 | 618 | EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION, |
|
619 | 619 | **email_kwargs) |
|
620 | 620 | |
|
621 | 621 | recipients = [user_email] |
|
622 | 622 | |
|
623 | 623 | action_logger_generic( |
|
624 | 624 | 'sent new password to user: {} with email: {}'.format( |
|
625 | 625 | user, user_email), namespace='security.password_reset') |
|
626 | 626 | |
|
627 | 627 | run_task(tasks.send_email, recipients, subject, |
|
628 | 628 | email_body_plaintext, email_body) |
|
629 | 629 | |
|
630 | 630 | except Exception: |
|
631 | 631 | log.error('Failed to update user password') |
|
632 | 632 | log.error(traceback.format_exc()) |
|
633 | 633 | if pre_db: |
|
634 | 634 | # we rollback only if local db stuff fails. If it gets into 

635 | 635 | # run_task, we're past the rollback state and it wouldn't work then 
|
636 | 636 | Session().rollback() |
|
637 | 637 | |
|
638 | 638 | return True |
|
639 | 639 | |
|
640 | 640 | def fill_data(self, auth_user, user_id=None, api_key=None, username=None): |
|
641 | 641 | """ |
|
642 | 642 | Fetches auth_user by user_id, or api_key if present. 

643 | 643 | Fills auth_user attributes with those taken from database. 

644 | 644 | Additionally sets is_authenticated to False if the lookup 

645 | 645 | fails or the user is not present in the database. 
|
646 | 646 | |
|
647 | 647 | :param auth_user: instance of user to set attributes |
|
648 | 648 | :param user_id: user id to fetch by |
|
649 | 649 | :param api_key: api key to fetch by |
|
650 | 650 | :param username: username to fetch by |
|
651 | 651 | """ |
|
652 | 652 | if user_id is None and api_key is None and username is None: |
|
653 | 653 | raise Exception('You need to pass user_id, api_key or username') |
|
654 | 654 | |
|
655 | 655 | log.debug( |
|
656 | 656 | 'doing fill data based on: user_id:%s api_key:%s username:%s', |
|
657 | 657 | user_id, api_key, username) |
|
658 | 658 | try: |
|
659 | 659 | dbuser = None |
|
660 | 660 | if user_id: |
|
661 | 661 | dbuser = self.get(user_id) |
|
662 | 662 | elif api_key: |
|
663 | 663 | dbuser = self.get_by_auth_token(api_key) |
|
664 | 664 | elif username: |
|
665 | 665 | dbuser = self.get_by_username(username) |
|
666 | 666 | |
|
667 | 667 | if not dbuser: |
|
668 | 668 | log.warning( |
|
669 | 669 | 'Unable to lookup user by id:%s api_key:%s username:%s', |
|
670 | 670 | user_id, api_key, username) |
|
671 | 671 | return False |
|
672 | 672 | if not dbuser.active: |
|
673 | 673 | log.debug('User `%s:%s` is inactive, skipping fill data', |
|
674 | 674 | username, user_id) |
|
675 | 675 | return False |
|
676 | 676 | |
|
677 | 677 | log.debug('filling user:%s data', dbuser) |
|
678 | 678 | |
|
679 | 679 | # TODO: johbo: Think about this and find a clean solution |
|
680 | 680 | user_data = dbuser.get_dict() |
|
681 | 681 | user_data.update(dbuser.get_api_data(include_secrets=True)) |
|
682 | user_data.update({ | |
|
683 | # set explicit the safe escaped values | |
|
684 | 'first_name': dbuser.first_name, | |
|
685 | 'last_name': dbuser.last_name, | |
|
686 | }) | |
|
682 | 687 | |
|
683 | 688 | for k, v in user_data.iteritems(): |
|
684 | 689 | # properties of auth user we dont update |
|
685 | 690 | if k not in ['auth_tokens', 'permissions']: |
|
686 | 691 | setattr(auth_user, k, v) |
|
687 | 692 | |
|
688 | 693 | # few extras |
|
689 | 694 | setattr(auth_user, 'feed_token', dbuser.feed_token) |
|
690 | 695 | except Exception: |
|
691 | 696 | log.error(traceback.format_exc()) |
|
692 | 697 | auth_user.is_authenticated = False |
|
693 | 698 | return False |
|
694 | 699 | |
|
695 | 700 | return True |
|
696 | 701 | |
|
697 | 702 | def has_perm(self, user, perm): |
|
698 | 703 | perm = self._get_perm(perm) |
|
699 | 704 | user = self._get_user(user) |
|
700 | 705 | |
|
701 | 706 | return UserToPerm.query().filter(UserToPerm.user == user)\ |
|
702 | 707 | .filter(UserToPerm.permission == perm).scalar() is not None |
|
703 | 708 | |
|
704 | 709 | def grant_perm(self, user, perm): |
|
705 | 710 | """ |
|
706 | 711 | Grant user global permissions |
|
707 | 712 | |
|
708 | 713 | :param user: |
|
709 | 714 | :param perm: |
|
710 | 715 | """ |
|
711 | 716 | user = self._get_user(user) |
|
712 | 717 | perm = self._get_perm(perm) |
|
713 | 718 | # if this permission is already granted skip it |
|
714 | 719 | _perm = UserToPerm.query()\ |
|
715 | 720 | .filter(UserToPerm.user == user)\ |
|
716 | 721 | .filter(UserToPerm.permission == perm)\ |
|
717 | 722 | .scalar() |
|
718 | 723 | if _perm: |
|
719 | 724 | return |
|
720 | 725 | new = UserToPerm() |
|
721 | 726 | new.user = user |
|
722 | 727 | new.permission = perm |
|
723 | 728 | self.sa.add(new) |
|
724 | 729 | return new |
|
725 | 730 | |
|
726 | 731 | def revoke_perm(self, user, perm): |
|
727 | 732 | """ |
|
728 | 733 | Revoke users global permissions |
|
729 | 734 | |
|
730 | 735 | :param user: |
|
731 | 736 | :param perm: |
|
732 | 737 | """ |
|
733 | 738 | user = self._get_user(user) |
|
734 | 739 | perm = self._get_perm(perm) |
|
735 | 740 | |
|
736 | 741 | obj = UserToPerm.query()\ |
|
737 | 742 | .filter(UserToPerm.user == user)\ |
|
738 | 743 | .filter(UserToPerm.permission == perm)\ |
|
739 | 744 | .scalar() |
|
740 | 745 | if obj: |
|
741 | 746 | self.sa.delete(obj) |
|
742 | 747 | |
|
743 | 748 | def add_extra_email(self, user, email): |
|
744 | 749 | """ |
|
745 | 750 | Adds email address to UserEmailMap |
|
746 | 751 | |
|
747 | 752 | :param user: |
|
748 | 753 | :param email: |
|
749 | 754 | """ |
|
750 | 755 | from rhodecode.model import forms |
|
751 | 756 | form = forms.UserExtraEmailForm()() |
|
752 | 757 | data = form.to_python({'email': email}) |
|
753 | 758 | user = self._get_user(user) |
|
754 | 759 | |
|
755 | 760 | obj = UserEmailMap() |
|
756 | 761 | obj.user = user |
|
757 | 762 | obj.email = data['email'] |
|
758 | 763 | self.sa.add(obj) |
|
759 | 764 | return obj |
|
760 | 765 | |
|
761 | 766 | def delete_extra_email(self, user, email_id): |
|
762 | 767 | """ |
|
763 | 768 | Removes email address from UserEmailMap |
|
764 | 769 | |
|
765 | 770 | :param user: |
|
766 | 771 | :param email_id: |
|
767 | 772 | """ |
|
768 | 773 | user = self._get_user(user) |
|
769 | 774 | obj = UserEmailMap.query().get(email_id) |
|
770 | 775 | if obj and obj.user_id == user.user_id: |
|
771 | 776 | self.sa.delete(obj) |
|
772 | 777 | |
|
773 | 778 | def parse_ip_range(self, ip_range): |
|
774 | 779 | ip_list = [] |
|
775 | 780 | def make_unique(value): |
|
776 | 781 | seen = [] |
|
777 | 782 | return [c for c in value if not (c in seen or seen.append(c))] |
|
778 | 783 | |
|
779 | 784 | # first, split by commas
|
780 | 785 | for ip_range in ip_range.split(','): |
|
781 | 786 | if not ip_range: |
|
782 | 787 | continue |
|
783 | 788 | ip_range = ip_range.strip() |
|
784 | 789 | if '-' in ip_range: |
|
785 | 790 | start_ip, end_ip = ip_range.split('-', 1) |
|
786 | 791 | start_ip = ipaddress.ip_address(start_ip.strip()) |
|
787 | 792 | end_ip = ipaddress.ip_address(end_ip.strip()) |
|
788 | 793 | parsed_ip_range = [] |
|
789 | 794 | |
|
790 | 795 | for index in xrange(int(start_ip), int(end_ip) + 1): |
|
791 | 796 | new_ip = ipaddress.ip_address(index) |
|
792 | 797 | parsed_ip_range.append(str(new_ip)) |
|
793 | 798 | ip_list.extend(parsed_ip_range) |
|
794 | 799 | else: |
|
795 | 800 | ip_list.append(ip_range) |
|
796 | 801 | |
|
797 | 802 | return make_unique(ip_list) |
|
798 | 803 | |
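
A short worked example of parse_ip_range; the input and output values are illustrative:

    model = UserModel()
    model.parse_ip_range('127.0.0.1-127.0.0.3, 192.168.0.1')
    # -> ['127.0.0.1', '127.0.0.2', '127.0.0.3', '192.168.0.1']
    # ranges are inclusive on both ends, and make_unique() drops
    # duplicates while preserving first-seen order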
|
799 | 804 | def add_extra_ip(self, user, ip, description=None): |
|
800 | 805 | """ |
|
801 | 806 | Adds ip address to UserIpMap |
|
802 | 807 | |
|
803 | 808 | :param user: |
|
804 | 809 | :param ip: |
|
805 | 810 | """ |
|
806 | 811 | from rhodecode.model import forms |
|
807 | 812 | form = forms.UserExtraIpForm()() |
|
808 | 813 | data = form.to_python({'ip': ip}) |
|
809 | 814 | user = self._get_user(user) |
|
810 | 815 | |
|
811 | 816 | obj = UserIpMap() |
|
812 | 817 | obj.user = user |
|
813 | 818 | obj.ip_addr = data['ip'] |
|
814 | 819 | obj.description = description |
|
815 | 820 | self.sa.add(obj) |
|
816 | 821 | return obj |
|
817 | 822 | |
|
818 | 823 | def delete_extra_ip(self, user, ip_id): |
|
819 | 824 | """ |
|
820 | 825 | Removes ip address from UserIpMap |
|
821 | 826 | |
|
822 | 827 | :param user: |
|
823 | 828 | :param ip_id: |
|
824 | 829 | """ |
|
825 | 830 | user = self._get_user(user) |
|
826 | 831 | obj = UserIpMap.query().get(ip_id) |
|
827 | 832 | if obj and obj.user_id == user.user_id: |
|
828 | 833 | self.sa.delete(obj) |
|
829 | 834 | |
|
830 | 835 | def get_accounts_in_creation_order(self, current_user=None): |
|
831 | 836 | """ |
|
832 | 837 | Get accounts in creation order, for deactivation under license limits
|
833 | 838 | |
|
834 | 839 | pick the currently logged-in user and put them at position 0 of the list,

835 | 840 | pick all super-admins in order of creation date and add them to the list,

836 | 841 | pick all other accounts in order of creation and add them to the list.
|
837 | 842 | |
|
838 | 843 | Based on that list, the accounts at its end can be disabled: they are

839 | 844 | the most recently created ones, and they never include a super admin

840 | 845 | or the current user.
|
841 | 846 | |
|
842 | 847 | :param current_user: optionally current user running this operation |
|
843 | 848 | """ |
|
844 | 849 | |
|
845 | 850 | if not current_user: |
|
846 | 851 | current_user = get_current_rhodecode_user() |
|
847 | 852 | active_super_admins = [ |
|
848 | 853 | x.user_id for x in User.query() |
|
849 | 854 | .filter(User.user_id != current_user.user_id) |
|
850 | 855 | .filter(User.active == true()) |
|
851 | 856 | .filter(User.admin == true()) |
|
852 | 857 | .order_by(User.created_on.asc())] |
|
853 | 858 | |
|
854 | 859 | active_regular_users = [ |
|
855 | 860 | x.user_id for x in User.query() |
|
856 | 861 | .filter(User.user_id != current_user.user_id) |
|
857 | 862 | .filter(User.active == true()) |
|
858 | 863 | .filter(User.admin == false()) |
|
859 | 864 | .order_by(User.created_on.asc())] |
|
860 | 865 | |
|
861 | 866 | list_of_accounts = [current_user.user_id] |
|
862 | 867 | list_of_accounts += active_super_admins |
|
863 | 868 | list_of_accounts += active_regular_users |
|
864 | 869 | |
|
865 | 870 | return list_of_accounts |
|
866 | 871 | |
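
A worked example of the resulting order, with hypothetical user ids:

    # current user: id 7; super-admins created in order [2, 5];
    # regular active users created in order [3, 4, 6]
    #
    #   model.get_accounts_in_creation_order(current_user)
    #   -> [7, 2, 5, 3, 4, 6]
    #
    # current user first, then super-admins, then everyone else,
    # each group ordered by creation date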
|
867 | 872 | def deactivate_last_users(self, expected_users): |
|
868 | 873 | """ |
|
869 | 874 | Deactivate accounts that are over the license limits. |
|
870 | 875 | The algorithm for choosing which accounts to disable is as follows:
|
871 | 876 | |
|
872 | 877 | Get current user, then super admins in creation order, then regular |
|
873 | 878 | active users in creation order. |
|
874 | 879 | |
|
875 | 880 | Using that list, we mark the accounts at its end as inactive.

876 | 881 | This way we block only the most recently created accounts.
|
877 | 882 | |
|
878 | 883 | :param expected_users: the expected number of allowed users; we

879 | 884 | deactivate accounts beyond that count, from the end of that list
|
880 | 885 | """ |
|
881 | 886 | |
|
882 | 887 | list_of_accounts = self.get_accounts_in_creation_order() |
|
883 | 888 | |
|
884 | 889 | for acc_id in list_of_accounts[expected_users + 1:]: |
|
885 | 890 | user = User.get(acc_id) |
|
886 | 891 | log.info('Deactivating account %s for license unlock', user) |
|
887 | 892 | user.active = False |
|
888 | 893 | Session().add(user) |
|
889 | 894 | Session().commit() |
|
890 | 895 | |
|
891 | 896 | return |
|
892 | 897 | |
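
A worked example of the slicing in deactivate_last_users, continuing the hypothetical ids above:

    # list_of_accounts == [7, 2, 5, 3, 4, 6], expected_users == 3
    # list_of_accounts[expected_users + 1:] == [4, 6]
    #
    # so the two newest regular accounts are deactivated; note that, as
    # written, the slice keeps expected_users + 1 accounts active, i.e.
    # the current user effectively does not count against the limit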
|
893 | 898 | def get_user_log(self, user, filter_term): |
|
894 | 899 | user_log = UserLog.query()\ |
|
895 | 900 | .filter(or_(UserLog.user_id == user.user_id, |
|
896 | 901 | UserLog.username == user.username))\ |
|
897 | 902 | .options(joinedload(UserLog.user))\ |
|
898 | 903 | .options(joinedload(UserLog.repository))\ |
|
899 | 904 | .order_by(UserLog.action_date.desc()) |
|
900 | 905 | |
|
901 | 906 | user_log = user_log_filter(user_log, filter_term) |
|
902 | 907 | return user_log |
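
Finally, a hedged usage sketch for the audit-log helper above; the filter term is an assumption for illustration, and the returned query can be paginated by the caller:

    model = UserModel()
    user = model.get_by_username('example-user')  # hypothetical account

    # returns an ordered SQLAlchemy query (newest entries first)
    user_log = model.get_user_log(user, filter_term='push')
    latest_entries = user_log.limit(10).all()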
|