security: use new safe escaped user attributes across the application....
r1815:7cb6e1ce default

The requested changes are too big, so the diff content below is truncated.
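
The recurring change in this diff is that call sites stop hand-escaping the raw User.name / User.lastname columns (h.escape(user.name), or nothing at all) and instead read the new first_name / last_name attributes, which are expected to return already-escaped values. The model-side change is not part of the truncated diff, so the following standalone sketch only illustrates that assumed pattern; the class name UserSketch and the stdlib html.escape are stand-ins for the real User model and RhodeCode's h.escape helper.

# Illustrative sketch only -- not part of the commit. UserSketch and
# html.escape are assumptions standing in for the RhodeCode User model
# and its h.escape helper.
from html import escape


class UserSketch(object):
    """Stand-in for a user record with raw name/lastname columns."""

    def __init__(self, name, lastname):
        self.name = name          # raw, possibly attacker-controlled value
        self.lastname = lastname  # raw, possibly attacker-controlled value

    @property
    def first_name(self):
        # Escaped on access, so JSON builders and templates can read the
        # attribute directly instead of remembering to call
        # h.escape(user.name) at every call site.
        return escape(self.name or u'')

    @property
    def last_name(self):
        return escape(self.lastname or u'')


if __name__ == '__main__':
    user = UserSketch(
        name='<img src="/image1" onload="alert(\'Hello, World!\');">',
        lastname='#"><img src=x onerror=prompt(document.cookie);>')
    print(user.first_name)  # markup is rendered inert: &lt;img src=...
    print(user.last_name)

Centralising the escaping on the attribute means a call site that forgets to escape (the situation exercised by test_index_with_name_with_tags in the second hunk) still renders the payload inert.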

@@ -1,308 +1,308 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import datetime
23 23
24 24 from pyramid.httpexceptions import HTTPFound
25 25 from pyramid.view import view_config
26 26 from sqlalchemy.sql.functions import coalesce
27 27
28 28 from rhodecode.lib.helpers import Page
29 29 from rhodecode.lib.ext_json import json
30 30
31 31 from rhodecode.apps._base import BaseAppView, DataGridAppView
32 32 from rhodecode.lib.auth import (
33 33 LoginRequired, HasPermissionAllDecorator, CSRFRequired)
34 34 from rhodecode.lib import helpers as h
35 35 from rhodecode.lib.utils import PartialRenderer
36 36 from rhodecode.lib.utils2 import safe_int, safe_unicode
37 37 from rhodecode.model.auth_token import AuthTokenModel
38 38 from rhodecode.model.user import UserModel
39 39 from rhodecode.model.user_group import UserGroupModel
40 40 from rhodecode.model.db import User, or_
41 41 from rhodecode.model.meta import Session
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 class AdminUsersView(BaseAppView, DataGridAppView):
47 47 ALLOW_SCOPED_TOKENS = False
48 48 """
49 49 This view has alternative version inside EE, if modified please take a look
50 50 in there as well.
51 51 """
52 52
53 53 def load_default_context(self):
54 54 c = self._get_local_tmpl_context()
55 55 c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS
56 56 self._register_global_c(c)
57 57 return c
58 58
59 59 def _redirect_for_default_user(self, username):
60 60 _ = self.request.translate
61 61 if username == User.DEFAULT_USER:
62 62 h.flash(_("You can't edit this user"), category='warning')
63 63 # TODO(marcink): redirect to 'users' admin panel once this
64 64 # is a pyramid view
65 65 raise HTTPFound('/')
66 66
67 67 @HasPermissionAllDecorator('hg.admin')
68 68 @view_config(
69 69 route_name='users', request_method='GET',
70 70 renderer='rhodecode:templates/admin/users/users.mako')
71 71 def users_list(self):
72 72 c = self.load_default_context()
73 73 return self._get_template_context(c)
74 74
75 75 @HasPermissionAllDecorator('hg.admin')
76 76 @view_config(
77 77 # renderer defined below
78 78 route_name='users_data', request_method='GET',
79 79 renderer='json_ext', xhr=True)
80 80 def users_list_data(self):
81 81 draw, start, limit = self._extract_chunk(self.request)
82 82 search_q, order_by, order_dir = self._extract_ordering(self.request)
83 83
84 84 _render = PartialRenderer('data_table/_dt_elements.mako')
85 85
86 86 def user_actions(user_id, username):
87 87 return _render("user_actions", user_id, username)
88 88
89 89 users_data_total_count = User.query()\
90 90 .filter(User.username != User.DEFAULT_USER) \
91 91 .count()
92 92
93 93 # json generate
94 94 base_q = User.query().filter(User.username != User.DEFAULT_USER)
95 95
96 96 if search_q:
97 97 like_expression = u'%{}%'.format(safe_unicode(search_q))
98 98 base_q = base_q.filter(or_(
99 99 User.username.ilike(like_expression),
100 100 User._email.ilike(like_expression),
101 101 User.name.ilike(like_expression),
102 102 User.lastname.ilike(like_expression),
103 103 ))
104 104
105 105 users_data_total_filtered_count = base_q.count()
106 106
107 107 sort_col = getattr(User, order_by, None)
108 108 if sort_col:
109 109 if order_dir == 'asc':
110 110 # handle null values properly to order by NULL last
111 111 if order_by in ['last_activity']:
112 112 sort_col = coalesce(sort_col, datetime.date.max)
113 113 sort_col = sort_col.asc()
114 114 else:
115 115 # handle null values properly to order by NULL last
116 116 if order_by in ['last_activity']:
117 117 sort_col = coalesce(sort_col, datetime.date.min)
118 118 sort_col = sort_col.desc()
119 119
120 120 base_q = base_q.order_by(sort_col)
121 121 base_q = base_q.offset(start).limit(limit)
122 122
123 123 users_list = base_q.all()
124 124
125 125 users_data = []
126 126 for user in users_list:
127 127 users_data.append({
128 128 "username": h.gravatar_with_user(user.username),
129 129 "email": user.email,
130 "first_name": h.escape(user.name),
131 "last_name": h.escape(user.lastname),
130 "first_name": user.first_name,
131 "last_name": user.last_name,
132 132 "last_login": h.format_date(user.last_login),
133 133 "last_activity": h.format_date(user.last_activity),
134 134 "active": h.bool2icon(user.active),
135 135 "active_raw": user.active,
136 136 "admin": h.bool2icon(user.admin),
137 137 "extern_type": user.extern_type,
138 138 "extern_name": user.extern_name,
139 139 "action": user_actions(user.user_id, user.username),
140 140 })
141 141
142 142 data = ({
143 143 'draw': draw,
144 144 'data': users_data,
145 145 'recordsTotal': users_data_total_count,
146 146 'recordsFiltered': users_data_total_filtered_count,
147 147 })
148 148
149 149 return data
150 150
151 151 @LoginRequired()
152 152 @HasPermissionAllDecorator('hg.admin')
153 153 @view_config(
154 154 route_name='edit_user_auth_tokens', request_method='GET',
155 155 renderer='rhodecode:templates/admin/users/user_edit.mako')
156 156 def auth_tokens(self):
157 157 _ = self.request.translate
158 158 c = self.load_default_context()
159 159
160 160 user_id = self.request.matchdict.get('user_id')
161 161 c.user = User.get_or_404(user_id, pyramid_exc=True)
162 162 self._redirect_for_default_user(c.user.username)
163 163
164 164 c.active = 'auth_tokens'
165 165
166 166 c.lifetime_values = [
167 167 (str(-1), _('forever')),
168 168 (str(5), _('5 minutes')),
169 169 (str(60), _('1 hour')),
170 170 (str(60 * 24), _('1 day')),
171 171 (str(60 * 24 * 30), _('1 month')),
172 172 ]
173 173 c.lifetime_options = [(c.lifetime_values, _("Lifetime"))]
174 174 c.role_values = [
175 175 (x, AuthTokenModel.cls._get_role_name(x))
176 176 for x in AuthTokenModel.cls.ROLES]
177 177 c.role_options = [(c.role_values, _("Role"))]
178 178 c.user_auth_tokens = AuthTokenModel().get_auth_tokens(
179 179 c.user.user_id, show_expired=True)
180 180 return self._get_template_context(c)
181 181
182 182 def maybe_attach_token_scope(self, token):
183 183 # implemented in EE edition
184 184 pass
185 185
186 186 @LoginRequired()
187 187 @HasPermissionAllDecorator('hg.admin')
188 188 @CSRFRequired()
189 189 @view_config(
190 190 route_name='edit_user_auth_tokens_add', request_method='POST')
191 191 def auth_tokens_add(self):
192 192 _ = self.request.translate
193 193 c = self.load_default_context()
194 194
195 195 user_id = self.request.matchdict.get('user_id')
196 196 c.user = User.get_or_404(user_id, pyramid_exc=True)
197 197 self._redirect_for_default_user(c.user.username)
198 198
199 199 lifetime = safe_int(self.request.POST.get('lifetime'), -1)
200 200 description = self.request.POST.get('description')
201 201 role = self.request.POST.get('role')
202 202
203 203 token = AuthTokenModel().create(
204 204 c.user.user_id, description, lifetime, role)
205 205 self.maybe_attach_token_scope(token)
206 206 Session().commit()
207 207
208 208 h.flash(_("Auth token successfully created"), category='success')
209 209 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
210 210
211 211 @LoginRequired()
212 212 @HasPermissionAllDecorator('hg.admin')
213 213 @CSRFRequired()
214 214 @view_config(
215 215 route_name='edit_user_auth_tokens_delete', request_method='POST')
216 216 def auth_tokens_delete(self):
217 217 _ = self.request.translate
218 218 c = self.load_default_context()
219 219
220 220 user_id = self.request.matchdict.get('user_id')
221 221 c.user = User.get_or_404(user_id, pyramid_exc=True)
222 222 self._redirect_for_default_user(c.user.username)
223 223
224 224 del_auth_token = self.request.POST.get('del_auth_token')
225 225
226 226 if del_auth_token:
227 227 AuthTokenModel().delete(del_auth_token, c.user.user_id)
228 228 Session().commit()
229 229 h.flash(_("Auth token successfully deleted"), category='success')
230 230
231 231 return HTTPFound(h.route_path('edit_user_auth_tokens', user_id=user_id))
232 232
233 233 @LoginRequired()
234 234 @HasPermissionAllDecorator('hg.admin')
235 235 @view_config(
236 236 route_name='edit_user_groups_management', request_method='GET',
237 237 renderer='rhodecode:templates/admin/users/user_edit.mako')
238 238 def groups_management(self):
239 239 c = self.load_default_context()
240 240
241 241 user_id = self.request.matchdict.get('user_id')
242 242 c.user = User.get_or_404(user_id, pyramid_exc=True)
243 243 c.data = c.user.group_member
244 244 self._redirect_for_default_user(c.user.username)
245 245 groups = [UserGroupModel.get_user_groups_as_dict(group.users_group)
246 246 for group in c.user.group_member]
247 247 c.groups = json.dumps(groups)
248 248 c.active = 'groups'
249 249
250 250 return self._get_template_context(c)
251 251
252 252 @LoginRequired()
253 253 @HasPermissionAllDecorator('hg.admin')
254 254 @CSRFRequired()
255 255 @view_config(
256 256 route_name='edit_user_groups_management_updates', request_method='POST')
257 257 def groups_management_updates(self):
258 258 _ = self.request.translate
259 259 c = self.load_default_context()
260 260
261 261 user_id = self.request.matchdict.get('user_id')
262 262 c.user = User.get_or_404(user_id, pyramid_exc=True)
263 263 self._redirect_for_default_user(c.user.username)
264 264
265 265 users_groups = set(self.request.POST.getall('users_group_id'))
266 266 users_groups_model = []
267 267
268 268 for ugid in users_groups:
269 269 users_groups_model.append(UserGroupModel().get_group(safe_int(ugid)))
270 270 user_group_model = UserGroupModel()
271 271 user_group_model.change_groups(c.user, users_groups_model)
272 272
273 273 Session().commit()
274 274 c.active = 'user_groups_management'
275 275 h.flash(_("Groups successfully changed"), category='success')
276 276
277 277 return HTTPFound(h.route_path(
278 278 'edit_user_groups_management', user_id=user_id))
279 279
280 280 @LoginRequired()
281 281 @HasPermissionAllDecorator('hg.admin')
282 282 @view_config(
283 283 route_name='edit_user_audit_logs', request_method='GET',
284 284 renderer='rhodecode:templates/admin/users/user_edit.mako')
285 285 def user_audit_logs(self):
286 286 _ = self.request.translate
287 287 c = self.load_default_context()
288 288
289 289 user_id = self.request.matchdict.get('user_id')
290 290 c.user = User.get_or_404(user_id, pyramid_exc=True)
291 291 self._redirect_for_default_user(c.user.username)
292 292 c.active = 'audit'
293 293
294 294 p = safe_int(self.request.GET.get('page', 1), 1)
295 295
296 296 filter_term = self.request.GET.get('filter')
297 297 user_log = UserModel().get_user_log(c.user, filter_term)
298 298
299 299 def url_generator(**kw):
300 300 if filter_term:
301 301 kw['filter'] = filter_term
302 302 return self.request.current_route_path(_query=kw)
303 303
304 304 c.audit_logs = Page(user_log, page=p, items_per_page=10,
305 305 url=url_generator)
306 306 c.filter_term = filter_term
307 307 return self._get_template_context(c)
308 308
@@ -1,134 +1,134 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23
24 24 import rhodecode
25 25 from rhodecode.model.db import Repository
26 26 from rhodecode.model.meta import Session
27 27 from rhodecode.model.repo import RepoModel
28 28 from rhodecode.model.repo_group import RepoGroupModel
29 29 from rhodecode.model.settings import SettingsModel
30 30 from rhodecode.tests import TestController
31 31 from rhodecode.tests.fixture import Fixture
32 32 from rhodecode.lib import helpers as h
33 33
34 34 fixture = Fixture()
35 35
36 36
37 37 def route_path(name, **kwargs):
38 38 return {
39 39 'home': '/',
40 40 'repo_group_home': '/{repo_group_name}'
41 41 }[name].format(**kwargs)
42 42
43 43
44 44 class TestHomeController(TestController):
45 45
46 46 def test_index(self):
47 47 self.log_user()
48 48 response = self.app.get(route_path('home'))
49 49 # if global permission is set
50 50 response.mustcontain('Add Repository')
51 51
52 52 # search for objects inside the JavaScript JSON
53 53 for repo in Repository.getAll():
54 54 response.mustcontain('"name_raw": "%s"' % repo.repo_name)
55 55
56 56 def test_index_contains_statics_with_ver(self):
57 57 from pylons import tmpl_context as c
58 58
59 59 self.log_user()
60 60 response = self.app.get(route_path('home'))
61 61
62 62 rhodecode_version_hash = c.rhodecode_version_hash
63 63 response.mustcontain('style.css?ver={0}'.format(rhodecode_version_hash))
64 64 response.mustcontain('rhodecode-components.js?ver={0}'.format(rhodecode_version_hash))
65 65
66 66 def test_index_contains_backend_specific_details(self, backend):
67 67 self.log_user()
68 68 response = self.app.get(route_path('home'))
69 69 tip = backend.repo.get_commit().raw_id
70 70
71 71 # html in javascript variable:
72 72 response.mustcontain(r'<i class=\"icon-%s\"' % (backend.alias, ))
73 73 response.mustcontain(r'href=\"/%s\"' % (backend.repo_name, ))
74 74
75 75 response.mustcontain("""/%s/changeset/%s""" % (backend.repo_name, tip))
76 76 response.mustcontain("""Added a symlink""")
77 77
78 78 def test_index_with_anonymous_access_disabled(self):
79 79 with fixture.anon_access(False):
80 80 response = self.app.get(route_path('home'), status=302)
81 81 assert 'login' in response.location
82 82
83 83 def test_index_page_on_groups(self, autologin_user, repo_group):
84 84 response = self.app.get(route_path('repo_group_home', repo_group_name='gr1'))
85 85 response.mustcontain("gr1/repo_in_group")
86 86
87 87 def test_index_page_on_group_with_trailing_slash(
88 88 self, autologin_user, repo_group):
89 89 response = self.app.get(route_path('repo_group_home', repo_group_name='gr1') + '/')
90 90 response.mustcontain("gr1/repo_in_group")
91 91
92 92 @pytest.fixture(scope='class')
93 93 def repo_group(self, request):
94 94 gr = fixture.create_repo_group('gr1')
95 95 fixture.create_repo(name='gr1/repo_in_group', repo_group=gr)
96 96
97 97 @request.addfinalizer
98 98 def cleanup():
99 99 RepoModel().delete('gr1/repo_in_group')
100 100 RepoGroupModel().delete(repo_group='gr1', force_delete=True)
101 101 Session().commit()
102 102
103 103 def test_index_with_name_with_tags(self, user_util, autologin_user):
104 104 user = user_util.create_user()
105 105 username = user.username
106 106 user.name = '<img src="/image1" onload="alert(\'Hello, World!\');">'
107 107 user.lastname = '#"><img src=x onerror=prompt(document.cookie);>'
108 108
109 109 Session().add(user)
110 110 Session().commit()
111 111 user_util.create_repo(owner=username)
112 112
113 113 response = self.app.get(route_path('home'))
114 response.mustcontain(h.html_escape(h.escape(user.name)))
115 response.mustcontain(h.html_escape(h.escape(user.lastname)))
114 response.mustcontain(h.html_escape(user.first_name))
115 response.mustcontain(h.html_escape(user.last_name))
116 116
117 117 @pytest.mark.parametrize("name, state", [
118 118 ('Disabled', False),
119 119 ('Enabled', True),
120 120 ])
121 121 def test_index_show_version(self, autologin_user, name, state):
122 122 version_string = 'RhodeCode Enterprise %s' % rhodecode.__version__
123 123
124 124 sett = SettingsModel().create_or_update_setting(
125 125 'show_version', state, 'bool')
126 126 Session().add(sett)
127 127 Session().commit()
128 128 SettingsModel().invalidate_settings_cache()
129 129
130 130 response = self.app.get(route_path('home'))
131 131 if state is True:
132 132 response.mustcontain(version_string)
133 133 if state is False:
134 134 response.mustcontain(no=[version_string])
@@ -1,76 +1,76 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 from rhodecode.lib import helpers as h
22 22 from rhodecode.lib.utils2 import safe_int
23 23
24 24
25 25 def reviewer_as_json(user, reasons=None, mandatory=False):
26 26 """
27 27 Returns json struct of a reviewer for frontend
28 28
29 29 :param user: the reviewer
30 30 :param reasons: list of strings of why they are reviewers
31 31 :param mandatory: bool, to set user as mandatory
32 32 """
33 33
34 34 return {
35 35 'user_id': user.user_id,
36 36 'reasons': reasons or [],
37 37 'mandatory': mandatory,
38 38 'username': user.username,
39 'firstname': user.firstname,
40 'lastname': user.lastname,
39 'first_name': user.first_name,
40 'last_name': user.last_name,
41 41 'gravatar_link': h.gravatar_url(user.email, 14),
42 42 }
43 43
44 44
45 45 def get_default_reviewers_data(
46 46 current_user, source_repo, source_commit, target_repo, target_commit):
47 47
48 48 """ Return json for default reviewers of a repository """
49 49
50 50 reasons = ['Default reviewer', 'Repository owner']
51 51 default = reviewer_as_json(
52 52 user=current_user, reasons=reasons, mandatory=False)
53 53
54 54 return {
55 55 'api_ver': 'v1', # define version for later possible schema upgrade
56 56 'reviewers': [default],
57 57 'rules': {},
58 58 'rules_data': {},
59 59 }
60 60
61 61
62 62 def validate_default_reviewers(review_members, reviewer_rules):
63 63 """
64 64 Function to validate submitted reviewers against the saved rules
65 65
66 66 """
67 67 reviewers = []
68 68 reviewer_by_id = {}
69 69 for r in review_members:
70 70 reviewer_user_id = safe_int(r['user_id'])
71 71 entry = (reviewer_user_id, r['reasons'], r['mandatory'])
72 72
73 73 reviewer_by_id[reviewer_user_id] = entry
74 74 reviewers.append(entry)
75 75
76 76 return reviewers
@@ -1,510 +1,510 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 User Groups crud controller for pylons
23 23 """
24 24
25 25 import logging
26 26 import formencode
27 27
28 28 import peppercorn
29 29 from formencode import htmlfill
30 30 from pylons import request, tmpl_context as c, url, config
31 31 from pylons.controllers.util import redirect
32 32 from pylons.i18n.translation import _
33 33
34 34 from sqlalchemy.orm import joinedload
35 35
36 36 from rhodecode.lib import auth
37 37 from rhodecode.lib import helpers as h
38 38 from rhodecode.lib import audit_logger
39 39 from rhodecode.lib.ext_json import json
40 40 from rhodecode.lib.exceptions import UserGroupAssignedException,\
41 41 RepoGroupAssignmentError
42 42 from rhodecode.lib.utils import jsonify
43 43 from rhodecode.lib.utils2 import safe_unicode, str2bool, safe_int
44 44 from rhodecode.lib.auth import (
45 45 LoginRequired, NotAnonymous, HasUserGroupPermissionAnyDecorator,
46 46 HasPermissionAnyDecorator, XHRRequired)
47 47 from rhodecode.lib.base import BaseController, render
48 48 from rhodecode.model.permission import PermissionModel
49 49 from rhodecode.model.scm import UserGroupList
50 50 from rhodecode.model.user_group import UserGroupModel
51 51 from rhodecode.model.db import (
52 52 User, UserGroup, UserGroupRepoToPerm, UserGroupRepoGroupToPerm)
53 53 from rhodecode.model.forms import (
54 54 UserGroupForm, UserGroupPermsForm, UserIndividualPermissionsForm,
55 55 UserPermissionsForm)
56 56 from rhodecode.model.meta import Session
57 57
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class UserGroupsController(BaseController):
63 63 """REST Controller styled on the Atom Publishing Protocol"""
64 64
65 65 @LoginRequired()
66 66 def __before__(self):
67 67 super(UserGroupsController, self).__before__()
68 68 c.available_permissions = config['available_permissions']
69 69 PermissionModel().set_global_permission_choices(c, gettext_translator=_)
70 70
71 71 def __load_data(self, user_group_id):
72 72 c.group_members_obj = [x.user for x in c.user_group.members]
73 73 c.group_members_obj.sort(key=lambda u: u.username.lower())
74 74 c.group_members = [(x.user_id, x.username) for x in c.group_members_obj]
75 75
76 76 def __load_defaults(self, user_group_id):
77 77 """
78 78 Load defaults settings for edit, and update
79 79
80 80 :param user_group_id:
81 81 """
82 82 user_group = UserGroup.get_or_404(user_group_id)
83 83 data = user_group.get_dict()
84 84 # fill owner
85 85 if user_group.user:
86 86 data.update({'user': user_group.user.username})
87 87 else:
88 88 replacement_user = User.get_first_super_admin().username
89 89 data.update({'user': replacement_user})
90 90 return data
91 91
92 92 def _revoke_perms_on_yourself(self, form_result):
93 93 _updates = filter(lambda u: c.rhodecode_user.user_id == int(u[0]),
94 94 form_result['perm_updates'])
95 95 _additions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]),
96 96 form_result['perm_additions'])
97 97 _deletions = filter(lambda u: c.rhodecode_user.user_id == int(u[0]),
98 98 form_result['perm_deletions'])
99 99 admin_perm = 'usergroup.admin'
100 100 if _updates and _updates[0][1] != admin_perm or \
101 101 _additions and _additions[0][1] != admin_perm or \
102 102 _deletions and _deletions[0][1] != admin_perm:
103 103 return True
104 104 return False
105 105
106 106 # permission check inside
107 107 @NotAnonymous()
108 108 def index(self):
109 109
110 110 from rhodecode.lib.utils import PartialRenderer
111 111 _render = PartialRenderer('data_table/_dt_elements.mako')
112 112
113 113 def user_group_name(user_group_id, user_group_name):
114 114 return _render("user_group_name", user_group_id, user_group_name)
115 115
116 116 def user_group_actions(user_group_id, user_group_name):
117 117 return _render("user_group_actions", user_group_id, user_group_name)
118 118
119 119 # json generate
120 120 group_iter = UserGroupList(UserGroup.query().all(),
121 121 perm_set=['usergroup.admin'])
122 122
123 123 user_groups_data = []
124 124 for user_gr in group_iter:
125 125 user_groups_data.append({
126 126 "group_name": user_group_name(
127 127 user_gr.users_group_id, h.escape(user_gr.users_group_name)),
128 128 "group_name_raw": user_gr.users_group_name,
129 129 "desc": h.escape(user_gr.user_group_description),
130 130 "members": len(user_gr.members),
131 131 "sync": user_gr.group_data.get('extern_type'),
132 132 "active": h.bool2icon(user_gr.users_group_active),
133 133 "owner": h.escape(h.link_to_user(user_gr.user.username)),
134 134 "action": user_group_actions(
135 135 user_gr.users_group_id, user_gr.users_group_name)
136 136 })
137 137
138 138 c.data = json.dumps(user_groups_data)
139 139 return render('admin/user_groups/user_groups.mako')
140 140
141 141 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
142 142 @auth.CSRFRequired()
143 143 def create(self):
144 144
145 145 users_group_form = UserGroupForm()()
146 146 try:
147 147 form_result = users_group_form.to_python(dict(request.POST))
148 148 user_group = UserGroupModel().create(
149 149 name=form_result['users_group_name'],
150 150 description=form_result['user_group_description'],
151 151 owner=c.rhodecode_user.user_id,
152 152 active=form_result['users_group_active'])
153 153 Session().flush()
154 154 creation_data = user_group.get_api_data()
155 155 user_group_name = form_result['users_group_name']
156 156
157 157 audit_logger.store_web(
158 158 'user_group.create', action_data={'data': creation_data},
159 159 user=c.rhodecode_user)
160 160
161 161 user_group_link = h.link_to(
162 162 h.escape(user_group_name),
163 163 url('edit_users_group', user_group_id=user_group.users_group_id))
164 164 h.flash(h.literal(_('Created user group %(user_group_link)s')
165 165 % {'user_group_link': user_group_link}),
166 166 category='success')
167 167 Session().commit()
168 168 except formencode.Invalid as errors:
169 169 return htmlfill.render(
170 170 render('admin/user_groups/user_group_add.mako'),
171 171 defaults=errors.value,
172 172 errors=errors.error_dict or {},
173 173 prefix_error=False,
174 174 encoding="UTF-8",
175 175 force_defaults=False)
176 176 except Exception:
177 177 log.exception("Exception creating user group")
178 178 h.flash(_('Error occurred during creation of user group %s') \
179 179 % request.POST.get('users_group_name'), category='error')
180 180
181 181 return redirect(
182 182 url('edit_users_group', user_group_id=user_group.users_group_id))
183 183
184 184 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
185 185 def new(self):
186 186 """GET /user_groups/new: Form to create a new item"""
187 187 # url('new_users_group')
188 188 return render('admin/user_groups/user_group_add.mako')
189 189
190 190 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
191 191 @auth.CSRFRequired()
192 192 def update(self, user_group_id):
193 193
194 194 user_group_id = safe_int(user_group_id)
195 195 c.user_group = UserGroup.get_or_404(user_group_id)
196 196 c.active = 'settings'
197 197 self.__load_data(user_group_id)
198 198
199 199 users_group_form = UserGroupForm(
200 200 edit=True, old_data=c.user_group.get_dict(), allow_disabled=True)()
201 201
202 202 old_values = c.user_group.get_api_data()
203 203 try:
204 204 form_result = users_group_form.to_python(request.POST)
205 205 pstruct = peppercorn.parse(request.POST.items())
206 206 form_result['users_group_members'] = pstruct['user_group_members']
207 207
208 208 UserGroupModel().update(c.user_group, form_result)
209 209 updated_user_group = form_result['users_group_name']
210 210
211 211 audit_logger.store_web(
212 212 'user_group.edit', action_data={'old_data': old_values},
213 213 user=c.rhodecode_user)
214 214
215 215 h.flash(_('Updated user group %s') % updated_user_group,
216 216 category='success')
217 217 Session().commit()
218 218 except formencode.Invalid as errors:
219 219 defaults = errors.value
220 220 e = errors.error_dict or {}
221 221
222 222 return htmlfill.render(
223 223 render('admin/user_groups/user_group_edit.mako'),
224 224 defaults=defaults,
225 225 errors=e,
226 226 prefix_error=False,
227 227 encoding="UTF-8",
228 228 force_defaults=False)
229 229 except Exception:
230 230 log.exception("Exception during update of user group")
231 231 h.flash(_('Error occurred during update of user group %s')
232 232 % request.POST.get('users_group_name'), category='error')
233 233
234 234 return redirect(url('edit_users_group', user_group_id=user_group_id))
235 235
236 236 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
237 237 @auth.CSRFRequired()
238 238 def delete(self, user_group_id):
239 239 user_group_id = safe_int(user_group_id)
240 240 c.user_group = UserGroup.get_or_404(user_group_id)
241 241 force = str2bool(request.POST.get('force'))
242 242
243 243 old_values = c.user_group.get_api_data()
244 244 try:
245 245 UserGroupModel().delete(c.user_group, force=force)
246 246 audit_logger.store_web(
247 247 'user.delete', action_data={'old_data': old_values},
248 248 user=c.rhodecode_user)
249 249 Session().commit()
250 250 h.flash(_('Successfully deleted user group'), category='success')
251 251 except UserGroupAssignedException as e:
252 252 h.flash(str(e), category='error')
253 253 except Exception:
254 254 log.exception("Exception during deletion of user group")
255 255 h.flash(_('An error occurred during deletion of user group'),
256 256 category='error')
257 257 return redirect(url('users_groups'))
258 258
259 259 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
260 260 def edit(self, user_group_id):
261 261 """GET /user_groups/user_group_id/edit: Form to edit an existing item"""
262 262 # url('edit_users_group', user_group_id=ID)
263 263
264 264 user_group_id = safe_int(user_group_id)
265 265 c.user_group = UserGroup.get_or_404(user_group_id)
266 266 c.active = 'settings'
267 267 self.__load_data(user_group_id)
268 268
269 269 defaults = self.__load_defaults(user_group_id)
270 270
271 271 return htmlfill.render(
272 272 render('admin/user_groups/user_group_edit.mako'),
273 273 defaults=defaults,
274 274 encoding="UTF-8",
275 275 force_defaults=False
276 276 )
277 277
278 278 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
279 279 def edit_perms(self, user_group_id):
280 280 user_group_id = safe_int(user_group_id)
281 281 c.user_group = UserGroup.get_or_404(user_group_id)
282 282 c.active = 'perms'
283 283
284 284 defaults = {}
285 285 # fill user group users
286 286 for p in c.user_group.user_user_group_to_perm:
287 287 defaults.update({'u_perm_%s' % p.user.user_id:
288 288 p.permission.permission_name})
289 289
290 290 for p in c.user_group.user_group_user_group_to_perm:
291 291 defaults.update({'g_perm_%s' % p.user_group.users_group_id:
292 292 p.permission.permission_name})
293 293
294 294 return htmlfill.render(
295 295 render('admin/user_groups/user_group_edit.mako'),
296 296 defaults=defaults,
297 297 encoding="UTF-8",
298 298 force_defaults=False
299 299 )
300 300
301 301 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
302 302 @auth.CSRFRequired()
303 303 def update_perms(self, user_group_id):
304 304 """
305 305 grant permission for given usergroup
306 306
307 307 :param user_group_id:
308 308 """
309 309 user_group_id = safe_int(user_group_id)
310 310 c.user_group = UserGroup.get_or_404(user_group_id)
311 311 form = UserGroupPermsForm()().to_python(request.POST)
312 312
313 313 if not c.rhodecode_user.is_admin:
314 314 if self._revoke_perms_on_yourself(form):
315 315 msg = _('Cannot change permission for yourself as admin')
316 316 h.flash(msg, category='warning')
317 317 return redirect(url('edit_user_group_perms', user_group_id=user_group_id))
318 318
319 319 try:
320 320 UserGroupModel().update_permissions(user_group_id,
321 321 form['perm_additions'], form['perm_updates'], form['perm_deletions'])
322 322 except RepoGroupAssignmentError:
323 323 h.flash(_('Target group cannot be the same'), category='error')
324 324 return redirect(url('edit_user_group_perms', user_group_id=user_group_id))
325 325
326 326 # TODO(marcink): implement global permissions
327 327 # audit_log.store_web('user_group.edit.permissions')
328 328 Session().commit()
329 329 h.flash(_('User Group permissions updated'), category='success')
330 330 return redirect(url('edit_user_group_perms', user_group_id=user_group_id))
331 331
332 332 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
333 333 def edit_perms_summary(self, user_group_id):
334 334 user_group_id = safe_int(user_group_id)
335 335 c.user_group = UserGroup.get_or_404(user_group_id)
336 336 c.active = 'perms_summary'
337 337 permissions = {
338 338 'repositories': {},
339 339 'repositories_groups': {},
340 340 }
341 341 ugroup_repo_perms = UserGroupRepoToPerm.query()\
342 342 .options(joinedload(UserGroupRepoToPerm.permission))\
343 343 .options(joinedload(UserGroupRepoToPerm.repository))\
344 344 .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\
345 345 .all()
346 346
347 347 for gr in ugroup_repo_perms:
348 348 permissions['repositories'][gr.repository.repo_name] \
349 349 = gr.permission.permission_name
350 350
351 351 ugroup_group_perms = UserGroupRepoGroupToPerm.query()\
352 352 .options(joinedload(UserGroupRepoGroupToPerm.permission))\
353 353 .options(joinedload(UserGroupRepoGroupToPerm.group))\
354 354 .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\
355 355 .all()
356 356
357 357 for gr in ugroup_group_perms:
358 358 permissions['repositories_groups'][gr.group.group_name] \
359 359 = gr.permission.permission_name
360 360 c.permissions = permissions
361 361 return render('admin/user_groups/user_group_edit.mako')
362 362
363 363 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
364 364 def edit_global_perms(self, user_group_id):
365 365 user_group_id = safe_int(user_group_id)
366 366 c.user_group = UserGroup.get_or_404(user_group_id)
367 367 c.active = 'global_perms'
368 368
369 369 c.default_user = User.get_default_user()
370 370 defaults = c.user_group.get_dict()
371 371 defaults.update(c.default_user.get_default_perms(suffix='_inherited'))
372 372 defaults.update(c.user_group.get_default_perms())
373 373
374 374 return htmlfill.render(
375 375 render('admin/user_groups/user_group_edit.mako'),
376 376 defaults=defaults,
377 377 encoding="UTF-8",
378 378 force_defaults=False
379 379 )
380 380
381 381 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
382 382 @auth.CSRFRequired()
383 383 def update_global_perms(self, user_group_id):
384 384 user_group_id = safe_int(user_group_id)
385 385 user_group = UserGroup.get_or_404(user_group_id)
386 386 c.active = 'global_perms'
387 387
388 388 try:
389 389 # first stage that verifies the checkbox
390 390 _form = UserIndividualPermissionsForm()
391 391 form_result = _form.to_python(dict(request.POST))
392 392 inherit_perms = form_result['inherit_default_permissions']
393 393 user_group.inherit_default_permissions = inherit_perms
394 394 Session().add(user_group)
395 395
396 396 if not inherit_perms:
397 397 # only update the individual ones if we un check the flag
398 398 _form = UserPermissionsForm(
399 399 [x[0] for x in c.repo_create_choices],
400 400 [x[0] for x in c.repo_create_on_write_choices],
401 401 [x[0] for x in c.repo_group_create_choices],
402 402 [x[0] for x in c.user_group_create_choices],
403 403 [x[0] for x in c.fork_choices],
404 404 [x[0] for x in c.inherit_default_permission_choices])()
405 405
406 406 form_result = _form.to_python(dict(request.POST))
407 407 form_result.update({'perm_user_group_id': user_group.users_group_id})
408 408
409 409 PermissionModel().update_user_group_permissions(form_result)
410 410
411 411 Session().commit()
412 412 h.flash(_('User Group global permissions updated successfully'),
413 413 category='success')
414 414
415 415 except formencode.Invalid as errors:
416 416 defaults = errors.value
417 417 c.user_group = user_group
418 418 return htmlfill.render(
419 419 render('admin/user_groups/user_group_edit.mako'),
420 420 defaults=defaults,
421 421 errors=errors.error_dict or {},
422 422 prefix_error=False,
423 423 encoding="UTF-8",
424 424 force_defaults=False)
425 425 except Exception:
426 426 log.exception("Exception during permissions saving")
427 427 h.flash(_('An error occurred during permissions saving'),
428 428 category='error')
429 429
430 430 return redirect(url('edit_user_group_global_perms', user_group_id=user_group_id))
431 431
432 432 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
433 433 def edit_advanced(self, user_group_id):
434 434 user_group_id = safe_int(user_group_id)
435 435 c.user_group = UserGroup.get_or_404(user_group_id)
436 436 c.active = 'advanced'
437 437 c.group_members_obj = sorted(
438 438 (x.user for x in c.user_group.members),
439 439 key=lambda u: u.username.lower())
440 440
441 441 c.group_to_repos = sorted(
442 442 (x.repository for x in c.user_group.users_group_repo_to_perm),
443 443 key=lambda u: u.repo_name.lower())
444 444
445 445 c.group_to_repo_groups = sorted(
446 446 (x.group for x in c.user_group.users_group_repo_group_to_perm),
447 447 key=lambda u: u.group_name.lower())
448 448
449 449 return render('admin/user_groups/user_group_edit.mako')
450 450
451 451 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
452 452 def edit_advanced_set_synchronization(self, user_group_id):
453 453 user_group_id = safe_int(user_group_id)
454 454 user_group = UserGroup.get_or_404(user_group_id)
455 455
456 456 existing = user_group.group_data.get('extern_type')
457 457
458 458 if existing:
459 459 new_state = user_group.group_data
460 460 new_state['extern_type'] = None
461 461 else:
462 462 new_state = user_group.group_data
463 463 new_state['extern_type'] = 'manual'
464 464 new_state['extern_type_set_by'] = c.rhodecode_user.username
465 465
466 466 try:
467 467 user_group.group_data = new_state
468 468 Session().add(user_group)
469 469 Session().commit()
470 470
471 471 h.flash(_('User Group synchronization updated successfully'),
472 472 category='success')
473 473 except Exception:
474 474 log.exception("Exception during sync settings saving")
475 475 h.flash(_('An error occurred during synchronization update'),
476 476 category='error')
477 477
478 478 return redirect(
479 479 url('edit_user_group_advanced', user_group_id=user_group_id))
480 480
481 481 @HasUserGroupPermissionAnyDecorator('usergroup.admin')
482 482 @XHRRequired()
483 483 @jsonify
484 484 def user_group_members(self, user_group_id):
485 485 """
486 486 Return members of given user group
487 487 """
488 488 user_group_id = safe_int(user_group_id)
489 489 user_group = UserGroup.get_or_404(user_group_id)
490 490 group_members_obj = sorted((x.user for x in user_group.members),
491 491 key=lambda u: u.username.lower())
492 492
493 493 group_members = [
494 494 {
495 495 'id': user.user_id,
496 'first_name': user.name,
497 'last_name': user.lastname,
496 'first_name': user.first_name,
497 'last_name': user.last_name,
498 498 'username': user.username,
499 499 'icon_link': h.gravatar_url(user.email, 30),
500 500 'value_display': h.person(user.email),
501 501 'value': user.username,
502 502 'value_type': 'user',
503 503 'active': user.active,
504 504 }
505 505 for user in group_members_obj
506 506 ]
507 507
508 508 return {
509 509 'members': group_members
510 510 }
@@ -1,1012 +1,1011 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 pull requests controller for rhodecode for initializing pull requests
23 23 """
24 import types
25
26 24 import peppercorn
27 25 import formencode
28 26 import logging
29 27 import collections
30 28
31 29 from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest
32 30 from pylons import request, tmpl_context as c, url
33 31 from pylons.controllers.util import redirect
34 32 from pylons.i18n.translation import _
35 33 from pyramid.threadlocal import get_current_registry
34 from pyramid.httpexceptions import HTTPFound
36 35 from sqlalchemy.sql import func
37 36 from sqlalchemy.sql.expression import or_
38 37
39 38 from rhodecode import events
40 39 from rhodecode.lib import auth, diffs, helpers as h, codeblocks
41 40 from rhodecode.lib.ext_json import json
42 41 from rhodecode.lib.base import (
43 42 BaseRepoController, render, vcs_operation_context)
44 43 from rhodecode.lib.auth import (
45 44 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
46 45 HasAcceptedRepoType, XHRRequired)
47 46 from rhodecode.lib.channelstream import channelstream_request
48 47 from rhodecode.lib.utils import jsonify
49 48 from rhodecode.lib.utils2 import (
50 49 safe_int, safe_str, str2bool, safe_unicode)
51 50 from rhodecode.lib.vcs.backends.base import (
52 51 EmptyCommit, UpdateFailureReason, EmptyRepository)
53 52 from rhodecode.lib.vcs.exceptions import (
54 53 EmptyRepositoryError, CommitDoesNotExistError, RepositoryRequirementError,
55 54 NodeDoesNotExistError)
56 55
57 56 from rhodecode.model.changeset_status import ChangesetStatusModel
58 57 from rhodecode.model.comment import CommentsModel
59 58 from rhodecode.model.db import (PullRequest, ChangesetStatus, ChangesetComment,
60 59 Repository, PullRequestVersion)
61 60 from rhodecode.model.forms import PullRequestForm
62 61 from rhodecode.model.meta import Session
63 62 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
64 63
65 64 log = logging.getLogger(__name__)
66 65
67 66
68 67 class PullrequestsController(BaseRepoController):
69 68
70 69 def __before__(self):
71 70 super(PullrequestsController, self).__before__()
72 71 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
73 72 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
74 73
75 74 @LoginRequired()
76 75 @NotAnonymous()
77 76 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
78 77 'repository.admin')
79 78 @HasAcceptedRepoType('git', 'hg')
80 79 def index(self):
81 80 source_repo = c.rhodecode_db_repo
82 81
83 82 try:
84 83 source_repo.scm_instance().get_commit()
85 84 except EmptyRepositoryError:
86 85 h.flash(h.literal(_('There are no commits yet')),
87 86 category='warning')
88 87 redirect(h.route_path('repo_summary', repo_name=source_repo.repo_name))
89 88
90 89 commit_id = request.GET.get('commit')
91 90 branch_ref = request.GET.get('branch')
92 91 bookmark_ref = request.GET.get('bookmark')
93 92
94 93 try:
95 94 source_repo_data = PullRequestModel().generate_repo_data(
96 95 source_repo, commit_id=commit_id,
97 96 branch=branch_ref, bookmark=bookmark_ref)
98 97 except CommitDoesNotExistError as e:
99 98 log.exception(e)
100 99 h.flash(_('Commit does not exist'), 'error')
101 100 redirect(url('pullrequest_home', repo_name=source_repo.repo_name))
102 101
103 102 default_target_repo = source_repo
104 103
105 104 if source_repo.parent:
106 105 parent_vcs_obj = source_repo.parent.scm_instance()
107 106 if parent_vcs_obj and not parent_vcs_obj.is_empty():
108 107 # change default if we have a parent repo
109 108 default_target_repo = source_repo.parent
110 109
111 110 target_repo_data = PullRequestModel().generate_repo_data(
112 111 default_target_repo)
113 112
114 113 selected_source_ref = source_repo_data['refs']['selected_ref']
115 114
116 115 title_source_ref = selected_source_ref.split(':', 2)[1]
117 116 c.default_title = PullRequestModel().generate_pullrequest_title(
118 117 source=source_repo.repo_name,
119 118 source_ref=title_source_ref,
120 119 target=default_target_repo.repo_name
121 120 )
122 121
123 122 c.default_repo_data = {
124 123 'source_repo_name': source_repo.repo_name,
125 124 'source_refs_json': json.dumps(source_repo_data),
126 125 'target_repo_name': default_target_repo.repo_name,
127 126 'target_refs_json': json.dumps(target_repo_data),
128 127 }
129 128 c.default_source_ref = selected_source_ref
130 129
131 130 return render('/pullrequests/pullrequest.mako')
132 131
133 132 @LoginRequired()
134 133 @NotAnonymous()
135 134 @XHRRequired()
136 135 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
137 136 'repository.admin')
138 137 @jsonify
139 138 def get_repo_refs(self, repo_name, target_repo_name):
140 139 repo = Repository.get_by_repo_name(target_repo_name)
141 140 if not repo:
142 141 raise HTTPNotFound
143 142 return PullRequestModel().generate_repo_data(repo)
144 143
145 144 @LoginRequired()
146 145 @NotAnonymous()
147 146 @XHRRequired()
148 147 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
149 148 'repository.admin')
150 149 @jsonify
151 150 def get_repo_destinations(self, repo_name):
152 151 repo = Repository.get_by_repo_name(repo_name)
153 152 if not repo:
154 153 raise HTTPNotFound
155 154 filter_query = request.GET.get('query')
156 155
157 156 query = Repository.query() \
158 157 .order_by(func.length(Repository.repo_name)) \
159 158 .filter(or_(
160 159 Repository.repo_name == repo.repo_name,
161 160 Repository.fork_id == repo.repo_id))
162 161
163 162 if filter_query:
164 163 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
165 164 query = query.filter(
166 165 Repository.repo_name.ilike(ilike_expression))
167 166
168 167 add_parent = False
169 168 if repo.parent:
170 169 if filter_query in repo.parent.repo_name:
171 170 parent_vcs_obj = repo.parent.scm_instance()
172 171 if parent_vcs_obj and not parent_vcs_obj.is_empty():
173 172 add_parent = True
174 173
175 174 limit = 20 - 1 if add_parent else 20
176 175 all_repos = query.limit(limit).all()
177 176 if add_parent:
178 177 all_repos += [repo.parent]
179 178
180 179 repos = []
181 180 for obj in self.scm_model.get_repos(all_repos):
182 181 repos.append({
183 182 'id': obj['name'],
184 183 'text': obj['name'],
185 184 'type': 'repo',
186 185 'obj': obj['dbrepo']
187 186 })
188 187
189 188 data = {
190 189 'more': False,
191 190 'results': [{
192 191 'text': _('Repositories'),
193 192 'children': repos
194 193 }] if repos else []
195 194 }
196 195 return data
197 196
198 197 @LoginRequired()
199 198 @NotAnonymous()
200 199 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
201 200 'repository.admin')
202 201 @HasAcceptedRepoType('git', 'hg')
203 202 @auth.CSRFRequired()
204 203 def create(self, repo_name):
205 204 repo = Repository.get_by_repo_name(repo_name)
206 205 if not repo:
207 206 raise HTTPNotFound
208 207
209 208 controls = peppercorn.parse(request.POST.items())
210 209
211 210 try:
212 211 _form = PullRequestForm(repo.repo_id)().to_python(controls)
213 212 except formencode.Invalid as errors:
214 213 if errors.error_dict.get('revisions'):
215 214 msg = 'Revisions: %s' % errors.error_dict['revisions']
216 215 elif errors.error_dict.get('pullrequest_title'):
217 216 msg = _('Pull request requires a title with min. 3 chars')
218 217 else:
219 218 msg = _('Error creating pull request: {}').format(errors)
220 219 log.exception(msg)
221 220 h.flash(msg, 'error')
222 221
223 222 # would rather just go back to form ...
224 223 return redirect(url('pullrequest_home', repo_name=repo_name))
225 224
226 225 source_repo = _form['source_repo']
227 226 source_ref = _form['source_ref']
228 227 target_repo = _form['target_repo']
229 228 target_ref = _form['target_ref']
230 229 commit_ids = _form['revisions'][::-1]
231 230
232 231 # find the ancestor for this pr
233 232 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
234 233 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
235 234
236 235 source_scm = source_db_repo.scm_instance()
237 236 target_scm = target_db_repo.scm_instance()
238 237
239 238 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
240 239 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
241 240
242 241 ancestor = source_scm.get_common_ancestor(
243 242 source_commit.raw_id, target_commit.raw_id, target_scm)
244 243
245 244 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
246 245 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
247 246
248 247 pullrequest_title = _form['pullrequest_title']
249 248 title_source_ref = source_ref.split(':', 2)[1]
250 249 if not pullrequest_title:
251 250 pullrequest_title = PullRequestModel().generate_pullrequest_title(
252 251 source=source_repo,
253 252 source_ref=title_source_ref,
254 253 target=target_repo
255 254 )
256 255
257 256 description = _form['pullrequest_desc']
258 257
259 258 get_default_reviewers_data, validate_default_reviewers = \
260 259 PullRequestModel().get_reviewer_functions()
261 260
262 261 # recalculate reviewers logic, to make sure we can validate this
263 262 reviewer_rules = get_default_reviewers_data(
264 263 c.rhodecode_user.get_instance(), source_db_repo,
265 264 source_commit, target_db_repo, target_commit)
266 265
267 266 given_reviewers = _form['review_members']
268 267 reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)
269 268
270 269 try:
271 270 pull_request = PullRequestModel().create(
272 271 c.rhodecode_user.user_id, source_repo, source_ref, target_repo,
273 272 target_ref, commit_ids, reviewers, pullrequest_title,
274 273 description, reviewer_rules
275 274 )
276 275 Session().commit()
277 276 h.flash(_('Successfully opened new pull request'),
278 277 category='success')
279 278 except Exception as e:
280 279 msg = _('Error occurred during creation of this pull request.')
281 280 log.exception(msg)
282 281 h.flash(msg, category='error')
283 282 return redirect(url('pullrequest_home', repo_name=repo_name))
284 283
285 284 raise HTTPFound(
286 285 h.route_path('pullrequest_show', repo_name=target_repo,
287 286 pull_request_id=pull_request.pull_request_id))
288 287
289 288 @LoginRequired()
290 289 @NotAnonymous()
291 290 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
292 291 'repository.admin')
293 292 @auth.CSRFRequired()
294 293 @jsonify
295 294 def update(self, repo_name, pull_request_id):
296 295 pull_request_id = safe_int(pull_request_id)
297 296 pull_request = PullRequest.get_or_404(pull_request_id)
298 297 # only owner or admin can update it
299 298 allowed_to_update = PullRequestModel().check_user_update(
300 299 pull_request, c.rhodecode_user)
301 300 if allowed_to_update:
302 301 controls = peppercorn.parse(request.POST.items())
303 302
304 303 if 'review_members' in controls:
305 304 self._update_reviewers(
306 305 pull_request_id, controls['review_members'],
307 306 pull_request.reviewer_data)
308 307 elif str2bool(request.POST.get('update_commits', 'false')):
309 308 self._update_commits(pull_request)
310 309 elif str2bool(request.POST.get('edit_pull_request', 'false')):
311 310 self._edit_pull_request(pull_request)
312 311 else:
313 312 raise HTTPBadRequest()
314 313 return True
315 314 raise HTTPForbidden()
316 315
317 316 def _edit_pull_request(self, pull_request):
318 317 try:
319 318 PullRequestModel().edit(
320 319 pull_request, request.POST.get('title'),
321 320 request.POST.get('description'), c.rhodecode_user)
322 321 except ValueError:
323 322 msg = _(u'Cannot update closed pull requests.')
324 323 h.flash(msg, category='error')
325 324 return
326 325 else:
327 326 Session().commit()
328 327
329 328 msg = _(u'Pull request title & description updated.')
330 329 h.flash(msg, category='success')
331 330 return
332 331
333 332 def _update_commits(self, pull_request):
334 333 resp = PullRequestModel().update_commits(pull_request)
335 334
336 335 if resp.executed:
337 336
338 337 if resp.target_changed and resp.source_changed:
339 338 changed = 'target and source repositories'
340 339 elif resp.target_changed and not resp.source_changed:
341 340 changed = 'target repository'
342 341 elif not resp.target_changed and resp.source_changed:
343 342 changed = 'source repository'
344 343 else:
345 344 changed = 'nothing'
346 345
347 346 msg = _(
348 347 u'Pull request updated to "{source_commit_id}" with '
349 348 u'{count_added} added, {count_removed} removed commits. '
350 349 u'Source of changes: {change_source}')
351 350 msg = msg.format(
352 351 source_commit_id=pull_request.source_ref_parts.commit_id,
353 352 count_added=len(resp.changes.added),
354 353 count_removed=len(resp.changes.removed),
355 354 change_source=changed)
356 355 h.flash(msg, category='success')
357 356
358 357 registry = get_current_registry()
359 358 rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {})
360 359 channelstream_config = rhodecode_plugins.get('channelstream', {})
361 360 if channelstream_config.get('enabled'):
362 361 message = msg + (
363 362 ' - <a onclick="window.location.reload()">'
364 363 '<strong>{}</strong></a>'.format(_('Reload page')))
365 364 channel = '/repo${}$/pr/{}'.format(
366 365 pull_request.target_repo.repo_name,
367 366 pull_request.pull_request_id
368 367 )
369 368 payload = {
370 369 'type': 'message',
371 370 'user': 'system',
372 371 'exclude_users': [request.user.username],
373 372 'channel': channel,
374 373 'message': {
375 374 'message': message,
376 375 'level': 'success',
377 376 'topic': '/notifications'
378 377 }
379 378 }
380 379 channelstream_request(
381 380 channelstream_config, [payload], '/message',
382 381 raise_exc=False)
383 382 else:
384 383 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
385 384 warning_reasons = [
386 385 UpdateFailureReason.NO_CHANGE,
387 386 UpdateFailureReason.WRONG_REF_TYPE,
388 387 ]
389 388 category = 'warning' if resp.reason in warning_reasons else 'error'
390 389 h.flash(msg, category=category)
391 390
392 391 @auth.CSRFRequired()
393 392 @LoginRequired()
394 393 @NotAnonymous()
395 394 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
396 395 'repository.admin')
397 396 def merge(self, repo_name, pull_request_id):
398 397 """
399 398 POST /{repo_name}/pull-request/{pull_request_id}
400 399
401 400 Merge will perform a server-side merge of the specified
402 401 pull request, if the pull request is approved and mergeable.
403 402 After successful merging, the pull request is automatically
404 403 closed, with a relevant comment.
405 404 """
406 405 pull_request_id = safe_int(pull_request_id)
407 406 pull_request = PullRequest.get_or_404(pull_request_id)
408 407 user = c.rhodecode_user
409 408
410 409 check = MergeCheck.validate(pull_request, user)
411 410 merge_possible = not check.failed
412 411
413 412 for err_type, error_msg in check.errors:
414 413 h.flash(error_msg, category=err_type)
415 414
416 415 if merge_possible:
417 416 log.debug("Pre-conditions checked, trying to merge.")
418 417 extras = vcs_operation_context(
419 418 request.environ, repo_name=pull_request.target_repo.repo_name,
420 419 username=user.username, action='push',
421 420 scm=pull_request.target_repo.repo_type)
422 421 self._merge_pull_request(pull_request, user, extras)
423 422
424 423 raise HTTPFound(
425 424 h.route_path('pullrequest_show',
426 425 repo_name=pull_request.target_repo.repo_name,
427 426 pull_request_id=pull_request.pull_request_id))
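
# Illustrative sketch: how a client might call the merge endpoint described
# in the docstring above. The URL shape follows that docstring; the
# 'csrf_token' field name mirrors csrf_token_key used in this codebase, and
# the raw Cookie header for session auth is an assumption, not a documented
# RhodeCode client API.
import urllib
import urllib2

def merge_pull_request(base_url, repo_name, pull_request_id, csrf_token, cookie):
    url = '{0}/{1}/pull-request/{2}'.format(base_url, repo_name, pull_request_id)
    data = urllib.urlencode({'csrf_token': csrf_token})
    # CSRF-protected POST; the merge only runs if the PR is approved and mergeable
    return urllib2.urlopen(urllib2.Request(url, data=data, headers={'Cookie': cookie}))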
428 427
429 428 def _merge_pull_request(self, pull_request, user, extras):
430 429 merge_resp = PullRequestModel().merge(
431 430 pull_request, user, extras=extras)
432 431
433 432 if merge_resp.executed:
434 433 log.debug("The merge was successful, closing the pull request.")
435 434 PullRequestModel().close_pull_request(
436 435 pull_request.pull_request_id, user)
437 436 Session().commit()
438 437 msg = _('Pull request was successfully merged and closed.')
439 438 h.flash(msg, category='success')
440 439 else:
441 440 log.debug(
442 441 "The merge was not successful. Merge response: %s",
443 442 merge_resp)
444 443 msg = PullRequestModel().merge_status_message(
445 444 merge_resp.failure_reason)
446 445 h.flash(msg, category='error')
447 446
448 447 def _update_reviewers(self, pull_request_id, review_members, reviewer_rules):
449 448
450 449 get_default_reviewers_data, validate_default_reviewers = \
451 450 PullRequestModel().get_reviewer_functions()
452 451
453 452 try:
454 453 reviewers = validate_default_reviewers(review_members, reviewer_rules)
455 454 except ValueError as e:
456 455 log.error('Reviewers Validation: {}'.format(e))
457 456 h.flash(e, category='error')
458 457 return
459 458
460 459 PullRequestModel().update_reviewers(
461 460 pull_request_id, reviewers, c.rhodecode_user)
462 461 h.flash(_('Pull request reviewers updated.'), category='success')
463 462 Session().commit()
464 463
465 464 @LoginRequired()
466 465 @NotAnonymous()
467 466 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
468 467 'repository.admin')
469 468 @auth.CSRFRequired()
470 469 @jsonify
471 470 def delete(self, repo_name, pull_request_id):
472 471 pull_request_id = safe_int(pull_request_id)
473 472 pull_request = PullRequest.get_or_404(pull_request_id)
474 473
475 474 pr_closed = pull_request.is_closed()
476 475 allowed_to_delete = PullRequestModel().check_user_delete(
477 476 pull_request, c.rhodecode_user) and not pr_closed
478 477
479 478 # only the owner can delete it!
480 479 if allowed_to_delete:
481 480 PullRequestModel().delete(pull_request, c.rhodecode_user)
482 481 Session().commit()
483 482 h.flash(_('Successfully deleted pull request'),
484 483 category='success')
485 484 return redirect(url('my_account_pullrequests'))
486 485
487 486 h.flash(_('You are not allowed to delete this pull request'),
488 487 category='error')
489 488 raise HTTPForbidden()
490 489
491 490 def _get_pr_version(self, pull_request_id, version=None):
492 491 pull_request_id = safe_int(pull_request_id)
493 492 at_version = None
494 493
495 494 if version and version == 'latest':
496 495 pull_request_ver = PullRequest.get(pull_request_id)
497 496 pull_request_obj = pull_request_ver
498 497 _org_pull_request_obj = pull_request_obj
499 498 at_version = 'latest'
500 499 elif version:
501 500 pull_request_ver = PullRequestVersion.get_or_404(version)
502 501 pull_request_obj = pull_request_ver
503 502 _org_pull_request_obj = pull_request_ver.pull_request
504 503 at_version = pull_request_ver.pull_request_version_id
505 504 else:
506 505 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
507 506 pull_request_id)
508 507
509 508 pull_request_display_obj = PullRequest.get_pr_display_object(
510 509 pull_request_obj, _org_pull_request_obj)
511 510
512 511 return _org_pull_request_obj, pull_request_obj, \
513 512 pull_request_display_obj, at_version
514 513
515 514 def _get_diffset(
516 515 self, source_repo, source_ref_id, target_ref_id, target_commit,
517 516 source_commit, diff_limit, file_limit, display_inline_comments):
518 517 vcs_diff = PullRequestModel().get_diff(
519 518 source_repo, source_ref_id, target_ref_id)
520 519
521 520 diff_processor = diffs.DiffProcessor(
522 521 vcs_diff, format='newdiff', diff_limit=diff_limit,
523 522 file_limit=file_limit, show_full_diff=c.fulldiff)
524 523
525 524 _parsed = diff_processor.prepare()
526 525
527 526 def _node_getter(commit):
528 527 def get_node(fname):
529 528 try:
530 529 return commit.get_node(fname)
531 530 except NodeDoesNotExistError:
532 531 return None
533 532
534 533 return get_node
535 534
536 535 diffset = codeblocks.DiffSet(
537 536 repo_name=c.repo_name,
538 537 source_repo_name=c.source_repo.repo_name,
539 538 source_node_getter=_node_getter(target_commit),
540 539 target_node_getter=_node_getter(source_commit),
541 540 comments=display_inline_comments
542 541 )
543 542 diffset = diffset.render_patchset(
544 543 _parsed, target_commit.raw_id, source_commit.raw_id)
545 544
546 545 return diffset
547 546
548 547 @LoginRequired()
549 548 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
550 549 'repository.admin')
551 550 def show(self, repo_name, pull_request_id):
552 551 pull_request_id = safe_int(pull_request_id)
553 552 version = request.GET.get('version')
554 553 from_version = request.GET.get('from_version') or version
555 554 merge_checks = request.GET.get('merge_checks')
556 555 c.fulldiff = str2bool(request.GET.get('fulldiff'))
557 556
558 557 (pull_request_latest,
559 558 pull_request_at_ver,
560 559 pull_request_display_obj,
561 560 at_version) = self._get_pr_version(
562 561 pull_request_id, version=version)
563 562 pr_closed = pull_request_latest.is_closed()
564 563
565 564 if pr_closed and (version or from_version):
566 565 # do not allow browsing versions
567 566 return redirect(h.url('pullrequest_show', repo_name=repo_name,
568 567 pull_request_id=pull_request_id))
569 568
570 569 versions = pull_request_display_obj.versions()
571 570
572 571 c.at_version = at_version
573 572 c.at_version_num = (at_version
574 573 if at_version and at_version != 'latest'
575 574 else None)
576 575 c.at_version_pos = ChangesetComment.get_index_from_version(
577 576 c.at_version_num, versions)
578 577
579 578 (prev_pull_request_latest,
580 579 prev_pull_request_at_ver,
581 580 prev_pull_request_display_obj,
582 581 prev_at_version) = self._get_pr_version(
583 582 pull_request_id, version=from_version)
584 583
585 584 c.from_version = prev_at_version
586 585 c.from_version_num = (prev_at_version
587 586 if prev_at_version and prev_at_version != 'latest'
588 587 else None)
589 588 c.from_version_pos = ChangesetComment.get_index_from_version(
590 589 c.from_version_num, versions)
591 590
592 591 # define if we're in COMPARE mode or VIEW at version mode
593 592 compare = at_version != prev_at_version
594 593
595 594 # the repo_name this pull request was opened against,
596 595 # i.e. target_repo must match
597 596 if repo_name != pull_request_at_ver.target_repo.repo_name:
598 597 raise HTTPNotFound
599 598
600 599 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
601 600 pull_request_at_ver)
602 601
603 602 c.pull_request = pull_request_display_obj
604 603 c.pull_request_latest = pull_request_latest
605 604
606 605 if compare or (at_version and not at_version == 'latest'):
607 606 c.allowed_to_change_status = False
608 607 c.allowed_to_update = False
609 608 c.allowed_to_merge = False
610 609 c.allowed_to_delete = False
611 610 c.allowed_to_comment = False
612 611 c.allowed_to_close = False
613 612 else:
614 613 can_change_status = PullRequestModel().check_user_change_status(
615 614 pull_request_at_ver, c.rhodecode_user)
616 615 c.allowed_to_change_status = can_change_status and not pr_closed
617 616
618 617 c.allowed_to_update = PullRequestModel().check_user_update(
619 618 pull_request_latest, c.rhodecode_user) and not pr_closed
620 619 c.allowed_to_merge = PullRequestModel().check_user_merge(
621 620 pull_request_latest, c.rhodecode_user) and not pr_closed
622 621 c.allowed_to_delete = PullRequestModel().check_user_delete(
623 622 pull_request_latest, c.rhodecode_user) and not pr_closed
624 623 c.allowed_to_comment = not pr_closed
625 624 c.allowed_to_close = c.allowed_to_merge and not pr_closed
626 625
627 626 c.forbid_adding_reviewers = False
628 627 c.forbid_author_to_review = False
629 628 c.forbid_commit_author_to_review = False
630 629
631 630 if pull_request_latest.reviewer_data and \
632 631 'rules' in pull_request_latest.reviewer_data:
633 632 rules = pull_request_latest.reviewer_data['rules'] or {}
634 633 try:
635 634 c.forbid_adding_reviewers = rules.get(
636 635 'forbid_adding_reviewers')
637 636 c.forbid_author_to_review = rules.get(
638 637 'forbid_author_to_review')
639 638 c.forbid_commit_author_to_review = rules.get(
640 639 'forbid_commit_author_to_review')
641 640 except Exception:
642 641 pass
643 642
644 643 # check merge capabilities
645 644 _merge_check = MergeCheck.validate(
646 645 pull_request_latest, user=c.rhodecode_user)
647 646 c.pr_merge_errors = _merge_check.error_details
648 647 c.pr_merge_possible = not _merge_check.failed
649 648 c.pr_merge_message = _merge_check.merge_msg
650 649
651 650 c.pull_request_review_status = _merge_check.review_status
652 651 if merge_checks:
653 652 return render('/pullrequests/pullrequest_merge_checks.mako')
654 653
655 654 comments_model = CommentsModel()
656 655
657 656 # reviewers and statuses
658 657 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
659 658 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
660 659
661 660 # GENERAL COMMENTS with versions #
662 661 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
663 662 q = q.order_by(ChangesetComment.comment_id.asc())
664 663 general_comments = q
665 664
666 665 # pick comments we want to render at current version
667 666 c.comment_versions = comments_model.aggregate_comments(
668 667 general_comments, versions, c.at_version_num)
669 668 c.comments = c.comment_versions[c.at_version_num]['until']
670 669
671 670 # INLINE COMMENTS with versions #
672 671 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
673 672 q = q.order_by(ChangesetComment.comment_id.asc())
674 673 inline_comments = q
675 674
676 675 c.inline_versions = comments_model.aggregate_comments(
677 676 inline_comments, versions, c.at_version_num, inline=True)
678 677
679 678 # inject latest version
680 679 latest_ver = PullRequest.get_pr_display_object(
681 680 pull_request_latest, pull_request_latest)
682 681
683 682 c.versions = versions + [latest_ver]
684 683
685 684 # if we use a version, then do not show comments later
686 685 # than the current version
687 686 display_inline_comments = collections.defaultdict(
688 687 lambda: collections.defaultdict(list))
689 688 for co in inline_comments:
690 689 if c.at_version_num:
691 690 # pick only comments up to (and including) the given version, so we
692 691 # don't render comments from a higher version
693 692 should_render = co.pull_request_version_id and \
694 693 co.pull_request_version_id <= c.at_version_num
695 694 else:
696 695 # showing all, for 'latest'
697 696 should_render = True
698 697
699 698 if should_render:
700 699 display_inline_comments[co.f_path][co.line_no].append(co)
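
# Illustrative sketch: a standalone model of the version filter above. The
# comment shape (f_path, line_no, version_id) is made up for the example;
# it is not the actual ChangesetComment object.
import collections

def comments_up_to(comments, at_version_num):
    grouped = collections.defaultdict(lambda: collections.defaultdict(list))
    for f_path, line_no, version_id in comments:
        # keep only comments at or below the viewed version; 'latest' keeps all
        if not at_version_num or (version_id and version_id <= at_version_num):
            grouped[f_path][line_no].append(version_id)
    return grouped

sample = [('a.py', 'n10', 1), ('a.py', 'n10', 3)]
assert len(comments_up_to(sample, 2)['a.py']['n10']) == 1
assert len(comments_up_to(sample, None)['a.py']['n10']) == 2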
701 700
702 701 # load diff data into template context, if we use compare mode then
703 702 # diff is calculated based on changes between versions of PR
704 703
705 704 source_repo = pull_request_at_ver.source_repo
706 705 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
707 706
708 707 target_repo = pull_request_at_ver.target_repo
709 708 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
710 709
711 710 if compare:
712 711 # in compare switch the diff base to latest commit from prev version
713 712 target_ref_id = prev_pull_request_display_obj.revisions[0]
714 713
715 714 # despite opening commits for bookmarks/branches/tags, we always
716 715 # convert this to rev to prevent changes after bookmark or branch change
717 716 c.source_ref_type = 'rev'
718 717 c.source_ref = source_ref_id
719 718
720 719 c.target_ref_type = 'rev'
721 720 c.target_ref = target_ref_id
722 721
723 722 c.source_repo = source_repo
724 723 c.target_repo = target_repo
725 724
726 725 # diff_limit is the old behavior: it will cut off the whole diff
727 726 # if the limit is applied, otherwise it will just hide the
728 727 # big files from the front-end
729 728 diff_limit = self.cut_off_limit_diff
730 729 file_limit = self.cut_off_limit_file
731 730
732 731 c.commit_ranges = []
733 732 source_commit = EmptyCommit()
734 733 target_commit = EmptyCommit()
735 734 c.missing_requirements = False
736 735
737 736 source_scm = source_repo.scm_instance()
738 737 target_scm = target_repo.scm_instance()
739 738
740 739 # try first shadow repo, fallback to regular repo
741 740 try:
742 741 commits_source_repo = pull_request_latest.get_shadow_repo()
743 742 except Exception:
744 743 log.debug('Failed to get shadow repo', exc_info=True)
745 744 commits_source_repo = source_scm
746 745
747 746 c.commits_source_repo = commits_source_repo
748 747 commit_cache = {}
749 748 try:
750 749 pre_load = ["author", "branch", "date", "message"]
751 750 show_revs = pull_request_at_ver.revisions
752 751 for rev in show_revs:
753 752 comm = commits_source_repo.get_commit(
754 753 commit_id=rev, pre_load=pre_load)
755 754 c.commit_ranges.append(comm)
756 755 commit_cache[comm.raw_id] = comm
757 756
758 757 # Order here matters, we first need to get target, and then
759 758 # the source
760 759 target_commit = commits_source_repo.get_commit(
761 760 commit_id=safe_str(target_ref_id))
762 761
763 762 source_commit = commits_source_repo.get_commit(
764 763 commit_id=safe_str(source_ref_id))
765 764
766 765 except CommitDoesNotExistError:
767 766 log.warning(
768 767 'Failed to get commit from `{}` repo'.format(
769 768 commits_source_repo), exc_info=True)
770 769 except RepositoryRequirementError:
771 770 log.warning(
772 771 'Failed to get all required data from repo', exc_info=True)
773 772 c.missing_requirements = True
774 773
775 774 c.ancestor = None # set it to None, to hide it from PR view
776 775
777 776 try:
778 777 ancestor_id = source_scm.get_common_ancestor(
779 778 source_commit.raw_id, target_commit.raw_id, target_scm)
780 779 c.ancestor_commit = source_scm.get_commit(ancestor_id)
781 780 except Exception:
782 781 c.ancestor_commit = None
783 782
784 783 c.statuses = source_repo.statuses(
785 784 [x.raw_id for x in c.commit_ranges])
786 785
787 786 # auto collapse if we have more than limit
788 787 collapse_limit = diffs.DiffProcessor._collapse_commits_over
789 788 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
790 789 c.compare_mode = compare
791 790
792 791 c.missing_commits = False
793 792 if (c.missing_requirements or isinstance(source_commit, EmptyCommit)
794 793 or source_commit == target_commit):
795 794
796 795 c.missing_commits = True
797 796 else:
798 797
799 798 c.diffset = self._get_diffset(
800 799 commits_source_repo, source_ref_id, target_ref_id,
801 800 target_commit, source_commit,
802 801 diff_limit, file_limit, display_inline_comments)
803 802
804 803 c.limited_diff = c.diffset.limited_diff
805 804
806 805 # calculate removed files that are bound to comments
807 806 comment_deleted_files = [
808 807 fname for fname in display_inline_comments
809 808 if fname not in c.diffset.file_stats]
810 809
811 810 c.deleted_files_comments = collections.defaultdict(dict)
812 811 for fname, per_line_comments in display_inline_comments.items():
813 812 if fname in comment_deleted_files:
814 813 c.deleted_files_comments[fname]['stats'] = 0
815 814 c.deleted_files_comments[fname]['comments'] = list()
816 815 for lno, comments in per_line_comments.items():
817 816 c.deleted_files_comments[fname]['comments'].extend(
818 817 comments)
819 818
820 819 # this is a hack to properly display links, when creating PR, the
821 820 # compare view and others use different notation, and
822 821 # compare_commits.mako renders links based on the target_repo.
823 822 # We need to swap that here to generate it properly on the html side
824 823 c.target_repo = c.source_repo
825 824
826 825 c.commit_statuses = ChangesetStatus.STATUSES
827 826
828 827 c.show_version_changes = not pr_closed
829 828 if c.show_version_changes:
830 829 cur_obj = pull_request_at_ver
831 830 prev_obj = prev_pull_request_at_ver
832 831
833 832 old_commit_ids = prev_obj.revisions
834 833 new_commit_ids = cur_obj.revisions
835 834 commit_changes = PullRequestModel()._calculate_commit_id_changes(
836 835 old_commit_ids, new_commit_ids)
837 836 c.commit_changes_summary = commit_changes
838 837
839 838 # calculate the diff for commits between versions
840 839 c.commit_changes = []
841 840 mark = lambda cs, fw: list(
842 841 h.itertools.izip_longest([], cs, fillvalue=fw))
843 842 for c_type, raw_id in mark(commit_changes.added, 'a') \
844 843 + mark(commit_changes.removed, 'r') \
845 844 + mark(commit_changes.common, 'c'):
846 845
847 846 if raw_id in commit_cache:
848 847 commit = commit_cache[raw_id]
849 848 else:
850 849 try:
851 850 commit = commits_source_repo.get_commit(raw_id)
852 851 except CommitDoesNotExistError:
853 852 # in case extraction fails, still use a "dummy" commit
854 853 # for display in the commit diff
855 854 commit = h.AttributeDict(
856 855 {'raw_id': raw_id,
857 856 'message': 'EMPTY or MISSING COMMIT'})
858 857 c.commit_changes.append([c_type, commit])
859 858
860 859 # current user review statuses for each version
861 860 c.review_versions = {}
862 861 if c.rhodecode_user.user_id in allowed_reviewers:
863 862 for co in general_comments:
864 863 if co.author.user_id == c.rhodecode_user.user_id:
865 864 # each comment has a status change
866 865 status = co.status_change
867 866 if status:
868 867 _ver_pr = status[0].comment.pull_request_version_id
869 868 c.review_versions[_ver_pr] = status[0]
870 869
871 870 return render('/pullrequests/pullrequest_show.mako')
872 871
873 872 @LoginRequired()
874 873 @NotAnonymous()
875 874 @HasRepoPermissionAnyDecorator(
876 875 'repository.read', 'repository.write', 'repository.admin')
877 876 @auth.CSRFRequired()
878 877 @jsonify
879 878 def comment(self, repo_name, pull_request_id):
880 879 pull_request_id = safe_int(pull_request_id)
881 880 pull_request = PullRequest.get_or_404(pull_request_id)
882 881 if pull_request.is_closed():
883 882 log.debug('comment: forbidden because pull request is closed')
884 883 raise HTTPForbidden()
885 884
886 885 status = request.POST.get('changeset_status', None)
887 886 text = request.POST.get('text')
888 887 comment_type = request.POST.get('comment_type')
889 888 resolves_comment_id = request.POST.get('resolves_comment_id', None)
890 889 close_pull_request = request.POST.get('close_pull_request')
891 890
892 891 # the logic here works as follows: if we submit a close-PR
893 892 # comment, use the `close_pull_request_with_comment` function,
894 893 # else handle the regular comment logic
895 894 user = c.rhodecode_user
896 895 repo = c.rhodecode_db_repo
897 896
898 897 if close_pull_request:
899 898 # only owner or admin or person with write permissions
900 899 allowed_to_close = PullRequestModel().check_user_update(
901 900 pull_request, c.rhodecode_user)
902 901 if not allowed_to_close:
903 902 log.debug('comment: forbidden because not allowed to close '
904 903 'pull request %s', pull_request_id)
905 904 raise HTTPForbidden()
906 905 comment, status = PullRequestModel().close_pull_request_with_comment(
907 906 pull_request, user, repo, message=text)
908 907 Session().flush()
909 908 events.trigger(
910 909 events.PullRequestCommentEvent(pull_request, comment))
911 910
912 911 else:
913 912 # regular comment case, could be inline, or one with status.
914 913 # for that one we check also permissions
915 914
916 915 allowed_to_change_status = PullRequestModel().check_user_change_status(
917 916 pull_request, c.rhodecode_user)
918 917
919 918 if status and allowed_to_change_status:
920 919 message = (_('Status change %(transition_icon)s %(status)s')
921 920 % {'transition_icon': '>',
922 921 'status': ChangesetStatus.get_status_lbl(status)})
923 922 text = text or message
924 923
925 924 comment = CommentsModel().create(
926 925 text=text,
927 926 repo=c.rhodecode_db_repo.repo_id,
928 927 user=c.rhodecode_user.user_id,
929 928 pull_request=pull_request_id,
930 929 f_path=request.POST.get('f_path'),
931 930 line_no=request.POST.get('line'),
932 931 status_change=(ChangesetStatus.get_status_lbl(status)
933 932 if status and allowed_to_change_status else None),
934 933 status_change_type=(status
935 934 if status and allowed_to_change_status else None),
936 935 comment_type=comment_type,
937 936 resolves_comment_id=resolves_comment_id
938 937 )
939 938
940 939 if allowed_to_change_status:
941 940 # calculate old status before we change it
942 941 old_calculated_status = pull_request.calculated_review_status()
943 942
944 943 # get status if set !
945 944 if status:
946 945 ChangesetStatusModel().set_status(
947 946 c.rhodecode_db_repo.repo_id,
948 947 status,
949 948 c.rhodecode_user.user_id,
950 949 comment,
951 950 pull_request=pull_request_id
952 951 )
953 952
954 953 Session().flush()
955 954 events.trigger(
956 955 events.PullRequestCommentEvent(pull_request, comment))
957 956
958 957 # we now calculate the status of pull request, and based on that
959 958 # calculation we set the commits status
960 959 calculated_status = pull_request.calculated_review_status()
961 960 if old_calculated_status != calculated_status:
962 961 PullRequestModel()._trigger_pull_request_hook(
963 962 pull_request, c.rhodecode_user, 'review_status_change')
964 963
965 964 Session().commit()
966 965
967 966 if not request.is_xhr:
968 967 raise HTTPFound(
969 968 h.route_path('pullrequest_show',
970 969 repo_name=repo_name,
971 970 pull_request_id=pull_request_id))
972 971
973 972 data = {
974 973 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))),
975 974 }
976 975 if comment:
977 976 c.co = comment
978 977 rendered_comment = render('changeset/changeset_comment_block.mako')
979 978 data.update(comment.get_dict())
980 979 data.update({'rendered_text': rendered_comment})
981 980
982 981 return data
983 982
984 983 @LoginRequired()
985 984 @NotAnonymous()
986 985 @HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
987 986 'repository.admin')
988 987 @auth.CSRFRequired()
989 988 @jsonify
990 989 def delete_comment(self, repo_name, comment_id):
991 990 return self._delete_comment(comment_id)
992 991
993 992 def _delete_comment(self, comment_id):
994 993 comment_id = safe_int(comment_id)
995 994 co = ChangesetComment.get_or_404(comment_id)
996 995 if co.pull_request.is_closed():
997 996 # don't allow deleting comments on closed pull request
998 997 raise HTTPForbidden()
999 998
1000 999 is_owner = co.author.user_id == c.rhodecode_user.user_id
1001 1000 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name)
1002 1001 if h.HasPermissionAny('hg.admin')() or is_repo_admin or is_owner:
1003 1002 old_calculated_status = co.pull_request.calculated_review_status()
1004 1003 CommentsModel().delete(comment=co, user=c.rhodecode_user)
1005 1004 Session().commit()
1006 1005 calculated_status = co.pull_request.calculated_review_status()
1007 1006 if old_calculated_status != calculated_status:
1008 1007 PullRequestModel()._trigger_pull_request_hook(
1009 1008 co.pull_request, c.rhodecode_user, 'review_status_change')
1010 1009 return True
1011 1010 else:
1012 1011 raise HTTPForbidden()
@@ -1,2021 +1,2023 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 authentication and permission libraries
23 23 """
24 24
25 25 import os
26 26 import inspect
27 27 import collections
28 28 import fnmatch
29 29 import hashlib
30 30 import itertools
31 31 import logging
32 32 import random
33 33 import traceback
34 34 from functools import wraps
35 35
36 36 import ipaddress
37 37 from pyramid.httpexceptions import HTTPForbidden, HTTPFound
38 38 from pylons.i18n.translation import _
39 39 # NOTE(marcink): this has to be removed only after pyramid migration,
40 40 # replace with _ = request.translate
41 41 from sqlalchemy.orm.exc import ObjectDeletedError
42 42 from sqlalchemy.orm import joinedload
43 43 from zope.cachedescriptors.property import Lazy as LazyProperty
44 44
45 45 import rhodecode
46 46 from rhodecode.model import meta
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.user import UserModel
49 49 from rhodecode.model.db import (
50 50 User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
51 51 UserIpMap, UserApiKeys, RepoGroup)
52 52 from rhodecode.lib import caches
53 53 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5
54 54 from rhodecode.lib.utils import (
55 55 get_repo_slug, get_repo_group_slug, get_user_group_slug)
56 56 from rhodecode.lib.caching_query import FromCache
57 57
58 58
59 59 if rhodecode.is_unix:
60 60 import bcrypt
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64 csrf_token_key = "csrf_token"
65 65
66 66
67 67 class PasswordGenerator(object):
68 68 """
69 69 This is a simple class for generating passwords from different sets of
70 70 characters
71 71 usage::
72 72
73 73 passwd_gen = PasswordGenerator()
74 74 # print an 8-letter password containing only big and small
75 75 # letters of the alphabet
76 76 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
77 77 """
78 78 ALPHABETS_NUM = r'''1234567890'''
79 79 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
80 80 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
81 81 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
82 82 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
83 83 + ALPHABETS_NUM + ALPHABETS_SPECIAL
84 84 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
85 85 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
86 86 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
87 87 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
88 88
89 89 def __init__(self, passwd=''):
90 90 self.passwd = passwd
91 91
92 92 def gen_password(self, length, type_=None):
93 93 if type_ is None:
94 94 type_ = self.ALPHABETS_FULL
95 95 self.passwd = ''.join([random.choice(type_) for _ in xrange(length)])
96 96 return self.passwd
97 97
98 98
99 99 class _RhodeCodeCryptoBase(object):
100 100 ENC_PREF = None
101 101
102 102 def hash_create(self, str_):
103 103 """
104 104 hash the given string
105 105
106 106 :param str_: password to hash
107 107 """
108 108 raise NotImplementedError
109 109
110 110 def hash_check_with_upgrade(self, password, hashed):
111 111 """
112 112 Returns a tuple in which the first element is a boolean stating whether
113 113 the given password matches its hashed version, and the second is a new
114 114 hash of the password, in case this password should be migrated to a new
115 115 cipher.
116 116 """
117 117 checked_hash = self.hash_check(password, hashed)
118 118 return checked_hash, None
119 119
120 120 def hash_check(self, password, hashed):
121 121 """
122 122 Checks whether the password matches its hashed value.
123 123
124 124 :param password: password
125 125 :param hashed: password in hashed form
126 126 """
127 127 raise NotImplementedError
128 128
129 129 def _assert_bytes(self, value):
130 130 """
131 131 Passing in a `unicode` object can lead to hard-to-detect issues
132 132 if passwords contain non-ascii characters. Doing a type check
133 133 during runtime, so that such mistakes are detected early on.
134 134 """
135 135 if not isinstance(value, str):
136 136 raise TypeError(
137 137 "Bytestring required as input, got %r." % (value, ))
138 138
139 139
140 140 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
141 141 ENC_PREF = ('$2a$10', '$2b$10')
142 142
143 143 def hash_create(self, str_):
144 144 self._assert_bytes(str_)
145 145 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
146 146
147 147 def hash_check_with_upgrade(self, password, hashed):
148 148 """
149 149 Returns a tuple in which the first element is a boolean stating whether
150 150 the given password matches its hashed version, and the second is a new
151 151 hash of the password, in case this password should be migrated to a new
152 152 cipher.
153 153
154 154 This implements special upgrade logic which works like this:
155 155 - check if the given password matches the bcrypt hash; if yes, the
156 156 password was used correctly and was already bcrypt hashed. Proceed
157 157 without any changes
158 158 - if the bcrypt hash check does not match, try sha256. If that hash
159 159 compare is ok, it means we are using a correct but old hashed password.
160 160 Indicate a hash change and proceed
161 161 """
162 162
163 163 new_hash = None
164 164
165 165 # regular pw check
166 166 password_match_bcrypt = self.hash_check(password, hashed)
167 167
168 168 # now we want to know if the password was maybe from sha256
169 169 # basically calling _RhodeCodeCryptoSha256().hash_check()
170 170 if not password_match_bcrypt:
171 171 if _RhodeCodeCryptoSha256().hash_check(password, hashed):
172 172 new_hash = self.hash_create(password) # make new bcrypt hash
173 173 password_match_bcrypt = True
174 174
175 175 return password_match_bcrypt, new_hash
176 176
177 177 def hash_check(self, password, hashed):
178 178 """
179 179 Checks whether the password matches its hashed value.
180 180
181 181 :param password: password
182 182 :param hashed: password in hashed form
183 183 """
184 184 self._assert_bytes(password)
185 185 try:
186 186 return bcrypt.hashpw(password, hashed) == hashed
187 187 except ValueError as e:
188 188 # we probably have an invalid salt here, we should not crash;
189 189 # just return False as it would be a wrong password.
190 190 log.debug('Failed to check password hash using bcrypt %s',
191 191 safe_str(e))
192 192
193 193 return False
194 194
195 195
196 196 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
197 197 ENC_PREF = '_'
198 198
199 199 def hash_create(self, str_):
200 200 self._assert_bytes(str_)
201 201 return hashlib.sha256(str_).hexdigest()
202 202
203 203 def hash_check(self, password, hashed):
204 204 """
205 205 Checks whether the password matches its hashed value.
206 206
207 207 :param password: password
208 208 :param hashed: password in hashed form
209 209 """
210 210 self._assert_bytes(password)
211 211 return hashlib.sha256(password).hexdigest() == hashed
212 212
213 213
214 214 class _RhodeCodeCryptoMd5(_RhodeCodeCryptoBase):
215 215 ENC_PREF = '_'
216 216
217 217 def hash_create(self, str_):
218 218 self._assert_bytes(str_)
219 219 return hashlib.md5(str_).hexdigest()
220 220
221 221 def hash_check(self, password, hashed):
222 222 """
223 223 Checks whether the password matches its hashed value.
224 224
225 225 :param password: password
226 226 :param hashed: password in hashed form
227 227 """
228 228 self._assert_bytes(password)
229 229 return hashlib.md5(password).hexdigest() == hashed
230 230
231 231
232 232 def crypto_backend():
233 233 """
234 234 Return the matching crypto backend.
235 235
236 236 Selection is based on whether we run tests or not; we pick the md5 backend
237 237 to run tests faster since bcrypt is expensive to calculate
238 238 """
239 239 if rhodecode.is_test:
240 240 RhodeCodeCrypto = _RhodeCodeCryptoMd5()
241 241 else:
242 242 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
243 243
244 244 return RhodeCodeCrypto
245 245
246 246
247 247 def get_crypt_password(password):
248 248 """
249 249 Create the hash of `password` with the active crypto backend.
250 250
251 251 :param password: The cleartext password.
252 252 :type password: unicode
253 253 """
254 254 password = safe_str(password)
255 255 return crypto_backend().hash_create(password)
256 256
257 257
258 258 def check_password(password, hashed):
259 259 """
260 260 Check if the value in `password` matches the hash in `hashed`.
261 261
262 262 :param password: The cleartext password.
263 263 :type password: unicode
264 264
265 265 :param hashed: The expected hashed version of the password.
266 266 :type hashed: The hash has to be passed in its text representation.
267 267 """
268 268 password = safe_str(password)
269 269 return crypto_backend().hash_check(password, hashed)
270 270
271 271
272 272 def generate_auth_token(data, salt=None):
273 273 """
274 274 Generates an API key from the given string
275 275 """
276 276
277 277 if salt is None:
278 278 salt = os.urandom(16)
279 279 return hashlib.sha1(safe_str(data) + salt).hexdigest()
280 280
281 281
282 282 class CookieStoreWrapper(object):
283 283
284 284 def __init__(self, cookie_store):
285 285 self.cookie_store = cookie_store
286 286
287 287 def __repr__(self):
288 288 return 'CookieStore<%s>' % (self.cookie_store)
289 289
290 290 def get(self, key, other=None):
291 291 if isinstance(self.cookie_store, dict):
292 292 return self.cookie_store.get(key, other)
293 293 elif isinstance(self.cookie_store, AuthUser):
294 294 return self.cookie_store.__dict__.get(key, other)
295 295
296 296
297 297 def _cached_perms_data(user_id, scope, user_is_admin,
298 298 user_inherit_default_permissions, explicit, algo):
299 299
300 300 permissions = PermissionCalculator(
301 301 user_id, scope, user_is_admin, user_inherit_default_permissions,
302 302 explicit, algo)
303 303 return permissions.calculate()
304 304
305 305
306 306 class PermOrigin(object):
307 307 ADMIN = 'superadmin'
308 308
309 309 REPO_USER = 'user:%s'
310 310 REPO_USERGROUP = 'usergroup:%s'
311 311 REPO_OWNER = 'repo.owner'
312 312 REPO_DEFAULT = 'repo.default'
313 313 REPO_PRIVATE = 'repo.private'
314 314
315 315 REPOGROUP_USER = 'user:%s'
316 316 REPOGROUP_USERGROUP = 'usergroup:%s'
317 317 REPOGROUP_OWNER = 'group.owner'
318 318 REPOGROUP_DEFAULT = 'group.default'
319 319
320 320 USERGROUP_USER = 'user:%s'
321 321 USERGROUP_USERGROUP = 'usergroup:%s'
322 322 USERGROUP_OWNER = 'usergroup.owner'
323 323 USERGROUP_DEFAULT = 'usergroup.default'
324 324
325 325
326 326 class PermOriginDict(dict):
327 327 """
328 328 A special dict used for tracking permissions along with their origins.
329 329
330 330 `__setitem__` has been overridden to expect a tuple(perm, origin)
331 331 `__getitem__` will return only the perm
332 332 `.perm_origin_stack` will return the stack of (perm, origin) set per key
333 333
334 334 >>> perms = PermOriginDict()
335 335 >>> perms['resource'] = 'read', 'default'
336 336 >>> perms['resource']
337 337 'read'
338 338 >>> perms['resource'] = 'write', 'admin'
339 339 >>> perms['resource']
340 340 'write'
341 341 >>> perms.perm_origin_stack
342 342 {'resource': [('read', 'default'), ('write', 'admin')]}
343 343 """
344 344
345 345 def __init__(self, *args, **kw):
346 346 dict.__init__(self, *args, **kw)
347 347 self.perm_origin_stack = {}
348 348
349 349 def __setitem__(self, key, (perm, origin)):
350 350 self.perm_origin_stack.setdefault(key, []).append((perm, origin))
351 351 dict.__setitem__(self, key, perm)
352 352
353 353
354 354 class PermissionCalculator(object):
355 355
356 356 def __init__(
357 357 self, user_id, scope, user_is_admin,
358 358 user_inherit_default_permissions, explicit, algo):
359 359 self.user_id = user_id
360 360 self.user_is_admin = user_is_admin
361 361 self.inherit_default_permissions = user_inherit_default_permissions
362 362 self.explicit = explicit
363 363 self.algo = algo
364 364
365 365 scope = scope or {}
366 366 self.scope_repo_id = scope.get('repo_id')
367 367 self.scope_repo_group_id = scope.get('repo_group_id')
368 368 self.scope_user_group_id = scope.get('user_group_id')
369 369
370 370 self.default_user_id = User.get_default_user(cache=True).user_id
371 371
372 372 self.permissions_repositories = PermOriginDict()
373 373 self.permissions_repository_groups = PermOriginDict()
374 374 self.permissions_user_groups = PermOriginDict()
375 375 self.permissions_global = set()
376 376
377 377 self.default_repo_perms = Permission.get_default_repo_perms(
378 378 self.default_user_id, self.scope_repo_id)
379 379 self.default_repo_groups_perms = Permission.get_default_group_perms(
380 380 self.default_user_id, self.scope_repo_group_id)
381 381 self.default_user_group_perms = \
382 382 Permission.get_default_user_group_perms(
383 383 self.default_user_id, self.scope_user_group_id)
384 384
385 385 def calculate(self):
386 386 if self.user_is_admin:
387 387 return self._admin_permissions()
388 388
389 389 self._calculate_global_default_permissions()
390 390 self._calculate_global_permissions()
391 391 self._calculate_default_permissions()
392 392 self._calculate_repository_permissions()
393 393 self._calculate_repository_group_permissions()
394 394 self._calculate_user_group_permissions()
395 395 return self._permission_structure()
396 396
397 397 def _admin_permissions(self):
398 398 """
399 399 admin users have all default rights for repositories
400 400 and groups set to admin
401 401 """
402 402 self.permissions_global.add('hg.admin')
403 403 self.permissions_global.add('hg.create.write_on_repogroup.true')
404 404
405 405 # repositories
406 406 for perm in self.default_repo_perms:
407 407 r_k = perm.UserRepoToPerm.repository.repo_name
408 408 p = 'repository.admin'
409 409 self.permissions_repositories[r_k] = p, PermOrigin.ADMIN
410 410
411 411 # repository groups
412 412 for perm in self.default_repo_groups_perms:
413 413 rg_k = perm.UserRepoGroupToPerm.group.group_name
414 414 p = 'group.admin'
415 415 self.permissions_repository_groups[rg_k] = p, PermOrigin.ADMIN
416 416
417 417 # user groups
418 418 for perm in self.default_user_group_perms:
419 419 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
420 420 p = 'usergroup.admin'
421 421 self.permissions_user_groups[u_k] = p, PermOrigin.ADMIN
422 422
423 423 return self._permission_structure()
424 424
425 425 def _calculate_global_default_permissions(self):
426 426 """
427 427 global permissions taken from the default user
428 428 """
429 429 default_global_perms = UserToPerm.query()\
430 430 .filter(UserToPerm.user_id == self.default_user_id)\
431 431 .options(joinedload(UserToPerm.permission))
432 432
433 433 for perm in default_global_perms:
434 434 self.permissions_global.add(perm.permission.permission_name)
435 435
436 436 def _calculate_global_permissions(self):
437 437 """
438 438 Set global system permissions with user permissions or permissions
439 439 taken from the user groups of the current user.
440 440
441 441 The permissions include repo creating, repo group creating, forking
442 442 etc.
443 443 """
444 444
445 445 # now we read the defined permissions and overwrite what we have set
446 446 # before; those can be configured from groups or users explicitly.
447 447
448 448 # TODO: johbo: This seems to be out of sync, find out the reason
449 449 # for the comment below and update it.
450 450
451 451 # In case we want to extend this list we should be always in sync with
452 452 # User.DEFAULT_USER_PERMISSIONS definitions
453 453 _configurable = frozenset([
454 454 'hg.fork.none', 'hg.fork.repository',
455 455 'hg.create.none', 'hg.create.repository',
456 456 'hg.usergroup.create.false', 'hg.usergroup.create.true',
457 457 'hg.repogroup.create.false', 'hg.repogroup.create.true',
458 458 'hg.create.write_on_repogroup.false',
459 459 'hg.create.write_on_repogroup.true',
460 460 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
461 461 ])
462 462
463 463 # USER GROUPS come first: user group global permissions
464 464 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
465 465 .options(joinedload(UserGroupToPerm.permission))\
466 466 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
467 467 UserGroupMember.users_group_id))\
468 468 .filter(UserGroupMember.user_id == self.user_id)\
469 469 .order_by(UserGroupToPerm.users_group_id)\
470 470 .all()
471 471
472 472 # need to group here by groups since user can be in more than
473 473 # one group, so we get all groups
474 474 _explicit_grouped_perms = [
475 475 [x, list(y)] for x, y in
476 476 itertools.groupby(user_perms_from_users_groups,
477 477 lambda _x: _x.users_group)]
478 478
479 479 for gr, perms in _explicit_grouped_perms:
480 480 # since user can be in multiple groups iterate over them and
481 481 # select the lowest permissions first (more explicit)
482 482 # TODO: marcink: do this^^
483 483
484 484 # group doesn't inherit default permissions so we actually set them
485 485 if not gr.inherit_default_permissions:
486 486 # NEED TO IGNORE all previously set configurable permissions
487 487 # and replace them with explicitly set from this user
488 488 # group permissions
489 489 self.permissions_global = self.permissions_global.difference(
490 490 _configurable)
491 491 for perm in perms:
492 492 self.permissions_global.add(perm.permission.permission_name)
493 493
494 494 # user explicit global permissions
495 495 user_perms = Session().query(UserToPerm)\
496 496 .options(joinedload(UserToPerm.permission))\
497 497 .filter(UserToPerm.user_id == self.user_id).all()
498 498
499 499 if not self.inherit_default_permissions:
500 500 # NEED TO IGNORE all configurable permissions and
501 501 # replace them with explicitly set from this user permissions
502 502 self.permissions_global = self.permissions_global.difference(
503 503 _configurable)
504 504 for perm in user_perms:
505 505 self.permissions_global.add(perm.permission.permission_name)
506 506
507 507 def _calculate_default_permissions(self):
508 508 """
509 509 Set default user permissions for repositories, repository groups
510 510 taken from the default user.
511 511
512 512 Calculate inheritance of object permissions based on what we have now
513 513 in GLOBAL permissions. We check if .false is in GLOBAL since this is
514 514 explicitly set. Inherit is the opposite of .false being there.
515 515
516 516 .. note::
517 517
518 518 the syntax is a little bit odd, but what we need to check here is
519 519 the opposite of the .false permission being in the list, so even for
520 520 an inconsistent state when both .true/.false are there,
521 521 .false is more important
522 522
523 523 """
524 524 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
525 525 in self.permissions_global)
526 526
527 527 # defaults for repositories, taken from `default` user permissions
528 528 # on given repo
529 529 for perm in self.default_repo_perms:
530 530 r_k = perm.UserRepoToPerm.repository.repo_name
531 531 o = PermOrigin.REPO_DEFAULT
532 532 if perm.Repository.private and not (
533 533 perm.Repository.user_id == self.user_id):
534 534 # disable defaults for private repos,
535 535 p = 'repository.none'
536 536 o = PermOrigin.REPO_PRIVATE
537 537 elif perm.Repository.user_id == self.user_id:
538 538 # set admin if owner
539 539 p = 'repository.admin'
540 540 o = PermOrigin.REPO_OWNER
541 541 else:
542 542 p = perm.Permission.permission_name
543 543 # if we decide this user isn't inheriting permissions from
544 544 # default user we set him to .none so only explicit
545 545 # permissions work
546 546 if not user_inherit_object_permissions:
547 547 p = 'repository.none'
548 548 self.permissions_repositories[r_k] = p, o
549 549
550 550 # defaults for repository groups taken from `default` user permission
551 551 # on given group
552 552 for perm in self.default_repo_groups_perms:
553 553 rg_k = perm.UserRepoGroupToPerm.group.group_name
554 554 o = PermOrigin.REPOGROUP_DEFAULT
555 555 if perm.RepoGroup.user_id == self.user_id:
556 556 # set admin if owner
557 557 p = 'group.admin'
558 558 o = PermOrigin.REPOGROUP_OWNER
559 559 else:
560 560 p = perm.Permission.permission_name
561 561
562 562 # if we decide this user isn't inheriting permissions from default
563 563 # user we set him to .none so only explicit permissions work
564 564 if not user_inherit_object_permissions:
565 565 p = 'group.none'
566 566 self.permissions_repository_groups[rg_k] = p, o
567 567
568 568 # defaults for user groups taken from `default` user permission
569 569 # on given user group
570 570 for perm in self.default_user_group_perms:
571 571 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
572 572 o = PermOrigin.USERGROUP_DEFAULT
573 573 if perm.UserGroup.user_id == self.user_id:
574 574 # set admin if owner
575 575 p = 'usergroup.admin'
576 576 o = PermOrigin.USERGROUP_OWNER
577 577 else:
578 578 p = perm.Permission.permission_name
579 579
580 580 # if we decide this user isn't inheriting permissions from default
581 581 # user we set him to .none so only explicit permissions work
582 582 if not user_inherit_object_permissions:
583 583 p = 'usergroup.none'
584 584 self.permissions_user_groups[u_k] = p, o
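
# Illustrative sketch: the ".false wins" rule from the note in the docstring
# above, in isolation. Inheritance of object permissions is on only when the
# explicit .false flag is absent, even if .true is also present.
def inherits_object_permissions(global_perms):
    return 'hg.inherit_default_perms.false' not in global_perms

assert inherits_object_permissions({'hg.inherit_default_perms.true'})
assert not inherits_object_permissions(
    {'hg.inherit_default_perms.true', 'hg.inherit_default_perms.false'})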
585 585
586 586 def _calculate_repository_permissions(self):
587 587 """
588 588 Repository permissions for the current user.
589 589
590 590 Check if the user is part of user groups for this repository and
591 591 fill in the permission from it. `_choose_permission` decides which
592 592 permission should be selected based on the selected method.
593 593 """
594 594
595 595 # user group for repositories permissions
596 596 user_repo_perms_from_user_group = Permission\
597 597 .get_default_repo_perms_from_user_group(
598 598 self.user_id, self.scope_repo_id)
599 599
600 600 multiple_counter = collections.defaultdict(int)
601 601 for perm in user_repo_perms_from_user_group:
602 602 r_k = perm.UserGroupRepoToPerm.repository.repo_name
603 603 ug_k = perm.UserGroupRepoToPerm.users_group.users_group_name
604 604 multiple_counter[r_k] += 1
605 605 p = perm.Permission.permission_name
606 606 o = PermOrigin.REPO_USERGROUP % ug_k
607 607
608 608 if perm.Repository.user_id == self.user_id:
609 609 # set admin if owner
610 610 p = 'repository.admin'
611 611 o = PermOrigin.REPO_OWNER
612 612 else:
613 613 if multiple_counter[r_k] > 1:
614 614 cur_perm = self.permissions_repositories[r_k]
615 615 p = self._choose_permission(p, cur_perm)
616 616 self.permissions_repositories[r_k] = p, o
617 617
618 618 # user explicit permissions for repositories override any specified
619 619 # by the group permission
620 620 user_repo_perms = Permission.get_default_repo_perms(
621 621 self.user_id, self.scope_repo_id)
622 622 for perm in user_repo_perms:
623 623 r_k = perm.UserRepoToPerm.repository.repo_name
624 624 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
625 625 # set admin if owner
626 626 if perm.Repository.user_id == self.user_id:
627 627 p = 'repository.admin'
628 628 o = PermOrigin.REPO_OWNER
629 629 else:
630 630 p = perm.Permission.permission_name
631 631 if not self.explicit:
632 632 cur_perm = self.permissions_repositories.get(
633 633 r_k, 'repository.none')
634 634 p = self._choose_permission(p, cur_perm)
635 635 self.permissions_repositories[r_k] = p, o
636 636
637 637 def _calculate_repository_group_permissions(self):
638 638 """
639 639 Repository group permissions for the current user.
640 640
641 641 Check if the user is part of user groups for repository groups and
642 642 fill in the permissions from it. `_choose_permission` decides which
643 643 permission should be selected based on the selected method.
644 644 """
645 645 # user group for repo groups permissions
646 646 user_repo_group_perms_from_user_group = Permission\
647 647 .get_default_group_perms_from_user_group(
648 648 self.user_id, self.scope_repo_group_id)
649 649
650 650 multiple_counter = collections.defaultdict(int)
651 651 for perm in user_repo_group_perms_from_user_group:
652 652 g_k = perm.UserGroupRepoGroupToPerm.group.group_name
653 653 ug_k = perm.UserGroupRepoGroupToPerm.users_group.users_group_name
654 654 o = PermOrigin.REPOGROUP_USERGROUP % ug_k
655 655 multiple_counter[g_k] += 1
656 656 p = perm.Permission.permission_name
657 657 if perm.RepoGroup.user_id == self.user_id:
658 658 # set admin if owner, even for member of other user group
659 659 p = 'group.admin'
660 660 o = PermOrigin.REPOGROUP_OWNER
661 661 else:
662 662 if multiple_counter[g_k] > 1:
663 663 cur_perm = self.permissions_repository_groups[g_k]
664 664 p = self._choose_permission(p, cur_perm)
665 665 self.permissions_repository_groups[g_k] = p, o
666 666
667 667 # user explicit permissions for repository groups
668 668 user_repo_groups_perms = Permission.get_default_group_perms(
669 669 self.user_id, self.scope_repo_group_id)
670 670 for perm in user_repo_groups_perms:
671 671 rg_k = perm.UserRepoGroupToPerm.group.group_name
672 672 u_k = perm.UserRepoGroupToPerm.user.username
673 673 o = PermOrigin.REPOGROUP_USER % u_k
674 674
675 675 if perm.RepoGroup.user_id == self.user_id:
676 676 # set admin if owner
677 677 p = 'group.admin'
678 678 o = PermOrigin.REPOGROUP_OWNER
679 679 else:
680 680 p = perm.Permission.permission_name
681 681 if not self.explicit:
682 682 cur_perm = self.permissions_repository_groups.get(
683 683 rg_k, 'group.none')
684 684 p = self._choose_permission(p, cur_perm)
685 685 self.permissions_repository_groups[rg_k] = p, o
686 686
687 687 def _calculate_user_group_permissions(self):
688 688 """
689 689 User group permissions for the current user.
690 690 """
691 691 # user group for user group permissions
692 692 user_group_from_user_group = Permission\
693 693 .get_default_user_group_perms_from_user_group(
694 694 self.user_id, self.scope_user_group_id)
695 695
696 696 multiple_counter = collections.defaultdict(int)
697 697 for perm in user_group_from_user_group:
698 698 g_k = perm.UserGroupUserGroupToPerm\
699 699 .target_user_group.users_group_name
700 700 u_k = perm.UserGroupUserGroupToPerm\
701 701 .user_group.users_group_name
702 702 o = PermOrigin.USERGROUP_USERGROUP % u_k
703 703 multiple_counter[g_k] += 1
704 704 p = perm.Permission.permission_name
705 705
706 706 if perm.UserGroup.user_id == self.user_id:
707 707 # set admin if owner, even for member of other user group
708 708 p = 'usergroup.admin'
709 709 o = PermOrigin.USERGROUP_OWNER
710 710 else:
711 711 if multiple_counter[g_k] > 1:
712 712 cur_perm = self.permissions_user_groups[g_k]
713 713 p = self._choose_permission(p, cur_perm)
714 714 self.permissions_user_groups[g_k] = p, o
715 715
716 716 # user explicit permission for user groups
717 717 user_user_groups_perms = Permission.get_default_user_group_perms(
718 718 self.user_id, self.scope_user_group_id)
719 719 for perm in user_user_groups_perms:
720 720 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
721 721 u_k = perm.UserUserGroupToPerm.user.username
722 722 o = PermOrigin.USERGROUP_USER % u_k
723 723
724 724 if perm.UserGroup.user_id == self.user_id:
725 725 # set admin if owner
726 726 p = 'usergroup.admin'
727 727 o = PermOrigin.USERGROUP_OWNER
728 728 else:
729 729 p = perm.Permission.permission_name
730 730 if not self.explicit:
731 731 cur_perm = self.permissions_user_groups.get(
732 732 ug_k, 'usergroup.none')
733 733 p = self._choose_permission(p, cur_perm)
734 734 self.permissions_user_groups[ug_k] = p, o
735 735
736 736 def _choose_permission(self, new_perm, cur_perm):
737 737 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
738 738 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
739 739 if self.algo == 'higherwin':
740 740 if new_perm_val > cur_perm_val:
741 741 return new_perm
742 742 return cur_perm
743 743 elif self.algo == 'lowerwin':
744 744 if new_perm_val < cur_perm_val:
745 745 return new_perm
746 746 return cur_perm
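
# Illustrative sketch: a minimal standalone model of the 'higherwin' /
# 'lowerwin' selection above. The weights below are made up for the example;
# the real values live in Permission.PERM_WEIGHTS.
EXAMPLE_WEIGHTS = {
    'repository.none': 0,
    'repository.read': 1,
    'repository.write': 3,
    'repository.admin': 4,
}

def choose_permission(new_perm, cur_perm, algo='higherwin'):
    if algo == 'higherwin':
        return new_perm if EXAMPLE_WEIGHTS[new_perm] > EXAMPLE_WEIGHTS[cur_perm] else cur_perm
    return new_perm if EXAMPLE_WEIGHTS[new_perm] < EXAMPLE_WEIGHTS[cur_perm] else cur_perm

assert choose_permission('repository.write', 'repository.read') == 'repository.write'
assert choose_permission('repository.write', 'repository.read', 'lowerwin') == 'repository.read'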
747 747
748 748 def _permission_structure(self):
749 749 return {
750 750 'global': self.permissions_global,
751 751 'repositories': self.permissions_repositories,
752 752 'repositories_groups': self.permissions_repository_groups,
753 753 'user_groups': self.permissions_user_groups,
754 754 }
755 755
756 756
757 757 def allowed_auth_token_access(controller_name, whitelist=None, auth_token=None):
758 758 """
759 759 Check if the given controller_name is in the whitelist for auth token access
760 760 """
761 761 if not whitelist:
762 762 from rhodecode import CONFIG
763 763 whitelist = aslist(
764 764 CONFIG.get('api_access_controllers_whitelist'), sep=',')
765 765 log.debug(
766 766 'Allowed controllers for AUTH TOKEN access: %s' % (whitelist,))
767 767
768 768 auth_token_access_valid = False
769 769 for entry in whitelist:
770 770 if fnmatch.fnmatch(controller_name, entry):
771 771 auth_token_access_valid = True
772 772 break
773 773
774 774 if auth_token_access_valid:
775 775 log.debug('controller:%s matches entry in whitelist'
776 776 % (controller_name,))
777 777 else:
778 778 msg = ('controller: %s does *NOT* match any entry in whitelist'
779 779 % (controller_name,))
780 780 if auth_token:
781 781 # if we use auth token key and don't have access it's a warning
782 782 log.warning(msg)
783 783 else:
784 784 log.debug(msg)
785 785
786 786 return auth_token_access_valid
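
# Illustrative sketch: the whitelist check above is plain fnmatch glob
# matching; a minimal standalone equivalent. The whitelist entries here are
# made up for the example, not a recommended configuration.
import fnmatch

def in_whitelist(controller_name, whitelist):
    return any(fnmatch.fnmatch(controller_name, entry) for entry in whitelist)

assert in_whitelist('ChangesetController:changeset_raw', ['ChangesetController:*'])
assert not in_whitelist('SummaryController:index', ['ChangesetController:*'])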
787 787
788 788
789 789 class AuthUser(object):
790 790 """
791 791 A simple object that handles all attributes of user in RhodeCode
792 792
793 793 It does a lookup based on API key, given user, or the user present in the
794 794 session, then fills in all required information for that user. It also checks
795 795 if anonymous access is enabled and, if so, returns the default user as logged in
796 796 """
797 797 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
798 798
799 799 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
800 800
801 801 self.user_id = user_id
802 802 self._api_key = api_key
803 803
804 804 self.api_key = None
805 805 self.feed_token = ''
806 806 self.username = username
807 807 self.ip_addr = ip_addr
808 808 self.name = ''
809 809 self.lastname = ''
810 self.first_name = ''
811 self.last_name = ''
810 812 self.email = ''
811 813 self.is_authenticated = False
812 814 self.admin = False
813 815 self.inherit_default_permissions = False
814 816 self.password = ''
815 817
816 818 self.anonymous_user = None # propagated on propagate_data
817 819 self.propagate_data()
818 820 self._instance = None
819 821 self._permissions_scoped_cache = {} # used to bind scoped calculation
820 822
821 823 @LazyProperty
822 824 def permissions(self):
823 825 return self.get_perms(user=self, cache=False)
824 826
825 827 def permissions_with_scope(self, scope):
826 828 """
827 829 Call the get_perms function with scoped data. The scope in that function
828 830 narrows the SQL calls to the given ID of objects resulting in fetching
829 831 Just particular permission we want to obtain. If scope is an empty dict
830 832 then it basically narrows the scope to GLOBAL permissions only.
831 833
832 834 :param scope: dict
833 835 """
834 836 if 'repo_name' in scope:
835 837 obj = Repository.get_by_repo_name(scope['repo_name'])
836 838 if obj:
837 839 scope['repo_id'] = obj.repo_id
838 840 _scope = {
839 841 'repo_id': -1,
840 842 'user_group_id': -1,
841 843 'repo_group_id': -1,
842 844 }
843 845 _scope.update(scope)
844 846 cache_key = "_".join(map(safe_str, reduce(lambda a, b: a+b,
845 847 _scope.items())))
846 848 if cache_key not in self._permissions_scoped_cache:
847 849 # store in cache to mimic how the @LazyProperty works,
848 850 # the difference here is that we use the unique key calculated
849 851 # from params and values
850 852 res = self.get_perms(user=self, cache=False, scope=_scope)
851 853 self._permissions_scoped_cache[cache_key] = res
852 854 return self._permissions_scoped_cache[cache_key]
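
# Illustrative sketch: a standalone model of the scoped-permission cache key
# built above -- flatten the (defaulted) scope dict into a stable string and
# memoize per key. Sorting is added here for key stability; the cached value
# is a placeholder rather than a real get_perms() result.
def scope_cache_key(scope):
    defaults = {'repo_id': -1, 'user_group_id': -1, 'repo_group_id': -1}
    defaults.update(scope)
    return '_'.join(str(part) for item in sorted(defaults.items()) for part in item)

_scope_cache = {}

def scoped_permissions(scope):
    key = scope_cache_key(scope)
    if key not in _scope_cache:
        _scope_cache[key] = {'scope_key': key}  # placeholder for get_perms(...)
    return _scope_cache[key]

assert scoped_permissions({'repo_id': 5}) is scoped_permissions({'repo_id': 5})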
853 855
854 856 def get_instance(self):
855 857 return User.get(self.user_id)
856 858
857 859 def update_lastactivity(self):
858 860 if self.user_id:
859 861 User.get(self.user_id).update_lastactivity()
860 862
861 863 def propagate_data(self):
862 864 """
863 865 Fills in user data and propagates values to this instance. Maps fetched
864 866 user attributes to this class instance attributes
865 867 """
866 868 log.debug('starting data propagation for new potential AuthUser')
867 869 user_model = UserModel()
868 870 anon_user = self.anonymous_user = User.get_default_user(cache=True)
869 871 is_user_loaded = False
870 872
871 873 # lookup by userid
872 874 if self.user_id is not None and self.user_id != anon_user.user_id:
873 875 log.debug('Trying Auth User lookup by USER ID: `%s`' % self.user_id)
874 876 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
875 877
876 878 # try to get user by api key
877 879 elif self._api_key and self._api_key != anon_user.api_key:
878 880 log.debug('Trying Auth User lookup by API KEY: `%s`' % self._api_key)
879 881 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
880 882
881 883 # lookup by username
882 884 elif self.username:
883 885 log.debug('Trying Auth User lookup by USER NAME: `%s`' % self.username)
884 886 is_user_loaded = user_model.fill_data(self, username=self.username)
885 887 else:
886 888 log.debug('No data in %s that could have been used to log in' % self)
887 889
888 890 if not is_user_loaded:
889 891 log.debug('Failed to load user. Fallback to default user')
890 892 # if we cannot authenticate user try anonymous
891 893 if anon_user.active:
892 894 user_model.fill_data(self, user_id=anon_user.user_id)
893 895 # then we mark this user as logged in
894 896 self.is_authenticated = True
895 897 else:
896 898 # in case of a disabled anonymous user we reset some of the
897 899 # parameters so that such a user is "corrupted"; fill_data is skipped
898 900 for attr in ['user_id', 'username', 'admin', 'active']:
899 901 setattr(self, attr, None)
900 902 self.is_authenticated = False
901 903
902 904 if not self.username:
903 905 self.username = 'None'
904 906
905 907 log.debug('Auth User is now %s' % self)
906 908
907 909 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
908 910 cache=False):
909 911 """
910 912 Fills the user permission attribute with permissions taken from the database.
911 913 Works for permissions given for repositories, and for permissions that
912 914 are granted to groups.
913 915
914 916 :param user: instance of User object from the database
915 917 :param explicit: in case there are permissions both for the user and a group
916 918 the user is part of, the explicit flag defines whether the user permission
917 919 explicitly overrides the group permission; if it's False the decision is
918 920 made based on the algo
919 921 :param algo: algorithm used to decide which permission should be chosen if
920 922 multiple are defined, e.g. the user is in two different groups. It also
921 923 decides, when the explicit flag is turned off, how to pick the permission
922 924 for the case when the user is in a group and also has a separate permission defined
923 925 """
924 926 user_id = user.user_id
925 927 user_is_admin = user.is_admin
926 928
927 929 # inheritance of global permissions like create repo/fork repo etc
928 930 user_inherit_default_permissions = user.inherit_default_permissions
929 931
930 932 log.debug('Computing PERMISSION tree for scope %s' % (scope, ))
931 933 compute = caches.conditional_cache(
932 934 'short_term', 'cache_desc',
933 935 condition=cache, func=_cached_perms_data)
934 936 result = compute(user_id, scope, user_is_admin,
935 937 user_inherit_default_permissions, explicit, algo)
936 938
937 939 result_repr = []
938 940 for k in result:
939 941 result_repr.append((k, len(result[k])))
940 942
941 943 log.debug('PERMISSION tree computed %s' % (result_repr,))
942 944 return result
943 945
944 946 @property
945 947 def is_default(self):
946 948 return self.username == User.DEFAULT_USER
947 949
948 950 @property
949 951 def is_admin(self):
950 952 return self.admin
951 953
952 954 @property
953 955 def is_user_object(self):
954 956 return self.user_id is not None
955 957
956 958 @property
957 959 def repositories_admin(self):
958 960 """
959 961 Returns list of repositories you're an admin of
960 962 """
961 963 return [
962 964 x[0] for x in self.permissions['repositories'].iteritems()
963 965 if x[1] == 'repository.admin']
964 966
965 967 @property
966 968 def repository_groups_admin(self):
967 969 """
968 970 Returns list of repository groups you're an admin of
969 971 """
970 972 return [
971 973 x[0] for x in self.permissions['repositories_groups'].iteritems()
972 974 if x[1] == 'group.admin']
973 975
974 976 @property
975 977 def user_groups_admin(self):
976 978 """
977 979 Returns list of user groups you're an admin of
978 980 """
979 981 return [
980 982 x[0] for x in self.permissions['user_groups'].iteritems()
981 983 if x[1] == 'usergroup.admin']
982 984
983 985 @property
984 986 def ip_allowed(self):
985 987 """
986 988 Checks if the ip_addr used in the constructor is within the defined list of
987 989 allowed IP addresses for the user
988 990
989 991 :returns: boolean, True if ip is in allowed ip range
990 992 """
991 993 # check IP
992 994 inherit = self.inherit_default_permissions
993 995 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
994 996 inherit_from_default=inherit)
995 997 @property
996 998 def personal_repo_group(self):
997 999 return RepoGroup.get_user_personal_repo_group(self.user_id)
998 1000
999 1001 @classmethod
1000 1002 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1001 1003 allowed_ips = AuthUser.get_allowed_ips(
1002 1004 user_id, cache=True, inherit_from_default=inherit_from_default)
1003 1005 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1004 1006 log.debug('IP:%s is in range of %s' % (ip_addr, allowed_ips))
1005 1007 return True
1006 1008 else:
1007 1009 log.info('Access for IP:%s forbidden, '
1008 1010 'not in %s' % (ip_addr, allowed_ips))
1009 1011 return False
1010 1012
1011 1013 def __repr__(self):
1012 1014 return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
1013 1015 % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
1014 1016
1015 1017 def set_authenticated(self, authenticated=True):
1016 1018 if self.user_id != self.anonymous_user.user_id:
1017 1019 self.is_authenticated = authenticated
1018 1020
1019 1021 def get_cookie_store(self):
1020 1022 return {
1021 1023 'username': self.username,
1022 1024 'password': md5(self.password),
1023 1025 'user_id': self.user_id,
1024 1026 'is_authenticated': self.is_authenticated
1025 1027 }
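
# Round-trip sketch for the cookie store (illustrative only):
#
#   stored = auth_user.get_cookie_store()
#   # ... the caller serializes `stored` into the session cookie ...
#   restored = AuthUser.from_cookie_store(stored)
#
# Only the username, hashed password, user_id and the authenticated flag are
# kept; from_cookie_store re-resolves everything else via propagate_data().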
1026 1028
1027 1029 @classmethod
1028 1030 def from_cookie_store(cls, cookie_store):
1029 1031 """
1030 1032 Creates AuthUser from a cookie store
1031 1033
1032 1034 :param cls:
1033 1035 :param cookie_store:
1034 1036 """
1035 1037 user_id = cookie_store.get('user_id')
1036 1038 username = cookie_store.get('username')
1037 1039 api_key = cookie_store.get('api_key')
1038 1040 return AuthUser(user_id, api_key, username)
1039 1041
1040 1042 @classmethod
1041 1043 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1042 1044 _set = set()
1043 1045
1044 1046 if inherit_from_default:
1045 1047 default_ips = UserIpMap.query().filter(
1046 1048 UserIpMap.user == User.get_default_user(cache=True))
1047 1049 if cache:
1048 1050 default_ips = default_ips.options(
1049 1051 FromCache("sql_cache_short", "get_user_ips_default"))
1050 1052
1051 1053 # populate from default user
1052 1054 for ip in default_ips:
1053 1055 try:
1054 1056 _set.add(ip.ip_addr)
1055 1057 except ObjectDeletedError:
1056 1058 # since we use heavy caching sometimes it happens that
1057 1059 # we get deleted objects here, we just skip them
1058 1060 pass
1059 1061
1060 1062 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1061 1063 if cache:
1062 1064 user_ips = user_ips.options(
1063 1065 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1064 1066
1065 1067 for ip in user_ips:
1066 1068 try:
1067 1069 _set.add(ip.ip_addr)
1068 1070 except ObjectDeletedError:
1069 1071 # since we use heavy caching sometimes it happens that we get
1070 1072 # deleted objects here, we just skip them
1071 1073 pass
1072 1074 return _set or set(['0.0.0.0/0', '::/0'])
1073 1075
1074 1076
1075 1077 def set_available_permissions(config):
1076 1078 """
1077 1079 This function will populate the pylons globals with all available permissions
1078 1080 defined in the db. We don't want to check the db each time for new
1079 1081 permissions, since adding a new permission also requires an application restart,
1080 1082 i.e. to decorate new views with the newly created permission
1081 1083
1082 1084 :param config: current pylons config instance
1083 1085
1084 1086 """
1085 1087 log.info('getting information about all available permissions')
1086 1088 try:
1087 1089 sa = meta.Session
1088 1090 all_perms = sa.query(Permission).all()
1089 1091 config['available_permissions'] = [x.permission_name for x in all_perms]
1090 1092 except Exception:
1091 1093 log.error(traceback.format_exc())
1092 1094 finally:
1093 1095 meta.Session.remove()
1094 1096
1095 1097
1096 1098 def get_csrf_token(session=None, force_new=False, save_if_missing=True):
1097 1099 """
1098 1100 Return the current CSRF protection token, creating one if it doesn't
1099 1101 already exist and the save_if_missing flag is set (see the example below).
1100 1102
1101 1103 :param session: pass in the pylons session, else we use the global ones
1102 1104 :param force_new: force to re-generate the token and store it in session
1103 1105 :param save_if_missing: save the newly generated token if it's missing in
1104 1106 session
1105 1107 """
1106 1108 if not session:
1107 1109 from pylons import session
1108 1110
1109 1111 if (csrf_token_key not in session and save_if_missing) or force_new:
1110 1112 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
1111 1113 session[csrf_token_key] = token
1112 1114 if hasattr(session, 'save'):
1113 1115 session.save()
1114 1116 return session.get(csrf_token_key)
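
# Example usage of get_csrf_token (a sketch, assuming a pylons-style session
# object is at hand):
#
#   token = get_csrf_token(session)                    # create/store if missing
#   token = get_csrf_token(session, force_new=True)    # rotate the token
#
# This token is what the CSRFRequired decorator below compares against the
# value submitted in the form field or in the X-CSRF-Token header.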
1115 1117
1116 1118
1117 1119 def get_request(perm_class):
1118 1120 from pyramid.threadlocal import get_current_request
1119 1121 pyramid_request = get_current_request()
1120 1122 if not pyramid_request:
1121 1123 # return global request of pylons in case pyramid isn't available
1122 1124 # NOTE(marcink): this should be removed after migration to pyramid
1123 1125 from pylons import request
1124 1126 return request
1125 1127 return pyramid_request
1126 1128
1127 1129
1128 1130 # CHECK DECORATORS
1129 1131 class CSRFRequired(object):
1130 1132 """
1131 1133 Decorator for authenticating a form
1132 1134
1133 1135 This decorator uses an authorization token stored in the client's
1134 1136 session for prevention of certain Cross-site request forgery (CSRF)
1135 1137 attacks (See
1136 1138 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1137 1139 information).
1138 1140
1139 1141 For use with the ``webhelpers.secure_form`` helper functions.
1140 1142
1141 1143 """
1142 1144 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1143 1145 except_methods=None):
1144 1146 self.token = token
1145 1147 self.header = header
1146 1148 self.except_methods = except_methods or []
1147 1149
1148 1150 def __call__(self, func):
1149 1151 return get_cython_compat_decorator(self.__wrapper, func)
1150 1152
1151 1153 def _get_csrf(self, _request):
1152 1154 return _request.POST.get(self.token, _request.headers.get(self.header))
1153 1155
1154 1156 def check_csrf(self, _request, cur_token):
1155 1157 supplied_token = self._get_csrf(_request)
1156 1158 return supplied_token and supplied_token == cur_token
1157 1159
1158 1160 def _get_request(self):
1159 1161 return get_request(self)
1160 1162
1161 1163 def __wrapper(self, func, *fargs, **fkwargs):
1162 1164 request = self._get_request()
1163 1165
1164 1166 if request.method in self.except_methods:
1165 1167 return func(*fargs, **fkwargs)
1166 1168
1167 1169 cur_token = get_csrf_token(save_if_missing=False)
1168 1170 if self.check_csrf(request, cur_token):
1169 1171 if request.POST.get(self.token):
1170 1172 del request.POST[self.token]
1171 1173 return func(*fargs, **fkwargs)
1172 1174 else:
1173 1175 reason = 'token-missing'
1174 1176 supplied_token = self._get_csrf(request)
1175 1177 if supplied_token and cur_token != supplied_token:
1176 1178 reason = 'token-mismatch [%s:%s]' % (
1177 1179 (cur_token or '')[:6], (supplied_token or '')[:6])
1178 1180
1179 1181 csrf_message = \
1180 1182 ("Cross-site request forgery detected, request denied. See "
1181 1183 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1182 1184 "more information.")
1183 1185 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1184 1186 'REMOTE_ADDR:%s, HEADERS:%s' % (
1185 1187 request, reason, request.remote_addr, request.headers))
1186 1188
1187 1189 raise HTTPForbidden(explanation=csrf_message)
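
# Typical use of CSRFRequired on a controller method (a sketch; the view name
# is hypothetical):
#
#   @CSRFRequired()
#   def my_settings_update(self):
#       ...
#
# Requests whose method is listed in except_methods skip the check; all other
# requests must carry a token matching the one stored in the session.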
1188 1190
1189 1191
1190 1192 class LoginRequired(object):
1191 1193 """
1192 1194 Must be logged in to execute this function, otherwise
1193 1195 redirects to the login page
1194 1196
1195 1197 :param auth_token_access: if set, this also checks for a valid auth token
1196 1198 and grants access based on that token
1197 1199 """
1198 1200 def __init__(self, auth_token_access=None):
1199 1201 self.auth_token_access = auth_token_access
1200 1202
1201 1203 def __call__(self, func):
1202 1204 return get_cython_compat_decorator(self.__wrapper, func)
1203 1205
1204 1206 def _get_request(self):
1205 1207 return get_request(self)
1206 1208
1207 1209 def __wrapper(self, func, *fargs, **fkwargs):
1208 1210 from rhodecode.lib import helpers as h
1209 1211 cls = fargs[0]
1210 1212 user = cls._rhodecode_user
1211 1213 request = self._get_request()
1212 1214
1213 1215 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1214 1216 log.debug('Starting login restriction checks for user: %s' % (user,))
1215 1217 # check if our IP is allowed
1216 1218 ip_access_valid = True
1217 1219 if not user.ip_allowed:
1218 1220 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1219 1221 category='warning')
1220 1222 ip_access_valid = False
1221 1223
1222 1224 # check if we used an APIKEY and it's a valid one
1223 1225 # defined white-list of controllers for which API access will be enabled
1224 1226 _auth_token = request.GET.get(
1225 1227 'auth_token', '') or request.GET.get('api_key', '')
1226 1228 auth_token_access_valid = allowed_auth_token_access(
1227 1229 loc, auth_token=_auth_token)
1228 1230
1229 1231 # explicit controller is enabled or API is in our whitelist
1230 1232 if self.auth_token_access or auth_token_access_valid:
1231 1233 log.debug('Checking AUTH TOKEN access for %s' % (cls,))
1232 1234 db_user = user.get_instance()
1233 1235
1234 1236 if db_user:
1235 1237 if self.auth_token_access:
1236 1238 roles = self.auth_token_access
1237 1239 else:
1238 1240 roles = [UserApiKeys.ROLE_HTTP]
1239 1241 token_match = db_user.authenticate_by_token(
1240 1242 _auth_token, roles=roles)
1241 1243 else:
1242 1244 log.debug('Unable to fetch db instance for auth user: %s', user)
1243 1245 token_match = False
1244 1246
1245 1247 if _auth_token and token_match:
1246 1248 auth_token_access_valid = True
1247 1249 log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],))
1248 1250 else:
1249 1251 auth_token_access_valid = False
1250 1252 if not _auth_token:
1251 1253 log.debug("AUTH TOKEN *NOT* present in request")
1252 1254 else:
1253 1255 log.warning(
1254 1256 "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:])
1255 1257
1256 1258 log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
1257 1259 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1258 1260 else 'AUTH_TOKEN_AUTH'
1259 1261
1260 1262 if ip_access_valid and (
1261 1263 user.is_authenticated or auth_token_access_valid):
1262 1264 log.info(
1263 1265 'user %s authenticating with:%s IS authenticated on func %s'
1264 1266 % (user, reason, loc))
1265 1267
1266 1268 # update user data to check last activity
1267 1269 user.update_lastactivity()
1268 1270 Session().commit()
1269 1271 return func(*fargs, **fkwargs)
1270 1272 else:
1271 1273 log.warning(
1272 1274 'user %s authenticating with:%s NOT authenticated on '
1273 1275 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s'
1274 1276 % (user, reason, loc, ip_access_valid,
1275 1277 auth_token_access_valid))
1276 1278 # we preserve the GET params
1277 1279 came_from = request.path_qs
1278 1280 log.debug('redirecting to login page with %s' % (came_from,))
1279 1281 raise HTTPFound(
1280 1282 h.route_path('login', _query={'came_from': came_from}))
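
# Typical use of LoginRequired (a sketch; the view name is hypothetical):
#
#   @LoginRequired(auth_token_access=[UserApiKeys.ROLE_HTTP])
#   def my_view(self):
#       ...
#
# Such a view accepts either an interactive login or an ?auth_token=<token>
# (or legacy ?api_key=<token>) query parameter whose role matches, provided
# the client IP passes the allowed-IP check.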
1281 1283
1282 1284
1283 1285 class NotAnonymous(object):
1284 1286 """
1285 1287 Must be logged in to execute this function, otherwise
1286 1288 redirects to the login page
1287 1289 """
1288 1290
1289 1291 def __call__(self, func):
1290 1292 return get_cython_compat_decorator(self.__wrapper, func)
1291 1293
1292 1294 def _get_request(self):
1293 1295 return get_request(self)
1294 1296
1295 1297 def __wrapper(self, func, *fargs, **fkwargs):
1296 1298 import rhodecode.lib.helpers as h
1297 1299 cls = fargs[0]
1298 1300 self.user = cls._rhodecode_user
1299 1301 request = self._get_request()
1300 1302
1301 1303 log.debug('Checking if user is not anonymous @%s' % cls)
1302 1304
1303 1305 anonymous = self.user.username == User.DEFAULT_USER
1304 1306
1305 1307 if anonymous:
1306 1308 came_from = request.path_qs
1307 1309 h.flash(_('You need to be a registered user to '
1308 1310 'perform this action'),
1309 1311 category='warning')
1310 1312 raise HTTPFound(
1311 1313 h.route_path('login', _query={'came_from': came_from}))
1312 1314 else:
1313 1315 return func(*fargs, **fkwargs)
1314 1316
1315 1317
1316 1318 class XHRRequired(object):
1317 1319 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1318 1320
1319 1321 def __call__(self, func):
1320 1322 return get_cython_compat_decorator(self.__wrapper, func)
1321 1323
1322 1324 def _get_request(self):
1323 1325 return get_request(self)
1324 1326
1325 1327 def __wrapper(self, func, *fargs, **fkwargs):
1326 1328 from pylons.controllers.util import abort
1327 1329 request = self._get_request()
1328 1330
1329 1331 log.debug('Checking if request is XMLHttpRequest (XHR)')
1330 1332 xhr_message = 'This is not a valid XMLHttpRequest (XHR) request'
1331 1333
1332 1334 if not request.is_xhr:
1333 1335 abort(400, detail=xhr_message)
1334 1336
1335 1337 return func(*fargs, **fkwargs)
1336 1338
1337 1339
1338 1340 class HasAcceptedRepoType(object):
1339 1341 """
1340 1342 Check if requested repo is within given repo type aliases
1341 1343 """
1342 1344
1343 1345 # TODO(marcink): remove this in favor of the predicates in pyramid routes
1344 1346
1345 1347 def __init__(self, *repo_type_list):
1346 1348 self.repo_type_list = set(repo_type_list)
1347 1349
1348 1350 def __call__(self, func):
1349 1351 return get_cython_compat_decorator(self.__wrapper, func)
1350 1352
1351 1353 def __wrapper(self, func, *fargs, **fkwargs):
1352 1354 import rhodecode.lib.helpers as h
1353 1355 cls = fargs[0]
1354 1356 rhodecode_repo = cls.rhodecode_repo
1355 1357
1356 1358 log.debug('%s checking repo type for %s in %s',
1357 1359 self.__class__.__name__,
1358 1360 rhodecode_repo.alias, self.repo_type_list)
1359 1361
1360 1362 if rhodecode_repo.alias in self.repo_type_list:
1361 1363 return func(*fargs, **fkwargs)
1362 1364 else:
1363 1365 h.flash(h.literal(
1364 1366 _('Action not supported for %s.' % rhodecode_repo.alias)),
1365 1367 category='warning')
1366 1368 raise HTTPFound(
1367 1369 h.route_path('repo_summary',
1368 1370 repo_name=cls.rhodecode_db_repo.repo_name))
1369 1371
1370 1372
1371 1373 class PermsDecorator(object):
1372 1374 """
1373 1375 Base class for controller decorators. We extract the current user from
1374 1376 the class itself, which has it stored in the base controllers
1375 1377 """
1376 1378
1377 1379 def __init__(self, *required_perms):
1378 1380 self.required_perms = set(required_perms)
1379 1381
1380 1382 def __call__(self, func):
1381 1383 return get_cython_compat_decorator(self.__wrapper, func)
1382 1384
1383 1385 def _get_request(self):
1384 1386 return get_request(self)
1385 1387
1386 1388 def _get_came_from(self):
1387 1389 _request = self._get_request()
1388 1390
1389 1391 # both pylons and pyramid have this attribute
1390 1392 return _request.path_qs
1391 1393
1392 1394 def __wrapper(self, func, *fargs, **fkwargs):
1393 1395 import rhodecode.lib.helpers as h
1394 1396 cls = fargs[0]
1395 1397 _user = cls._rhodecode_user
1396 1398
1397 1399 log.debug('checking %s permissions %s for %s %s',
1398 1400 self.__class__.__name__, self.required_perms, cls, _user)
1399 1401
1400 1402 if self.check_permissions(_user):
1401 1403 log.debug('Permission granted for %s %s', cls, _user)
1402 1404 return func(*fargs, **fkwargs)
1403 1405
1404 1406 else:
1405 1407 log.debug('Permission denied for %s %s', cls, _user)
1406 1408 anonymous = _user.username == User.DEFAULT_USER
1407 1409
1408 1410 if anonymous:
1409 1411 came_from = self._get_came_from()
1410 1412 h.flash(_('You need to be signed in to view this page'),
1411 1413 category='warning')
1412 1414 raise HTTPFound(
1413 1415 h.route_path('login', _query={'came_from': came_from}))
1414 1416
1415 1417 else:
1416 1418 # redirect with forbidden ret code
1417 1419 raise HTTPForbidden()
1418 1420
1419 1421 def check_permissions(self, user):
1420 1422 """Dummy function for overriding"""
1421 1423 raise NotImplementedError(
1422 1424 'You have to write this function in a child class')
1423 1425
1424 1426
1425 1427 class HasPermissionAllDecorator(PermsDecorator):
1426 1428 """
1427 1429 Checks for access permission for all given predicates. All of them
1428 1430 have to be met in order to fulfill the request
1429 1431 """
1430 1432
1431 1433 def check_permissions(self, user):
1432 1434 perms = user.permissions_with_scope({})
1433 1435 if self.required_perms.issubset(perms['global']):
1434 1436 return True
1435 1437 return False
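
# Typical use of HasPermissionAllDecorator (a sketch; the view name is
# hypothetical):
#
#   @LoginRequired()
#   @HasPermissionAllDecorator('hg.admin')
#   def some_admin_view(self):
#       ...
#
# The AuthUser is taken from the controller instance and the view only runs
# when every required permission is present in perms['global'].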
1436 1438
1437 1439
1438 1440 class HasPermissionAnyDecorator(PermsDecorator):
1439 1441 """
1440 1442 Checks for access permission for any of given predicates. In order to
1441 1443 fulfill the request any of the predicates must be met
1442 1444 """
1443 1445
1444 1446 def check_permissions(self, user):
1445 1447 perms = user.permissions_with_scope({})
1446 1448 if self.required_perms.intersection(perms['global']):
1447 1449 return True
1448 1450 return False
1449 1451
1450 1452
1451 1453 class HasRepoPermissionAllDecorator(PermsDecorator):
1452 1454 """
1453 1455 Checks for access permission for all given predicates for specific
1454 1456 repository. All of them have to be met in order to fulfill the request
1455 1457 """
1456 1458 def _get_repo_name(self):
1457 1459 _request = self._get_request()
1458 1460 return get_repo_slug(_request)
1459 1461
1460 1462 def check_permissions(self, user):
1461 1463 perms = user.permissions
1462 1464 repo_name = self._get_repo_name()
1463 1465
1464 1466 try:
1465 1467 user_perms = set([perms['repositories'][repo_name]])
1466 1468 except KeyError:
1467 1469 log.debug('cannot locate repo with name: `%s` in permissions defs',
1468 1470 repo_name)
1469 1471 return False
1470 1472
1471 1473 log.debug('checking `%s` permissions for repo `%s`',
1472 1474 user_perms, repo_name)
1473 1475 if self.required_perms.issubset(user_perms):
1474 1476 return True
1475 1477 return False
1476 1478
1477 1479
1478 1480 class HasRepoPermissionAnyDecorator(PermsDecorator):
1479 1481 """
1480 1482 Checks for access permission for any of given predicates for specific
1481 1483 repository. In order to fulfill the request any of the predicates must be met
1482 1484 """
1483 1485 def _get_repo_name(self):
1484 1486 _request = self._get_request()
1485 1487 return get_repo_slug(_request)
1486 1488
1487 1489 def check_permissions(self, user):
1488 1490 perms = user.permissions
1489 1491 repo_name = self._get_repo_name()
1490 1492
1491 1493 try:
1492 1494 user_perms = set([perms['repositories'][repo_name]])
1493 1495 except KeyError:
1494 1496 log.debug('cannot locate repo with name: `%s` in permissions defs',
1495 1497 repo_name)
1496 1498 return False
1497 1499
1498 1500 log.debug('checking `%s` permissions for repo `%s`',
1499 1501 user_perms, repo_name)
1500 1502 if self.required_perms.intersection(user_perms):
1501 1503 return True
1502 1504 return False
1503 1505
1504 1506
1505 1507 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
1506 1508 """
1507 1509 Checks for access permission for all given predicates for specific
1508 1510 repository group. All of them have to be met in order to
1509 1511 fulfill the request
1510 1512 """
1511 1513 def _get_repo_group_name(self):
1512 1514 _request = self._get_request()
1513 1515 return get_repo_group_slug(_request)
1514 1516
1515 1517 def check_permissions(self, user):
1516 1518 perms = user.permissions
1517 1519 group_name = self._get_repo_group_name()
1518 1520 try:
1519 1521 user_perms = set([perms['repositories_groups'][group_name]])
1520 1522 except KeyError:
1521 1523 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1522 1524 group_name)
1523 1525 return False
1524 1526
1525 1527 log.debug('checking `%s` permissions for repo group `%s`',
1526 1528 user_perms, group_name)
1527 1529 if self.required_perms.issubset(user_perms):
1528 1530 return True
1529 1531 return False
1530 1532
1531 1533
1532 1534 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
1533 1535 """
1534 1536 Checks for access permission for any of given predicates for specific
1535 1537 repository group. In order to fulfill the request any
1536 1538 of the predicates must be met
1537 1539 """
1538 1540 def _get_repo_group_name(self):
1539 1541 _request = self._get_request()
1540 1542 return get_repo_group_slug(_request)
1541 1543
1542 1544 def check_permissions(self, user):
1543 1545 perms = user.permissions
1544 1546 group_name = self._get_repo_group_name()
1545 1547
1546 1548 try:
1547 1549 user_perms = set([perms['repositories_groups'][group_name]])
1548 1550 except KeyError:
1549 1551 log.debug('cannot locate repo group with name: `%s` in permissions defs',
1550 1552 group_name)
1551 1553 return False
1552 1554
1553 1555 log.debug('checking `%s` permissions for repo group `%s`',
1554 1556 user_perms, group_name)
1555 1557 if self.required_perms.intersection(user_perms):
1556 1558 return True
1557 1559 return False
1558 1560
1559 1561
1560 1562 class HasUserGroupPermissionAllDecorator(PermsDecorator):
1561 1563 """
1562 1564 Checks for access permission for all given predicates for specific
1563 1565 user group. All of them have to be met in order to fulfill the request
1564 1566 """
1565 1567 def _get_user_group_name(self):
1566 1568 _request = self._get_request()
1567 1569 return get_user_group_slug(_request)
1568 1570
1569 1571 def check_permissions(self, user):
1570 1572 perms = user.permissions
1571 1573 group_name = self._get_user_group_name()
1572 1574 try:
1573 1575 user_perms = set([perms['user_groups'][group_name]])
1574 1576 except KeyError:
1575 1577 return False
1576 1578
1577 1579 if self.required_perms.issubset(user_perms):
1578 1580 return True
1579 1581 return False
1580 1582
1581 1583
1582 1584 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
1583 1585 """
1584 1586 Checks for access permission for any of given predicates for specific
1585 1587 user group. In order to fulfill the request any of the predicates must be met
1586 1588 """
1587 1589 def _get_user_group_name(self):
1588 1590 _request = self._get_request()
1589 1591 return get_user_group_slug(_request)
1590 1592
1591 1593 def check_permissions(self, user):
1592 1594 perms = user.permissions
1593 1595 group_name = self._get_user_group_name()
1594 1596 try:
1595 1597 user_perms = set([perms['user_groups'][group_name]])
1596 1598 except KeyError:
1597 1599 return False
1598 1600
1599 1601 if self.required_perms.intersection(user_perms):
1600 1602 return True
1601 1603 return False
1602 1604
1603 1605
1604 1606 # CHECK FUNCTIONS
1605 1607 class PermsFunction(object):
1606 1608 """Base function for other check functions"""
1607 1609
1608 1610 def __init__(self, *perms):
1609 1611 self.required_perms = set(perms)
1610 1612 self.repo_name = None
1611 1613 self.repo_group_name = None
1612 1614 self.user_group_name = None
1613 1615
1614 1616 def __bool__(self):
1615 1617 frame = inspect.currentframe()
1616 1618 stack_trace = traceback.format_stack(frame)
1617 1619 log.error('Checking bool value on a class instance of perm '
1618 1620 'function is not allowed: %s' % ''.join(stack_trace))
1619 1621 # rather than throwing errors, here we always return False so if by
1620 1622 # accident someone checks truth for just an instance it will always end
1621 1623 # up in returning False
1622 1624 return False
1623 1625 __nonzero__ = __bool__
1624 1626
1625 1627 def __call__(self, check_location='', user=None):
1626 1628 if not user:
1627 1629 log.debug('Using user attribute from global request')
1628 1630 # TODO: remove this someday, put user as an attribute here
1629 1631 request = self._get_request()
1630 1632 user = request.user
1631 1633
1632 1634 # init auth user if not already given
1633 1635 if not isinstance(user, AuthUser):
1634 1636 log.debug('Wrapping user %s into AuthUser', user)
1635 1637 user = AuthUser(user.user_id)
1636 1638
1637 1639 cls_name = self.__class__.__name__
1638 1640 check_scope = self._get_check_scope(cls_name)
1639 1641 check_location = check_location or 'unspecified location'
1640 1642
1641 1643 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
1642 1644 self.required_perms, user, check_scope, check_location)
1643 1645 if not user:
1644 1646 log.warning('Empty user given for permission check')
1645 1647 return False
1646 1648
1647 1649 if self.check_permissions(user):
1648 1650 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1649 1651 check_scope, user, check_location)
1650 1652 return True
1651 1653
1652 1654 else:
1653 1655 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1654 1656 check_scope, user, check_location)
1655 1657 return False
1656 1658
1657 1659 def _get_request(self):
1658 1660 return get_request(self)
1659 1661
1660 1662 def _get_check_scope(self, cls_name):
1661 1663 return {
1662 1664 'HasPermissionAll': 'GLOBAL',
1663 1665 'HasPermissionAny': 'GLOBAL',
1664 1666 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
1665 1667 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
1666 1668 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
1667 1669 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
1668 1670 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
1669 1671 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
1670 1672 }.get(cls_name, '?:%s' % cls_name)
1671 1673
1672 1674 def check_permissions(self, user):
1673 1675 """Dummy function for overriding"""
1674 1676 raise Exception('You have to write this function in a child class')
1675 1677
1676 1678
1677 1679 class HasPermissionAll(PermsFunction):
1678 1680 def check_permissions(self, user):
1679 1681 perms = user.permissions_with_scope({})
1680 1682 if self.required_perms.issubset(perms.get('global')):
1681 1683 return True
1682 1684 return False
1683 1685
1684 1686
1685 1687 class HasPermissionAny(PermsFunction):
1686 1688 def check_permissions(self, user):
1687 1689 perms = user.permissions_with_scope({})
1688 1690 if self.required_perms.intersection(perms.get('global')):
1689 1691 return True
1690 1692 return False
1691 1693
1692 1694
1693 1695 class HasRepoPermissionAll(PermsFunction):
1694 1696 def __call__(self, repo_name=None, check_location='', user=None):
1695 1697 self.repo_name = repo_name
1696 1698 return super(HasRepoPermissionAll, self).__call__(check_location, user)
1697 1699
1698 1700 def _get_repo_name(self):
1699 1701 if not self.repo_name:
1700 1702 _request = self._get_request()
1701 1703 self.repo_name = get_repo_slug(_request)
1702 1704 return self.repo_name
1703 1705
1704 1706 def check_permissions(self, user):
1705 1707 self.repo_name = self._get_repo_name()
1706 1708 perms = user.permissions
1707 1709 try:
1708 1710 user_perms = set([perms['repositories'][self.repo_name]])
1709 1711 except KeyError:
1710 1712 return False
1711 1713 if self.required_perms.issubset(user_perms):
1712 1714 return True
1713 1715 return False
1714 1716
1715 1717
1716 1718 class HasRepoPermissionAny(PermsFunction):
1717 1719 def __call__(self, repo_name=None, check_location='', user=None):
1718 1720 self.repo_name = repo_name
1719 1721 return super(HasRepoPermissionAny, self).__call__(check_location, user)
1720 1722
1721 1723 def _get_repo_name(self):
1722 1724 if not self.repo_name:
1723 1725 _request = self._get_request()
1724 1726 self.repo_name = get_repo_slug(_request)
1725 1727 return self.repo_name
1726 1728
1727 1729 def check_permissions(self, user):
1728 1730 self.repo_name = self._get_repo_name()
1729 1731 perms = user.permissions
1730 1732 try:
1731 1733 user_perms = set([perms['repositories'][self.repo_name]])
1732 1734 except KeyError:
1733 1735 return False
1734 1736 if self.required_perms.intersection(user_perms):
1735 1737 return True
1736 1738 return False
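
# The PermsFunction classes are called directly instead of being used as
# decorators (a sketch; the repo name and check location are hypothetical):
#
#   can_read = HasRepoPermissionAny(
#       'repository.read', 'repository.write',
#       'repository.admin')('mygroup/myrepo', 'my check location')
#
# When no explicit user is passed, the AuthUser is resolved from the current
# request; when no repo_name is passed, it falls back to the slug extracted
# from that request.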
1737 1739
1738 1740
1739 1741 class HasRepoGroupPermissionAny(PermsFunction):
1740 1742 def __call__(self, group_name=None, check_location='', user=None):
1741 1743 self.repo_group_name = group_name
1742 1744 return super(HasRepoGroupPermissionAny, self).__call__(
1743 1745 check_location, user)
1744 1746
1745 1747 def check_permissions(self, user):
1746 1748 perms = user.permissions
1747 1749 try:
1748 1750 user_perms = set(
1749 1751 [perms['repositories_groups'][self.repo_group_name]])
1750 1752 except KeyError:
1751 1753 return False
1752 1754 if self.required_perms.intersection(user_perms):
1753 1755 return True
1754 1756 return False
1755 1757
1756 1758
1757 1759 class HasRepoGroupPermissionAll(PermsFunction):
1758 1760 def __call__(self, group_name=None, check_location='', user=None):
1759 1761 self.repo_group_name = group_name
1760 1762 return super(HasRepoGroupPermissionAll, self).__call__(
1761 1763 check_location, user)
1762 1764
1763 1765 def check_permissions(self, user):
1764 1766 perms = user.permissions
1765 1767 try:
1766 1768 user_perms = set(
1767 1769 [perms['repositories_groups'][self.repo_group_name]])
1768 1770 except KeyError:
1769 1771 return False
1770 1772 if self.required_perms.issubset(user_perms):
1771 1773 return True
1772 1774 return False
1773 1775
1774 1776
1775 1777 class HasUserGroupPermissionAny(PermsFunction):
1776 1778 def __call__(self, user_group_name=None, check_location='', user=None):
1777 1779 self.user_group_name = user_group_name
1778 1780 return super(HasUserGroupPermissionAny, self).__call__(
1779 1781 check_location, user)
1780 1782
1781 1783 def check_permissions(self, user):
1782 1784 perms = user.permissions
1783 1785 try:
1784 1786 user_perms = set([perms['user_groups'][self.user_group_name]])
1785 1787 except KeyError:
1786 1788 return False
1787 1789 if self.required_perms.intersection(user_perms):
1788 1790 return True
1789 1791 return False
1790 1792
1791 1793
1792 1794 class HasUserGroupPermissionAll(PermsFunction):
1793 1795 def __call__(self, user_group_name=None, check_location='', user=None):
1794 1796 self.user_group_name = user_group_name
1795 1797 return super(HasUserGroupPermissionAll, self).__call__(
1796 1798 check_location, user)
1797 1799
1798 1800 def check_permissions(self, user):
1799 1801 perms = user.permissions
1800 1802 try:
1801 1803 user_perms = set([perms['user_groups'][self.user_group_name]])
1802 1804 except KeyError:
1803 1805 return False
1804 1806 if self.required_perms.issubset(user_perms):
1805 1807 return True
1806 1808 return False
1807 1809
1808 1810
1809 1811 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
1810 1812 class HasPermissionAnyMiddleware(object):
1811 1813 def __init__(self, *perms):
1812 1814 self.required_perms = set(perms)
1813 1815
1814 1816 def __call__(self, user, repo_name):
1815 1817 # repo_name MUST be unicode, since we handle keys in permission
1816 1818 # dict by unicode
1817 1819 repo_name = safe_unicode(repo_name)
1818 1820 user = AuthUser(user.user_id)
1819 1821 log.debug(
1820 1822 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
1821 1823 self.required_perms, user, repo_name)
1822 1824
1823 1825 if self.check_permissions(user, repo_name):
1824 1826 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
1825 1827 repo_name, user, 'PermissionMiddleware')
1826 1828 return True
1827 1829
1828 1830 else:
1829 1831 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
1830 1832 repo_name, user, 'PermissionMiddleware')
1831 1833 return False
1832 1834
1833 1835 def check_permissions(self, user, repo_name):
1834 1836 perms = user.permissions_with_scope({'repo_name': repo_name})
1835 1837
1836 1838 try:
1837 1839 user_perms = set([perms['repositories'][repo_name]])
1838 1840 except Exception:
1839 1841 log.exception('Error while accessing user permissions')
1840 1842 return False
1841 1843
1842 1844 if self.required_perms.intersection(user_perms):
1843 1845 return True
1844 1846 return False
1845 1847
1846 1848
1847 1849 # SPECIAL VERSION TO HANDLE API AUTH
1848 1850 class _BaseApiPerm(object):
1849 1851 def __init__(self, *perms):
1850 1852 self.required_perms = set(perms)
1851 1853
1852 1854 def __call__(self, check_location=None, user=None, repo_name=None,
1853 1855 group_name=None, user_group_name=None):
1854 1856 cls_name = self.__class__.__name__
1855 1857 check_scope = 'global:%s' % (self.required_perms,)
1856 1858 if repo_name:
1857 1859 check_scope += ', repo_name:%s' % (repo_name,)
1858 1860
1859 1861 if group_name:
1860 1862 check_scope += ', repo_group_name:%s' % (group_name,)
1861 1863
1862 1864 if user_group_name:
1863 1865 check_scope += ', user_group_name:%s' % (user_group_name,)
1864 1866
1865 1867 log.debug(
1866 1868 'checking cls:%s %s %s @ %s'
1867 1869 % (cls_name, self.required_perms, check_scope, check_location))
1868 1870 if not user:
1869 1871 log.debug('Empty User passed into arguments')
1870 1872 return False
1871 1873
1872 1874 # process user
1873 1875 if not isinstance(user, AuthUser):
1874 1876 user = AuthUser(user.user_id)
1875 1877 if not check_location:
1876 1878 check_location = 'unspecified'
1877 1879 if self.check_permissions(user.permissions, repo_name, group_name,
1878 1880 user_group_name):
1879 1881 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1880 1882 check_scope, user, check_location)
1881 1883 return True
1882 1884
1883 1885 else:
1884 1886 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1885 1887 check_scope, user, check_location)
1886 1888 return False
1887 1889
1888 1890 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1889 1891 user_group_name=None):
1890 1892 """
1891 1893 implement in a child class; should return True if permissions are ok,
1892 1894 False otherwise
1893 1895
1894 1896 :param perm_defs: dict with permission definitions
1895 1897 :param repo_name: repo name
1896 1898 """
1897 1899 raise NotImplementedError()
1898 1900
1899 1901
1900 1902 class HasPermissionAllApi(_BaseApiPerm):
1901 1903 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1902 1904 user_group_name=None):
1903 1905 if self.required_perms.issubset(perm_defs.get('global')):
1904 1906 return True
1905 1907 return False
1906 1908
1907 1909
1908 1910 class HasPermissionAnyApi(_BaseApiPerm):
1909 1911 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1910 1912 user_group_name=None):
1911 1913 if self.required_perms.intersection(perm_defs.get('global')):
1912 1914 return True
1913 1915 return False
1914 1916
1915 1917
1916 1918 class HasRepoPermissionAllApi(_BaseApiPerm):
1917 1919 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1918 1920 user_group_name=None):
1919 1921 try:
1920 1922 _user_perms = set([perm_defs['repositories'][repo_name]])
1921 1923 except KeyError:
1922 1924 log.warning(traceback.format_exc())
1923 1925 return False
1924 1926 if self.required_perms.issubset(_user_perms):
1925 1927 return True
1926 1928 return False
1927 1929
1928 1930
1929 1931 class HasRepoPermissionAnyApi(_BaseApiPerm):
1930 1932 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1931 1933 user_group_name=None):
1932 1934 try:
1933 1935 _user_perms = set([perm_defs['repositories'][repo_name]])
1934 1936 except KeyError:
1935 1937 log.warning(traceback.format_exc())
1936 1938 return False
1937 1939 if self.required_perms.intersection(_user_perms):
1938 1940 return True
1939 1941 return False
1940 1942
1941 1943
1942 1944 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
1943 1945 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1944 1946 user_group_name=None):
1945 1947 try:
1946 1948 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1947 1949 except KeyError:
1948 1950 log.warning(traceback.format_exc())
1949 1951 return False
1950 1952 if self.required_perms.intersection(_user_perms):
1951 1953 return True
1952 1954 return False
1953 1955
1954 1956
1955 1957 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
1956 1958 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1957 1959 user_group_name=None):
1958 1960 try:
1959 1961 _user_perms = set([perm_defs['repositories_groups'][group_name]])
1960 1962 except KeyError:
1961 1963 log.warning(traceback.format_exc())
1962 1964 return False
1963 1965 if self.required_perms.issubset(_user_perms):
1964 1966 return True
1965 1967 return False
1966 1968
1967 1969
1968 1970 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
1969 1971 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
1970 1972 user_group_name=None):
1971 1973 try:
1972 1974 _user_perms = set([perm_defs['user_groups'][user_group_name]])
1973 1975 except KeyError:
1974 1976 log.warning(traceback.format_exc())
1975 1977 return False
1976 1978 if self.required_perms.intersection(_user_perms):
1977 1979 return True
1978 1980 return False
1979 1981
1980 1982
1981 1983 def check_ip_access(source_ip, allowed_ips=None):
1982 1984 """
1983 1985 Checks if source_ip falls within any of the allowed_ips networks (example below).
1984 1986
1985 1987 :param source_ip:
1986 1988 :param allowed_ips: list of allowed ips together with mask
1987 1989 """
1988 1990 log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
1989 1991 source_ip_address = ipaddress.ip_address(source_ip)
1990 1992 if isinstance(allowed_ips, (tuple, list, set)):
1991 1993 for ip in allowed_ips:
1992 1994 try:
1993 1995 network_address = ipaddress.ip_network(ip, strict=False)
1994 1996 if source_ip_address in network_address:
1995 1997 log.debug('IP %s is in network %s' %
1996 1998 (source_ip_address, network_address))
1997 1999 return True
1998 2000 # in any case where we cannot determine the IP, don't crash, just
1999 2001 # skip it and log it as an error; we still want to respond with
2000 2002 # forbidden when a bad IP is sent
2001 2003 except Exception:
2002 2004 log.error(traceback.format_exc())
2003 2005 continue
2004 2006 return False
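
# Example behaviour of check_ip_access (a sketch with made-up addresses):
#
#   check_ip_access(u'192.168.1.12', [u'192.168.1.0/24'])   # True
#   check_ip_access(u'10.0.0.5', [u'192.168.1.0/24'])       # False
#   check_ip_access(u'10.0.0.5', None)                      # False (no iterable)
#
# AuthUser.get_allowed_ips() falls back to {'0.0.0.0/0', '::/0'} when no rules
# are defined, so by default every source address passes this check.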
2005 2007
2006 2008
2007 2009 def get_cython_compat_decorator(wrapper, func):
2008 2010 """
2009 2011 Creates a cython compatible decorator. The previously used
2010 2012 decorator.decorator() function seems to be incompatible with cython.
2011 2013
2012 2014 :param wrapper: __wrapper method of the decorator class
2013 2015 :param func: decorated function
2014 2016 """
2015 2017 @wraps(func)
2016 2018 def local_wrapper(*args, **kwds):
2017 2019 return wrapper(func, *args, **kwds)
2018 2020 local_wrapper.__wrapped__ = func
2019 2021 return local_wrapper
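
# Minimal sketch of how the decorator classes above use this helper
# (MyCheck is hypothetical):
#
#   class MyCheck(object):
#       def __call__(self, func):
#           return get_cython_compat_decorator(self.__wrapper, func)
#
#       def __wrapper(self, func, *fargs, **fkwargs):
#           # run the checks here, then call through
#           return func(*fargs, **fkwargs)
#
# @wraps keeps the wrapped function's name and docstring, and __wrapped__
# exposes the original callable for introspection.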
2020 2022
2021 2023
@@ -1,220 +1,220 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import hashlib
22 22 import itsdangerous
23 23 import logging
24 24 import os
25 25 import requests
26 26 from dogpile.core import ReadWriteMutex
27 27
28 28 import rhodecode.lib.helpers as h
29 29 from rhodecode.lib.auth import HasRepoPermissionAny
30 30 from rhodecode.lib.ext_json import json
31 31 from rhodecode.model.db import User
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35 LOCK = ReadWriteMutex()
36 36
37 37 STATE_PUBLIC_KEYS = ['id', 'username', 'first_name', 'last_name',
38 38 'icon_link', 'display_name', 'display_link']
39 39
40 40
41 41 class ChannelstreamException(Exception):
42 42 pass
43 43
44 44
45 45 class ChannelstreamConnectionException(ChannelstreamException):
46 46 pass
47 47
48 48
49 49 class ChannelstreamPermissionException(ChannelstreamException):
50 50 pass
51 51
52 52
53 53 def channelstream_request(config, payload, endpoint, raise_exc=True):
54 54 signer = itsdangerous.TimestampSigner(config['secret'])
55 55 sig_for_server = signer.sign(endpoint)
56 56 secret_headers = {'x-channelstream-secret': sig_for_server,
57 57 'x-channelstream-endpoint': endpoint,
58 58 'Content-Type': 'application/json'}
59 59 req_url = 'http://{}{}'.format(config['server'], endpoint)
60 60 response = None
61 61 try:
62 62 response = requests.post(req_url, data=json.dumps(payload),
63 63 headers=secret_headers).json()
64 64 except requests.ConnectionError:
65 65 log.exception('ConnectionError happened')
66 66 if raise_exc:
67 67 raise ChannelstreamConnectionException()
68 68 except Exception:
69 69 log.exception('Exception related to channelstream happened')
70 70 if raise_exc:
71 71 raise ChannelstreamConnectionException()
72 72 return response
73 73
74 74
75 75 def get_user_data(user_id):
76 76 user = User.get(user_id)
77 77 return {
78 78 'id': user.user_id,
79 79 'username': user.username,
80 'first_name': user.name,
81 'last_name': user.lastname,
80 'first_name': user.first_name,
81 'last_name': user.last_name,
82 82 'icon_link': h.gravatar_url(user.email, 60),
83 83 'display_name': h.person(user, 'username_or_name_or_email'),
84 84 'display_link': h.link_to_user(user),
85 85 'notifications': user.user_data.get('notification_status', True)
86 86 }
87 87
88 88
89 89 def broadcast_validator(channel_name):
90 90 """ checks if user can access the broadcast channel """
91 91 if channel_name == 'broadcast':
92 92 return True
93 93
94 94
95 95 def repo_validator(channel_name):
96 96 """ checks if user can access the broadcast channel """
97 97 channel_prefix = '/repo$'
98 98 if channel_name.startswith(channel_prefix):
99 99 elements = channel_name[len(channel_prefix):].split('$')
100 100 repo_name = elements[0]
101 101 can_access = HasRepoPermissionAny(
102 102 'repository.read',
103 103 'repository.write',
104 104 'repository.admin')(repo_name)
105 105 log.debug('permission check for {} channel '
106 106 'resulted in {}'.format(repo_name, can_access))
107 107 if can_access:
108 108 return True
109 109 return False
110 110
111 111
112 112 def check_channel_permissions(channels, plugin_validators, should_raise=True):
113 113 valid_channels = []
114 114
115 115 validators = [broadcast_validator, repo_validator]
116 116 if plugin_validators:
117 117 validators.extend(plugin_validators)
118 118 for channel_name in channels:
119 119 is_valid = False
120 120 for validator in validators:
121 121 if validator(channel_name):
122 122 is_valid = True
123 123 break
124 124 if is_valid:
125 125 valid_channels.append(channel_name)
126 126 else:
127 127 if should_raise:
128 128 raise ChannelstreamPermissionException()
129 129 return valid_channels
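
# Sketch of the permission-check flow (the channel names are hypothetical):
#
#   channels = ['broadcast', '/repo$mygroup/myrepo$chat', 'private-stuff']
#   allowed = check_channel_permissions(
#       channels, get_connection_validators(registry), should_raise=False)
#
# Each channel must be accepted by at least one validator; with
# should_raise=True a single rejected channel raises
# ChannelstreamPermissionException instead of being silently dropped.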
130 130
131 131
132 132 def get_channels_info(self, channels):
133 133 payload = {'channels': channels}
134 134 # gather persistence info
135 135 return channelstream_request(self._config(), payload, '/info')
136 136
137 137
138 138 def parse_channels_info(info_result, include_channel_info=None):
139 139 """
140 140 Returns data that contains only secure information that can be
141 141 presented to clients
142 142 """
143 143 include_channel_info = include_channel_info or []
144 144
145 145 user_state_dict = {}
146 146 for userinfo in info_result['users']:
147 147 user_state_dict[userinfo['user']] = {
148 148 k: v for k, v in userinfo['state'].items()
149 149 if k in STATE_PUBLIC_KEYS
150 150 }
151 151
152 152 channels_info = {}
153 153
154 154 for c_name, c_info in info_result['channels'].items():
155 155 if c_name not in include_channel_info:
156 156 continue
157 157 connected_list = []
158 158 for userinfo in c_info['users']:
159 159 connected_list.append({
160 160 'user': userinfo['user'],
161 161 'state': user_state_dict[userinfo['user']]
162 162 })
163 163 channels_info[c_name] = {'users': connected_list,
164 164 'history': c_info['history']}
165 165
166 166 return channels_info
167 167
168 168
169 169 def log_filepath(history_location, channel_name):
170 170 hasher = hashlib.sha256()
171 171 hasher.update(channel_name.encode('utf8'))
172 172 filename = '{}.log'.format(hasher.hexdigest())
173 173 filepath = os.path.join(history_location, filename)
174 174 return filepath
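
# For example (a sketch; the history location and channel name are made up):
#
#   log_filepath('/var/cache/channelstream', u'/repo$mygroup/myrepo$chat')
#   # -> '/var/cache/channelstream/<sha256 hexdigest of the channel>.log'
#
# Hashing the channel name keeps arbitrary channel strings safe to use as
# file names on disk.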
175 175
176 176
177 177 def read_history(history_location, channel_name):
178 178 filepath = log_filepath(history_location, channel_name)
179 179 if not os.path.exists(filepath):
180 180 return []
181 181 history_lines_limit = -100
182 182 history = []
183 183 with open(filepath, 'rb') as f:
184 184 for line in f.readlines()[history_lines_limit:]:
185 185 try:
186 186 history.append(json.loads(line))
187 187 except Exception:
188 188 log.exception('Failed to load history')
189 189 return history
190 190
191 191
192 192 def update_history_from_logs(config, channels, payload):
193 193 history_location = config.get('history.location')
194 194 for channel in channels:
195 195 history = read_history(history_location, channel)
196 196 payload['channels_info'][channel]['history'] = history
197 197
198 198
199 199 def write_history(config, message):
200 200 """ writes a messge to a base64encoded filename """
201 201 history_location = config.get('history.location')
202 202 if not os.path.exists(history_location):
203 203 return
204 204 try:
205 205 LOCK.acquire_write_lock()
206 206 filepath = log_filepath(history_location, message['channel'])
207 207 with open(filepath, 'ab') as f:
208 208 json.dump(message, f)
209 209 f.write('\n')
210 210 finally:
211 211 LOCK.release_write_lock()
212 212
213 213
214 214 def get_connection_validators(registry):
215 215 validators = []
216 216 for k, config in registry.rhodecode_plugins.iteritems():
217 217 validator = config.get('channelstream', {}).get('connect_validator')
218 218 if validator:
219 219 validators.append(validator)
220 220 return validators
@@ -1,2035 +1,2035 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helper functions
23 23
24 24 Consists of functions typically used within templates, but also
25 25 available to Controllers. This module is available to both as 'h'.
26 26 """
27 27
28 28 import random
29 29 import hashlib
30 30 import StringIO
31 31 import urllib
32 32 import math
33 33 import logging
34 34 import re
35 35 import urlparse
36 36 import time
37 37 import string
38 38 import hashlib
39 39 from collections import OrderedDict
40 40
41 41 import pygments
42 42 import itertools
43 43 import fnmatch
44 44
45 45 from datetime import datetime
46 46 from functools import partial
47 47 from pygments.formatters.html import HtmlFormatter
48 48 from pygments import highlight as code_highlight
49 49 from pygments.lexers import (
50 50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
51 51 from pylons import url as pylons_url
52 52 from pylons.i18n.translation import _, ungettext
53 53 from pyramid.threadlocal import get_current_request
54 54
55 55 from webhelpers.html import literal, HTML, escape
56 56 from webhelpers.html.tools import *
57 57 from webhelpers.html.builder import make_tag
58 58 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
59 59 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
60 60 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
61 61 submit, text, password, textarea, title, ul, xml_declaration, radio
62 62 from webhelpers.html.tools import auto_link, button_to, highlight, \
63 63 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
64 64 from webhelpers.pylonslib import Flash as _Flash
65 65 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
66 66 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
67 67 replace_whitespace, urlify, truncate, wrap_paragraphs
68 68 from webhelpers.date import time_ago_in_words
69 69 from webhelpers.paginate import Page as _Page
70 70 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
71 71 convert_boolean_attrs, NotGiven, _make_safe_id_component
72 72 from webhelpers2.number import format_byte_size
73 73
74 74 from rhodecode.lib.action_parser import action_parser
75 75 from rhodecode.lib.ext_json import json
76 76 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
77 77 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
78 78 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
79 79 AttributeDict, safe_int, md5, md5_safe
80 80 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
81 81 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
82 82 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
83 83 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
84 84 from rhodecode.model.changeset_status import ChangesetStatusModel
85 85 from rhodecode.model.db import Permission, User, Repository
86 86 from rhodecode.model.repo_group import RepoGroupModel
87 87 from rhodecode.model.settings import IssueTrackerSettingsModel
88 88
89 89 log = logging.getLogger(__name__)
90 90
91 91
92 92 DEFAULT_USER = User.DEFAULT_USER
93 93 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
94 94
95 95
96 96 def url(*args, **kw):
97 97 return pylons_url(*args, **kw)
98 98
99 99
100 100 def pylons_url_current(*args, **kw):
101 101 """
102 102 This function overrides pylons.url.current() which returns the current
103 103 path so that it will also work from a pyramid-only context. This
104 104 should be removed once the port to pyramid is complete.
105 105 """
106 106 if not args and not kw:
107 107 request = get_current_request()
108 108 return request.path
109 109 return pylons_url.current(*args, **kw)
110 110
111 111 url.current = pylons_url_current
112 112
113 113
114 114 def url_replace(**qargs):
115 115 """ Returns the current request url while replacing query string args """
116 116
117 117 request = get_current_request()
118 118 new_args = request.GET.mixed()
119 119 new_args.update(qargs)
120 120 return url('', **new_args)
121 121
122 122
123 123 def asset(path, ver=None, **kwargs):
124 124 """
125 125 Helper to generate a static asset file path for rhodecode assets
126 126
127 127 eg. h.asset('images/image.png', ver='3923')
128 128
129 129 :param path: path of asset
130 130 :param ver: optional version query param to append as ?ver=
131 131 """
132 132 request = get_current_request()
133 133 query = {}
134 134 query.update(kwargs)
135 135 if ver:
136 136 query = {'ver': ver}
137 137 return request.static_path(
138 138 'rhodecode:public/{}'.format(path), _query=query)
139 139
140 140
141 141 default_html_escape_table = {
142 142 ord('&'): u'&amp;',
143 143 ord('<'): u'&lt;',
144 144 ord('>'): u'&gt;',
145 145 ord('"'): u'&quot;',
146 146 ord("'"): u'&#39;',
147 147 }
148 148
149 149
150 150 def html_escape(text, html_escape_table=default_html_escape_table):
151 151 """Produce entities within text."""
152 152 return text.translate(html_escape_table)
153 153
154 154
155 155 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
156 156 """
157 157 Truncate string ``s`` at the first occurrence of ``sub``.
158 158
159 159 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
160 160 """
161 161 suffix_if_chopped = suffix_if_chopped or ''
162 162 pos = s.find(sub)
163 163 if pos == -1:
164 164 return s
165 165
166 166 if inclusive:
167 167 pos += len(sub)
168 168
169 169 chopped = s[:pos]
170 170 left = s[pos:].strip()
171 171
172 172 if left and suffix_if_chopped:
173 173 chopped += suffix_if_chopped
174 174
175 175 return chopped
176 176
177 177
178 178 def shorter(text, size=20):
179 179 postfix = '...'
180 180 if len(text) > size:
181 181 return text[:size - len(postfix)] + postfix
182 182 return text
183 183
184 184
185 185 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
186 186 """
187 187 Reset button
188 188 """
189 189 _set_input_attrs(attrs, type, name, value)
190 190 _set_id_attr(attrs, id, name)
191 191 convert_boolean_attrs(attrs, ["disabled"])
192 192 return HTML.input(**attrs)
193 193
194 194 reset = _reset
195 195 safeid = _make_safe_id_component
196 196
197 197
198 198 def branding(name, length=40):
199 199 return truncate(name, length, indicator="")
200 200
201 201
202 202 def FID(raw_id, path):
203 203 """
204 204 Creates a unique ID for a filenode based on a hash of its path and commit;
205 205 it's safe to use in urls
206 206
207 207 :param raw_id:
208 208 :param path:
209 209 """
210 210
211 211 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
212 212
213 213
214 214 class _GetError(object):
215 215 """Get error from form_errors, and represent it as span wrapped error
216 216 message
217 217
218 218 :param field_name: field to fetch errors for
219 219 :param form_errors: form errors dict
220 220 """
221 221
222 222 def __call__(self, field_name, form_errors):
223 223 tmpl = """<span class="error_msg">%s</span>"""
224 224 if form_errors and field_name in form_errors:
225 225 return literal(tmpl % form_errors.get(field_name))
226 226
227 227 get_error = _GetError()
228 228
229 229
230 230 class _ToolTip(object):
231 231
232 232 def __call__(self, tooltip_title, trim_at=50):
233 233 """
234 234 Special function just to wrap our text into nicely formatted,
235 235 auto-wrapped text
236 236
237 237 :param tooltip_title:
238 238 """
239 239 tooltip_title = escape(tooltip_title)
240 240 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
241 241 return tooltip_title
242 242 tooltip = _ToolTip()
243 243
244 244
245 245 def files_breadcrumbs(repo_name, commit_id, file_path):
246 246 if isinstance(file_path, str):
247 247 file_path = safe_unicode(file_path)
248 248
249 249 # TODO: johbo: Is this always a url like path, or is this operating
250 250 # system dependent?
251 251 path_segments = file_path.split('/')
252 252
253 253 repo_name_html = escape(repo_name)
254 254 if len(path_segments) == 1 and path_segments[0] == '':
255 255 url_segments = [repo_name_html]
256 256 else:
257 257 url_segments = [
258 258 link_to(
259 259 repo_name_html,
260 260 url('files_home',
261 261 repo_name=repo_name,
262 262 revision=commit_id,
263 263 f_path=''),
264 264 class_='pjax-link')]
265 265
266 266 last_cnt = len(path_segments) - 1
267 267 for cnt, segment in enumerate(path_segments):
268 268 if not segment:
269 269 continue
270 270 segment_html = escape(segment)
271 271
272 272 if cnt != last_cnt:
273 273 url_segments.append(
274 274 link_to(
275 275 segment_html,
276 276 url('files_home',
277 277 repo_name=repo_name,
278 278 revision=commit_id,
279 279 f_path='/'.join(path_segments[:cnt + 1])),
280 280 class_='pjax-link'))
281 281 else:
282 282 url_segments.append(segment_html)
283 283
284 284 return literal('/'.join(url_segments))
285 285
286 286
287 287 class CodeHtmlFormatter(HtmlFormatter):
288 288 """
289 289 Custom HTML formatter for source code
290 290 """
291 291
292 292 def wrap(self, source, outfile):
293 293 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
294 294
295 295 def _wrap_code(self, source):
296 296 for cnt, it in enumerate(source):
297 297 i, t = it
298 298 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
299 299 yield i, t
300 300
301 301 def _wrap_tablelinenos(self, inner):
302 302 dummyoutfile = StringIO.StringIO()
303 303 lncount = 0
304 304 for t, line in inner:
305 305 if t:
306 306 lncount += 1
307 307 dummyoutfile.write(line)
308 308
309 309 fl = self.linenostart
310 310 mw = len(str(lncount + fl - 1))
311 311 sp = self.linenospecial
312 312 st = self.linenostep
313 313 la = self.lineanchors
314 314 aln = self.anchorlinenos
315 315 nocls = self.noclasses
316 316 if sp:
317 317 lines = []
318 318
319 319 for i in range(fl, fl + lncount):
320 320 if i % st == 0:
321 321 if i % sp == 0:
322 322 if aln:
323 323 lines.append('<a href="#%s%d" class="special">%*d</a>' %
324 324 (la, i, mw, i))
325 325 else:
326 326 lines.append('<span class="special">%*d</span>' % (mw, i))
327 327 else:
328 328 if aln:
329 329 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
330 330 else:
331 331 lines.append('%*d' % (mw, i))
332 332 else:
333 333 lines.append('')
334 334 ls = '\n'.join(lines)
335 335 else:
336 336 lines = []
337 337 for i in range(fl, fl + lncount):
338 338 if i % st == 0:
339 339 if aln:
340 340 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
341 341 else:
342 342 lines.append('%*d' % (mw, i))
343 343 else:
344 344 lines.append('')
345 345 ls = '\n'.join(lines)
346 346
347 347 # in case you wonder about the seemingly redundant <div> here: since the
348 348 # content in the other cell also is wrapped in a div, some browsers in
349 349 # some configurations seem to mess up the formatting...
350 350 if nocls:
351 351 yield 0, ('<table class="%stable">' % self.cssclass +
352 352 '<tr><td><div class="linenodiv" '
353 353 'style="background-color: #f0f0f0; padding-right: 10px">'
354 354 '<pre style="line-height: 125%">' +
355 355 ls + '</pre></div></td><td id="hlcode" class="code">')
356 356 else:
357 357 yield 0, ('<table class="%stable">' % self.cssclass +
358 358 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
359 359 ls + '</pre></div></td><td id="hlcode" class="code">')
360 360 yield 0, dummyoutfile.getvalue()
361 361 yield 0, '</td></tr></table>'
362 362
363 363
364 364 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
365 365 def __init__(self, **kw):
366 366 # only show these line numbers if set
367 367 self.only_lines = kw.pop('only_line_numbers', [])
368 368 self.query_terms = kw.pop('query_terms', [])
369 369 self.max_lines = kw.pop('max_lines', 5)
370 370 self.line_context = kw.pop('line_context', 3)
371 371 self.url = kw.pop('url', None)
372 372
373 373 super(CodeHtmlFormatter, self).__init__(**kw)
374 374
375 375 def _wrap_code(self, source):
376 376 for cnt, it in enumerate(source):
377 377 i, t = it
378 378 t = '<pre>%s</pre>' % t
379 379 yield i, t
380 380
381 381 def _wrap_tablelinenos(self, inner):
382 382 yield 0, '<table class="code-highlight %stable">' % self.cssclass
383 383
384 384 last_shown_line_number = 0
385 385 current_line_number = 1
386 386
387 387 for t, line in inner:
388 388 if not t:
389 389 yield t, line
390 390 continue
391 391
392 392 if current_line_number in self.only_lines:
393 393 if last_shown_line_number + 1 != current_line_number:
394 394 yield 0, '<tr>'
395 395 yield 0, '<td class="line">...</td>'
396 396 yield 0, '<td id="hlcode" class="code"></td>'
397 397 yield 0, '</tr>'
398 398
399 399 yield 0, '<tr>'
400 400 if self.url:
401 401 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
402 402 self.url, current_line_number, current_line_number)
403 403 else:
404 404 yield 0, '<td class="line"><a href="">%i</a></td>' % (
405 405 current_line_number)
406 406 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
407 407 yield 0, '</tr>'
408 408
409 409 last_shown_line_number = current_line_number
410 410
411 411 current_line_number += 1
412 412
413 413
414 414 yield 0, '</table>'
415 415
416 416
417 417 def extract_phrases(text_query):
418 418 """
419 419 Extracts phrases from a search term string, making sure phrases
420 420 contained in double quotes are kept together, and discarding empty
421 421 or whitespace-only values, eg.
422 422
423 423 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
424 424
425 425 """
426 426
427 427 in_phrase = False
428 428 buf = ''
429 429 phrases = []
430 430 for char in text_query:
431 431 if in_phrase:
432 432 if char == '"': # end phrase
433 433 phrases.append(buf)
434 434 buf = ''
435 435 in_phrase = False
436 436 continue
437 437 else:
438 438 buf += char
439 439 continue
440 440 else:
441 441 if char == '"': # start phrase
442 442 in_phrase = True
443 443 phrases.append(buf)
444 444 buf = ''
445 445 continue
446 446 elif char == ' ':
447 447 phrases.append(buf)
448 448 buf = ''
449 449 continue
450 450 else:
451 451 buf += char
452 452
453 453 phrases.append(buf)
454 454 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
455 455 return phrases
456 456
457 457
458 458 def get_matching_offsets(text, phrases):
459 459 """
460 460 Returns a list of string offsets in `text` that the list of `phrases` match
461 461
462 462 >>> get_matching_offsets('some text here', ['some', 'here'])
463 463 [(0, 4), (10, 14)]
464 464
465 465 """
466 466 offsets = []
467 467 for phrase in phrases:
468 468 for match in re.finditer(phrase, text):
469 469 offsets.append((match.start(), match.end()))
470 470
471 471 return offsets
472 472
473 473
474 474 def normalize_text_for_matching(x):
475 475 """
476 476 Replaces all non-alphanumeric characters with spaces and lower-cases the
477 477 string; useful for comparing two text strings without punctuation
478 478 """
479 479 return re.sub(r'[^\w]', ' ', x.lower())
480 480
481 481
482 482 def get_matching_line_offsets(lines, terms):
483 483 """ Return a set of `lines` indices (starting from 1) matching a
484 484 text search query, along with `context` lines above/below matching lines
485 485
486 486 :param lines: list of strings representing lines
487 487 :param terms: search term string to match in lines eg. 'some text'
488 488 :param context: number of lines above/below a matching line to add to result
489 489 :param max_lines: cut off for lines of interest
490 490 eg.
491 491
492 492 text = '''
493 493 words words words
494 494 words words words
495 495 some text some
496 496 words words words
497 497 words words words
498 498 text here what
499 499 '''
500 500 get_matching_line_offsets(text, 'text', context=1)
501 501 {3: [(5, 9)], 6: [(0, 4)]}
502 502
503 503 """
504 504 matching_lines = {}
505 505 phrases = [normalize_text_for_matching(phrase)
506 506 for phrase in extract_phrases(terms)]
507 507
508 508 for line_index, line in enumerate(lines, start=1):
509 509 match_offsets = get_matching_offsets(
510 510 normalize_text_for_matching(line), phrases)
511 511 if match_offsets:
512 512 matching_lines[line_index] = match_offsets
513 513
514 514 return matching_lines
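# Illustrative usage sketch (not part of the original file; example values
# are assumed): the search helpers above are typically combined like this,
# with `extract_phrases` splitting the query and `get_matching_line_offsets`
# mapping 1-based line numbers to match offsets.
#
# >>> extract_phrases('some text "a phrase" more')
# ['some', 'text', 'a phrase', 'more']
# >>> get_matching_line_offsets(
# ...     ['words words', 'some text here', 'more words'], 'text')
# {2: [(5, 9)]}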
515 515
516 516
517 517 def hsv_to_rgb(h, s, v):
518 518 """ Convert hsv color values to rgb """
519 519
520 520 if s == 0.0:
521 521 return v, v, v
522 522 i = int(h * 6.0) # XXX assume int() truncates!
523 523 f = (h * 6.0) - i
524 524 p = v * (1.0 - s)
525 525 q = v * (1.0 - s * f)
526 526 t = v * (1.0 - s * (1.0 - f))
527 527 i = i % 6
528 528 if i == 0:
529 529 return v, t, p
530 530 if i == 1:
531 531 return q, v, p
532 532 if i == 2:
533 533 return p, v, t
534 534 if i == 3:
535 535 return p, q, v
536 536 if i == 4:
537 537 return t, p, v
538 538 if i == 5:
539 539 return v, p, q
540 540
541 541
542 542 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
543 543 """
544 544 Generator for getting n evenly distributed colors using
545 545 hsv color and the golden ratio. It always returns the same order of colors
546 546
547 547 :param n: number of colors to generate
548 548 :param saturation: saturation of returned colors
549 549 :param lightness: lightness of returned colors
550 550 :returns: RGB tuple
551 551 """
552 552
553 553 golden_ratio = 0.618033988749895
554 554 h = 0.22717784590367374
555 555
556 556 for _ in xrange(n):
557 557 h += golden_ratio
558 558 h %= 1
559 559 HSV_tuple = [h, saturation, lightness]
560 560 RGB_tuple = hsv_to_rgb(*HSV_tuple)
561 561 yield map(lambda x: str(int(x * 256)), RGB_tuple)
562 562
563 563
564 564 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
565 565 """
566 566 Returns a function which, when called with an argument, returns a unique
567 567 color for that argument, eg.
568 568
569 569 :param n: number of colors to generate
570 570 :param saturation: saturation of returned colors
571 571 :param lightness: lightness of returned colors
572 572 :returns: css RGB string
573 573
574 574 >>> color_hash = color_hasher()
575 575 >>> color_hash('hello')
576 576 'rgb(34, 12, 59)'
577 577 >>> color_hash('hello')
578 578 'rgb(34, 12, 59)'
579 579 >>> color_hash('other')
580 580 'rgb(90, 224, 159)'
581 581 """
582 582
583 583 color_dict = {}
584 584 cgenerator = unique_color_generator(
585 585 saturation=saturation, lightness=lightness)
586 586
587 587 def get_color_string(thing):
588 588 if thing in color_dict:
589 589 col = color_dict[thing]
590 590 else:
591 591 col = color_dict[thing] = cgenerator.next()
592 592 return "rgb(%s)" % (', '.join(col))
593 593
594 594 return get_color_string
595 595
596 596
597 597 def get_lexer_safe(mimetype=None, filepath=None):
598 598 """
599 599 Tries to return a relevant pygments lexer using mimetype/filepath name,
600 600 defaulting to plain text if none could be found
601 601 """
602 602 lexer = None
603 603 try:
604 604 if mimetype:
605 605 lexer = get_lexer_for_mimetype(mimetype)
606 606 if not lexer:
607 607 lexer = get_lexer_for_filename(filepath)
608 608 except pygments.util.ClassNotFound:
609 609 pass
610 610
611 611 if not lexer:
612 612 lexer = get_lexer_by_name('text')
613 613
614 614 return lexer
615 615
616 616
617 617 def get_lexer_for_filenode(filenode):
618 618 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
619 619 return lexer
620 620
621 621
622 622 def pygmentize(filenode, **kwargs):
623 623 """
624 624 pygmentize function using pygments
625 625
626 626 :param filenode:
627 627 """
628 628 lexer = get_lexer_for_filenode(filenode)
629 629 return literal(code_highlight(filenode.content, lexer,
630 630 CodeHtmlFormatter(**kwargs)))
631 631
632 632
633 633 def is_following_repo(repo_name, user_id):
634 634 from rhodecode.model.scm import ScmModel
635 635 return ScmModel().is_following_repo(repo_name, user_id)
636 636
637 637
638 638 class _Message(object):
639 639 """A message returned by ``Flash.pop_messages()``.
640 640
641 641 Converting the message to a string returns the message text. Instances
642 642 also have the following attributes:
643 643
644 644 * ``message``: the message text.
645 645 * ``category``: the category specified when the message was created.
646 646 """
647 647
648 648 def __init__(self, category, message):
649 649 self.category = category
650 650 self.message = message
651 651
652 652 def __str__(self):
653 653 return self.message
654 654
655 655 __unicode__ = __str__
656 656
657 657 def __html__(self):
658 658 return escape(safe_unicode(self.message))
659 659
660 660
661 661 class Flash(_Flash):
662 662
663 663 def pop_messages(self):
664 664 """Return all accumulated messages and delete them from the session.
665 665
666 666 The return value is a list of ``Message`` objects.
667 667 """
668 668 from pylons import session
669 669
670 670 messages = []
671 671
672 672 # Pop the 'old' pylons flash messages. They are tuples of the form
673 673 # (category, message)
674 674 for cat, msg in session.pop(self.session_key, []):
675 675 messages.append(_Message(cat, msg))
676 676
677 677 # Pop the 'new' pyramid flash messages for each category as list
678 678 # of strings.
679 679 for cat in self.categories:
680 680 for msg in session.pop_flash(queue=cat):
681 681 messages.append(_Message(cat, msg))
682 682 # Map messages from the default queue to the 'notice' category.
683 683 for msg in session.pop_flash():
684 684 messages.append(_Message('notice', msg))
685 685
686 686 session.save()
687 687 return messages
688 688
689 689 def json_alerts(self):
690 690 payloads = []
691 691 messages = flash.pop_messages()
692 692 if messages:
693 693 for message in messages:
694 694 subdata = {}
695 695 if hasattr(message.message, 'rsplit'):
696 696 flash_data = message.message.rsplit('|DELIM|', 1)
697 697 org_message = flash_data[0]
698 698 if len(flash_data) > 1:
699 699 subdata = json.loads(flash_data[1])
700 700 else:
701 701 org_message = message.message
702 702 payloads.append({
703 703 'message': {
704 704 'message': u'{}'.format(org_message),
705 705 'level': message.category,
706 706 'force': True,
707 707 'subdata': subdata
708 708 }
709 709 })
710 710 return json.dumps(payloads)
711 711
712 712 flash = Flash()
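# Illustrative sketch (assumed example values, not part of the original
# file): json_alerts() splits each flashed message on '|DELIM|' and parses
# the trailing part as JSON "subdata", so a caller can attach structured
# data to a flash message roughly like this:
#
# >>> flash(u'Repository created|DELIM|{"redirect_url": "/foo"}',
# ...       category='success')
# >>> flash.json_alerts()  # -> JSON list with one payload, roughly:
# ... # [{"message": {"message": "Repository created", "level": "success",
# ... #               "force": true, "subdata": {"redirect_url": "/foo"}}}]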
713 713
714 714 #==============================================================================
715 715 # SCM FILTERS available via h.
716 716 #==============================================================================
717 717 from rhodecode.lib.vcs.utils import author_name, author_email
718 718 from rhodecode.lib.utils2 import credentials_filter, age as _age
719 719 from rhodecode.model.db import User, ChangesetStatus
720 720
721 721 age = _age
722 722 capitalize = lambda x: x.capitalize()
723 723 email = author_email
724 724 short_id = lambda x: x[:12]
725 725 hide_credentials = lambda x: ''.join(credentials_filter(x))
726 726
727 727
728 728 def age_component(datetime_iso, value=None, time_is_local=False):
729 729 title = value or format_date(datetime_iso)
730 730 tzinfo = '+00:00'
731 731
732 732 # detect if we have a timezone info, otherwise, add it
733 733 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
734 734 if time_is_local:
735 735 tzinfo = time.strftime("+%H:%M",
736 736 time.gmtime(
737 737 (datetime.now() - datetime.utcnow()).seconds + 1
738 738 )
739 739 )
740 740
741 741 return literal(
742 742 '<time class="timeago tooltip" '
743 743 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
744 744 datetime_iso, title, tzinfo))
745 745
746 746
747 747 def _shorten_commit_id(commit_id):
748 748 from rhodecode import CONFIG
749 749 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
750 750 return commit_id[:def_len]
751 751
752 752
753 753 def show_id(commit):
754 754 """
755 755 Configurable function that shows the commit ID;
756 756 by default it's r123:fffeeefffeee
757 757
758 758 :param commit: commit instance
759 759 """
760 760 from rhodecode import CONFIG
761 761 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
762 762
763 763 raw_id = _shorten_commit_id(commit.raw_id)
764 764 if show_idx:
765 765 return 'r%s:%s' % (commit.idx, raw_id)
766 766 else:
767 767 return '%s' % (raw_id, )
768 768
769 769
770 770 def format_date(date):
771 771 """
772 772 use a standardized formatting for dates used in RhodeCode
773 773
774 774 :param date: date/datetime object
775 775 :return: formatted date
776 776 """
777 777
778 778 if date:
779 779 _fmt = "%a, %d %b %Y %H:%M:%S"
780 780 return safe_unicode(date.strftime(_fmt))
781 781
782 782 return u""
783 783
784 784
785 785 class _RepoChecker(object):
786 786
787 787 def __init__(self, backend_alias):
788 788 self._backend_alias = backend_alias
789 789
790 790 def __call__(self, repository):
791 791 if hasattr(repository, 'alias'):
792 792 _type = repository.alias
793 793 elif hasattr(repository, 'repo_type'):
794 794 _type = repository.repo_type
795 795 else:
796 796 _type = repository
797 797 return _type == self._backend_alias
798 798
799 799 is_git = _RepoChecker('git')
800 800 is_hg = _RepoChecker('hg')
801 801 is_svn = _RepoChecker('svn')
802 802
803 803
804 804 def get_repo_type_by_name(repo_name):
805 805 repo = Repository.get_by_repo_name(repo_name)
806 806 return repo.repo_type
807 807
808 808
809 809 def is_svn_without_proxy(repository):
810 810 if is_svn(repository):
811 811 from rhodecode.model.settings import VcsSettingsModel
812 812 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
813 813 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
814 814 return False
815 815
816 816
817 817 def discover_user(author):
818 818 """
819 819 Tries to discover a RhodeCode User based on the author string. The author string
820 820 is typically `FirstName LastName <email@address.com>`
821 821 """
822 822
823 823 # if author is already an instance use it for extraction
824 824 if isinstance(author, User):
825 825 return author
826 826
827 827 # Valid email in the passed attribute, see if that user is in the system
828 828 _email = author_email(author)
829 829 if _email != '':
830 830 user = User.get_by_email(_email, case_insensitive=True, cache=True)
831 831 if user is not None:
832 832 return user
833 833
834 834 # Maybe it's a username? Try to extract it and fetch the user by username.
835 835 _author = author_name(author)
836 836 user = User.get_by_username(_author, case_insensitive=True, cache=True)
837 837 if user is not None:
838 838 return user
839 839
840 840 return None
841 841
842 842
843 843 def email_or_none(author):
844 844 # extract email from the commit string
845 845 _email = author_email(author)
846 846
847 847 # If we have an email, use it, otherwise
848 848 # see if it contains a username we can get an email from
849 849 if _email != '':
850 850 return _email
851 851 else:
852 852 user = User.get_by_username(
853 853 author_name(author), case_insensitive=True, cache=True)
854 854
855 855 if user is not None:
856 856 return user.email
857 857
858 858 # No valid email, not a valid user in the system, none!
859 859 return None
860 860
861 861
862 862 def link_to_user(author, length=0, **kwargs):
863 863 user = discover_user(author)
864 864 # user can be None, but if we have it already it means we can re-use it
865 865 # in the person() function, so we save one expensive query
866 866 if user:
867 867 author = user
868 868
869 869 display_person = person(author, 'username_or_name_or_email')
870 870 if length:
871 871 display_person = shorter(display_person, length)
872 872
873 873 if user:
874 874 return link_to(
875 875 escape(display_person),
876 876 route_path('user_profile', username=user.username),
877 877 **kwargs)
878 878 else:
879 879 return escape(display_person)
880 880
881 881
882 882 def person(author, show_attr="username_and_name"):
883 883 user = discover_user(author)
884 884 if user:
885 885 return getattr(user, show_attr)
886 886 else:
887 887 _author = author_name(author)
888 888 _email = email(author)
889 889 return _author or _email
890 890
891 891
892 892 def author_string(email):
893 893 if email:
894 894 user = User.get_by_email(email, case_insensitive=True, cache=True)
895 895 if user:
896 if user.firstname or user.lastname:
896 if user.first_name or user.last_name:
897 897 return '%s %s &lt;%s&gt;' % (
898 escape(user.firstname), escape(user.lastname), email)
898 user.first_name, user.last_name, email)
899 899 else:
900 900 return email
901 901 else:
902 902 return email
903 903 else:
904 904 return None
905 905
906 906
907 907 def person_by_id(id_, show_attr="username_and_name"):
908 908 # attr to return from fetched user
909 909 person_getter = lambda usr: getattr(usr, show_attr)
910 910
911 911 # maybe it's an ID?
912 912 if str(id_).isdigit() or isinstance(id_, int):
913 913 id_ = int(id_)
914 914 user = User.get(id_)
915 915 if user is not None:
916 916 return person_getter(user)
917 917 return id_
918 918
919 919
920 920 def gravatar_with_user(author, show_disabled=False):
921 921 from rhodecode.lib.utils import PartialRenderer
922 922 _render = PartialRenderer('base/base.mako')
923 923 return _render('gravatar_with_user', author, show_disabled=show_disabled)
924 924
925 925
926 926 def desc_stylize(value):
927 927 """
928 928 converts tags from value into html equivalent
929 929
930 930 :param value:
931 931 """
932 932 if not value:
933 933 return ''
934 934
935 935 value = re.sub(r'\[see\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
936 936 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
937 937 value = re.sub(r'\[license\ \=\>\ *([a-zA-Z0-9\/\=\?\&\ \:\/\.\-]*)\]',
938 938 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
939 939 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\>\ *([a-zA-Z0-9\-\/]*)\]',
940 940 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
941 941 value = re.sub(r'\[(lang|language)\ \=\>\ *([a-zA-Z\-\/\#\+]*)\]',
942 942 '<div class="metatag" tag="lang">\\2</div>', value)
943 943 value = re.sub(r'\[([a-z]+)\]',
944 944 '<div class="metatag" tag="\\1">\\1</div>', value)
945 945
946 946 return value
947 947
948 948
949 949 def escaped_stylize(value):
950 950 """
951 951 converts tags from value into html equivalent, but escaping its value first
952 952 """
953 953 if not value:
954 954 return ''
955 955
956 956 # Using the default webhelpers escape method, but we have to force it to
957 957 # plain unicode instead of a markup tag so it can be used in regex expressions
958 958 value = unicode(escape(safe_unicode(value)))
959 959
960 960 value = re.sub(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
961 961 '<div class="metatag" tag="see">see =&gt; \\1 </div>', value)
962 962 value = re.sub(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]',
963 963 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>', value)
964 964 value = re.sub(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]',
965 965 '<div class="metatag" tag="\\1">\\1 =&gt; <a href="/\\2">\\2</a></div>', value)
966 966 value = re.sub(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+]*)\]',
967 967 '<div class="metatag" tag="lang">\\2</div>', value)
968 968 value = re.sub(r'\[([a-z]+)\]',
969 969 '<div class="metatag" tag="\\1">\\1</div>', value)
970 970
971 971 return value
972 972
973 973
974 974 def bool2icon(value):
975 975 """
976 976 Returns the boolean value of a given value, represented as an html element
977 977 with classes that render icons
978 978
979 979 :param value: given value to convert to html node
980 980 """
981 981
982 982 if value: # does bool conversion
983 983 return HTML.tag('i', class_="icon-true")
984 984 else: # not true as bool
985 985 return HTML.tag('i', class_="icon-false")
986 986
987 987
988 988 #==============================================================================
989 989 # PERMS
990 990 #==============================================================================
991 991 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
992 992 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
993 993 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
994 994 csrf_token_key
995 995
996 996
997 997 #==============================================================================
998 998 # GRAVATAR URL
999 999 #==============================================================================
1000 1000 class InitialsGravatar(object):
1001 1001 def __init__(self, email_address, first_name, last_name, size=30,
1002 1002 background=None, text_color='#fff'):
1003 1003 self.size = size
1004 1004 self.first_name = first_name
1005 1005 self.last_name = last_name
1006 1006 self.email_address = email_address
1007 1007 self.background = background or self.str2color(email_address)
1008 1008 self.text_color = text_color
1009 1009
1010 1010 def get_color_bank(self):
1011 1011 """
1012 1012 returns a predefined list of colors that gravatars can use.
1013 1013 Those are randomized distinct colors that guarantee readability and
1014 1014 uniqueness.
1015 1015
1016 1016 generated with: http://phrogz.net/css/distinct-colors.html
1017 1017 """
1018 1018 return [
1019 1019 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1020 1020 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1021 1021 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1022 1022 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1023 1023 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1024 1024 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1025 1025 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1026 1026 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1027 1027 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1028 1028 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1029 1029 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1030 1030 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1031 1031 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1032 1032 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1033 1033 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1034 1034 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1035 1035 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1036 1036 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1037 1037 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1038 1038 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1039 1039 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1040 1040 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1041 1041 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1042 1042 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1043 1043 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1044 1044 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1045 1045 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1046 1046 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1047 1047 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1048 1048 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1049 1049 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1050 1050 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1051 1051 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1052 1052 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1053 1053 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1054 1054 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1055 1055 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1056 1056 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1057 1057 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1058 1058 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1059 1059 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1060 1060 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1061 1061 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1062 1062 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1063 1063 '#4f8c46', '#368dd9', '#5c0073'
1064 1064 ]
1065 1065
1066 1066 def rgb_to_hex_color(self, rgb_tuple):
1067 1067 """
1068 1068 Converts a passed rgb_tuple to a hex color.
1069 1069
1070 1070 :param rgb_tuple: tuple of 3 ints representing an rgb color
1071 1071 """
1072 1072 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1073 1073
1074 1074 def email_to_int_list(self, email_str):
1075 1075 """
1076 1076 Get every byte of the hex digest value of the email and turn it into an integer.
1077 1077 Each value is always between 0-255
1078 1078 """
1079 1079 digest = md5_safe(email_str.lower())
1080 1080 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1081 1081
1082 1082 def pick_color_bank_index(self, email_str, color_bank):
1083 1083 return self.email_to_int_list(email_str)[0] % len(color_bank)
1084 1084
1085 1085 def str2color(self, email_str):
1086 1086 """
1087 1087 Tries to map an email to a color using a stable algorithm
1088 1088
1089 1089 :param email_str:
1090 1090 """
1091 1091 color_bank = self.get_color_bank()
1092 1092 # pick position (modulo its length so we always find it in the
1093 1093 # bank even if it's smaller than 256 values)
1094 1094 pos = self.pick_color_bank_index(email_str, color_bank)
1095 1095 return color_bank[pos]
1096 1096
1097 1097 def normalize_email(self, email_address):
1098 1098 import unicodedata
1099 1099 # default host used to fill in the fake/missing email
1100 1100 default_host = u'localhost'
1101 1101
1102 1102 if not email_address:
1103 1103 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1104 1104
1105 1105 email_address = safe_unicode(email_address)
1106 1106
1107 1107 if u'@' not in email_address:
1108 1108 email_address = u'%s@%s' % (email_address, default_host)
1109 1109
1110 1110 if email_address.endswith(u'@'):
1111 1111 email_address = u'%s%s' % (email_address, default_host)
1112 1112
1113 1113 email_address = unicodedata.normalize('NFKD', email_address)\
1114 1114 .encode('ascii', 'ignore')
1115 1115 return email_address
1116 1116
1117 1117 def get_initials(self):
1118 1118 """
1119 1119 Returns 2 letter initials calculated based on the input.
1120 1120 The algorithm picks the first given email address and takes the first letter
1121 1121 of the part before the @, and then the first letter of the server name. In case
1122 1122 the part before the @ is in a `somestring.somestring2` format, it replaces
1123 1123 the server letter with the first letter of somestring2.
1124 1124
1125 1125 In case the function was initialized with both first and last name, this
1126 1126 overrides the extraction from email with the first letters of the first and
1127 1127 last name. We add special logic to that functionality: in case the full name
1128 1128 is compound, like Guido Von Rossum, we use the last part of the last name
1129 1129 (Von Rossum), picking `R`.
1130 1130
1131 1131 The function also normalizes non-ascii characters to their ascii
1132 1132 representation, eg Ą => A
1133 1133 """
1134 1134 import unicodedata
1135 1135 # replace non-ascii to ascii
1136 1136 first_name = unicodedata.normalize(
1137 1137 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1138 1138 last_name = unicodedata.normalize(
1139 1139 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1140 1140
1141 1141 # do NFKD encoding, and also make sure email has proper format
1142 1142 email_address = self.normalize_email(self.email_address)
1143 1143
1144 1144 # first push the email initials
1145 1145 prefix, server = email_address.split('@', 1)
1146 1146
1147 # check if prefix is maybe a 'firstname.lastname' syntax
1147 # check if prefix is maybe a 'first_name.last_name' syntax
1148 1148 _dot_split = prefix.rsplit('.', 1)
1149 1149 if len(_dot_split) == 2:
1150 1150 initials = [_dot_split[0][0], _dot_split[1][0]]
1151 1151 else:
1152 1152 initials = [prefix[0], server[0]]
1153 1153
1154 # then try to replace either firtname or lastname
1154 # then try to replace either first_name or last_name
1155 1155 fn_letter = (first_name or " ")[0].strip()
1156 1156 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1157 1157
1158 1158 if fn_letter:
1159 1159 initials[0] = fn_letter
1160 1160
1161 1161 if ln_letter:
1162 1162 initials[1] = ln_letter
1163 1163
1164 1164 return ''.join(initials).upper()
1165 1165
1166 1166 def get_img_data_by_type(self, font_family, img_type):
1167 1167 default_user = """
1168 1168 <svg xmlns="http://www.w3.org/2000/svg"
1169 1169 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1170 1170 viewBox="-15 -10 439.165 429.164"
1171 1171
1172 1172 xml:space="preserve"
1173 1173 style="background:{background};" >
1174 1174
1175 1175 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1176 1176 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1177 1177 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1178 1178 168.596,153.916,216.671,
1179 1179 204.583,216.671z" fill="{text_color}"/>
1180 1180 <path d="M407.164,374.717L360.88,
1181 1181 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1182 1182 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1183 1183 15.366-44.203,23.488-69.076,23.488c-24.877,
1184 1184 0-48.762-8.122-69.078-23.488
1185 1185 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1186 1186 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1187 1187 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1188 1188 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1189 1189 19.402-10.527 C409.699,390.129,
1190 1190 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1191 1191 </svg>""".format(
1192 1192 size=self.size,
1193 1193 background='#979797', # @grey4
1194 1194 text_color=self.text_color,
1195 1195 font_family=font_family)
1196 1196
1197 1197 return {
1198 1198 "default_user": default_user
1199 1199 }[img_type]
1200 1200
1201 1201 def get_img_data(self, svg_type=None):
1202 1202 """
1203 1203 generates the svg metadata for image
1204 1204 """
1205 1205
1206 1206 font_family = ','.join([
1207 1207 'proximanovaregular',
1208 1208 'Proxima Nova Regular',
1209 1209 'Proxima Nova',
1210 1210 'Arial',
1211 1211 'Lucida Grande',
1212 1212 'sans-serif'
1213 1213 ])
1214 1214 if svg_type:
1215 1215 return self.get_img_data_by_type(font_family, svg_type)
1216 1216
1217 1217 initials = self.get_initials()
1218 1218 img_data = """
1219 1219 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1220 1220 width="{size}" height="{size}"
1221 1221 style="width: 100%; height: 100%; background-color: {background}"
1222 1222 viewBox="0 0 {size} {size}">
1223 1223 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1224 1224 pointer-events="auto" fill="{text_color}"
1225 1225 font-family="{font_family}"
1226 1226 style="font-weight: 400; font-size: {f_size}px;">{text}
1227 1227 </text>
1228 1228 </svg>""".format(
1229 1229 size=self.size,
1230 1230 f_size=self.size/1.85, # scale the text inside the box nicely
1231 1231 background=self.background,
1232 1232 text_color=self.text_color,
1233 1233 text=initials.upper(),
1234 1234 font_family=font_family)
1235 1235
1236 1236 return img_data
1237 1237
1238 1238 def generate_svg(self, svg_type=None):
1239 1239 img_data = self.get_img_data(svg_type)
1240 1240 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1241 1241
1242 1242
1243 1243 def initials_gravatar(email_address, first_name, last_name, size=30):
1244 1244 svg_type = None
1245 1245 if email_address == User.DEFAULT_USER_EMAIL:
1246 1246 svg_type = 'default_user'
1247 1247 klass = InitialsGravatar(email_address, first_name, last_name, size)
1248 1248 return klass.generate_svg(svg_type=svg_type)
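# Illustrative sketch (assumed example inputs, not part of the original
# file): initials come from first/last name when given, otherwise from the
# email address, and the result is returned as an inline SVG data URI.
#
# >>> InitialsGravatar('john.doe@example.com', '', '').get_initials()
# 'JD'
# >>> InitialsGravatar('x@example.com', 'Guido', 'Von Rossum').get_initials()
# 'GR'
# >>> initials_gravatar('john.doe@example.com', '', '').startswith(
# ...     'data:image/svg+xml;base64,')
# True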
1249 1249
1250 1250
1251 1251 def gravatar_url(email_address, size=30, request=None):
1252 1252 request = get_current_request()
1253 1253 if request and hasattr(request, 'call_context'):
1254 1254 _use_gravatar = request.call_context.visual.use_gravatar
1255 1255 _gravatar_url = request.call_context.visual.gravatar_url
1256 1256 else:
1257 1257 # doh, we need to re-import those to mock it later
1258 1258 from pylons import tmpl_context as c
1259 1259
1260 1260 _use_gravatar = c.visual.use_gravatar
1261 1261 _gravatar_url = c.visual.gravatar_url
1262 1262
1263 1263 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1264 1264
1265 1265 email_address = email_address or User.DEFAULT_USER_EMAIL
1266 1266 if isinstance(email_address, unicode):
1267 1267 # hashlib crashes on unicode items
1268 1268 email_address = safe_str(email_address)
1269 1269
1270 1270 # empty email or default user
1271 1271 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1272 1272 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1273 1273
1274 1274 if _use_gravatar:
1275 1275 # TODO: Disuse pyramid thread locals. Think about another solution to
1276 1276 # get the host and schema here.
1277 1277 request = get_current_request()
1278 1278 tmpl = safe_str(_gravatar_url)
1279 1279 tmpl = tmpl.replace('{email}', email_address)\
1280 1280 .replace('{md5email}', md5_safe(email_address.lower())) \
1281 1281 .replace('{netloc}', request.host)\
1282 1282 .replace('{scheme}', request.scheme)\
1283 1283 .replace('{size}', safe_str(size))
1284 1284 return tmpl
1285 1285 else:
1286 1286 return initials_gravatar(email_address, '', '', size=size)
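# Illustrative sketch (the template below is an assumed settings value, not
# part of the original file): when gravatars are enabled, the configured
# gravatar URL is a plain string template and the placeholders {email},
# {md5email}, {netloc}, {scheme} and {size} get substituted, e.g.
#
# 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
# -> 'https://secure.gravatar.com/avatar/<md5 of email>?d=identicon&s=30'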
1287 1287
1288 1288
1289 1289 class Page(_Page):
1290 1290 """
1291 1291 Custom pager to match rendering style with paginator
1292 1292 """
1293 1293
1294 1294 def _get_pos(self, cur_page, max_page, items):
1295 1295 edge = (items / 2) + 1
1296 1296 if (cur_page <= edge):
1297 1297 radius = max(items / 2, items - cur_page)
1298 1298 elif (max_page - cur_page) < edge:
1299 1299 radius = (items - 1) - (max_page - cur_page)
1300 1300 else:
1301 1301 radius = items / 2
1302 1302
1303 1303 left = max(1, (cur_page - (radius)))
1304 1304 right = min(max_page, cur_page + (radius))
1305 1305 return left, cur_page, right
1306 1306
1307 1307 def _range(self, regexp_match):
1308 1308 """
1309 1309 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1310 1310
1311 1311 Arguments:
1312 1312
1313 1313 regexp_match
1314 1314 A "re" (regular expressions) match object containing the
1315 1315 radius of linked pages around the current page in
1316 1316 regexp_match.group(1) as a string
1317 1317
1318 1318 This function is supposed to be called as a callable in
1319 1319 re.sub.
1320 1320
1321 1321 """
1322 1322 radius = int(regexp_match.group(1))
1323 1323
1324 1324 # Compute the first and last page number within the radius
1325 1325 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1326 1326 # -> leftmost_page = 5
1327 1327 # -> rightmost_page = 9
1328 1328 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1329 1329 self.last_page,
1330 1330 (radius * 2) + 1)
1331 1331 nav_items = []
1332 1332
1333 1333 # Create a link to the first page (unless we are on the first page
1334 1334 # or there would be no need to insert '..' spacers)
1335 1335 if self.page != self.first_page and self.first_page < leftmost_page:
1336 1336 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1337 1337
1338 1338 # Insert dots if there are pages between the first page
1339 1339 # and the currently displayed page range
1340 1340 if leftmost_page - self.first_page > 1:
1341 1341 # Wrap in a SPAN tag if nolink_attr is set
1342 1342 text = '..'
1343 1343 if self.dotdot_attr:
1344 1344 text = HTML.span(c=text, **self.dotdot_attr)
1345 1345 nav_items.append(text)
1346 1346
1347 1347 for thispage in xrange(leftmost_page, rightmost_page + 1):
1348 1348 # Highlight the current page number and do not use a link
1349 1349 if thispage == self.page:
1350 1350 text = '%s' % (thispage,)
1351 1351 # Wrap in a SPAN tag if nolink_attr is set
1352 1352 if self.curpage_attr:
1353 1353 text = HTML.span(c=text, **self.curpage_attr)
1354 1354 nav_items.append(text)
1355 1355 # Otherwise create just a link to that page
1356 1356 else:
1357 1357 text = '%s' % (thispage,)
1358 1358 nav_items.append(self._pagerlink(thispage, text))
1359 1359
1360 1360 # Insert dots if there are pages between the displayed
1361 1361 # page numbers and the end of the page range
1362 1362 if self.last_page - rightmost_page > 1:
1363 1363 text = '..'
1364 1364 # Wrap in a SPAN tag if nolink_attr is set
1365 1365 if self.dotdot_attr:
1366 1366 text = HTML.span(c=text, **self.dotdot_attr)
1367 1367 nav_items.append(text)
1368 1368
1369 1369 # Create a link to the very last page (unless we are on the last
1370 1370 # page or there would be no need to insert '..' spacers)
1371 1371 if self.page != self.last_page and rightmost_page < self.last_page:
1372 1372 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1373 1373
1374 1374 ## prerender links
1375 1375 #_page_link = url.current()
1376 1376 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1377 1377 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1378 1378 return self.separator.join(nav_items)
1379 1379
1380 1380 def pager(self, format='~2~', page_param='page', partial_param='partial',
1381 1381 show_if_single_page=False, separator=' ', onclick=None,
1382 1382 symbol_first='<<', symbol_last='>>',
1383 1383 symbol_previous='<', symbol_next='>',
1384 1384 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1385 1385 curpage_attr={'class': 'pager_curpage'},
1386 1386 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1387 1387
1388 1388 self.curpage_attr = curpage_attr
1389 1389 self.separator = separator
1390 1390 self.pager_kwargs = kwargs
1391 1391 self.page_param = page_param
1392 1392 self.partial_param = partial_param
1393 1393 self.onclick = onclick
1394 1394 self.link_attr = link_attr
1395 1395 self.dotdot_attr = dotdot_attr
1396 1396
1397 1397 # Don't show navigator if there is no more than one page
1398 1398 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1399 1399 return ''
1400 1400
1401 1401 from string import Template
1402 1402 # Replace ~...~ in token format by range of pages
1403 1403 result = re.sub(r'~(\d+)~', self._range, format)
1404 1404
1405 1405 # Interpolate '%' variables
1406 1406 result = Template(result).safe_substitute({
1407 1407 'first_page': self.first_page,
1408 1408 'last_page': self.last_page,
1409 1409 'page': self.page,
1410 1410 'page_count': self.page_count,
1411 1411 'items_per_page': self.items_per_page,
1412 1412 'first_item': self.first_item,
1413 1413 'last_item': self.last_item,
1414 1414 'item_count': self.item_count,
1415 1415 'link_first': self.page > self.first_page and \
1416 1416 self._pagerlink(self.first_page, symbol_first) or '',
1417 1417 'link_last': self.page < self.last_page and \
1418 1418 self._pagerlink(self.last_page, symbol_last) or '',
1419 1419 'link_previous': self.previous_page and \
1420 1420 self._pagerlink(self.previous_page, symbol_previous) \
1421 1421 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1422 1422 'link_next': self.next_page and \
1423 1423 self._pagerlink(self.next_page, symbol_next) \
1424 1424 or HTML.span(symbol_next, class_="pg-next disabled")
1425 1425 })
1426 1426
1427 1427 return literal(result)
1428 1428
1429 1429
1430 1430 #==============================================================================
1431 1431 # REPO PAGER, PAGER FOR REPOSITORY
1432 1432 #==============================================================================
1433 1433 class RepoPage(Page):
1434 1434
1435 1435 def __init__(self, collection, page=1, items_per_page=20,
1436 1436 item_count=None, url=None, **kwargs):
1437 1437
1438 1438 """Create a "RepoPage" instance. special pager for paging
1439 1439 repository
1440 1440 """
1441 1441 self._url_generator = url
1442 1442
1443 1443 # Save the kwargs class-wide so they can be used in the pager() method
1444 1444 self.kwargs = kwargs
1445 1445
1446 1446 # Save a reference to the collection
1447 1447 self.original_collection = collection
1448 1448
1449 1449 self.collection = collection
1450 1450
1451 1451 # The self.page is the number of the current page.
1452 1452 # The first page has the number 1!
1453 1453 try:
1454 1454 self.page = int(page) # make it int() if we get it as a string
1455 1455 except (ValueError, TypeError):
1456 1456 self.page = 1
1457 1457
1458 1458 self.items_per_page = items_per_page
1459 1459
1460 1460 # Unless the user tells us how many items the collections has
1461 1461 # we calculate that ourselves.
1462 1462 if item_count is not None:
1463 1463 self.item_count = item_count
1464 1464 else:
1465 1465 self.item_count = len(self.collection)
1466 1466
1467 1467 # Compute the number of the first and last available page
1468 1468 if self.item_count > 0:
1469 1469 self.first_page = 1
1470 1470 self.page_count = int(math.ceil(float(self.item_count) /
1471 1471 self.items_per_page))
1472 1472 self.last_page = self.first_page + self.page_count - 1
1473 1473
1474 1474 # Make sure that the requested page number is in the range of
1475 1475 # valid pages
1476 1476 if self.page > self.last_page:
1477 1477 self.page = self.last_page
1478 1478 elif self.page < self.first_page:
1479 1479 self.page = self.first_page
1480 1480
1481 1481 # Note: the number of items on this page can be less than
1482 1482 # items_per_page if the last page is not full
1483 1483 self.first_item = max(0, (self.item_count) - (self.page *
1484 1484 items_per_page))
1485 1485 self.last_item = ((self.item_count - 1) - items_per_page *
1486 1486 (self.page - 1))
1487 1487
1488 1488 self.items = list(self.collection[self.first_item:self.last_item + 1])
1489 1489
1490 1490 # Links to previous and next page
1491 1491 if self.page > self.first_page:
1492 1492 self.previous_page = self.page - 1
1493 1493 else:
1494 1494 self.previous_page = None
1495 1495
1496 1496 if self.page < self.last_page:
1497 1497 self.next_page = self.page + 1
1498 1498 else:
1499 1499 self.next_page = None
1500 1500
1501 1501 # No items available
1502 1502 else:
1503 1503 self.first_page = None
1504 1504 self.page_count = 0
1505 1505 self.last_page = None
1506 1506 self.first_item = None
1507 1507 self.last_item = None
1508 1508 self.previous_page = None
1509 1509 self.next_page = None
1510 1510 self.items = []
1511 1511
1512 1512 # This is a subclass of the 'list' type. Initialise the list now.
1513 1513 list.__init__(self, reversed(self.items))
1514 1514
1515 1515
1516 1516 def changed_tooltip(nodes):
1517 1517 """
1518 1518 Generates an html string for changed nodes on the commit page.
1519 1519 It limits the output to 30 entries
1520 1520
1521 1521 :param nodes: LazyNodesGenerator
1522 1522 """
1523 1523 if nodes:
1524 1524 pref = ': <br/> '
1525 1525 suf = ''
1526 1526 if len(nodes) > 30:
1527 1527 suf = '<br/>' + _(' and %s more') % (len(nodes) - 30)
1528 1528 return literal(pref + '<br/> '.join([safe_unicode(x.path)
1529 1529 for x in nodes[:30]]) + suf)
1530 1530 else:
1531 1531 return ': ' + _('No Files')
1532 1532
1533 1533
1534 1534 def breadcrumb_repo_link(repo):
1535 1535 """
1536 1536 Makes a breadcrumb path link to a repo
1537 1537
1538 1538 ex::
1539 1539 group >> subgroup >> repo
1540 1540
1541 1541 :param repo: a Repository instance
1542 1542 """
1543 1543
1544 1544 path = [
1545 1545 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1546 1546 for group in repo.groups_with_parents
1547 1547 ] + [
1548 1548 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1549 1549 ]
1550 1550
1551 1551 return literal(' &raquo; '.join(path))
1552 1552
1553 1553
1554 1554 def format_byte_size_binary(file_size):
1555 1555 """
1556 1556 Formats file/folder sizes to standard.
1557 1557 """
1558 1558 formatted_size = format_byte_size(file_size, binary=True)
1559 1559 return formatted_size
1560 1560
1561 1561
1562 1562 def urlify_text(text_, safe=True):
1563 1563 """
1564 1564 Extract urls from text and make html links out of them
1565 1565
1566 1566 :param text_:
1567 1567 """
1568 1568
1569 1569 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1570 1570 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1571 1571
1572 1572 def url_func(match_obj):
1573 1573 url_full = match_obj.groups()[0]
1574 1574 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1575 1575 _newtext = url_pat.sub(url_func, text_)
1576 1576 if safe:
1577 1577 return literal(_newtext)
1578 1578 return _newtext
1579 1579
1580 1580
1581 1581 def urlify_commits(text_, repository):
1582 1582 """
1583 1583 Extract commit ids from text and make links from them
1584 1584
1585 1585 :param text_:
1586 1586 :param repository: repo name to build the URL with
1587 1587 """
1588 1588 from pylons import url # doh, we need to re-import url to mock it later
1589 1589 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1590 1590
1591 1591 def url_func(match_obj):
1592 1592 commit_id = match_obj.groups()[1]
1593 1593 pref = match_obj.groups()[0]
1594 1594 suf = match_obj.groups()[2]
1595 1595
1596 1596 tmpl = (
1597 1597 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1598 1598 '%(commit_id)s</a>%(suf)s'
1599 1599 )
1600 1600 return tmpl % {
1601 1601 'pref': pref,
1602 1602 'cls': 'revision-link',
1603 1603 'url': url('changeset_home', repo_name=repository,
1604 1604 revision=commit_id, qualified=True),
1605 1605 'commit_id': commit_id,
1606 1606 'suf': suf
1607 1607 }
1608 1608
1609 1609 newtext = URL_PAT.sub(url_func, text_)
1610 1610
1611 1611 return newtext
1612 1612
1613 1613
1614 1614 def _process_url_func(match_obj, repo_name, uid, entry,
1615 1615 return_raw_data=False, link_format='html'):
1616 1616 pref = ''
1617 1617 if match_obj.group().startswith(' '):
1618 1618 pref = ' '
1619 1619
1620 1620 issue_id = ''.join(match_obj.groups())
1621 1621
1622 1622 if link_format == 'html':
1623 1623 tmpl = (
1624 1624 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1625 1625 '%(issue-prefix)s%(id-repr)s'
1626 1626 '</a>')
1627 1627 elif link_format == 'rst':
1628 1628 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1629 1629 elif link_format == 'markdown':
1630 1630 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1631 1631 else:
1632 1632 raise ValueError('Bad link_format:{}'.format(link_format))
1633 1633
1634 1634 (repo_name_cleaned,
1635 1635 parent_group_name) = RepoGroupModel().\
1636 1636 _get_group_name_and_parent(repo_name)
1637 1637
1638 1638 # variables replacement
1639 1639 named_vars = {
1640 1640 'id': issue_id,
1641 1641 'repo': repo_name,
1642 1642 'repo_name': repo_name_cleaned,
1643 1643 'group_name': parent_group_name
1644 1644 }
1645 1645 # named regex variables
1646 1646 named_vars.update(match_obj.groupdict())
1647 1647 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1648 1648
1649 1649 data = {
1650 1650 'pref': pref,
1651 1651 'cls': 'issue-tracker-link',
1652 1652 'url': _url,
1653 1653 'id-repr': issue_id,
1654 1654 'issue-prefix': entry['pref'],
1655 1655 'serv': entry['url'],
1656 1656 }
1657 1657 if return_raw_data:
1658 1658 return {
1659 1659 'id': issue_id,
1660 1660 'url': _url
1661 1661 }
1662 1662 return tmpl % data
1663 1663
1664 1664
1665 1665 def process_patterns(text_string, repo_name, link_format='html'):
1666 1666 allowed_formats = ['html', 'rst', 'markdown']
1667 1667 if link_format not in allowed_formats:
1668 1668 raise ValueError('Link format can be only one of:{} got {}'.format(
1669 1669 allowed_formats, link_format))
1670 1670
1671 1671 repo = None
1672 1672 if repo_name:
1673 1673 # Retrieve the repo to avoid an invalid repo_name exploding on
1674 1674 # IssueTrackerSettingsModel, but still pass the invalid name further down
1675 1675 repo = Repository.get_by_repo_name(repo_name, cache=True)
1676 1676
1677 1677 settings_model = IssueTrackerSettingsModel(repo=repo)
1678 1678 active_entries = settings_model.get_settings(cache=True)
1679 1679
1680 1680 issues_data = []
1681 1681 newtext = text_string
1682 1682
1683 1683 for uid, entry in active_entries.items():
1684 1684 log.debug('found issue tracker entry with uid %s' % (uid,))
1685 1685
1686 1686 if not (entry['pat'] and entry['url']):
1687 1687 log.debug('skipping due to missing data')
1688 1688 continue
1689 1689
1690 1690 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1691 1691 % (uid, entry['pat'], entry['url'], entry['pref']))
1692 1692
1693 1693 try:
1694 1694 pattern = re.compile(r'%s' % entry['pat'])
1695 1695 except re.error:
1696 1696 log.exception(
1697 1697 'issue tracker pattern: `%s` failed to compile',
1698 1698 entry['pat'])
1699 1699 continue
1700 1700
1701 1701 data_func = partial(
1702 1702 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1703 1703 return_raw_data=True)
1704 1704
1705 1705 for match_obj in pattern.finditer(text_string):
1706 1706 issues_data.append(data_func(match_obj))
1707 1707
1708 1708 url_func = partial(
1709 1709 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1710 1710 link_format=link_format)
1711 1711
1712 1712 newtext = pattern.sub(url_func, newtext)
1713 1713 log.debug('processed prefix:uid `%s`' % (uid,))
1714 1714
1715 1715 return newtext, issues_data
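# Illustrative sketch (the entry below is an assumed issue-tracker setting,
# not part of the original file): each active entry carries a regex pattern,
# a URL template and a prefix, e.g.
#
# entry = {'pat': r'#(?P<issue_id>\d+)',
#          'url': 'https://issues.example.com/${repo}/issue/${issue_id}',
#          'pref': '#'}
#
# process_patterns('fixes #42', 'myrepo') would then replace '#42' with an
# <a class="issue-tracker-link"> pointing at
# 'https://issues.example.com/myrepo/issue/42' and also return the extracted
# issue data as the second element of the tuple.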
1716 1716
1717 1717
1718 1718 def urlify_commit_message(commit_text, repository=None):
1719 1719 """
1720 1720 Parses given text message and makes proper links.
1721 1721 issues are linked to given issue-server, and rest is a commit link
1722 1722
1723 1723 :param commit_text:
1724 1724 :param repository:
1725 1725 """
1726 1726 from pylons import url # doh, we need to re-import url to mock it later
1727 1727
1728 1728 def escaper(string):
1729 1729 return string.replace('<', '&lt;').replace('>', '&gt;')
1730 1730
1731 1731 newtext = escaper(commit_text)
1732 1732
1733 1733 # extract http/https links and make them real urls
1734 1734 newtext = urlify_text(newtext, safe=False)
1735 1735
1736 1736 # urlify commits - extract commit ids and make link out of them, if we have
1737 1737 # the scope of repository present.
1738 1738 if repository:
1739 1739 newtext = urlify_commits(newtext, repository)
1740 1740
1741 1741 # process issue tracker patterns
1742 1742 newtext, issues = process_patterns(newtext, repository or '')
1743 1743
1744 1744 return literal(newtext)
1745 1745
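
As a small aside, the `escaper` above only neutralises angle brackets before links are injected; a standard-library illustration with a made-up commit message:

def escaper(text):
    # same minimal HTML escaping as in urlify_commit_message: only '<' and '>'
    return text.replace('<', '&lt;').replace('>', '&gt;')

print(escaper('Merge <feature-branch> into stable, fixes #42'))
# Merge &lt;feature-branch&gt; into stable, fixes #42
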
1746 1746
1747 1747 def render_binary(repo_name, file_obj):
1748 1748 """
1749 1749 Choose how to render a binary file
1750 1750 """
1751 1751 filename = file_obj.name
1752 1752
1753 1753 # images
1754 1754 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1755 1755 if fnmatch.fnmatch(filename, pat=ext):
1756 1756 alt = filename
1757 1757 src = url('files_raw_home', repo_name=repo_name,
1758 1758 revision=file_obj.commit.raw_id, f_path=file_obj.path)
1759 1759 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1760 1760
1761 1761
1762 1762 def renderer_from_filename(filename, exclude=None):
1763 1763 """
1764 1764 choose a renderer based on filename; this works only for text-based files
1765 1765 """
1766 1766
1767 1767 # ipython
1768 1768 for ext in ['*.ipynb']:
1769 1769 if fnmatch.fnmatch(filename, pat=ext):
1770 1770 return 'jupyter'
1771 1771
1772 1772 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1773 1773 if is_markup:
1774 1774 return is_markup
1775 1775 return None
1776 1776
1777 1777
1778 1778 def render(source, renderer='rst', mentions=False, relative_url=None,
1779 1779 repo_name=None):
1780 1780
1781 1781 def maybe_convert_relative_links(html_source):
1782 1782 if relative_url:
1783 1783 return relative_links(html_source, relative_url)
1784 1784 return html_source
1785 1785
1786 1786 if renderer == 'rst':
1787 1787 if repo_name:
1788 1788 # process patterns on comments if we pass in repo name
1789 1789 source, issues = process_patterns(
1790 1790 source, repo_name, link_format='rst')
1791 1791
1792 1792 return literal(
1793 1793 '<div class="rst-block">%s</div>' %
1794 1794 maybe_convert_relative_links(
1795 1795 MarkupRenderer.rst(source, mentions=mentions)))
1796 1796 elif renderer == 'markdown':
1797 1797 if repo_name:
1798 1798 # process patterns on comments if we pass in repo name
1799 1799 source, issues = process_patterns(
1800 1800 source, repo_name, link_format='markdown')
1801 1801
1802 1802 return literal(
1803 1803 '<div class="markdown-block">%s</div>' %
1804 1804 maybe_convert_relative_links(
1805 1805 MarkupRenderer.markdown(source, flavored=True,
1806 1806 mentions=mentions)))
1807 1807 elif renderer == 'jupyter':
1808 1808 return literal(
1809 1809 '<div class="ipynb">%s</div>' %
1810 1810 maybe_convert_relative_links(
1811 1811 MarkupRenderer.jupyter(source)))
1812 1812
1813 1813 # None means just show the file-source
1814 1814 return None
1815 1815
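
The renderer dispatch above boils down to: pick a wrapper div per markup type, or return None to fall back to showing the raw file source. A hedged sketch of that dispatch with stand-in render functions (the real ones are MarkupRenderer.rst/markdown/jupyter):

# stand-ins for MarkupRenderer.*, used only to illustrate the dispatch shape
def render_markup(source, renderer='rst'):
    renderers = {
        'rst': lambda s: '<div class="rst-block">%s</div>' % s,
        'markdown': lambda s: '<div class="markdown-block">%s</div>' % s,
        'jupyter': lambda s: '<div class="ipynb">%s</div>' % s,
    }
    render_func = renderers.get(renderer)
    if render_func is None:
        return None  # None means: just show the file source
    return render_func(source)

print(render_markup('# title', renderer='markdown'))
print(render_markup('plain file', renderer='unknown'))
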
1816 1816
1817 1817 def commit_status(repo, commit_id):
1818 1818 return ChangesetStatusModel().get_status(repo, commit_id)
1819 1819
1820 1820
1821 1821 def commit_status_lbl(commit_status):
1822 1822 return dict(ChangesetStatus.STATUSES).get(commit_status)
1823 1823
1824 1824
1825 1825 def commit_time(repo_name, commit_id):
1826 1826 repo = Repository.get_by_repo_name(repo_name)
1827 1827 commit = repo.get_commit(commit_id=commit_id)
1828 1828 return commit.date
1829 1829
1830 1830
1831 1831 def get_permission_name(key):
1832 1832 return dict(Permission.PERMS).get(key)
1833 1833
1834 1834
1835 1835 def journal_filter_help():
1836 1836 return _(
1837 1837 'Example filter terms:\n' +
1838 1838 ' repository:vcs\n' +
1839 1839 ' username:marcin\n' +
1840 1840 ' action:*push*\n' +
1841 1841 ' ip:127.0.0.1\n' +
1842 1842 ' date:20120101\n' +
1843 1843 ' date:[20120101100000 TO 20120102]\n' +
1844 1844 '\n' +
1845 1845 'Generate wildcards using \'*\' character:\n' +
1846 1846 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1847 1847 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1848 1848 '\n' +
1849 1849 'Optional AND / OR operators in queries\n' +
1850 1850 ' "repository:vcs OR repository:test"\n' +
1851 1851 ' "username:test AND repository:test*"\n'
1852 1852 )
1853 1853
1854 1854
1855 1855 def search_filter_help(searcher):
1856 1856
1857 1857 terms = ''
1858 1858 return _(
1859 1859 'Example filter terms for `{searcher}` search:\n' +
1860 1860 '{terms}\n' +
1861 1861 'Generate wildcards using \'*\' character:\n' +
1862 1862 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1863 1863 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1864 1864 '\n' +
1865 1865 'Optional AND / OR operators in queries\n' +
1866 1866 ' "repo_name:vcs OR repo_name:test"\n' +
1867 1867 ' "owner:test AND repo_name:test*"\n' +
1868 1868 'More: {search_doc}'
1869 1869 ).format(searcher=searcher.name,
1870 1870 terms=terms, search_doc=searcher.query_lang_doc)
1871 1871
1872 1872
1873 1873 def not_mapped_error(repo_name):
1874 1874 flash(_('%s repository is not mapped to db. Perhaps'
1875 1875 ' it was created or renamed from the filesystem.'
1876 1876 ' Please run the application again'
1877 1877 ' in order to rescan repositories') % repo_name, category='error')
1878 1878
1879 1879
1880 1880 def ip_range(ip_addr):
1881 1881 from rhodecode.model.db import UserIpMap
1882 1882 s, e = UserIpMap._get_ip_range(ip_addr)
1883 1883 return '%s - %s' % (s, e)
1884 1884
1885 1885
1886 1886 def form(url, method='post', needs_csrf_token=True, **attrs):
1887 1887 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1888 1888 if method.lower() != 'get' and needs_csrf_token:
1889 1889 raise Exception(
1890 1890 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1891 1891 'CSRF token. If the endpoint does not require such token you can ' +
1892 1892 'explicitly set the parameter needs_csrf_token to false.')
1893 1893
1894 1894 return wh_form(url, method=method, **attrs)
1895 1895
1896 1896
1897 1897 def secure_form(url, method="POST", multipart=False, **attrs):
1898 1898 """Start a form tag that points the action to an url. This
1899 1899 form tag will also include the hidden field containing
1900 1900 the auth token.
1901 1901
1902 1902 The url options should be given either as a string, or as a
1903 1903 ``url()`` function. The method for the form defaults to POST.
1904 1904
1905 1905 Options:
1906 1906
1907 1907 ``multipart``
1908 1908 If set to True, the enctype is set to "multipart/form-data".
1909 1909 ``method``
1910 1910 The method to use when submitting the form, usually either
1911 1911 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1912 1912 hidden input with name _method is added to simulate the verb
1913 1913 over POST.
1914 1914
1915 1915 """
1916 1916 from webhelpers.pylonslib.secure_form import insecure_form
1917 1917 form = insecure_form(url, method, multipart, **attrs)
1918 1918 token = csrf_input()
1919 1919 return literal("%s\n%s" % (form, token))
1920 1920
1921 1921 def csrf_input():
1922 1922 return literal(
1923 1923 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1924 1924 csrf_token_key, csrf_token_key, get_csrf_token()))
1925 1925
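
To visualise what `secure_form` plus `csrf_input` emit, here is a stubbed sketch; `csrf_token_key` and `get_csrf_token` come from rhodecode.lib.auth in the real code and are faked here:

# illustration only: the key name and token value below are stand-ins
csrf_token_key = 'csrf_token'

def get_csrf_token():
    return 'example-token'

form_tag = '<form action="/my-repo/settings" method="POST">'
token_input = '<input type="hidden" id="{0}" name="{0}" value="{1}">'.format(
    csrf_token_key, get_csrf_token())
print('%s\n%s' % (form_tag, token_input))
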
1926 1926 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1927 1927 select_html = select(name, selected, options, **attrs)
1928 1928 select2 = """
1929 1929 <script>
1930 1930 $(document).ready(function() {
1931 1931 $('#%s').select2({
1932 1932 containerCssClass: 'drop-menu',
1933 1933 dropdownCssClass: 'drop-menu-dropdown',
1934 1934 dropdownAutoWidth: true%s
1935 1935 });
1936 1936 });
1937 1937 </script>
1938 1938 """
1939 1939 filter_option = """,
1940 1940 minimumResultsForSearch: -1
1941 1941 """
1942 1942 input_id = attrs.get('id') or name
1943 1943 filter_enabled = "" if enable_filter else filter_option
1944 1944 select_script = literal(select2 % (input_id, filter_enabled))
1945 1945
1946 1946 return literal(select_html+select_script)
1947 1947
1948 1948
1949 1949 def get_visual_attr(tmpl_context_var, attr_name):
1950 1950 """
1951 1951 A safe way to get an attribute from the `visual` object of the template context
1952 1952
1953 1953 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1954 1954 :param attr_name: name of the attribute we fetch from the c.visual
1955 1955 """
1956 1956 visual = getattr(tmpl_context_var, 'visual', None)
1957 1957 if not visual:
1958 1958 return
1959 1959 else:
1960 1960 return getattr(visual, attr_name, None)
1961 1961
1962 1962
1963 1963 def get_last_path_part(file_node):
1964 1964 if not file_node.path:
1965 1965 return u''
1966 1966
1967 1967 path = safe_unicode(file_node.path.split('/')[-1])
1968 1968 return u'../' + path
1969 1969
1970 1970
1971 1971 def route_url(*args, **kwargs):
1972 1972 """
1973 1973 Wrapper around pyramid's `route_url` (fully qualified URL) function.
1974 1974 It is used to generate URLs from within pylons views or templates.
1975 1975 This will be removed when the pyramid migration is finished.
1976 1976 """
1977 1977 req = get_current_request()
1978 1978 return req.route_url(*args, **kwargs)
1979 1979
1980 1980
1981 1981 def route_path(*args, **kwargs):
1982 1982 """
1983 1983 Wrapper around pyramid's `route_path` function. It is used to generate
1984 1984 URLs from within pylons views or templates. This will be removed when
1985 1985 the pyramid migration is finished.
1986 1986 """
1987 1987 req = get_current_request()
1988 1988 return req.route_path(*args, **kwargs)
1989 1989
1990 1990
1991 1991 def route_path_or_none(*args, **kwargs):
1992 1992 try:
1993 1993 return route_path(*args, **kwargs)
1994 1994 except KeyError:
1995 1995 return None
1996 1996
1997 1997
1998 1998 def static_url(*args, **kwds):
1999 1999 """
2000 2000 Wrapper around pyramid's `static_url` function. It is used to generate
2001 2001 URLs from within pylons views or templates. This will be removed when
2002 2002 the pyramid migration is finished.
2003 2003 """
2004 2004 req = get_current_request()
2005 2005 return req.static_url(*args, **kwds)
2006 2006
2007 2007
2008 2008 def resource_path(*args, **kwds):
2009 2009 """
2010 2010 Wrapper around pyramid's `resource_path` function. It is used to generate
2011 2011 URLs from within pylons views or templates. This will be removed when
2012 2012 the pyramid migration is finished.
2013 2013 """
2014 2014 req = get_current_request()
2015 2015 return req.resource_path(*args, **kwds)
2016 2016
2017 2017
2018 2018 def api_call_example(method, args):
2019 2019 """
2020 2020 Generates an API call example via CURL
2021 2021 """
2022 2022 args_json = json.dumps(OrderedDict([
2023 2023 ('id', 1),
2024 2024 ('auth_token', 'SECRET'),
2025 2025 ('method', method),
2026 2026 ('args', args)
2027 2027 ]))
2028 2028 return literal(
2029 2029 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2030 2030 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2031 2031 "and needs to be of `api calls` role."
2032 2032 .format(
2033 2033 api_url=route_url('apiv2'),
2034 2034 token_url=route_url('my_account_auth_tokens'),
2035 2035 data=args_json))
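
For reference, the payload that `api_call_example` serialises can be built with the standard library alone; the endpoint URL and method name below are placeholders:

import json
from collections import OrderedDict

args_json = json.dumps(OrderedDict([
    ('id', 1),
    ('auth_token', 'SECRET'),
    ('method', 'get_repo'),           # example method name
    ('args', {'repoid': 'my-repo'}),
]))
print("curl https://code.example.com/_admin/api -X POST "
      "-H 'content-type:text/plain' --data-binary '%s'" % args_json)
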
@@ -1,4075 +1,4079 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37
38 38 from sqlalchemy import *
39 39 from sqlalchemy.ext.declarative import declared_attr
40 40 from sqlalchemy.ext.hybrid import hybrid_property
41 41 from sqlalchemy.orm import (
42 42 relationship, joinedload, class_mapper, validates, aliased)
43 43 from sqlalchemy.sql.expression import true
44 44 from beaker.cache import cache_region
45 45 from zope.cachedescriptors.property import Lazy as LazyProperty
46 46
47 47 from pylons.i18n.translation import lazy_ugettext as _
48 48 from pyramid.threadlocal import get_current_request
49 49
50 50 from rhodecode.lib.vcs import get_vcs_instance
51 51 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
52 52 from rhodecode.lib.utils2 import (
53 53 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
54 54 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
55 55 glob2re, StrictAttributeDict, cleaned_uri)
56 56 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType
57 57 from rhodecode.lib.ext_json import json
58 58 from rhodecode.lib.caching_query import FromCache
59 59 from rhodecode.lib.encrypt import AESCipher
60 60
61 61 from rhodecode.model.meta import Base, Session
62 62
63 63 URL_SEP = '/'
64 64 log = logging.getLogger(__name__)
65 65
66 66 # =============================================================================
67 67 # BASE CLASSES
68 68 # =============================================================================
69 69
70 70 # this is propagated from .ini file rhodecode.encrypted_values.secret or
71 71 # beaker.session.secret if first is not set.
72 72 # and initialized at environment.py
73 73 ENCRYPTION_KEY = None
74 74
75 75 # used to sort permissions by type; '#' is used here because it is not allowed in
76 76 # usernames and sorts very early in the string.printable table.
77 77 PERMISSION_TYPE_SORT = {
78 78 'admin': '####',
79 79 'write': '###',
80 80 'read': '##',
81 81 'none': '#',
82 82 }
83 83
84 84
85 85 def display_sort(obj):
86 86 """
87 87 Sort function used to sort permissions in .permissions() function of
88 88 Repository, RepoGroup, UserGroup. It also puts the default user in front
89 89 of all other resources.
90 90 """
91 91
92 92 if obj.username == User.DEFAULT_USER:
93 93 return '#####'
94 94 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
95 95 return prefix + obj.username
96 96
97 97
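
A quick standard-library check of the ordering that `display_sort` produces, using stand-in rows instead of the real permission objects:

from collections import namedtuple

Row = namedtuple('Row', 'username permission')
type_sort = {'admin': '####', 'write': '###', 'read': '##', 'none': '#'}

def example_sort_key(row, default_user='default'):
    if row.username == default_user:
        return '#####'
    return type_sort.get(row.permission.split('.')[-1], '') + row.username

rows = [Row('bob', 'repository.read'),
        Row('alice', 'repository.admin'),
        Row('default', 'repository.read')]
print([r.username for r in sorted(rows, key=example_sort_key)])
# ['default', 'alice', 'bob'] -- default user first, then admin, write, read, none
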
98 98 def _hash_key(k):
99 99 return md5_safe(k)
100 100
101 101
102 102 class EncryptedTextValue(TypeDecorator):
103 103 """
104 104 Special column for encrypted long text data, use like::
105 105
106 106 value = Column("encrypted_value", EncryptedValue(), nullable=False)
107 107
108 108 This column is intelligent: if the value is in unencrypted form it returns
109 109 the unencrypted form, but on save it always encrypts
110 110 """
111 111 impl = Text
112 112
113 113 def process_bind_param(self, value, dialect):
114 114 if not value:
115 115 return value
116 116 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
117 117 # protect against double encryption if someone manually starts
118 118 # doing it
119 119 raise ValueError('value needs to be in unencrypted format, ie. '
120 120 'not starting with enc$aes')
121 121 return 'enc$aes_hmac$%s' % AESCipher(
122 122 ENCRYPTION_KEY, hmac=True).encrypt(value)
123 123
124 124 def process_result_value(self, value, dialect):
125 125 import rhodecode
126 126
127 127 if not value:
128 128 return value
129 129
130 130 parts = value.split('$', 3)
131 131 if not len(parts) == 3:
132 132 # probably not encrypted values
133 133 return value
134 134 else:
135 135 if parts[0] != 'enc':
136 136 # parts ok but without our header ?
137 137 return value
138 138 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
139 139 'rhodecode.encrypted_values.strict') or True)
140 140 # at that stage we know it's our encryption
141 141 if parts[1] == 'aes':
142 142 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
143 143 elif parts[1] == 'aes_hmac':
144 144 decrypted_data = AESCipher(
145 145 ENCRYPTION_KEY, hmac=True,
146 146 strict_verification=enc_strict_mode).decrypt(parts[2])
147 147 else:
148 148 raise ValueError(
149 149 'Encryption type part is wrong, must be `aes` '
150 150 'or `aes_hmac`, got `%s` instead' % (parts[1]))
151 151 return decrypted_data
152 152
153 153
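
The on-disk shape that `EncryptedTextValue` reads back is `enc$<type>$<payload>`; a tiny sketch of the parsing branch, with a fake payload:

stored = 'enc$aes_hmac$ZmFrZS1jaXBoZXJ0ZXh0'   # the payload here is fake

parts = stored.split('$', 3)
if len(parts) == 3 and parts[0] == 'enc':
    enc_type, payload = parts[1], parts[2]
    print(enc_type, payload)        # aes_hmac ZmFrZS1jaXBoZXJ0ZXh0
else:
    # anything else is treated as an already-unencrypted value
    print('plain value:', stored)
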
154 154 class BaseModel(object):
155 155 """
156 156 Base Model for all classes
157 157 """
158 158
159 159 @classmethod
160 160 def _get_keys(cls):
161 161 """return column names for this model """
162 162 return class_mapper(cls).c.keys()
163 163
164 164 def get_dict(self):
165 165 """
166 166 return dict with keys and values corresponding
167 167 to this model data """
168 168
169 169 d = {}
170 170 for k in self._get_keys():
171 171 d[k] = getattr(self, k)
172 172
173 173 # also use __json__() if present to get additional fields
174 174 _json_attr = getattr(self, '__json__', None)
175 175 if _json_attr:
176 176 # update with attributes from __json__
177 177 if callable(_json_attr):
178 178 _json_attr = _json_attr()
179 179 for k, val in _json_attr.iteritems():
180 180 d[k] = val
181 181 return d
182 182
183 183 def get_appstruct(self):
184 184 """return list with keys and values tuples corresponding
185 185 to this model data """
186 186
187 187 l = []
188 188 for k in self._get_keys():
189 189 l.append((k, getattr(self, k),))
190 190 return l
191 191
192 192 def populate_obj(self, populate_dict):
193 193 """populate model with data from given populate_dict"""
194 194
195 195 for k in self._get_keys():
196 196 if k in populate_dict:
197 197 setattr(self, k, populate_dict[k])
198 198
199 199 @classmethod
200 200 def query(cls):
201 201 return Session().query(cls)
202 202
203 203 @classmethod
204 204 def get(cls, id_):
205 205 if id_:
206 206 return cls.query().get(id_)
207 207
208 208 @classmethod
209 209 def get_or_404(cls, id_, pyramid_exc=False):
210 210 if pyramid_exc:
211 211 # NOTE(marcink): backward compat, once migration to pyramid
212 212 # this should only use pyramid exceptions
213 213 from pyramid.httpexceptions import HTTPNotFound
214 214 else:
215 215 from webob.exc import HTTPNotFound
216 216
217 217 try:
218 218 id_ = int(id_)
219 219 except (TypeError, ValueError):
220 220 raise HTTPNotFound
221 221
222 222 res = cls.query().get(id_)
223 223 if not res:
224 224 raise HTTPNotFound
225 225 return res
226 226
227 227 @classmethod
228 228 def getAll(cls):
229 229 # deprecated and left for backward compatibility
230 230 return cls.get_all()
231 231
232 232 @classmethod
233 233 def get_all(cls):
234 234 return cls.query().all()
235 235
236 236 @classmethod
237 237 def delete(cls, id_):
238 238 obj = cls.query().get(id_)
239 239 Session().delete(obj)
240 240
241 241 @classmethod
242 242 def identity_cache(cls, session, attr_name, value):
243 243 exist_in_session = []
244 244 for (item_cls, pkey), instance in session.identity_map.items():
245 245 if cls == item_cls and getattr(instance, attr_name) == value:
246 246 exist_in_session.append(instance)
247 247 if exist_in_session:
248 248 if len(exist_in_session) == 1:
249 249 return exist_in_session[0]
250 250 log.exception(
251 251 'multiple objects with attr %s and '
252 252 'value %s found with same name: %r',
253 253 attr_name, value, exist_in_session)
254 254
255 255 def __repr__(self):
256 256 if hasattr(self, '__unicode__'):
257 257 # python repr needs to return str
258 258 try:
259 259 return safe_str(self.__unicode__())
260 260 except UnicodeDecodeError:
261 261 pass
262 262 return '<DB:%s>' % (self.__class__.__name__)
263 263
264 264
265 265 class RhodeCodeSetting(Base, BaseModel):
266 266 __tablename__ = 'rhodecode_settings'
267 267 __table_args__ = (
268 268 UniqueConstraint('app_settings_name'),
269 269 {'extend_existing': True, 'mysql_engine': 'InnoDB',
270 270 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
271 271 )
272 272
273 273 SETTINGS_TYPES = {
274 274 'str': safe_str,
275 275 'int': safe_int,
276 276 'unicode': safe_unicode,
277 277 'bool': str2bool,
278 278 'list': functools.partial(aslist, sep=',')
279 279 }
280 280 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
281 281 GLOBAL_CONF_KEY = 'app_settings'
282 282
283 283 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
284 284 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
285 285 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
286 286 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
287 287
288 288 def __init__(self, key='', val='', type='unicode'):
289 289 self.app_settings_name = key
290 290 self.app_settings_type = type
291 291 self.app_settings_value = val
292 292
293 293 @validates('_app_settings_value')
294 294 def validate_settings_value(self, key, val):
295 295 assert type(val) == unicode
296 296 return val
297 297
298 298 @hybrid_property
299 299 def app_settings_value(self):
300 300 v = self._app_settings_value
301 301 _type = self.app_settings_type
302 302 if _type:
303 303 _type = self.app_settings_type.split('.')[0]
304 304 # decode the encrypted value
305 305 if 'encrypted' in self.app_settings_type:
306 306 cipher = EncryptedTextValue()
307 307 v = safe_unicode(cipher.process_result_value(v, None))
308 308
309 309 converter = self.SETTINGS_TYPES.get(_type) or \
310 310 self.SETTINGS_TYPES['unicode']
311 311 return converter(v)
312 312
313 313 @app_settings_value.setter
314 314 def app_settings_value(self, val):
315 315 """
316 316 Setter that will always make sure we use unicode in app_settings_value
317 317
318 318 :param val:
319 319 """
320 320 val = safe_unicode(val)
321 321 # encode the encrypted value
322 322 if 'encrypted' in self.app_settings_type:
323 323 cipher = EncryptedTextValue()
324 324 val = safe_unicode(cipher.process_bind_param(val, None))
325 325 self._app_settings_value = val
326 326
327 327 @hybrid_property
328 328 def app_settings_type(self):
329 329 return self._app_settings_type
330 330
331 331 @app_settings_type.setter
332 332 def app_settings_type(self, val):
333 333 if val.split('.')[0] not in self.SETTINGS_TYPES:
334 334 raise Exception('type must be one of %s got %s'
335 335 % (self.SETTINGS_TYPES.keys(), val))
336 336 self._app_settings_type = val
337 337
338 338 def __unicode__(self):
339 339 return u"<%s('%s:%s[%s]')>" % (
340 340 self.__class__.__name__,
341 341 self.app_settings_name, self.app_settings_value,
342 342 self.app_settings_type
343 343 )
344 344
345 345
346 346 class RhodeCodeUi(Base, BaseModel):
347 347 __tablename__ = 'rhodecode_ui'
348 348 __table_args__ = (
349 349 UniqueConstraint('ui_key'),
350 350 {'extend_existing': True, 'mysql_engine': 'InnoDB',
351 351 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
352 352 )
353 353
354 354 HOOK_REPO_SIZE = 'changegroup.repo_size'
355 355 # HG
356 356 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
357 357 HOOK_PULL = 'outgoing.pull_logger'
358 358 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
359 359 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
360 360 HOOK_PUSH = 'changegroup.push_logger'
361 361 HOOK_PUSH_KEY = 'pushkey.key_push'
362 362
363 363 # TODO: johbo: Unify way how hooks are configured for git and hg,
364 364 # git part is currently hardcoded.
365 365
366 366 # SVN PATTERNS
367 367 SVN_BRANCH_ID = 'vcs_svn_branch'
368 368 SVN_TAG_ID = 'vcs_svn_tag'
369 369
370 370 ui_id = Column(
371 371 "ui_id", Integer(), nullable=False, unique=True, default=None,
372 372 primary_key=True)
373 373 ui_section = Column(
374 374 "ui_section", String(255), nullable=True, unique=None, default=None)
375 375 ui_key = Column(
376 376 "ui_key", String(255), nullable=True, unique=None, default=None)
377 377 ui_value = Column(
378 378 "ui_value", String(255), nullable=True, unique=None, default=None)
379 379 ui_active = Column(
380 380 "ui_active", Boolean(), nullable=True, unique=None, default=True)
381 381
382 382 def __repr__(self):
383 383 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
384 384 self.ui_key, self.ui_value)
385 385
386 386
387 387 class RepoRhodeCodeSetting(Base, BaseModel):
388 388 __tablename__ = 'repo_rhodecode_settings'
389 389 __table_args__ = (
390 390 UniqueConstraint(
391 391 'app_settings_name', 'repository_id',
392 392 name='uq_repo_rhodecode_setting_name_repo_id'),
393 393 {'extend_existing': True, 'mysql_engine': 'InnoDB',
394 394 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
395 395 )
396 396
397 397 repository_id = Column(
398 398 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
399 399 nullable=False)
400 400 app_settings_id = Column(
401 401 "app_settings_id", Integer(), nullable=False, unique=True,
402 402 default=None, primary_key=True)
403 403 app_settings_name = Column(
404 404 "app_settings_name", String(255), nullable=True, unique=None,
405 405 default=None)
406 406 _app_settings_value = Column(
407 407 "app_settings_value", String(4096), nullable=True, unique=None,
408 408 default=None)
409 409 _app_settings_type = Column(
410 410 "app_settings_type", String(255), nullable=True, unique=None,
411 411 default=None)
412 412
413 413 repository = relationship('Repository')
414 414
415 415 def __init__(self, repository_id, key='', val='', type='unicode'):
416 416 self.repository_id = repository_id
417 417 self.app_settings_name = key
418 418 self.app_settings_type = type
419 419 self.app_settings_value = val
420 420
421 421 @validates('_app_settings_value')
422 422 def validate_settings_value(self, key, val):
423 423 assert type(val) == unicode
424 424 return val
425 425
426 426 @hybrid_property
427 427 def app_settings_value(self):
428 428 v = self._app_settings_value
429 429 type_ = self.app_settings_type
430 430 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
431 431 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
432 432 return converter(v)
433 433
434 434 @app_settings_value.setter
435 435 def app_settings_value(self, val):
436 436 """
437 437 Setter that will always make sure we use unicode in app_settings_value
438 438
439 439 :param val:
440 440 """
441 441 self._app_settings_value = safe_unicode(val)
442 442
443 443 @hybrid_property
444 444 def app_settings_type(self):
445 445 return self._app_settings_type
446 446
447 447 @app_settings_type.setter
448 448 def app_settings_type(self, val):
449 449 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
450 450 if val not in SETTINGS_TYPES:
451 451 raise Exception('type must be one of %s got %s'
452 452 % (SETTINGS_TYPES.keys(), val))
453 453 self._app_settings_type = val
454 454
455 455 def __unicode__(self):
456 456 return u"<%s('%s:%s:%s[%s]')>" % (
457 457 self.__class__.__name__, self.repository.repo_name,
458 458 self.app_settings_name, self.app_settings_value,
459 459 self.app_settings_type
460 460 )
461 461
462 462
463 463 class RepoRhodeCodeUi(Base, BaseModel):
464 464 __tablename__ = 'repo_rhodecode_ui'
465 465 __table_args__ = (
466 466 UniqueConstraint(
467 467 'repository_id', 'ui_section', 'ui_key',
468 468 name='uq_repo_rhodecode_ui_repository_id_section_key'),
469 469 {'extend_existing': True, 'mysql_engine': 'InnoDB',
470 470 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
471 471 )
472 472
473 473 repository_id = Column(
474 474 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
475 475 nullable=False)
476 476 ui_id = Column(
477 477 "ui_id", Integer(), nullable=False, unique=True, default=None,
478 478 primary_key=True)
479 479 ui_section = Column(
480 480 "ui_section", String(255), nullable=True, unique=None, default=None)
481 481 ui_key = Column(
482 482 "ui_key", String(255), nullable=True, unique=None, default=None)
483 483 ui_value = Column(
484 484 "ui_value", String(255), nullable=True, unique=None, default=None)
485 485 ui_active = Column(
486 486 "ui_active", Boolean(), nullable=True, unique=None, default=True)
487 487
488 488 repository = relationship('Repository')
489 489
490 490 def __repr__(self):
491 491 return '<%s[%s:%s]%s=>%s]>' % (
492 492 self.__class__.__name__, self.repository.repo_name,
493 493 self.ui_section, self.ui_key, self.ui_value)
494 494
495 495
496 496 class User(Base, BaseModel):
497 497 __tablename__ = 'users'
498 498 __table_args__ = (
499 499 UniqueConstraint('username'), UniqueConstraint('email'),
500 500 Index('u_username_idx', 'username'),
501 501 Index('u_email_idx', 'email'),
502 502 {'extend_existing': True, 'mysql_engine': 'InnoDB',
503 503 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
504 504 )
505 505 DEFAULT_USER = 'default'
506 506 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
507 507 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
508 508
509 509 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
510 510 username = Column("username", String(255), nullable=True, unique=None, default=None)
511 511 password = Column("password", String(255), nullable=True, unique=None, default=None)
512 512 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
513 513 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
514 514 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
515 515 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
516 516 _email = Column("email", String(255), nullable=True, unique=None, default=None)
517 517 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
518 518 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
519 519
520 520 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
521 521 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
522 522 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
523 523 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
524 524 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
525 525 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
526 526
527 527 user_log = relationship('UserLog')
528 528 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
529 529
530 530 repositories = relationship('Repository')
531 531 repository_groups = relationship('RepoGroup')
532 532 user_groups = relationship('UserGroup')
533 533
534 534 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
535 535 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
536 536
537 537 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
538 538 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
539 539 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
540 540
541 541 group_member = relationship('UserGroupMember', cascade='all')
542 542
543 543 notifications = relationship('UserNotification', cascade='all')
544 544 # notifications assigned to this user
545 545 user_created_notifications = relationship('Notification', cascade='all')
546 546 # comments created by this user
547 547 user_comments = relationship('ChangesetComment', cascade='all')
548 548 # user profile extra info
549 549 user_emails = relationship('UserEmailMap', cascade='all')
550 550 user_ip_map = relationship('UserIpMap', cascade='all')
551 551 user_auth_tokens = relationship('UserApiKeys', cascade='all')
552 552 # gists
553 553 user_gists = relationship('Gist', cascade='all')
554 554 # user pull requests
555 555 user_pull_requests = relationship('PullRequest', cascade='all')
556 556 # external identities
557 557 extenal_identities = relationship(
558 558 'ExternalIdentity',
559 559 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
560 560 cascade='all')
561 561
562 562 def __unicode__(self):
563 563 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
564 564 self.user_id, self.username)
565 565
566 566 @hybrid_property
567 567 def email(self):
568 568 return self._email
569 569
570 570 @email.setter
571 571 def email(self, val):
572 572 self._email = val.lower() if val else None
573 573
574 574 @hybrid_property
575 575 def first_name(self):
576 576 from rhodecode.lib import helpers as h
577 if self.name:
577 578 return h.escape(self.name)
579 return self.name
578 580
579 581 @hybrid_property
580 582 def last_name(self):
581 583 from rhodecode.lib import helpers as h
584 if self.lastname:
582 585 return h.escape(self.lastname)
586 return self.lastname
583 587
584 588 @hybrid_property
585 589 def api_key(self):
586 590 """
587 591 Fetch, if one exists, an auth-token with role ALL connected to this user
588 592 """
589 593 user_auth_token = UserApiKeys.query()\
590 594 .filter(UserApiKeys.user_id == self.user_id)\
591 595 .filter(or_(UserApiKeys.expires == -1,
592 596 UserApiKeys.expires >= time.time()))\
593 597 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
594 598 if user_auth_token:
595 599 user_auth_token = user_auth_token.api_key
596 600
597 601 return user_auth_token
598 602
599 603 @api_key.setter
600 604 def api_key(self, val):
601 605 # don't allow to set API key this is deprecated for now
602 606 self._api_key = None
603 607
604 608 @property
605 609 def firstname(self):
606 610 # alias for future
607 611 return self.name
608 612
609 613 @property
610 614 def emails(self):
611 615 other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
612 616 return [self.email] + [x.email for x in other]
613 617
614 618 @property
615 619 def auth_tokens(self):
616 620 return [x.api_key for x in self.extra_auth_tokens]
617 621
618 622 @property
619 623 def extra_auth_tokens(self):
620 624 return UserApiKeys.query().filter(UserApiKeys.user == self).all()
621 625
622 626 @property
623 627 def feed_token(self):
624 628 return self.get_feed_token()
625 629
626 630 def get_feed_token(self):
627 631 feed_tokens = UserApiKeys.query()\
628 632 .filter(UserApiKeys.user == self)\
629 633 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
630 634 .all()
631 635 if feed_tokens:
632 636 return feed_tokens[0].api_key
633 637 return 'NO_FEED_TOKEN_AVAILABLE'
634 638
635 639 @classmethod
636 640 def extra_valid_auth_tokens(cls, user, role=None):
637 641 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
638 642 .filter(or_(UserApiKeys.expires == -1,
639 643 UserApiKeys.expires >= time.time()))
640 644 if role:
641 645 tokens = tokens.filter(or_(UserApiKeys.role == role,
642 646 UserApiKeys.role == UserApiKeys.ROLE_ALL))
643 647 return tokens.all()
644 648
645 649 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
646 650 from rhodecode.lib import auth
647 651
648 652 log.debug('Trying to authenticate user: %s via auth-token, '
649 653 'and roles: %s', self, roles)
650 654
651 655 if not auth_token:
652 656 return False
653 657
654 658 crypto_backend = auth.crypto_backend()
655 659
656 660 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
657 661 tokens_q = UserApiKeys.query()\
658 662 .filter(UserApiKeys.user_id == self.user_id)\
659 663 .filter(or_(UserApiKeys.expires == -1,
660 664 UserApiKeys.expires >= time.time()))
661 665
662 666 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
663 667
664 668 plain_tokens = []
665 669 hash_tokens = []
666 670
667 671 for token in tokens_q.all():
668 672 # verify scope first
669 673 if token.repo_id:
670 674 # token has a scope, we need to verify it
671 675 if scope_repo_id != token.repo_id:
672 676 log.debug(
673 677 'Scope mismatch: token has a set repo scope: %s, '
674 678 'and calling scope is:%s, skipping further checks',
675 679 token.repo, scope_repo_id)
676 680 # token has a scope, and it doesn't match, skip token
677 681 continue
678 682
679 683 if token.api_key.startswith(crypto_backend.ENC_PREF):
680 684 hash_tokens.append(token.api_key)
681 685 else:
682 686 plain_tokens.append(token.api_key)
683 687
684 688 is_plain_match = auth_token in plain_tokens
685 689 if is_plain_match:
686 690 return True
687 691
688 692 for hashed in hash_tokens:
689 693 # TODO(marcink): this is expensive to calculate, but most secure
690 694 match = crypto_backend.hash_check(auth_token, hashed)
691 695 if match:
692 696 return True
693 697
694 698 return False
695 699
696 700 @property
697 701 def ip_addresses(self):
698 702 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
699 703 return [x.ip_addr for x in ret]
700 704
701 705 @property
702 706 def username_and_name(self):
703 return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
707 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
704 708
705 709 @property
706 710 def username_or_name_or_email(self):
707 711 full_name = self.full_name if self.full_name != ' ' else None
708 712 return self.username or full_name or self.email
709 713
710 714 @property
711 715 def full_name(self):
712 return '%s %s' % (self.firstname, self.lastname)
716 return '%s %s' % (self.first_name, self.last_name)
713 717
714 718 @property
715 719 def full_name_or_username(self):
716 return ('%s %s' % (self.firstname, self.lastname)
717 if (self.firstname and self.lastname) else self.username)
720 return ('%s %s' % (self.first_name, self.last_name)
721 if (self.first_name and self.last_name) else self.username)
718 722
719 723 @property
720 724 def full_contact(self):
721 return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
725 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
722 726
723 727 @property
724 728 def short_contact(self):
725 return '%s %s' % (self.firstname, self.lastname)
729 return '%s %s' % (self.first_name, self.last_name)
726 730
727 731 @property
728 732 def is_admin(self):
729 733 return self.admin
730 734
731 735 @property
732 736 def AuthUser(self):
733 737 """
734 738 Returns instance of AuthUser for this user
735 739 """
736 740 from rhodecode.lib.auth import AuthUser
737 741 return AuthUser(user_id=self.user_id, username=self.username)
738 742
739 743 @hybrid_property
740 744 def user_data(self):
741 745 if not self._user_data:
742 746 return {}
743 747
744 748 try:
745 749 return json.loads(self._user_data)
746 750 except TypeError:
747 751 return {}
748 752
749 753 @user_data.setter
750 754 def user_data(self, val):
751 755 if not isinstance(val, dict):
752 756 raise Exception('user_data must be dict, got %s' % type(val))
753 757 try:
754 758 self._user_data = json.dumps(val)
755 759 except Exception:
756 760 log.error(traceback.format_exc())
757 761
758 762 @classmethod
759 763 def get_by_username(cls, username, case_insensitive=False,
760 764 cache=False, identity_cache=False):
761 765 session = Session()
762 766
763 767 if case_insensitive:
764 768 q = cls.query().filter(
765 769 func.lower(cls.username) == func.lower(username))
766 770 else:
767 771 q = cls.query().filter(cls.username == username)
768 772
769 773 if cache:
770 774 if identity_cache:
771 775 val = cls.identity_cache(session, 'username', username)
772 776 if val:
773 777 return val
774 778 else:
775 779 cache_key = "get_user_by_name_%s" % _hash_key(username)
776 780 q = q.options(
777 781 FromCache("sql_cache_short", cache_key))
778 782
779 783 return q.scalar()
780 784
781 785 @classmethod
782 786 def get_by_auth_token(cls, auth_token, cache=False):
783 787 q = UserApiKeys.query()\
784 788 .filter(UserApiKeys.api_key == auth_token)\
785 789 .filter(or_(UserApiKeys.expires == -1,
786 790 UserApiKeys.expires >= time.time()))
787 791 if cache:
788 792 q = q.options(
789 793 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
790 794
791 795 match = q.first()
792 796 if match:
793 797 return match.user
794 798
795 799 @classmethod
796 800 def get_by_email(cls, email, case_insensitive=False, cache=False):
797 801
798 802 if case_insensitive:
799 803 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
800 804
801 805 else:
802 806 q = cls.query().filter(cls.email == email)
803 807
804 808 email_key = _hash_key(email)
805 809 if cache:
806 810 q = q.options(
807 811 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
808 812
809 813 ret = q.scalar()
810 814 if ret is None:
811 815 q = UserEmailMap.query()
812 816 # try fetching in alternate email map
813 817 if case_insensitive:
814 818 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
815 819 else:
816 820 q = q.filter(UserEmailMap.email == email)
817 821 q = q.options(joinedload(UserEmailMap.user))
818 822 if cache:
819 823 q = q.options(
820 824 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
821 825 ret = getattr(q.scalar(), 'user', None)
822 826
823 827 return ret
824 828
825 829 @classmethod
826 830 def get_from_cs_author(cls, author):
827 831 """
828 832 Tries to get User objects out of commit author string
829 833
830 834 :param author:
831 835 """
832 836 from rhodecode.lib.helpers import email, author_name
833 837 # Valid email in the attribute passed, see if they're in the system
834 838 _email = email(author)
835 839 if _email:
836 840 user = cls.get_by_email(_email, case_insensitive=True)
837 841 if user:
838 842 return user
839 843 # Maybe we can match by username?
840 844 _author = author_name(author)
841 845 user = cls.get_by_username(_author, case_insensitive=True)
842 846 if user:
843 847 return user
844 848
845 849 def update_userdata(self, **kwargs):
846 850 usr = self
847 851 old = usr.user_data
848 852 old.update(**kwargs)
849 853 usr.user_data = old
850 854 Session().add(usr)
851 855 log.debug('updated userdata with %s', kwargs)
852 856
853 857 def update_lastlogin(self):
854 858 """Update user lastlogin"""
855 859 self.last_login = datetime.datetime.now()
856 860 Session().add(self)
857 861 log.debug('updated user %s lastlogin', self.username)
858 862
859 863 def update_lastactivity(self):
860 864 """Update user lastactivity"""
861 865 self.last_activity = datetime.datetime.now()
862 866 Session().add(self)
863 867 log.debug('updated user %s lastactivity', self.username)
864 868
865 869 def update_password(self, new_password):
866 870 from rhodecode.lib.auth import get_crypt_password
867 871
868 872 self.password = get_crypt_password(new_password)
869 873 Session().add(self)
870 874
871 875 @classmethod
872 876 def get_first_super_admin(cls):
873 877 user = User.query().filter(User.admin == true()).first()
874 878 if user is None:
875 879 raise Exception('FATAL: Missing administrative account!')
876 880 return user
877 881
878 882 @classmethod
879 883 def get_all_super_admins(cls):
880 884 """
881 885 Returns all admin accounts sorted by username
882 886 """
883 887 return User.query().filter(User.admin == true())\
884 888 .order_by(User.username.asc()).all()
885 889
886 890 @classmethod
887 891 def get_default_user(cls, cache=False, refresh=False):
888 892 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
889 893 if user is None:
890 894 raise Exception('FATAL: Missing default account!')
891 895 if refresh:
892 896 # The default user might be based on outdated state which
893 897 # has been loaded from the cache.
894 898 # A call to refresh() ensures that the
895 899 # latest state from the database is used.
896 900 Session().refresh(user)
897 901 return user
898 902
899 903 def _get_default_perms(self, user, suffix=''):
900 904 from rhodecode.model.permission import PermissionModel
901 905 return PermissionModel().get_default_perms(user.user_perms, suffix)
902 906
903 907 def get_default_perms(self, suffix=''):
904 908 return self._get_default_perms(self, suffix)
905 909
906 910 def get_api_data(self, include_secrets=False, details='full'):
907 911 """
908 912 Common function for generating user related data for API
909 913
910 914 :param include_secrets: By default secrets in the API data will be replaced
911 915 by a placeholder value to prevent exposing this data by accident. In case
912 916 this data shall be exposed, set this flag to ``True``.
913 917
914 918 :param details: details can be 'basic|full' basic gives only a subset of
915 919 the available user information that includes user_id, name and emails.
916 920 """
917 921 user = self
918 922 user_data = self.user_data
919 923 data = {
920 924 'user_id': user.user_id,
921 925 'username': user.username,
922 926 'firstname': user.name,
923 927 'lastname': user.lastname,
924 928 'email': user.email,
925 929 'emails': user.emails,
926 930 }
927 931 if details == 'basic':
928 932 return data
929 933
930 934 api_key_length = 40
931 935 api_key_replacement = '*' * api_key_length
932 936
933 937 extras = {
934 938 'api_keys': [api_key_replacement],
935 939 'auth_tokens': [api_key_replacement],
936 940 'active': user.active,
937 941 'admin': user.admin,
938 942 'extern_type': user.extern_type,
939 943 'extern_name': user.extern_name,
940 944 'last_login': user.last_login,
941 945 'last_activity': user.last_activity,
942 946 'ip_addresses': user.ip_addresses,
943 947 'language': user_data.get('language')
944 948 }
945 949 data.update(extras)
946 950
947 951 if include_secrets:
948 952 data['api_keys'] = user.auth_tokens
949 953 data['auth_tokens'] = user.extra_auth_tokens
950 954 return data
951 955
952 956 def __json__(self):
953 957 data = {
954 958 'full_name': self.full_name,
955 959 'full_name_or_username': self.full_name_or_username,
956 960 'short_contact': self.short_contact,
957 961 'full_contact': self.full_contact,
958 962 }
959 963 data.update(self.get_api_data())
960 964 return data
961 965
962 966
963 967 class UserApiKeys(Base, BaseModel):
964 968 __tablename__ = 'user_api_keys'
965 969 __table_args__ = (
966 970 Index('uak_api_key_idx', 'api_key'),
967 971 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
968 972 UniqueConstraint('api_key'),
969 973 {'extend_existing': True, 'mysql_engine': 'InnoDB',
970 974 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
971 975 )
972 976 __mapper_args__ = {}
973 977
974 978 # ApiKey role
975 979 ROLE_ALL = 'token_role_all'
976 980 ROLE_HTTP = 'token_role_http'
977 981 ROLE_VCS = 'token_role_vcs'
978 982 ROLE_API = 'token_role_api'
979 983 ROLE_FEED = 'token_role_feed'
980 984 ROLE_PASSWORD_RESET = 'token_password_reset'
981 985
982 986 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
983 987
984 988 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
985 989 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
986 990 api_key = Column("api_key", String(255), nullable=False, unique=True)
987 991 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
988 992 expires = Column('expires', Float(53), nullable=False)
989 993 role = Column('role', String(255), nullable=True)
990 994 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
991 995
992 996 # scope columns
993 997 repo_id = Column(
994 998 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
995 999 nullable=True, unique=None, default=None)
996 1000 repo = relationship('Repository', lazy='joined')
997 1001
998 1002 repo_group_id = Column(
999 1003 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1000 1004 nullable=True, unique=None, default=None)
1001 1005 repo_group = relationship('RepoGroup', lazy='joined')
1002 1006
1003 1007 user = relationship('User', lazy='joined')
1004 1008
1005 1009 def __unicode__(self):
1006 1010 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1007 1011
1008 1012 def __json__(self):
1009 1013 data = {
1010 1014 'auth_token': self.api_key,
1011 1015 'role': self.role,
1012 1016 'scope': self.scope_humanized,
1013 1017 'expired': self.expired
1014 1018 }
1015 1019 return data
1016 1020
1017 1021 @property
1018 1022 def expired(self):
1019 1023 if self.expires == -1:
1020 1024 return False
1021 1025 return time.time() > self.expires
1022 1026
1023 1027 @classmethod
1024 1028 def _get_role_name(cls, role):
1025 1029 return {
1026 1030 cls.ROLE_ALL: _('all'),
1027 1031 cls.ROLE_HTTP: _('http/web interface'),
1028 1032 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1029 1033 cls.ROLE_API: _('api calls'),
1030 1034 cls.ROLE_FEED: _('feed access'),
1031 1035 }.get(role, role)
1032 1036
1033 1037 @property
1034 1038 def role_humanized(self):
1035 1039 return self._get_role_name(self.role)
1036 1040
1037 1041 def _get_scope(self):
1038 1042 if self.repo:
1039 1043 return repr(self.repo)
1040 1044 if self.repo_group:
1041 1045 return repr(self.repo_group) + ' (recursive)'
1042 1046 return 'global'
1043 1047
1044 1048 @property
1045 1049 def scope_humanized(self):
1046 1050 return self._get_scope()
1047 1051
1048 1052
1049 1053 class UserEmailMap(Base, BaseModel):
1050 1054 __tablename__ = 'user_email_map'
1051 1055 __table_args__ = (
1052 1056 Index('uem_email_idx', 'email'),
1053 1057 UniqueConstraint('email'),
1054 1058 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1055 1059 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1056 1060 )
1057 1061 __mapper_args__ = {}
1058 1062
1059 1063 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1060 1064 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1061 1065 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1062 1066 user = relationship('User', lazy='joined')
1063 1067
1064 1068 @validates('_email')
1065 1069 def validate_email(self, key, email):
1066 1070 # check if this email is not main one
1067 1071 main_email = Session().query(User).filter(User.email == email).scalar()
1068 1072 if main_email is not None:
1069 1073 raise AttributeError('email %s is present in user table' % email)
1070 1074 return email
1071 1075
1072 1076 @hybrid_property
1073 1077 def email(self):
1074 1078 return self._email
1075 1079
1076 1080 @email.setter
1077 1081 def email(self, val):
1078 1082 self._email = val.lower() if val else None
1079 1083
1080 1084
1081 1085 class UserIpMap(Base, BaseModel):
1082 1086 __tablename__ = 'user_ip_map'
1083 1087 __table_args__ = (
1084 1088 UniqueConstraint('user_id', 'ip_addr'),
1085 1089 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1086 1090 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1087 1091 )
1088 1092 __mapper_args__ = {}
1089 1093
1090 1094 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1091 1095 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1092 1096 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1093 1097 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1094 1098 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1095 1099 user = relationship('User', lazy='joined')
1096 1100
1097 1101 @classmethod
1098 1102 def _get_ip_range(cls, ip_addr):
1099 1103 net = ipaddress.ip_network(ip_addr, strict=False)
1100 1104 return [str(net.network_address), str(net.broadcast_address)]
1101 1105
1102 1106 def __json__(self):
1103 1107 return {
1104 1108 'ip_addr': self.ip_addr,
1105 1109 'ip_range': self._get_ip_range(self.ip_addr),
1106 1110 }
1107 1111
1108 1112 def __unicode__(self):
1109 1113 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1110 1114 self.user_id, self.ip_addr)
1111 1115
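
The range expansion in `UserIpMap._get_ip_range` is plain `ipaddress` usage; the sample CIDR below is arbitrary:

import ipaddress

net = ipaddress.ip_network(u'192.168.1.17/24', strict=False)
print(str(net.network_address), str(net.broadcast_address))
# 192.168.1.0 192.168.1.255
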
1112 1116
1113 1117 class UserLog(Base, BaseModel):
1114 1118 __tablename__ = 'user_logs'
1115 1119 __table_args__ = (
1116 1120 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1117 1121 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1118 1122 )
1119 1123 VERSION_1 = 'v1'
1120 1124 VERSION_2 = 'v2'
1121 1125 VERSIONS = [VERSION_1, VERSION_2]
1122 1126
1123 1127 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1124 1128 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1125 1129 username = Column("username", String(255), nullable=True, unique=None, default=None)
1126 1130 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
1127 1131 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1128 1132 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1129 1133 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1130 1134 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1131 1135
1132 1136 version = Column("version", String(255), nullable=True, default=VERSION_1)
1133 1137 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
1134 1138 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
1135 1139
1136 1140 def __unicode__(self):
1137 1141 return u"<%s('id:%s:%s')>" % (
1138 1142 self.__class__.__name__, self.repository_name, self.action)
1139 1143
1140 1144 def __json__(self):
1141 1145 return {
1142 1146 'user_id': self.user_id,
1143 1147 'username': self.username,
1144 1148 'repository_id': self.repository_id,
1145 1149 'repository_name': self.repository_name,
1146 1150 'user_ip': self.user_ip,
1147 1151 'action_date': self.action_date,
1148 1152 'action': self.action,
1149 1153 }
1150 1154
1151 1155 @property
1152 1156 def action_as_day(self):
1153 1157 return datetime.date(*self.action_date.timetuple()[:3])
1154 1158
1155 1159 user = relationship('User')
1156 1160 repository = relationship('Repository', cascade='')
1157 1161
1158 1162
1159 1163 class UserGroup(Base, BaseModel):
1160 1164 __tablename__ = 'users_groups'
1161 1165 __table_args__ = (
1162 1166 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1163 1167 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1164 1168 )
1165 1169
1166 1170 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1167 1171 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1168 1172 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1169 1173 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1170 1174 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1171 1175 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1172 1176 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1173 1177 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1174 1178
1175 1179 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1176 1180 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1177 1181 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1178 1182 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1179 1183 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1180 1184 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1181 1185
1182 1186 user = relationship('User')
1183 1187
1184 1188 @hybrid_property
1185 1189 def group_data(self):
1186 1190 if not self._group_data:
1187 1191 return {}
1188 1192
1189 1193 try:
1190 1194 return json.loads(self._group_data)
1191 1195 except TypeError:
1192 1196 return {}
1193 1197
1194 1198 @group_data.setter
1195 1199 def group_data(self, val):
1196 1200 try:
1197 1201 self._group_data = json.dumps(val)
1198 1202 except Exception:
1199 1203 log.error(traceback.format_exc())
1200 1204
1201 1205 def __unicode__(self):
1202 1206 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1203 1207 self.users_group_id,
1204 1208 self.users_group_name)
1205 1209
1206 1210 @classmethod
1207 1211 def get_by_group_name(cls, group_name, cache=False,
1208 1212 case_insensitive=False):
1209 1213 if case_insensitive:
1210 1214 q = cls.query().filter(func.lower(cls.users_group_name) ==
1211 1215 func.lower(group_name))
1212 1216
1213 1217 else:
1214 1218 q = cls.query().filter(cls.users_group_name == group_name)
1215 1219 if cache:
1216 1220 q = q.options(
1217 1221 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1218 1222 return q.scalar()
1219 1223
1220 1224 @classmethod
1221 1225 def get(cls, user_group_id, cache=False):
1222 1226 user_group = cls.query()
1223 1227 if cache:
1224 1228 user_group = user_group.options(
1225 1229 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1226 1230 return user_group.get(user_group_id)
1227 1231
1228 1232 def permissions(self, with_admins=True, with_owner=True):
1229 1233 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1230 1234 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1231 1235 joinedload(UserUserGroupToPerm.user),
1232 1236 joinedload(UserUserGroupToPerm.permission),)
1233 1237
1234 1238 # get owners, admins and permissions. We re-write the objects coming
1235 1239 # from sqlalchemy into plain AttributeDict copies because the sqlalchemy
1236 1240 # session keeps a global reference, so changing one object would
1237 1241 # propagate to all others. Without the copy, if an admin is also the
1238 1242 # owner, setting admin_row on one row would change both rows.
1239 1243 perm_rows = []
1240 1244 for _usr in q.all():
1241 1245 usr = AttributeDict(_usr.user.get_dict())
1242 1246 usr.permission = _usr.permission.permission_name
1243 1247 perm_rows.append(usr)
1244 1248
1245 1249 # sort the perm rows: the 'default' user first, then by
1246 1250 # admin, write, read, none permission, alphabetically within
1247 1251 # each permission group
1248 1252 perm_rows = sorted(perm_rows, key=display_sort)
1249 1253
1250 1254 _admin_perm = 'usergroup.admin'
1251 1255 owner_row = []
1252 1256 if with_owner:
1253 1257 usr = AttributeDict(self.user.get_dict())
1254 1258 usr.owner_row = True
1255 1259 usr.permission = _admin_perm
1256 1260 owner_row.append(usr)
1257 1261
1258 1262 super_admin_rows = []
1259 1263 if with_admins:
1260 1264 for usr in User.get_all_super_admins():
1261 1265 # if this admin is also owner, don't double the record
1262 1266 if usr.user_id == owner_row[0].user_id:
1263 1267 owner_row[0].admin_row = True
1264 1268 else:
1265 1269 usr = AttributeDict(usr.get_dict())
1266 1270 usr.admin_row = True
1267 1271 usr.permission = _admin_perm
1268 1272 super_admin_rows.append(usr)
1269 1273
1270 1274 return super_admin_rows + owner_row + perm_rows
1271 1275
1272 1276 def permission_user_groups(self):
1273 1277 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1274 1278 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1275 1279 joinedload(UserGroupUserGroupToPerm.target_user_group),
1276 1280 joinedload(UserGroupUserGroupToPerm.permission),)
1277 1281
1278 1282 perm_rows = []
1279 1283 for _user_group in q.all():
1280 1284 usr = AttributeDict(_user_group.user_group.get_dict())
1281 1285 usr.permission = _user_group.permission.permission_name
1282 1286 perm_rows.append(usr)
1283 1287
1284 1288 return perm_rows
1285 1289
1286 1290 def _get_default_perms(self, user_group, suffix=''):
1287 1291 from rhodecode.model.permission import PermissionModel
1288 1292 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1289 1293
1290 1294 def get_default_perms(self, suffix=''):
1291 1295 return self._get_default_perms(self, suffix)
1292 1296
1293 1297 def get_api_data(self, with_group_members=True, include_secrets=False):
1294 1298 """
1295 1299 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1296 1300 basically forwarded.
1297 1301
1298 1302 """
1299 1303 user_group = self
1300 1304 data = {
1301 1305 'users_group_id': user_group.users_group_id,
1302 1306 'group_name': user_group.users_group_name,
1303 1307 'group_description': user_group.user_group_description,
1304 1308 'active': user_group.users_group_active,
1305 1309 'owner': user_group.user.username,
1306 1310 'owner_email': user_group.user.email,
1307 1311 }
1308 1312
1309 1313 if with_group_members:
1310 1314 users = []
1311 1315 for user in user_group.members:
1312 1316 user = user.user
1313 1317 users.append(user.get_api_data(include_secrets=include_secrets))
1314 1318 data['users'] = users
1315 1319
1316 1320 return data
1317 1321
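# A minimal consumption sketch for UserGroup.permissions() above (illustrative
# only, not used elsewhere in this module). It assumes an existing, attached
# UserGroup instance and that the copied user dict exposes a `username` key.
def _example_user_group_permission_rows(user_group):
    summary = []
    for row in user_group.permissions(with_admins=True, with_owner=True):
        if getattr(row, 'owner_row', False):
            kind = 'owner'
        elif getattr(row, 'admin_row', False):
            kind = 'super-admin'
        else:
            kind = 'member'
        # row.permission is e.g. 'usergroup.read' or 'usergroup.admin'
        summary.append((row.username, row.permission, kind))
    return summary
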
1318 1322
1319 1323 class UserGroupMember(Base, BaseModel):
1320 1324 __tablename__ = 'users_groups_members'
1321 1325 __table_args__ = (
1322 1326 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1323 1327 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1324 1328 )
1325 1329
1326 1330 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1327 1331 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1328 1332 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1329 1333
1330 1334 user = relationship('User', lazy='joined')
1331 1335 users_group = relationship('UserGroup')
1332 1336
1333 1337 def __init__(self, gr_id='', u_id=''):
1334 1338 self.users_group_id = gr_id
1335 1339 self.user_id = u_id
1336 1340
1337 1341
1338 1342 class RepositoryField(Base, BaseModel):
1339 1343 __tablename__ = 'repositories_fields'
1340 1344 __table_args__ = (
1341 1345 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1342 1346 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1343 1347 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1344 1348 )
1345 1349 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1346 1350
1347 1351 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1348 1352 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1349 1353 field_key = Column("field_key", String(250))
1350 1354 field_label = Column("field_label", String(1024), nullable=False)
1351 1355 field_value = Column("field_value", String(10000), nullable=False)
1352 1356 field_desc = Column("field_desc", String(1024), nullable=False)
1353 1357 field_type = Column("field_type", String(255), nullable=False, unique=None)
1354 1358 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1355 1359
1356 1360 repository = relationship('Repository')
1357 1361
1358 1362 @property
1359 1363 def field_key_prefixed(self):
1360 1364 return 'ex_%s' % self.field_key
1361 1365
1362 1366 @classmethod
1363 1367 def un_prefix_key(cls, key):
1364 1368 if key.startswith(cls.PREFIX):
1365 1369 return key[len(cls.PREFIX):]
1366 1370 return key
1367 1371
1368 1372 @classmethod
1369 1373 def get_by_key_name(cls, key, repo):
1370 1374 row = cls.query()\
1371 1375 .filter(cls.repository == repo)\
1372 1376 .filter(cls.field_key == key).scalar()
1373 1377 return row
1374 1378
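# Standalone sketch of the 'ex_' prefix round-trip implemented by
# RepositoryField above; no database access is needed because
# un_prefix_key() is a plain classmethod.
def _example_repository_field_prefix():
    key = 'ticket_system'                    # hypothetical field key
    prefixed = RepositoryField.PREFIX + key  # what field_key_prefixed produces
    assert RepositoryField.un_prefix_key(prefixed) == key
    assert RepositoryField.un_prefix_key(key) == key  # unprefixed keys pass through
    return prefixed
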
1375 1379
1376 1380 class Repository(Base, BaseModel):
1377 1381 __tablename__ = 'repositories'
1378 1382 __table_args__ = (
1379 1383 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1380 1384 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1381 1385 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1382 1386 )
1383 1387 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1384 1388 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1385 1389
1386 1390 STATE_CREATED = 'repo_state_created'
1387 1391 STATE_PENDING = 'repo_state_pending'
1388 1392 STATE_ERROR = 'repo_state_error'
1389 1393
1390 1394 LOCK_AUTOMATIC = 'lock_auto'
1391 1395 LOCK_API = 'lock_api'
1392 1396 LOCK_WEB = 'lock_web'
1393 1397 LOCK_PULL = 'lock_pull'
1394 1398
1395 1399 NAME_SEP = URL_SEP
1396 1400
1397 1401 repo_id = Column(
1398 1402 "repo_id", Integer(), nullable=False, unique=True, default=None,
1399 1403 primary_key=True)
1400 1404 _repo_name = Column(
1401 1405 "repo_name", Text(), nullable=False, default=None)
1402 1406 _repo_name_hash = Column(
1403 1407 "repo_name_hash", String(255), nullable=False, unique=True)
1404 1408 repo_state = Column("repo_state", String(255), nullable=True)
1405 1409
1406 1410 clone_uri = Column(
1407 1411 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1408 1412 default=None)
1409 1413 repo_type = Column(
1410 1414 "repo_type", String(255), nullable=False, unique=False, default=None)
1411 1415 user_id = Column(
1412 1416 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1413 1417 unique=False, default=None)
1414 1418 private = Column(
1415 1419 "private", Boolean(), nullable=True, unique=None, default=None)
1416 1420 enable_statistics = Column(
1417 1421 "statistics", Boolean(), nullable=True, unique=None, default=True)
1418 1422 enable_downloads = Column(
1419 1423 "downloads", Boolean(), nullable=True, unique=None, default=True)
1420 1424 description = Column(
1421 1425 "description", String(10000), nullable=True, unique=None, default=None)
1422 1426 created_on = Column(
1423 1427 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1424 1428 default=datetime.datetime.now)
1425 1429 updated_on = Column(
1426 1430 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1427 1431 default=datetime.datetime.now)
1428 1432 _landing_revision = Column(
1429 1433 "landing_revision", String(255), nullable=False, unique=False,
1430 1434 default=None)
1431 1435 enable_locking = Column(
1432 1436 "enable_locking", Boolean(), nullable=False, unique=None,
1433 1437 default=False)
1434 1438 _locked = Column(
1435 1439 "locked", String(255), nullable=True, unique=False, default=None)
1436 1440 _changeset_cache = Column(
1437 1441 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1438 1442
1439 1443 fork_id = Column(
1440 1444 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1441 1445 nullable=True, unique=False, default=None)
1442 1446 group_id = Column(
1443 1447 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1444 1448 unique=False, default=None)
1445 1449
1446 1450 user = relationship('User', lazy='joined')
1447 1451 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1448 1452 group = relationship('RepoGroup', lazy='joined')
1449 1453 repo_to_perm = relationship(
1450 1454 'UserRepoToPerm', cascade='all',
1451 1455 order_by='UserRepoToPerm.repo_to_perm_id')
1452 1456 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1453 1457 stats = relationship('Statistics', cascade='all', uselist=False)
1454 1458
1455 1459 followers = relationship(
1456 1460 'UserFollowing',
1457 1461 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1458 1462 cascade='all')
1459 1463 extra_fields = relationship(
1460 1464 'RepositoryField', cascade="all, delete, delete-orphan")
1461 1465 logs = relationship('UserLog')
1462 1466 comments = relationship(
1463 1467 'ChangesetComment', cascade="all, delete, delete-orphan")
1464 1468 pull_requests_source = relationship(
1465 1469 'PullRequest',
1466 1470 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1467 1471 cascade="all, delete, delete-orphan")
1468 1472 pull_requests_target = relationship(
1469 1473 'PullRequest',
1470 1474 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1471 1475 cascade="all, delete, delete-orphan")
1472 1476 ui = relationship('RepoRhodeCodeUi', cascade="all")
1473 1477 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1474 1478 integrations = relationship('Integration',
1475 1479 cascade="all, delete, delete-orphan")
1476 1480
1477 1481 def __unicode__(self):
1478 1482 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1479 1483 safe_unicode(self.repo_name))
1480 1484
1481 1485 @hybrid_property
1482 1486 def landing_rev(self):
1483 1487 # always should return [rev_type, rev]
1484 1488 if self._landing_revision:
1485 1489 _rev_info = self._landing_revision.split(':')
1486 1490 if len(_rev_info) < 2:
1487 1491 _rev_info.insert(0, 'rev')
1488 1492 return [_rev_info[0], _rev_info[1]]
1489 1493 return [None, None]
1490 1494
1491 1495 @landing_rev.setter
1492 1496 def landing_rev(self, val):
1493 1497 if ':' not in val:
1494 1498 raise ValueError('value must be delimited with `:` and consist '
1495 1499 'of <rev_type>:<rev>, got %s instead' % val)
1496 1500 self._landing_revision = val
1497 1501
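    def _example_landing_rev_roundtrip(self):
        # Illustrative sketch only (an assumed helper, not part of the original
        # model): the setter above requires the '<rev_type>:<rev>' form, so a
        # bare value such as 'tip' raises ValueError.
        self.landing_rev = 'branch:default'
        rev_type, rev = self.landing_rev  # -> ['branch', 'default']
        return rev_type, rev
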
1498 1502 @hybrid_property
1499 1503 def locked(self):
1500 1504 if self._locked:
1501 1505 user_id, timelocked, reason = self._locked.split(':')
1502 1506 lock_values = int(user_id), timelocked, reason
1503 1507 else:
1504 1508 lock_values = [None, None, None]
1505 1509 return lock_values
1506 1510
1507 1511 @locked.setter
1508 1512 def locked(self, val):
1509 1513 if val and isinstance(val, (list, tuple)):
1510 1514 self._locked = ':'.join(map(str, val))
1511 1515 else:
1512 1516 self._locked = None
1513 1517
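    def _example_lock_roundtrip(self, user_id):
        # Illustrative sketch only (an assumed helper): the setter above joins
        # the triple into the string "<user_id>:<unix_time>:<reason>" and the
        # getter splits it back, converting the user id to an int.
        self.locked = [user_id, time.time(), self.LOCK_API]
        locked_user_id, lock_time, reason = self.locked
        return locked_user_id, lock_time, reason
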
1514 1518 @hybrid_property
1515 1519 def changeset_cache(self):
1516 1520 from rhodecode.lib.vcs.backends.base import EmptyCommit
1517 1521 dummy = EmptyCommit().__json__()
1518 1522 if not self._changeset_cache:
1519 1523 return dummy
1520 1524 try:
1521 1525 return json.loads(self._changeset_cache)
1522 1526 except TypeError:
1523 1527 return dummy
1524 1528 except Exception:
1525 1529 log.error(traceback.format_exc())
1526 1530 return dummy
1527 1531
1528 1532 @changeset_cache.setter
1529 1533 def changeset_cache(self, val):
1530 1534 try:
1531 1535 self._changeset_cache = json.dumps(val)
1532 1536 except Exception:
1533 1537 log.error(traceback.format_exc())
1534 1538
1535 1539 @hybrid_property
1536 1540 def repo_name(self):
1537 1541 return self._repo_name
1538 1542
1539 1543 @repo_name.setter
1540 1544 def repo_name(self, value):
1541 1545 self._repo_name = value
1542 1546 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1543 1547
1544 1548 @classmethod
1545 1549 def normalize_repo_name(cls, repo_name):
1546 1550 """
1547 1551 Normalizes an os-specific repo_name to the format stored internally
1548 1552 in the database, using URL_SEP
1549 1553
1550 1554 :param cls:
1551 1555 :param repo_name:
1552 1556 """
1553 1557 return cls.NAME_SEP.join(repo_name.split(os.sep))
1554 1558
1555 1559 @classmethod
1556 1560 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1557 1561 session = Session()
1558 1562 q = session.query(cls).filter(cls.repo_name == repo_name)
1559 1563
1560 1564 if cache:
1561 1565 if identity_cache:
1562 1566 val = cls.identity_cache(session, 'repo_name', repo_name)
1563 1567 if val:
1564 1568 return val
1565 1569 else:
1566 1570 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1567 1571 q = q.options(
1568 1572 FromCache("sql_cache_short", cache_key))
1569 1573
1570 1574 return q.scalar()
1571 1575
1572 1576 @classmethod
1573 1577 def get_by_full_path(cls, repo_full_path):
1574 1578 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1575 1579 repo_name = cls.normalize_repo_name(repo_name)
1576 1580 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1577 1581
1578 1582 @classmethod
1579 1583 def get_repo_forks(cls, repo_id):
1580 1584 return cls.query().filter(Repository.fork_id == repo_id)
1581 1585
1582 1586 @classmethod
1583 1587 def base_path(cls):
1584 1588 """
1585 1589 Returns base path where all repos are stored
1586 1590
1587 1591 :param cls:
1588 1592 """
1589 1593 q = Session().query(RhodeCodeUi)\
1590 1594 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1591 1595 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1592 1596 return q.one().ui_value
1593 1597
1594 1598 @classmethod
1595 1599 def is_valid(cls, repo_name):
1596 1600 """
1597 1601 returns True if given repo name is a valid filesystem repository
1598 1602
1599 1603 :param cls:
1600 1604 :param repo_name:
1601 1605 """
1602 1606 from rhodecode.lib.utils import is_valid_repo
1603 1607
1604 1608 return is_valid_repo(repo_name, cls.base_path())
1605 1609
1606 1610 @classmethod
1607 1611 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1608 1612 case_insensitive=True):
1609 1613 q = Repository.query()
1610 1614
1611 1615 if not isinstance(user_id, Optional):
1612 1616 q = q.filter(Repository.user_id == user_id)
1613 1617
1614 1618 if not isinstance(group_id, Optional):
1615 1619 q = q.filter(Repository.group_id == group_id)
1616 1620
1617 1621 if case_insensitive:
1618 1622 q = q.order_by(func.lower(Repository.repo_name))
1619 1623 else:
1620 1624 q = q.order_by(Repository.repo_name)
1621 1625 return q.all()
1622 1626
1623 1627 @property
1624 1628 def forks(self):
1625 1629 """
1626 1630 Return forks of this repo
1627 1631 """
1628 1632 return Repository.get_repo_forks(self.repo_id)
1629 1633
1630 1634 @property
1631 1635 def parent(self):
1632 1636 """
1633 1637 Returns fork parent
1634 1638 """
1635 1639 return self.fork
1636 1640
1637 1641 @property
1638 1642 def just_name(self):
1639 1643 return self.repo_name.split(self.NAME_SEP)[-1]
1640 1644
1641 1645 @property
1642 1646 def groups_with_parents(self):
1643 1647 groups = []
1644 1648 if self.group is None:
1645 1649 return groups
1646 1650
1647 1651 cur_gr = self.group
1648 1652 groups.insert(0, cur_gr)
1649 1653 while 1:
1650 1654 gr = getattr(cur_gr, 'parent_group', None)
1651 1655 cur_gr = cur_gr.parent_group
1652 1656 if gr is None:
1653 1657 break
1654 1658 groups.insert(0, gr)
1655 1659
1656 1660 return groups
1657 1661
1658 1662 @property
1659 1663 def groups_and_repo(self):
1660 1664 return self.groups_with_parents, self
1661 1665
1662 1666 @LazyProperty
1663 1667 def repo_path(self):
1664 1668 """
1665 1669 Returns the base full path for this repository, i.e. where it
1666 1670 actually exists on the filesystem
1667 1671 """
1668 1672 q = Session().query(RhodeCodeUi).filter(
1669 1673 RhodeCodeUi.ui_key == self.NAME_SEP)
1670 1674 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1671 1675 return q.one().ui_value
1672 1676
1673 1677 @property
1674 1678 def repo_full_path(self):
1675 1679 p = [self.repo_path]
1676 1680 # we need to split the name by / since this is how we store the
1677 1681 # names in the database, but that eventually needs to be converted
1678 1682 # into a valid system path
1679 1683 p += self.repo_name.split(self.NAME_SEP)
1680 1684 return os.path.join(*map(safe_unicode, p))
1681 1685
1682 1686 @property
1683 1687 def cache_keys(self):
1684 1688 """
1685 1689 Returns associated cache keys for that repo
1686 1690 """
1687 1691 return CacheKey.query()\
1688 1692 .filter(CacheKey.cache_args == self.repo_name)\
1689 1693 .order_by(CacheKey.cache_key)\
1690 1694 .all()
1691 1695
1692 1696 def get_new_name(self, repo_name):
1693 1697 """
1694 1698 returns new full repository name based on assigned group and new name
1695 1699 
1696 1700 :param repo_name:
1697 1701 """
1698 1702 path_prefix = self.group.full_path_splitted if self.group else []
1699 1703 return self.NAME_SEP.join(path_prefix + [repo_name])
1700 1704
1701 1705 @property
1702 1706 def _config(self):
1703 1707 """
1704 1708 Returns db based config object.
1705 1709 """
1706 1710 from rhodecode.lib.utils import make_db_config
1707 1711 return make_db_config(clear_session=False, repo=self)
1708 1712
1709 1713 def permissions(self, with_admins=True, with_owner=True):
1710 1714 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1711 1715 q = q.options(joinedload(UserRepoToPerm.repository),
1712 1716 joinedload(UserRepoToPerm.user),
1713 1717 joinedload(UserRepoToPerm.permission),)
1714 1718
1715 1719 # get owners, admins and permissions. We re-write the objects coming
1716 1720 # from sqlalchemy into plain AttributeDict copies because the sqlalchemy
1717 1721 # session keeps a global reference, so changing one object would
1718 1722 # propagate to all others. Without the copy, if an admin is also the
1719 1723 # owner, setting admin_row on one row would change both rows.
1720 1724 perm_rows = []
1721 1725 for _usr in q.all():
1722 1726 usr = AttributeDict(_usr.user.get_dict())
1723 1727 usr.permission = _usr.permission.permission_name
1724 1728 perm_rows.append(usr)
1725 1729
1726 1730 # sort the perm rows: the 'default' user first, then by
1727 1731 # admin, write, read, none permission, alphabetically within
1728 1732 # each permission group
1729 1733 perm_rows = sorted(perm_rows, key=display_sort)
1730 1734
1731 1735 _admin_perm = 'repository.admin'
1732 1736 owner_row = []
1733 1737 if with_owner:
1734 1738 usr = AttributeDict(self.user.get_dict())
1735 1739 usr.owner_row = True
1736 1740 usr.permission = _admin_perm
1737 1741 owner_row.append(usr)
1738 1742
1739 1743 super_admin_rows = []
1740 1744 if with_admins:
1741 1745 for usr in User.get_all_super_admins():
1742 1746 # if this admin is also owner, don't double the record
1743 1747 if usr.user_id == owner_row[0].user_id:
1744 1748 owner_row[0].admin_row = True
1745 1749 else:
1746 1750 usr = AttributeDict(usr.get_dict())
1747 1751 usr.admin_row = True
1748 1752 usr.permission = _admin_perm
1749 1753 super_admin_rows.append(usr)
1750 1754
1751 1755 return super_admin_rows + owner_row + perm_rows
1752 1756
1753 1757 def permission_user_groups(self):
1754 1758 q = UserGroupRepoToPerm.query().filter(
1755 1759 UserGroupRepoToPerm.repository == self)
1756 1760 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1757 1761 joinedload(UserGroupRepoToPerm.users_group),
1758 1762 joinedload(UserGroupRepoToPerm.permission),)
1759 1763
1760 1764 perm_rows = []
1761 1765 for _user_group in q.all():
1762 1766 usr = AttributeDict(_user_group.users_group.get_dict())
1763 1767 usr.permission = _user_group.permission.permission_name
1764 1768 perm_rows.append(usr)
1765 1769
1766 1770 return perm_rows
1767 1771
1768 1772 def get_api_data(self, include_secrets=False):
1769 1773 """
1770 1774 Common function for generating repo api data
1771 1775
1772 1776 :param include_secrets: See :meth:`User.get_api_data`.
1773 1777
1774 1778 """
1775 1779 # TODO: mikhail: this is an anti-pattern; we probably need to
1776 1780 # move these methods to the model level.
1777 1781 from rhodecode.model.settings import SettingsModel
1778 1782 from rhodecode.model.repo import RepoModel
1779 1783
1780 1784 repo = self
1781 1785 _user_id, _time, _reason = self.locked
1782 1786
1783 1787 data = {
1784 1788 'repo_id': repo.repo_id,
1785 1789 'repo_name': repo.repo_name,
1786 1790 'repo_type': repo.repo_type,
1787 1791 'clone_uri': repo.clone_uri or '',
1788 1792 'url': RepoModel().get_url(self),
1789 1793 'private': repo.private,
1790 1794 'created_on': repo.created_on,
1791 1795 'description': repo.description,
1792 1796 'landing_rev': repo.landing_rev,
1793 1797 'owner': repo.user.username,
1794 1798 'fork_of': repo.fork.repo_name if repo.fork else None,
1795 1799 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1796 1800 'enable_statistics': repo.enable_statistics,
1797 1801 'enable_locking': repo.enable_locking,
1798 1802 'enable_downloads': repo.enable_downloads,
1799 1803 'last_changeset': repo.changeset_cache,
1800 1804 'locked_by': User.get(_user_id).get_api_data(
1801 1805 include_secrets=include_secrets) if _user_id else None,
1802 1806 'locked_date': time_to_datetime(_time) if _time else None,
1803 1807 'lock_reason': _reason if _reason else None,
1804 1808 }
1805 1809
1806 1810 # TODO: mikhail: should be per-repo settings here
1807 1811 rc_config = SettingsModel().get_all_settings()
1808 1812 repository_fields = str2bool(
1809 1813 rc_config.get('rhodecode_repository_fields'))
1810 1814 if repository_fields:
1811 1815 for f in self.extra_fields:
1812 1816 data[f.field_key_prefixed] = f.field_value
1813 1817
1814 1818 return data
1815 1819
1816 1820 @classmethod
1817 1821 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1818 1822 if not lock_time:
1819 1823 lock_time = time.time()
1820 1824 if not lock_reason:
1821 1825 lock_reason = cls.LOCK_AUTOMATIC
1822 1826 repo.locked = [user_id, lock_time, lock_reason]
1823 1827 Session().add(repo)
1824 1828 Session().commit()
1825 1829
1826 1830 @classmethod
1827 1831 def unlock(cls, repo):
1828 1832 repo.locked = None
1829 1833 Session().add(repo)
1830 1834 Session().commit()
1831 1835
1832 1836 @classmethod
1833 1837 def getlock(cls, repo):
1834 1838 return repo.locked
1835 1839
1836 1840 def is_user_lock(self, user_id):
1837 1841 if self.locked[0]:
1838 1842 lock_user_id = safe_int(self.locked[0])
1839 1843 user_id = safe_int(user_id)
1840 1844 # both are ints, and they are equal
1841 1845 return all([lock_user_id, user_id]) and lock_user_id == user_id
1842 1846
1843 1847 return False
1844 1848
1845 1849 def get_locking_state(self, action, user_id, only_when_enabled=True):
1846 1850 """
1847 1851 Checks locking on this repository, if locking is enabled and lock is
1848 1852 present returns a tuple of make_lock, locked, locked_by.
1849 1853 make_lock can have 3 states None (do nothing) True, make lock
1850 1854 False release lock, This value is later propagated to hooks, which
1851 1855 do the locking. Think about this as signals passed to hooks what to do.
1852 1856
1853 1857 """
1854 1858 # TODO: johbo: This is part of the business logic and should be moved
1855 1859 # into the RepositoryModel.
1856 1860
1857 1861 if action not in ('push', 'pull'):
1858 1862 raise ValueError("Invalid action value: %s" % repr(action))
1859 1863
1860 1864 # defines if locked error should be thrown to user
1861 1865 currently_locked = False
1862 1866 # defines if new lock should be made, tri-state
1863 1867 make_lock = None
1864 1868 repo = self
1865 1869 user = User.get(user_id)
1866 1870
1867 1871 lock_info = repo.locked
1868 1872
1869 1873 if repo and (repo.enable_locking or not only_when_enabled):
1870 1874 if action == 'push':
1871 1875 # check if it's already locked; if it is, compare users
1872 1876 locked_by_user_id = lock_info[0]
1873 1877 if user.user_id == locked_by_user_id:
1874 1878 log.debug(
1875 1879 'Got `push` action from user %s, now unlocking', user)
1876 1880 # unlock if we have push from user who locked
1877 1881 make_lock = False
1878 1882 else:
1879 1883 # we're not the same user who locked, ban with
1880 1884 # code defined in settings (default is 423 HTTP Locked) !
1881 1885 log.debug('Repo %s is currently locked by %s', repo, user)
1882 1886 currently_locked = True
1883 1887 elif action == 'pull':
1884 1888 # [0] user [1] date
1885 1889 if lock_info[0] and lock_info[1]:
1886 1890 log.debug('Repo %s is currently locked by %s', repo, user)
1887 1891 currently_locked = True
1888 1892 else:
1889 1893 log.debug('Setting lock on repo %s by %s', repo, user)
1890 1894 make_lock = True
1891 1895
1892 1896 else:
1893 1897 log.debug('Repository %s do not have locking enabled', repo)
1894 1898
1895 1899 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1896 1900 make_lock, currently_locked, lock_info)
1897 1901
1898 1902 from rhodecode.lib.auth import HasRepoPermissionAny
1899 1903 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1900 1904 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1901 1905 # if we don't have at least write permission we cannot make a lock
1902 1906 log.debug('lock state reset back to FALSE due to lack '
1903 1907 'of at least write permission')
1904 1908 make_lock = False
1905 1909
1906 1910 return make_lock, currently_locked, lock_info
1907 1911
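    def _example_locking_state(self, action, user_id):
        # Illustrative sketch only (an assumed helper): shows how the tri-state
        # make_lock value returned by get_locking_state() above is typically
        # interpreted by the calling code.
        make_lock, currently_locked, lock_info = self.get_locking_state(
            action, user_id)
        if currently_locked:
            return 'rejected: repository is locked by user id %s' % lock_info[0]
        if make_lock is True:
            return 'hooks should set a new lock'
        if make_lock is False:
            return 'hooks should release the existing lock'
        return 'no lock change needed'  # make_lock is None
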
1908 1912 @property
1909 1913 def last_db_change(self):
1910 1914 return self.updated_on
1911 1915
1912 1916 @property
1913 1917 def clone_uri_hidden(self):
1914 1918 clone_uri = self.clone_uri
1915 1919 if clone_uri:
1916 1920 import urlobject
1917 1921 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
1918 1922 if url_obj.password:
1919 1923 clone_uri = url_obj.with_password('*****')
1920 1924 return clone_uri
1921 1925
1922 1926 def clone_url(self, **override):
1923 1927
1924 1928 uri_tmpl = None
1925 1929 if 'with_id' in override:
1926 1930 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1927 1931 del override['with_id']
1928 1932
1929 1933 if 'uri_tmpl' in override:
1930 1934 uri_tmpl = override['uri_tmpl']
1931 1935 del override['uri_tmpl']
1932 1936
1933 1937 # we didn't override our tmpl via **override
1934 1938 if not uri_tmpl:
1935 1939 uri_tmpl = self.DEFAULT_CLONE_URI
1936 1940 try:
1937 1941 from pylons import tmpl_context as c
1938 1942 uri_tmpl = c.clone_uri_tmpl
1939 1943 except Exception:
1940 1944 # in any case if we call this outside of request context,
1941 1945 # ie, not having tmpl_context set up
1942 1946 pass
1943 1947
1944 1948 request = get_current_request()
1945 1949 return get_clone_url(request=request,
1946 1950 uri_tmpl=uri_tmpl,
1947 1951 repo_name=self.repo_name,
1948 1952 repo_id=self.repo_id, **override)
1949 1953
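    def _example_clone_urls(self):
        # Illustrative sketch only (an assumed helper, expected to run inside a
        # request): clone_url() above falls back to DEFAULT_CLONE_URI unless a
        # template is given; with_id switches to the permanent
        # DEFAULT_CLONE_URI_ID template, and uri_tmpl (here a hypothetical one)
        # overrides both.
        by_name = self.clone_url()
        by_id = self.clone_url(with_id=True)
        custom = self.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')
        return by_name, by_id, custom
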
1950 1954 def set_state(self, state):
1951 1955 self.repo_state = state
1952 1956 Session().add(self)
1953 1957 #==========================================================================
1954 1958 # SCM PROPERTIES
1955 1959 #==========================================================================
1956 1960
1957 1961 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1958 1962 return get_commit_safe(
1959 1963 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1960 1964
1961 1965 def get_changeset(self, rev=None, pre_load=None):
1962 1966 warnings.warn("Use get_commit", DeprecationWarning)
1963 1967 commit_id = None
1964 1968 commit_idx = None
1965 1969 if isinstance(rev, basestring):
1966 1970 commit_id = rev
1967 1971 else:
1968 1972 commit_idx = rev
1969 1973 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1970 1974 pre_load=pre_load)
1971 1975
1972 1976 def get_landing_commit(self):
1973 1977 """
1974 1978 Returns landing commit, or if that doesn't exist returns the tip
1975 1979 """
1976 1980 _rev_type, _rev = self.landing_rev
1977 1981 commit = self.get_commit(_rev)
1978 1982 if isinstance(commit, EmptyCommit):
1979 1983 return self.get_commit()
1980 1984 return commit
1981 1985
1982 1986 def update_commit_cache(self, cs_cache=None, config=None):
1983 1987 """
1984 1988 Update cache of last changeset for repository, keys should be::
1985 1989
1986 1990 short_id
1987 1991 raw_id
1988 1992 revision
1989 1993 parents
1990 1994 message
1991 1995 date
1992 1996 author
1993 1997
1994 1998 :param cs_cache:
1995 1999 """
1996 2000 from rhodecode.lib.vcs.backends.base import BaseChangeset
1997 2001 if cs_cache is None:
1998 2002 # use no-cache version here
1999 2003 scm_repo = self.scm_instance(cache=False, config=config)
2000 2004 if scm_repo:
2001 2005 cs_cache = scm_repo.get_commit(
2002 2006 pre_load=["author", "date", "message", "parents"])
2003 2007 else:
2004 2008 cs_cache = EmptyCommit()
2005 2009
2006 2010 if isinstance(cs_cache, BaseChangeset):
2007 2011 cs_cache = cs_cache.__json__()
2008 2012
2009 2013 def is_outdated(new_cs_cache):
2010 2014 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2011 2015 new_cs_cache['revision'] != self.changeset_cache['revision']):
2012 2016 return True
2013 2017 return False
2014 2018
2015 2019 # check if we maybe already have the latest cached revision
2016 2020 if is_outdated(cs_cache) or not self.changeset_cache:
2017 2021 _default = datetime.datetime.fromtimestamp(0)
2018 2022 last_change = cs_cache.get('date') or _default
2019 2023 log.debug('updated repo %s with new cs cache %s',
2020 2024 self.repo_name, cs_cache)
2021 2025 self.updated_on = last_change
2022 2026 self.changeset_cache = cs_cache
2023 2027 Session().add(self)
2024 2028 Session().commit()
2025 2029 else:
2026 2030 log.debug('Skipping update_commit_cache for repo:`%s` '
2027 2031 'commit already with latest changes', self.repo_name)
2028 2032
2029 2033 @property
2030 2034 def tip(self):
2031 2035 return self.get_commit('tip')
2032 2036
2033 2037 @property
2034 2038 def author(self):
2035 2039 return self.tip.author
2036 2040
2037 2041 @property
2038 2042 def last_change(self):
2039 2043 return self.scm_instance().last_change
2040 2044
2041 2045 def get_comments(self, revisions=None):
2042 2046 """
2043 2047 Returns comments for this repository grouped by revisions
2044 2048
2045 2049 :param revisions: filter query by revisions only
2046 2050 """
2047 2051 cmts = ChangesetComment.query()\
2048 2052 .filter(ChangesetComment.repo == self)
2049 2053 if revisions:
2050 2054 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2051 2055 grouped = collections.defaultdict(list)
2052 2056 for cmt in cmts.all():
2053 2057 grouped[cmt.revision].append(cmt)
2054 2058 return grouped
2055 2059
2056 2060 def statuses(self, revisions=None):
2057 2061 """
2058 2062 Returns statuses for this repository
2059 2063
2060 2064 :param revisions: list of revisions to get statuses for
2061 2065 """
2062 2066 statuses = ChangesetStatus.query()\
2063 2067 .filter(ChangesetStatus.repo == self)\
2064 2068 .filter(ChangesetStatus.version == 0)
2065 2069
2066 2070 if revisions:
2067 2071 # Try doing the filtering in chunks to avoid hitting limits
2068 2072 size = 500
2069 2073 status_results = []
2070 2074 for chunk in xrange(0, len(revisions), size):
2071 2075 status_results += statuses.filter(
2072 2076 ChangesetStatus.revision.in_(
2073 2077 revisions[chunk: chunk+size])
2074 2078 ).all()
2075 2079 else:
2076 2080 status_results = statuses.all()
2077 2081
2078 2082 grouped = {}
2079 2083
2080 2084 # maybe we have an open pull request without a status yet?
2081 2085 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2082 2086 status_lbl = ChangesetStatus.get_status_lbl(stat)
2083 2087 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2084 2088 for rev in pr.revisions:
2085 2089 pr_id = pr.pull_request_id
2086 2090 pr_repo = pr.target_repo.repo_name
2087 2091 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2088 2092
2089 2093 for stat in status_results:
2090 2094 pr_id = pr_repo = None
2091 2095 if stat.pull_request:
2092 2096 pr_id = stat.pull_request.pull_request_id
2093 2097 pr_repo = stat.pull_request.target_repo.repo_name
2094 2098 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2095 2099 pr_id, pr_repo]
2096 2100 return grouped
2097 2101
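    def _example_latest_statuses(self, revisions):
        # Illustrative sketch only (an assumed helper): statuses() above returns
        # a dict of revision -> [status, status_label, pull_request_id, pr_repo],
        # where the last two entries are None for plain commit statuses.
        grouped = self.statuses(revisions=revisions)
        return [(rev, data[0], data[1]) for rev, data in grouped.items()]
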
2098 2102 # ==========================================================================
2099 2103 # SCM CACHE INSTANCE
2100 2104 # ==========================================================================
2101 2105
2102 2106 def scm_instance(self, **kwargs):
2103 2107 import rhodecode
2104 2108
2105 2109 # Passing a config will bypass the cache; currently this is only
2106 2110 # used by repo2dbmapper
2107 2111 config = kwargs.pop('config', None)
2108 2112 cache = kwargs.pop('cache', None)
2109 2113 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2110 2114 # if cache is NOT defined, use the global default; otherwise we have
2111 2115 # full control over the cache behaviour
2112 2116 if cache is None and full_cache and not config:
2113 2117 return self._get_instance_cached()
2114 2118 return self._get_instance(cache=bool(cache), config=config)
2115 2119
2116 2120 def _get_instance_cached(self):
2117 2121 @cache_region('long_term')
2118 2122 def _get_repo(cache_key):
2119 2123 return self._get_instance()
2120 2124
2121 2125 invalidator_context = CacheKey.repo_context_cache(
2122 2126 _get_repo, self.repo_name, None, thread_scoped=True)
2123 2127
2124 2128 with invalidator_context as context:
2125 2129 context.invalidate()
2126 2130 repo = context.compute()
2127 2131
2128 2132 return repo
2129 2133
2130 2134 def _get_instance(self, cache=True, config=None):
2131 2135 config = config or self._config
2132 2136 custom_wire = {
2133 2137 'cache': cache # controls the vcs.remote cache
2134 2138 }
2135 2139 repo = get_vcs_instance(
2136 2140 repo_path=safe_str(self.repo_full_path),
2137 2141 config=config,
2138 2142 with_wire=custom_wire,
2139 2143 create=False,
2140 2144 _vcs_alias=self.repo_type)
2141 2145
2142 2146 return repo
2143 2147
2144 2148 def __json__(self):
2145 2149 return {'landing_rev': self.landing_rev}
2146 2150
2147 2151 def get_dict(self):
2148 2152
2149 2153 # Since we transformed `repo_name` to a hybrid property, we need to
2150 2154 # keep compatibility with the code which uses `repo_name` field.
2151 2155
2152 2156 result = super(Repository, self).get_dict()
2153 2157 result['repo_name'] = result.pop('_repo_name', None)
2154 2158 return result
2155 2159
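# A minimal end-to-end locking sketch built only from the Repository methods
# above (illustrative; it assumes an existing Repository row `repo`, a valid
# `user_id` and a configured database session, and notes that
# Repository.lock()/unlock() commit the session).
def _example_repository_locking(repo, user_id):
    Repository.lock(repo, user_id, lock_reason=Repository.LOCK_WEB)
    assert repo.is_user_lock(user_id)  # the locking user holds the lock
    locked_user_id, lock_time, reason = Repository.getlock(repo)
    Repository.unlock(repo)
    assert Repository.getlock(repo) == [None, None, None]
    return locked_user_id, lock_time, reason
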
2156 2160
2157 2161 class RepoGroup(Base, BaseModel):
2158 2162 __tablename__ = 'groups'
2159 2163 __table_args__ = (
2160 2164 UniqueConstraint('group_name', 'group_parent_id'),
2161 2165 CheckConstraint('group_id != group_parent_id'),
2162 2166 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2163 2167 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2164 2168 )
2165 2169 __mapper_args__ = {'order_by': 'group_name'}
2166 2170
2167 2171 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2168 2172
2169 2173 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2170 2174 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2171 2175 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2172 2176 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2173 2177 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2174 2178 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2175 2179 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2176 2180 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2177 2181
2178 2182 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2179 2183 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2180 2184 parent_group = relationship('RepoGroup', remote_side=group_id)
2181 2185 user = relationship('User')
2182 2186 integrations = relationship('Integration',
2183 2187 cascade="all, delete, delete-orphan")
2184 2188
2185 2189 def __init__(self, group_name='', parent_group=None):
2186 2190 self.group_name = group_name
2187 2191 self.parent_group = parent_group
2188 2192
2189 2193 def __unicode__(self):
2190 2194 return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
2191 2195 self.group_name)
2192 2196
2193 2197 @classmethod
2194 2198 def _generate_choice(cls, repo_group):
2195 2199 from webhelpers.html import literal as _literal
2196 2200 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2197 2201 return repo_group.group_id, _name(repo_group.full_path_splitted)
2198 2202
2199 2203 @classmethod
2200 2204 def groups_choices(cls, groups=None, show_empty_group=True):
2201 2205 if not groups:
2202 2206 groups = cls.query().all()
2203 2207
2204 2208 repo_groups = []
2205 2209 if show_empty_group:
2206 2210 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2207 2211
2208 2212 repo_groups.extend([cls._generate_choice(x) for x in groups])
2209 2213
2210 2214 repo_groups = sorted(
2211 2215 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2212 2216 return repo_groups
2213 2217
2214 2218 @classmethod
2215 2219 def url_sep(cls):
2216 2220 return URL_SEP
2217 2221
2218 2222 @classmethod
2219 2223 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2220 2224 if case_insensitive:
2221 2225 gr = cls.query().filter(func.lower(cls.group_name)
2222 2226 == func.lower(group_name))
2223 2227 else:
2224 2228 gr = cls.query().filter(cls.group_name == group_name)
2225 2229 if cache:
2226 2230 name_key = _hash_key(group_name)
2227 2231 gr = gr.options(
2228 2232 FromCache("sql_cache_short", "get_group_%s" % name_key))
2229 2233 return gr.scalar()
2230 2234
2231 2235 @classmethod
2232 2236 def get_user_personal_repo_group(cls, user_id):
2233 2237 user = User.get(user_id)
2234 2238 if user.username == User.DEFAULT_USER:
2235 2239 return None
2236 2240
2237 2241 return cls.query()\
2238 2242 .filter(cls.personal == true()) \
2239 2243 .filter(cls.user == user).scalar()
2240 2244
2241 2245 @classmethod
2242 2246 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2243 2247 case_insensitive=True):
2244 2248 q = RepoGroup.query()
2245 2249
2246 2250 if not isinstance(user_id, Optional):
2247 2251 q = q.filter(RepoGroup.user_id == user_id)
2248 2252
2249 2253 if not isinstance(group_id, Optional):
2250 2254 q = q.filter(RepoGroup.group_parent_id == group_id)
2251 2255
2252 2256 if case_insensitive:
2253 2257 q = q.order_by(func.lower(RepoGroup.group_name))
2254 2258 else:
2255 2259 q = q.order_by(RepoGroup.group_name)
2256 2260 return q.all()
2257 2261
2258 2262 @property
2259 2263 def parents(self):
2260 2264 parents_recursion_limit = 10
2261 2265 groups = []
2262 2266 if self.parent_group is None:
2263 2267 return groups
2264 2268 cur_gr = self.parent_group
2265 2269 groups.insert(0, cur_gr)
2266 2270 cnt = 0
2267 2271 while 1:
2268 2272 cnt += 1
2269 2273 gr = getattr(cur_gr, 'parent_group', None)
2270 2274 cur_gr = cur_gr.parent_group
2271 2275 if gr is None:
2272 2276 break
2273 2277 if cnt == parents_recursion_limit:
2274 2278 # this will prevent accidental infinite loops
2275 2279 log.error(('more than %s parents found for group %s, stopping '
2276 2280 'recursive parent fetching' % (parents_recursion_limit, self)))
2277 2281 break
2278 2282
2279 2283 groups.insert(0, gr)
2280 2284 return groups
2281 2285
2282 2286 @property
2283 2287 def children(self):
2284 2288 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2285 2289
2286 2290 @property
2287 2291 def name(self):
2288 2292 return self.group_name.split(RepoGroup.url_sep())[-1]
2289 2293
2290 2294 @property
2291 2295 def full_path(self):
2292 2296 return self.group_name
2293 2297
2294 2298 @property
2295 2299 def full_path_splitted(self):
2296 2300 return self.group_name.split(RepoGroup.url_sep())
2297 2301
2298 2302 @property
2299 2303 def repositories(self):
2300 2304 return Repository.query()\
2301 2305 .filter(Repository.group == self)\
2302 2306 .order_by(Repository.repo_name)
2303 2307
2304 2308 @property
2305 2309 def repositories_recursive_count(self):
2306 2310 cnt = self.repositories.count()
2307 2311
2308 2312 def children_count(group):
2309 2313 cnt = 0
2310 2314 for child in group.children:
2311 2315 cnt += child.repositories.count()
2312 2316 cnt += children_count(child)
2313 2317 return cnt
2314 2318
2315 2319 return cnt + children_count(self)
2316 2320
2317 2321 def _recursive_objects(self, include_repos=True):
2318 2322 all_ = []
2319 2323
2320 2324 def _get_members(root_gr):
2321 2325 if include_repos:
2322 2326 for r in root_gr.repositories:
2323 2327 all_.append(r)
2324 2328 childs = root_gr.children.all()
2325 2329 if childs:
2326 2330 for gr in childs:
2327 2331 all_.append(gr)
2328 2332 _get_members(gr)
2329 2333
2330 2334 _get_members(self)
2331 2335 return [self] + all_
2332 2336
2333 2337 def recursive_groups_and_repos(self):
2334 2338 """
2335 2339 Recursively returns all groups, with the repositories in those groups
2336 2340 """
2337 2341 return self._recursive_objects()
2338 2342
2339 2343 def recursive_groups(self):
2340 2344 """
2341 2345 Returns all children groups for this group including children of children
2342 2346 """
2343 2347 return self._recursive_objects(include_repos=False)
2344 2348
2345 2349 def get_new_name(self, group_name):
2346 2350 """
2347 2351 returns new full group name based on parent and new name
2348 2352
2349 2353 :param group_name:
2350 2354 """
2351 2355 path_prefix = (self.parent_group.full_path_splitted if
2352 2356 self.parent_group else [])
2353 2357 return RepoGroup.url_sep().join(path_prefix + [group_name])
2354 2358
2355 2359 def permissions(self, with_admins=True, with_owner=True):
2356 2360 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2357 2361 q = q.options(joinedload(UserRepoGroupToPerm.group),
2358 2362 joinedload(UserRepoGroupToPerm.user),
2359 2363 joinedload(UserRepoGroupToPerm.permission),)
2360 2364
2361 2365 # get owners, admins and permissions. We re-write the objects coming
2362 2366 # from sqlalchemy into plain AttributeDict copies because the sqlalchemy
2363 2367 # session keeps a global reference, so changing one object would
2364 2368 # propagate to all others. Without the copy, if an admin is also the
2365 2369 # owner, setting admin_row on one row would change both rows.
2366 2370 perm_rows = []
2367 2371 for _usr in q.all():
2368 2372 usr = AttributeDict(_usr.user.get_dict())
2369 2373 usr.permission = _usr.permission.permission_name
2370 2374 perm_rows.append(usr)
2371 2375
2372 2376 # sort the perm rows: the 'default' user first, then by
2373 2377 # admin, write, read, none permission, alphabetically within
2374 2378 # each permission group
2375 2379 perm_rows = sorted(perm_rows, key=display_sort)
2376 2380
2377 2381 _admin_perm = 'group.admin'
2378 2382 owner_row = []
2379 2383 if with_owner:
2380 2384 usr = AttributeDict(self.user.get_dict())
2381 2385 usr.owner_row = True
2382 2386 usr.permission = _admin_perm
2383 2387 owner_row.append(usr)
2384 2388
2385 2389 super_admin_rows = []
2386 2390 if with_admins:
2387 2391 for usr in User.get_all_super_admins():
2388 2392 # if this admin is also owner, don't double the record
2389 2393 if usr.user_id == owner_row[0].user_id:
2390 2394 owner_row[0].admin_row = True
2391 2395 else:
2392 2396 usr = AttributeDict(usr.get_dict())
2393 2397 usr.admin_row = True
2394 2398 usr.permission = _admin_perm
2395 2399 super_admin_rows.append(usr)
2396 2400
2397 2401 return super_admin_rows + owner_row + perm_rows
2398 2402
2399 2403 def permission_user_groups(self):
2400 2404 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2401 2405 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2402 2406 joinedload(UserGroupRepoGroupToPerm.users_group),
2403 2407 joinedload(UserGroupRepoGroupToPerm.permission),)
2404 2408
2405 2409 perm_rows = []
2406 2410 for _user_group in q.all():
2407 2411 usr = AttributeDict(_user_group.users_group.get_dict())
2408 2412 usr.permission = _user_group.permission.permission_name
2409 2413 perm_rows.append(usr)
2410 2414
2411 2415 return perm_rows
2412 2416
2413 2417 def get_api_data(self):
2414 2418 """
2415 2419 Common function for generating api data
2416 2420
2417 2421 """
2418 2422 group = self
2419 2423 data = {
2420 2424 'group_id': group.group_id,
2421 2425 'group_name': group.group_name,
2422 2426 'group_description': group.group_description,
2423 2427 'parent_group': group.parent_group.group_name if group.parent_group else None,
2424 2428 'repositories': [x.repo_name for x in group.repositories],
2425 2429 'owner': group.user.username,
2426 2430 }
2427 2431 return data
2428 2432
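# A small sketch of how the RepoGroup path helpers above compose (illustrative
# only; it assumes an existing, attached RepoGroup instance and a hypothetical
# new name for the last path segment).
def _example_repo_group_paths(group, new_name='frontend-v2'):
    # e.g. for 'libs/web/frontend': parents yields the 'libs' and 'libs/web'
    # groups, full_path_splitted is ['libs', 'web', 'frontend'], and
    # get_new_name('frontend-v2') returns 'libs/web/frontend-v2'.
    chain = [gr.group_name for gr in group.parents] + [group.group_name]
    return chain, group.full_path_splitted, group.get_new_name(new_name)
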
2429 2433
2430 2434 class Permission(Base, BaseModel):
2431 2435 __tablename__ = 'permissions'
2432 2436 __table_args__ = (
2433 2437 Index('p_perm_name_idx', 'permission_name'),
2434 2438 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2435 2439 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2436 2440 )
2437 2441 PERMS = [
2438 2442 ('hg.admin', _('RhodeCode Super Administrator')),
2439 2443
2440 2444 ('repository.none', _('Repository no access')),
2441 2445 ('repository.read', _('Repository read access')),
2442 2446 ('repository.write', _('Repository write access')),
2443 2447 ('repository.admin', _('Repository admin access')),
2444 2448
2445 2449 ('group.none', _('Repository group no access')),
2446 2450 ('group.read', _('Repository group read access')),
2447 2451 ('group.write', _('Repository group write access')),
2448 2452 ('group.admin', _('Repository group admin access')),
2449 2453
2450 2454 ('usergroup.none', _('User group no access')),
2451 2455 ('usergroup.read', _('User group read access')),
2452 2456 ('usergroup.write', _('User group write access')),
2453 2457 ('usergroup.admin', _('User group admin access')),
2454 2458
2455 2459 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2456 2460 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2457 2461
2458 2462 ('hg.usergroup.create.false', _('User Group creation disabled')),
2459 2463 ('hg.usergroup.create.true', _('User Group creation enabled')),
2460 2464
2461 2465 ('hg.create.none', _('Repository creation disabled')),
2462 2466 ('hg.create.repository', _('Repository creation enabled')),
2463 2467 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2464 2468 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2465 2469
2466 2470 ('hg.fork.none', _('Repository forking disabled')),
2467 2471 ('hg.fork.repository', _('Repository forking enabled')),
2468 2472
2469 2473 ('hg.register.none', _('Registration disabled')),
2470 2474 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2471 2475 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2472 2476
2473 2477 ('hg.password_reset.enabled', _('Password reset enabled')),
2474 2478 ('hg.password_reset.hidden', _('Password reset hidden')),
2475 2479 ('hg.password_reset.disabled', _('Password reset disabled')),
2476 2480
2477 2481 ('hg.extern_activate.manual', _('Manual activation of external account')),
2478 2482 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2479 2483
2480 2484 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2481 2485 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2482 2486 ]
2483 2487
2484 2488 # definition of system default permissions for DEFAULT user
2485 2489 DEFAULT_USER_PERMISSIONS = [
2486 2490 'repository.read',
2487 2491 'group.read',
2488 2492 'usergroup.read',
2489 2493 'hg.create.repository',
2490 2494 'hg.repogroup.create.false',
2491 2495 'hg.usergroup.create.false',
2492 2496 'hg.create.write_on_repogroup.true',
2493 2497 'hg.fork.repository',
2494 2498 'hg.register.manual_activate',
2495 2499 'hg.password_reset.enabled',
2496 2500 'hg.extern_activate.auto',
2497 2501 'hg.inherit_default_perms.true',
2498 2502 ]
2499 2503
2500 2504 # Weight defines which permissions are more important.
2501 2505 # The higher the number, the more important the permission.
2503 2507 PERM_WEIGHTS = {
2504 2508 'repository.none': 0,
2505 2509 'repository.read': 1,
2506 2510 'repository.write': 3,
2507 2511 'repository.admin': 4,
2508 2512
2509 2513 'group.none': 0,
2510 2514 'group.read': 1,
2511 2515 'group.write': 3,
2512 2516 'group.admin': 4,
2513 2517
2514 2518 'usergroup.none': 0,
2515 2519 'usergroup.read': 1,
2516 2520 'usergroup.write': 3,
2517 2521 'usergroup.admin': 4,
2518 2522
2519 2523 'hg.repogroup.create.false': 0,
2520 2524 'hg.repogroup.create.true': 1,
2521 2525
2522 2526 'hg.usergroup.create.false': 0,
2523 2527 'hg.usergroup.create.true': 1,
2524 2528
2525 2529 'hg.fork.none': 0,
2526 2530 'hg.fork.repository': 1,
2527 2531 'hg.create.none': 0,
2528 2532 'hg.create.repository': 1
2529 2533 }
2530 2534
2531 2535 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2532 2536 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2533 2537 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2534 2538
2535 2539 def __unicode__(self):
2536 2540 return u"<%s('%s:%s')>" % (
2537 2541 self.__class__.__name__, self.permission_id, self.permission_name
2538 2542 )
2539 2543
2540 2544 @classmethod
2541 2545 def get_by_key(cls, key):
2542 2546 return cls.query().filter(cls.permission_name == key).scalar()
2543 2547
2544 2548 @classmethod
2545 2549 def get_default_repo_perms(cls, user_id, repo_id=None):
2546 2550 q = Session().query(UserRepoToPerm, Repository, Permission)\
2547 2551 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2548 2552 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2549 2553 .filter(UserRepoToPerm.user_id == user_id)
2550 2554 if repo_id:
2551 2555 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2552 2556 return q.all()
2553 2557
2554 2558 @classmethod
2555 2559 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2556 2560 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2557 2561 .join(
2558 2562 Permission,
2559 2563 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2560 2564 .join(
2561 2565 Repository,
2562 2566 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2563 2567 .join(
2564 2568 UserGroup,
2565 2569 UserGroupRepoToPerm.users_group_id ==
2566 2570 UserGroup.users_group_id)\
2567 2571 .join(
2568 2572 UserGroupMember,
2569 2573 UserGroupRepoToPerm.users_group_id ==
2570 2574 UserGroupMember.users_group_id)\
2571 2575 .filter(
2572 2576 UserGroupMember.user_id == user_id,
2573 2577 UserGroup.users_group_active == true())
2574 2578 if repo_id:
2575 2579 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2576 2580 return q.all()
2577 2581
2578 2582 @classmethod
2579 2583 def get_default_group_perms(cls, user_id, repo_group_id=None):
2580 2584 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2581 2585 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2582 2586 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2583 2587 .filter(UserRepoGroupToPerm.user_id == user_id)
2584 2588 if repo_group_id:
2585 2589 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2586 2590 return q.all()
2587 2591
2588 2592 @classmethod
2589 2593 def get_default_group_perms_from_user_group(
2590 2594 cls, user_id, repo_group_id=None):
2591 2595 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2592 2596 .join(
2593 2597 Permission,
2594 2598 UserGroupRepoGroupToPerm.permission_id ==
2595 2599 Permission.permission_id)\
2596 2600 .join(
2597 2601 RepoGroup,
2598 2602 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2599 2603 .join(
2600 2604 UserGroup,
2601 2605 UserGroupRepoGroupToPerm.users_group_id ==
2602 2606 UserGroup.users_group_id)\
2603 2607 .join(
2604 2608 UserGroupMember,
2605 2609 UserGroupRepoGroupToPerm.users_group_id ==
2606 2610 UserGroupMember.users_group_id)\
2607 2611 .filter(
2608 2612 UserGroupMember.user_id == user_id,
2609 2613 UserGroup.users_group_active == true())
2610 2614 if repo_group_id:
2611 2615 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2612 2616 return q.all()
2613 2617
2614 2618 @classmethod
2615 2619 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2616 2620 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2617 2621 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2618 2622 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2619 2623 .filter(UserUserGroupToPerm.user_id == user_id)
2620 2624 if user_group_id:
2621 2625 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2622 2626 return q.all()
2623 2627
2624 2628 @classmethod
2625 2629 def get_default_user_group_perms_from_user_group(
2626 2630 cls, user_id, user_group_id=None):
2627 2631 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2628 2632 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2629 2633 .join(
2630 2634 Permission,
2631 2635 UserGroupUserGroupToPerm.permission_id ==
2632 2636 Permission.permission_id)\
2633 2637 .join(
2634 2638 TargetUserGroup,
2635 2639 UserGroupUserGroupToPerm.target_user_group_id ==
2636 2640 TargetUserGroup.users_group_id)\
2637 2641 .join(
2638 2642 UserGroup,
2639 2643 UserGroupUserGroupToPerm.user_group_id ==
2640 2644 UserGroup.users_group_id)\
2641 2645 .join(
2642 2646 UserGroupMember,
2643 2647 UserGroupUserGroupToPerm.user_group_id ==
2644 2648 UserGroupMember.users_group_id)\
2645 2649 .filter(
2646 2650 UserGroupMember.user_id == user_id,
2647 2651 UserGroup.users_group_active == true())
2648 2652 if user_group_id:
2649 2653 q = q.filter(
2650 2654 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2651 2655
2652 2656 return q.all()
2653 2657
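A minimal usage sketch for the permission lookups above, assuming a configured database session and existing rows; the user/repository ids are illustrative:

    from rhodecode.model.db import Permission

    # Direct per-user permissions on one repository, returned as
    # (UserRepoToPerm, Repository, Permission) tuples.
    direct = Permission.get_default_repo_perms(user_id=2, repo_id=5)

    # Permissions inherited through active user group memberships.
    inherited = Permission.get_default_repo_perms_from_user_group(
        user_id=2, repo_id=5)

    for row in direct + inherited:
        print('%s: %s' % (row[1].repo_name, row[2].permission_name))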
2654 2658
2655 2659 class UserRepoToPerm(Base, BaseModel):
2656 2660 __tablename__ = 'repo_to_perm'
2657 2661 __table_args__ = (
2658 2662 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2659 2663 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2660 2664 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2661 2665 )
2662 2666 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2663 2667 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2664 2668 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2665 2669 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2666 2670
2667 2671 user = relationship('User')
2668 2672 repository = relationship('Repository')
2669 2673 permission = relationship('Permission')
2670 2674
2671 2675 @classmethod
2672 2676 def create(cls, user, repository, permission):
2673 2677 n = cls()
2674 2678 n.user = user
2675 2679 n.repository = repository
2676 2680 n.permission = permission
2677 2681 Session().add(n)
2678 2682 return n
2679 2683
2680 2684 def __unicode__(self):
2681 2685 return u'<%s => %s >' % (self.user, self.repository)
2682 2686
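A hedged sketch of granting a direct repository permission through the factory above; the names are illustrative, the commit belongs to the calling code, and the get_by_username / get_by_repo_name helpers are assumed to be the usual lookups defined elsewhere in this module:

    from rhodecode.model.db import Permission, Repository, User, UserRepoToPerm
    from rhodecode.model.meta import Session

    user = User.get_by_username('example-user')
    repo = Repository.get_by_repo_name('example-group/example-repo')
    perm = Permission.get_by_key('repository.write')

    # create() only adds the association to the session; committing is
    # left to the caller.
    UserRepoToPerm.create(user, repo, perm)
    Session().commit()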
2683 2687
2684 2688 class UserUserGroupToPerm(Base, BaseModel):
2685 2689 __tablename__ = 'user_user_group_to_perm'
2686 2690 __table_args__ = (
2687 2691 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2688 2692 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2689 2693 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2690 2694 )
2691 2695 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2692 2696 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2693 2697 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2694 2698 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2695 2699
2696 2700 user = relationship('User')
2697 2701 user_group = relationship('UserGroup')
2698 2702 permission = relationship('Permission')
2699 2703
2700 2704 @classmethod
2701 2705 def create(cls, user, user_group, permission):
2702 2706 n = cls()
2703 2707 n.user = user
2704 2708 n.user_group = user_group
2705 2709 n.permission = permission
2706 2710 Session().add(n)
2707 2711 return n
2708 2712
2709 2713 def __unicode__(self):
2710 2714 return u'<%s => %s >' % (self.user, self.user_group)
2711 2715
2712 2716
2713 2717 class UserToPerm(Base, BaseModel):
2714 2718 __tablename__ = 'user_to_perm'
2715 2719 __table_args__ = (
2716 2720 UniqueConstraint('user_id', 'permission_id'),
2717 2721 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2718 2722 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2719 2723 )
2720 2724 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2721 2725 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2722 2726 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2723 2727
2724 2728 user = relationship('User')
2725 2729 permission = relationship('Permission', lazy='joined')
2726 2730
2727 2731 def __unicode__(self):
2728 2732 return u'<%s => %s >' % (self.user, self.permission)
2729 2733
2730 2734
2731 2735 class UserGroupRepoToPerm(Base, BaseModel):
2732 2736 __tablename__ = 'users_group_repo_to_perm'
2733 2737 __table_args__ = (
2734 2738 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2735 2739 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2736 2740 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2737 2741 )
2738 2742 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2739 2743 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2740 2744 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2741 2745 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2742 2746
2743 2747 users_group = relationship('UserGroup')
2744 2748 permission = relationship('Permission')
2745 2749 repository = relationship('Repository')
2746 2750
2747 2751 @classmethod
2748 2752 def create(cls, users_group, repository, permission):
2749 2753 n = cls()
2750 2754 n.users_group = users_group
2751 2755 n.repository = repository
2752 2756 n.permission = permission
2753 2757 Session().add(n)
2754 2758 return n
2755 2759
2756 2760 def __unicode__(self):
2757 2761 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2758 2762
2759 2763
2760 2764 class UserGroupUserGroupToPerm(Base, BaseModel):
2761 2765 __tablename__ = 'user_group_user_group_to_perm'
2762 2766 __table_args__ = (
2763 2767 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2764 2768 CheckConstraint('target_user_group_id != user_group_id'),
2765 2769 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2766 2770 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2767 2771 )
2768 2772 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2769 2773 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2770 2774 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2771 2775 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2772 2776
2773 2777 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2774 2778 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2775 2779 permission = relationship('Permission')
2776 2780
2777 2781 @classmethod
2778 2782 def create(cls, target_user_group, user_group, permission):
2779 2783 n = cls()
2780 2784 n.target_user_group = target_user_group
2781 2785 n.user_group = user_group
2782 2786 n.permission = permission
2783 2787 Session().add(n)
2784 2788 return n
2785 2789
2786 2790 def __unicode__(self):
2787 2791 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2788 2792
2789 2793
2790 2794 class UserGroupToPerm(Base, BaseModel):
2791 2795 __tablename__ = 'users_group_to_perm'
2792 2796 __table_args__ = (
2793 2797 UniqueConstraint('users_group_id', 'permission_id',),
2794 2798 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2795 2799 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2796 2800 )
2797 2801 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2798 2802 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2799 2803 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2800 2804
2801 2805 users_group = relationship('UserGroup')
2802 2806 permission = relationship('Permission')
2803 2807
2804 2808
2805 2809 class UserRepoGroupToPerm(Base, BaseModel):
2806 2810 __tablename__ = 'user_repo_group_to_perm'
2807 2811 __table_args__ = (
2808 2812 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2809 2813 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2810 2814 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2811 2815 )
2812 2816
2813 2817 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2814 2818 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2815 2819 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2816 2820 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2817 2821
2818 2822 user = relationship('User')
2819 2823 group = relationship('RepoGroup')
2820 2824 permission = relationship('Permission')
2821 2825
2822 2826 @classmethod
2823 2827 def create(cls, user, repository_group, permission):
2824 2828 n = cls()
2825 2829 n.user = user
2826 2830 n.group = repository_group
2827 2831 n.permission = permission
2828 2832 Session().add(n)
2829 2833 return n
2830 2834
2831 2835
2832 2836 class UserGroupRepoGroupToPerm(Base, BaseModel):
2833 2837 __tablename__ = 'users_group_repo_group_to_perm'
2834 2838 __table_args__ = (
2835 2839 UniqueConstraint('users_group_id', 'group_id'),
2836 2840 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2837 2841 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2838 2842 )
2839 2843
2840 2844 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2841 2845 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2842 2846 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2843 2847 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2844 2848
2845 2849 users_group = relationship('UserGroup')
2846 2850 permission = relationship('Permission')
2847 2851 group = relationship('RepoGroup')
2848 2852
2849 2853 @classmethod
2850 2854 def create(cls, user_group, repository_group, permission):
2851 2855 n = cls()
2852 2856 n.users_group = user_group
2853 2857 n.group = repository_group
2854 2858 n.permission = permission
2855 2859 Session().add(n)
2856 2860 return n
2857 2861
2858 2862 def __unicode__(self):
2859 2863 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2860 2864
2861 2865
2862 2866 class Statistics(Base, BaseModel):
2863 2867 __tablename__ = 'statistics'
2864 2868 __table_args__ = (
2865 2869 UniqueConstraint('repository_id'),
2866 2870 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2867 2871 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2868 2872 )
2869 2873 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2870 2874 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
2871 2875 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
2872 2876 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
2873 2877 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
2874 2878 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
2875 2879
2876 2880 repository = relationship('Repository', single_parent=True)
2877 2881
2878 2882
2879 2883 class UserFollowing(Base, BaseModel):
2880 2884 __tablename__ = 'user_followings'
2881 2885 __table_args__ = (
2882 2886 UniqueConstraint('user_id', 'follows_repository_id'),
2883 2887 UniqueConstraint('user_id', 'follows_user_id'),
2884 2888 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2885 2889 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2886 2890 )
2887 2891
2888 2892 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2889 2893 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2890 2894 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
2891 2895 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
2892 2896 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2893 2897
2894 2898 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2895 2899
2896 2900 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2897 2901 follows_repository = relationship('Repository', order_by='Repository.repo_name')
2898 2902
2899 2903 @classmethod
2900 2904 def get_repo_followers(cls, repo_id):
2901 2905 return cls.query().filter(cls.follows_repo_id == repo_id)
2902 2906
2903 2907
2904 2908 class CacheKey(Base, BaseModel):
2905 2909 __tablename__ = 'cache_invalidation'
2906 2910 __table_args__ = (
2907 2911 UniqueConstraint('cache_key'),
2908 2912 Index('key_idx', 'cache_key'),
2909 2913 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2910 2914 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2911 2915 )
2912 2916 CACHE_TYPE_ATOM = 'ATOM'
2913 2917 CACHE_TYPE_RSS = 'RSS'
2914 2918 CACHE_TYPE_README = 'README'
2915 2919
2916 2920 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2917 2921 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
2918 2922 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
2919 2923 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
2920 2924
2921 2925 def __init__(self, cache_key, cache_args=''):
2922 2926 self.cache_key = cache_key
2923 2927 self.cache_args = cache_args
2924 2928 self.cache_active = False
2925 2929
2926 2930 def __unicode__(self):
2927 2931 return u"<%s('%s:%s[%s]')>" % (
2928 2932 self.__class__.__name__,
2929 2933 self.cache_id, self.cache_key, self.cache_active)
2930 2934
2931 2935 def _cache_key_partition(self):
2932 2936 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2933 2937 return prefix, repo_name, suffix
2934 2938
2935 2939 def get_prefix(self):
2936 2940 """
2937 2941 Try to extract prefix from existing cache key. The key could consist
2938 2942 of prefix, repo_name, suffix
2939 2943 """
2940 2944 # this returns prefix, repo_name, suffix
2941 2945 return self._cache_key_partition()[0]
2942 2946
2943 2947 def get_suffix(self):
2944 2948 """
2945 2949 Get the suffix that might have been used in _get_cache_key to
2946 2950 generate self.cache_key. Only used for informational purposes
2947 2951 in repo_edit.mako.
2948 2952 """
2949 2953 # prefix, repo_name, suffix
2950 2954 return self._cache_key_partition()[2]
2951 2955
2952 2956 @classmethod
2953 2957 def delete_all_cache(cls):
2954 2958 """
2955 2959 Delete all cache keys from the database.
2956 2960 Should only be run when all instances are down and all entries
2957 2961 are thus stale.
2958 2962 """
2959 2963 cls.query().delete()
2960 2964 Session().commit()
2961 2965
2962 2966 @classmethod
2963 2967 def get_cache_key(cls, repo_name, cache_type):
2964 2968 """
2965 2969
2966 2970 Generate a cache key for this RhodeCode instance.
2967 2971 The prefix will most likely be the process id, or an explicitly
2968 2972 set instance_id from the .ini file.
2969 2973 """
2970 2974 import rhodecode
2971 2975 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
2972 2976
2973 2977 repo_as_unicode = safe_unicode(repo_name)
2974 2978 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
2975 2979 if cache_type else repo_as_unicode
2976 2980
2977 2981 return u'{}{}'.format(prefix, key)
2978 2982
2979 2983 @classmethod
2980 2984 def set_invalidate(cls, repo_name, delete=False):
2981 2985 """
2982 2986 Mark all caches of a repo as invalid in the database.
2983 2987 """
2984 2988
2985 2989 try:
2986 2990 qry = Session().query(cls).filter(cls.cache_args == repo_name)
2987 2991 if delete:
2988 2992 log.debug('cache objects deleted for repo %s',
2989 2993 safe_str(repo_name))
2990 2994 qry.delete()
2991 2995 else:
2992 2996 log.debug('cache objects marked as invalid for repo %s',
2993 2997 safe_str(repo_name))
2994 2998 qry.update({"cache_active": False})
2995 2999
2996 3000 Session().commit()
2997 3001 except Exception:
2998 3002 log.exception(
2999 3003 'Cache key invalidation failed for repository %s',
3000 3004 safe_str(repo_name))
3001 3005 Session().rollback()
3002 3006
3003 3007 @classmethod
3004 3008 def get_active_cache(cls, cache_key):
3005 3009 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3006 3010 if inv_obj:
3007 3011 return inv_obj
3008 3012 return None
3009 3013
3010 3014 @classmethod
3011 3015 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3012 3016 thread_scoped=False):
3013 3017 """
3014 3018 @cache_region('long_term')
3015 3019 def _heavy_calculation(cache_key):
3016 3020 return 'result'
3017 3021
3018 3022 cache_context = CacheKey.repo_context_cache(
3019 3023 _heavy_calculation, repo_name, cache_type)
3020 3024
3021 3025 with cache_context as context:
3022 3026 context.invalidate()
3023 3027 computed = context.compute()
3024 3028
3025 3029 assert computed == 'result'
3026 3030 """
3027 3031 from rhodecode.lib import caches
3028 3032 return caches.InvalidationContext(
3029 3033 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3030 3034
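A short sketch combining the key helpers and invalidation above; the repository name is illustrative and a configured session is assumed:

    from rhodecode.model.db import CacheKey

    # Build the instance-prefixed key used for the README cache of a repo.
    key = CacheKey.get_cache_key('example-repo', CacheKey.CACHE_TYPE_README)

    # Fetch the invalidation record for that key, if one exists.
    entry = CacheKey.get_active_cache(key)

    # Mark every cache entry of the repository as invalid (pass delete=True
    # to remove the rows instead of flagging them).
    CacheKey.set_invalidate('example-repo', delete=False)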
3031 3035
3032 3036 class ChangesetComment(Base, BaseModel):
3033 3037 __tablename__ = 'changeset_comments'
3034 3038 __table_args__ = (
3035 3039 Index('cc_revision_idx', 'revision'),
3036 3040 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3037 3041 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3038 3042 )
3039 3043
3040 3044 COMMENT_OUTDATED = u'comment_outdated'
3041 3045 COMMENT_TYPE_NOTE = u'note'
3042 3046 COMMENT_TYPE_TODO = u'todo'
3043 3047 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3044 3048
3045 3049 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3046 3050 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3047 3051 revision = Column('revision', String(40), nullable=True)
3048 3052 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3049 3053 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3050 3054 line_no = Column('line_no', Unicode(10), nullable=True)
3051 3055 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3052 3056 f_path = Column('f_path', Unicode(1000), nullable=True)
3053 3057 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3054 3058 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3055 3059 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3056 3060 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3057 3061 renderer = Column('renderer', Unicode(64), nullable=True)
3058 3062 display_state = Column('display_state', Unicode(128), nullable=True)
3059 3063
3060 3064 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3061 3065 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3062 3066 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3063 3067 author = relationship('User', lazy='joined')
3064 3068 repo = relationship('Repository')
3065 3069 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3066 3070 pull_request = relationship('PullRequest', lazy='joined')
3067 3071 pull_request_version = relationship('PullRequestVersion')
3068 3072
3069 3073 @classmethod
3070 3074 def get_users(cls, revision=None, pull_request_id=None):
3071 3075 """
3072 3076 Returns users associated with this ChangesetComment, i.e. those
3073 3077 who actually commented.
3074 3078
3075 3079 :param revision:
3076 3080 :param pull_request_id:
3077 3081 """
3078 3082 q = Session().query(User)\
3079 3083 .join(ChangesetComment.author)
3080 3084 if revision:
3081 3085 q = q.filter(cls.revision == revision)
3082 3086 elif pull_request_id:
3083 3087 q = q.filter(cls.pull_request_id == pull_request_id)
3084 3088 return q.all()
3085 3089
3086 3090 @classmethod
3087 3091 def get_index_from_version(cls, pr_version, versions):
3088 3092 num_versions = [x.pull_request_version_id for x in versions]
3089 3093 try:
3090 3094 return num_versions.index(pr_version) + 1
3091 3095 except (IndexError, ValueError):
3092 3096 return
3093 3097
3094 3098 @property
3095 3099 def outdated(self):
3096 3100 return self.display_state == self.COMMENT_OUTDATED
3097 3101
3098 3102 def outdated_at_version(self, version):
3099 3103 """
3100 3104 Checks if the comment is outdated for the given pull request version
3101 3105 """
3102 3106 return self.outdated and self.pull_request_version_id != version
3103 3107
3104 3108 def older_than_version(self, version):
3105 3109 """
3106 3110 Checks if the comment was made on an earlier version than the given one
3107 3111 """
3108 3112 if version is None:
3109 3113 return self.pull_request_version_id is not None
3110 3114
3111 3115 return self.pull_request_version_id < version
3112 3116
3113 3117 @property
3114 3118 def resolved(self):
3115 3119 return self.resolved_by[0] if self.resolved_by else None
3116 3120
3117 3121 @property
3118 3122 def is_todo(self):
3119 3123 return self.comment_type == self.COMMENT_TYPE_TODO
3120 3124
3121 3125 @property
3122 3126 def is_inline(self):
3123 3127 return self.line_no and self.f_path
3124 3128
3125 3129 def get_index_version(self, versions):
3126 3130 return self.get_index_from_version(
3127 3131 self.pull_request_version_id, versions)
3128 3132
3129 3133 def __repr__(self):
3130 3134 if self.comment_id:
3131 3135 return '<DB:Comment #%s>' % self.comment_id
3132 3136 else:
3133 3137 return '<DB:Comment at %#x>' % id(self)
3134 3138
3135 3139 def get_api_data(self):
3136 3140 comment = self
3137 3141 data = {
3138 3142 'comment_id': comment.comment_id,
3139 3143 'comment_type': comment.comment_type,
3140 3144 'comment_text': comment.text,
3141 3145 'comment_status': comment.status_change,
3142 3146 'comment_f_path': comment.f_path,
3143 3147 'comment_lineno': comment.line_no,
3144 3148 'comment_author': comment.author,
3145 3149 'comment_created_on': comment.created_on
3146 3150 }
3147 3151 return data
3148 3152
3149 3153 def __json__(self):
3150 3154 data = dict()
3151 3155 data.update(self.get_api_data())
3152 3156 return data
3153 3157
3154 3158
3155 3159 class ChangesetStatus(Base, BaseModel):
3156 3160 __tablename__ = 'changeset_statuses'
3157 3161 __table_args__ = (
3158 3162 Index('cs_revision_idx', 'revision'),
3159 3163 Index('cs_version_idx', 'version'),
3160 3164 UniqueConstraint('repo_id', 'revision', 'version'),
3161 3165 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3162 3166 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3163 3167 )
3164 3168 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3165 3169 STATUS_APPROVED = 'approved'
3166 3170 STATUS_REJECTED = 'rejected'
3167 3171 STATUS_UNDER_REVIEW = 'under_review'
3168 3172
3169 3173 STATUSES = [
3170 3174 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3171 3175 (STATUS_APPROVED, _("Approved")),
3172 3176 (STATUS_REJECTED, _("Rejected")),
3173 3177 (STATUS_UNDER_REVIEW, _("Under Review")),
3174 3178 ]
3175 3179
3176 3180 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3177 3181 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3178 3182 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3179 3183 revision = Column('revision', String(40), nullable=False)
3180 3184 status = Column('status', String(128), nullable=False, default=DEFAULT)
3181 3185 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3182 3186 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3183 3187 version = Column('version', Integer(), nullable=False, default=0)
3184 3188 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3185 3189
3186 3190 author = relationship('User', lazy='joined')
3187 3191 repo = relationship('Repository')
3188 3192 comment = relationship('ChangesetComment', lazy='joined')
3189 3193 pull_request = relationship('PullRequest', lazy='joined')
3190 3194
3191 3195 def __unicode__(self):
3192 3196 return u"<%s('%s[v%s]:%s')>" % (
3193 3197 self.__class__.__name__,
3194 3198 self.status, self.version, self.author
3195 3199 )
3196 3200
3197 3201 @classmethod
3198 3202 def get_status_lbl(cls, value):
3199 3203 return dict(cls.STATUSES).get(value)
3200 3204
3201 3205 @property
3202 3206 def status_lbl(self):
3203 3207 return ChangesetStatus.get_status_lbl(self.status)
3204 3208
3205 3209 def get_api_data(self):
3206 3210 status = self
3207 3211 data = {
3208 3212 'status_id': status.changeset_status_id,
3209 3213 'status': status.status,
3210 3214 }
3211 3215 return data
3212 3216
3213 3217 def __json__(self):
3214 3218 data = dict()
3215 3219 data.update(self.get_api_data())
3216 3220 return data
3217 3221
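A tiny sketch of the status/label mapping above:

    from rhodecode.model.db import ChangesetStatus

    # Translate a stored status value into its translatable display label.
    label = ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_APPROVED)

    # Unknown values return None because the lookup goes through dict.get().
    assert ChangesetStatus.get_status_lbl('no-such-status') is None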
3218 3222
3219 3223 class _PullRequestBase(BaseModel):
3220 3224 """
3221 3225 Common attributes of pull request and version entries.
3222 3226 """
3223 3227
3224 3228 # .status values
3225 3229 STATUS_NEW = u'new'
3226 3230 STATUS_OPEN = u'open'
3227 3231 STATUS_CLOSED = u'closed'
3228 3232
3229 3233 title = Column('title', Unicode(255), nullable=True)
3230 3234 description = Column(
3231 3235 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3232 3236 nullable=True)
3233 3237 # new/open/closed status of pull request (not approve/reject/etc)
3234 3238 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3235 3239 created_on = Column(
3236 3240 'created_on', DateTime(timezone=False), nullable=False,
3237 3241 default=datetime.datetime.now)
3238 3242 updated_on = Column(
3239 3243 'updated_on', DateTime(timezone=False), nullable=False,
3240 3244 default=datetime.datetime.now)
3241 3245
3242 3246 @declared_attr
3243 3247 def user_id(cls):
3244 3248 return Column(
3245 3249 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3246 3250 unique=None)
3247 3251
3248 3252 # 500 revisions max
3249 3253 _revisions = Column(
3250 3254 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3251 3255
3252 3256 @declared_attr
3253 3257 def source_repo_id(cls):
3254 3258 # TODO: dan: rename column to source_repo_id
3255 3259 return Column(
3256 3260 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3257 3261 nullable=False)
3258 3262
3259 3263 source_ref = Column('org_ref', Unicode(255), nullable=False)
3260 3264
3261 3265 @declared_attr
3262 3266 def target_repo_id(cls):
3263 3267 # TODO: dan: rename column to target_repo_id
3264 3268 return Column(
3265 3269 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3266 3270 nullable=False)
3267 3271
3268 3272 target_ref = Column('other_ref', Unicode(255), nullable=False)
3269 3273 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3270 3274
3271 3275 # TODO: dan: rename column to last_merge_source_rev
3272 3276 _last_merge_source_rev = Column(
3273 3277 'last_merge_org_rev', String(40), nullable=True)
3274 3278 # TODO: dan: rename column to last_merge_target_rev
3275 3279 _last_merge_target_rev = Column(
3276 3280 'last_merge_other_rev', String(40), nullable=True)
3277 3281 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3278 3282 merge_rev = Column('merge_rev', String(40), nullable=True)
3279 3283
3280 3284 reviewer_data = Column(
3281 3285 'reviewer_data_json', MutationObj.as_mutable(
3282 3286 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3283 3287
3284 3288 @property
3285 3289 def reviewer_data_json(self):
3286 3290 return json.dumps(self.reviewer_data)
3287 3291
3288 3292 @hybrid_property
3289 3293 def revisions(self):
3290 3294 return self._revisions.split(':') if self._revisions else []
3291 3295
3292 3296 @revisions.setter
3293 3297 def revisions(self, val):
3294 3298 self._revisions = ':'.join(val)
3295 3299
3296 3300 @declared_attr
3297 3301 def author(cls):
3298 3302 return relationship('User', lazy='joined')
3299 3303
3300 3304 @declared_attr
3301 3305 def source_repo(cls):
3302 3306 return relationship(
3303 3307 'Repository',
3304 3308 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3305 3309
3306 3310 @property
3307 3311 def source_ref_parts(self):
3308 3312 return self.unicode_to_reference(self.source_ref)
3309 3313
3310 3314 @declared_attr
3311 3315 def target_repo(cls):
3312 3316 return relationship(
3313 3317 'Repository',
3314 3318 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3315 3319
3316 3320 @property
3317 3321 def target_ref_parts(self):
3318 3322 return self.unicode_to_reference(self.target_ref)
3319 3323
3320 3324 @property
3321 3325 def shadow_merge_ref(self):
3322 3326 return self.unicode_to_reference(self._shadow_merge_ref)
3323 3327
3324 3328 @shadow_merge_ref.setter
3325 3329 def shadow_merge_ref(self, ref):
3326 3330 self._shadow_merge_ref = self.reference_to_unicode(ref)
3327 3331
3328 3332 def unicode_to_reference(self, raw):
3329 3333 """
3330 3334 Convert a unicode (or string) to a reference object.
3331 3335 If unicode evaluates to False it returns None.
3332 3336 """
3333 3337 if raw:
3334 3338 refs = raw.split(':')
3335 3339 return Reference(*refs)
3336 3340 else:
3337 3341 return None
3338 3342
3339 3343 def reference_to_unicode(self, ref):
3340 3344 """
3341 3345 Convert a reference object to unicode.
3342 3346 If reference is None it returns None.
3343 3347 """
3344 3348 if ref:
3345 3349 return u':'.join(ref)
3346 3350 else:
3347 3351 return None
3348 3352
3349 3353 def get_api_data(self, with_merge_state=True):
3350 3354 from rhodecode.model.pull_request import PullRequestModel
3351 3355
3352 3356 pull_request = self
3353 3357 if with_merge_state:
3354 3358 merge_status = PullRequestModel().merge_status(pull_request)
3355 3359 merge_state = {
3356 3360 'status': merge_status[0],
3357 3361 'message': safe_unicode(merge_status[1]),
3358 3362 }
3359 3363 else:
3360 3364 merge_state = {'status': 'not_available',
3361 3365 'message': 'not_available'}
3362 3366
3363 3367 merge_data = {
3364 3368 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3365 3369 'reference': (
3366 3370 pull_request.shadow_merge_ref._asdict()
3367 3371 if pull_request.shadow_merge_ref else None),
3368 3372 }
3369 3373
3370 3374 data = {
3371 3375 'pull_request_id': pull_request.pull_request_id,
3372 3376 'url': PullRequestModel().get_url(pull_request),
3373 3377 'title': pull_request.title,
3374 3378 'description': pull_request.description,
3375 3379 'status': pull_request.status,
3376 3380 'created_on': pull_request.created_on,
3377 3381 'updated_on': pull_request.updated_on,
3378 3382 'commit_ids': pull_request.revisions,
3379 3383 'review_status': pull_request.calculated_review_status(),
3380 3384 'mergeable': merge_state,
3381 3385 'source': {
3382 3386 'clone_url': pull_request.source_repo.clone_url(),
3383 3387 'repository': pull_request.source_repo.repo_name,
3384 3388 'reference': {
3385 3389 'name': pull_request.source_ref_parts.name,
3386 3390 'type': pull_request.source_ref_parts.type,
3387 3391 'commit_id': pull_request.source_ref_parts.commit_id,
3388 3392 },
3389 3393 },
3390 3394 'target': {
3391 3395 'clone_url': pull_request.target_repo.clone_url(),
3392 3396 'repository': pull_request.target_repo.repo_name,
3393 3397 'reference': {
3394 3398 'name': pull_request.target_ref_parts.name,
3395 3399 'type': pull_request.target_ref_parts.type,
3396 3400 'commit_id': pull_request.target_ref_parts.commit_id,
3397 3401 },
3398 3402 },
3399 3403 'merge': merge_data,
3400 3404 'author': pull_request.author.get_api_data(include_secrets=False,
3401 3405 details='basic'),
3402 3406 'reviewers': [
3403 3407 {
3404 3408 'user': reviewer.get_api_data(include_secrets=False,
3405 3409 details='basic'),
3406 3410 'reasons': reasons,
3407 3411 'review_status': st[0][1].status if st else 'not_reviewed',
3408 3412 }
3409 3413 for reviewer, reasons, mandatory, st in
3410 3414 pull_request.reviewers_statuses()
3411 3415 ]
3412 3416 }
3413 3417
3414 3418 return data
3415 3419
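A brief sketch of the revision and reference plumbing above, exercised on the concrete PullRequest class defined below; the field order of the Reference tuple (type, name, commit id) is an assumption about its definition elsewhere in this module:

    from rhodecode.model.db import PullRequest

    pr = PullRequest()

    # revisions are stored internally as one colon-separated text column.
    pr.revisions = ['deadbeef', 'cafebabe']
    assert pr._revisions == 'deadbeef:cafebabe'
    assert pr.revisions == ['deadbeef', 'cafebabe']

    # unicode_to_reference() splits the raw ref string into a Reference;
    # reference_to_unicode() is its inverse.
    ref = pr.unicode_to_reference(u'branch:default:deadbeef')
    assert pr.reference_to_unicode(ref) == u'branch:default:deadbeef'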
3416 3420
3417 3421 class PullRequest(Base, _PullRequestBase):
3418 3422 __tablename__ = 'pull_requests'
3419 3423 __table_args__ = (
3420 3424 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3421 3425 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3422 3426 )
3423 3427
3424 3428 pull_request_id = Column(
3425 3429 'pull_request_id', Integer(), nullable=False, primary_key=True)
3426 3430
3427 3431 def __repr__(self):
3428 3432 if self.pull_request_id:
3429 3433 return '<DB:PullRequest #%s>' % self.pull_request_id
3430 3434 else:
3431 3435 return '<DB:PullRequest at %#x>' % id(self)
3432 3436
3433 3437 reviewers = relationship('PullRequestReviewers',
3434 3438 cascade="all, delete, delete-orphan")
3435 3439 statuses = relationship('ChangesetStatus',
3436 3440 cascade="all, delete, delete-orphan")
3437 3441 comments = relationship('ChangesetComment',
3438 3442 cascade="all, delete, delete-orphan")
3439 3443 versions = relationship('PullRequestVersion',
3440 3444 cascade="all, delete, delete-orphan",
3441 3445 lazy='dynamic')
3442 3446
3443 3447 @classmethod
3444 3448 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3445 3449 internal_methods=None):
3446 3450
3447 3451 class PullRequestDisplay(object):
3448 3452 """
3449 3453 Special object wrapper for showing PullRequest data via Versions.
3450 3454 It mimics the PR object as closely as possible. This is a read-only
3451 3455 object intended only for display.
3452 3456 """
3453 3457
3454 3458 def __init__(self, attrs, internal=None):
3455 3459 self.attrs = attrs
3456 3460 # internal entries take priority over the values given via attrs
3457 3461 self.internal = internal or ['versions']
3458 3462
3459 3463 def __getattr__(self, item):
3460 3464 if item in self.internal:
3461 3465 return getattr(self, item)
3462 3466 try:
3463 3467 return self.attrs[item]
3464 3468 except KeyError:
3465 3469 raise AttributeError(
3466 3470 '%s object has no attribute %s' % (self, item))
3467 3471
3468 3472 def __repr__(self):
3469 3473 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3470 3474
3471 3475 def versions(self):
3472 3476 return pull_request_obj.versions.order_by(
3473 3477 PullRequestVersion.pull_request_version_id).all()
3474 3478
3475 3479 def is_closed(self):
3476 3480 return pull_request_obj.is_closed()
3477 3481
3478 3482 @property
3479 3483 def pull_request_version_id(self):
3480 3484 return getattr(pull_request_obj, 'pull_request_version_id', None)
3481 3485
3482 3486 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3483 3487
3484 3488 attrs.author = StrictAttributeDict(
3485 3489 pull_request_obj.author.get_api_data())
3486 3490 if pull_request_obj.target_repo:
3487 3491 attrs.target_repo = StrictAttributeDict(
3488 3492 pull_request_obj.target_repo.get_api_data())
3489 3493 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3490 3494
3491 3495 if pull_request_obj.source_repo:
3492 3496 attrs.source_repo = StrictAttributeDict(
3493 3497 pull_request_obj.source_repo.get_api_data())
3494 3498 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3495 3499
3496 3500 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3497 3501 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3498 3502 attrs.revisions = pull_request_obj.revisions
3499 3503
3500 3504 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3501 3505 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3502 3506 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3503 3507
3504 3508 return PullRequestDisplay(attrs, internal=internal_methods)
3505 3509
3506 3510 def is_closed(self):
3507 3511 return self.status == self.STATUS_CLOSED
3508 3512
3509 3513 def __json__(self):
3510 3514 return {
3511 3515 'revisions': self.revisions,
3512 3516 }
3513 3517
3514 3518 def calculated_review_status(self):
3515 3519 from rhodecode.model.changeset_status import ChangesetStatusModel
3516 3520 return ChangesetStatusModel().calculated_review_status(self)
3517 3521
3518 3522 def reviewers_statuses(self):
3519 3523 from rhodecode.model.changeset_status import ChangesetStatusModel
3520 3524 return ChangesetStatusModel().reviewers_statuses(self)
3521 3525
3522 3526 @property
3523 3527 def workspace_id(self):
3524 3528 from rhodecode.model.pull_request import PullRequestModel
3525 3529 return PullRequestModel()._workspace_id(self)
3526 3530
3527 3531 def get_shadow_repo(self):
3528 3532 workspace_id = self.workspace_id
3529 3533 vcs_obj = self.target_repo.scm_instance()
3530 3534 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3531 3535 workspace_id)
3532 3536 return vcs_obj._get_shadow_instance(shadow_repository_path)
3533 3537
3534 3538
3535 3539 class PullRequestVersion(Base, _PullRequestBase):
3536 3540 __tablename__ = 'pull_request_versions'
3537 3541 __table_args__ = (
3538 3542 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3539 3543 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3540 3544 )
3541 3545
3542 3546 pull_request_version_id = Column(
3543 3547 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3544 3548 pull_request_id = Column(
3545 3549 'pull_request_id', Integer(),
3546 3550 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3547 3551 pull_request = relationship('PullRequest')
3548 3552
3549 3553 def __repr__(self):
3550 3554 if self.pull_request_version_id:
3551 3555 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3552 3556 else:
3553 3557 return '<DB:PullRequestVersion at %#x>' % id(self)
3554 3558
3555 3559 @property
3556 3560 def reviewers(self):
3557 3561 return self.pull_request.reviewers
3558 3562
3559 3563 @property
3560 3564 def versions(self):
3561 3565 return self.pull_request.versions
3562 3566
3563 3567 def is_closed(self):
3564 3568 # calculate from original
3565 3569 return self.pull_request.status == self.STATUS_CLOSED
3566 3570
3567 3571 def calculated_review_status(self):
3568 3572 return self.pull_request.calculated_review_status()
3569 3573
3570 3574 def reviewers_statuses(self):
3571 3575 return self.pull_request.reviewers_statuses()
3572 3576
3573 3577
3574 3578 class PullRequestReviewers(Base, BaseModel):
3575 3579 __tablename__ = 'pull_request_reviewers'
3576 3580 __table_args__ = (
3577 3581 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3578 3582 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3579 3583 )
3580 3584
3581 3585 @hybrid_property
3582 3586 def reasons(self):
3583 3587 if not self._reasons:
3584 3588 return []
3585 3589 return self._reasons
3586 3590
3587 3591 @reasons.setter
3588 3592 def reasons(self, val):
3589 3593 val = val or []
3590 3594 if any(not isinstance(x, basestring) for x in val):
3591 3595 raise Exception('invalid reasons type, must be list of strings')
3592 3596 self._reasons = val
3593 3597
3594 3598 pull_requests_reviewers_id = Column(
3595 3599 'pull_requests_reviewers_id', Integer(), nullable=False,
3596 3600 primary_key=True)
3597 3601 pull_request_id = Column(
3598 3602 "pull_request_id", Integer(),
3599 3603 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3600 3604 user_id = Column(
3601 3605 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3602 3606 _reasons = Column(
3603 3607 'reason', MutationList.as_mutable(
3604 3608 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3605 3609 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3606 3610 user = relationship('User')
3607 3611 pull_request = relationship('PullRequest')
3608 3612
3609 3613
3610 3614 class Notification(Base, BaseModel):
3611 3615 __tablename__ = 'notifications'
3612 3616 __table_args__ = (
3613 3617 Index('notification_type_idx', 'type'),
3614 3618 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3615 3619 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3616 3620 )
3617 3621
3618 3622 TYPE_CHANGESET_COMMENT = u'cs_comment'
3619 3623 TYPE_MESSAGE = u'message'
3620 3624 TYPE_MENTION = u'mention'
3621 3625 TYPE_REGISTRATION = u'registration'
3622 3626 TYPE_PULL_REQUEST = u'pull_request'
3623 3627 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3624 3628
3625 3629 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3626 3630 subject = Column('subject', Unicode(512), nullable=True)
3627 3631 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3628 3632 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3629 3633 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3630 3634 type_ = Column('type', Unicode(255))
3631 3635
3632 3636 created_by_user = relationship('User')
3633 3637 notifications_to_users = relationship('UserNotification', lazy='joined',
3634 3638 cascade="all, delete, delete-orphan")
3635 3639
3636 3640 @property
3637 3641 def recipients(self):
3638 3642 return [x.user for x in UserNotification.query()\
3639 3643 .filter(UserNotification.notification == self)\
3640 3644 .order_by(UserNotification.user_id.asc()).all()]
3641 3645
3642 3646 @classmethod
3643 3647 def create(cls, created_by, subject, body, recipients, type_=None):
3644 3648 if type_ is None:
3645 3649 type_ = Notification.TYPE_MESSAGE
3646 3650
3647 3651 notification = cls()
3648 3652 notification.created_by_user = created_by
3649 3653 notification.subject = subject
3650 3654 notification.body = body
3651 3655 notification.type_ = type_
3652 3656 notification.created_on = datetime.datetime.now()
3653 3657
3654 3658 for u in recipients:
3655 3659 assoc = UserNotification()
3656 3660 assoc.notification = notification
3657 3661
3658 3662 # if created_by is among the recipients, mark their notification
3659 3663 # as read
3660 3664 if u.user_id == created_by.user_id:
3661 3665 assoc.read = True
3662 3666
3663 3667 u.notifications.append(assoc)
3664 3668 Session().add(notification)
3665 3669
3666 3670 return notification
3667 3671
3668 3672 @property
3669 3673 def description(self):
3670 3674 from rhodecode.model.notification import NotificationModel
3671 3675 return NotificationModel().make_description(self)
3672 3676
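A hedged sketch of creating a notification for a set of recipients with the factory above; the usernames are illustrative, the commit belongs to the calling code, and get_by_username is assumed to be the usual lookup defined elsewhere in this module:

    from rhodecode.model.db import Notification, User
    from rhodecode.model.meta import Session

    sender = User.get_by_username('admin')
    recipient = User.get_by_username('example-user')

    notification = Notification.create(
        created_by=sender,
        subject=u'Example subject',
        body=u'Example body',
        recipients=[recipient],
        type_=Notification.TYPE_MESSAGE)
    Session().commit()

    # Each recipient is linked through a UserNotification row; marking it
    # as read is an explicit step.
    for user_notification in notification.notifications_to_users:
        user_notification.mark_as_read()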
3673 3677
3674 3678 class UserNotification(Base, BaseModel):
3675 3679 __tablename__ = 'user_to_notification'
3676 3680 __table_args__ = (
3677 3681 UniqueConstraint('user_id', 'notification_id'),
3678 3682 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3679 3683 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3680 3684 )
3681 3685 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3682 3686 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3683 3687 read = Column('read', Boolean, default=False)
3684 3688 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3685 3689
3686 3690 user = relationship('User', lazy="joined")
3687 3691 notification = relationship('Notification', lazy="joined",
3688 3692 order_by=lambda: Notification.created_on.desc(),)
3689 3693
3690 3694 def mark_as_read(self):
3691 3695 self.read = True
3692 3696 Session().add(self)
3693 3697
3694 3698
3695 3699 class Gist(Base, BaseModel):
3696 3700 __tablename__ = 'gists'
3697 3701 __table_args__ = (
3698 3702 Index('g_gist_access_id_idx', 'gist_access_id'),
3699 3703 Index('g_created_on_idx', 'created_on'),
3700 3704 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3701 3705 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3702 3706 )
3703 3707 GIST_PUBLIC = u'public'
3704 3708 GIST_PRIVATE = u'private'
3705 3709 DEFAULT_FILENAME = u'gistfile1.txt'
3706 3710
3707 3711 ACL_LEVEL_PUBLIC = u'acl_public'
3708 3712 ACL_LEVEL_PRIVATE = u'acl_private'
3709 3713
3710 3714 gist_id = Column('gist_id', Integer(), primary_key=True)
3711 3715 gist_access_id = Column('gist_access_id', Unicode(250))
3712 3716 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3713 3717 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3714 3718 gist_expires = Column('gist_expires', Float(53), nullable=False)
3715 3719 gist_type = Column('gist_type', Unicode(128), nullable=False)
3716 3720 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3717 3721 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3718 3722 acl_level = Column('acl_level', Unicode(128), nullable=True)
3719 3723
3720 3724 owner = relationship('User')
3721 3725
3722 3726 def __repr__(self):
3723 3727 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3724 3728
3725 3729 @classmethod
3726 3730 def get_or_404(cls, id_, pyramid_exc=False):
3727 3731
3728 3732 if pyramid_exc:
3729 3733 from pyramid.httpexceptions import HTTPNotFound
3730 3734 else:
3731 3735 from webob.exc import HTTPNotFound
3732 3736
3733 3737 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3734 3738 if not res:
3735 3739 raise HTTPNotFound
3736 3740 return res
3737 3741
3738 3742 @classmethod
3739 3743 def get_by_access_id(cls, gist_access_id):
3740 3744 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3741 3745
3742 3746 def gist_url(self):
3743 3747 import rhodecode
3744 3748 from pylons import url
3745 3749
3746 3750 alias_url = rhodecode.CONFIG.get('gist_alias_url')
3747 3751 if alias_url:
3748 3752 return alias_url.replace('{gistid}', self.gist_access_id)
3749 3753
3750 3754 return url('gist', gist_id=self.gist_access_id, qualified=True)
3751 3755
3752 3756 @classmethod
3753 3757 def base_path(cls):
3754 3758 """
3755 3759 Returns the base path where all gists are stored
3756 3760
3757 3761 :param cls:
3758 3762 """
3759 3763 from rhodecode.model.gist import GIST_STORE_LOC
3760 3764 q = Session().query(RhodeCodeUi)\
3761 3765 .filter(RhodeCodeUi.ui_key == URL_SEP)
3762 3766 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3763 3767 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3764 3768
3765 3769 def get_api_data(self):
3766 3770 """
3767 3771 Common function for generating gist related data for API
3768 3772 """
3769 3773 gist = self
3770 3774 data = {
3771 3775 'gist_id': gist.gist_id,
3772 3776 'type': gist.gist_type,
3773 3777 'access_id': gist.gist_access_id,
3774 3778 'description': gist.gist_description,
3775 3779 'url': gist.gist_url(),
3776 3780 'expires': gist.gist_expires,
3777 3781 'created_on': gist.created_on,
3778 3782 'modified_at': gist.modified_at,
3779 3783 'content': None,
3780 3784 'acl_level': gist.acl_level,
3781 3785 }
3782 3786 return data
3783 3787
3784 3788 def __json__(self):
3785 3789 data = dict()
3787 3791 data.update(self.get_api_data())
3788 3792 return data
3789 3793 # SCM functions
3790 3794
3791 3795 def scm_instance(self, **kwargs):
3792 3796 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
3793 3797 return get_vcs_instance(
3794 3798 repo_path=safe_str(full_repo_path), create=False)
3795 3799
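A small sketch of the gist lookup helpers above; the access id is illustrative:

    from rhodecode.model.db import Gist

    gist = Gist.get_by_access_id('aBcDeF12')  # returns None when not found
    if gist is not None:
        # gist_url() honours the optional gist_alias_url setting.
        url = gist.gist_url()
        data = gist.get_api_data()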
3796 3800
3797 3801 class ExternalIdentity(Base, BaseModel):
3798 3802 __tablename__ = 'external_identities'
3799 3803 __table_args__ = (
3800 3804 Index('local_user_id_idx', 'local_user_id'),
3801 3805 Index('external_id_idx', 'external_id'),
3802 3806 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3803 3807 'mysql_charset': 'utf8'})
3804 3808
3805 3809 external_id = Column('external_id', Unicode(255), default=u'',
3806 3810 primary_key=True)
3807 3811 external_username = Column('external_username', Unicode(1024), default=u'')
3808 3812 local_user_id = Column('local_user_id', Integer(),
3809 3813 ForeignKey('users.user_id'), primary_key=True)
3810 3814 provider_name = Column('provider_name', Unicode(255), default=u'',
3811 3815 primary_key=True)
3812 3816 access_token = Column('access_token', String(1024), default=u'')
3813 3817 alt_token = Column('alt_token', String(1024), default=u'')
3814 3818 token_secret = Column('token_secret', String(1024), default=u'')
3815 3819
3816 3820 @classmethod
3817 3821 def by_external_id_and_provider(cls, external_id, provider_name,
3818 3822 local_user_id=None):
3819 3823 """
3820 3824 Returns ExternalIdentity instance based on search params
3821 3825
3822 3826 :param external_id:
3823 3827 :param provider_name:
3824 3828 :return: ExternalIdentity
3825 3829 """
3826 3830 query = cls.query()
3827 3831 query = query.filter(cls.external_id == external_id)
3828 3832 query = query.filter(cls.provider_name == provider_name)
3829 3833 if local_user_id:
3830 3834 query = query.filter(cls.local_user_id == local_user_id)
3831 3835 return query.first()
3832 3836
3833 3837 @classmethod
3834 3838 def user_by_external_id_and_provider(cls, external_id, provider_name):
3835 3839 """
3836 3840 Returns User instance based on search params
3837 3841
3838 3842 :param external_id:
3839 3843 :param provider_name:
3840 3844 :return: User
3841 3845 """
3842 3846 query = User.query()
3843 3847 query = query.filter(cls.external_id == external_id)
3844 3848 query = query.filter(cls.provider_name == provider_name)
3845 3849 query = query.filter(User.user_id == cls.local_user_id)
3846 3850 return query.first()
3847 3851
3848 3852 @classmethod
3849 3853 def by_local_user_id(cls, local_user_id):
3850 3854 """
3851 3855 Returns all external identities (and their tokens) for a user
3852 3856
3853 3857 :param local_user_id:
3854 3858 :return: ExternalIdentity
3855 3859 """
3856 3860 query = cls.query()
3857 3861 query = query.filter(cls.local_user_id == local_user_id)
3858 3862 return query
3859 3863
3860 3864
3861 3865 class Integration(Base, BaseModel):
3862 3866 __tablename__ = 'integrations'
3863 3867 __table_args__ = (
3864 3868 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3865 3869 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3866 3870 )
3867 3871
3868 3872 integration_id = Column('integration_id', Integer(), primary_key=True)
3869 3873 integration_type = Column('integration_type', String(255))
3870 3874 enabled = Column('enabled', Boolean(), nullable=False)
3871 3875 name = Column('name', String(255), nullable=False)
3872 3876 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
3873 3877 default=False)
3874 3878
3875 3879 settings = Column(
3876 3880 'settings_json', MutationObj.as_mutable(
3877 3881 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3878 3882 repo_id = Column(
3879 3883 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
3880 3884 nullable=True, unique=None, default=None)
3881 3885 repo = relationship('Repository', lazy='joined')
3882 3886
3883 3887 repo_group_id = Column(
3884 3888 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
3885 3889 nullable=True, unique=None, default=None)
3886 3890 repo_group = relationship('RepoGroup', lazy='joined')
3887 3891
3888 3892 @property
3889 3893 def scope(self):
3890 3894 if self.repo:
3891 3895 return repr(self.repo)
3892 3896 if self.repo_group:
3893 3897 if self.child_repos_only:
3894 3898 return repr(self.repo_group) + ' (child repos only)'
3895 3899 else:
3896 3900 return repr(self.repo_group) + ' (recursive)'
3897 3901 if self.child_repos_only:
3898 3902 return 'root_repos'
3899 3903 return 'global'
3900 3904
3901 3905 def __repr__(self):
3902 3906 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
3903 3907
3904 3908
3905 3909 class RepoReviewRuleUser(Base, BaseModel):
3906 3910 __tablename__ = 'repo_review_rules_users'
3907 3911 __table_args__ = (
3908 3912 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3909 3913 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3910 3914 )
3911 3915 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
3912 3916 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3913 3917 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
3914 3918 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3915 3919 user = relationship('User')
3916 3920
3917 3921 def rule_data(self):
3918 3922 return {
3919 3923 'mandatory': self.mandatory
3920 3924 }
3921 3925
3922 3926
3923 3927 class RepoReviewRuleUserGroup(Base, BaseModel):
3924 3928 __tablename__ = 'repo_review_rules_users_groups'
3925 3929 __table_args__ = (
3926 3930 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3927 3931 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3928 3932 )
3929 3933 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
3930 3934 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3931 3935 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
3932 3936 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3933 3937 users_group = relationship('UserGroup')
3934 3938
3935 3939 def rule_data(self):
3936 3940 return {
3937 3941 'mandatory': self.mandatory
3938 3942 }
3939 3943
3940 3944
3941 3945 class RepoReviewRule(Base, BaseModel):
3942 3946 __tablename__ = 'repo_review_rules'
3943 3947 __table_args__ = (
3944 3948 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3945 3949 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3946 3950 )
3947 3951
3948 3952 repo_review_rule_id = Column(
3949 3953 'repo_review_rule_id', Integer(), primary_key=True)
3950 3954 repo_id = Column(
3951 3955 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
3952 3956 repo = relationship('Repository', backref='review_rules')
3953 3957
3954 3958 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
3955 3959 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
3956 3960
3957 3961 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
3958 3962 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
3959 3963 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
3960 3964 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
3961 3965
3962 3966 rule_users = relationship('RepoReviewRuleUser')
3963 3967 rule_user_groups = relationship('RepoReviewRuleUserGroup')
3964 3968
3965 3969 @hybrid_property
3966 3970 def branch_pattern(self):
3967 3971 return self._branch_pattern or '*'
3968 3972
3969 3973 def _validate_glob(self, value):
3970 3974 re.compile('^' + glob2re(value) + '$')
3971 3975
3972 3976 @branch_pattern.setter
3973 3977 def branch_pattern(self, value):
3974 3978 self._validate_glob(value)
3975 3979 self._branch_pattern = value or '*'
3976 3980
3977 3981 @hybrid_property
3978 3982 def file_pattern(self):
3979 3983 return self._file_pattern or '*'
3980 3984
3981 3985 @file_pattern.setter
3982 3986 def file_pattern(self, value):
3983 3987 self._validate_glob(value)
3984 3988 self._file_pattern = value or '*'
3985 3989
3986 3990 def matches(self, branch, files_changed):
3987 3991 """
3988 3992 Check if this review rule matches a branch/files in a pull request
3989 3993
3990 3994 :param branch: branch name for the commit
3991 3995 :param files_changed: list of file paths changed in the pull request
3992 3996 """
3993 3997
3994 3998 branch = branch or ''
3995 3999 files_changed = files_changed or []
3996 4000
3997 4001 branch_matches = True
3998 4002 if branch:
3999 4003 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
4000 4004 branch_matches = bool(branch_regex.search(branch))
4001 4005
4002 4006 files_matches = True
4003 4007 if self.file_pattern != '*':
4004 4008 files_matches = False
4005 4009 file_regex = re.compile(glob2re(self.file_pattern))
4006 4010 for filename in files_changed:
4007 4011 if file_regex.search(filename):
4008 4012 files_matches = True
4009 4013 break
4010 4014
4011 4015 return branch_matches and files_matches
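        # Behaviour sketch, assuming a rule with branch_pattern u'feature-*'
        # and the default file_pattern u'*' (pattern and branch names below
        # are placeholders, not values from this changeset):
        #
        #     rule.matches('feature-login', ['README.rst'])  # -> True
        #     rule.matches('default', ['README.rst'])        # -> False, branch mismatch
        #     rule.matches('', ['README.rst'])               # -> True, empty branch skips the check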
4012 4016
4013 4017 @property
4014 4018 def review_users(self):
4015 4019 """ Returns the users which this rule applies to """
4016 4020
4017 4021 users = collections.OrderedDict()
4018 4022
4019 4023 for rule_user in self.rule_users:
4020 4024 if rule_user.user.active:
4021 4025 if rule_user.user not in users:
4022 4026 users[rule_user.user.username] = {
4023 4027 'user': rule_user.user,
4024 4028 'source': 'user',
4025 4029 'source_data': {},
4026 4030 'data': rule_user.rule_data()
4027 4031 }
4028 4032
4029 4033 for rule_user_group in self.rule_user_groups:
4030 4034 source_data = {
4031 4035 'name': rule_user_group.users_group.users_group_name,
4032 4036 'members': len(rule_user_group.users_group.members)
4033 4037 }
4034 4038 for member in rule_user_group.users_group.members:
4035 4039 if member.user.active:
4036 4040 users[member.user.username] = {
4037 4041 'user': member.user,
4038 4042 'source': 'user_group',
4039 4043 'source_data': source_data,
4040 4044 'data': rule_user_group.rule_data()
4041 4045 }
4042 4046
4043 4047 return users
4044 4048
4045 4049 def __repr__(self):
4046 4050 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4047 4051 self.repo_review_rule_id, self.repo)
4048 4052
4049 4053
4050 4054 class DbMigrateVersion(Base, BaseModel):
4051 4055 __tablename__ = 'db_migrate_version'
4052 4056 __table_args__ = (
4053 4057 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4054 4058 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4055 4059 )
4056 4060 repository_id = Column('repository_id', String(250), primary_key=True)
4057 4061 repository_path = Column('repository_path', Text)
4058 4062 version = Column('version', Integer)
4059 4063
4060 4064
4061 4065 class DbSession(Base, BaseModel):
4062 4066 __tablename__ = 'db_session'
4063 4067 __table_args__ = (
4064 4068 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4065 4069 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4066 4070 )
4067 4071
4068 4072 def __repr__(self):
4069 4073 return '<DB:DbSession({})>'.format(self.id)
4070 4074
4071 4075 id = Column('id', Integer())
4072 4076 namespace = Column('namespace', String(255), primary_key=True)
4073 4077 accessed = Column('accessed', DateTime, nullable=False)
4074 4078 created = Column('created', DateTime, nullable=False)
4075 4079 data = Column('data', PickleType, nullable=False)
@@ -1,1551 +1,1551 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from pyramid.threadlocal import get_current_request
35 35 from sqlalchemy import or_
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
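# For instance, a "nothing changed" update is reported as (pull_request here
# stands for the PullRequest instance being updated):
#
#     UpdateResponse(
#         executed=False, reason=UpdateFailureReason.NO_CHANGE,
#         old=pull_request, new=None, changes=None,
#         source_changed=False, target_changed=False)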
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = 3
78 78
79 79 MERGE_STATUS_MESSAGES = {
80 80 MergeFailureReason.NONE: lazy_ugettext(
81 81 'This pull request can be automatically merged.'),
82 82 MergeFailureReason.UNKNOWN: lazy_ugettext(
83 83 'This pull request cannot be merged because of an unhandled'
84 84 ' exception.'),
85 85 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
86 86 'This pull request cannot be merged because of merge conflicts.'),
87 87 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
88 88 'This pull request could not be merged because push to target'
89 89 ' failed.'),
90 90 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
91 91 'This pull request cannot be merged because the target is not a'
92 92 ' head.'),
93 93 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
94 94 'This pull request cannot be merged because the source contains'
95 95 ' more branches than the target.'),
96 96 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
97 97 'This pull request cannot be merged because the target has'
98 98 ' multiple heads.'),
99 99 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
100 100 'This pull request cannot be merged because the target repository'
101 101 ' is locked.'),
102 102 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
103 103 'This pull request cannot be merged because the target or the '
104 104 'source reference is missing.'),
105 105 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
106 106 'This pull request cannot be merged because the target '
107 107 'reference is missing.'),
108 108 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
109 109 'This pull request cannot be merged because the source '
110 110 'reference is missing.'),
111 111 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
112 112 'This pull request cannot be merged because of conflicts related '
113 113 'to sub repositories.'),
114 114 }
115 115
116 116 UPDATE_STATUS_MESSAGES = {
117 117 UpdateFailureReason.NONE: lazy_ugettext(
118 118 'Pull request update successful.'),
119 119 UpdateFailureReason.UNKNOWN: lazy_ugettext(
120 120 'Pull request update failed because of an unknown error.'),
121 121 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
122 122 'No update needed because the source and target have not changed.'),
123 123 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
124 124 'Pull request cannot be updated because the reference type is '
125 125 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
126 126 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
127 127 'This pull request cannot be updated because the target '
128 128 'reference is missing.'),
129 129 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
130 130 'This pull request cannot be updated because the source '
131 131 'reference is missing.'),
132 132 }
133 133
134 134 def __get_pull_request(self, pull_request):
135 135 return self._get_instance((
136 136 PullRequest, PullRequestVersion), pull_request)
137 137
138 138 def _check_perms(self, perms, pull_request, user, api=False):
139 139 if not api:
140 140 return h.HasRepoPermissionAny(*perms)(
141 141 user=user, repo_name=pull_request.target_repo.repo_name)
142 142 else:
143 143 return h.HasRepoPermissionAnyApi(*perms)(
144 144 user=user, repo_name=pull_request.target_repo.repo_name)
145 145
146 146 def check_user_read(self, pull_request, user, api=False):
147 147 _perms = ('repository.admin', 'repository.write', 'repository.read',)
148 148 return self._check_perms(_perms, pull_request, user, api)
149 149
150 150 def check_user_merge(self, pull_request, user, api=False):
151 151 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
152 152 return self._check_perms(_perms, pull_request, user, api)
153 153
154 154 def check_user_update(self, pull_request, user, api=False):
155 155 owner = user.user_id == pull_request.user_id
156 156 return self.check_user_merge(pull_request, user, api) or owner
157 157
158 158 def check_user_delete(self, pull_request, user):
159 159 owner = user.user_id == pull_request.user_id
160 160 _perms = ('repository.admin',)
161 161 return self._check_perms(_perms, pull_request, user) or owner
162 162
163 163 def check_user_change_status(self, pull_request, user, api=False):
164 164 reviewer = user.user_id in [x.user_id for x in
165 165 pull_request.reviewers]
166 166 return self.check_user_update(pull_request, user, api) or reviewer
167 167
168 168 def get(self, pull_request):
169 169 return self.__get_pull_request(pull_request)
170 170
171 171 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
172 172 opened_by=None, order_by=None,
173 173 order_dir='desc'):
174 174 repo = None
175 175 if repo_name:
176 176 repo = self._get_repo(repo_name)
177 177
178 178 q = PullRequest.query()
179 179
180 180 # source or target
181 181 if repo and source:
182 182 q = q.filter(PullRequest.source_repo == repo)
183 183 elif repo:
184 184 q = q.filter(PullRequest.target_repo == repo)
185 185
186 186 # closed,opened
187 187 if statuses:
188 188 q = q.filter(PullRequest.status.in_(statuses))
189 189
190 190 # opened by filter
191 191 if opened_by:
192 192 q = q.filter(PullRequest.user_id.in_(opened_by))
193 193
194 194 if order_by:
195 195 order_map = {
196 196 'name_raw': PullRequest.pull_request_id,
197 197 'title': PullRequest.title,
198 198 'updated_on_raw': PullRequest.updated_on,
199 199 'target_repo': PullRequest.target_repo_id
200 200 }
201 201 if order_dir == 'asc':
202 202 q = q.order_by(order_map[order_by].asc())
203 203 else:
204 204 q = q.order_by(order_map[order_by].desc())
205 205
206 206 return q
207 207
208 208 def count_all(self, repo_name, source=False, statuses=None,
209 209 opened_by=None):
210 210 """
211 211 Count the number of pull requests for a specific repository.
212 212
213 213 :param repo_name: target or source repo
214 214 :param source: boolean flag to specify if repo_name refers to source
215 215 :param statuses: list of pull request statuses
216 216 :param opened_by: author user of the pull request
217 217 :returns: int number of pull requests
218 218 """
219 219 q = self._prepare_get_all_query(
220 220 repo_name, source=source, statuses=statuses, opened_by=opened_by)
221 221
222 222 return q.count()
223 223
224 224 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
225 225 offset=0, length=None, order_by=None, order_dir='desc'):
226 226 """
227 227 Get all pull requests for a specific repository.
228 228
229 229 :param repo_name: target or source repo
230 230 :param source: boolean flag to specify if repo_name refers to source
231 231 :param statuses: list of pull request statuses
232 232 :param opened_by: author user of the pull request
233 233 :param offset: pagination offset
234 234 :param length: length of returned list
235 235 :param order_by: order of the returned list
236 236 :param order_dir: 'asc' or 'desc' ordering direction
237 237 :returns: list of pull requests
238 238 """
239 239 q = self._prepare_get_all_query(
240 240 repo_name, source=source, statuses=statuses, opened_by=opened_by,
241 241 order_by=order_by, order_dir=order_dir)
242 242
243 243 if length:
244 244 pull_requests = q.limit(length).offset(offset).all()
245 245 else:
246 246 pull_requests = q.all()
247 247
248 248 return pull_requests
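        # Minimal usage sketch ('some/repo' is a placeholder repository name;
        # order_by keys come from the order_map in _prepare_get_all_query, and
        # statuses, e.g. [PullRequest.STATUS_CLOSED], can narrow the result):
        #
        #     prs = PullRequestModel().get_all(
        #         'some/repo', offset=0, length=20,
        #         order_by='updated_on_raw', order_dir='desc')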
249 249
250 250 def count_awaiting_review(self, repo_name, source=False, statuses=None,
251 251 opened_by=None):
252 252 """
253 253 Count the number of pull requests for a specific repository that are
254 254 awaiting review.
255 255
256 256 :param repo_name: target or source repo
257 257 :param source: boolean flag to specify if repo_name refers to source
258 258 :param statuses: list of pull request statuses
259 259 :param opened_by: author user of the pull request
260 260 :returns: int number of pull requests
261 261 """
262 262 pull_requests = self.get_awaiting_review(
263 263 repo_name, source=source, statuses=statuses, opened_by=opened_by)
264 264
265 265 return len(pull_requests)
266 266
267 267 def get_awaiting_review(self, repo_name, source=False, statuses=None,
268 268 opened_by=None, offset=0, length=None,
269 269 order_by=None, order_dir='desc'):
270 270 """
271 271 Get all pull requests for a specific repository that are awaiting
272 272 review.
273 273
274 274 :param repo_name: target or source repo
275 275 :param source: boolean flag to specify if repo_name refers to source
276 276 :param statuses: list of pull request statuses
277 277 :param opened_by: author user of the pull request
278 278 :param offset: pagination offset
279 279 :param length: length of returned list
280 280 :param order_by: order of the returned list
281 281 :param order_dir: 'asc' or 'desc' ordering direction
282 282 :returns: list of pull requests
283 283 """
284 284 pull_requests = self.get_all(
285 285 repo_name, source=source, statuses=statuses, opened_by=opened_by,
286 286 order_by=order_by, order_dir=order_dir)
287 287
288 288 _filtered_pull_requests = []
289 289 for pr in pull_requests:
290 290 status = pr.calculated_review_status()
291 291 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
292 292 ChangesetStatus.STATUS_UNDER_REVIEW]:
293 293 _filtered_pull_requests.append(pr)
294 294 if length:
295 295 return _filtered_pull_requests[offset:offset+length]
296 296 else:
297 297 return _filtered_pull_requests
298 298
299 299 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
300 300 opened_by=None, user_id=None):
301 301 """
302 302 Count the number of pull requests for a specific repository that are
303 303 awaiting review from a specific user.
304 304
305 305 :param repo_name: target or source repo
306 306 :param source: boolean flag to specify if repo_name refers to source
307 307 :param statuses: list of pull request statuses
308 308 :param opened_by: author user of the pull request
309 309 :param user_id: reviewer user of the pull request
310 310 :returns: int number of pull requests
311 311 """
312 312 pull_requests = self.get_awaiting_my_review(
313 313 repo_name, source=source, statuses=statuses, opened_by=opened_by,
314 314 user_id=user_id)
315 315
316 316 return len(pull_requests)
317 317
318 318 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
319 319 opened_by=None, user_id=None, offset=0,
320 320 length=None, order_by=None, order_dir='desc'):
321 321 """
322 322 Get all pull requests for a specific repository that are awaiting
323 323 review from a specific user.
324 324
325 325 :param repo_name: target or source repo
326 326 :param source: boolean flag to specify if repo_name refers to source
327 327 :param statuses: list of pull request statuses
328 328 :param opened_by: author user of the pull request
329 329 :param user_id: reviewer user of the pull request
330 330 :param offset: pagination offset
331 331 :param length: length of returned list
332 332 :param order_by: order of the returned list
333 333 :param order_dir: 'asc' or 'desc' ordering direction
334 334 :returns: list of pull requests
335 335 """
336 336 pull_requests = self.get_all(
337 337 repo_name, source=source, statuses=statuses, opened_by=opened_by,
338 338 order_by=order_by, order_dir=order_dir)
339 339
340 340 _my = PullRequestModel().get_not_reviewed(user_id)
341 341 my_participation = []
342 342 for pr in pull_requests:
343 343 if pr in _my:
344 344 my_participation.append(pr)
345 345 _filtered_pull_requests = my_participation
346 346 if length:
347 347 return _filtered_pull_requests[offset:offset+length]
348 348 else:
349 349 return _filtered_pull_requests
350 350
351 351 def get_not_reviewed(self, user_id):
352 352 return [
353 353 x.pull_request for x in PullRequestReviewers.query().filter(
354 354 PullRequestReviewers.user_id == user_id).all()
355 355 ]
356 356
357 357 def _prepare_participating_query(self, user_id=None, statuses=None,
358 358 order_by=None, order_dir='desc'):
359 359 q = PullRequest.query()
360 360 if user_id:
361 361 reviewers_subquery = Session().query(
362 362 PullRequestReviewers.pull_request_id).filter(
363 363 PullRequestReviewers.user_id == user_id).subquery()
364 364 user_filter = or_(
365 365 PullRequest.user_id == user_id,
366 366 PullRequest.pull_request_id.in_(reviewers_subquery)
367 367 )
368 368 q = PullRequest.query().filter(user_filter)
369 369
370 370 # closed,opened
371 371 if statuses:
372 372 q = q.filter(PullRequest.status.in_(statuses))
373 373
374 374 if order_by:
375 375 order_map = {
376 376 'name_raw': PullRequest.pull_request_id,
377 377 'title': PullRequest.title,
378 378 'updated_on_raw': PullRequest.updated_on,
379 379 'target_repo': PullRequest.target_repo_id
380 380 }
381 381 if order_dir == 'asc':
382 382 q = q.order_by(order_map[order_by].asc())
383 383 else:
384 384 q = q.order_by(order_map[order_by].desc())
385 385
386 386 return q
387 387
388 388 def count_im_participating_in(self, user_id=None, statuses=None):
389 389 q = self._prepare_participating_query(user_id, statuses=statuses)
390 390 return q.count()
391 391
392 392 def get_im_participating_in(
393 393 self, user_id=None, statuses=None, offset=0,
394 394 length=None, order_by=None, order_dir='desc'):
395 395 """
396 396 Get all pull requests that I'm participating in, or that I have opened
397 397 """
398 398
399 399 q = self._prepare_participating_query(
400 400 user_id, statuses=statuses, order_by=order_by,
401 401 order_dir=order_dir)
402 402
403 403 if length:
404 404 pull_requests = q.limit(length).offset(offset).all()
405 405 else:
406 406 pull_requests = q.all()
407 407
408 408 return pull_requests
409 409
410 410 def get_versions(self, pull_request):
411 411 """
412 412 returns versions of the pull request sorted by ID ascending
413 413 """
414 414 return PullRequestVersion.query()\
415 415 .filter(PullRequestVersion.pull_request == pull_request)\
416 416 .order_by(PullRequestVersion.pull_request_version_id.asc())\
417 417 .all()
418 418
419 419 def create(self, created_by, source_repo, source_ref, target_repo,
420 420 target_ref, revisions, reviewers, title, description=None,
421 421 reviewer_data=None):
422 422
423 423 created_by_user = self._get_user(created_by)
424 424 source_repo = self._get_repo(source_repo)
425 425 target_repo = self._get_repo(target_repo)
426 426
427 427 pull_request = PullRequest()
428 428 pull_request.source_repo = source_repo
429 429 pull_request.source_ref = source_ref
430 430 pull_request.target_repo = target_repo
431 431 pull_request.target_ref = target_ref
432 432 pull_request.revisions = revisions
433 433 pull_request.title = title
434 434 pull_request.description = description
435 435 pull_request.author = created_by_user
436 436 pull_request.reviewer_data = reviewer_data
437 437
438 438 Session().add(pull_request)
439 439 Session().flush()
440 440
441 441 reviewer_ids = set()
442 442 # members / reviewers
443 443 for reviewer_object in reviewers:
444 444 user_id, reasons, mandatory = reviewer_object
445 445 user = self._get_user(user_id)
446 446
447 447 # skip duplicates
448 448 if user.user_id in reviewer_ids:
449 449 continue
450 450
451 451 reviewer_ids.add(user.user_id)
452 452
453 453 reviewer = PullRequestReviewers()
454 454 reviewer.user = user
455 455 reviewer.pull_request = pull_request
456 456 reviewer.reasons = reasons
457 457 reviewer.mandatory = mandatory
458 458 Session().add(reviewer)
459 459
460 460 # Set approval status to "Under Review" for all commits which are
461 461 # part of this pull request.
462 462 ChangesetStatusModel().set_status(
463 463 repo=target_repo,
464 464 status=ChangesetStatus.STATUS_UNDER_REVIEW,
465 465 user=created_by_user,
466 466 pull_request=pull_request
467 467 )
468 468
469 469 self.notify_reviewers(pull_request, reviewer_ids)
470 470 self._trigger_pull_request_hook(
471 471 pull_request, created_by_user, 'create')
472 472
473 473 creation_data = pull_request.get_api_data(with_merge_state=False)
474 474 self._log_audit_action(
475 475 'repo.pull_request.create', {'data': creation_data},
476 476 created_by_user, pull_request)
477 477
478 478 return pull_request
479 479
480 480 def _trigger_pull_request_hook(self, pull_request, user, action):
481 481 pull_request = self.__get_pull_request(pull_request)
482 482 target_scm = pull_request.target_repo.scm_instance()
483 483 if action == 'create':
484 484 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
485 485 elif action == 'merge':
486 486 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
487 487 elif action == 'close':
488 488 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
489 489 elif action == 'review_status_change':
490 490 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
491 491 elif action == 'update':
492 492 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
493 493 else:
494 494 return
495 495
496 496 trigger_hook(
497 497 username=user.username,
498 498 repo_name=pull_request.target_repo.repo_name,
499 499 repo_alias=target_scm.alias,
500 500 pull_request=pull_request)
501 501
502 502 def _get_commit_ids(self, pull_request):
503 503 """
504 504 Return the commit ids of the merged pull request.
505 505
506 506 This method does not yet deal correctly with the lack of autoupdates
507 507 or with implicit target updates.
508 508 For example: if a commit in the source repo is already in the target, it
509 509 will still be reported.
510 510 """
511 511 merge_rev = pull_request.merge_rev
512 512 if merge_rev is None:
513 513 raise ValueError('This pull request was not merged yet')
514 514
515 515 commit_ids = list(pull_request.revisions)
516 516 if merge_rev not in commit_ids:
517 517 commit_ids.append(merge_rev)
518 518
519 519 return commit_ids
520 520
521 521 def merge(self, pull_request, user, extras):
522 522 log.debug("Merging pull request %s", pull_request.pull_request_id)
523 523 merge_state = self._merge_pull_request(pull_request, user, extras)
524 524 if merge_state.executed:
525 525 log.debug(
526 526 "Merge was successful, updating the pull request comments.")
527 527 self._comment_and_close_pr(pull_request, user, merge_state)
528 528
529 529 self._log_audit_action(
530 530 'repo.pull_request.merge',
531 531 {'merge_state': merge_state.__dict__},
532 532 user, pull_request)
533 533
534 534 else:
535 535 log.warn("Merge failed, not updating the pull request.")
536 536 return merge_state
537 537
538 538 def _merge_pull_request(self, pull_request, user, extras):
539 539 target_vcs = pull_request.target_repo.scm_instance()
540 540 source_vcs = pull_request.source_repo.scm_instance()
541 541 target_ref = self._refresh_reference(
542 542 pull_request.target_ref_parts, target_vcs)
543 543
544 544 message = _(
545 545 'Merge pull request #%(pr_id)s from '
546 546 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
547 547 'pr_id': pull_request.pull_request_id,
548 548 'source_repo': source_vcs.name,
549 549 'source_ref_name': pull_request.source_ref_parts.name,
550 550 'pr_title': pull_request.title
551 551 }
552 552
553 553 workspace_id = self._workspace_id(pull_request)
554 554 use_rebase = self._use_rebase_for_merging(pull_request)
555 555
556 556 callback_daemon, extras = prepare_callback_daemon(
557 557 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
558 558 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
559 559
560 560 with callback_daemon:
561 561 # TODO: johbo: Implement a clean way to run a config_override
562 562 # for a single call.
563 563 target_vcs.config.set(
564 564 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
565 565 merge_state = target_vcs.merge(
566 566 target_ref, source_vcs, pull_request.source_ref_parts,
567 567 workspace_id, user_name=user.username,
568 568 user_email=user.email, message=message, use_rebase=use_rebase)
569 569 return merge_state
570 570
571 571 def _comment_and_close_pr(self, pull_request, user, merge_state):
572 572 pull_request.merge_rev = merge_state.merge_ref.commit_id
573 573 pull_request.updated_on = datetime.datetime.now()
574 574
575 575 CommentsModel().create(
576 576 text=unicode(_('Pull request merged and closed')),
577 577 repo=pull_request.target_repo.repo_id,
578 578 user=user.user_id,
579 579 pull_request=pull_request.pull_request_id,
580 580 f_path=None,
581 581 line_no=None,
582 582 closing_pr=True
583 583 )
584 584
585 585 Session().add(pull_request)
586 586 Session().flush()
587 587 # TODO: paris: replace invalidation with less radical solution
588 588 ScmModel().mark_for_invalidation(
589 589 pull_request.target_repo.repo_name)
590 590 self._trigger_pull_request_hook(pull_request, user, 'merge')
591 591
592 592 def has_valid_update_type(self, pull_request):
593 593 source_ref_type = pull_request.source_ref_parts.type
594 594 return source_ref_type in ['book', 'branch', 'tag']
595 595
596 596 def update_commits(self, pull_request):
597 597 """
598 598 Get the updated list of commits for the pull request
599 599 and return the new pull request version and the list
600 600 of commits processed by this update action
601 601 """
602 602 pull_request = self.__get_pull_request(pull_request)
603 603 source_ref_type = pull_request.source_ref_parts.type
604 604 source_ref_name = pull_request.source_ref_parts.name
605 605 source_ref_id = pull_request.source_ref_parts.commit_id
606 606
607 607 target_ref_type = pull_request.target_ref_parts.type
608 608 target_ref_name = pull_request.target_ref_parts.name
609 609 target_ref_id = pull_request.target_ref_parts.commit_id
610 610
611 611 if not self.has_valid_update_type(pull_request):
612 612 log.debug(
613 613 "Skipping update of pull request %s due to ref type: %s",
614 614 pull_request, source_ref_type)
615 615 return UpdateResponse(
616 616 executed=False,
617 617 reason=UpdateFailureReason.WRONG_REF_TYPE,
618 618 old=pull_request, new=None, changes=None,
619 619 source_changed=False, target_changed=False)
620 620
621 621 # source repo
622 622 source_repo = pull_request.source_repo.scm_instance()
623 623 try:
624 624 source_commit = source_repo.get_commit(commit_id=source_ref_name)
625 625 except CommitDoesNotExistError:
626 626 return UpdateResponse(
627 627 executed=False,
628 628 reason=UpdateFailureReason.MISSING_SOURCE_REF,
629 629 old=pull_request, new=None, changes=None,
630 630 source_changed=False, target_changed=False)
631 631
632 632 source_changed = source_ref_id != source_commit.raw_id
633 633
634 634 # target repo
635 635 target_repo = pull_request.target_repo.scm_instance()
636 636 try:
637 637 target_commit = target_repo.get_commit(commit_id=target_ref_name)
638 638 except CommitDoesNotExistError:
639 639 return UpdateResponse(
640 640 executed=False,
641 641 reason=UpdateFailureReason.MISSING_TARGET_REF,
642 642 old=pull_request, new=None, changes=None,
643 643 source_changed=False, target_changed=False)
644 644 target_changed = target_ref_id != target_commit.raw_id
645 645
646 646 if not (source_changed or target_changed):
647 647 log.debug("Nothing changed in pull request %s", pull_request)
648 648 return UpdateResponse(
649 649 executed=False,
650 650 reason=UpdateFailureReason.NO_CHANGE,
651 651 old=pull_request, new=None, changes=None,
652 652 source_changed=source_changed, target_changed=target_changed)
653 653
654 654 change_in_found = 'target repo' if target_changed else 'source repo'
655 655 log.debug('Updating pull request because of change in %s detected',
656 656 change_in_found)
657 657
658 658 # Finally an update is needed; in case of a source change we
659 659 # create a new version, otherwise we just update in place
660 660 if source_changed:
661 661 pull_request_version = self._create_version_from_snapshot(pull_request)
662 662 self._link_comments_to_version(pull_request_version)
663 663 else:
664 664 try:
665 665 ver = pull_request.versions[-1]
666 666 except IndexError:
667 667 ver = None
668 668
669 669 pull_request.pull_request_version_id = \
670 670 ver.pull_request_version_id if ver else None
671 671 pull_request_version = pull_request
672 672
673 673 try:
674 674 if target_ref_type in ('tag', 'branch', 'book'):
675 675 target_commit = target_repo.get_commit(target_ref_name)
676 676 else:
677 677 target_commit = target_repo.get_commit(target_ref_id)
678 678 except CommitDoesNotExistError:
679 679 return UpdateResponse(
680 680 executed=False,
681 681 reason=UpdateFailureReason.MISSING_TARGET_REF,
682 682 old=pull_request, new=None, changes=None,
683 683 source_changed=source_changed, target_changed=target_changed)
684 684
685 685 # re-compute commit ids
686 686 old_commit_ids = pull_request.revisions
687 687 pre_load = ["author", "branch", "date", "message"]
688 688 commit_ranges = target_repo.compare(
689 689 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
690 690 pre_load=pre_load)
691 691
692 692 ancestor = target_repo.get_common_ancestor(
693 693 target_commit.raw_id, source_commit.raw_id, source_repo)
694 694
695 695 pull_request.source_ref = '%s:%s:%s' % (
696 696 source_ref_type, source_ref_name, source_commit.raw_id)
697 697 pull_request.target_ref = '%s:%s:%s' % (
698 698 target_ref_type, target_ref_name, ancestor)
699 699
700 700 pull_request.revisions = [
701 701 commit.raw_id for commit in reversed(commit_ranges)]
702 702 pull_request.updated_on = datetime.datetime.now()
703 703 Session().add(pull_request)
704 704 new_commit_ids = pull_request.revisions
705 705
706 706 old_diff_data, new_diff_data = self._generate_update_diffs(
707 707 pull_request, pull_request_version)
708 708
709 709 # calculate commit and file changes
710 710 changes = self._calculate_commit_id_changes(
711 711 old_commit_ids, new_commit_ids)
712 712 file_changes = self._calculate_file_changes(
713 713 old_diff_data, new_diff_data)
714 714
715 715 # set comments as outdated if DIFFS changed
716 716 CommentsModel().outdate_comments(
717 717 pull_request, old_diff_data=old_diff_data,
718 718 new_diff_data=new_diff_data)
719 719
720 720 commit_changes = (changes.added or changes.removed)
721 721 file_node_changes = (
722 722 file_changes.added or file_changes.modified or file_changes.removed)
723 723 pr_has_changes = commit_changes or file_node_changes
724 724
725 725 # Add an automatic comment to the pull request, in case
726 726 # anything has changed
727 727 if pr_has_changes:
728 728 update_comment = CommentsModel().create(
729 729 text=self._render_update_message(changes, file_changes),
730 730 repo=pull_request.target_repo,
731 731 user=pull_request.author,
732 732 pull_request=pull_request,
733 733 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
734 734
735 735 # Update status to "Under Review" for added commits
736 736 for commit_id in changes.added:
737 737 ChangesetStatusModel().set_status(
738 738 repo=pull_request.source_repo,
739 739 status=ChangesetStatus.STATUS_UNDER_REVIEW,
740 740 comment=update_comment,
741 741 user=pull_request.author,
742 742 pull_request=pull_request,
743 743 revision=commit_id)
744 744
745 745 log.debug(
746 746 'Updated pull request %s, added_ids: %s, common_ids: %s, '
747 747 'removed_ids: %s', pull_request.pull_request_id,
748 748 changes.added, changes.common, changes.removed)
749 749 log.debug(
750 750 'Updated pull request with the following file changes: %s',
751 751 file_changes)
752 752
753 753 log.info(
754 754 "Updated pull request %s from commit %s to commit %s, "
755 755 "stored new version %s of this pull request.",
756 756 pull_request.pull_request_id, source_ref_id,
757 757 pull_request.source_ref_parts.commit_id,
758 758 pull_request_version.pull_request_version_id)
759 759 Session().commit()
760 760 self._trigger_pull_request_hook(
761 761 pull_request, pull_request.author, 'update')
762 762
763 763 return UpdateResponse(
764 764 executed=True, reason=UpdateFailureReason.NONE,
765 765 old=pull_request, new=pull_request_version, changes=changes,
766 766 source_changed=source_changed, target_changed=target_changed)
767 767
768 768 def _create_version_from_snapshot(self, pull_request):
769 769 version = PullRequestVersion()
770 770 version.title = pull_request.title
771 771 version.description = pull_request.description
772 772 version.status = pull_request.status
773 773 version.created_on = datetime.datetime.now()
774 774 version.updated_on = pull_request.updated_on
775 775 version.user_id = pull_request.user_id
776 776 version.source_repo = pull_request.source_repo
777 777 version.source_ref = pull_request.source_ref
778 778 version.target_repo = pull_request.target_repo
779 779 version.target_ref = pull_request.target_ref
780 780
781 781 version._last_merge_source_rev = pull_request._last_merge_source_rev
782 782 version._last_merge_target_rev = pull_request._last_merge_target_rev
783 783 version._last_merge_status = pull_request._last_merge_status
784 784 version.shadow_merge_ref = pull_request.shadow_merge_ref
785 785 version.merge_rev = pull_request.merge_rev
786 786 version.reviewer_data = pull_request.reviewer_data
787 787
788 788 version.revisions = pull_request.revisions
789 789 version.pull_request = pull_request
790 790 Session().add(version)
791 791 Session().flush()
792 792
793 793 return version
794 794
795 795 def _generate_update_diffs(self, pull_request, pull_request_version):
796 796
797 797 diff_context = (
798 798 self.DIFF_CONTEXT +
799 799 CommentsModel.needed_extra_diff_context())
800 800
801 801 source_repo = pull_request_version.source_repo
802 802 source_ref_id = pull_request_version.source_ref_parts.commit_id
803 803 target_ref_id = pull_request_version.target_ref_parts.commit_id
804 804 old_diff = self._get_diff_from_pr_or_version(
805 805 source_repo, source_ref_id, target_ref_id, context=diff_context)
806 806
807 807 source_repo = pull_request.source_repo
808 808 source_ref_id = pull_request.source_ref_parts.commit_id
809 809 target_ref_id = pull_request.target_ref_parts.commit_id
810 810
811 811 new_diff = self._get_diff_from_pr_or_version(
812 812 source_repo, source_ref_id, target_ref_id, context=diff_context)
813 813
814 814 old_diff_data = diffs.DiffProcessor(old_diff)
815 815 old_diff_data.prepare()
816 816 new_diff_data = diffs.DiffProcessor(new_diff)
817 817 new_diff_data.prepare()
818 818
819 819 return old_diff_data, new_diff_data
820 820
821 821 def _link_comments_to_version(self, pull_request_version):
822 822 """
823 823 Link all unlinked comments of this pull request to the given version.
824 824
825 825 :param pull_request_version: The `PullRequestVersion` to which
826 826 the comments shall be linked.
827 827
828 828 """
829 829 pull_request = pull_request_version.pull_request
830 830 comments = ChangesetComment.query()\
831 831 .filter(
832 832 # TODO: johbo: Should we query for the repo at all here?
833 833 # Pending decision on how comments of PRs are to be related
834 834 # to either the source repo, the target repo or no repo at all.
835 835 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
836 836 ChangesetComment.pull_request == pull_request,
837 837 ChangesetComment.pull_request_version == None)\
838 838 .order_by(ChangesetComment.comment_id.asc())
839 839
840 840 # TODO: johbo: Find out why this breaks if it is done in a bulk
841 841 # operation.
842 842 for comment in comments:
843 843 comment.pull_request_version_id = (
844 844 pull_request_version.pull_request_version_id)
845 845 Session().add(comment)
846 846
847 847 def _calculate_commit_id_changes(self, old_ids, new_ids):
848 848 added = [x for x in new_ids if x not in old_ids]
849 849 common = [x for x in new_ids if x in old_ids]
850 850 removed = [x for x in old_ids if x not in new_ids]
851 851 total = new_ids
852 852 return ChangeTuple(added, common, removed, total)
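        # Worked example: old_ids = ['a', 'b'] and new_ids = ['b', 'c'] yield
        #     added   == ['c']
        #     common  == ['b']
        #     removed == ['a']
        #     total   == ['b', 'c']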
853 853
854 854 def _calculate_file_changes(self, old_diff_data, new_diff_data):
855 855
856 856 old_files = OrderedDict()
857 857 for diff_data in old_diff_data.parsed_diff:
858 858 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
859 859
860 860 added_files = []
861 861 modified_files = []
862 862 removed_files = []
863 863 for diff_data in new_diff_data.parsed_diff:
864 864 new_filename = diff_data['filename']
865 865 new_hash = md5_safe(diff_data['raw_diff'])
866 866
867 867 old_hash = old_files.get(new_filename)
868 868 if not old_hash:
869 869 # file is not present in old diff, which means it was added
870 870 added_files.append(new_filename)
871 871 else:
872 872 if new_hash != old_hash:
873 873 modified_files.append(new_filename)
874 874 # now remove a file from old, since we have seen it already
875 875 del old_files[new_filename]
876 876
877 877 # removed files are those present in old, but not in NEW;
878 878 # since we remove old files that are present in the new diff, any
879 879 # left-overs should be the removed files
880 880 removed_files.extend(old_files.keys())
881 881
882 882 return FileChangeTuple(added_files, modified_files, removed_files)
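        # Worked example (file names are placeholders): if the old diff touched
        # f1 and f2, and the new diff touches f2 (with a different raw-diff
        # hash) and f3, then
        #     added    == ['f3']   # not present in the old diff
        #     modified == ['f2']   # hash changed between diffs
        #     removed  == ['f1']   # left over in old_files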
883 883
884 884 def _render_update_message(self, changes, file_changes):
885 885 """
886 886 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
887 887 so it always looks the same regardless of which default
888 888 renderer the system is using.
889 889
890 890 :param changes: changes named tuple
891 891 :param file_changes: file changes named tuple
892 892
893 893 """
894 894 new_status = ChangesetStatus.get_status_lbl(
895 895 ChangesetStatus.STATUS_UNDER_REVIEW)
896 896
897 897 changed_files = (
898 898 file_changes.added + file_changes.modified + file_changes.removed)
899 899
900 900 params = {
901 901 'under_review_label': new_status,
902 902 'added_commits': changes.added,
903 903 'removed_commits': changes.removed,
904 904 'changed_files': changed_files,
905 905 'added_files': file_changes.added,
906 906 'modified_files': file_changes.modified,
907 907 'removed_files': file_changes.removed,
908 908 }
909 909 renderer = RstTemplateRenderer()
910 910 return renderer.render('pull_request_update.mako', **params)
911 911
912 912 def edit(self, pull_request, title, description, user):
913 913 pull_request = self.__get_pull_request(pull_request)
914 914 old_data = pull_request.get_api_data(with_merge_state=False)
915 915 if pull_request.is_closed():
916 916 raise ValueError('This pull request is closed')
917 917 if title:
918 918 pull_request.title = title
919 919 pull_request.description = description
920 920 pull_request.updated_on = datetime.datetime.now()
921 921 Session().add(pull_request)
922 922 self._log_audit_action(
923 923 'repo.pull_request.edit', {'old_data': old_data},
924 924 user, pull_request)
925 925
926 926 def update_reviewers(self, pull_request, reviewer_data, user):
927 927 """
928 928 Update the reviewers in the pull request
929 929
930 930 :param pull_request: the pr to update
931 931 :param reviewer_data: list of tuples
932 932 [(user, ['reason1', 'reason2'], mandatory_flag)]
933 933 """
934 934
935 935 reviewers = {}
936 936 for user_id, reasons, mandatory in reviewer_data:
937 937 if isinstance(user_id, (int, basestring)):
938 938 user_id = self._get_user(user_id).user_id
939 939 reviewers[user_id] = {
940 940 'reasons': reasons, 'mandatory': mandatory}
941 941
942 942 reviewers_ids = set(reviewers.keys())
943 943 pull_request = self.__get_pull_request(pull_request)
944 944 current_reviewers = PullRequestReviewers.query()\
945 945 .filter(PullRequestReviewers.pull_request ==
946 946 pull_request).all()
947 947 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
948 948
949 949 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
950 950 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
951 951
952 952 log.debug("Adding %s reviewers", ids_to_add)
953 953 log.debug("Removing %s reviewers", ids_to_remove)
954 954 changed = False
955 955 for uid in ids_to_add:
956 956 changed = True
957 957 _usr = self._get_user(uid)
958 958 reviewer = PullRequestReviewers()
959 959 reviewer.user = _usr
960 960 reviewer.pull_request = pull_request
961 961 reviewer.reasons = reviewers[uid]['reasons']
962 962 # NOTE(marcink): mandatory shouldn't be changed now
963 963 # reviewer.mandatory = reviewers[uid]['mandatory']
964 964 Session().add(reviewer)
965 965 self._log_audit_action(
966 966 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
967 967 user, pull_request)
968 968
969 969 for uid in ids_to_remove:
970 970 changed = True
971 971 reviewers = PullRequestReviewers.query()\
972 972 .filter(PullRequestReviewers.user_id == uid,
973 973 PullRequestReviewers.pull_request == pull_request)\
974 974 .all()
975 975 # use .all() in case we accidentally added the same person twice
976 976 # this CAN happen due to the lack of DB checks
977 977 for obj in reviewers:
978 978 old_data = obj.get_dict()
979 979 Session().delete(obj)
980 980 self._log_audit_action(
981 981 'repo.pull_request.reviewer.delete',
982 982 {'old_data': old_data}, user, pull_request)
983 983
984 984 if changed:
985 985 pull_request.updated_on = datetime.datetime.now()
986 986 Session().add(pull_request)
987 987
988 988 self.notify_reviewers(pull_request, ids_to_add)
989 989 return ids_to_add, ids_to_remove
990 990
991 991 def get_url(self, pull_request, request=None, permalink=False):
992 992 if not request:
993 993 request = get_current_request()
994 994
995 995 if permalink:
996 996 return request.route_url(
997 997 'pull_requests_global',
998 998 pull_request_id=pull_request.pull_request_id,)
999 999 else:
1000 1000 return request.route_url('pullrequest_show',
1001 1001 repo_name=safe_str(pull_request.target_repo.repo_name),
1002 1002 pull_request_id=pull_request.pull_request_id,)
1003 1003
1004 1004 def get_shadow_clone_url(self, pull_request):
1005 1005 """
1006 1006 Returns qualified url pointing to the shadow repository. If this pull
1007 1007 request is closed there is no shadow repository and ``None`` will be
1008 1008 returned.
1009 1009 """
1010 1010 if pull_request.is_closed():
1011 1011 return None
1012 1012 else:
1013 1013 pr_url = urllib.unquote(self.get_url(pull_request))
1014 1014 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1015 1015
1016 1016 def notify_reviewers(self, pull_request, reviewers_ids):
1017 1017 # notification to reviewers
1018 1018 if not reviewers_ids:
1019 1019 return
1020 1020
1021 1021 pull_request_obj = pull_request
1022 1022 # get the current participants of this pull request
1023 1023 recipients = reviewers_ids
1024 1024 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1025 1025
1026 1026 pr_source_repo = pull_request_obj.source_repo
1027 1027 pr_target_repo = pull_request_obj.target_repo
1028 1028
1029 1029 pr_url = h.route_url('pullrequest_show',
1030 1030 repo_name=pr_target_repo.repo_name,
1031 1031 pull_request_id=pull_request_obj.pull_request_id,)
1032 1032
1033 1033 # set some variables for email notification
1034 1034 pr_target_repo_url = h.route_url(
1035 1035 'repo_summary', repo_name=pr_target_repo.repo_name)
1036 1036
1037 1037 pr_source_repo_url = h.route_url(
1038 1038 'repo_summary', repo_name=pr_source_repo.repo_name)
1039 1039
1040 1040 # pull request specifics
1041 1041 pull_request_commits = [
1042 1042 (x.raw_id, x.message)
1043 1043 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1044 1044
1045 1045 kwargs = {
1046 1046 'user': pull_request.author,
1047 1047 'pull_request': pull_request_obj,
1048 1048 'pull_request_commits': pull_request_commits,
1049 1049
1050 1050 'pull_request_target_repo': pr_target_repo,
1051 1051 'pull_request_target_repo_url': pr_target_repo_url,
1052 1052
1053 1053 'pull_request_source_repo': pr_source_repo,
1054 1054 'pull_request_source_repo_url': pr_source_repo_url,
1055 1055
1056 1056 'pull_request_url': pr_url,
1057 1057 }
1058 1058
1059 1059 # pre-generate the subject for notification itself
1060 1060 (subject,
1061 1061 _h, _e, # we don't care about those
1062 1062 body_plaintext) = EmailNotificationModel().render_email(
1063 1063 notification_type, **kwargs)
1064 1064
1065 1065 # create notification objects, and emails
1066 1066 NotificationModel().create(
1067 1067 created_by=pull_request.author,
1068 1068 notification_subject=subject,
1069 1069 notification_body=body_plaintext,
1070 1070 notification_type=notification_type,
1071 1071 recipients=recipients,
1072 1072 email_kwargs=kwargs,
1073 1073 )
1074 1074
1075 1075 def delete(self, pull_request, user):
1076 1076 pull_request = self.__get_pull_request(pull_request)
1077 1077 old_data = pull_request.get_api_data(with_merge_state=False)
1078 1078 self._cleanup_merge_workspace(pull_request)
1079 1079 self._log_audit_action(
1080 1080 'repo.pull_request.delete', {'old_data': old_data},
1081 1081 user, pull_request)
1082 1082 Session().delete(pull_request)
1083 1083
1084 1084 def close_pull_request(self, pull_request, user):
1085 1085 pull_request = self.__get_pull_request(pull_request)
1086 1086 self._cleanup_merge_workspace(pull_request)
1087 1087 pull_request.status = PullRequest.STATUS_CLOSED
1088 1088 pull_request.updated_on = datetime.datetime.now()
1089 1089 Session().add(pull_request)
1090 1090 self._trigger_pull_request_hook(
1091 1091 pull_request, pull_request.author, 'close')
1092 1092 self._log_audit_action(
1093 1093 'repo.pull_request.close', {}, user, pull_request)
1094 1094
1095 1095 def close_pull_request_with_comment(
1096 1096 self, pull_request, user, repo, message=None):
1097 1097
1098 1098 pull_request_review_status = pull_request.calculated_review_status()
1099 1099
1100 1100 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1101 1101 # approved only if we have voting consent
1102 1102 status = ChangesetStatus.STATUS_APPROVED
1103 1103 else:
1104 1104 status = ChangesetStatus.STATUS_REJECTED
1105 1105 status_lbl = ChangesetStatus.get_status_lbl(status)
1106 1106
1107 1107 default_message = (
1108 1108 _('Closing with status change {transition_icon} {status}.')
1109 1109 ).format(transition_icon='>', status=status_lbl)
1110 1110 text = message or default_message
1111 1111
1112 1112 # create a comment, and link it to new status
1113 1113 comment = CommentsModel().create(
1114 1114 text=text,
1115 1115 repo=repo.repo_id,
1116 1116 user=user.user_id,
1117 1117 pull_request=pull_request.pull_request_id,
1118 1118 status_change=status_lbl,
1119 1119 status_change_type=status,
1120 1120 closing_pr=True
1121 1121 )
1122 1122
1123 1123 # calculate old status before we change it
1124 1124 old_calculated_status = pull_request.calculated_review_status()
1125 1125 ChangesetStatusModel().set_status(
1126 1126 repo.repo_id,
1127 1127 status,
1128 1128 user.user_id,
1129 1129 comment=comment,
1130 1130 pull_request=pull_request.pull_request_id
1131 1131 )
1132 1132
1133 1133 Session().flush()
1134 1134 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1135 1135 # we now calculate the status of the pull request again, and based on
1136 1136 # that calculation trigger a status change. This might happen in cases
1137 1137 # where a non-reviewer admin closes a pr, which means their vote doesn't
1138 1138 # change the status, while if they are a reviewer it might change it.
1139 1139 calculated_status = pull_request.calculated_review_status()
1140 1140 if old_calculated_status != calculated_status:
1141 1141 self._trigger_pull_request_hook(
1142 1142 pull_request, user, 'review_status_change')
1143 1143
1144 1144 # finally close the PR
1145 1145 PullRequestModel().close_pull_request(
1146 1146 pull_request.pull_request_id, user)
1147 1147
1148 1148 return comment, status
1149 1149
1150 1150 def merge_status(self, pull_request):
1151 1151 if not self._is_merge_enabled(pull_request):
1152 1152 return False, _('Server-side pull request merging is disabled.')
1153 1153 if pull_request.is_closed():
1154 1154 return False, _('This pull request is closed.')
1155 1155 merge_possible, msg = self._check_repo_requirements(
1156 1156 target=pull_request.target_repo, source=pull_request.source_repo)
1157 1157 if not merge_possible:
1158 1158 return merge_possible, msg
1159 1159
1160 1160 try:
1161 1161 resp = self._try_merge(pull_request)
1162 1162 log.debug("Merge response: %s", resp)
1163 1163 status = resp.possible, self.merge_status_message(
1164 1164 resp.failure_reason)
1165 1165 except NotImplementedError:
1166 1166 status = False, _('Pull request merging is not supported.')
1167 1167
1168 1168 return status
1169 1169
1170 1170 def _check_repo_requirements(self, target, source):
1171 1171 """
1172 1172 Check if `target` and `source` have compatible requirements.
1173 1173
1174 1174 Currently this is just checking for largefiles.
1175 1175 """
1176 1176 target_has_largefiles = self._has_largefiles(target)
1177 1177 source_has_largefiles = self._has_largefiles(source)
1178 1178 merge_possible = True
1179 1179 message = u''
1180 1180
1181 1181 if target_has_largefiles != source_has_largefiles:
1182 1182 merge_possible = False
1183 1183 if source_has_largefiles:
1184 1184 message = _(
1185 1185 'Target repository large files support is disabled.')
1186 1186 else:
1187 1187 message = _(
1188 1188 'Source repository large files support is disabled.')
1189 1189
1190 1190 return merge_possible, message
1191 1191
1192 1192 def _has_largefiles(self, repo):
1193 1193 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1194 1194 'extensions', 'largefiles')
1195 1195 return largefiles_ui and largefiles_ui[0].active
1196 1196
1197 1197 def _try_merge(self, pull_request):
1198 1198 """
1199 1199 Try to merge the pull request and return the merge status.
1200 1200 """
1201 1201 log.debug(
1202 1202 "Trying out if the pull request %s can be merged.",
1203 1203 pull_request.pull_request_id)
1204 1204 target_vcs = pull_request.target_repo.scm_instance()
1205 1205
1206 1206 # Refresh the target reference.
1207 1207 try:
1208 1208 target_ref = self._refresh_reference(
1209 1209 pull_request.target_ref_parts, target_vcs)
1210 1210 except CommitDoesNotExistError:
1211 1211 merge_state = MergeResponse(
1212 1212 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1213 1213 return merge_state
1214 1214
1215 1215 target_locked = pull_request.target_repo.locked
1216 1216 if target_locked and target_locked[0]:
1217 1217 log.debug("The target repository is locked.")
1218 1218 merge_state = MergeResponse(
1219 1219 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1220 1220 elif self._needs_merge_state_refresh(pull_request, target_ref):
1221 1221 log.debug("Refreshing the merge status of the repository.")
1222 1222 merge_state = self._refresh_merge_state(
1223 1223 pull_request, target_vcs, target_ref)
1224 1224 else:
1225 1225 possible = (pull_request._last_merge_status ==
1226 1226 MergeFailureReason.NONE)
1227 1227 merge_state = MergeResponse(
1228 1228 possible, False, None, pull_request._last_merge_status)
1229 1229
1230 1230 return merge_state
1231 1231
1232 1232 def _refresh_reference(self, reference, vcs_repository):
1233 1233 if reference.type in ('branch', 'book'):
1234 1234 name_or_id = reference.name
1235 1235 else:
1236 1236 name_or_id = reference.commit_id
1237 1237 refreshed_commit = vcs_repository.get_commit(name_or_id)
1238 1238 refreshed_reference = Reference(
1239 1239 reference.type, reference.name, refreshed_commit.raw_id)
1240 1240 return refreshed_reference
1241 1241
1242 1242 def _needs_merge_state_refresh(self, pull_request, target_reference):
1243 1243 return not (
1244 1244 pull_request.revisions and
1245 1245 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1246 1246 target_reference.commit_id == pull_request._last_merge_target_rev)
1247 1247
1248 1248 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1249 1249 workspace_id = self._workspace_id(pull_request)
1250 1250 source_vcs = pull_request.source_repo.scm_instance()
1251 1251 use_rebase = self._use_rebase_for_merging(pull_request)
1252 1252 merge_state = target_vcs.merge(
1253 1253 target_reference, source_vcs, pull_request.source_ref_parts,
1254 1254 workspace_id, dry_run=True, use_rebase=use_rebase)
1255 1255
1256 1256 # Do not store the response if there was an unknown error.
1257 1257 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1258 1258 pull_request._last_merge_source_rev = \
1259 1259 pull_request.source_ref_parts.commit_id
1260 1260 pull_request._last_merge_target_rev = target_reference.commit_id
1261 1261 pull_request._last_merge_status = merge_state.failure_reason
1262 1262 pull_request.shadow_merge_ref = merge_state.merge_ref
1263 1263 Session().add(pull_request)
1264 1264 Session().commit()
1265 1265
1266 1266 return merge_state
1267 1267
1268 1268 def _workspace_id(self, pull_request):
1269 1269 workspace_id = 'pr-%s' % pull_request.pull_request_id
1270 1270 return workspace_id
1271 1271
1272 1272 def merge_status_message(self, status_code):
1273 1273 """
1274 1274 Return a human friendly error message for the given merge status code.
1275 1275 """
1276 1276 return self.MERGE_STATUS_MESSAGES[status_code]
1277 1277
1278 1278 def generate_repo_data(self, repo, commit_id=None, branch=None,
1279 1279 bookmark=None):
1280 1280 all_refs, selected_ref = \
1281 1281 self._get_repo_pullrequest_sources(
1282 1282 repo.scm_instance(), commit_id=commit_id,
1283 1283 branch=branch, bookmark=bookmark)
1284 1284
1285 1285 refs_select2 = []
1286 1286 for element in all_refs:
1287 1287 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1288 1288 refs_select2.append({'text': element[1], 'children': children})
1289 1289
1290 1290 return {
1291 1291 'user': {
1292 1292 'user_id': repo.user.user_id,
1293 1293 'username': repo.user.username,
1294 'firstname': repo.user.firstname,
1295 'lastname': repo.user.lastname,
1294 'firstname': repo.user.first_name,
1295 'lastname': repo.user.last_name,
1296 1296 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1297 1297 },
1298 1298 'description': h.chop_at_smart(repo.description, '\n'),
1299 1299 'refs': {
1300 1300 'all_refs': all_refs,
1301 1301 'selected_ref': selected_ref,
1302 1302 'select2_refs': refs_select2
1303 1303 }
1304 1304 }
1305 1305
1306 1306 def generate_pullrequest_title(self, source, source_ref, target):
1307 1307 return u'{source}#{at_ref} to {target}'.format(
1308 1308 source=source,
1309 1309 at_ref=source_ref,
1310 1310 target=target,
1311 1311 )
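For illustration only (the repository and branch names below are made up), the title format above produces strings like:

    title = u'{source}#{at_ref} to {target}'.format(
        source='my-fork', at_ref='feature-1', target='upstream-repo')
    # title == u'my-fork#feature-1 to upstream-repo'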
1312 1312
1313 1313 def _cleanup_merge_workspace(self, pull_request):
1314 1314 # Merging related cleanup
1315 1315 target_scm = pull_request.target_repo.scm_instance()
1316 1316 workspace_id = 'pr-%s' % pull_request.pull_request_id
1317 1317
1318 1318 try:
1319 1319 target_scm.cleanup_merge_workspace(workspace_id)
1320 1320 except NotImplementedError:
1321 1321 pass
1322 1322
1323 1323 def _get_repo_pullrequest_sources(
1324 1324 self, repo, commit_id=None, branch=None, bookmark=None):
1325 1325 """
1326 1326 Return a structure with repo's interesting commits, suitable for
1327 1327 the selectors in pullrequest controller
1328 1328
1329 1329 :param commit_id: a commit that must be in the list somehow
1330 1330 and selected by default
1331 1331 :param branch: a branch that must be in the list and selected
1332 1332 by default - even if closed
1333 1333 :param bookmark: a bookmark that must be in the list and selected
1334 1334 """
1335 1335
1336 1336 commit_id = safe_str(commit_id) if commit_id else None
1337 1337 branch = safe_str(branch) if branch else None
1338 1338 bookmark = safe_str(bookmark) if bookmark else None
1339 1339
1340 1340 selected = None
1341 1341
1342 1342 # order matters: first source that has commit_id in it will be selected
1343 1343 sources = []
1344 1344 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1345 1345 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1346 1346
1347 1347 if commit_id:
1348 1348 ref_commit = (h.short_id(commit_id), commit_id)
1349 1349 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1350 1350
1351 1351 sources.append(
1352 1352 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1353 1353 )
1354 1354
1355 1355 groups = []
1356 1356 for group_key, ref_list, group_name, match in sources:
1357 1357 group_refs = []
1358 1358 for ref_name, ref_id in ref_list:
1359 1359 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1360 1360 group_refs.append((ref_key, ref_name))
1361 1361
1362 1362 if not selected:
1363 1363 if set([commit_id, match]) & set([ref_id, ref_name]):
1364 1364 selected = ref_key
1365 1365
1366 1366 if group_refs:
1367 1367 groups.append((group_refs, group_name))
1368 1368
1369 1369 if not selected:
1370 1370 ref = commit_id or branch or bookmark
1371 1371 if ref:
1372 1372 raise CommitDoesNotExistError(
1373 1373 'No commit refs could be found matching: %s' % ref)
1374 1374 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1375 1375 selected = 'branch:%s:%s' % (
1376 1376 repo.DEFAULT_BRANCH_NAME,
1377 1377 repo.branches[repo.DEFAULT_BRANCH_NAME]
1378 1378 )
1379 1379 elif repo.commit_ids:
1380 1380 rev = repo.commit_ids[0]
1381 1381 selected = 'rev:%s:%s' % (rev, rev)
1382 1382 else:
1383 1383 raise EmptyRepositoryError()
1384 1384 return groups, selected
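As a rough sketch of the structure returned above (ref names and hashes are hypothetical): each group is a tuple of (list of (ref_key, ref_name) pairs, group label), and `selected` is a single ref_key:

    groups = [
        ([('branch:default:1a2b3c4d', 'default')], u'Branches'),
    ]
    selected = 'branch:default:1a2b3c4d'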
1385 1385
1386 1386 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1387 1387 return self._get_diff_from_pr_or_version(
1388 1388 source_repo, source_ref_id, target_ref_id, context=context)
1389 1389
1390 1390 def _get_diff_from_pr_or_version(
1391 1391 self, source_repo, source_ref_id, target_ref_id, context):
1392 1392 target_commit = source_repo.get_commit(
1393 1393 commit_id=safe_str(target_ref_id))
1394 1394 source_commit = source_repo.get_commit(
1395 1395 commit_id=safe_str(source_ref_id))
1396 1396 if isinstance(source_repo, Repository):
1397 1397 vcs_repo = source_repo.scm_instance()
1398 1398 else:
1399 1399 vcs_repo = source_repo
1400 1400
1401 1401 # TODO: johbo: In the context of an update, we cannot reach
1402 1402 # the old commit anymore with our normal mechanisms. It needs
1403 1403 # some sort of special support in the vcs layer to avoid this
1404 1404 # workaround.
1405 1405 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1406 1406 vcs_repo.alias == 'git'):
1407 1407 source_commit.raw_id = safe_str(source_ref_id)
1408 1408
1409 1409 log.debug('calculating diff between '
1410 1410 'source_ref:%s and target_ref:%s for repo `%s`',
1411 1411 target_ref_id, source_ref_id,
1412 1412 safe_unicode(vcs_repo.path))
1413 1413
1414 1414 vcs_diff = vcs_repo.get_diff(
1415 1415 commit1=target_commit, commit2=source_commit, context=context)
1416 1416 return vcs_diff
1417 1417
1418 1418 def _is_merge_enabled(self, pull_request):
1419 1419 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1420 1420 settings = settings_model.get_general_settings()
1421 1421 return settings.get('rhodecode_pr_merge_enabled', False)
1422 1422
1423 1423 def _use_rebase_for_merging(self, pull_request):
1424 1424 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1425 1425 settings = settings_model.get_general_settings()
1426 1426 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1427 1427
1428 1428 def _log_audit_action(self, action, action_data, user, pull_request):
1429 1429 audit_logger.store(
1430 1430 action=action,
1431 1431 action_data=action_data,
1432 1432 user=user,
1433 1433 repo=pull_request.target_repo)
1434 1434
1435 1435 def get_reviewer_functions(self):
1436 1436 """
1437 1437 Fetches functions for validation and fetching default reviewers.
1438 1438 If available we use the EE package, else we fallback to CE
1439 1439 package functions
1440 1440 """
1441 1441 try:
1442 1442 from rc_reviewers.utils import get_default_reviewers_data
1443 1443 from rc_reviewers.utils import validate_default_reviewers
1444 1444 except ImportError:
1445 1445 from rhodecode.apps.repository.utils import \
1446 1446 get_default_reviewers_data
1447 1447 from rhodecode.apps.repository.utils import \
1448 1448 validate_default_reviewers
1449 1449
1450 1450 return get_default_reviewers_data, validate_default_reviewers
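A minimal usage sketch, assuming an instance of this model class (PullRequestModel); the variable names are illustrative:

    get_reviewers_data, validate_reviewers = \
        PullRequestModel().get_reviewer_functions()
    # both callables resolve to the EE implementation when rc_reviewers is
    # importable, otherwise to the CE fallbacks imported above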
1451 1451
1452 1452
1453 1453 class MergeCheck(object):
1454 1454 """
1455 1455 Performs merge checks and returns a check object which stores
1456 1456 information about merge errors and merge conditions
1457 1457 """
1458 1458 TODO_CHECK = 'todo'
1459 1459 PERM_CHECK = 'perm'
1460 1460 REVIEW_CHECK = 'review'
1461 1461 MERGE_CHECK = 'merge'
1462 1462
1463 1463 def __init__(self):
1464 1464 self.review_status = None
1465 1465 self.merge_possible = None
1466 1466 self.merge_msg = ''
1467 1467 self.failed = None
1468 1468 self.errors = []
1469 1469 self.error_details = OrderedDict()
1470 1470
1471 1471 def push_error(self, error_type, message, error_key, details):
1472 1472 self.failed = True
1473 1473 self.errors.append([error_type, message])
1474 1474 self.error_details[error_key] = dict(
1475 1475 details=details,
1476 1476 error_type=error_type,
1477 1477 message=message
1478 1478 )
1479 1479
1480 1480 @classmethod
1481 1481 def validate(cls, pull_request, user, fail_early=False, translator=None):
1482 1482 # if migrated to pyramid...
1483 1483 # _ = lambda: translator or _ # use passed in translator if any
1484 1484
1485 1485 merge_check = cls()
1486 1486
1487 1487 # permissions to merge
1488 1488 user_allowed_to_merge = PullRequestModel().check_user_merge(
1489 1489 pull_request, user)
1490 1490 if not user_allowed_to_merge:
1491 1491 log.debug("MergeCheck: cannot merge, approval is pending.")
1492 1492
1493 1493 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1494 1494 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1495 1495 if fail_early:
1496 1496 return merge_check
1497 1497
1498 1498 # review status, must be always present
1499 1499 review_status = pull_request.calculated_review_status()
1500 1500 merge_check.review_status = review_status
1501 1501
1502 1502 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1503 1503 if not status_approved:
1504 1504 log.debug("MergeCheck: cannot merge, approval is pending.")
1505 1505
1506 1506 msg = _('Pull request reviewer approval is pending.')
1507 1507
1508 1508 merge_check.push_error(
1509 1509 'warning', msg, cls.REVIEW_CHECK, review_status)
1510 1510
1511 1511 if fail_early:
1512 1512 return merge_check
1513 1513
1514 1514 # left over TODOs
1515 1515 todos = CommentsModel().get_unresolved_todos(pull_request)
1516 1516 if todos:
1517 1517 log.debug("MergeCheck: cannot merge, {} "
1518 1518 "unresolved todos left.".format(len(todos)))
1519 1519
1520 1520 if len(todos) == 1:
1521 1521 msg = _('Cannot merge, {} TODO still not resolved.').format(
1522 1522 len(todos))
1523 1523 else:
1524 1524 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1525 1525 len(todos))
1526 1526
1527 1527 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1528 1528
1529 1529 if fail_early:
1530 1530 return merge_check
1531 1531
1532 1532 # merge possible
1533 1533 merge_status, msg = PullRequestModel().merge_status(pull_request)
1534 1534 merge_check.merge_possible = merge_status
1535 1535 merge_check.merge_msg = msg
1536 1536 if not merge_status:
1537 1537 log.debug(
1538 1538 "MergeCheck: cannot merge, pull request merge not possible.")
1539 1539 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1540 1540
1541 1541 if fail_early:
1542 1542 return merge_check
1543 1543
1544 1544 return merge_check
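A hedged sketch of how a caller might consume the returned check object (the pull_request and user objects are assumed to exist already):

    check = MergeCheck.validate(pull_request, user, fail_early=False)
    if check.failed:
        for error_type, message in check.errors:
            log.debug('merge check %s: %s', error_type, message)
    elif check.merge_possible:
        log.debug('pull request can be merged: %s', check.merge_msg)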
1545 1545
1546 1546
1547 1547 ChangeTuple = namedtuple('ChangeTuple',
1548 1548 ['added', 'common', 'removed', 'total'])
1549 1549
1550 1550 FileChangeTuple = namedtuple('FileChangeTuple',
1551 1551 ['added', 'modified', 'removed'])
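For illustration, the namedtuples above are plain value containers, e.g.:

    change = ChangeTuple(added=['1a2b3c4d'], common=[], removed=[], total=1)
    file_change = FileChangeTuple(added=['new_file.py'], modified=[], removed=[])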
@@ -1,902 +1,907 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 users model for RhodeCode
23 23 """
24 24
25 25 import logging
26 26 import traceback
27 27
28 28 import datetime
29 29 from pylons.i18n.translation import _
30 30
31 31 import ipaddress
32 32 from sqlalchemy.exc import DatabaseError
33 33
34 34 from rhodecode import events
35 35 from rhodecode.lib.user_log_filter import user_log_filter
36 36 from rhodecode.lib.utils2 import (
37 37 safe_unicode, get_current_rhodecode_user, action_logger_generic,
38 38 AttributeDict, str2bool)
39 39 from rhodecode.lib.exceptions import (
40 40 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
41 41 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
42 42 from rhodecode.lib.caching_query import FromCache
43 43 from rhodecode.model import BaseModel
44 44 from rhodecode.model.auth_token import AuthTokenModel
45 45 from rhodecode.model.db import (
46 46 _hash_key, true, false, or_, joinedload, User, UserToPerm,
47 47 UserEmailMap, UserIpMap, UserLog)
48 48 from rhodecode.model.meta import Session
49 49 from rhodecode.model.repo_group import RepoGroupModel
50 50
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class UserModel(BaseModel):
56 56 cls = User
57 57
58 58 def get(self, user_id, cache=False):
59 59 user = self.sa.query(User)
60 60 if cache:
61 61 user = user.options(
62 62 FromCache("sql_cache_short", "get_user_%s" % user_id))
63 63 return user.get(user_id)
64 64
65 65 def get_user(self, user):
66 66 return self._get_user(user)
67 67
68 68 def _serialize_user(self, user):
69 69 import rhodecode.lib.helpers as h
70 70
71 71 return {
72 72 'id': user.user_id,
73 'first_name': h.escape(user.name),
74 'last_name': h.escape(user.lastname),
73 'first_name': user.first_name,
74 'last_name': user.last_name,
75 75 'username': user.username,
76 76 'email': user.email,
77 77 'icon_link': h.gravatar_url(user.email, 30),
78 78 'value_display': h.escape(h.person(user)),
79 79 'value': user.username,
80 80 'value_type': 'user',
81 81 'active': user.active,
82 82 }
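The hunk above swaps the explicit h.escape() calls for the User.first_name / User.last_name attributes. A hypothetical sketch of how such escaped attributes could be implemented (the real definitions live in rhodecode.model.db and are not part of this hunk):

    import rhodecode.lib.helpers as h

    class EscapedUserSketch(object):
        # illustrative only: raw columns are assumed to be .name and .lastname
        def __init__(self, name, lastname):
            self.name = name
            self.lastname = lastname

        @property
        def first_name(self):
            # escape once, centrally, so callers cannot forget to do it
            return h.escape(self.name)

        @property
        def last_name(self):
            return h.escape(self.lastname)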
83 83
84 84 def get_users(self, name_contains=None, limit=20, only_active=True):
85 85
86 86 query = self.sa.query(User)
87 87 if only_active:
88 88 query = query.filter(User.active == true())
89 89
90 90 if name_contains:
91 91 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
92 92 query = query.filter(
93 93 or_(
94 94 User.name.ilike(ilike_expression),
95 95 User.lastname.ilike(ilike_expression),
96 96 User.username.ilike(ilike_expression)
97 97 )
98 98 )
99 99 query = query.limit(limit)
100 100 users = query.all()
101 101
102 102 _users = [
103 103 self._serialize_user(user) for user in users
104 104 ]
105 105 return _users
106 106
107 107 def get_by_username(self, username, cache=False, case_insensitive=False):
108 108
109 109 if case_insensitive:
110 110 user = self.sa.query(User).filter(User.username.ilike(username))
111 111 else:
112 112 user = self.sa.query(User)\
113 113 .filter(User.username == username)
114 114 if cache:
115 115 name_key = _hash_key(username)
116 116 user = user.options(
117 117 FromCache("sql_cache_short", "get_user_%s" % name_key))
118 118 return user.scalar()
119 119
120 120 def get_by_email(self, email, cache=False, case_insensitive=False):
121 121 return User.get_by_email(email, case_insensitive, cache)
122 122
123 123 def get_by_auth_token(self, auth_token, cache=False):
124 124 return User.get_by_auth_token(auth_token, cache)
125 125
126 126 def get_active_user_count(self, cache=False):
127 127 return User.query().filter(
128 128 User.active == True).filter(
129 129 User.username != User.DEFAULT_USER).count()
130 130
131 131 def create(self, form_data, cur_user=None):
132 132 if not cur_user:
133 133 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
134 134
135 135 user_data = {
136 136 'username': form_data['username'],
137 137 'password': form_data['password'],
138 138 'email': form_data['email'],
139 139 'firstname': form_data['firstname'],
140 140 'lastname': form_data['lastname'],
141 141 'active': form_data['active'],
142 142 'extern_type': form_data['extern_type'],
143 143 'extern_name': form_data['extern_name'],
144 144 'admin': False,
145 145 'cur_user': cur_user
146 146 }
147 147
148 148 if 'create_repo_group' in form_data:
149 149 user_data['create_repo_group'] = str2bool(
150 150 form_data.get('create_repo_group'))
151 151
152 152 try:
153 153 if form_data.get('password_change'):
154 154 user_data['force_password_change'] = True
155 155 return UserModel().create_or_update(**user_data)
156 156 except Exception:
157 157 log.error(traceback.format_exc())
158 158 raise
159 159
160 160 def update_user(self, user, skip_attrs=None, **kwargs):
161 161 from rhodecode.lib.auth import get_crypt_password
162 162
163 163 user = self._get_user(user)
164 164 if user.username == User.DEFAULT_USER:
165 165 raise DefaultUserException(
166 166 _("You can't Edit this user since it's"
167 167 " crucial for entire application"))
168 168
169 169 # first store only defaults
170 170 user_attrs = {
171 171 'updating_user_id': user.user_id,
172 172 'username': user.username,
173 173 'password': user.password,
174 174 'email': user.email,
175 175 'firstname': user.name,
176 176 'lastname': user.lastname,
177 177 'active': user.active,
178 178 'admin': user.admin,
179 179 'extern_name': user.extern_name,
180 180 'extern_type': user.extern_type,
181 181 'language': user.user_data.get('language')
182 182 }
183 183
184 184 # in case there's new_password, that comes from form, use it to
185 185 # store password
186 186 if kwargs.get('new_password'):
187 187 kwargs['password'] = kwargs['new_password']
188 188
189 189 # cleanups, my_account password change form
190 190 kwargs.pop('current_password', None)
191 191 kwargs.pop('new_password', None)
192 192
193 193 # cleanups, user edit password change form
194 194 kwargs.pop('password_confirmation', None)
195 195 kwargs.pop('password_change', None)
196 196
197 197 # create repo group on user creation
198 198 kwargs.pop('create_repo_group', None)
199 199
200 200 # legacy forms send name, which is the firstname
201 201 firstname = kwargs.pop('name', None)
202 202 if firstname:
203 203 kwargs['firstname'] = firstname
204 204
205 205 for k, v in kwargs.items():
206 206 # skip if we don't want to update this
207 207 if skip_attrs and k in skip_attrs:
208 208 continue
209 209
210 210 user_attrs[k] = v
211 211
212 212 try:
213 213 return self.create_or_update(**user_attrs)
214 214 except Exception:
215 215 log.error(traceback.format_exc())
216 216 raise
217 217
218 218 def create_or_update(
219 219 self, username, password, email, firstname='', lastname='',
220 220 active=True, admin=False, extern_type=None, extern_name=None,
221 221 cur_user=None, plugin=None, force_password_change=False,
222 222 allow_to_create_user=True, create_repo_group=None,
223 223 updating_user_id=None, language=None, strict_creation_check=True):
224 224 """
225 225 Creates a new instance if not found, or updates current one
226 226
227 227 :param username:
228 228 :param password:
229 229 :param email:
230 230 :param firstname:
231 231 :param lastname:
232 232 :param active:
233 233 :param admin:
234 234 :param extern_type:
235 235 :param extern_name:
236 236 :param cur_user:
237 237 :param plugin: optional plugin this method was called from
238 238 :param force_password_change: toggles new or existing user flag
239 239 for password change
240 240 :param allow_to_create_user: Defines if the method can actually create
241 241 new users
242 242 :param create_repo_group: Defines if the method should also
243 243 create a repo group named after the user, owned by that user
244 244 :param updating_user_id: if set, this is the user we want to
245 245 update; this allows editing the username
246 246 :param language: language of user from interface.
247 247
248 248 :returns: new User object with injected `is_new_user` attribute.
249 249 """
250 250 if not cur_user:
251 251 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
252 252
253 253 from rhodecode.lib.auth import (
254 254 get_crypt_password, check_password, generate_auth_token)
255 255 from rhodecode.lib.hooks_base import (
256 256 log_create_user, check_allowed_create_user)
257 257
258 258 def _password_change(new_user, password):
259 259 # empty password
260 260 if not new_user.password:
261 261 return False
262 262
263 263 # password check is only needed for RhodeCode internal auth calls
264 264 # in case it's a plugin we don't care
265 265 if not plugin:
266 266
267 267 # first check if we were given the crypted password back; if it
268 268 # matches, it's not a password change
269 269 if new_user.password == password:
270 270 return False
271 271
272 272 password_match = check_password(password, new_user.password)
273 273 if not password_match:
274 274 return True
275 275
276 276 return False
277 277
278 278 # read settings on default personal repo group creation
279 279 if create_repo_group is None:
280 280 default_create_repo_group = RepoGroupModel()\
281 281 .get_default_create_personal_repo_group()
282 282 create_repo_group = default_create_repo_group
283 283
284 284 user_data = {
285 285 'username': username,
286 286 'password': password,
287 287 'email': email,
288 288 'firstname': firstname,
289 289 'lastname': lastname,
290 290 'active': active,
291 291 'admin': admin
292 292 }
293 293
294 294 if updating_user_id:
295 295 log.debug('Checking for existing account in RhodeCode '
296 296 'database with user_id `%s` ' % (updating_user_id,))
297 297 user = User.get(updating_user_id)
298 298 else:
299 299 log.debug('Checking for existing account in RhodeCode '
300 300 'database with username `%s` ' % (username,))
301 301 user = User.get_by_username(username, case_insensitive=True)
302 302
303 303 if user is None:
304 304 # we check internal flag if this method is actually allowed to
305 305 # create new user
306 306 if not allow_to_create_user:
307 307 msg = ('Method wants to create new user, but it is not '
308 308 'allowed to do so')
309 309 log.warning(msg)
310 310 raise NotAllowedToCreateUserError(msg)
311 311
312 312 log.debug('Creating new user %s', username)
313 313
314 314 # only if we create user that is active
315 315 new_active_user = active
316 316 if new_active_user and strict_creation_check:
317 317 # raises UserCreationError if it's not allowed for any reason to
318 318 # create new active user, this also executes pre-create hooks
319 319 check_allowed_create_user(user_data, cur_user, strict_check=True)
320 320 events.trigger(events.UserPreCreate(user_data))
321 321 new_user = User()
322 322 edit = False
323 323 else:
324 324 log.debug('updating user %s', username)
325 325 events.trigger(events.UserPreUpdate(user, user_data))
326 326 new_user = user
327 327 edit = True
328 328
329 329 # we're not allowed to edit default user
330 330 if user.username == User.DEFAULT_USER:
331 331 raise DefaultUserException(
332 332 _("You can't edit this user (`%(username)s`) since it's "
333 333 "crucial for entire application") % {'username': user.username})
334 334
335 335 # inject special attribute that will tell us if User is new or old
336 336 new_user.is_new_user = not edit
337 337 # for users that didn't specify auth type, we use RhodeCode built in
338 338 from rhodecode.authentication.plugins import auth_rhodecode
339 339 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
340 340 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name
341 341
342 342 try:
343 343 new_user.username = username
344 344 new_user.admin = admin
345 345 new_user.email = email
346 346 new_user.active = active
347 347 new_user.extern_name = safe_unicode(extern_name)
348 348 new_user.extern_type = safe_unicode(extern_type)
349 349 new_user.name = firstname
350 350 new_user.lastname = lastname
351 351
352 352 # set password only if creating a user or the password is changed
353 353 if not edit or _password_change(new_user, password):
354 354 reason = 'new password' if edit else 'new user'
355 355 log.debug('Updating password reason=>%s', reason)
356 356 new_user.password = get_crypt_password(password) if password else None
357 357
358 358 if force_password_change:
359 359 new_user.update_userdata(force_password_change=True)
360 360 if language:
361 361 new_user.update_userdata(language=language)
362 362 new_user.update_userdata(notification_status=True)
363 363
364 364 self.sa.add(new_user)
365 365
366 366 if not edit and create_repo_group:
367 367 RepoGroupModel().create_personal_repo_group(
368 368 new_user, commit_early=False)
369 369
370 370 if not edit:
371 371 # add the RSS token
372 372 AuthTokenModel().create(username,
373 373 description='Generated feed token',
374 374 role=AuthTokenModel.cls.ROLE_FEED)
375 375 log_create_user(created_by=cur_user, **new_user.get_dict())
376 376 events.trigger(events.UserPostCreate(user_data))
377 377 return new_user
378 378 except (DatabaseError,):
379 379 log.error(traceback.format_exc())
380 380 raise
381 381
382 382 def create_registration(self, form_data):
383 383 from rhodecode.model.notification import NotificationModel
384 384 from rhodecode.model.notification import EmailNotificationModel
385 385
386 386 try:
387 387 form_data['admin'] = False
388 388 form_data['extern_name'] = 'rhodecode'
389 389 form_data['extern_type'] = 'rhodecode'
390 390 new_user = self.create(form_data)
391 391
392 392 self.sa.add(new_user)
393 393 self.sa.flush()
394 394
395 395 user_data = new_user.get_dict()
396 396 kwargs = {
397 397 # use SQLALCHEMY safe dump of user data
398 398 'user': AttributeDict(user_data),
399 399 'date': datetime.datetime.now()
400 400 }
401 401 notification_type = EmailNotificationModel.TYPE_REGISTRATION
402 402 # pre-generate the subject for notification itself
403 403 (subject,
404 404 _h, _e, # we don't care about those
405 405 body_plaintext) = EmailNotificationModel().render_email(
406 406 notification_type, **kwargs)
407 407
408 408 # create notification objects, and emails
409 409 NotificationModel().create(
410 410 created_by=new_user,
411 411 notification_subject=subject,
412 412 notification_body=body_plaintext,
413 413 notification_type=notification_type,
414 414 recipients=None, # all admins
415 415 email_kwargs=kwargs,
416 416 )
417 417
418 418 return new_user
419 419 except Exception:
420 420 log.error(traceback.format_exc())
421 421 raise
422 422
423 423 def _handle_user_repos(self, username, repositories, handle_mode=None):
424 424 _superadmin = self.cls.get_first_super_admin()
425 425 left_overs = True
426 426
427 427 from rhodecode.model.repo import RepoModel
428 428
429 429 if handle_mode == 'detach':
430 430 for obj in repositories:
431 431 obj.user = _superadmin
432 432 # set a description so we know why the super admin now owns
433 433 # additional repositories that were orphaned!
434 434 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
435 435 self.sa.add(obj)
436 436 left_overs = False
437 437 elif handle_mode == 'delete':
438 438 for obj in repositories:
439 439 RepoModel().delete(obj, forks='detach')
440 440 left_overs = False
441 441
442 442 # if nothing was done, we still have leftovers
443 443 return left_overs
444 444
445 445 def _handle_user_repo_groups(self, username, repository_groups,
446 446 handle_mode=None):
447 447 _superadmin = self.cls.get_first_super_admin()
448 448 left_overs = True
449 449
450 450 from rhodecode.model.repo_group import RepoGroupModel
451 451
452 452 if handle_mode == 'detach':
453 453 for r in repository_groups:
454 454 r.user = _superadmin
455 455 # set a description so we know why the super admin now owns
456 456 # additional repository groups that were orphaned!
457 457 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
458 458 self.sa.add(r)
459 459 left_overs = False
460 460 elif handle_mode == 'delete':
461 461 for r in repository_groups:
462 462 RepoGroupModel().delete(r)
463 463 left_overs = False
464 464
465 465 # if nothing was done, we still have leftovers
466 466 return left_overs
467 467
468 468 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
469 469 _superadmin = self.cls.get_first_super_admin()
470 470 left_overs = True
471 471
472 472 from rhodecode.model.user_group import UserGroupModel
473 473
474 474 if handle_mode == 'detach':
475 475 for r in user_groups:
476 476 for user_user_group_to_perm in r.user_user_group_to_perm:
477 477 if user_user_group_to_perm.user.username == username:
478 478 user_user_group_to_perm.user = _superadmin
479 479 r.user = _superadmin
480 480 # set a description so we know why the super admin now owns
481 481 # additional user groups that were orphaned!
482 482 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
483 483 self.sa.add(r)
484 484 left_overs = False
485 485 elif handle_mode == 'delete':
486 486 for r in user_groups:
487 487 UserGroupModel().delete(r)
488 488 left_overs = False
489 489
490 490 # if nothing was done, we still have leftovers
491 491 return left_overs
492 492
493 493 def delete(self, user, cur_user=None, handle_repos=None,
494 494 handle_repo_groups=None, handle_user_groups=None):
495 495 if not cur_user:
496 496 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
497 497 user = self._get_user(user)
498 498
499 499 try:
500 500 if user.username == User.DEFAULT_USER:
501 501 raise DefaultUserException(
502 502 _(u"You can't remove this user since it's"
503 503 u" crucial for entire application"))
504 504
505 505 left_overs = self._handle_user_repos(
506 506 user.username, user.repositories, handle_repos)
507 507 if left_overs and user.repositories:
508 508 repos = [x.repo_name for x in user.repositories]
509 509 raise UserOwnsReposException(
510 510 _(u'user "%s" still owns %s repositories and cannot be '
511 511 u'removed. Switch owners or remove those repositories:%s')
512 512 % (user.username, len(repos), ', '.join(repos)))
513 513
514 514 left_overs = self._handle_user_repo_groups(
515 515 user.username, user.repository_groups, handle_repo_groups)
516 516 if left_overs and user.repository_groups:
517 517 repo_groups = [x.group_name for x in user.repository_groups]
518 518 raise UserOwnsRepoGroupsException(
519 519 _(u'user "%s" still owns %s repository groups and cannot be '
520 520 u'removed. Switch owners or remove those repository groups:%s')
521 521 % (user.username, len(repo_groups), ', '.join(repo_groups)))
522 522
523 523 left_overs = self._handle_user_user_groups(
524 524 user.username, user.user_groups, handle_user_groups)
525 525 if left_overs and user.user_groups:
526 526 user_groups = [x.users_group_name for x in user.user_groups]
527 527 raise UserOwnsUserGroupsException(
528 528 _(u'user "%s" still owns %s user groups and cannot be '
529 529 u'removed. Switch owners or remove those user groups:%s')
530 530 % (user.username, len(user_groups), ', '.join(user_groups)))
531 531
532 532 # we might change the user data with detach/delete, make sure
533 533 # the object is marked as expired before actually deleting !
534 534 self.sa.expire(user)
535 535 self.sa.delete(user)
536 536 from rhodecode.lib.hooks_base import log_delete_user
537 537 log_delete_user(deleted_by=cur_user, **user.get_dict())
538 538 except Exception:
539 539 log.error(traceback.format_exc())
540 540 raise
541 541
542 542 def reset_password_link(self, data, pwd_reset_url):
543 543 from rhodecode.lib.celerylib import tasks, run_task
544 544 from rhodecode.model.notification import EmailNotificationModel
545 545 user_email = data['email']
546 546 try:
547 547 user = User.get_by_email(user_email)
548 548 if user:
549 549 log.debug('password reset user found %s', user)
550 550
551 551 email_kwargs = {
552 552 'password_reset_url': pwd_reset_url,
553 553 'user': user,
554 554 'email': user_email,
555 555 'date': datetime.datetime.now()
556 556 }
557 557
558 558 (subject, headers, email_body,
559 559 email_body_plaintext) = EmailNotificationModel().render_email(
560 560 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
561 561
562 562 recipients = [user_email]
563 563
564 564 action_logger_generic(
565 565 'sending password reset email to user: {}'.format(
566 566 user), namespace='security.password_reset')
567 567
568 568 run_task(tasks.send_email, recipients, subject,
569 569 email_body_plaintext, email_body)
570 570
571 571 else:
572 572 log.debug("password reset email %s not found", user_email)
573 573 except Exception:
574 574 log.error(traceback.format_exc())
575 575 return False
576 576
577 577 return True
578 578
579 579 def reset_password(self, data):
580 580 from rhodecode.lib.celerylib import tasks, run_task
581 581 from rhodecode.model.notification import EmailNotificationModel
582 582 from rhodecode.lib import auth
583 583 user_email = data['email']
584 584 pre_db = True
585 585 try:
586 586 user = User.get_by_email(user_email)
587 587 new_passwd = auth.PasswordGenerator().gen_password(
588 588 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
589 589 if user:
590 590 user.password = auth.get_crypt_password(new_passwd)
591 591 # also force this user to reset his password !
592 592 user.update_userdata(force_password_change=True)
593 593
594 594 Session().add(user)
595 595
596 596 # now delete the token in question
597 597 UserApiKeys = AuthTokenModel.cls
598 598 UserApiKeys().query().filter(
599 599 UserApiKeys.api_key == data['token']).delete()
600 600
601 601 Session().commit()
602 602 log.info('successfully reset password for `%s`', user_email)
603 603
604 604 if new_passwd is None:
605 605 raise Exception('unable to generate new password')
606 606
607 607 pre_db = False
608 608
609 609 email_kwargs = {
610 610 'new_password': new_passwd,
611 611 'user': user,
612 612 'email': user_email,
613 613 'date': datetime.datetime.now()
614 614 }
615 615
616 616 (subject, headers, email_body,
617 617 email_body_plaintext) = EmailNotificationModel().render_email(
618 618 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
619 619 **email_kwargs)
620 620
621 621 recipients = [user_email]
622 622
623 623 action_logger_generic(
624 624 'sent new password to user: {} with email: {}'.format(
625 625 user, user_email), namespace='security.password_reset')
626 626
627 627 run_task(tasks.send_email, recipients, subject,
628 628 email_body_plaintext, email_body)
629 629
630 630 except Exception:
631 631 log.error('Failed to update user password')
632 632 log.error(traceback.format_exc())
633 633 if pre_db:
634 634 # we roll back only if the local db operations fail. Once it gets
635 635 # into run_task, we're past the rollback state and it wouldn't work
636 636 Session().rollback()
637 637
638 638 return True
639 639
640 640 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
641 641 """
642 642 Fetches auth_user by user_id, or api_key if present.
643 643 Fills auth_user attributes with those taken from the database.
644 644 Additionally sets is_authenticated to False if the lookup fails
645 645 or the user is not present in the database.
646 646
647 647 :param auth_user: instance of user to set attributes
648 648 :param user_id: user id to fetch by
649 649 :param api_key: api key to fetch by
650 650 :param username: username to fetch by
651 651 """
652 652 if user_id is None and api_key is None and username is None:
653 653 raise Exception('You need to pass user_id, api_key or username')
654 654
655 655 log.debug(
656 656 'doing fill data based on: user_id:%s api_key:%s username:%s',
657 657 user_id, api_key, username)
658 658 try:
659 659 dbuser = None
660 660 if user_id:
661 661 dbuser = self.get(user_id)
662 662 elif api_key:
663 663 dbuser = self.get_by_auth_token(api_key)
664 664 elif username:
665 665 dbuser = self.get_by_username(username)
666 666
667 667 if not dbuser:
668 668 log.warning(
669 669 'Unable to lookup user by id:%s api_key:%s username:%s',
670 670 user_id, api_key, username)
671 671 return False
672 672 if not dbuser.active:
673 673 log.debug('User `%s:%s` is inactive, skipping fill data',
674 674 username, user_id)
675 675 return False
676 676
677 677 log.debug('filling user:%s data', dbuser)
678 678
679 679 # TODO: johbo: Think about this and find a clean solution
680 680 user_data = dbuser.get_dict()
681 681 user_data.update(dbuser.get_api_data(include_secrets=True))
682 user_data.update({
683 # explicitly set the safe, escaped values
684 'first_name': dbuser.first_name,
685 'last_name': dbuser.last_name,
686 })
682 687
683 688 for k, v in user_data.iteritems():
684 689 # properties of auth user we don't update
685 690 if k not in ['auth_tokens', 'permissions']:
686 691 setattr(auth_user, k, v)
687 692
688 693 # few extras
689 694 setattr(auth_user, 'feed_token', dbuser.feed_token)
690 695 except Exception:
691 696 log.error(traceback.format_exc())
692 697 auth_user.is_authenticated = False
693 698 return False
694 699
695 700 return True
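A minimal sketch of filling an auth-user-like object; a plain AttributeDict stands in for the real auth user object here, and the username is made up:

    auth_user = AttributeDict(is_authenticated=True)
    if UserModel().fill_data(auth_user, username='example-user'):
        # attributes such as email, first_name and last_name are now populated
        log.debug('filled data for %s', auth_user.username)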
696 701
697 702 def has_perm(self, user, perm):
698 703 perm = self._get_perm(perm)
699 704 user = self._get_user(user)
700 705
701 706 return UserToPerm.query().filter(UserToPerm.user == user)\
702 707 .filter(UserToPerm.permission == perm).scalar() is not None
703 708
704 709 def grant_perm(self, user, perm):
705 710 """
706 711 Grant user global permissions
707 712
708 713 :param user:
709 714 :param perm:
710 715 """
711 716 user = self._get_user(user)
712 717 perm = self._get_perm(perm)
713 718 # if this permission is already granted skip it
714 719 _perm = UserToPerm.query()\
715 720 .filter(UserToPerm.user == user)\
716 721 .filter(UserToPerm.permission == perm)\
717 722 .scalar()
718 723 if _perm:
719 724 return
720 725 new = UserToPerm()
721 726 new.user = user
722 727 new.permission = perm
723 728 self.sa.add(new)
724 729 return new
725 730
726 731 def revoke_perm(self, user, perm):
727 732 """
728 733 Revoke user's global permissions
729 734
730 735 :param user:
731 736 :param perm:
732 737 """
733 738 user = self._get_user(user)
734 739 perm = self._get_perm(perm)
735 740
736 741 obj = UserToPerm.query()\
737 742 .filter(UserToPerm.user == user)\
738 743 .filter(UserToPerm.permission == perm)\
739 744 .scalar()
740 745 if obj:
741 746 self.sa.delete(obj)
742 747
743 748 def add_extra_email(self, user, email):
744 749 """
745 750 Adds email address to UserEmailMap
746 751
747 752 :param user:
748 753 :param email:
749 754 """
750 755 from rhodecode.model import forms
751 756 form = forms.UserExtraEmailForm()()
752 757 data = form.to_python({'email': email})
753 758 user = self._get_user(user)
754 759
755 760 obj = UserEmailMap()
756 761 obj.user = user
757 762 obj.email = data['email']
758 763 self.sa.add(obj)
759 764 return obj
760 765
761 766 def delete_extra_email(self, user, email_id):
762 767 """
763 768 Removes email address from UserEmailMap
764 769
765 770 :param user:
766 771 :param email_id:
767 772 """
768 773 user = self._get_user(user)
769 774 obj = UserEmailMap.query().get(email_id)
770 775 if obj and obj.user_id == user.user_id:
771 776 self.sa.delete(obj)
772 777
773 778 def parse_ip_range(self, ip_range):
774 779 ip_list = []
775 780 def make_unique(value):
776 781 seen = []
777 782 return [c for c in value if not (c in seen or seen.append(c))]
778 783
779 784 # first split by commas
780 785 for ip_range in ip_range.split(','):
781 786 if not ip_range:
782 787 continue
783 788 ip_range = ip_range.strip()
784 789 if '-' in ip_range:
785 790 start_ip, end_ip = ip_range.split('-', 1)
786 791 start_ip = ipaddress.ip_address(start_ip.strip())
787 792 end_ip = ipaddress.ip_address(end_ip.strip())
788 793 parsed_ip_range = []
789 794
790 795 for index in xrange(int(start_ip), int(end_ip) + 1):
791 796 new_ip = ipaddress.ip_address(index)
792 797 parsed_ip_range.append(str(new_ip))
793 798 ip_list.extend(parsed_ip_range)
794 799 else:
795 800 ip_list.append(ip_range)
796 801
797 802 return make_unique(ip_list)
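A worked example of the range expansion above (the addresses are hypothetical; note the unicode input expected by the ipaddress module):

    UserModel().parse_ip_range(u'127.0.0.1, 10.0.0.1-10.0.0.3')
    # -> ['127.0.0.1', '10.0.0.1', '10.0.0.2', '10.0.0.3']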
798 803
799 804 def add_extra_ip(self, user, ip, description=None):
800 805 """
801 806 Adds ip address to UserIpMap
802 807
803 808 :param user:
804 809 :param ip:
805 810 """
806 811 from rhodecode.model import forms
807 812 form = forms.UserExtraIpForm()()
808 813 data = form.to_python({'ip': ip})
809 814 user = self._get_user(user)
810 815
811 816 obj = UserIpMap()
812 817 obj.user = user
813 818 obj.ip_addr = data['ip']
814 819 obj.description = description
815 820 self.sa.add(obj)
816 821 return obj
817 822
818 823 def delete_extra_ip(self, user, ip_id):
819 824 """
820 825 Removes ip address from UserIpMap
821 826
822 827 :param user:
823 828 :param ip_id:
824 829 """
825 830 user = self._get_user(user)
826 831 obj = UserIpMap.query().get(ip_id)
827 832 if obj and obj.user_id == user.user_id:
828 833 self.sa.delete(obj)
829 834
830 835 def get_accounts_in_creation_order(self, current_user=None):
831 836 """
832 837 Get accounts in order of creation for deactivation for license limits
833 838
834 839 pick the currently logged in user and append it to the list at position 0,
835 840 pick all super-admins in order of creation date and add them to the list,
836 841 pick all other accounts in order of creation and add them to the list.
837 842
838 843 Based on that list, the accounts at the end can be disabled: they were
839 844 created last, and the tail of the list contains neither the super admins
840 845 nor the current user.
841 846
842 847 :param current_user: optionally current user running this operation
843 848 """
844 849
845 850 if not current_user:
846 851 current_user = get_current_rhodecode_user()
847 852 active_super_admins = [
848 853 x.user_id for x in User.query()
849 854 .filter(User.user_id != current_user.user_id)
850 855 .filter(User.active == true())
851 856 .filter(User.admin == true())
852 857 .order_by(User.created_on.asc())]
853 858
854 859 active_regular_users = [
855 860 x.user_id for x in User.query()
856 861 .filter(User.user_id != current_user.user_id)
857 862 .filter(User.active == true())
858 863 .filter(User.admin == false())
859 864 .order_by(User.created_on.asc())]
860 865
861 866 list_of_accounts = [current_user.user_id]
862 867 list_of_accounts += active_super_admins
863 868 list_of_accounts += active_regular_users
864 869
865 870 return list_of_accounts
866 871
867 872 def deactivate_last_users(self, expected_users):
868 873 """
869 874 Deactivate accounts that are over the license limits.
870 875 The algorithm for choosing which accounts to disable is as follows:
871 876
872 877 Get current user, then super admins in creation order, then regular
873 878 active users in creation order.
874 879
875 880 Using that list we mark all accounts from the end of it as inactive.
876 881 This way we block only the most recently created accounts.
877 882
878 883 :param expected_users: the number of users expected to remain active; we
879 884 deactivate the accounts beyond that count, from the end of the list
880 885 """
881 886
882 887 list_of_accounts = self.get_accounts_in_creation_order()
883 888
884 889 for acc_id in list_of_accounts[expected_users + 1:]:
885 890 user = User.get(acc_id)
886 891 log.info('Deactivating account %s for license unlock', user)
887 892 user.active = False
888 893 Session().add(user)
889 894 Session().commit()
890 895
891 896 return
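A short usage sketch, assuming the licensed seat count is known to the caller (the value is illustrative):

    licensed_seats = 25  # hypothetical license limit
    UserModel().deactivate_last_users(expected_users=licensed_seats)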
892 897
893 898 def get_user_log(self, user, filter_term):
894 899 user_log = UserLog.query()\
895 900 .filter(or_(UserLog.user_id == user.user_id,
896 901 UserLog.username == user.username))\
897 902 .options(joinedload(UserLog.user))\
898 903 .options(joinedload(UserLog.repository))\
899 904 .order_by(UserLog.action_date.desc())
900 905
901 906 user_log = user_log_filter(user_log, filter_term)
902 907 return user_log