fix(events): fixed celery-based events where the request object was missing
super-admin
r5340:d43cbc34 default
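Before reading the diff, a minimal, self-contained sketch of the pattern this commit enables may help. The names below (Event, notify_task, acting_user) are illustrative stand-ins, not RhodeCode internals: an event raised from a Celery task has no Pyramid request bound to the thread, so the caller can now pass the acting user explicitly instead of relying on get_current_request().

    # Hypothetical stand-ins, not RhodeCode's real classes.
    class Event:
        def __init__(self, request=None, actor=None):
            self._request = request
            self._actor = actor

        @property
        def actor(self):
            # an explicitly passed actor wins; otherwise fall back to the
            # request-bound user, and finally to a system placeholder
            if self._actor:
                return self._actor
            request_user = getattr(self._request, 'user', None)
            return request_user or '__SYSTEM__'

    def notify_task(acting_user):
        # inside a Celery worker there is no web request, so the actor is
        # handed over explicitly and the event still reports the right user
        event = Event(actor=acting_user)
        assert event.actor is acting_user
        return event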
@@ -1,131 +1,140 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18 19 import logging
19 20 import datetime
20 import typing
21 21
22 22 from zope.cachedescriptors.property import Lazy as LazyProperty
23 from pyramid.threadlocal import get_current_request
24 23
24 from rhodecode.lib.pyramid_utils import get_current_request
25 from rhodecode.lib.auth import AuthUser
25 26 from rhodecode.lib.utils2 import AttributeDict
26 27
27 28
28 29 # this is a user object to be used for events caused by the system (eg. shell)
29 30 SYSTEM_USER = AttributeDict(dict(
30 31 username='__SYSTEM__',
31 32 user_id='__SYSTEM_ID__'
32 33 ))
33 34
34 35 log = logging.getLogger(__name__)
35 36
36 37
37 38 class RhodecodeEvent(object):
38 39 """
39 40 Base event class for all RhodeCode events
40 41 """
41 42 name = "RhodeCodeEvent"
42 43 no_url_set = '<no server_url available>'
43 44
44 def __init__(self, request=None):
45 def __init__(self, request=None, actor=None):
45 46 self._request = request
47 self._actor = actor
46 48 self.utc_timestamp = datetime.datetime.utcnow()
47 49
48 50 def __repr__(self):
49 51 return '<{}:({})>'.format(self.__class__.__name__, self.name)
50 52
51 53 def get_request(self):
52 54 if self._request:
53 55 return self._request
54 56 return get_current_request()
55 57
56 58 @LazyProperty
57 59 def request(self):
58 60 return self.get_request()
59 61
60 62 @property
61 63 def auth_user(self):
62 64 if not self.request:
63 65 return
64 66
65 67 user = getattr(self.request, 'user', None)
66 68 if user:
67 69 return user
68 70
69 71 api_user = getattr(self.request, 'rpc_user', None)
70 72 if api_user:
71 73 return api_user
72 74
73 75 @property
74 76 def actor(self):
77 # if an explicit actor is specified, use this
78 if self._actor:
79 return self._actor
80
75 81 auth_user = self.auth_user
76 if auth_user:
82 log.debug('Got integration actor: %s', auth_user)
83 if isinstance(auth_user, AuthUser):
77 84 instance = auth_user.get_instance()
85 # we can't find this DB user...
78 86 if not instance:
79 87 return AttributeDict(dict(
80 88 username=auth_user.username,
81 89 user_id=auth_user.user_id,
82 90 ))
83 return instance
84
91 elif auth_user:
92 return auth_user
85 93 return SYSTEM_USER
86 94
87 95 @property
88 96 def actor_ip(self):
89 97 auth_user = self.auth_user
90 98 if auth_user:
91 99 return auth_user.ip_addr
92 100 return '<no ip available>'
93 101
94 102 @property
95 103 def server_url(self):
96 104 if self.request:
97 105 try:
98 106 return self.request.route_url('home')
99 107 except Exception:
100 108 log.exception('Failed to fetch URL for server')
101 109 return self.no_url_set
102 110
103 111 return self.no_url_set
104 112
105 113 def as_dict(self):
106 114 data = {
107 115 'name': self.name,
108 116 'utc_timestamp': self.utc_timestamp,
109 117 'actor_ip': self.actor_ip,
110 118 'actor': {
111 119 'username': self.actor.username,
112 120 'user_id': self.actor.user_id
113 121 },
114 122 'server_url': self.server_url
115 123 }
116 124 return data
117 125
118 126
119 127 class RhodeCodeIntegrationEvent(RhodecodeEvent):
120 128 """
121 129 Special subclass for Integration events
122 130 """
123 131 description = ''
124 132
125 133
126 134 class FtsBuild(RhodecodeEvent):
127 135 """
128 136 This event will be triggered when FTS Build is triggered
129 137 """
130 138 name = 'fts-build'
131 139 display_name = 'Start FTS Build'
140
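Putting the hunk above together, actor resolution now appears to follow four steps: an explicitly passed actor, then the database instance behind an AuthUser, then the raw auth_user (e.g. an rpc_user), and finally the SYSTEM_USER placeholder. A hedged, self-contained sketch of that fallback chain, using stand-in types instead of the real AuthUser/AttributeDict classes:

    # Stand-ins only; the real code uses AuthUser, AttributeDict and SYSTEM_USER.
    class FakeAuthUser:
        def __init__(self, username, user_id, db_user=None):
            self.username = username
            self.user_id = user_id
            self._db_user = db_user

        def get_instance(self):
            return self._db_user  # may be None if no DB row can be found

    SYSTEM = {'username': '__SYSTEM__', 'user_id': '__SYSTEM_ID__'}

    def resolve_actor(explicit_actor, auth_user):
        if explicit_actor:                        # 1. explicit actor wins
            return explicit_actor
        if isinstance(auth_user, FakeAuthUser):   # 2. prefer the DB instance
            return auth_user.get_instance() or {  #    else plain attributes
                'username': auth_user.username,
                'user_id': auth_user.user_id,
            }
        if auth_user:                             # 3. e.g. an rpc_user object
            return auth_user
        return SYSTEM                             # 4. system-caused events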
@@ -1,444 +1,444 b''
1 1 # Copyright (C) 2016-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import collections
20 20 import logging
21 21 import datetime
22 22
23 23 from rhodecode.translation import lazy_ugettext
24 24 from rhodecode.model.db import User, Repository
25 25 from rhodecode.events.base import RhodeCodeIntegrationEvent
26 26 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
27 27
28 28 log = logging.getLogger(__name__)
29 29
30 30
31 31 def _commits_as_dict(event, commit_ids, repos):
32 32 """
33 33 Helper function to serialize commit_ids
34 34
35 35 :param event: class calling this method
36 36 :param commit_ids: commits to get
37 37 :param repos: a list of repos to check
38 38 """
39 39 from rhodecode.lib.utils2 import extract_mentioned_users
40 40 from rhodecode.lib.helpers import (
41 41 urlify_commit_message, process_patterns, chop_at_smart)
42 42 from rhodecode.model.repo import RepoModel
43 43
44 44 if not repos:
45 45 raise Exception('no repo defined')
46 46
47 47 if not isinstance(repos, (tuple, list)):
48 48 repos = [repos]
49 49
50 50 if not commit_ids:
51 51 return []
52 52
53 53 needed_commits = list(commit_ids)
54 54
55 55 commits = []
56 56 reviewers = []
57 57 for repo in repos:
58 58 if not needed_commits:
59 59 return commits # return early if we have the commits we need
60 60
61 61 vcs_repo = repo.scm_instance(cache=False)
62 62
63 63 try:
64 64 # use copy of needed_commits since we modify it while iterating
65 65 for commit_id in list(needed_commits):
66 66 if commit_id.startswith('tag=>'):
67 67 raw_id = commit_id[5:]
68 68 cs_data = {
69 69 'raw_id': commit_id, 'short_id': commit_id,
70 70 'branch': None,
71 71 'git_ref_change': 'tag_add',
72 72 'message': f'Added new tag {raw_id}',
73 73 'author': event.actor.full_contact,
74 74 'date': datetime.datetime.now(),
75 75 'refs': {
76 76 'branches': [],
77 77 'bookmarks': [],
78 78 'tags': []
79 79 }
80 80 }
81 81 commits.append(cs_data)
82 82
83 83 elif commit_id.startswith('delete_branch=>'):
84 84 raw_id = commit_id[15:]
85 85 cs_data = {
86 86 'raw_id': commit_id, 'short_id': commit_id,
87 87 'branch': None,
88 88 'git_ref_change': 'branch_delete',
89 89 'message': f'Deleted branch {raw_id}',
90 90 'author': event.actor.full_contact,
91 91 'date': datetime.datetime.now(),
92 92 'refs': {
93 93 'branches': [],
94 94 'bookmarks': [],
95 95 'tags': []
96 96 }
97 97 }
98 98 commits.append(cs_data)
99 99
100 100 else:
101 101 try:
102 102 cs = vcs_repo.get_commit(commit_id)
103 103 except CommitDoesNotExistError:
104 104 continue # maybe its in next repo
105 105
106 106 cs_data = cs.__json__()
107 107 cs_data['refs'] = cs._get_refs()
108 108
109 109 cs_data['mentions'] = extract_mentioned_users(cs_data['message'])
110 110 cs_data['reviewers'] = reviewers
111 111 cs_data['url'] = RepoModel().get_commit_url(
112 112 repo, cs_data['raw_id'], request=event.request)
113 113 cs_data['permalink_url'] = RepoModel().get_commit_url(
114 114 repo, cs_data['raw_id'], request=event.request,
115 115 permalink=True)
116 116 urlified_message, issues_data, errors = process_patterns(
117 117 cs_data['message'], repo.repo_name)
118 118 cs_data['issues'] = issues_data
119 119 cs_data['message_html'] = urlify_commit_message(
120 120 cs_data['message'], repo.repo_name)
121 121 cs_data['message_html_title'] = chop_at_smart(
122 122 cs_data['message'], '\n', suffix_if_chopped='...')
123 123 commits.append(cs_data)
124 124
125 125 needed_commits.remove(commit_id)
126 126
127 127 except Exception:
128 128 log.exception('Failed to extract commits data')
128 128 # we don't send any commits when a crash happens; only the full list
129 129 # matters, so we short-circuit here.
131 131 return []
132 132
133 133 # we failed to remove all needed_commits from all repositories
134 134 if needed_commits:
135 135 raise ValueError(f'Unexpectedly not found {needed_commits} in all available repos {repos}')
136 136
137 137 missing_commits = set(commit_ids) - set(c['raw_id'] for c in commits)
138 138 if missing_commits:
139 139 log.error('Inconsistent repository state. '
140 140 'Missing commits: %s', ', '.join(missing_commits))
141 141
142 142 return commits
143 143
144 144
145 145 def _issues_as_dict(commits):
146 146 """ Helper function to serialize issues from commits """
147 147 issues = {}
148 148 for commit in commits:
149 149 for issue in commit['issues']:
150 150 issues[issue['id']] = issue
151 151 return issues
152 152
153 153
154 154 class RepoEvent(RhodeCodeIntegrationEvent):
155 155 """
156 156 Base class for events acting on a repository.
157 157 """
158 158
159 def __init__(self, repo):
159 def __init__(self, repo, actor=None):
160 160 """
161 161 :param repo: a :class:`Repository` instance
162 162 """
163 super().__init__()
163 super().__init__(actor=actor)
164 164 self.repo = repo
165 165
166 166 def as_dict(self):
167 167 from rhodecode.model.repo import RepoModel
168 168 data = super().as_dict()
169 169
170 170 extra_fields = collections.OrderedDict()
171 171 for field in self.repo.extra_fields:
172 172 extra_fields[field.field_key] = field.field_value
173 173
174 174 data.update({
175 175 'repo': {
176 176 'repo_id': self.repo.repo_id,
177 177 'repo_name': self.repo.repo_name,
178 178 'repo_type': self.repo.repo_type,
179 179 'url': RepoModel().get_url(
180 180 self.repo, request=self.request),
181 181 'permalink_url': RepoModel().get_url(
182 182 self.repo, request=self.request, permalink=True),
183 183 'extra_fields': extra_fields
184 184 }
185 185 })
186 186 return data
187 187
188 188
189 189 class RepoCommitCommentEvent(RepoEvent):
190 190 """
191 191 An instance of this class is emitted as an :term:`event` after a comment is made
192 192 on repository commit.
193 193 """
194 194
195 195 name = 'repo-commit-comment'
196 196 display_name = lazy_ugettext('repository commit comment')
197 197 description = lazy_ugettext('Event triggered after a comment was made '
198 198 'on commit inside a repository')
199 199
200 200 def __init__(self, repo, commit, comment):
201 201 super().__init__(repo)
202 202 self.commit = commit
203 203 self.comment = comment
204 204
205 205 def as_dict(self):
206 206 data = super().as_dict()
207 207 data['commit'] = {
208 208 'commit_id': self.commit.raw_id,
209 209 'commit_message': self.commit.message,
210 210 'commit_branch': self.commit.branch,
211 211 }
212 212
213 213 data['comment'] = {
214 214 'comment_id': self.comment.comment_id,
215 215 'comment_text': self.comment.text,
216 216 'comment_type': self.comment.comment_type,
217 217 'comment_f_path': self.comment.f_path,
218 218 'comment_line_no': self.comment.line_no,
219 219 'comment_version': self.comment.last_version,
220 220 }
221 221 return data
222 222
223 223
224 224 class RepoCommitCommentEditEvent(RepoEvent):
225 225 """
226 226 An instance of this class is emitted as an :term:`event` after a comment is edited
227 227 on repository commit.
228 228 """
229 229
230 230 name = 'repo-commit-edit-comment'
231 231 display_name = lazy_ugettext('repository commit edit comment')
232 232 description = lazy_ugettext('Event triggered after a comment was edited '
233 233 'on commit inside a repository')
234 234
235 235 def __init__(self, repo, commit, comment):
236 236 super().__init__(repo)
237 237 self.commit = commit
238 238 self.comment = comment
239 239
240 240 def as_dict(self):
241 241 data = super().as_dict()
242 242 data['commit'] = {
243 243 'commit_id': self.commit.raw_id,
244 244 'commit_message': self.commit.message,
245 245 'commit_branch': self.commit.branch,
246 246 }
247 247
248 248 data['comment'] = {
249 249 'comment_id': self.comment.comment_id,
250 250 'comment_text': self.comment.text,
251 251 'comment_type': self.comment.comment_type,
252 252 'comment_f_path': self.comment.f_path,
253 253 'comment_line_no': self.comment.line_no,
254 254 'comment_version': self.comment.last_version,
255 255 }
256 256 return data
257 257
258 258
259 259 class RepoPreCreateEvent(RepoEvent):
260 260 """
261 261 An instance of this class is emitted as an :term:`event` before a repo is
262 262 created.
263 263 """
264 264 name = 'repo-pre-create'
265 265 display_name = lazy_ugettext('repository pre create')
266 266 description = lazy_ugettext('Event triggered before repository is created')
267 267
268 268
269 269 class RepoCreateEvent(RepoEvent):
270 270 """
271 271 An instance of this class is emitted as an :term:`event` whenever a repo is
272 272 created.
273 273 """
274 274 name = 'repo-create'
275 275 display_name = lazy_ugettext('repository created')
276 276 description = lazy_ugettext('Event triggered after repository was created')
277 277
278 278
279 279 class RepoPreDeleteEvent(RepoEvent):
280 280 """
281 281 An instance of this class is emitted as an :term:`event` before a repo is
282 282 deleted.
283 283 """
284 284 name = 'repo-pre-delete'
285 285 display_name = lazy_ugettext('repository pre delete')
286 286 description = lazy_ugettext('Event triggered before a repository is deleted')
287 287
288 288
289 289 class RepoDeleteEvent(RepoEvent):
290 290 """
291 291 An instance of this class is emitted as an :term:`event` whenever a repo is
292 292 deleted.
293 293 """
294 294 name = 'repo-delete'
295 295 display_name = lazy_ugettext('repository deleted')
296 296 description = lazy_ugettext('Event triggered after repository was deleted')
297 297
298 298
299 299 class RepoVCSEvent(RepoEvent):
300 300 """
301 301 Base class for events triggered by the VCS
302 302 """
303 303 name = ''
304 304 display_name = 'generic_vcs_event'
305 305
306 306 def __init__(self, repo_name, extras):
307 307 self.repo = Repository.get_by_repo_name(repo_name)
308 308 if not self.repo:
309 309 raise Exception(f'repo by this name {repo_name} does not exist')
310 310 self.extras = extras
311 311 super().__init__(self.repo)
312 312
313 313 @property
314 314 def actor(self):
315 315 if self.extras.get('username'):
316 316 return User.get_by_username(self.extras['username'])
317 317
318 318 @property
319 319 def actor_ip(self):
320 320 if self.extras.get('ip'):
321 321 return self.extras['ip']
322 322
323 323 @property
324 324 def server_url(self):
325 325 if self.extras.get('server_url'):
326 326 return self.extras['server_url']
327 327
328 328 @property
329 329 def request(self):
330 330 return self.extras.get('request') or self.get_request()
331 331
332 332
333 333 class RepoPrePullEvent(RepoVCSEvent):
334 334 """
335 335 An instance of this class is emitted as an :term:`event` before commits
336 336 are pulled from a repo.
337 337 """
338 338 name = 'repo-pre-pull'
339 339 display_name = lazy_ugettext('repository pre pull')
340 340 description = lazy_ugettext('Event triggered before repository code is pulled')
341 341
342 342
343 343 class RepoPullEvent(RepoVCSEvent):
344 344 """
345 345 An instance of this class is emitted as an :term:`event` after commits
346 346 are pulled from a repo.
347 347 """
348 348 name = 'repo-pull'
349 349 display_name = lazy_ugettext('repository pull')
350 350 description = lazy_ugettext('Event triggered after repository code was pulled')
351 351
352 352
353 353 class RepoPrePushEvent(RepoVCSEvent):
354 354 """
355 355 An instance of this class is emitted as an :term:`event` before commits
356 356 are pushed to a repo.
357 357 """
358 358 name = 'repo-pre-push'
359 359 display_name = lazy_ugettext('repository pre push')
360 360 description = lazy_ugettext('Event triggered before the code is '
361 361 'pushed to a repository')
362 362
363 363
364 364 class RepoPushEvent(RepoVCSEvent):
365 365 """
366 366 An instance of this class is emitted as an :term:`event` after commits
367 367 are pushed to a repo.
368 368
369 369 :param extras: (optional) dict of data from proxied VCS actions
370 370 """
371 371 name = 'repo-push'
372 372 display_name = lazy_ugettext('repository push')
373 373 description = lazy_ugettext('Event triggered after the code was '
374 374 'pushed to a repository')
375 375
376 376 def __init__(self, repo_name, pushed_commit_ids, extras):
377 377 super().__init__(repo_name, extras)
378 378 self.pushed_commit_ids = pushed_commit_ids
379 379 self.new_refs = extras.new_refs
380 380
381 381 def as_dict(self):
382 382 data = super().as_dict()
383 383
384 384 def branch_url(branch_name):
385 385 return '{}/changelog?branch={}'.format(
386 386 data['repo']['url'], branch_name)
387 387
388 388 def tag_url(tag_name):
389 389 return '{}/files/{}/'.format(
390 390 data['repo']['url'], tag_name)
391 391
392 392 commits = _commits_as_dict(
393 393 self, commit_ids=self.pushed_commit_ids, repos=[self.repo])
394 394
395 395 last_branch = None
396 396 for commit in reversed(commits):
397 397 commit['branch'] = commit['branch'] or last_branch
398 398 last_branch = commit['branch']
399 399 issues = _issues_as_dict(commits)
400 400
401 401 branches = set()
402 402 tags = set()
403 403 for commit in commits:
404 404 if commit['refs']['tags']:
405 405 for tag in commit['refs']['tags']:
406 406 tags.add(tag)
407 407 if commit['branch']:
408 408 branches.add(commit['branch'])
409 409
410 410 # maybe we have branches in new_refs ?
411 411 try:
412 412 branches = branches.union(set(self.new_refs['branches']))
413 413 except Exception:
414 414 pass
415 415
416 416 branches = [
417 417 {
418 418 'name': branch,
419 419 'url': branch_url(branch)
420 420 }
421 421 for branch in branches
422 422 ]
423 423
424 424 # maybe we have tags in new_refs ?
425 425 try:
426 426 tags = tags.union(set(self.new_refs['tags']))
427 427 except Exception:
428 428 pass
429 429
430 430 tags = [
431 431 {
432 432 'name': tag,
433 433 'url': tag_url(tag)
434 434 }
435 435 for tag in tags
436 436 ]
437 437
438 438 data['push'] = {
439 439 'commits': commits,
440 440 'issues': issues,
441 441 'branches': branches,
442 442 'tags': tags,
443 443 }
444 444 return data
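The behavioural change in this file is that RepoEvent now accepts an optional actor and forwards it to the base event, so repository events fired from background jobs can still name the user that triggered them. A short usage sketch; the subclass, repo and db_user below are assumptions for illustration, not code from this commit:

    # Illustrative only: real subclasses (RepoCreateEvent etc.) define their
    # own constructor arguments on top of RepoEvent(repo, actor=None).
    class BaseEvent:
        def __init__(self, actor=None):
            self._actor = actor

    class RepoEventSketch(BaseEvent):
        def __init__(self, repo, actor=None):
            super().__init__(actor=actor)   # actor is forwarded, as in the diff
            self.repo = repo

    def celery_job(repo, db_user):
        # no web request exists in a worker, so the acting user travels
        # along with the event instead of being read from the request
        return RepoEventSketch(repo, actor=db_user)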
@@ -1,2538 +1,2538 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 authentication and permission libraries
21 21 """
22 22
23 23 import os
24 24
25 25 import time
26 26 import collections
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import random
31 31 import traceback
32 32 from functools import wraps
33 33 import bcrypt
34 34 import ipaddress
35 35
36 36 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
37 37 from sqlalchemy.orm.exc import ObjectDeletedError
38 38 from sqlalchemy.orm import joinedload
39 39 from zope.cachedescriptors.property import Lazy as LazyProperty
40 40
41 41 import rhodecode
42 42 from rhodecode.model import meta
43 43 from rhodecode.model.meta import Session
44 44 from rhodecode.model.user import UserModel
45 45 from rhodecode.model.db import (
46 46 false, User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
47 47 UserIpMap, UserApiKeys, RepoGroup, UserGroup, UserNotice)
48 48 from rhodecode.lib import rc_cache
49 49 from rhodecode.lib.utils import (
50 50 get_repo_slug, get_repo_group_slug, get_user_group_slug)
51 51 from rhodecode.lib.type_utils import aslist
52 52 from rhodecode.lib.hash_utils import sha1, sha256, md5
53 53 from rhodecode.lib.str_utils import ascii_bytes, safe_str, safe_int, safe_bytes
54 54 from rhodecode.lib.caching_query import FromCache
55 55
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59 csrf_token_key = "csrf_token"
60 60
61 61
62 62 class PasswordGenerator(object):
63 63 """
64 64 This is a simple class for generating password from different sets of
65 65 characters
66 66 usage::
67 67 passwd_gen = PasswordGenerator()
68 68 # print an 8-letter password containing only big and small
69 69 # letters of the alphabet
70 70 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
71 71 """
72 72 ALPHABETS_NUM = r'''1234567890'''
73 73 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
74 74 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
75 75 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
76 76 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
77 77 + ALPHABETS_NUM + ALPHABETS_SPECIAL
78 78 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
79 79 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
80 80 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
81 81 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
82 82
83 83 def __init__(self, passwd=''):
84 84 self.passwd = passwd
85 85
86 86 def gen_password(self, length, type_=None):
87 87 if type_ is None:
88 88 type_ = self.ALPHABETS_FULL
89 89 self.passwd = ''.join([random.choice(type_) for _ in range(length)])
90 90 return self.passwd
91 91
92 92
93 93 class _RhodeCodeCryptoBase(object):
94 94 ENC_PREF = None
95 95
96 96 def hash_create(self, str_):
97 97 """
98 98 hash the string using the configured crypto backend
99 99
100 100 :param str_: password to hash
101 101 """
102 102 raise NotImplementedError
103 103
104 104 def hash_check_with_upgrade(self, password: bytes, hashed: bytes):
105 105 """
106 106 Returns a tuple whose first element is a boolean stating whether the
107 107 given password matches its hashed version, and whose second element is a
108 108 new hash of the password, in case this password should be migrated to a
109 109 new cipher.
110 110 """
111 111 self._assert_bytes(password)
112 112 checked_hash = self.hash_check(password, hashed)
113 113 return checked_hash, None
114 114
115 115 def hash_check(self, password, hashed):
116 116 """
117 117 Checks whether the password matches its hashed value.
118 118
119 119 :param password: password
120 120 :param hashed: password in hashed form
121 121 """
122 122 raise NotImplementedError
123 123
124 124 @classmethod
125 125 def _assert_bytes(cls, value):
126 126 """
127 127 Passing in a `unicode` object can lead to hard-to-detect issues
128 128 if passwords contain non-ascii characters. Doing a type check
129 129 at runtime, so that such mistakes are detected early on.
130 130 """
131 131 if not isinstance(value, bytes):
132 132 raise TypeError(f"Bytestring required as input, got {type(value)}.")
133 133
134 134
135 135 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
136 136 ENC_PREF = ('$2a$10', '$2b$10')
137 137
138 138 def hash_create(self, str_):
139 139 self._assert_bytes(str_)
140 140 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
141 141
142 142 def hash_check_with_upgrade(self, password: bytes, hashed: bytes):
143 143 """
143 143 Returns a tuple whose first element is a boolean stating whether the
144 144 given password matches its hashed version, and whose second element is a
145 145 new hash of the password, in case this password should be migrated to a
146 146 new cipher.
148 148
149 149 This implements special upgrade logic which works like this:
150 150 - check if the given password == bcrypted hash; if yes, the correct
151 151 password was used and it was already in bcrypt. Proceed
152 152 without any changes
153 153 - if the bcrypt hash check fails, try sha256. If that hash compare
154 154 is ok, it means the correct but old hashed password was used. Indicate
155 155 a hash change and proceed
156 156 """
157 157 self._assert_bytes(password)
158 158 new_hash = None
159 159
160 160 # regular pw check
161 161 password_match_bcrypt = self.hash_check(password, hashed)
162 162
163 163 # now we want to know if the password was maybe from sha256
164 164 # basically calling _RhodeCodeCryptoSha256().hash_check()
165 165 if not password_match_bcrypt:
166 166 if _RhodeCodeCryptoSha256().hash_check(password, hashed): # match by OLD algo
167 167 new_hash = self.hash_create(password) # make new bcrypt hash, aka "migrate" hash
168 168 password_match_bcrypt = True
169 169
170 170 return password_match_bcrypt, new_hash
171 171
172 172 def hash_check(self, password: bytes, hashed: bytes) -> bool:
173 173 """
174 174 Checks whether the password matches its hashed value.
175 175
176 176 :param password: password
177 177 :param hashed: password in hashed form
178 178 """
179 179 self._assert_bytes(password)
180 180 try:
181 181 return bcrypt.hashpw(password, hashed) == hashed
182 182 except ValueError as e:
183 183 # we probably have an invalid salt here; we should not crash,
184 184 # just return False as it would be for a wrong password.
185 185 log.debug('Failed to check password hash using bcrypt %s',
186 186 safe_str(e))
187 187
188 188 return False
189 189
190 190
191 191 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
192 192 """
193 193 Legacy CryptoBackend used in OLD versions on Windows. Now it's just here to
194 194 migrate passwords to the new bcrypt backend
195 195 """
196 196 ENC_PREF = '_'
197 197
198 198 def hash_create(self, str_):
199 199 self._assert_bytes(str_)
200 200 return sha256(str_)
201 201
202 202 def hash_check(self, password: bytes, hashed: bytes) -> bool:
203 203 """
204 204 Checks whether the password matches its hashed value.
205 205
206 206 :param password: password
207 207 :param hashed: password in hashed form
208 208 """
209 209 self._assert_bytes(password)
210 210 return sha256(password) == hashed
211 211
212 212
213 213 class _RhodeCodeCryptoTest(_RhodeCodeCryptoBase):
214 214 ENC_PREF = '_'
215 215
216 216 def hash_create(self, str_):
217 217 self._assert_bytes(str_)
218 218 return sha1(str_)
219 219
220 220 def hash_check(self, password: bytes, hashed: bytes) -> bool:
221 221 """
222 222 Checks whether the password matches its hashed value.
223 223
224 224 :param password: password
225 225 :param hashed: password in hashed form
226 226 """
227 227 self._assert_bytes(password)
228 228 return sha1(password) == hashed
229 229
230 230
231 231 def crypto_backend():
232 232 """
233 233 Return the matching crypto backend.
234 234
235 235 Selection is based on if we run tests or not, we pick sha1-test backend to run
236 236 tests faster since BCRYPT is expensive to calculate
237 237 """
238 238 if rhodecode.is_test:
239 239 RhodeCodeCrypto = _RhodeCodeCryptoTest()
240 240 else:
241 241 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
242 242
243 243 return RhodeCodeCrypto
244 244
245 245
246 246 def get_crypt_password(password):
247 247 """
248 248 Create the hash of `password` with the active crypto backend.
249 249
250 250 :param password: The cleartext password.
251 251 """
252 252 return crypto_backend().hash_create(safe_bytes(password))
253 253
254 254
255 255 def check_password(password, hashed):
256 256 """
257 257 Check if the value in `password` matches the hash in `hashed`.
258 258
259 259 :param password: The cleartext password.
260 260 :type password: unicode
261 261
262 262 :param hashed: The expected hashed version of the password.
263 263 :type hashed: The hash has to be passed in in text representation.
264 264 """
265 265 password = safe_bytes(password)
266 266 return crypto_backend().hash_check(password, hashed)
267 267
268 268
269 269 def generate_auth_token(data, salt=None):
270 270 """
271 271 Generates API KEY from given string
272 272 """
273 273
274 274 if salt is None:
275 275 salt = os.urandom(16)
276 276 token = safe_bytes(data) + safe_bytes(salt)
277 277 return sha1(token)
278 278
279 279
280 280 def get_came_from(request):
281 281 """
282 282 get query_string+path from request sanitized after removing auth_token
283 283 """
284 284 _req = request
285 285
286 286 path = _req.path
287 287 if 'auth_token' in _req.GET:
288 288 # sanitize the request and remove auth_token for redirection
289 289 _req.GET.pop('auth_token')
290 290 qs = _req.query_string
291 291 if qs:
292 292 path += '?' + qs
293 293
294 294 return path
295 295
296 296
297 297 class CookieStoreWrapper(object):
298 298
299 299 def __init__(self, cookie_store):
300 300 self.cookie_store = cookie_store
301 301
302 302 def __repr__(self):
303 303 return f'CookieStore<{self.cookie_store}>'
304 304
305 305 def get(self, key, other=None):
306 306 if isinstance(self.cookie_store, dict):
307 307 return self.cookie_store.get(key, other)
308 308 elif isinstance(self.cookie_store, AuthUser):
309 309 return self.cookie_store.__dict__.get(key, other)
310 310
311 311
312 312 def _cached_perms_data(user_id, scope, user_is_admin,
313 313 user_inherit_default_permissions, explicit, algo,
314 314 calculate_super_admin):
315 315
316 316 permissions = PermissionCalculator(
317 317 user_id, scope, user_is_admin, user_inherit_default_permissions,
318 318 explicit, algo, calculate_super_admin)
319 319 return permissions.calculate()
320 320
321 321
322 322 class PermOrigin(object):
323 323 SUPER_ADMIN = 'superadmin'
324 324 ARCHIVED = 'archived'
325 325
326 326 REPO_USER = 'user:%s'
327 327 REPO_USERGROUP = 'usergroup:%s'
328 328 REPO_OWNER = 'repo.owner'
329 329 REPO_DEFAULT = 'repo.default'
330 330 REPO_DEFAULT_NO_INHERIT = 'repo.default.no.inherit'
331 331 REPO_PRIVATE = 'repo.private'
332 332
333 333 REPOGROUP_USER = 'user:%s'
334 334 REPOGROUP_USERGROUP = 'usergroup:%s'
335 335 REPOGROUP_OWNER = 'group.owner'
336 336 REPOGROUP_DEFAULT = 'group.default'
337 337 REPOGROUP_DEFAULT_NO_INHERIT = 'group.default.no.inherit'
338 338
339 339 USERGROUP_USER = 'user:%s'
340 340 USERGROUP_USERGROUP = 'usergroup:%s'
341 341 USERGROUP_OWNER = 'usergroup.owner'
342 342 USERGROUP_DEFAULT = 'usergroup.default'
343 343 USERGROUP_DEFAULT_NO_INHERIT = 'usergroup.default.no.inherit'
344 344
345 345
346 346 class PermOriginDict(dict):
347 347 """
348 348 A special dict used for tracking permissions along with their origins.
349 349
350 350 `__setitem__` has been overridden to expect a tuple(perm, origin)
351 351 `__getitem__` will return only the perm
352 352 `.perm_origin_stack` will return the stack of (perm, origin) set per key
353 353
354 354 >>> perms = PermOriginDict()
355 355 >>> perms['resource'] = 'read', 'default', 1
356 356 >>> perms['resource']
357 357 'read'
358 358 >>> perms['resource'] = 'write', 'admin', 2
359 359 >>> perms['resource']
360 360 'write'
361 361 >>> perms.perm_origin_stack
362 362 {'resource': [('read', 'default', 1), ('write', 'admin', 2)]}
363 363 """
364 364
365 365 def __init__(self, *args, **kw):
366 366 dict.__init__(self, *args, **kw)
367 367 self.perm_origin_stack = collections.OrderedDict()
368 368
369 369 def __setitem__(self, key, perm_origin_obj_id):
370 370 # set (most likely via pickle) key:val pair without tuple
371 371 if not isinstance(perm_origin_obj_id, tuple):
372 372 perm = perm_origin_obj_id
373 373 dict.__setitem__(self, key, perm)
374 374 else:
375 375 # unpack if we create a key from tuple
376 376 (perm, origin, obj_id) = perm_origin_obj_id
377 377 self.perm_origin_stack.setdefault(key, []).append((perm, origin, obj_id))
378 378 dict.__setitem__(self, key, perm)
379 379
380 380
381 381 class BranchPermOriginDict(dict):
382 382 """
383 383 Dedicated branch permissions dict, with tracking of patterns and origins.
384 384
385 385 >>> perms = BranchPermOriginDict()
386 386 >>> perms['resource'] = '*pattern', 'read', 'default'
387 387 >>> perms['resource']
388 388 {'*pattern': 'read'}
389 389 >>> perms['resource'] = '*pattern', 'write', 'admin'
390 390 >>> perms['resource']
391 391 {'*pattern': 'write'}
392 392 >>> perms.perm_origin_stack
393 393 {'resource': {'*pattern': [('read', 'default'), ('write', 'admin')]}}
394 394 """
395 395 def __init__(self, *args, **kw):
396 396 dict.__init__(self, *args, **kw)
397 397 self.perm_origin_stack = collections.OrderedDict()
398 398
399 399 def __setitem__(self, key, pattern_perm_origin):
400 400 # set (most likely via pickle) key:val pair without tuple
401 401 if not isinstance(pattern_perm_origin, tuple):
402 402 pattern_perm = pattern_perm_origin
403 403 dict.__setitem__(self, key, pattern_perm)
404 404
405 405 else:
406 406 (pattern_perm, origin) = pattern_perm_origin
407 407 # we're passing in the dict, so we save the stack
408 408 for pattern, perm in list(pattern_perm.items()):
409 409 self.perm_origin_stack.setdefault(key, {})\
410 410 .setdefault(pattern, []).append((perm, origin))
411 411
412 412 dict.__setitem__(self, key, pattern_perm)
413 413
414 414
415 415 class PermissionCalculator(object):
416 416
417 417 def __init__(
418 418 self, user_id, scope, user_is_admin,
419 419 user_inherit_default_permissions, explicit, algo,
420 420 calculate_super_admin_as_user=False):
421 421
422 422 self.user_id = user_id
423 423 self.user_is_admin = user_is_admin
424 424 self.inherit_default_permissions = user_inherit_default_permissions
425 425 self.explicit = explicit
426 426 self.algo = algo
427 427 self.calculate_super_admin_as_user = calculate_super_admin_as_user
428 428
429 429 scope = scope or {}
430 430 self.scope_repo_id = scope.get('repo_id')
431 431 self.scope_repo_group_id = scope.get('repo_group_id')
432 432 self.scope_user_group_id = scope.get('user_group_id')
433 433
434 434 self.default_user_id = User.get_default_user(cache=True).user_id
435 435
436 436 self.permissions_repositories = PermOriginDict()
437 437 self.permissions_repository_groups = PermOriginDict()
438 438 self.permissions_user_groups = PermOriginDict()
439 439 self.permissions_repository_branches = BranchPermOriginDict()
440 440 self.permissions_global = set()
441 441
442 442 self.default_repo_perms = Permission.get_default_repo_perms(
443 443 self.default_user_id, self.scope_repo_id)
444 444 self.default_repo_groups_perms = Permission.get_default_group_perms(
445 445 self.default_user_id, self.scope_repo_group_id)
446 446 self.default_user_group_perms = \
447 447 Permission.get_default_user_group_perms(
448 448 self.default_user_id, self.scope_user_group_id)
449 449
450 450 # default branch perms
451 451 self.default_branch_repo_perms = \
452 452 Permission.get_default_repo_branch_perms(
453 453 self.default_user_id, self.scope_repo_id)
454 454
455 455 def calculate(self):
456 456 if self.user_is_admin and not self.calculate_super_admin_as_user:
457 457 return self._calculate_super_admin_permissions()
458 458
459 459 self._calculate_global_default_permissions()
460 460 self._calculate_global_permissions()
461 461 self._calculate_default_permissions()
462 462 self._calculate_repository_permissions()
463 463 self._calculate_repository_branch_permissions()
464 464 self._calculate_repository_group_permissions()
465 465 self._calculate_user_group_permissions()
466 466 return self._permission_structure()
467 467
468 468 def _calculate_super_admin_permissions(self):
469 469 """
470 470 super-admin users have all default rights for repositories
471 471 and groups set to admin
472 472 """
473 473 self.permissions_global.add('hg.admin')
474 474 self.permissions_global.add('hg.create.write_on_repogroup.true')
475 475
476 476 # repositories
477 477 for perm in self.default_repo_perms:
478 478 r_k = perm.UserRepoToPerm.repository.repo_name
479 479 obj_id = perm.UserRepoToPerm.repository.repo_id
480 480 archived = perm.UserRepoToPerm.repository.archived
481 481 p = 'repository.admin'
482 482 self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN, obj_id
483 483 # special case for archived repositories, which we block still even for
484 484 # super admins
485 485 if archived:
486 486 p = 'repository.read'
487 487 self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED, obj_id
488 488
489 489 # repository groups
490 490 for perm in self.default_repo_groups_perms:
491 491 rg_k = perm.UserRepoGroupToPerm.group.group_name
492 492 obj_id = perm.UserRepoGroupToPerm.group.group_id
493 493 p = 'group.admin'
494 494 self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN, obj_id
495 495
496 496 # user groups
497 497 for perm in self.default_user_group_perms:
498 498 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
499 499 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
500 500 p = 'usergroup.admin'
501 501 self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN, obj_id
502 502
503 503 # branch permissions
504 504 # since super-admin also can have custom rule permissions
505 505 # we *always* need to calculate those inherited from default, and also explicit
506 506 self._calculate_default_permissions_repository_branches(
507 507 user_inherit_object_permissions=False)
508 508 self._calculate_repository_branch_permissions()
509 509
510 510 return self._permission_structure()
511 511
512 512 def _calculate_global_default_permissions(self):
513 513 """
514 514 global permissions taken from the default user
515 515 """
516 516 default_global_perms = UserToPerm.query()\
517 517 .filter(UserToPerm.user_id == self.default_user_id)\
518 518 .options(joinedload(UserToPerm.permission))
519 519
520 520 for perm in default_global_perms:
521 521 self.permissions_global.add(perm.permission.permission_name)
522 522
523 523 if self.user_is_admin:
524 524 self.permissions_global.add('hg.admin')
525 525 self.permissions_global.add('hg.create.write_on_repogroup.true')
526 526
527 527 def _calculate_global_permissions(self):
528 528 """
529 529 Set global system permissions with user permissions or permissions
530 530 taken from the user groups of the current user.
531 531
532 532 The permissions include repo creating, repo group creating, forking
533 533 etc.
534 534 """
535 535
536 536 # now we read the defined permissions and overwrite what we have set
537 537 # before those can be configured from groups or users explicitly.
538 538
539 539 # In case we want to extend this list we should make sure
540 540 # this is in sync with User.DEFAULT_USER_PERMISSIONS definitions
541 541 from rhodecode.model.permission import PermissionModel
542 542
543 543 _configurable = frozenset([
544 544 PermissionModel.FORKING_DISABLED, PermissionModel.FORKING_ENABLED,
545 545 'hg.create.none', 'hg.create.repository',
546 546 'hg.usergroup.create.false', 'hg.usergroup.create.true',
547 547 'hg.repogroup.create.false', 'hg.repogroup.create.true',
548 548 'hg.create.write_on_repogroup.false', 'hg.create.write_on_repogroup.true',
549 549 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
550 550 ])
551 551
552 552 # USER GROUPS comes first user group global permissions
553 553 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
554 554 .options(joinedload(UserGroupToPerm.permission))\
555 555 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
556 556 UserGroupMember.users_group_id))\
557 557 .filter(UserGroupMember.user_id == self.user_id)\
558 558 .order_by(UserGroupToPerm.users_group_id)\
559 559 .all()
560 560
561 561 # need to group here by groups since user can be in more than
562 562 # one group, so we get all groups
563 563 _explicit_grouped_perms = [
564 564 [x, list(y)] for x, y in
565 565 itertools.groupby(user_perms_from_users_groups,
566 566 lambda _x: _x.users_group)]
567 567
568 568 for gr, perms in _explicit_grouped_perms:
569 569 # since user can be in multiple groups iterate over them and
570 570 # select the lowest permissions first (more explicit)
571 571 # TODO(marcink): do this^^
572 572
573 573 # group doesn't inherit default permissions so we actually set them
574 574 if not gr.inherit_default_permissions:
575 575 # NEED TO IGNORE all previously set configurable permissions
576 576 # and replace them with explicitly set from this user
577 577 # group permissions
578 578 self.permissions_global = self.permissions_global.difference(
579 579 _configurable)
580 580 for perm in perms:
581 581 self.permissions_global.add(perm.permission.permission_name)
582 582
583 583 # user explicit global permissions
584 584 user_perms = Session().query(UserToPerm)\
585 585 .options(joinedload(UserToPerm.permission))\
586 586 .filter(UserToPerm.user_id == self.user_id).all()
587 587
588 588 if not self.inherit_default_permissions:
589 589 # NEED TO IGNORE all configurable permissions and
590 590 # replace them with explicitly set from this user permissions
591 591 self.permissions_global = self.permissions_global.difference(
592 592 _configurable)
593 593 for perm in user_perms:
594 594 self.permissions_global.add(perm.permission.permission_name)
595 595
596 596 def _calculate_default_permissions_repositories(self, user_inherit_object_permissions):
597 597 for perm in self.default_repo_perms:
598 598 r_k = perm.UserRepoToPerm.repository.repo_name
599 599 obj_id = perm.UserRepoToPerm.repository.repo_id
600 600 archived = perm.UserRepoToPerm.repository.archived
601 601 p = perm.Permission.permission_name
602 602 o = PermOrigin.REPO_DEFAULT
603 603 self.permissions_repositories[r_k] = p, o, obj_id
604 604
605 605 # if we decide this user isn't inheriting permissions from
606 606 # default user we set him to .none so only explicit
607 607 # permissions work
608 608 if not user_inherit_object_permissions:
609 609 p = 'repository.none'
610 610 o = PermOrigin.REPO_DEFAULT_NO_INHERIT
611 611 self.permissions_repositories[r_k] = p, o, obj_id
612 612
613 613 if perm.Repository.private and not (
614 614 perm.Repository.user_id == self.user_id):
615 615 # disable defaults for private repos,
616 616 p = 'repository.none'
617 617 o = PermOrigin.REPO_PRIVATE
618 618 self.permissions_repositories[r_k] = p, o, obj_id
619 619
620 620 elif perm.Repository.user_id == self.user_id:
621 621 # set admin if owner
622 622 p = 'repository.admin'
623 623 o = PermOrigin.REPO_OWNER
624 624 self.permissions_repositories[r_k] = p, o, obj_id
625 625
626 626 if self.user_is_admin:
627 627 p = 'repository.admin'
628 628 o = PermOrigin.SUPER_ADMIN
629 629 self.permissions_repositories[r_k] = p, o, obj_id
630 630
631 631 # finally in case of archived repositories, we downgrade higher
632 632 # permissions to read
633 633 if archived:
634 634 current_perm = self.permissions_repositories[r_k]
635 635 if current_perm in ['repository.write', 'repository.admin']:
636 636 p = 'repository.read'
637 637 o = PermOrigin.ARCHIVED
638 638 self.permissions_repositories[r_k] = p, o, obj_id
639 639
640 640 def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions):
641 641 for perm in self.default_branch_repo_perms:
642 642
643 643 r_k = perm.UserRepoToPerm.repository.repo_name
644 644 p = perm.Permission.permission_name
645 645 pattern = perm.UserToRepoBranchPermission.branch_pattern
646 646 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
647 647
648 648 if not self.explicit:
649 649 cur_perm = self.permissions_repository_branches.get(r_k)
650 650 if cur_perm:
651 651 cur_perm = cur_perm[pattern]
652 652 cur_perm = cur_perm or 'branch.none'
653 653
654 654 p = self._choose_permission(p, cur_perm)
655 655
656 656 # NOTE(marcink): register all pattern/perm instances in this
657 657 # special dict that aggregates entries
658 658 self.permissions_repository_branches[r_k] = {pattern: p}, o
659 659
660 660 def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions):
661 661 for perm in self.default_repo_groups_perms:
662 662 rg_k = perm.UserRepoGroupToPerm.group.group_name
663 663 obj_id = perm.UserRepoGroupToPerm.group.group_id
664 664 p = perm.Permission.permission_name
665 665 o = PermOrigin.REPOGROUP_DEFAULT
666 666 self.permissions_repository_groups[rg_k] = p, o, obj_id
667 667
668 668 # if we decide this user isn't inheriting permissions from default
669 669 # user we set him to .none so only explicit permissions work
670 670 if not user_inherit_object_permissions:
671 671 p = 'group.none'
672 672 o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
673 673 self.permissions_repository_groups[rg_k] = p, o, obj_id
674 674
675 675 if perm.RepoGroup.user_id == self.user_id:
676 676 # set admin if owner
677 677 p = 'group.admin'
678 678 o = PermOrigin.REPOGROUP_OWNER
679 679 self.permissions_repository_groups[rg_k] = p, o, obj_id
680 680
681 681 if self.user_is_admin:
682 682 p = 'group.admin'
683 683 o = PermOrigin.SUPER_ADMIN
684 684 self.permissions_repository_groups[rg_k] = p, o, obj_id
685 685
686 686 def _calculate_default_permissions_user_groups(self, user_inherit_object_permissions):
687 687 for perm in self.default_user_group_perms:
688 688 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
689 689 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
690 690 p = perm.Permission.permission_name
691 691 o = PermOrigin.USERGROUP_DEFAULT
692 692 self.permissions_user_groups[u_k] = p, o, obj_id
693 693
694 694 # if we decide this user isn't inheriting permissions from default
695 695 # user we set him to .none so only explicit permissions work
696 696 if not user_inherit_object_permissions:
697 697 p = 'usergroup.none'
698 698 o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
699 699 self.permissions_user_groups[u_k] = p, o, obj_id
700 700
701 701 if perm.UserGroup.user_id == self.user_id:
702 702 # set admin if owner
703 703 p = 'usergroup.admin'
704 704 o = PermOrigin.USERGROUP_OWNER
705 705 self.permissions_user_groups[u_k] = p, o, obj_id
706 706
707 707 if self.user_is_admin:
708 708 p = 'usergroup.admin'
709 709 o = PermOrigin.SUPER_ADMIN
710 710 self.permissions_user_groups[u_k] = p, o, obj_id
711 711
712 712 def _calculate_default_permissions(self):
713 713 """
714 714 Set default user permissions for repositories, repository branches,
715 715 repository groups, user groups taken from the default user.
716 716
717 717 Calculate inheritance of object permissions based on what we have now
718 718 in GLOBAL permissions. We check if .false is in GLOBAL since this is
719 719 explicitly set. Inherit is the opposite of .false being there.
720 720
721 721 .. note::
722 722
723 723 the syntax is a little odd, but what we need to check here is
724 724 the opposite of the .false permission being in the list, so even in
725 725 an inconsistent state where both .true and .false are present,
726 726 .false is more important
727 727
728 728 """
729 729 user_inherit_object_permissions = (
730 730 'hg.inherit_default_perms.false' not in self.permissions_global)
731 731
732 732 # default permissions inherited from `default` user permissions
733 733 self._calculate_default_permissions_repositories(
734 734 user_inherit_object_permissions)
735 735
736 736 self._calculate_default_permissions_repository_branches(
737 737 user_inherit_object_permissions)
738 738
739 739 self._calculate_default_permissions_repository_groups(
740 740 user_inherit_object_permissions)
741 741
742 742 self._calculate_default_permissions_user_groups(
743 743 user_inherit_object_permissions)
744 744
745 745 def _calculate_repository_permissions(self):
746 746 """
747 747 Repository access permissions for the current user.
748 748
749 749 Check if the user is part of user groups for this repository and
750 750 fill in the permission from it. `_choose_permission` decides which
751 751 permission should be selected based on the selected method.
752 752 """
753 753
754 754 # user group for repositories permissions
755 755 user_repo_perms_from_user_group = Permission\
756 756 .get_default_repo_perms_from_user_group(
757 757 self.user_id, self.scope_repo_id)
758 758
759 759 multiple_counter = collections.defaultdict(int)
760 760 for perm in user_repo_perms_from_user_group:
761 761 r_k = perm.UserGroupRepoToPerm.repository.repo_name
762 762 obj_id = perm.UserGroupRepoToPerm.repository.repo_id
763 763 multiple_counter[r_k] += 1
764 764 p = perm.Permission.permission_name
765 765 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
766 766 .users_group.users_group_name
767 767
768 768 if multiple_counter[r_k] > 1:
769 769 cur_perm = self.permissions_repositories[r_k]
770 770 p = self._choose_permission(p, cur_perm)
771 771
772 772 self.permissions_repositories[r_k] = p, o, obj_id
773 773
774 774 if perm.Repository.user_id == self.user_id:
775 775 # set admin if owner
776 776 p = 'repository.admin'
777 777 o = PermOrigin.REPO_OWNER
778 778 self.permissions_repositories[r_k] = p, o, obj_id
779 779
780 780 if self.user_is_admin:
781 781 p = 'repository.admin'
782 782 o = PermOrigin.SUPER_ADMIN
783 783 self.permissions_repositories[r_k] = p, o, obj_id
784 784
785 785 # user explicit permissions for repositories, overrides any specified
786 786 # by the group permission
787 787 user_repo_perms = Permission.get_default_repo_perms(
788 788 self.user_id, self.scope_repo_id)
789 789 for perm in user_repo_perms:
790 790 r_k = perm.UserRepoToPerm.repository.repo_name
791 791 obj_id = perm.UserRepoToPerm.repository.repo_id
792 792 archived = perm.UserRepoToPerm.repository.archived
793 793 p = perm.Permission.permission_name
794 794 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
795 795
796 796 if not self.explicit:
797 797 cur_perm = self.permissions_repositories.get(
798 798 r_k, 'repository.none')
799 799 p = self._choose_permission(p, cur_perm)
800 800
801 801 self.permissions_repositories[r_k] = p, o, obj_id
802 802
803 803 if perm.Repository.user_id == self.user_id:
804 804 # set admin if owner
805 805 p = 'repository.admin'
806 806 o = PermOrigin.REPO_OWNER
807 807 self.permissions_repositories[r_k] = p, o, obj_id
808 808
809 809 if self.user_is_admin:
810 810 p = 'repository.admin'
811 811 o = PermOrigin.SUPER_ADMIN
812 812 self.permissions_repositories[r_k] = p, o, obj_id
813 813
814 814 # finally in case of archived repositories, we downgrade higher
815 815 # permissions to read
816 816 if archived:
817 817 current_perm = self.permissions_repositories[r_k]
818 818 if current_perm in ['repository.write', 'repository.admin']:
819 819 p = 'repository.read'
820 820 o = PermOrigin.ARCHIVED
821 821 self.permissions_repositories[r_k] = p, o, obj_id
822 822
823 823 def _calculate_repository_branch_permissions(self):
824 824 # user group for repositories permissions
825 825 user_repo_branch_perms_from_user_group = Permission\
826 826 .get_default_repo_branch_perms_from_user_group(
827 827 self.user_id, self.scope_repo_id)
828 828
829 829 multiple_counter = collections.defaultdict(int)
830 830 for perm in user_repo_branch_perms_from_user_group:
831 831 r_k = perm.UserGroupRepoToPerm.repository.repo_name
832 832 p = perm.Permission.permission_name
833 833 pattern = perm.UserGroupToRepoBranchPermission.branch_pattern
834 834 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
835 835 .users_group.users_group_name
836 836
837 837 multiple_counter[r_k] += 1
838 838 if multiple_counter[r_k] > 1:
839 839 cur_perm = self.permissions_repository_branches[r_k][pattern]
840 840 p = self._choose_permission(p, cur_perm)
841 841
842 842 self.permissions_repository_branches[r_k] = {pattern: p}, o
843 843
844 844 # user explicit branch permissions for repositories, overrides
845 845 # any specified by the group permission
846 846 user_repo_branch_perms = Permission.get_default_repo_branch_perms(
847 847 self.user_id, self.scope_repo_id)
848 848
849 849 for perm in user_repo_branch_perms:
850 850
851 851 r_k = perm.UserRepoToPerm.repository.repo_name
852 852 p = perm.Permission.permission_name
853 853 pattern = perm.UserToRepoBranchPermission.branch_pattern
854 854 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
855 855
856 856 if not self.explicit:
857 857 cur_perm = self.permissions_repository_branches.get(r_k)
858 858 if cur_perm:
859 859 cur_perm = cur_perm[pattern]
860 860 cur_perm = cur_perm or 'branch.none'
861 861 p = self._choose_permission(p, cur_perm)
862 862
863 863 # NOTE(marcink): register all pattern/perm instances in this
864 864 # special dict that aggregates entries
865 865 self.permissions_repository_branches[r_k] = {pattern: p}, o
866 866
867 867 def _calculate_repository_group_permissions(self):
868 868 """
869 869 Repository group permissions for the current user.
870 870
871 871 Check if the user is part of user groups for repository groups and
872 872 fill in the permissions from it. `_choose_permission` decides which
873 873 permission should be selected based on the selected method.
874 874 """
875 875 # user group for repo groups permissions
876 876 user_repo_group_perms_from_user_group = Permission\
877 877 .get_default_group_perms_from_user_group(
878 878 self.user_id, self.scope_repo_group_id)
879 879
880 880 multiple_counter = collections.defaultdict(int)
881 881 for perm in user_repo_group_perms_from_user_group:
882 882 rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
883 883 obj_id = perm.UserGroupRepoGroupToPerm.group.group_id
884 884 multiple_counter[rg_k] += 1
885 885 o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
886 886 .users_group.users_group_name
887 887 p = perm.Permission.permission_name
888 888
889 889 if multiple_counter[rg_k] > 1:
890 890 cur_perm = self.permissions_repository_groups[rg_k]
891 891 p = self._choose_permission(p, cur_perm)
892 892 self.permissions_repository_groups[rg_k] = p, o, obj_id
893 893
894 894 if perm.RepoGroup.user_id == self.user_id:
895 895 # set admin if owner, even for member of other user group
896 896 p = 'group.admin'
897 897 o = PermOrigin.REPOGROUP_OWNER
898 898 self.permissions_repository_groups[rg_k] = p, o, obj_id
899 899
900 900 if self.user_is_admin:
901 901 p = 'group.admin'
902 902 o = PermOrigin.SUPER_ADMIN
903 903 self.permissions_repository_groups[rg_k] = p, o, obj_id
904 904
905 905 # user explicit permissions for repository groups
906 906 user_repo_groups_perms = Permission.get_default_group_perms(
907 907 self.user_id, self.scope_repo_group_id)
908 908 for perm in user_repo_groups_perms:
909 909 rg_k = perm.UserRepoGroupToPerm.group.group_name
910 910 obj_id = perm.UserRepoGroupToPerm.group.group_id
911 911 o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
912 912 .user.username
913 913 p = perm.Permission.permission_name
914 914
915 915 if not self.explicit:
916 916 cur_perm = self.permissions_repository_groups.get(rg_k, 'group.none')
917 917 p = self._choose_permission(p, cur_perm)
918 918
919 919 self.permissions_repository_groups[rg_k] = p, o, obj_id
920 920
921 921 if perm.RepoGroup.user_id == self.user_id:
922 922 # set admin if owner
923 923 p = 'group.admin'
924 924 o = PermOrigin.REPOGROUP_OWNER
925 925 self.permissions_repository_groups[rg_k] = p, o, obj_id
926 926
927 927 if self.user_is_admin:
928 928 p = 'group.admin'
929 929 o = PermOrigin.SUPER_ADMIN
930 930 self.permissions_repository_groups[rg_k] = p, o, obj_id
931 931
932 932 def _calculate_user_group_permissions(self):
933 933 """
934 934 User group permissions for the current user.
935 935 """
936 936 # user group for user group permissions
937 937 user_group_from_user_group = Permission\
938 938 .get_default_user_group_perms_from_user_group(
939 939 self.user_id, self.scope_user_group_id)
940 940
941 941 multiple_counter = collections.defaultdict(int)
942 942 for perm in user_group_from_user_group:
943 943 ug_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
944 944 obj_id = perm.UserGroupUserGroupToPerm.target_user_group.users_group_id
945 945 multiple_counter[ug_k] += 1
946 946 o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
947 947 .user_group.users_group_name
948 948 p = perm.Permission.permission_name
949 949
950 950 if multiple_counter[ug_k] > 1:
951 951 cur_perm = self.permissions_user_groups[ug_k]
952 952 p = self._choose_permission(p, cur_perm)
953 953
954 954 self.permissions_user_groups[ug_k] = p, o, obj_id
955 955
956 956 if perm.UserGroup.user_id == self.user_id:
957 957 # set admin if owner, even for member of other user group
958 958 p = 'usergroup.admin'
959 959 o = PermOrigin.USERGROUP_OWNER
960 960 self.permissions_user_groups[ug_k] = p, o, obj_id
961 961
962 962 if self.user_is_admin:
963 963 p = 'usergroup.admin'
964 964 o = PermOrigin.SUPER_ADMIN
965 965 self.permissions_user_groups[ug_k] = p, o, obj_id
966 966
967 967 # user explicit permission for user groups
968 968 user_user_groups_perms = Permission.get_default_user_group_perms(
969 969 self.user_id, self.scope_user_group_id)
970 970 for perm in user_user_groups_perms:
971 971 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
972 972 obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
973 973 o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
974 974 .user.username
975 975 p = perm.Permission.permission_name
976 976
977 977 if not self.explicit:
978 978 cur_perm = self.permissions_user_groups.get(ug_k, 'usergroup.none')
979 979 p = self._choose_permission(p, cur_perm)
980 980
981 981 self.permissions_user_groups[ug_k] = p, o, obj_id
982 982
983 983 if perm.UserGroup.user_id == self.user_id:
984 984 # set admin if owner
985 985 p = 'usergroup.admin'
986 986 o = PermOrigin.USERGROUP_OWNER
987 987 self.permissions_user_groups[ug_k] = p, o, obj_id
988 988
989 989 if self.user_is_admin:
990 990 p = 'usergroup.admin'
991 991 o = PermOrigin.SUPER_ADMIN
992 992 self.permissions_user_groups[ug_k] = p, o, obj_id
993 993
994 994 def _choose_permission(self, new_perm, cur_perm):
995 995 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
996 996 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
997 997 if self.algo == 'higherwin':
998 998 if new_perm_val > cur_perm_val:
999 999 return new_perm
1000 1000 return cur_perm
1001 1001 elif self.algo == 'lowerwin':
1002 1002 if new_perm_val < cur_perm_val:
1003 1003 return new_perm
1004 1004 return cur_perm
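    # Illustrative sketch of how _choose_permission resolves conflicts, assuming
    # the usual ordering of Permission.PERM_WEIGHTS in which
    # 'repository.none' < 'repository.read' < 'repository.write' < 'repository.admin'
    # (the actual weights live in the Permission model):
    #
    #   algo='higherwin': _choose_permission('repository.write', 'repository.read')
    #       -> 'repository.write'
    #   algo='lowerwin':  _choose_permission('repository.write', 'repository.read')
    #       -> 'repository.read'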
1005 1005
1006 1006 def _permission_structure(self):
1007 1007 return {
1008 1008 'global': self.permissions_global,
1009 1009 'repositories': self.permissions_repositories,
1010 1010 'repository_branches': self.permissions_repository_branches,
1011 1011 'repositories_groups': self.permissions_repository_groups,
1012 1012 'user_groups': self.permissions_user_groups,
1013 1013 }
1014 1014
1015 1015
1016 1016 def allowed_auth_token_access(view_name, auth_token, whitelist=None):
1017 1017 """
1018 1018 Check if given controller_name is in whitelist of auth token access
1019 1019 """
1020 1020 if not whitelist:
1021 1021 from rhodecode import CONFIG
1022 1022 whitelist = aslist(
1023 1023 CONFIG.get('api_access_controllers_whitelist'), sep=',')
1024 1024 # backward compat translation
1025 1025 compat = {
1026 1026 # old controller, new VIEW
1027 1027 'ChangesetController:*': 'RepoCommitsView:*',
1028 1028 'ChangesetController:changeset_patch': 'RepoCommitsView:repo_commit_patch',
1029 1029 'ChangesetController:changeset_raw': 'RepoCommitsView:repo_commit_raw',
1030 1030 'FilesController:raw': 'RepoCommitsView:repo_commit_raw',
1031 1031 'FilesController:archivefile': 'RepoFilesView:repo_archivefile',
1032 1032 'GistsController:*': 'GistView:*',
1033 1033 }
1034 1034
1035 1035 log.debug(
1036 1036 'Allowed views for AUTH TOKEN access: %s', whitelist)
1037 1037 auth_token_access_valid = False
1038 1038
1039 1039 for entry in whitelist:
1040 1040 token_match = True
1041 1041 if entry in compat:
1042 1042 # translate from old Controllers to Pyramid Views
1043 1043 entry = compat[entry]
1044 1044
1045 1045 if '@' in entry:
1046 1046 # specific AuthToken
1047 1047 entry, allowed_token = entry.split('@', 1)
1048 1048 token_match = auth_token == allowed_token
1049 1049
1050 1050 if fnmatch.fnmatch(view_name, entry) and token_match:
1051 1051 auth_token_access_valid = True
1052 1052 break
1053 1053
1054 1054 if auth_token_access_valid:
1055 1055 log.debug('view: `%s` matches entry in whitelist: %s',
1056 1056 view_name, whitelist)
1057 1057
1058 1058 else:
1059 1059 msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
1060 1060 % (view_name, whitelist))
1061 1061 if auth_token:
1062 1062 # if we use auth token key and don't have access it's a warning
1063 1063 log.warning(msg)
1064 1064 else:
1065 1065 log.debug(msg)
1066 1066
1067 1067 return auth_token_access_valid
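# Illustrative whitelist configuration for the check above; the token value is a
# made-up placeholder. Entries are fnmatch patterns matched against the view
# name, optionally suffixed with '@<auth_token>' to allow only that exact token:
#
#   api_access_controllers_whitelist =
#       RepoCommitsView:repo_commit_raw,
#       RepoCommitsView:repo_commit_patch@EXAMPLETOKEN,
#       GistView:*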
1068 1068
1069 1069
1070 1070 class AuthUser(object):
1071 1071 """
1072 1072 A simple object that handles all attributes of user in RhodeCode
1073 1073
1074 1074     It does a lookup based on API key, given user, or the user present in the session.
1075 1075     Then it fills in all required information for that user. It also checks if
1076 1076     anonymous access is enabled and, if so, returns the default user as logged in
1077 1077 """
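    # Minimal usage sketch (values illustrative). The lookup order applied in
    # propagate_data() is: user_id -> api_key -> username, falling back to the
    # default (anonymous) user when nothing can be loaded:
    #
    #   auth_user = AuthUser(user_id=2, ip_addr='10.0.0.1')
    #   auth_user.is_authenticated
    #   auth_user.permissions['repositories'].get('some-repo')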
1078 1078 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
1079 1079 repo_read_perms = ['repository.read', 'repository.admin', 'repository.write']
1080 1080 repo_group_read_perms = ['group.read', 'group.write', 'group.admin']
1081 1081 user_group_read_perms = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
1082 1082
1083 1083 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
1084 1084
1085 1085 self.user_id = user_id
1086 1086 self._api_key = api_key
1087 1087
1088 1088 self.api_key = None
1089 1089 self.username = username
1090 1090 self.ip_addr = ip_addr
1091 1091 self.name = ''
1092 1092 self.lastname = ''
1093 1093 self.first_name = ''
1094 1094 self.last_name = ''
1095 1095 self.email = ''
1096 1096 self.is_authenticated = False
1097 1097 self.admin = False
1098 1098 self.inherit_default_permissions = False
1099 1099 self.password = ''
1100 1100
1101 1101 self.anonymous_user = None # propagated on propagate_data
1102 1102 self.propagate_data()
1103 1103 self._instance = None
1104 1104 self._permissions_scoped_cache = {} # used to bind scoped calculation
1105 1105
1106 1106 @LazyProperty
1107 1107 def permissions(self):
1108 1108 return self.get_perms(user=self, cache=None)
1109 1109
1110 1110 @LazyProperty
1111 1111 def permissions_safe(self):
1112 1112 """
1113 1113         Filtered permissions, excluding entries for objects the user is not allowed to access (`*.none`)
1114 1114 """
1115 1115 perms = self.get_perms(user=self, cache=None)
1116 1116
1117 1117 perms['repositories'] = {
1118 1118 k: v for k, v in list(perms['repositories'].items())
1119 1119 if v != 'repository.none'
1120 1120 }
1121 1121 perms['repositories_groups'] = {
1122 1122 k: v for k, v in list(perms['repositories_groups'].items())
1123 1123 if v != 'group.none'
1124 1124 }
1125 1125 perms['user_groups'] = {
1126 1126 k: v for k, v in list(perms['user_groups'].items())
1127 1127 if v != 'usergroup.none'
1128 1128 }
1129 1129 perms['repository_branches'] = {
1130 1130 k: v for k, v in list(perms['repository_branches'].items())
1131 1131 if v != 'branch.none'
1132 1132 }
1133 1133 return perms
1134 1134
1135 1135 @LazyProperty
1136 1136 def permissions_full_details(self):
1137 1137 return self.get_perms(
1138 1138 user=self, cache=None, calculate_super_admin=True)
1139 1139
1140 1140 def permissions_with_scope(self, scope):
1141 1141 """
1142 1142         Call the get_perms function with scoped data. The scope in that function
1143 1143         narrows the SQL calls to the given object IDs, resulting in fetching
1144 1144         just the particular permissions we want to obtain. If scope is an empty dict
1145 1145         it narrows the scope to GLOBAL permissions only (usage sketch below).
1146 1146
1147 1147 :param scope: dict
1148 1148 """
1149 1149 if 'repo_name' in scope:
1150 1150 obj = Repository.get_by_repo_name(scope['repo_name'])
1151 1151 if obj:
1152 1152 scope['repo_id'] = obj.repo_id
1153 1153 _scope = collections.OrderedDict()
1154 1154 _scope['repo_id'] = -1
1155 1155 _scope['user_group_id'] = -1
1156 1156 _scope['repo_group_id'] = -1
1157 1157
1158 1158 for k in sorted(scope.keys()):
1159 1159 _scope[k] = scope[k]
1160 1160
1161 1161 # store in cache to mimic how the @LazyProperty works,
1162 1162 # the difference here is that we use the unique key calculated
1163 1163 # from params and values
1164 1164 return self.get_perms(user=self, cache=None, scope=_scope)
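    # Usage sketch for the scoped lookup above (repo name illustrative):
    #
    #   scoped = auth_user.permissions_with_scope({'repo_name': 'some-repo'})
    #   scoped['repositories'].get('some-repo')  # e.g. 'repository.read'
    #   auth_user.permissions_with_scope({})     # narrows to GLOBAL permissions only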
1165 1165
1166 1166 def get_instance(self):
1167 1167 return User.get(self.user_id)
1168 1168
1169 1169 def propagate_data(self):
1170 1170 """
1171 1171 Fills in user data and propagates values to this instance. Maps fetched
1172 1172 user attributes to this class instance attributes
1173 1173 """
1174 1174 log.debug('AuthUser: starting data propagation for new potential user')
1175 1175 user_model = UserModel()
1176 1176 anon_user = self.anonymous_user = User.get_default_user(cache=True)
1177 1177 is_user_loaded = False
1178 1178
1179 1179 # lookup by userid
1180 1180 if self.user_id is not None and self.user_id != anon_user.user_id:
1181 1181 log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id)
1182 1182 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
1183 1183
1184 1184         # try to get user by api key
1185 1185 elif self._api_key and self._api_key != anon_user.api_key:
1186 1186 log.debug('Trying Auth User lookup by API KEY: `...%s`', self._api_key[-4:])
1187 1187 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
1188 1188
1189 1189 # lookup by username
1190 1190 elif self.username:
1191 1191 log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username)
1192 1192 is_user_loaded = user_model.fill_data(self, username=self.username)
1193 1193 else:
1194 1194             log.debug('No data in %s that could be used to log in', self)
1195 1195
1196 1196 if not is_user_loaded:
1197 1197 log.debug(
1198 1198 'Failed to load user. Fallback to default user %s', anon_user)
1199 1199 # if we cannot authenticate user try anonymous
1200 1200 if anon_user.active:
1201 1201 log.debug('default user is active, using it as a session user')
1202 1202 user_model.fill_data(self, user_id=anon_user.user_id)
1203 1203 # then we set this user is logged in
1204 1204 self.is_authenticated = True
1205 1205 else:
1206 1206 log.debug('default user is NOT active')
1207 1207 # in case of disabled anonymous user we reset some of the
1208 1208 # parameters so such user is "corrupted", skipping the fill_data
1209 1209 for attr in ['user_id', 'username', 'admin', 'active']:
1210 1210 setattr(self, attr, None)
1211 1211 self.is_authenticated = False
1212 1212
1213 1213 if not self.username:
1214 1214 self.username = 'None'
1215 1215
1216 1216 log.debug('AuthUser: propagated user is now %s', self)
1217 1217
1218 1218 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
1219 1219 calculate_super_admin=False, cache=None):
1220 1220 """
1221 1221         Fills the user permission attribute with permissions taken from the database;
1222 1222         works for permissions given for repositories, and for permissions that
1223 1223         are granted to groups
1224 1224 
1225 1225         :param user: instance of User object from database
1226 1226         :param scope:
1227 1227         :param explicit: In case there are permissions both for the user and a group
1228 1228             the user is part of, the explicit flag defines whether the user
1229 1229             explicitly overrides permissions from the group; if it's False the
1230 1230             decision is made based on the algo
1231 1231         :param algo: algorithm to decide which permission should be chosen if
1232 1232             multiple are defined, e.g. user in two different groups. It also
1233 1233             decides, if the explicit flag is turned off, how to pick the permission
1234 1234             when the user is in a group and also has a separate permission defined
1235 1235 :param calculate_super_admin: calculate permissions for super-admin in the
1236 1236 same way as for regular user without speedups
1237 1237 :param cache: Use caching for calculation, None = let the cache backend decide
1238 1238 """
1239 1239 user_id = user.user_id
1240 1240 user_is_admin = user.is_admin
1241 1241
1242 1242 # inheritance of global permissions like create repo/fork repo etc
1243 1243 user_inherit_default_permissions = user.inherit_default_permissions
1244 1244
1245 1245 cache_seconds = safe_int(
1246 1246 rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time'))
1247 1247
1248 1248 if cache is None:
1249 1249 # let the backend cache decide
1250 1250 cache_on = cache_seconds > 0
1251 1251 else:
1252 1252 cache_on = cache
1253 1253
1254 1254 log.debug(
1255 1255 'Computing PERMISSION tree for user %s scope `%s` '
1256 1256 'with caching: %s[TTL: %ss]', user, scope, cache_on, cache_seconds or 0)
1257 1257
1258 1258 cache_namespace_uid = f'cache_user_auth.{rc_cache.PERMISSIONS_CACHE_VER}.{user_id}'
1259 1259 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1260 1260
1261 1261 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
1262 1262 condition=cache_on)
1263 1263 def compute_perm_tree(cache_name, cache_ver,
1264 1264                               user_id, scope, user_is_admin, user_inherit_default_permissions,
1265 1265 explicit, algo, calculate_super_admin):
1266 1266 return _cached_perms_data(
1267 1267 user_id, scope, user_is_admin, user_inherit_default_permissions,
1268 1268 explicit, algo, calculate_super_admin)
1269 1269
1270 1270 start = time.time()
1271 1271 result = compute_perm_tree(
1272 1272 'permissions', 'v1', user_id, scope, user_is_admin,
1273 1273 user_inherit_default_permissions, explicit, algo,
1274 1274 calculate_super_admin)
1275 1275
1276 1276 result_repr = []
1277 1277 for k in result:
1278 1278 result_repr.append((k, len(result[k])))
1279 1279 total = time.time() - start
1280 1280 log.debug('PERMISSION tree for user %s computed in %.4fs: %s',
1281 1281 user, total, result_repr)
1282 1282
1283 1283 return result
1284 1284
1285 1285 @property
1286 1286 def is_default(self):
1287 1287 return self.username == User.DEFAULT_USER
1288 1288
1289 1289 @property
1290 1290 def is_admin(self):
1291 1291 return self.admin
1292 1292
1293 1293 @property
1294 1294 def is_user_object(self):
1295 1295 return self.user_id is not None
1296 1296
1297 1297 @property
1298 1298 def repositories_admin(self):
1299 1299 """
1300 1300 Returns list of repositories you're an admin of
1301 1301 """
1302 1302 return [
1303 1303 x[0] for x in list(self.permissions['repositories'].items())
1304 1304 if x[1] == 'repository.admin']
1305 1305
1306 1306 @property
1307 1307 def repository_groups_admin(self):
1308 1308 """
1309 1309 Returns list of repository groups you're an admin of
1310 1310 """
1311 1311 return [
1312 1312 x[0] for x in list(self.permissions['repositories_groups'].items())
1313 1313 if x[1] == 'group.admin']
1314 1314
1315 1315 @property
1316 1316 def user_groups_admin(self):
1317 1317 """
1318 1318 Returns list of user groups you're an admin of
1319 1319 """
1320 1320 return [
1321 1321 x[0] for x in list(self.permissions['user_groups'].items())
1322 1322 if x[1] == 'usergroup.admin']
1323 1323
1324 1324 def repo_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1325 1325 if not perms:
1326 1326 perms = AuthUser.repo_read_perms
1327 1327 allowed_ids = []
1328 1328 for k, stack_data in list(self.permissions['repositories'].perm_origin_stack.items()):
1329 1329 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1330 1330 if prefix_filter and not k.startswith(prefix_filter):
1331 1331 continue
1332 1332 if perm in perms:
1333 1333 allowed_ids.append(obj_id)
1334 1334 return allowed_ids
1335 1335
1336 1336 def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
1337 1337 """
1338 1338         Returns a list of repository ids the user has access to based on the given
1339 1339         perms. The cache flag should only be used for display purposes,
1340 1340         NOT IN ANY CASE for permission checks (usage sketch below).
1341 1341 """
1342 1342 from rhodecode.model.scm import RepoList
1343 1343 if not perms:
1344 1344 perms = AuthUser.repo_read_perms
1345 1345
1346 1346 if not isinstance(perms, list):
1347 1347             raise ValueError('perms parameter must be a list, got {} instead'.format(perms))
1348 1348
1349 1349 def _cached_repo_acl(perm_def, _name_filter):
1350 1350 qry = Repository.query()
1351 1351 if _name_filter:
1352 1352 ilike_expression = '%{}%'.format(_name_filter)
1353 1353 qry = qry.filter(
1354 1354 Repository.repo_name.ilike(ilike_expression))
1355 1355
1356 1356 return [x.repo_id for x in
1357 1357 RepoList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1358 1358
1359 1359 log.debug('Computing REPO ACL IDS user %s', self)
1360 1360
1361 1361 cache_namespace_uid = f'cache_user_repo_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{self.user_id}'
1362 1362 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1363 1363
1364 1364 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1365 1365 def compute_repo_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1366 1366 return _cached_repo_acl(perm_def, _name_filter)
1367 1367
1368 1368 start = time.time()
1369 1369 result = compute_repo_acl_ids('v1', self.user_id, perms, name_filter)
1370 1370 total = time.time() - start
1371 1371 log.debug('REPO ACL IDS for user %s computed in %.4fs', self, total)
1372 1372
1373 1373 return result
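    # Usage sketch referenced in the docstring above (values illustrative):
    #
    #   # ids of repositories the user can at least read, with names containing 'docs'
    #   auth_user.repo_acl_ids(perms=AuthUser.repo_read_perms, name_filter='docs')
    #   # admin-only ids, cached for display purposes only
    #   auth_user.repo_acl_ids(perms=['repository.admin'], cache=True)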
1374 1374
1375 1375 def repo_group_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1376 1376 if not perms:
1377 1377 perms = AuthUser.repo_group_read_perms
1378 1378 allowed_ids = []
1379 1379 for k, stack_data in list(self.permissions['repositories_groups'].perm_origin_stack.items()):
1380 1380 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1381 1381 if prefix_filter and not k.startswith(prefix_filter):
1382 1382 continue
1383 1383 if perm in perms:
1384 1384 allowed_ids.append(obj_id)
1385 1385 return allowed_ids
1386 1386
1387 1387 def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1388 1388 """
1389 1389         Returns a list of repository group ids the user has access to based on the given
1390 1390         perms. The cache flag should only be used for display purposes,
1391 1391         NOT IN ANY CASE for permission checks.
1392 1392 """
1393 1393 from rhodecode.model.scm import RepoGroupList
1394 1394 if not perms:
1395 1395 perms = AuthUser.repo_group_read_perms
1396 1396
1397 1397 if not isinstance(perms, list):
1398 1398             raise ValueError(f'perms parameter must be a list, got {perms} instead')
1399 1399
1400 1400 def _cached_repo_group_acl(perm_def, _name_filter):
1401 1401 qry = RepoGroup.query()
1402 1402 if _name_filter:
1403 1403 ilike_expression = '%{}%'.format(_name_filter)
1404 1404 qry = qry.filter(
1405 1405 RepoGroup.group_name.ilike(ilike_expression))
1406 1406
1407 1407 return [x.group_id for x in
1408 1408 RepoGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1409 1409
1410 1410 log.debug('Computing REPO GROUP ACL IDS user %s', self)
1411 1411
1412 1412 cache_namespace_uid = f'cache_user_repo_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{self.user_id}'
1413 1413 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1414 1414
1415 1415 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1416 1416 def compute_repo_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1417 1417 return _cached_repo_group_acl(perm_def, _name_filter)
1418 1418
1419 1419 start = time.time()
1420 1420 result = compute_repo_group_acl_ids('v1', self.user_id, perms, name_filter)
1421 1421 total = time.time() - start
1422 1422 log.debug('REPO GROUP ACL IDS for user %s computed in %.4fs', self, total)
1423 1423
1424 1424 return result
1425 1425
1426 1426 def user_group_acl_ids_from_stack(self, perms=None, cache=False):
1427 1427 if not perms:
1428 1428 perms = AuthUser.user_group_read_perms
1429 1429 allowed_ids = []
1430 1430 for k, stack_data in list(self.permissions['user_groups'].perm_origin_stack.items()):
1431 1431 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1432 1432 if perm in perms:
1433 1433 allowed_ids.append(obj_id)
1434 1434 return allowed_ids
1435 1435
1436 1436 def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1437 1437 """
1438 1438         Returns a list of user group ids the user has access to based on the given
1439 1439         perms. The cache flag should only be used for display purposes,
1440 1440         NOT IN ANY CASE for permission checks.
1441 1441 """
1442 1442 from rhodecode.model.scm import UserGroupList
1443 1443 if not perms:
1444 1444 perms = AuthUser.user_group_read_perms
1445 1445
1446 1446 if not isinstance(perms, list):
1447 1447             raise ValueError('perms parameter must be a list, got {} instead'.format(perms))
1448 1448
1449 1449 def _cached_user_group_acl(perm_def, _name_filter):
1450 1450 qry = UserGroup.query()
1451 1451 if _name_filter:
1452 1452 ilike_expression = '%{}%'.format(_name_filter)
1453 1453 qry = qry.filter(
1454 1454 UserGroup.users_group_name.ilike(ilike_expression))
1455 1455
1456 1456 return [x.users_group_id for x in
1457 1457 UserGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]
1458 1458
1459 1459 log.debug('Computing USER GROUP ACL IDS user %s', self)
1460 1460
1461 1461 cache_namespace_uid = f'cache_user_user_group_acl_ids.{rc_cache.PERMISSIONS_CACHE_VER}.{self.user_id}'
1462 1462 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1463 1463
1464 1464 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
1465 1465 def compute_user_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
1466 1466 return _cached_user_group_acl(perm_def, _name_filter)
1467 1467
1468 1468 start = time.time()
1469 1469 result = compute_user_group_acl_ids('v1', self.user_id, perms, name_filter)
1470 1470 total = time.time() - start
1471 1471 log.debug('USER GROUP ACL IDS for user %s computed in %.4fs', self, total)
1472 1472
1473 1473 return result
1474 1474
1475 1475 @property
1476 1476 def ip_allowed(self):
1477 1477 """
1478 1478         Checks if the ip_addr used in the constructor is allowed, based on the defined
1479 1479         list of allowed IP addresses for the user
1480 1480
1481 1481 :returns: boolean, True if ip is in allowed ip range
1482 1482 """
1483 1483 # check IP
1484 1484 inherit = self.inherit_default_permissions
1485 1485 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
1486 1486 inherit_from_default=inherit)
1487 1487
1488 1488 @property
1489 1489 def personal_repo_group(self):
1490 1490 return RepoGroup.get_user_personal_repo_group(self.user_id)
1491 1491
1492 1492 @LazyProperty
1493 1493 def feed_token(self):
1494 1494 return self.get_instance().feed_token
1495 1495
1496 1496 @LazyProperty
1497 1497 def artifact_token(self):
1498 1498 return self.get_instance().artifact_token
1499 1499
1500 1500 @classmethod
1501 1501 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1502 1502 allowed_ips = AuthUser.get_allowed_ips(
1503 1503 user_id, cache=True, inherit_from_default=inherit_from_default)
1504 1504 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1505 1505 log.debug('IP:%s for user %s is in range of %s',
1506 1506 ip_addr, user_id, allowed_ips)
1507 1507 return True
1508 1508 else:
1509 1509 log.info('Access for IP:%s forbidden for user %s, '
1510 1510 'not in %s', ip_addr, user_id, allowed_ips,
1511 1511 extra={"ip": ip_addr, "user_id": user_id})
1512 1512 return False
1513 1513
1514 1514 def get_branch_permissions(self, repo_name, perms=None):
1515 1515 perms = perms or self.permissions_with_scope({'repo_name': repo_name})
1516 1516 branch_perms = perms.get('repository_branches', {})
1517 1517 if not branch_perms:
1518 1518 return {}
1519 1519 repo_branch_perms = branch_perms.get(repo_name)
1520 1520 return repo_branch_perms or {}
1521 1521
1522 1522 def get_rule_and_branch_permission(self, repo_name, branch_name):
1523 1523 """
1524 1524 Check if this AuthUser has defined any permissions for branches. If any of
1525 1525 the rules match in order, we return the matching permissions
1526 1526 """
1527 1527
1528 1528 rule = default_perm = ''
1529 1529
1530 1530 repo_branch_perms = self.get_branch_permissions(repo_name=repo_name)
1531 1531 if not repo_branch_perms:
1532 1532 return rule, default_perm
1533 1533
1534 1534 # now calculate the permissions
1535 1535 for pattern, branch_perm in list(repo_branch_perms.items()):
1536 1536 if fnmatch.fnmatch(branch_name, pattern):
1537 1537 rule = '`{}`=>{}'.format(pattern, branch_perm)
1538 1538 return rule, branch_perm
1539 1539
1540 1540 return rule, default_perm
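    # Branch-rule matching sketch; the pattern and permission values below are
    # illustrative. Rules are fnmatch patterns checked in order, first match wins:
    #
    #   # given a branch rule {'dev/*': 'branch.push'} defined on 'some-repo'
    #   rule, perm = auth_user.get_rule_and_branch_permission('some-repo', 'dev/feature-1')
    #   # rule == '`dev/*`=>branch.push', perm == 'branch.push'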
1541 1541
1542 1542 def get_notice_messages(self):
1543 1543
1544 1544 notice_level = 'notice-error'
1545 1545 notice_messages = []
1546 1546 if self.is_default:
1547 1547 return [], notice_level
1548 1548
1549 1549 notices = UserNotice.query()\
1550 1550 .filter(UserNotice.user_id == self.user_id)\
1551 1551 .filter(UserNotice.notice_read == false())\
1552 1552 .all()
1553 1553
1554 1554 try:
1555 1555 for entry in notices:
1556 1556
1557 1557 msg = {
1558 1558 'msg_id': entry.user_notice_id,
1559 1559 'level': entry.notification_level,
1560 1560 'subject': entry.notice_subject,
1561 1561 'body': entry.notice_body,
1562 1562 }
1563 1563 notice_messages.append(msg)
1564 1564
1565 1565 log.debug('Got user %s %s messages', self, len(notice_messages))
1566 1566
1567 1567 levels = [x['level'] for x in notice_messages]
1568 1568 notice_level = 'notice-error' if 'error' in levels else 'notice-warning'
1569 1569 except Exception:
1570 1570 pass
1571 1571
1572 1572 return notice_messages, notice_level
1573 1573
1574 1574 def __repr__(self):
1575 1575 return self.repr_user(self.user_id, self.username, self.ip_addr, self.is_authenticated)
1576 1576
1577 1577 def set_authenticated(self, authenticated=True):
1578 1578 if self.user_id != self.anonymous_user.user_id:
1579 1579 self.is_authenticated = authenticated
1580 1580
1581 1581 def get_cookie_store(self):
1582 1582 return {
1583 1583 'username': self.username,
1584 1584 'password': md5(safe_bytes(self.password or '')),
1585 1585 'user_id': self.user_id,
1586 1586 'is_authenticated': self.is_authenticated
1587 1587 }
1588 1588
1589 1589 @classmethod
1590 1590 def repr_user(cls, user_id=0, username='ANONYMOUS', ip='0.0.0.0', is_authenticated=False):
1591 1591 tmpl = "<AuthUser('id:{}[{}] ip:{} auth:{}')>"
1592 1592 return tmpl.format(user_id, username, ip, is_authenticated)
1593 1593
1594 1594 @classmethod
1595 1595 def from_cookie_store(cls, cookie_store):
1596 1596 """
1597 1597 Creates AuthUser from a cookie store
1598 1598
1599 1599 :param cls:
1600 1600 :param cookie_store:
1601 1601 """
1602 1602 user_id = cookie_store.get('user_id')
1603 1603 username = cookie_store.get('username')
1604 1604 api_key = cookie_store.get('api_key')
1605 1605 return AuthUser(user_id, api_key, username)
1606 1606
1607 1607 @classmethod
1608 1608 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1609 1609 _set = set()
1610 1610
1611 1611 if inherit_from_default:
1612 1612 def_user_id = User.get_default_user(cache=True).user_id
1613 1613 default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id)
1614 1614 if cache:
1615 1615 default_ips = default_ips.options(
1616 1616 FromCache("sql_cache_short", "get_user_ips_default"))
1617 1617
1618 1618 # populate from default user
1619 1619 for ip in default_ips:
1620 1620 try:
1621 1621 _set.add(ip.ip_addr)
1622 1622 except ObjectDeletedError:
1623 1623 # since we use heavy caching sometimes it happens that
1624 1624 # we get deleted objects here, we just skip them
1625 1625 pass
1626 1626
1627 1627 # NOTE:(marcink) we don't want to load any rules for empty
1628 1628 # user_id which is the case of access of non logged users when anonymous
1629 1629 # access is disabled
1630 1630 user_ips = []
1631 1631 if user_id:
1632 1632 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1633 1633 if cache:
1634 1634 user_ips = user_ips.options(
1635 1635 FromCache("sql_cache_short", f"get_user_ips_{user_id}"))
1636 1636
1637 1637 for ip in user_ips:
1638 1638 try:
1639 1639 _set.add(ip.ip_addr)
1640 1640 except ObjectDeletedError:
1641 1641 # since we use heavy caching sometimes it happens that we get
1642 1642 # deleted objects here, we just skip them
1643 1643 pass
1644 1644 return _set or {ip for ip in ['0.0.0.0/0', '::/0']}
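    # IP restriction sketch (addresses illustrative). When no IP rules are
    # defined, the catch-all ranges above effectively allow everything:
    #
    #   AuthUser.get_allowed_ips(user_id=2, cache=True, inherit_from_default=True)
    #   # -> e.g. {'10.0.0.0/8'}, or the default {'0.0.0.0/0', '::/0'}
    #   AuthUser.check_ip_allowed(user_id=2, ip_addr='10.1.2.3', inherit_from_default=True)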
1645 1645
1646 1646
1647 1647 def set_available_permissions(settings):
1648 1648 """
1649 1649     This function will populate pyramid settings with all available permissions
1650 1650     defined in the db. We don't want to query the db each time for new
1651 1651     permissions, since adding a new permission also requires an application restart
1652 1652     i.e. to decorate new views with the newly created permission
1653 1653
1654 1654 :param settings: current pyramid registry.settings
1655 1655
1656 1656 """
1657 1657 log.debug('auth: getting information about all available permissions')
1658 1658 try:
1659 1659 sa = meta.Session
1660 1660 all_perms = sa.query(Permission).all()
1661 1661 settings.setdefault('available_permissions',
1662 1662 [x.permission_name for x in all_perms])
1663 1663 log.debug('auth: set available permissions')
1664 1664 except Exception:
1665 1665 log.exception('Failed to fetch permissions from the database.')
1666 1666 raise
1667 1667
1668 1668
1669 1669 def get_csrf_token(session, force_new=False, save_if_missing=True):
1670 1670 """
1671 1671     Return the current CSRF token, creating one if it doesn't
1672 1672     already exist and the save_if_missing flag is set.
1673 1673
1674 1674 :param session: pass in the pyramid session, else we use the global ones
1675 1675 :param force_new: force to re-generate the token and store it in session
1676 1676 :param save_if_missing: save the newly generated token if it's missing in
1677 1677 session
1678 1678 """
1679 1679 # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
1680 1680 # from pyramid.csrf import get_csrf_token
1681 1681
1682 1682 if (csrf_token_key not in session and save_if_missing) or force_new:
1683 1683 token = sha1(ascii_bytes(str(random.getrandbits(128))))
1684 1684 session[csrf_token_key] = token
1685 1685 if hasattr(session, 'save'):
1686 1686 session.save()
1687 1687 return session.get(csrf_token_key)
1688 1688
1689 1689
1690 1690 def get_request(perm_class_instance):
1691 from pyramid.threadlocal import get_current_request
1691 from rhodecode.lib.pyramid_utils import get_current_request
1692 1692 pyramid_request = get_current_request()
1693 1693 return pyramid_request
1694 1694
1695 1695
1696 1696 # CHECK DECORATORS
1697 1697 class CSRFRequired(object):
1698 1698 """
1699 1699 Decorator for authenticating a form
1700 1700
1701 1701 This decorator uses an authorization token stored in the client's
1702 1702 session for prevention of certain Cross-site request forgery (CSRF)
1703 1703 attacks (See
1704 1704 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1705 1705 information).
1706 1706
1707 1707 For use with the ``secure_form`` helper functions.
1708 1708
1709 1709 """
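    # Hypothetical view sketch: the token has to come back either in the POST
    # data under the `csrf_token_key` field or in the 'X-CSRF-Token' header:
    #
    #   @LoginRequired()
    #   @CSRFRequired(except_methods=['GET'])
    #   def my_settings_update(self):
    #       ...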
1710 1710 def __init__(self, token=csrf_token_key, header='X-CSRF-Token', except_methods=None):
1711 1711 self.token = token
1712 1712 self.header = header
1713 1713 self.except_methods = except_methods or []
1714 1714
1715 1715 def __call__(self, func):
1716 1716 return get_cython_compat_decorator(self.__wrapper, func)
1717 1717
1718 1718 def _get_csrf(self, _request):
1719 1719 return _request.POST.get(self.token, _request.headers.get(self.header))
1720 1720
1721 1721 def check_csrf(self, _request, cur_token):
1722 1722 supplied_token = self._get_csrf(_request)
1723 1723 return supplied_token and supplied_token == cur_token
1724 1724
1725 1725 def _get_request(self):
1726 1726 return get_request(self)
1727 1727
1728 1728 def __wrapper(self, func, *fargs, **fkwargs):
1729 1729 cls = fargs[0]
1730 1730 request = cls.request or self._get_request()
1731 1731
1732 1732 if request.method in self.except_methods:
1733 1733 return func(*fargs, **fkwargs)
1734 1734
1735 1735 cur_token = get_csrf_token(request.session, save_if_missing=False)
1736 1736 if self.check_csrf(request, cur_token):
1737 1737 if request.POST.get(self.token):
1738 1738 del request.POST[self.token]
1739 1739 return func(*fargs, **fkwargs)
1740 1740 else:
1741 1741 reason = 'token-missing'
1742 1742 supplied_token = self._get_csrf(request)
1743 1743 if supplied_token and cur_token != supplied_token:
1744 1744 reason = 'token-mismatch [%s:%s]' % (
1745 1745                     (cur_token or '')[:6], (supplied_token or '')[:6])
1746 1746
1747 1747 csrf_message = \
1748 1748 ("Cross-site request forgery detected, request denied. See "
1749 1749 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1750 1750 "more information.")
1751 1751 log.warning('Cross-site request forgery detected, request %r DENIED: %s '
1752 1752 'REMOTE_ADDR:%s, HEADERS:%s' % (
1753 1753 request, reason, request.remote_addr, request.headers))
1754 1754
1755 1755 raise HTTPForbidden(explanation=csrf_message)
1756 1756
1757 1757
1758 1758 class LoginRequired(object):
1759 1759 """
1760 1760     Must be logged in to execute this function, else
1761 1761 redirect to login page
1762 1762
1763 1763 :param auth_token_access: if enabled this checks only for valid auth token
1764 1764 and grants access based on valid token
1765 1765 """
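    # Hypothetical usage sketch; ROLE_HTTP is also the default role used further
    # below, any role from UserApiKeys.ROLES could be passed here instead:
    #
    #   @LoginRequired(auth_token_access=[UserApiKeys.ROLE_HTTP])
    #   def repo_commit_raw(self):
    #       ...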
1766 1766 def __init__(self, auth_token_access=None):
1767 1767 self.auth_token_access = auth_token_access
1768 1768 if self.auth_token_access:
1769 1769 valid_type = set(auth_token_access).intersection(set(UserApiKeys.ROLES))
1770 1770 if not valid_type:
1771 1771                 raise ValueError('auth_token_access must be one of {}, got {}'.format(
1772 1772 UserApiKeys.ROLES, auth_token_access))
1773 1773
1774 1774 def __call__(self, func):
1775 1775 return get_cython_compat_decorator(self.__wrapper, func)
1776 1776
1777 1777 def _get_request(self):
1778 1778 return get_request(self)
1779 1779
1780 1780 def __wrapper(self, func, *fargs, **fkwargs):
1781 1781 from rhodecode.lib import helpers as h
1782 1782 cls = fargs[0]
1783 1783 user = cls._rhodecode_user
1784 1784 request = cls.request or self._get_request()
1785 1785 _ = request.translate
1786 1786
1787 1787 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1788 1788 log.debug('Starting login restriction checks for user: %s', user)
1789 1789 # check if our IP is allowed
1790 1790 ip_access_valid = True
1791 1791 if not user.ip_allowed:
1792 1792 h.flash(h.literal(_('IP {} not allowed'.format(user.ip_addr))),
1793 1793 category='warning')
1794 1794 ip_access_valid = False
1795 1795
1796 1796         # we use the stored token that is extracted from GET or URL param (if any)
1797 1797 _auth_token = request.user_auth_token
1798 1798
1799 1799 # check if we used an AUTH_TOKEN and it's a valid one
1800 1800         # defined white-list of controllers for which API access will be enabled
1801 1801 whitelist = None
1802 1802 if self.auth_token_access:
1803 1803 # since this location is allowed by @LoginRequired decorator it's our
1804 1804 # only whitelist
1805 1805 whitelist = [loc]
1806 1806 auth_token_access_valid = allowed_auth_token_access(
1807 1807 loc, whitelist=whitelist, auth_token=_auth_token)
1808 1808
1809 1809 # explicit controller is enabled or API is in our whitelist
1810 1810 if auth_token_access_valid:
1811 1811 log.debug('Checking AUTH TOKEN access for %s', cls)
1812 1812 db_user = user.get_instance()
1813 1813
1814 1814 if db_user:
1815 1815 if self.auth_token_access:
1816 1816 roles = self.auth_token_access
1817 1817 else:
1818 1818 roles = [UserApiKeys.ROLE_HTTP]
1819 1819 log.debug('AUTH TOKEN: checking auth for user %s and roles %s',
1820 1820 db_user, roles)
1821 1821 token_match = db_user.authenticate_by_token(
1822 1822 _auth_token, roles=roles)
1823 1823 else:
1824 1824 log.debug('Unable to fetch db instance for auth user: %s', user)
1825 1825 token_match = False
1826 1826
1827 1827 if _auth_token and token_match:
1828 1828 auth_token_access_valid = True
1829 1829 log.debug('AUTH TOKEN ****%s is VALID', _auth_token[-4:])
1830 1830 else:
1831 1831 auth_token_access_valid = False
1832 1832 if not _auth_token:
1833 1833 log.debug("AUTH TOKEN *NOT* present in request")
1834 1834 else:
1835 1835 log.warning("AUTH TOKEN ****%s *NOT* valid", _auth_token[-4:])
1836 1836
1837 1837 log.debug('Checking if %s is authenticated @ %s', user.username, loc)
1838 1838 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1839 1839 else 'AUTH_TOKEN_AUTH'
1840 1840
1841 1841 if ip_access_valid and (
1842 1842 user.is_authenticated or auth_token_access_valid):
1843 1843 log.info('user %s authenticating with:%s IS authenticated on func %s',
1844 1844 user, reason, loc)
1845 1845
1846 1846 return func(*fargs, **fkwargs)
1847 1847 else:
1848 1848 log.warning(
1849 1849 'user %s authenticating with:%s NOT authenticated on '
1850 1850 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s',
1851 1851 user, reason, loc, ip_access_valid, auth_token_access_valid)
1852 1852             # we preserve the GET param
1853 1853 came_from = get_came_from(request)
1854 1854
1855 1855 log.debug('redirecting to login page with %s', came_from)
1856 1856 raise HTTPFound(
1857 1857 h.route_path('login', _query={'came_from': came_from}))
1858 1858
1859 1859
1860 1860 class NotAnonymous(object):
1861 1861 """
1862 1862     Must be logged in to execute this function, else
1863 1863 redirect to login page
1864 1864 """
1865 1865
1866 1866 def __call__(self, func):
1867 1867 return get_cython_compat_decorator(self.__wrapper, func)
1868 1868
1869 1869 def _get_request(self):
1870 1870 return get_request(self)
1871 1871
1872 1872 def __wrapper(self, func, *fargs, **fkwargs):
1873 1873 import rhodecode.lib.helpers as h
1874 1874 cls = fargs[0]
1875 1875 self.user = cls._rhodecode_user
1876 1876 request = cls.request or self._get_request()
1877 1877 _ = request.translate
1878 1878 log.debug('Checking if user is not anonymous @%s', cls)
1879 1879
1880 1880 anonymous = self.user.username == User.DEFAULT_USER
1881 1881
1882 1882 if anonymous:
1883 1883 came_from = get_came_from(request)
1884 1884 h.flash(_('You need to be a registered user to '
1885 1885 'perform this action'),
1886 1886 category='warning')
1887 1887 raise HTTPFound(
1888 1888 h.route_path('login', _query={'came_from': came_from}))
1889 1889 else:
1890 1890 return func(*fargs, **fkwargs)
1891 1891
1892 1892
1893 1893 class PermsDecorator(object):
1894 1894 """
1895 1895     Base class for controller decorators; we extract the current user from
1896 1896 the class itself, which has it stored in base controllers
1897 1897 """
1898 1898
1899 1899 def __init__(self, *required_perms):
1900 1900 self.required_perms = set(required_perms)
1901 1901
1902 1902 def __call__(self, func):
1903 1903 return get_cython_compat_decorator(self.__wrapper, func)
1904 1904
1905 1905 def _get_request(self):
1906 1906 return get_request(self)
1907 1907
1908 1908 def __wrapper(self, func, *fargs, **fkwargs):
1909 1909 cls = fargs[0]
1910 1910 _user = cls._rhodecode_user
1911 1911 request = cls.request or self._get_request()
1912 1912 self.request = request
1913 1913 _ = request.translate
1914 1914
1915 1915 log.debug('checking %s permissions %s for %s %s',
1916 1916 self.__class__.__name__, self.required_perms, cls, _user)
1917 1917
1918 1918 if self.check_permissions(_user):
1919 1919 log.debug('Permission granted for %s %s', cls, _user)
1920 1920 return func(*fargs, **fkwargs)
1921 1921
1922 1922 else:
1923 1923 log.debug('Permission denied for %s %s', cls, _user)
1924 1924 anonymous = _user.username == User.DEFAULT_USER
1925 1925
1926 1926 if anonymous:
1927 1927 import rhodecode.lib.helpers as h
1928 1928 came_from = get_came_from(self._get_request())
1929 1929 h.flash(_('You need to be signed in to view this page'),
1930 1930 category='warning')
1931 1931 raise HTTPFound(
1932 1932 h.route_path('login', _query={'came_from': came_from}))
1933 1933
1934 1934 else:
1935 1935 # redirect with 404 to prevent resource discovery
1936 1936 raise HTTPNotFound()
1937 1937
1938 1938 def check_permissions(self, user):
1939 1939 """Dummy function for overriding"""
1940 1940 raise NotImplementedError(
1941 1941 'You have to write this function in child class')
1942 1942
1943 1943
1944 1944 class HasPermissionAllDecorator(PermsDecorator):
1945 1945 """
1946 1946 Checks for access permission for all given predicates. All of them
1947 1947     have to be met in order to fulfill the request
1948 1948 """
1949 1949
1950 1950 def check_permissions(self, user):
1951 1951 perms = user.permissions_with_scope({})
1952 1952 if self.required_perms.issubset(perms['global']):
1953 1953 return True
1954 1954 return False
1955 1955
1956 1956
1957 1957 class HasPermissionAnyDecorator(PermsDecorator):
1958 1958 """
1959 1959 Checks for access permission for any of given predicates. In order to
1960 1960     fulfill the request any of the predicates must be met
1961 1961 """
1962 1962
1963 1963 def check_permissions(self, user):
1964 1964 perms = user.permissions_with_scope({})
1965 1965 if self.required_perms.intersection(perms['global']):
1966 1966 return True
1967 1967 return False
1968 1968
1969 1969
1970 1970 class HasRepoPermissionAllDecorator(PermsDecorator):
1971 1971 """
1972 1972 Checks for access permission for all given predicates for specific
1973 1973     repository. All of them have to be met in order to fulfill the request
1974 1974 """
1975 1975 def _get_repo_name(self):
1976 1976 _request = self.request or self._get_request()
1977 1977 return get_repo_slug(_request)
1978 1978
1979 1979 def check_permissions(self, user):
1980 1980 perms = user.permissions
1981 1981 repo_name = self._get_repo_name()
1982 1982
1983 1983 try:
1984 1984 user_perms = {perms['repositories'][repo_name]}
1985 1985 except KeyError:
1986 1986 log.debug('cannot locate repo with name: `%s` in permissions defs',
1987 1987 repo_name)
1988 1988 return False
1989 1989 log.debug('checking `%s` permissions for repo `%s`',
1990 1990 user_perms, repo_name)
1991 1991 if self.required_perms.issubset(user_perms):
1992 1992 return True
1993 1993 return False
1994 1994
1995 1995
1996 1996 class HasRepoPermissionAnyDecorator(PermsDecorator):
1997 1997 """
1998 1998 Checks for access permission for any of given predicates for specific
1999 1999     repository. In order to fulfill the request any of the predicates must be met
2000 2000 """
2001 2001 def _get_repo_name(self):
2002 2002 _request = self.request or self._get_request()
2003 2003 return get_repo_slug(_request)
2004 2004
2005 2005 def check_permissions(self, user):
2006 2006 perms = user.permissions
2007 2007 repo_name = self._get_repo_name()
2008 2008
2009 2009 try:
2010 2010 user_perms = {perms['repositories'][repo_name]}
2011 2011 except KeyError:
2012 2012 log.debug(
2013 2013 'cannot locate repo with name: `%s` in permissions defs',
2014 2014 repo_name)
2015 2015 return False
2016 2016
2017 2017 log.debug('checking `%s` permissions for repo `%s`',
2018 2018 user_perms, repo_name)
2019 2019 if self.required_perms.intersection(user_perms):
2020 2020 return True
2021 2021 return False
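# Hypothetical view decoration sketch for the repo decorators above; the
# permission names come from this module's read/write/admin sets:
#
#   @LoginRequired()
#   @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', 'repository.admin')
#   def repo_summary(self):
#       ...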
2022 2022
2023 2023
2024 2024 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
2025 2025 """
2026 2026 Checks for access permission for all given predicates for specific
2027 2027     repository group. All of them have to be met in order to
2028 2028 fulfill the request
2029 2029 """
2030 2030 def _get_repo_group_name(self):
2031 2031 _request = self.request or self._get_request()
2032 2032 return get_repo_group_slug(_request)
2033 2033
2034 2034 def check_permissions(self, user):
2035 2035 perms = user.permissions
2036 2036 group_name = self._get_repo_group_name()
2037 2037 try:
2038 2038 user_perms = {perms['repositories_groups'][group_name]}
2039 2039 except KeyError:
2040 2040 log.debug(
2041 2041 'cannot locate repo group with name: `%s` in permissions defs',
2042 2042 group_name)
2043 2043 return False
2044 2044
2045 2045 log.debug('checking `%s` permissions for repo group `%s`',
2046 2046 user_perms, group_name)
2047 2047 if self.required_perms.issubset(user_perms):
2048 2048 return True
2049 2049 return False
2050 2050
2051 2051
2052 2052 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
2053 2053 """
2054 2054 Checks for access permission for any of given predicates for specific
2055 2055 repository group. In order to fulfill the request any
2056 2056 of predicates must be met
2057 2057 """
2058 2058 def _get_repo_group_name(self):
2059 2059 _request = self.request or self._get_request()
2060 2060 return get_repo_group_slug(_request)
2061 2061
2062 2062 def check_permissions(self, user):
2063 2063 perms = user.permissions
2064 2064 group_name = self._get_repo_group_name()
2065 2065
2066 2066 try:
2067 2067 user_perms = {perms['repositories_groups'][group_name]}
2068 2068 except KeyError:
2069 2069 log.debug(
2070 2070 'cannot locate repo group with name: `%s` in permissions defs',
2071 2071 group_name)
2072 2072 return False
2073 2073
2074 2074 log.debug('checking `%s` permissions for repo group `%s`',
2075 2075 user_perms, group_name)
2076 2076 if self.required_perms.intersection(user_perms):
2077 2077 return True
2078 2078 return False
2079 2079
2080 2080
2081 2081 class HasUserGroupPermissionAllDecorator(PermsDecorator):
2082 2082 """
2083 2083 Checks for access permission for all given predicates for specific
2084 2084     user group. All of them have to be met in order to fulfill the request
2085 2085 """
2086 2086 def _get_user_group_name(self):
2087 2087 _request = self.request or self._get_request()
2088 2088 return get_user_group_slug(_request)
2089 2089
2090 2090 def check_permissions(self, user):
2091 2091 perms = user.permissions
2092 2092 group_name = self._get_user_group_name()
2093 2093 try:
2094 2094 user_perms = {perms['user_groups'][group_name]}
2095 2095 except KeyError:
2096 2096 return False
2097 2097
2098 2098 if self.required_perms.issubset(user_perms):
2099 2099 return True
2100 2100 return False
2101 2101
2102 2102
2103 2103 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
2104 2104 """
2105 2105 Checks for access permission for any of given predicates for specific
2106 2106     user group. In order to fulfill the request any of the predicates must be met
2107 2107 """
2108 2108 def _get_user_group_name(self):
2109 2109 _request = self.request or self._get_request()
2110 2110 return get_user_group_slug(_request)
2111 2111
2112 2112 def check_permissions(self, user):
2113 2113 perms = user.permissions
2114 2114 group_name = self._get_user_group_name()
2115 2115 try:
2116 2116 user_perms = {perms['user_groups'][group_name]}
2117 2117 except KeyError:
2118 2118 return False
2119 2119
2120 2120 if self.required_perms.intersection(user_perms):
2121 2121 return True
2122 2122 return False
2123 2123
2124 2124
2125 2125 # CHECK FUNCTIONS
2126 2126 class PermsFunction(object):
2127 2127 """Base function for other check functions"""
2128 2128
2129 2129 def __init__(self, *perms):
2130 2130 self.required_perms = set(perms)
2131 2131 self.repo_name = None
2132 2132 self.repo_group_name = None
2133 2133 self.user_group_name = None
2134 2134
2135 2135 def __bool__(self):
2136 2136 import inspect
2137 2137 frame = inspect.currentframe()
2138 2138 stack_trace = traceback.format_stack(frame)
2139 2139 log.error('Checking bool value on a class instance of perm '
2140 2140 'function is not allowed: %s', ''.join(stack_trace))
2141 2141 # rather than throwing errors, here we always return False so if by
2142 2142 # accident someone checks truth for just an instance it will always end
2143 2143 # up in returning False
2144 2144 return False
2145 2145 __nonzero__ = __bool__
2146 2146
2147 2147 def __call__(self, check_location='', user=None):
2148 2148 if not user:
2149 2149 log.debug('Using user attribute from global request')
2150 2150 request = self._get_request()
2151 2151 user = request.user
2152 2152
2153 2153 # init auth user if not already given
2154 2154 if not isinstance(user, AuthUser):
2155 2155 log.debug('Wrapping user %s into AuthUser', user)
2156 2156 user = AuthUser(user.user_id)
2157 2157
2158 2158 cls_name = self.__class__.__name__
2159 2159 check_scope = self._get_check_scope(cls_name)
2160 2160 check_location = check_location or 'unspecified location'
2161 2161
2162 2162 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
2163 2163 self.required_perms, user, check_scope, check_location)
2164 2164 if not user:
2165 2165 log.warning('Empty user given for permission check')
2166 2166 return False
2167 2167
2168 2168 if self.check_permissions(user):
2169 2169 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2170 2170 check_scope, user, check_location)
2171 2171 return True
2172 2172
2173 2173 else:
2174 2174 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2175 2175 check_scope, user, check_location)
2176 2176 return False
2177 2177
2178 2178 def _get_request(self):
2179 2179 return get_request(self)
2180 2180
2181 2181 def _get_check_scope(self, cls_name):
2182 2182 return {
2183 2183 'HasPermissionAll': 'GLOBAL',
2184 2184 'HasPermissionAny': 'GLOBAL',
2185 2185 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
2186 2186 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
2187 2187 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
2188 2188 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
2189 2189 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
2190 2190 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
2191 2191 }.get(cls_name, '?:%s' % cls_name)
2192 2192
2193 2193 def check_permissions(self, user):
2194 2194 """Dummy function for overriding"""
2195 2195 raise Exception('You have to write this function in child class')
2196 2196
2197 2197
2198 2198 class HasPermissionAll(PermsFunction):
2199 2199 def check_permissions(self, user):
2200 2200 perms = user.permissions_with_scope({})
2201 2201 if self.required_perms.issubset(perms.get('global')):
2202 2202 return True
2203 2203 return False
2204 2204
2205 2205
2206 2206 class HasPermissionAny(PermsFunction):
2207 2207 def check_permissions(self, user):
2208 2208 perms = user.permissions_with_scope({})
2209 2209 if self.required_perms.intersection(perms.get('global')):
2210 2210 return True
2211 2211 return False
2212 2212
2213 2213
2214 2214 class HasRepoPermissionAll(PermsFunction):
2215 2215 def __call__(self, repo_name=None, check_location='', user=None):
2216 2216 self.repo_name = repo_name
2217 2217 return super(HasRepoPermissionAll, self).__call__(check_location, user)
2218 2218
2219 2219 def _get_repo_name(self):
2220 2220 if not self.repo_name:
2221 2221 _request = self._get_request()
2222 2222 self.repo_name = get_repo_slug(_request)
2223 2223 return self.repo_name
2224 2224
2225 2225 def check_permissions(self, user):
2226 2226 self.repo_name = self._get_repo_name()
2227 2227 perms = user.permissions
2228 2228 try:
2229 2229 user_perms = {perms['repositories'][self.repo_name]}
2230 2230 except KeyError:
2231 2231 return False
2232 2232 if self.required_perms.issubset(user_perms):
2233 2233 return True
2234 2234 return False
2235 2235
2236 2236
2237 2237 class HasRepoPermissionAny(PermsFunction):
2238 2238 def __call__(self, repo_name=None, check_location='', user=None):
2239 2239 self.repo_name = repo_name
2240 2240 return super(HasRepoPermissionAny, self).__call__(check_location, user)
2241 2241
2242 2242 def _get_repo_name(self):
2243 2243 if not self.repo_name:
2244 2244 _request = self._get_request()
2245 2245 self.repo_name = get_repo_slug(_request)
2246 2246 return self.repo_name
2247 2247
2248 2248 def check_permissions(self, user):
2249 2249 self.repo_name = self._get_repo_name()
2250 2250 perms = user.permissions
2251 2251 try:
2252 2252 user_perms = {perms['repositories'][self.repo_name]}
2253 2253 except KeyError:
2254 2254 return False
2255 2255 if self.required_perms.intersection(user_perms):
2256 2256 return True
2257 2257 return False
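# Call sketch for the perm-check functions above (repo name illustrative); the
# instance is called, never truth-checked directly (see __bool__ in PermsFunction):
#
#   _perms = ('repository.read', 'repository.write', 'repository.admin')
#   if HasRepoPermissionAny(*_perms)(repo_name='some-repo', check_location='summary view'):
#       ...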
2258 2258
2259 2259
2260 2260 class HasRepoGroupPermissionAny(PermsFunction):
2261 2261
2262 2262 def __call__(self, group_name=None, check_location='', user=None):
2263 2263 self.repo_group_name = group_name
2264 2264 return super(HasRepoGroupPermissionAny, self).__call__(check_location, user)
2265 2265
2266 2266 def check_permissions(self, user):
2267 2267 perms = user.permissions
2268 2268 try:
2269 2269 user_perms = {perms['repositories_groups'][self.repo_group_name]}
2270 2270 except KeyError:
2271 2271 return False
2272 2272 if self.required_perms.intersection(user_perms):
2273 2273 return True
2274 2274 return False
2275 2275
2276 2276
2277 2277 class HasRepoGroupPermissionAll(PermsFunction):
2278 2278 def __call__(self, group_name=None, check_location='', user=None):
2279 2279 self.repo_group_name = group_name
2280 2280 return super(HasRepoGroupPermissionAll, self).__call__(check_location, user)
2281 2281
2282 2282 def check_permissions(self, user):
2283 2283 perms = user.permissions
2284 2284 try:
2285 2285 user_perms = {perms['repositories_groups'][self.repo_group_name]}
2286 2286 except KeyError:
2287 2287 return False
2288 2288 if self.required_perms.issubset(user_perms):
2289 2289 return True
2290 2290 return False
2291 2291
2292 2292
2293 2293 class HasUserGroupPermissionAny(PermsFunction):
2294 2294 def __call__(self, user_group_name=None, check_location='', user=None):
2295 2295 self.user_group_name = user_group_name
2296 2296 return super(HasUserGroupPermissionAny, self).__call__(check_location, user)
2297 2297
2298 2298 def check_permissions(self, user):
2299 2299 perms = user.permissions
2300 2300 try:
2301 2301 user_perms = {perms['user_groups'][self.user_group_name]}
2302 2302 except KeyError:
2303 2303 return False
2304 2304 if self.required_perms.intersection(user_perms):
2305 2305 return True
2306 2306 return False
2307 2307
2308 2308
2309 2309 class HasUserGroupPermissionAll(PermsFunction):
2310 2310 def __call__(self, user_group_name=None, check_location='', user=None):
2311 2311 self.user_group_name = user_group_name
2312 2312 return super(HasUserGroupPermissionAll, self).__call__(check_location, user)
2313 2313
2314 2314 def check_permissions(self, user):
2315 2315 perms = user.permissions
2316 2316 try:
2317 2317 user_perms = {perms['user_groups'][self.user_group_name]}
2318 2318 except KeyError:
2319 2319 return False
2320 2320 if self.required_perms.issubset(user_perms):
2321 2321 return True
2322 2322 return False
2323 2323
2324 2324
2325 2325 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
2326 2326 class HasPermissionAnyMiddleware(object):
2327 2327 def __init__(self, *perms):
2328 2328 self.required_perms = set(perms)
2329 2329
2330 2330 def __call__(self, auth_user, repo_name):
2331 2331 # # repo_name MUST be unicode, since we handle keys in permission
2332 2332 # # dict by unicode
2333 2333         # TODO: verify
2334 2334 # repo_name = safe_str(repo_name)
2335 2335
2336 2336 log.debug(
2337 2337 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
2338 2338 self.required_perms, auth_user, repo_name)
2339 2339
2340 2340 if self.check_permissions(auth_user, repo_name):
2341 2341 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
2342 2342 repo_name, auth_user, 'PermissionMiddleware')
2343 2343 return True
2344 2344
2345 2345 else:
2346 2346 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
2347 2347 repo_name, auth_user, 'PermissionMiddleware')
2348 2348 return False
2349 2349
2350 2350 def check_permissions(self, user, repo_name):
2351 2351 perms = user.permissions_with_scope({'repo_name': repo_name})
2352 2352
2353 2353 try:
2354 2354 user_perms = {perms['repositories'][repo_name]}
2355 2355 except Exception:
2356 2356 log.exception('Error while accessing user permissions')
2357 2357 return False
2358 2358
2359 2359 if self.required_perms.intersection(user_perms):
2360 2360 return True
2361 2361 return False
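# VCS middleware check sketch (names illustrative): called with an already
# constructed AuthUser and the name of the repository being accessed:
#
#   HasPermissionAnyMiddleware('repository.write', 'repository.admin')(auth_user, 'some-repo')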
2362 2362
2363 2363
2364 2364 # SPECIAL VERSION TO HANDLE API AUTH
2365 2365 class _BaseApiPerm(object):
2366 2366 def __init__(self, *perms):
2367 2367 self.required_perms = set(perms)
2368 2368
2369 2369 def __call__(self, check_location=None, user=None, repo_name=None,
2370 2370 group_name=None, user_group_name=None):
2371 2371 cls_name = self.__class__.__name__
2372 2372 check_scope = 'global:%s' % (self.required_perms,)
2373 2373 if repo_name:
2374 2374 check_scope += ', repo_name:%s' % (repo_name,)
2375 2375
2376 2376 if group_name:
2377 2377 check_scope += ', repo_group_name:%s' % (group_name,)
2378 2378
2379 2379 if user_group_name:
2380 2380 check_scope += ', user_group_name:%s' % (user_group_name,)
2381 2381
2382 2382 log.debug('checking cls:%s %s %s @ %s',
2383 2383 cls_name, self.required_perms, check_scope, check_location)
2384 2384 if not user:
2385 2385 log.debug('Empty User passed into arguments')
2386 2386 return False
2387 2387
2388 2388 # process user
2389 2389 if not isinstance(user, AuthUser):
2390 2390 user = AuthUser(user.user_id)
2391 2391 if not check_location:
2392 2392 check_location = 'unspecified'
2393 2393 if self.check_permissions(user.permissions, repo_name, group_name,
2394 2394 user_group_name):
2395 2395 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2396 2396 check_scope, user, check_location)
2397 2397 return True
2398 2398
2399 2399 else:
2400 2400 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2401 2401 check_scope, user, check_location)
2402 2402 return False
2403 2403
2404 2404 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2405 2405 user_group_name=None):
2406 2406 """
2407 2407 Implement in a child class; should return True if permissions are ok,
2408 2408 False otherwise.
2409 2409
2410 2410 :param perm_defs: dict with permission definitions
2411 2411 :param repo_name: repo name
2412 2412 """
2413 2413 raise NotImplementedError()
2414 2414
2415 2415
2416 2416 class HasPermissionAllApi(_BaseApiPerm):
2417 2417 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2418 2418 user_group_name=None):
2419 2419 if self.required_perms.issubset(perm_defs.get('global')):
2420 2420 return True
2421 2421 return False
2422 2422
2423 2423
2424 2424 class HasPermissionAnyApi(_BaseApiPerm):
2425 2425 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2426 2426 user_group_name=None):
2427 2427 if self.required_perms.intersection(perm_defs.get('global')):
2428 2428 return True
2429 2429 return False
2430 2430
2431 2431
2432 2432 class HasRepoPermissionAllApi(_BaseApiPerm):
2433 2433 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2434 2434 user_group_name=None):
2435 2435 try:
2436 2436 _user_perms = {perm_defs['repositories'][repo_name]}
2437 2437 except KeyError:
2438 2438 log.warning(traceback.format_exc())
2439 2439 return False
2440 2440 if self.required_perms.issubset(_user_perms):
2441 2441 return True
2442 2442 return False
2443 2443
2444 2444
2445 2445 class HasRepoPermissionAnyApi(_BaseApiPerm):
2446 2446 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2447 2447 user_group_name=None):
2448 2448 try:
2449 2449 _user_perms = {perm_defs['repositories'][repo_name]}
2450 2450 except KeyError:
2451 2451 log.warning(traceback.format_exc())
2452 2452 return False
2453 2453 if self.required_perms.intersection(_user_perms):
2454 2454 return True
2455 2455 return False
2456 2456
2457 2457
2458 2458 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
2459 2459 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2460 2460 user_group_name=None):
2461 2461 try:
2462 2462 _user_perms = {perm_defs['repositories_groups'][group_name]}
2463 2463 except KeyError:
2464 2464 log.warning(traceback.format_exc())
2465 2465 return False
2466 2466 if self.required_perms.intersection(_user_perms):
2467 2467 return True
2468 2468 return False
2469 2469
2470 2470
2471 2471 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
2472 2472 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2473 2473 user_group_name=None):
2474 2474 try:
2475 2475 _user_perms = {perm_defs['repositories_groups'][group_name]}
2476 2476 except KeyError:
2477 2477 log.warning(traceback.format_exc())
2478 2478 return False
2479 2479 if self.required_perms.issubset(_user_perms):
2480 2480 return True
2481 2481 return False
2482 2482
2483 2483
2484 2484 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
2485 2485 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2486 2486 user_group_name=None):
2487 2487 try:
2488 2488 _user_perms = {perm_defs['user_groups'][user_group_name]}
2489 2489 except KeyError:
2490 2490 log.warning(traceback.format_exc())
2491 2491 return False
2492 2492 if self.required_perms.intersection(_user_perms):
2493 2493 return True
2494 2494 return False
2495 2495
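As a quick illustration of the constructor/call split in these *Api helpers (the apiuser variable and the repository name are assumptions of this sketch):

# require any of the listed permissions for `apiuser` on a repository
_perms = ('repository.read', 'repository.write', 'repository.admin')
has_repo_access = HasRepoPermissionAnyApi(*_perms)

if not has_repo_access(user=apiuser, repo_name='some/repo',
                       check_location='api.example_call'):
    # real API views raise a JSON-RPC style error here; the sketch only flags it
    access_denied = True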
2496 2496
2497 2497 def check_ip_access(source_ip, allowed_ips=None):
2498 2498 """
2499 2499 Checks if source_ip falls within any of the allowed_ips networks.
2500 2500
2501 2501 :param source_ip:
2502 2502 :param allowed_ips: list of allowed ips together with mask
2503 2503 """
2504 2504 log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
2505 2505 source_ip_address = ipaddress.ip_address(source_ip)
2506 2506 if isinstance(allowed_ips, (tuple, list, set)):
2507 2507 for ip in allowed_ips:
2508 2508 #TODO: verify
2509 2509 #ip = safe_str(ip)
2510 2510 try:
2511 2511 network_address = ipaddress.ip_network(ip, strict=False)
2512 2512 if source_ip_address in network_address:
2513 2513 log.debug('IP %s is in network %s', source_ip_address, network_address)
2514 2514 return True
2515 2515 # if we cannot parse an entry, don't crash: skip it and log the
2516 2516 # error; we still want to answer forbidden when a bad IP
2517 2517 # was sent
2518 2518 except Exception:
2519 2519 log.error(traceback.format_exc())
2520 2520 continue
2521 2521 return False
2522 2522
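A minimal sketch of the behaviour above against a CIDR allow-list (the addresses are placeholders; the import mirrors this module's path):

from rhodecode.lib.auth import check_ip_access

assert check_ip_access('192.168.1.10', ['192.168.1.0/24'])            # inside the network
assert not check_ip_access('10.0.0.5', ['192.168.1.0/24', '127.0.0.1/32'])
# an unparsable entry is logged and skipped; the rest of the list still applies
assert check_ip_access('10.0.0.5', ['not-an-ip', '10.0.0.0/8'])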
2523 2523
2524 2524 def get_cython_compat_decorator(wrapper, func):
2525 2525 """
2526 2526 Creates a cython compatible decorator. The previously used
2527 2527 decorator.decorator() function seems to be incompatible with cython.
2528 2528
2529 2529 :param wrapper: __wrapper method of the decorator class
2530 2530 :param func: decorated function
2531 2531 """
2532 2532 @wraps(func)
2533 2533 def local_wrapper(*args, **kwds):
2534 2534 return wrapper(func, *args, **kwds)
2535 2535 local_wrapper.__wrapped__ = func
2536 2536 return local_wrapper
2537 2537
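A small sketch of the intended usage: a decorator class exposes a __wrapper method and wires it through the helper (the log_calls class is hypothetical):

import logging

from rhodecode.lib.auth import get_cython_compat_decorator

example_log = logging.getLogger('example')


class log_calls(object):
    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        example_log.debug('calling %s', func.__name__)
        return func(*fargs, **fkwargs)


@log_calls()
def ping():
    return 'pong'

assert ping() == 'pong'
assert ping.__wrapped__ is not None   # the original function stays reachable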
2538 2538
@@ -1,448 +1,449 b''
1 1 # Copyright (C) 2012-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 RhodeCode task modules, containing all tasks that are supposed to be run
21 21 by the celery daemon
22 22 """
23 23
24 24 import os
25 25 import time
26 26
27 27 from pyramid_mailer.mailer import Mailer
28 28 from pyramid_mailer.message import Message
29 29 from email.utils import formatdate
30 30
31 31 import rhodecode
32 32 from rhodecode.lib import audit_logger
33 33 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
34 34 from rhodecode.lib import hooks_base
35 35 from rhodecode.lib.utils import adopt_for_celery
36 36 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
37 37 from rhodecode.lib.statsd_client import StatsdClient
38 38 from rhodecode.model.db import (
39 39 true, null, Session, IntegrityError, Repository, RepoGroup, User)
40 40 from rhodecode.model.permission import PermissionModel
41 41
42 42
43 43 @async_task(ignore_result=True, base=RequestContextTask)
44 44 def send_email(recipients, subject, body='', html_body='', email_config=None,
45 45 extra_headers=None):
46 46 """
47 47 Sends an email with defined parameters from the .ini files.
48 48
49 49 :param recipients: list of recipients; if this is empty, the defined email
50 50 address from the 'email_to' field is used instead
51 51 :param subject: subject of the mail
52 52 :param body: body of the mail
53 53 :param html_body: html version of body
54 54 :param email_config: specify custom configuration for mailer
55 55 :param extra_headers: specify custom headers
56 56 """
57 57 log = get_logger(send_email)
58 58
59 59 email_config = email_config or rhodecode.CONFIG
60 60
61 61 mail_server = email_config.get('smtp_server') or None
62 62 if mail_server is None:
63 63 log.error("SMTP server information missing. Sending email failed. "
64 64 "Make sure that `smtp_server` variable is configured "
65 65 "inside the .ini file")
66 66 return False
67 67
68 68 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
69 69
70 70 if recipients:
71 71 if isinstance(recipients, str):
72 72 recipients = recipients.split(',')
73 73 else:
74 74 # if recipients are not defined we send to email_config + all admins
75 75 admins = []
76 76 for u in User.query().filter(User.admin == true()).all():
77 77 if u.email:
78 78 admins.append(u.email)
79 79 recipients = []
80 80 config_email = email_config.get('email_to')
81 81 if config_email:
82 82 recipients += [config_email]
83 83 recipients += admins
84 84
85 85 # translate our LEGACY config into the one that pyramid_mailer supports
86 86 email_conf = dict(
87 87 host=mail_server,
88 88 port=email_config.get('smtp_port', 25),
89 89 username=email_config.get('smtp_username'),
90 90 password=email_config.get('smtp_password'),
91 91
92 92 tls=str2bool(email_config.get('smtp_use_tls')),
93 93 ssl=str2bool(email_config.get('smtp_use_ssl')),
94 94
95 95 # SSL key file
96 96 # keyfile='',
97 97
98 98 # SSL certificate file
99 99 # certfile='',
100 100
101 101 # Location of maildir
102 102 # queue_path='',
103 103
104 104 default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'),
105 105
106 106 debug=str2bool(email_config.get('smtp_debug')),
107 107 # /usr/sbin/sendmail Sendmail executable
108 108 # sendmail_app='',
109 109
110 110 # {sendmail_app} -t -i -f {sender} Template for sendmail execution
111 111 # sendmail_template='',
112 112 )
113 113
114 114 if extra_headers is None:
115 115 extra_headers = {}
116 116
117 117 extra_headers.setdefault('Date', formatdate(time.time()))
118 118
119 119 if 'thread_ids' in extra_headers:
120 120 thread_ids = extra_headers.pop('thread_ids')
121 121 extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids)
122 122
123 123 try:
124 124 mailer = Mailer(**email_conf)
125 125
126 126 message = Message(subject=subject,
127 127 sender=email_conf['default_sender'],
128 128 recipients=recipients,
129 129 body=body, html=html_body,
130 130 extra_headers=extra_headers)
131 131 mailer.send_immediately(message)
132 132 statsd = StatsdClient.statsd
133 133 if statsd:
134 134 statsd.incr('rhodecode_email_sent_total')
135 135
136 136 except Exception:
137 137 log.exception('Mail sending failed')
138 138 return False
139 139 return True
140 140
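For reference, this is how the task above can be dispatched through the run_task helper imported at the top of this module (addresses and bodies are placeholders):

# hand the email off to celery; with celery disabled run_task is expected
# to execute it inline (assumption of this sketch)
run_task(
    send_email,
    ['admin@example.com'],
    'Test notification',
    'plain text body',
    '<b>html body</b>',
    extra_headers={'thread_ids': ['rc-example-thread']},
)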
141 141
142 142 @async_task(ignore_result=True, base=RequestContextTask)
143 143 def create_repo(form_data, cur_user):
144 144 from rhodecode.model.repo import RepoModel
145 145 from rhodecode.model.user import UserModel
146 146 from rhodecode.model.scm import ScmModel
147 147 from rhodecode.model.settings import SettingsModel
148 148
149 149 log = get_logger(create_repo)
150 150
151 151 cur_user = UserModel()._get_user(cur_user)
152 152 owner = cur_user
153 153
154 154 repo_name = form_data['repo_name']
155 155 repo_name_full = form_data['repo_name_full']
156 156 repo_type = form_data['repo_type']
157 157 description = form_data['repo_description']
158 158 private = form_data['repo_private']
159 159 clone_uri = form_data.get('clone_uri')
160 160 repo_group = safe_int(form_data['repo_group'])
161 161 copy_fork_permissions = form_data.get('copy_permissions')
162 162 copy_group_permissions = form_data.get('repo_copy_permissions')
163 163 fork_of = form_data.get('fork_parent_id')
164 164 state = form_data.get('repo_state', Repository.STATE_PENDING)
165 165
166 166 # repo creation defaults, private and repo_type are filled in form
167 167 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
168 168 enable_statistics = form_data.get(
169 169 'enable_statistics', defs.get('repo_enable_statistics'))
170 170 enable_locking = form_data.get(
171 171 'enable_locking', defs.get('repo_enable_locking'))
172 172 enable_downloads = form_data.get(
173 173 'enable_downloads', defs.get('repo_enable_downloads'))
174 174
175 175 # set landing rev based on default branches for SCM
176 176 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
177 177
178 178 try:
179 179 RepoModel()._create_repo(
180 180 repo_name=repo_name_full,
181 181 repo_type=repo_type,
182 182 description=description,
183 183 owner=owner,
184 184 private=private,
185 185 clone_uri=clone_uri,
186 186 repo_group=repo_group,
187 187 landing_rev=landing_ref,
188 188 fork_of=fork_of,
189 189 copy_fork_permissions=copy_fork_permissions,
190 190 copy_group_permissions=copy_group_permissions,
191 191 enable_statistics=enable_statistics,
192 192 enable_locking=enable_locking,
193 193 enable_downloads=enable_downloads,
194 194 state=state
195 195 )
196
196 197 Session().commit()
197 198
198 199 # now create this repo on Filesystem
199 200 RepoModel()._create_filesystem_repo(
200 201 repo_name=repo_name,
201 202 repo_type=repo_type,
202 203 repo_group=RepoModel()._get_repo_group(repo_group),
203 204 clone_uri=clone_uri,
204 205 )
205 206 repo = Repository.get_by_repo_name(repo_name_full)
206 207 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
207 208
208 209 # update repo commit caches initially
209 210 repo.update_commit_cache()
210 211
211 212 # set new created state
212 213 repo.set_state(Repository.STATE_CREATED)
213 214 repo_id = repo.repo_id
214 215 repo_data = repo.get_api_data()
215 216
216 217 audit_logger.store(
217 218 'repo.create', action_data={'data': repo_data},
218 219 user=cur_user,
219 220 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
220 221
221 222 Session().commit()
222 223
223 224 PermissionModel().trigger_permission_flush()
224 225
225 226 except Exception as e:
226 227 log.warning('Exception occurred when creating repository, '
227 228 'doing cleanup...', exc_info=True)
228 229 if isinstance(e, IntegrityError):
229 230 Session().rollback()
230 231
231 232 # roll back things manually!
232 233 repo = Repository.get_by_repo_name(repo_name_full)
233 234 if repo:
234 235 Repository.delete(repo.repo_id)
235 236 Session().commit()
236 237 RepoModel()._delete_filesystem_repo(repo)
237 238 log.info('Cleanup of repo %s finished', repo_name_full)
238 239 raise
239 240
240 241 return True
241 242
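A hedged sketch of the form_data shape consumed above; the keys mirror the lookups in this function, the values are placeholders:

form_data = {
    'repo_name': 'docs',                      # short name
    'repo_name_full': 'projects/docs',        # name including the group path
    'repo_type': 'git',
    'repo_description': 'project documentation',
    'repo_private': False,
    'clone_uri': None,
    'repo_group': None,                       # or a repo group id
    'copy_permissions': False,
    'repo_copy_permissions': False,
    'fork_parent_id': None,
}
run_task(create_repo, form_data, cur_user='admin')   # username, id or User object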
242 243
243 244 @async_task(ignore_result=True, base=RequestContextTask)
244 245 def create_repo_fork(form_data, cur_user):
245 246 """
246 247 Creates a fork of a repository using internal VCS methods
247 248 """
248 249 from rhodecode.model.repo import RepoModel
249 250 from rhodecode.model.user import UserModel
250 251
251 252 log = get_logger(create_repo_fork)
252 253
253 254 cur_user = UserModel()._get_user(cur_user)
254 255 owner = cur_user
255 256
256 257 repo_name = form_data['repo_name'] # fork in this case
257 258 repo_name_full = form_data['repo_name_full']
258 259 repo_type = form_data['repo_type']
259 260 description = form_data['description']
260 261 private = form_data['private']
261 262 clone_uri = form_data.get('clone_uri')
262 263 repo_group = safe_int(form_data['repo_group'])
263 264 landing_ref = form_data['landing_rev']
264 265 copy_fork_permissions = form_data.get('copy_permissions')
265 266 fork_id = safe_int(form_data.get('fork_parent_id'))
266 267
267 268 try:
268 269 fork_of = RepoModel()._get_repo(fork_id)
269 270 RepoModel()._create_repo(
270 271 repo_name=repo_name_full,
271 272 repo_type=repo_type,
272 273 description=description,
273 274 owner=owner,
274 275 private=private,
275 276 clone_uri=clone_uri,
276 277 repo_group=repo_group,
277 278 landing_rev=landing_ref,
278 279 fork_of=fork_of,
279 280 copy_fork_permissions=copy_fork_permissions
280 281 )
281 282
282 283 Session().commit()
283 284
284 285 base_path = Repository.base_path()
285 286 source_repo_path = os.path.join(base_path, fork_of.repo_name)
286 287
287 288 # now create this repo on Filesystem
288 289 RepoModel()._create_filesystem_repo(
289 290 repo_name=repo_name,
290 291 repo_type=repo_type,
291 292 repo_group=RepoModel()._get_repo_group(repo_group),
292 293 clone_uri=source_repo_path,
293 294 )
294 295 repo = Repository.get_by_repo_name(repo_name_full)
295 296 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
296 297
297 298 # update repo commit caches initially
298 299 config = repo._config
299 300 config.set('extensions', 'largefiles', '')
300 301 repo.update_commit_cache(config=config)
301 302
302 303 # set new created state
303 304 repo.set_state(Repository.STATE_CREATED)
304 305
305 306 repo_id = repo.repo_id
306 307 repo_data = repo.get_api_data()
307 308 audit_logger.store(
308 309 'repo.fork', action_data={'data': repo_data},
309 310 user=cur_user,
310 311 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
311 312
312 313 Session().commit()
313 314 except Exception as e:
314 315 log.warning('Exception occurred when forking repository, '
315 316 'doing cleanup...', exc_info=True)
316 317 if isinstance(e, IntegrityError):
317 318 Session().rollback()
318 319
319 320 # roll back things manually!
320 321 repo = Repository.get_by_repo_name(repo_name_full)
321 322 if repo:
322 323 Repository.delete(repo.repo_id)
323 324 Session().commit()
324 325 RepoModel()._delete_filesystem_repo(repo)
325 326 log.info('Cleanup of repo %s finished', repo_name_full)
326 327 raise
327 328
328 329 return True
329 330
330 331
331 332 @async_task(ignore_result=True, base=RequestContextTask)
332 333 def repo_maintenance(repoid):
333 334 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
334 335 log = get_logger(repo_maintenance)
335 336 repo = Repository.get_by_id_or_repo_name(repoid)
336 337 if repo:
337 338 maintenance = repo_maintenance_lib.RepoMaintenance()
338 339 tasks = maintenance.get_tasks_for_repo(repo)
339 340 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
340 341 executed_types = maintenance.execute(repo)
341 342 log.debug('Got execution results %s', executed_types)
342 343 else:
343 344 log.debug('Repo `%s` not found or without a clone_url', repoid)
344 345
345 346
346 347 @async_task(ignore_result=True, base=RequestContextTask)
347 348 def check_for_update(send_email_notification=True, email_recipients=None):
348 349 from rhodecode.model.update import UpdateModel
349 350 from rhodecode.model.notification import EmailNotificationModel
350 351
351 352 log = get_logger(check_for_update)
352 353 update_url = UpdateModel().get_update_url()
353 354 cur_ver = rhodecode.__version__
354 355
355 356 try:
356 357 data = UpdateModel().get_update_data(update_url)
357 358
358 359 current_ver = UpdateModel().get_stored_version(fallback=cur_ver)
359 360 latest_ver = data['versions'][0]['version']
360 361 UpdateModel().store_version(latest_ver)
361 362
362 363 if send_email_notification:
363 364 log.debug('Send email notification is enabled. '
364 365 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver)
365 366 if UpdateModel().is_outdated(current_ver, latest_ver):
366 367
367 368 email_kwargs = {
368 369 'current_ver': current_ver,
369 370 'latest_ver': latest_ver,
370 371 }
371 372
372 373 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
373 374 EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs)
374 375
375 376 email_recipients = aslist(email_recipients, sep=',') or \
376 377 [user.email for user in User.get_all_super_admins()]
377 378 run_task(send_email, email_recipients, subject,
378 379 email_body_plaintext, email_body)
379 380
380 381 except Exception:
381 382 log.exception('Failed to check for update')
382 383 raise
383 384
384 385
385 386 def sync_last_update_for_objects(*args, **kwargs):
386 387 skip_repos = kwargs.get('skip_repos')
387 388 if not skip_repos:
388 389 repos = Repository.query() \
389 390 .order_by(Repository.group_id.asc())
390 391
391 392 for repo in repos:
392 393 repo.update_commit_cache()
393 394
394 395 skip_groups = kwargs.get('skip_groups')
395 396 if not skip_groups:
396 397 repo_groups = RepoGroup.query() \
397 398 .filter(RepoGroup.group_parent_id == null())
398 399
399 400 for root_gr in repo_groups:
400 401 for repo_gr in reversed(root_gr.recursive_groups()):
401 402 repo_gr.update_commit_cache()
402 403
403 404
404 405 @async_task(ignore_result=True, base=RequestContextTask)
405 406 def sync_last_update(*args, **kwargs):
406 407 sync_last_update_for_objects(*args, **kwargs)
407 408
408 409
409 410 @async_task(ignore_result=False)
410 411 def beat_check(*args, **kwargs):
411 412 log = get_logger(beat_check)
412 413 log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
413 414 return time.time()
414 415
415 416
416 417 @async_task
417 418 @adopt_for_celery
418 419 def repo_size(extras):
419 420 from rhodecode.lib.hooks_base import repo_size
420 421 return repo_size(extras)
421 422
422 423
423 424 @async_task
424 425 @adopt_for_celery
425 426 def pre_pull(extras):
426 427 from rhodecode.lib.hooks_base import pre_pull
427 428 return pre_pull(extras)
428 429
429 430
430 431 @async_task
431 432 @adopt_for_celery
432 433 def post_pull(extras):
433 434 from rhodecode.lib.hooks_base import post_pull
434 435 return post_pull(extras)
435 436
436 437
437 438 @async_task
438 439 @adopt_for_celery
439 440 def pre_push(extras):
440 441 from rhodecode.lib.hooks_base import pre_push
441 442 return pre_push(extras)
442 443
443 444
444 445 @async_task
445 446 @adopt_for_celery
446 447 def post_push(extras):
447 448 from rhodecode.lib.hooks_base import post_push
448 449 return post_push(extras)
@@ -1,262 +1,262 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import webob
20 from pyramid.threadlocal import get_current_request
21 20
22 21 from rhodecode import events
23 22 from rhodecode.lib import hooks_base
24 23 from rhodecode.lib import utils2
25 24
26 25
27 26 def _supports_repo_type(repo_type):
28 27 if repo_type in ('hg', 'git'):
29 28 return True
30 29 return False
31 30
32 31
33 32 def _get_vcs_operation_context(username, repo_name, repo_type, action):
34 33 # NOTE(dan): import loop
35 34 from rhodecode.lib.base import vcs_operation_context
35 from rhodecode.lib.pyramid_utils import get_current_request
36 36
37 37 check_locking = action in ('pull', 'push')
38 38
39 39 request = get_current_request()
40 40
41 41 try:
42 42 environ = request.environ
43 43 except TypeError:
44 44 # we might use this outside of request context
45 45 environ = {}
46 46
47 47 if not environ:
48 48 environ = webob.Request.blank('').environ
49 49
50 50 extras = vcs_operation_context(environ, repo_name, username, action, repo_type, check_locking)
51 51 return utils2.AttributeDict(extras)
52 52
53 53
54 54 def trigger_post_push_hook(username, action, hook_type, repo_name, repo_type, commit_ids):
55 55 """
56 56 Triggers push action hooks
57 57
58 58 :param username: username who pushes
59 59 :param action: push/push_local/push_remote
60 60 :param hook_type: type of hook executed
61 61 :param repo_name: name of repo
62 62 :param repo_type: the type of SCM repo
63 63 :param commit_ids: list of commit ids that we pushed
64 64 """
65 65 extras = _get_vcs_operation_context(username, repo_name, repo_type, action)
66 66 extras.commit_ids = commit_ids
67 67 extras.hook_type = hook_type
68 68 hooks_base.post_push(extras)
69 69
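An illustrative call of the trigger above after a server-side push; the hook_type value and the commit hash are assumptions of the sketch:

trigger_post_push_hook(
    username='some-user',
    action='push_local',                 # one of push/push_local/push_remote
    hook_type='post_push',
    repo_name='projects/docs',
    repo_type='git',
    commit_ids=['0' * 40],               # placeholder commit hash
)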
70 70
71 71 def trigger_comment_commit_hooks(username, repo_name, repo_type, repo, data=None):
72 72 """
73 73 Triggers when a comment is made on a commit
74 74
75 75 :param username: username who creates the comment
76 76 :param repo_name: name of target repo
77 77 :param repo_type: the type of SCM target repo
78 78 :param repo: the repo object we trigger the event for
79 79 :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj}
80 80 """
81 81 if not _supports_repo_type(repo_type):
82 82 return
83 83
84 84 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit')
85 85
86 86 comment = data['comment']
87 87 commit = data['commit']
88 88
89 89 events.trigger(events.RepoCommitCommentEvent(repo, commit, comment))
90 90 extras.update(repo.get_dict())
91 91
92 92 extras.commit = commit.serialize()
93 93 extras.comment = comment.get_api_data()
94 94 extras.created_by = username
95 95 hooks_base.comment_commit_repository(**extras)
96 96
97 97
98 98 def trigger_comment_commit_edit_hooks(username, repo_name, repo_type, repo, data=None):
99 99 """
100 100 Triggers when a comment is edited on a commit
101 101
102 102 :param username: username who edits the comment
103 103 :param repo_name: name of target repo
104 104 :param repo_type: the type of SCM target repo
105 105 :param repo: the repo object we trigger the event for
106 106 :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj}
107 107 """
108 108 if not _supports_repo_type(repo_type):
109 109 return
110 110
111 111 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit')
112 112
113 113 comment = data['comment']
114 114 commit = data['commit']
115 115
116 116 events.trigger(events.RepoCommitCommentEditEvent(repo, commit, comment))
117 117 extras.update(repo.get_dict())
118 118
119 119 extras.commit = commit.serialize()
120 120 extras.comment = comment.get_api_data()
121 121 extras.created_by = username
122 122 hooks_base.comment_edit_commit_repository(**extras)
123 123
124 124
125 125 def trigger_create_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
126 126 """
127 127 Triggers create pull request action hooks
128 128
129 129 :param username: username who creates the pull request
130 130 :param repo_name: name of target repo
131 131 :param repo_type: the type of SCM target repo
132 132 :param pull_request: the pull request that was created
133 133 :param data: extra data for specific events e.g {'comment': comment_obj}
134 134 """
135 135 if not _supports_repo_type(repo_type):
136 136 return
137 137
138 138 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'create_pull_request')
139 139 events.trigger(events.PullRequestCreateEvent(pull_request))
140 140 extras.update(pull_request.get_api_data(with_merge_state=False))
141 141 hooks_base.create_pull_request(**extras)
142 142
143 143
144 144 def trigger_merge_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
145 145 """
146 146 Triggers merge pull request action hooks
147 147
148 148 :param username: username who merges the pull request
149 149 :param repo_name: name of target repo
150 150 :param repo_type: the type of SCM target repo
151 151 :param pull_request: the pull request that was merged
152 152 :param data: extra data for specific events e.g {'comment': comment_obj}
153 153 """
154 154 if not _supports_repo_type(repo_type):
155 155 return
156 156
157 157 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'merge_pull_request')
158 158 events.trigger(events.PullRequestMergeEvent(pull_request))
159 159 extras.update(pull_request.get_api_data())
160 160 hooks_base.merge_pull_request(**extras)
161 161
162 162
163 163 def trigger_close_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
164 164 """
165 165 Triggers close pull request action hooks
166 166
167 167 :param username: username who closes the pull request
168 168 :param repo_name: name of target repo
169 169 :param repo_type: the type of SCM target repo
170 170 :param pull_request: the pull request that was closed
171 171 :param data: extra data for specific events e.g {'comment': comment_obj}
172 172 """
173 173 if not _supports_repo_type(repo_type):
174 174 return
175 175
176 176 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'close_pull_request')
177 177 events.trigger(events.PullRequestCloseEvent(pull_request))
178 178 extras.update(pull_request.get_api_data())
179 179 hooks_base.close_pull_request(**extras)
180 180
181 181
182 182 def trigger_review_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
183 183 """
184 184 Triggers review status change pull request action hooks
185 185
186 186 :param username: username who changes the review status
187 187 :param repo_name: name of target repo
188 188 :param repo_type: the type of SCM target repo
189 189 :param pull_request: the pull request whose review status changed
190 190 :param data: extra data for specific events e.g {'comment': comment_obj}
191 191 """
192 192 if not _supports_repo_type(repo_type):
193 193 return
194 194
195 195 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'review_pull_request')
196 196 status = data.get('status')
197 197 events.trigger(events.PullRequestReviewEvent(pull_request, status))
198 198 extras.update(pull_request.get_api_data())
199 199 hooks_base.review_pull_request(**extras)
200 200
201 201
202 202 def trigger_comment_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
203 203 """
204 204 Triggers when a comment is made on a pull request
205 205
206 206 :param username: username who comments on the pull request
207 207 :param repo_name: name of target repo
208 208 :param repo_type: the type of SCM target repo
209 209 :param pull_request: the pull request that comment was made on
210 210 :param data: extra data for specific events e.g {'comment': comment_obj}
211 211 """
212 212 if not _supports_repo_type(repo_type):
213 213 return
214 214
215 215 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request')
216 216
217 217 comment = data['comment']
218 218 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
219 219 extras.update(pull_request.get_api_data())
220 220 extras.comment = comment.get_api_data()
221 221 hooks_base.comment_pull_request(**extras)
222 222
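The data convention used by the comment triggers above, sketched with assumed pull_request and comment objects coming from the calling code:

trigger_comment_pull_request_hook(
    username='reviewer',
    repo_name='projects/docs',
    repo_type='git',
    pull_request=pull_request,           # pull request object (assumed)
    data={'comment': comment},           # comment object (assumed)
)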
223 223
224 224 def trigger_comment_pull_request_edit_hook(username, repo_name, repo_type, pull_request, data=None):
225 225 """
226 226 Triggers when a comment was edited on a pull request
227 227
228 228 :param username: username who made the edit
229 229 :param repo_name: name of target repo
230 230 :param repo_type: the type of SCM target repo
231 231 :param pull_request: the pull request that comment was made on
232 232 :param data: extra data for specific events e.g {'comment': comment_obj}
233 233 """
234 234 if not _supports_repo_type(repo_type):
235 235 return
236 236
237 237 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request')
238 238
239 239 comment = data['comment']
240 240 events.trigger(events.PullRequestCommentEditEvent(pull_request, comment))
241 241 extras.update(pull_request.get_api_data())
242 242 extras.comment = comment.get_api_data()
243 243 hooks_base.comment_edit_pull_request(**extras)
244 244
245 245
246 246 def trigger_update_pull_request_hook(username, repo_name, repo_type, pull_request, data=None):
247 247 """
248 248 Triggers update pull request action hooks
249 249
250 250 :param username: username who updates the pull request
251 251 :param repo_name: name of target repo
252 252 :param repo_type: the type of SCM target repo
253 253 :param pull_request: the pull request that was updated
254 254 :param data: extra data for specific events e.g {'comment': comment_obj}
255 255 """
256 256 if not _supports_repo_type(repo_type):
257 257 return
258 258
259 259 extras = _get_vcs_operation_context(username, repo_name, repo_type, 'update_pull_request')
260 260 events.trigger(events.PullRequestUpdateEvent(pull_request))
261 261 extras.update(pull_request.get_api_data())
262 262 hooks_base.update_pull_request(**extras)
@@ -1,48 +1,57 b''
1 1
2 2
3 3 # Copyright (C) 2016-2023 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import configparser
23 23
24 24 from rhodecode.lib.config_utils import get_config
25 25 from pyramid.paster import bootstrap as pyramid_bootstrap, setup_logging # pragma: no cover
26 from pyramid.threadlocal import get_current_request as pyramid_current_request
26 27
27 28
28 29 def bootstrap(config_uri, options=None, env=None):
29 30 from rhodecode.lib.utils2 import AttributeDict
30 31 from rhodecode.lib.request import Request
31 32
32 33 if env:
33 34 os.environ.update(env)
34 35
35 36 config = get_config(config_uri)
36 37 base_url = 'http://rhodecode.local'
37 38 try:
38 39 base_url = config.get('app:main', 'app.base_url')
39 40 except (configparser.NoSectionError, configparser.NoOptionError):
40 41 pass
41 42
42 43 request = Request.blank('/', base_url=base_url)
43 44 # inject a fake running user for the bootstrap request
44 45 request.user = AttributeDict({'username': 'bootstrap-user',
45 46 'user_id': 1,
46 47 'ip_addr': '127.0.0.1'})
47 48 return pyramid_bootstrap(config_uri, request=request, options=options)
48 49
50
51 def get_current_request():
52 pyramid_req = pyramid_current_request()
53 if not pyramid_req:
54 # maybe we're in celery context and need to get the PYRAMID_REQUEST
55 from rhodecode.lib.celerylib.loader import celery_app
56 pyramid_req = celery_app.conf['PYRAMID_REQUEST']
57 return pyramid_req
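A brief sketch of the two entry points in this module: bootstrap() follows the standard pyramid.paster flow, and get_current_request() is the fallback that lets celery-based events resolve a request (the .ini path is a placeholder):

from rhodecode.lib.pyramid_utils import bootstrap, get_current_request

# shell / maintenance-script style usage
env = bootstrap('/path/to/rhodecode.ini')
request = env['request']
print(request.user.username)    # 'bootstrap-user'
env['closer']()

# inside a celery worker there is no pyramid threadlocal request, so the
# helper returns the one stored under PYRAMID_REQUEST in the celery config
request = get_current_request()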
@@ -1,1203 +1,1203 b''
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 import os
20 20 import re
21 21 import shutil
22 22 import time
23 23 import logging
24 24 import traceback
25 25 import datetime
26 26
27 27 from pyramid.threadlocal import get_current_request
28 28 from sqlalchemy.orm import aliased
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode import events
32 32 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 33 from rhodecode.lib.caching_query import FromCache
34 34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
35 35 from rhodecode.lib import hooks_base
36 36 from rhodecode.lib.user_log_filter import user_log_filter
37 37 from rhodecode.lib.utils import make_db_config
38 38 from rhodecode.lib.utils2 import (
39 39 safe_str, remove_prefix, obfuscate_url_pw,
40 40 get_current_rhodecode_user, safe_int, action_logger_generic)
41 41 from rhodecode.lib.vcs.backends import get_backend
42 42 from rhodecode.lib.vcs.nodes import NodeKind
43 43 from rhodecode.model import BaseModel
44 44 from rhodecode.model.db import (
45 45 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 49 from rhodecode.model.permission import PermissionModel
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class RepoModel(BaseModel):
56 56
57 57 cls = Repository
58 58
59 59 def _get_user_group(self, users_group):
60 60 return self._get_instance(UserGroup, users_group,
61 61 callback=UserGroup.get_by_group_name)
62 62
63 63 def _get_repo_group(self, repo_group):
64 64 return self._get_instance(RepoGroup, repo_group,
65 65 callback=RepoGroup.get_by_group_name)
66 66
67 67 def _create_default_perms(self, repository, private):
68 68 # create default permission
69 69 default = 'repository.read'
70 70 def_user = User.get_default_user()
71 71 for p in def_user.user_perms:
72 72 if p.permission.permission_name.startswith('repository.'):
73 73 default = p.permission.permission_name
74 74 break
75 75
76 76 default_perm = 'repository.none' if private else default
77 77
78 78 repo_to_perm = UserRepoToPerm()
79 79 repo_to_perm.permission = Permission.get_by_key(default_perm)
80 80
81 81 repo_to_perm.repository = repository
82 82 repo_to_perm.user = def_user
83 83
84 84 return repo_to_perm
85 85
86 86 @LazyProperty
87 87 def repos_path(self):
88 88 """
89 89 Gets the repositories root path from database
90 90 """
91 91 settings_model = VcsSettingsModel(sa=self.sa)
92 92 return settings_model.get_repos_location()
93 93
94 94 def get(self, repo_id):
95 95 repo = self.sa.query(Repository) \
96 96 .filter(Repository.repo_id == repo_id)
97 97
98 98 return repo.scalar()
99 99
100 100 def get_repo(self, repository):
101 101 return self._get_repo(repository)
102 102
103 103 def get_by_repo_name(self, repo_name, cache=False):
104 104 repo = self.sa.query(Repository) \
105 105 .filter(Repository.repo_name == repo_name)
106 106
107 107 if cache:
108 108 name_key = _hash_key(repo_name)
109 109 repo = repo.options(
110 110 FromCache("sql_cache_short", f"get_repo_{name_key}"))
111 111 return repo.scalar()
112 112
113 113 def _extract_id_from_repo_name(self, repo_name):
114 114 if repo_name.startswith('/'):
115 115 repo_name = repo_name.lstrip('/')
116 116 by_id_match = re.match(r'^_(\d+)', repo_name)
117 117 if by_id_match:
118 118 return by_id_match.groups()[0]
119 119
120 120 def get_repo_by_id(self, repo_name):
121 121 """
122 122 Extracts a repository by its id from special URLs.
123 123 Example URL: _11/repo_name
124 124
125 125 :param repo_name:
126 126 :return: repo object if matched else None
127 127 """
128 128 _repo_id = None
129 129 try:
130 130 _repo_id = self._extract_id_from_repo_name(repo_name)
131 131 if _repo_id:
132 132 return self.get(_repo_id)
133 133 except Exception:
134 134 log.exception('Failed to extract repo_name from URL')
135 135 if _repo_id:
136 136 Session().rollback()
137 137
138 138 return None
139 139
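A small illustration of the id-based lookup described above (the id and name are placeholders):

model = RepoModel()
assert model._extract_id_from_repo_name('_11/some/repo') == '11'
repo = model.get_repo_by_id('_11/some/repo')   # Repository with id 11, or None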
140 140 def get_repos_for_root(self, root, traverse=False):
141 141 if traverse:
142 142 like_expression = u'{}%'.format(safe_str(root))
143 143 repos = Repository.query().filter(
144 144 Repository.repo_name.like(like_expression)).all()
145 145 else:
146 146 if root and not isinstance(root, RepoGroup):
147 147 raise ValueError(
148 148 'Root must be an instance '
149 149 'of RepoGroup, got:{} instead'.format(type(root)))
150 150 repos = Repository.query().filter(Repository.group == root).all()
151 151 return repos
152 152
153 153 def get_url(self, repo, request=None, permalink=False):
154 154 if not request:
155 155 request = get_current_request()
156 156
157 157 if not request:
158 158 return
159 159
160 160 if permalink:
161 161 return request.route_url(
162 162 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
163 163 else:
164 164 return request.route_url(
165 165 'repo_summary', repo_name=safe_str(repo.repo_name))
166 166
167 167 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
168 168 if not request:
169 169 request = get_current_request()
170 170
171 171 if not request:
172 172 return
173 173
174 174 if permalink:
175 175 return request.route_url(
176 176 'repo_commit', repo_name=safe_str(repo.repo_id),
177 177 commit_id=commit_id)
178 178
179 179 else:
180 180 return request.route_url(
181 181 'repo_commit', repo_name=safe_str(repo.repo_name),
182 182 commit_id=commit_id)
183 183
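For completeness, how the two URL helpers above are typically used; the repo, commit_id and request values are assumed to come from the calling view:

# human-readable summary URL vs. a stable permalink based on the repo id
summary_url = RepoModel().get_url(repo, request=request)
permalink = RepoModel().get_url(repo, request=request, permalink=True)

# direct link to a single commit
commit_url = RepoModel().get_commit_url(repo, commit_id, request=request)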
184 184 def get_repo_log(self, repo, filter_term):
185 185 repo_log = UserLog.query()\
186 186 .filter(or_(UserLog.repository_id == repo.repo_id,
187 187 UserLog.repository_name == repo.repo_name))\
188 188 .options(joinedload(UserLog.user))\
189 189 .options(joinedload(UserLog.repository))\
190 190 .order_by(UserLog.action_date.desc())
191 191
192 192 repo_log = user_log_filter(repo_log, filter_term)
193 193 return repo_log
194 194
195 195 @classmethod
196 196 def update_commit_cache(cls, repositories=None):
197 197 if not repositories:
198 198 repositories = Repository.getAll()
199 199 for repo in repositories:
200 200 repo.update_commit_cache()
201 201
202 202 def get_repos_as_dict(self, repo_list=None, admin=False,
203 203 super_user_actions=False, short_name=None):
204 204
205 205 _render = get_current_request().get_partial_renderer(
206 206 'rhodecode:templates/data_table/_dt_elements.mako')
207 207 c = _render.get_call_context()
208 208 h = _render.get_helpers()
209 209
210 210 def quick_menu(repo_name):
211 211 return _render('quick_menu', repo_name)
212 212
213 213 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
214 214 if short_name is not None:
215 215 short_name_var = short_name
216 216 else:
217 217 short_name_var = not admin
218 218 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
219 219 short_name=short_name_var, admin=False)
220 220
221 221 def last_change(last_change):
222 222 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
223 223 ts = time.time()
224 224 utc_offset = (datetime.datetime.fromtimestamp(ts)
225 225 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
226 226 last_change = last_change + datetime.timedelta(seconds=utc_offset)
227 227
228 228 return _render("last_change", last_change)
229 229
230 230 def rss_lnk(repo_name):
231 231 return _render("rss", repo_name)
232 232
233 233 def atom_lnk(repo_name):
234 234 return _render("atom", repo_name)
235 235
236 236 def last_rev(repo_name, cs_cache):
237 237 return _render('revision', repo_name, cs_cache.get('revision'),
238 238 cs_cache.get('raw_id'), cs_cache.get('author'),
239 239 cs_cache.get('message'), cs_cache.get('date'))
240 240
241 241 def desc(desc):
242 242 return _render('repo_desc', desc, c.visual.stylify_metatags)
243 243
244 244 def state(repo_state):
245 245 return _render("repo_state", repo_state)
246 246
247 247 def repo_actions(repo_name):
248 248 return _render('repo_actions', repo_name, super_user_actions)
249 249
250 250 def user_profile(username):
251 251 return _render('user_profile', username)
252 252
253 253 repos_data = []
254 254 for repo in repo_list:
255 255 # NOTE(marcink): because we use only raw column we need to load it like that
256 256 changeset_cache = Repository._load_changeset_cache(
257 257 repo.repo_id, repo._changeset_cache)
258 258
259 259 row = {
260 260 "menu": quick_menu(repo.repo_name),
261 261
262 262 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
263 263 repo.private, repo.archived, repo.fork_repo_name),
264 264
265 265 "desc": desc(h.escape(repo.description)),
266 266
267 267 "last_change": last_change(repo.updated_on),
268 268
269 269 "last_changeset": last_rev(repo.repo_name, changeset_cache),
270 270 "last_changeset_raw": changeset_cache.get('revision'),
271 271
272 272 "owner": user_profile(repo.owner_username),
273 273
274 274 "state": state(repo.repo_state),
275 275 "rss": rss_lnk(repo.repo_name),
276 276 "atom": atom_lnk(repo.repo_name),
277 277 }
278 278 if admin:
279 279 row.update({
280 280 "action": repo_actions(repo.repo_name),
281 281 })
282 282 repos_data.append(row)
283 283
284 284 return repos_data
285 285
286 286 def get_repos_data_table(
287 287 self, draw, start, limit,
288 288 search_q, order_by, order_dir,
289 289 auth_user, repo_group_id):
290 290 from rhodecode.model.scm import RepoList
291 291
292 292 _perms = ['repository.read', 'repository.write', 'repository.admin']
293 293
294 294 repos = Repository.query() \
295 295 .filter(Repository.group_id == repo_group_id) \
296 296 .all()
297 297 auth_repo_list = RepoList(
298 298 repos, perm_set=_perms,
299 299 extra_kwargs=dict(user=auth_user))
300 300
301 301 allowed_ids = [-1]
302 302 for repo in auth_repo_list:
303 303 allowed_ids.append(repo.repo_id)
304 304
305 305 repos_data_total_count = Repository.query() \
306 306 .filter(Repository.group_id == repo_group_id) \
307 307 .filter(or_(
308 308 # generate multiple IN to fix limitation problems
309 309 *in_filter_generator(Repository.repo_id, allowed_ids))
310 310 ) \
311 311 .count()
312 312
313 313 RepoFork = aliased(Repository)
314 314 OwnerUser = aliased(User)
315 315 base_q = Session.query(
316 316 Repository.repo_id,
317 317 Repository.repo_name,
318 318 Repository.description,
319 319 Repository.repo_type,
320 320 Repository.repo_state,
321 321 Repository.private,
322 322 Repository.archived,
323 323 Repository.updated_on,
324 324 Repository._changeset_cache,
325 325 RepoFork.repo_name.label('fork_repo_name'),
326 326 OwnerUser.username.label('owner_username'),
327 327 ) \
328 328 .filter(Repository.group_id == repo_group_id) \
329 329 .filter(or_(
330 330 # generate multiple IN to fix limitation problems
331 331 *in_filter_generator(Repository.repo_id, allowed_ids))
332 332 ) \
333 333 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
334 334 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
335 335
336 336 repos_data_total_filtered_count = base_q.count()
337 337
338 338 sort_defined = False
339 339 if order_by == 'repo_name':
340 340 sort_col = func.lower(Repository.repo_name)
341 341 sort_defined = True
342 342 elif order_by == 'user_username':
343 343 sort_col = User.username
344 344 else:
345 345 sort_col = getattr(Repository, order_by, None)
346 346
347 347 if sort_defined or sort_col:
348 348 if order_dir == 'asc':
349 349 sort_col = sort_col.asc()
350 350 else:
351 351 sort_col = sort_col.desc()
352 352
353 353 base_q = base_q.order_by(sort_col)
354 354 base_q = base_q.offset(start).limit(limit)
355 355
356 356 repos_list = base_q.all()
357 357
358 358 repos_data = RepoModel().get_repos_as_dict(
359 359 repo_list=repos_list, admin=False)
360 360
361 361 data = ({
362 362 'draw': draw,
363 363 'data': repos_data,
364 364 'recordsTotal': repos_data_total_count,
365 365 'recordsFiltered': repos_data_total_filtered_count,
366 366 })
367 367 return data
368 368
369 369 def _get_defaults(self, repo_name):
370 370 """
371 371 Gets information about a repository and returns a dict for
372 372 usage in forms
373 373
374 374 :param repo_name:
375 375 """
376 376
377 377 repo_info = Repository.get_by_repo_name(repo_name)
378 378
379 379 if repo_info is None:
380 380 return None
381 381
382 382 defaults = repo_info.get_dict()
383 383 defaults['repo_name'] = repo_info.just_name
384 384
385 385 groups = repo_info.groups_with_parents
386 386 parent_group = groups[-1] if groups else None
387 387
388 388 # we use -1 because that is how an empty group is marked in the HTML form
389 389 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
390 390
391 391 keys_to_process = (
392 392 {'k': 'repo_type', 'strip': False},
393 393 {'k': 'repo_enable_downloads', 'strip': True},
394 394 {'k': 'repo_description', 'strip': True},
395 395 {'k': 'repo_enable_locking', 'strip': True},
396 396 {'k': 'repo_landing_rev', 'strip': True},
397 397 {'k': 'clone_uri', 'strip': False},
398 398 {'k': 'push_uri', 'strip': False},
399 399 {'k': 'repo_private', 'strip': True},
400 400 {'k': 'repo_enable_statistics', 'strip': True}
401 401 )
402 402
403 403 for item in keys_to_process:
404 404 attr = item['k']
405 405 if item['strip']:
406 406 attr = remove_prefix(item['k'], 'repo_')
407 407
408 408 val = defaults[attr]
409 409 if item['k'] == 'repo_landing_rev':
410 410 val = ':'.join(defaults[attr])
411 411 defaults[item['k']] = val
412 412 if item['k'] == 'clone_uri':
413 413 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
414 414 if item['k'] == 'push_uri':
415 415 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
416 416
417 417 # fill owner
418 418 if repo_info.user:
419 419 defaults.update({'user': repo_info.user.username})
420 420 else:
421 421 replacement_user = User.get_first_super_admin().username
422 422 defaults.update({'user': replacement_user})
423 423
424 424 return defaults
425 425
426 426 def update(self, repo, **kwargs):
427 427 try:
428 428 cur_repo = self._get_repo(repo)
429 429 source_repo_name = cur_repo.repo_name
430 430
431 431 affected_user_ids = []
432 432 if 'user' in kwargs:
433 433 old_owner_id = cur_repo.user.user_id
434 434 new_owner = User.get_by_username(kwargs['user'])
435 435 cur_repo.user = new_owner
436 436
437 437 if old_owner_id != new_owner.user_id:
438 438 affected_user_ids = [new_owner.user_id, old_owner_id]
439 439
440 440 if 'repo_group' in kwargs:
441 441 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
442 442 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
443 443
444 444 update_keys = [
445 445 (1, 'repo_description'),
446 446 (1, 'repo_landing_rev'),
447 447 (1, 'repo_private'),
448 448 (1, 'repo_enable_downloads'),
449 449 (1, 'repo_enable_locking'),
450 450 (1, 'repo_enable_statistics'),
451 451 (0, 'clone_uri'),
452 452 (0, 'push_uri'),
453 453 (0, 'fork_id')
454 454 ]
455 455 for strip, k in update_keys:
456 456 if k in kwargs:
457 457 val = kwargs[k]
458 458 if strip:
459 459 k = remove_prefix(k, 'repo_')
460 460
461 461 setattr(cur_repo, k, val)
462 462
463 463 new_name = cur_repo.get_new_name(kwargs['repo_name'])
464 464 cur_repo.repo_name = new_name
465 465
466 466 # if private flag is set, reset default permission to NONE
467 467 if kwargs.get('repo_private'):
468 468 EMPTY_PERM = 'repository.none'
469 469 RepoModel().grant_user_permission(
470 470 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
471 471 )
472 472 if kwargs.get('repo_landing_rev'):
473 473 landing_rev_val = kwargs['repo_landing_rev']
474 474 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
475 475
476 476 # handle extra fields
477 477 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
478 478 k = RepositoryField.un_prefix_key(field)
479 479 ex_field = RepositoryField.get_by_key_name(
480 480 key=k, repo=cur_repo)
481 481 if ex_field:
482 482 ex_field.field_value = kwargs[field]
483 483 self.sa.add(ex_field)
484 484
485 485 self.sa.add(cur_repo)
486 486
487 487 if source_repo_name != new_name:
488 488 # rename repository
489 489 self._rename_filesystem_repo(
490 490 old=source_repo_name, new=new_name)
491 491
492 492 if affected_user_ids:
493 493 PermissionModel().trigger_permission_flush(affected_user_ids)
494 494
495 495 return cur_repo
496 496 except Exception:
497 497 log.error(traceback.format_exc())
498 498 raise
499 499
500 500 def _create_repo(self, repo_name, repo_type, description, owner,
501 501 private=False, clone_uri=None, repo_group=None,
502 502 landing_rev=None, fork_of=None,
503 503 copy_fork_permissions=False, enable_statistics=False,
504 504 enable_locking=False, enable_downloads=False,
505 505 copy_group_permissions=False,
506 506 state=Repository.STATE_PENDING):
507 507 """
508 508 Create a repository inside the database with PENDING state; this should
509 509 only be executed by create(), with the exception of importing existing
510 510 repos
511 511 """
512 512 from rhodecode.model.scm import ScmModel
513 513
514 514 owner = self._get_user(owner)
515 515 fork_of = self._get_repo(fork_of)
516 516 repo_group = self._get_repo_group(safe_int(repo_group))
517 517 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
518 518 landing_rev = landing_rev or default_landing_ref
519 519
520 520 try:
521 521 repo_name = safe_str(repo_name)
522 522 description = safe_str(description)
523 523 # repo_name is just the name of the repository,
524 524 # while repo_name_full is a fully qualified name that combines
525 525 # the name with the path of its group
526 526 repo_name_full = repo_name
527 527 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
528 528
529 529 new_repo = Repository()
530 530 new_repo.repo_state = state
531 531 new_repo.enable_statistics = False
532 532 new_repo.repo_name = repo_name_full
533 533 new_repo.repo_type = repo_type
534 534 new_repo.user = owner
535 535 new_repo.group = repo_group
536 536 new_repo.description = description or repo_name
537 537 new_repo.private = private
538 538 new_repo.archived = False
539 539 new_repo.clone_uri = clone_uri
540 540 new_repo.landing_rev = landing_rev
541 541
542 542 new_repo.enable_statistics = enable_statistics
543 543 new_repo.enable_locking = enable_locking
544 544 new_repo.enable_downloads = enable_downloads
545 545
546 546 if repo_group:
547 547 new_repo.enable_locking = repo_group.enable_locking
548 548
549 549 if fork_of:
550 550 parent_repo = fork_of
551 551 new_repo.fork = parent_repo
552 552
553 553 events.trigger(events.RepoPreCreateEvent(new_repo))
554 554
555 555 self.sa.add(new_repo)
556 556
557 557 EMPTY_PERM = 'repository.none'
558 558 if fork_of and copy_fork_permissions:
559 559 repo = fork_of
560 560 user_perms = UserRepoToPerm.query() \
561 561 .filter(UserRepoToPerm.repository == repo).all()
562 562 group_perms = UserGroupRepoToPerm.query() \
563 563 .filter(UserGroupRepoToPerm.repository == repo).all()
564 564
565 565 for perm in user_perms:
566 566 UserRepoToPerm.create(
567 567 perm.user, new_repo, perm.permission)
568 568
569 569 for perm in group_perms:
570 570 UserGroupRepoToPerm.create(
571 571 perm.users_group, new_repo, perm.permission)
572 572 # in case we copy permissions and also set this repo to private
573 573 # override the default user permission to make it a private repo
574 574 if private:
575 575 RepoModel(self.sa).grant_user_permission(
576 576 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
577 577
578 578 elif repo_group and copy_group_permissions:
579 579 user_perms = UserRepoGroupToPerm.query() \
580 580 .filter(UserRepoGroupToPerm.group == repo_group).all()
581 581
582 582 group_perms = UserGroupRepoGroupToPerm.query() \
583 583 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
584 584
585 585 for perm in user_perms:
586 586 perm_name = perm.permission.permission_name.replace(
587 587 'group.', 'repository.')
588 588 perm_obj = Permission.get_by_key(perm_name)
589 589 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
590 590
591 591 for perm in group_perms:
592 592 perm_name = perm.permission.permission_name.replace(
593 593 'group.', 'repository.')
594 594 perm_obj = Permission.get_by_key(perm_name)
595 595 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
596 596
597 597 if private:
598 598 RepoModel(self.sa).grant_user_permission(
599 599 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
600 600
601 601 else:
602 602 perm_obj = self._create_default_perms(new_repo, private)
603 603 self.sa.add(perm_obj)
604 604
605 605 # now automatically start following this repository as owner
606 606 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
607 607
608 608 # we need to flush here in order to check that the database won't
609 609 # throw any exceptions; filesystem dirs are created at the very end
610 610 self.sa.flush()
611 events.trigger(events.RepoCreateEvent(new_repo))
611 events.trigger(events.RepoCreateEvent(new_repo, actor=owner))
612 612 return new_repo
613 613
614 614 except Exception:
615 615 log.error(traceback.format_exc())
616 616 raise
617 617
618 618 def create(self, form_data, cur_user):
619 619 """
620 620 Create repository using celery tasks
621 621
622 622 :param form_data:
623 623 :param cur_user:
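
Example (a hedged sketch; the exact ``form_data`` keys expected by the
celery task are an assumption, mirrored from :meth:`_create_repo`)::

    form_data = {'repo_name': 'manual', 'repo_type': 'git',
                 'description': '', 'private': False}
    task_result = RepoModel().create(form_data, cur_user='admin')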
624 624 """
625 625 from rhodecode.lib.celerylib import tasks, run_task
626 626 return run_task(tasks.create_repo, form_data, cur_user)
627 627
628 628 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
629 629 perm_deletions=None, check_perms=True,
630 630 cur_user=None):
631 631 if not perm_additions:
632 632 perm_additions = []
633 633 if not perm_updates:
634 634 perm_updates = []
635 635 if not perm_deletions:
636 636 perm_deletions = []
637 637
638 638 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
639 639
640 640 changes = {
641 641 'added': [],
642 642 'updated': [],
643 643 'deleted': [],
644 644 'default_user_changed': None
645 645 }
646 646
647 647 repo = self._get_repo(repo)
648 648
649 649 # update permissions
650 650 for member_id, perm, member_type in perm_updates:
651 651 member_id = int(member_id)
652 652 if member_type == 'user':
653 653 member_name = User.get(member_id).username
654 654 if member_name == User.DEFAULT_USER:
655 655 # NOTE(dan): detect if we changed permissions for default user
656 656 perm_obj = self.sa.query(UserRepoToPerm) \
657 657 .filter(UserRepoToPerm.user_id == member_id) \
658 658 .filter(UserRepoToPerm.repository == repo) \
659 659 .scalar()
660 660 if perm_obj and perm_obj.permission.permission_name != perm:
661 661 changes['default_user_changed'] = True
662 662
663 663 # this also updates the current permission if found
664 664 self.grant_user_permission(
665 665 repo=repo, user=member_id, perm=perm)
666 666 elif member_type == 'user_group':
667 667 # check if we have permissions to alter this usergroup
668 668 member_name = UserGroup.get(member_id).users_group_name
669 669 if not check_perms or HasUserGroupPermissionAny(
670 670 *req_perms)(member_name, user=cur_user):
671 671 self.grant_user_group_permission(
672 672 repo=repo, group_name=member_id, perm=perm)
673 673 else:
674 674 raise ValueError("member_type must be 'user' or 'user_group' "
675 675 "got {} instead".format(member_type))
676 676 changes['updated'].append({'type': member_type, 'id': member_id,
677 677 'name': member_name, 'new_perm': perm})
678 678
679 679 # set new permissions
680 680 for member_id, perm, member_type in perm_additions:
681 681 member_id = int(member_id)
682 682 if member_type == 'user':
683 683 member_name = User.get(member_id).username
684 684 self.grant_user_permission(
685 685 repo=repo, user=member_id, perm=perm)
686 686 elif member_type == 'user_group':
687 687 # check if we have permissions to alter this usergroup
688 688 member_name = UserGroup.get(member_id).users_group_name
689 689 if not check_perms or HasUserGroupPermissionAny(
690 690 *req_perms)(member_name, user=cur_user):
691 691 self.grant_user_group_permission(
692 692 repo=repo, group_name=member_id, perm=perm)
693 693 else:
694 694 raise ValueError("member_type must be 'user' or 'user_group' "
695 695 "got {} instead".format(member_type))
696 696
697 697 changes['added'].append({'type': member_type, 'id': member_id,
698 698 'name': member_name, 'new_perm': perm})
699 699 # delete permissions
700 700 for member_id, perm, member_type in perm_deletions:
701 701 member_id = int(member_id)
702 702 if member_type == 'user':
703 703 member_name = User.get(member_id).username
704 704 self.revoke_user_permission(repo=repo, user=member_id)
705 705 elif member_type == 'user_group':
706 706 # check if we have permissions to alter this usergroup
707 707 member_name = UserGroup.get(member_id).users_group_name
708 708 if not check_perms or HasUserGroupPermissionAny(
709 709 *req_perms)(member_name, user=cur_user):
710 710 self.revoke_user_group_permission(
711 711 repo=repo, group_name=member_id)
712 712 else:
713 713 raise ValueError("member_type must be 'user' or 'user_group' "
714 714 "got {} instead".format(member_type))
715 715
716 716 changes['deleted'].append({'type': member_type, 'id': member_id,
717 717 'name': member_name, 'new_perm': perm})
718 718 return changes
719 719
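# The three permission-change lists above consume (member_id, perm, member_type)
# triples. A hedged usage sketch (the ids, permission names and calling user
# are hypothetical):
#
#   additions = [(123, 'repository.write', 'user'),
#                (456, 'repository.read', 'user_group')]
#   deletions = [(789, 'repository.read', 'user')]
#   RepoModel().update_permissions(
#       'manual', perm_additions=additions, perm_deletions=deletions,
#       cur_user=admin_user)
#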
720 720 def create_fork(self, form_data, cur_user):
721 721 """
722 722 Simple wrapper for executing the celery task that creates a fork
723 723
724 724 :param form_data:
725 725 :param cur_user:
726 726 """
727 727 from rhodecode.lib.celerylib import tasks, run_task
728 728 return run_task(tasks.create_repo_fork, form_data, cur_user)
729 729
730 730 def archive(self, repo):
731 731 """
732 732 Archive given repository. Set archive flag.
733 733
734 734 :param repo:
735 735 """
736 736 repo = self._get_repo(repo)
737 737 if repo:
738 738
739 739 try:
740 740 repo.archived = True
741 741 self.sa.add(repo)
742 742 self.sa.commit()
743 743 except Exception:
744 744 log.error(traceback.format_exc())
745 745 raise
746 746
747 747 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
748 748 """
749 749 Delete the given repository. The forks parameter defines what to do with
750 750 attached forks. Throws AttachedForksError if the deleted repo has attached
751 751 forks.
752 752
753 753 :param repo:
754 754 :param forks: str 'delete' or 'detach'
755 755 :param pull_requests: str 'delete' or None
756 756 :param fs_remove: remove (archive) the repo from the filesystem
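
Example (an illustrative sketch; the repo name and user are hypothetical)::

    # detach forks and archive the files on disk; raises
    # AttachedPullRequestsError if any pull requests are still attached
    RepoModel().delete('manual', forks='detach', fs_remove=True,
                       cur_user='admin')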
757 757 """
758 758 if not cur_user:
759 759 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
760 760 repo = self._get_repo(repo)
761 761 if repo:
762 762 if forks == 'detach':
763 763 for r in repo.forks:
764 764 r.fork = None
765 765 self.sa.add(r)
766 766 elif forks == 'delete':
767 767 for r in repo.forks:
768 768 self.delete(r, forks='delete')
769 769 elif [f for f in repo.forks]:
770 770 raise AttachedForksError()
771 771
772 772 # check for pull requests
773 773 pr_sources = repo.pull_requests_source
774 774 pr_targets = repo.pull_requests_target
775 775 if pull_requests != 'delete' and (pr_sources or pr_targets):
776 776 raise AttachedPullRequestsError()
777 777
778 778 old_repo_dict = repo.get_dict()
779 779 events.trigger(events.RepoPreDeleteEvent(repo))
780 780 try:
781 781 self.sa.delete(repo)
782 782 if fs_remove:
783 783 self._delete_filesystem_repo(repo)
784 784 else:
785 785 log.debug('skipping removal from filesystem')
786 786 old_repo_dict.update({
787 787 'deleted_by': cur_user,
788 788 'deleted_on': time.time(),
789 789 })
790 790 hooks_base.delete_repository(**old_repo_dict)
791 791 events.trigger(events.RepoDeleteEvent(repo))
792 792 except Exception:
793 793 log.error(traceback.format_exc())
794 794 raise
795 795
796 796 def grant_user_permission(self, repo, user, perm):
797 797 """
798 798 Grant permission for user on given repository, or update existing one
799 799 if found
800 800
801 801 :param repo: Instance of Repository, repository_id, or repository name
802 802 :param user: Instance of User, user_id or username
803 803 :param perm: Instance of Permission, or permission_name
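
Example (a minimal sketch; the repo, user and permission are hypothetical)::

    RepoModel().grant_user_permission(
        repo='manual', user='john', perm='repository.write')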
804 804 """
805 805 user = self._get_user(user)
806 806 repo = self._get_repo(repo)
807 807 permission = self._get_perm(perm)
808 808
809 809 # check if we have that permission already
810 810 obj = self.sa.query(UserRepoToPerm) \
811 811 .filter(UserRepoToPerm.user == user) \
812 812 .filter(UserRepoToPerm.repository == repo) \
813 813 .scalar()
814 814 if obj is None:
815 815 # create new !
816 816 obj = UserRepoToPerm()
817 817 obj.repository = repo
818 818 obj.user = user
819 819 obj.permission = permission
820 820 self.sa.add(obj)
821 821 log.debug('Granted perm %s to %s on %s', perm, user, repo)
822 822 action_logger_generic(
823 823 'granted permission: {} to user: {} on repo: {}'.format(
824 824 perm, user, repo), namespace='security.repo')
825 825 return obj
826 826
827 827 def revoke_user_permission(self, repo, user):
828 828 """
829 829 Revoke permission for user on given repository
830 830
831 831 :param repo: Instance of Repository, repository_id, or repository name
832 832 :param user: Instance of User, user_id or username
833 833 """
834 834
835 835 user = self._get_user(user)
836 836 repo = self._get_repo(repo)
837 837
838 838 obj = self.sa.query(UserRepoToPerm) \
839 839 .filter(UserRepoToPerm.repository == repo) \
840 840 .filter(UserRepoToPerm.user == user) \
841 841 .scalar()
842 842 if obj:
843 843 self.sa.delete(obj)
844 844 log.debug('Revoked perm on %s on %s', repo, user)
845 845 action_logger_generic(
846 846 'revoked permission from user: {} on repo: {}'.format(
847 847 user, repo), namespace='security.repo')
848 848
849 849 def grant_user_group_permission(self, repo, group_name, perm):
850 850 """
851 851 Grant permission for user group on given repository, or update
852 852 existing one if found
853 853
854 854 :param repo: Instance of Repository, repository_id, or repository name
855 855 :param group_name: Instance of UserGroup, users_group_id,
856 856 or user group name
857 857 :param perm: Instance of Permission, or permission_name
858 858 """
859 859 repo = self._get_repo(repo)
860 860 group_name = self._get_user_group(group_name)
861 861 permission = self._get_perm(perm)
862 862
863 863 # check if we have that permission already
864 864 obj = self.sa.query(UserGroupRepoToPerm) \
865 865 .filter(UserGroupRepoToPerm.users_group == group_name) \
866 866 .filter(UserGroupRepoToPerm.repository == repo) \
867 867 .scalar()
868 868
869 869 if obj is None:
870 870 # create new
871 871 obj = UserGroupRepoToPerm()
872 872
873 873 obj.repository = repo
874 874 obj.users_group = group_name
875 875 obj.permission = permission
876 876 self.sa.add(obj)
877 877 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
878 878 action_logger_generic(
879 879 'granted permission: {} to usergroup: {} on repo: {}'.format(
880 880 perm, group_name, repo), namespace='security.repo')
881 881
882 882 return obj
883 883
884 884 def revoke_user_group_permission(self, repo, group_name):
885 885 """
886 886 Revoke permission for user group on given repository
887 887
888 888 :param repo: Instance of Repository, repository_id, or repository name
889 889 :param group_name: Instance of UserGroup, users_group_id,
890 890 or user group name
891 891 """
892 892 repo = self._get_repo(repo)
893 893 group_name = self._get_user_group(group_name)
894 894
895 895 obj = self.sa.query(UserGroupRepoToPerm) \
896 896 .filter(UserGroupRepoToPerm.repository == repo) \
897 897 .filter(UserGroupRepoToPerm.users_group == group_name) \
898 898 .scalar()
899 899 if obj:
900 900 self.sa.delete(obj)
901 901 log.debug('Revoked perm to %s on %s', repo, group_name)
902 902 action_logger_generic(
903 903 'revoked permission from usergroup: {} on repo: {}'.format(
904 904 group_name, repo), namespace='security.repo')
905 905
906 906 def delete_stats(self, repo_name):
907 907 """
908 908 removes stats for given repo
909 909
910 910 :param repo_name:
911 911 """
912 912 repo = self._get_repo(repo_name)
913 913 try:
914 914 obj = self.sa.query(Statistics) \
915 915 .filter(Statistics.repository == repo).scalar()
916 916 if obj:
917 917 self.sa.delete(obj)
918 918 except Exception:
919 919 log.error(traceback.format_exc())
920 920 raise
921 921
922 922 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
923 923 field_type='str', field_desc=''):
924 924
925 925 repo = self._get_repo(repo_name)
926 926
927 927 new_field = RepositoryField()
928 928 new_field.repository = repo
929 929 new_field.field_key = field_key
930 930 new_field.field_type = field_type # python type
931 931 new_field.field_value = field_value
932 932 new_field.field_desc = field_desc
933 933 new_field.field_label = field_label
934 934 self.sa.add(new_field)
935 935 return new_field
936 936
937 937 def delete_repo_field(self, repo_name, field_key):
938 938 repo = self._get_repo(repo_name)
939 939 field = RepositoryField.get_by_key_name(field_key, repo)
940 940 if field:
941 941 self.sa.delete(field)
942 942
943 943 def set_landing_rev(self, repo, landing_rev_name):
944 944 if landing_rev_name.startswith('branch:'):
945 945 landing_rev_name = landing_rev_name.split('branch:')[-1]
946 946 scm_instance = repo.scm_instance()
947 947 if scm_instance:
948 948 return scm_instance._remote.set_head_ref(landing_rev_name)
949 949
950 950 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
951 951 clone_uri=None, repo_store_location=None,
952 952 use_global_config=False, install_hooks=True):
953 953 """
954 954 Makes a repository on the filesystem. It is group aware, meaning it will
955 955 create the repository within a group and alter the paths according to the
956 956 group location.
957 957
958 958 :param repo_name:
959 959 :param repo_type:
960 960 :param repo_group:
961 961 :param clone_uri:
962 962 :param repo_store_location:
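
Example (a hedged sketch of the path resolution; the store path and group
are hypothetical)::

    # with repos_path=/srv/repos and a repo group 'docs', the repository
    # 'manual' is created at /srv/repos/docs/manual
    RepoModel()._create_filesystem_repo('manual', 'git', repo_group,
                                        clone_uri=None)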
963 963 """
964 964 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
965 965 from rhodecode.model.scm import ScmModel
966 966
967 967 if Repository.NAME_SEP in repo_name:
968 968 raise ValueError(
969 969 'repo_name must not contain groups, got `%s`' % repo_name)
970 970
971 971 if isinstance(repo_group, RepoGroup):
972 972 new_parent_path = os.sep.join(repo_group.full_path_splitted)
973 973 else:
974 974 new_parent_path = repo_group or ''
975 975
976 976 if repo_store_location:
977 977 _paths = [repo_store_location]
978 978 else:
979 979 _paths = [self.repos_path, new_parent_path, repo_name]
980 980 # we need to make it str for mercurial
981 981 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
982 982
983 983 # check if this path is not a repository
984 984 if is_valid_repo(repo_path, self.repos_path):
985 985 raise Exception(f'This path {repo_path} is already a valid repository')
986 986
987 987 # check if this path is a group
988 988 if is_valid_repo_group(repo_path, self.repos_path):
989 989 raise Exception(f'This path {repo_path} is already a valid group')
990 990
991 991 log.info('creating repo %s in %s from url: `%s`',
992 992 repo_name, safe_str(repo_path),
993 993 obfuscate_url_pw(clone_uri))
994 994
995 995 backend = get_backend(repo_type)
996 996
997 997 config_repo = None if use_global_config else repo_name
998 998 if config_repo and new_parent_path:
999 999 config_repo = Repository.NAME_SEP.join(
1000 1000 (new_parent_path, config_repo))
1001 1001 config = make_db_config(clear_session=False, repo=config_repo)
1002 1002 config.set('extensions', 'largefiles', '')
1003 1003
1004 1004 # patch and reset hooks section of UI config to not run any
1005 1005 # hooks on creating remote repo
1006 1006 config.clear_section('hooks')
1007 1007
1008 1008 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1009 1009 if repo_type == 'git':
1010 1010 repo = backend(
1011 1011 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1012 1012 with_wire={"cache": False})
1013 1013 else:
1014 1014 repo = backend(
1015 1015 repo_path, config=config, create=True, src_url=clone_uri,
1016 1016 with_wire={"cache": False})
1017 1017
1018 1018 if install_hooks:
1019 1019 repo.install_hooks()
1020 1020
1021 1021 log.debug('Created repo %s with %s backend',
1022 1022 safe_str(repo_name), safe_str(repo_type))
1023 1023 return repo
1024 1024
1025 1025 def _rename_filesystem_repo(self, old, new):
1026 1026 """
1027 1027 renames repository on filesystem
1028 1028
1029 1029 :param old: old name
1030 1030 :param new: new name
1031 1031 """
1032 1032 log.info('renaming repo from %s to %s', old, new)
1033 1033
1034 1034 old_path = os.path.join(self.repos_path, old)
1035 1035 new_path = os.path.join(self.repos_path, new)
1036 1036 if os.path.isdir(new_path):
1037 1037 raise Exception(
1038 1038 'Was trying to rename to already existing dir %s' % new_path
1039 1039 )
1040 1040 shutil.move(old_path, new_path)
1041 1041
1042 1042 def _delete_filesystem_repo(self, repo):
1043 1043 """
1044 1044 Removes a repo from the filesystem. The removal is actually done by
1045 1045 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs,
1046 1046 so this repository is no longer valid for rhodecode; it can be undeleted
1047 1047 later on by reverting the renames on this repository.
1048 1048
1049 1049 :param repo: repo object
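
Example of the resulting layout (timestamps are illustrative)::

    # /srv/repos/docs/manual -> /srv/repos/docs/rm__20230101_120000_000001__manual
    # for non-bare repos the internal dir is also renamed, e.g. .git -> rm__.git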
1050 1050 """
1051 1051 rm_path = os.path.join(self.repos_path, repo.repo_name)
1052 1052 repo_group = repo.group
1053 1053 log.info("delete_filesystem_repo: removing repository %s", rm_path)
1054 1054 # disable hg/git internals so it doesn't get detected as a repo
1055 1055 alias = repo.repo_type
1056 1056
1057 1057 config = make_db_config(clear_session=False)
1058 1058 config.set('extensions', 'largefiles', '')
1059 1059 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1060 1060
1061 1061 # skip this for bare git repos
1062 1062 if not bare:
1063 1063 # disable VCS repo
1064 1064 vcs_path = os.path.join(rm_path, '.%s' % alias)
1065 1065 if os.path.exists(vcs_path):
1066 1066 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1067 1067
1068 1068 _now = datetime.datetime.now()
1069 1069 _ms = str(_now.microsecond).rjust(6, '0')
1070 1070 _d = 'rm__{}__{}'.format(_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1071 1071 repo.just_name)
1072 1072 if repo_group:
1073 1073 # if repository is in group, prefix the removal path with the group
1074 1074 args = repo_group.full_path_splitted + [_d]
1075 1075 _d = os.path.join(*args)
1076 1076
1077 1077 if os.path.isdir(rm_path):
1078 1078 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1079 1079
1080 1080 # finally cleanup diff-cache if it exists
1081 1081 cached_diffs_dir = repo.cached_diffs_dir
1082 1082 if os.path.isdir(cached_diffs_dir):
1083 1083 shutil.rmtree(cached_diffs_dir)
1084 1084
1085 1085
1086 1086 class ReadmeFinder:
1087 1087 """
1088 1088 Utility which knows how to find a readme for a specific commit.
1089 1089
1090 1090 The main idea is that this is a configurable algorithm. When creating an
1091 1091 instance you can define parameters, currently only the `default_renderer`.
1092 1092 Based on this configuration the method :meth:`search` behaves slightly
1093 1093 differently.
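
Example (a minimal sketch; ``commit`` is assumed to be a commit object that
exposes ``get_node`` and ``get_nodes``)::

    finder = ReadmeFinder(default_renderer='markdown')
    node = finder.search(commit)   # checks '/', then doc/docs sub-directories
    if node:
        print(node.path)           # e.g. 'README.md'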
1094 1094 """
1095 1095
1096 1096 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1097 1097 path_re = re.compile(r'^docs?', re.IGNORECASE)
1098 1098
1099 1099 default_priorities = {
1100 1100 None: 0,
1101 1101 '.rst': 1,
1102 1102 '.md': 1,
1103 1103 '.rest': 2,
1104 1104 '.mkdn': 2,
1105 1105 '.text': 2,
1106 1106 '.txt': 3,
1107 1107 '.mdown': 3,
1108 1108 '.markdown': 4,
1109 1109 }
1110 1110
1111 1111 path_priority = {
1112 1112 'doc': 0,
1113 1113 'docs': 1,
1114 1114 }
1115 1115
1116 1116 FALLBACK_PRIORITY = 99
1117 1117
1118 1118 RENDERER_TO_EXTENSION = {
1119 1119 'rst': ['.rst', '.rest'],
1120 1120 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
1121 1121 }
1122 1122
1123 1123 def __init__(self, default_renderer=None):
1124 1124 self._default_renderer = default_renderer
1125 1125 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1126 1126 default_renderer, [])
1127 1127
1128 1128 def search(self, commit, path='/'):
1129 1129 """
1130 1130 Find a readme in the given `commit`.
1131 1131 """
1132 1132 # first, check that the given path is actually a directory
1133 1133 if commit.get_node(path).kind != NodeKind.DIR:
1134 1134 return None
1135 1135
1136 1136 nodes = commit.get_nodes(path)
1137 1137 matches = self._match_readmes(nodes)
1138 1138 matches = self._sort_according_to_priority(matches)
1139 1139 if matches:
1140 1140 return matches[0].node
1141 1141
1142 1142 paths = self._match_paths(nodes)
1143 1143 paths = self._sort_paths_according_to_priority(paths)
1144 1144 for path in paths:
1145 1145 match = self.search(commit, path=path)
1146 1146 if match:
1147 1147 return match
1148 1148
1149 1149 return None
1150 1150
1151 1151 def _match_readmes(self, nodes):
1152 1152 for node in nodes:
1153 1153 if not node.is_file():
1154 1154 continue
1155 1155 path = node.path.rsplit('/', 1)[-1]
1156 1156 match = self.readme_re.match(path)
1157 1157 if match:
1158 1158 extension = match.group(1)
1159 1159 yield ReadmeMatch(node, match, self._priority(extension))
1160 1160
1161 1161 def _match_paths(self, nodes):
1162 1162 for node in nodes:
1163 1163 if not node.is_dir():
1164 1164 continue
1165 1165 match = self.path_re.match(node.path)
1166 1166 if match:
1167 1167 yield node.path
1168 1168
1169 1169 def _priority(self, extension):
1170 1170 renderer_priority = (
1171 1171 0 if extension in self._renderer_extensions else 1)
1172 1172 extension_priority = self.default_priorities.get(
1173 1173 extension, self.FALLBACK_PRIORITY)
1174 1174 return (renderer_priority, extension_priority)
1175 1175
1176 1176 def _sort_according_to_priority(self, matches):
1177 1177
1178 1178 def priority_and_path(match):
1179 1179 return (match.priority, match.path)
1180 1180
1181 1181 return sorted(matches, key=priority_and_path)
1182 1182
1183 1183 def _sort_paths_according_to_priority(self, paths):
1184 1184
1185 1185 def priority_and_path(path):
1186 1186 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1187 1187
1188 1188 return sorted(paths, key=priority_and_path)
1189 1189
1190 1190
1191 1191 class ReadmeMatch:
1192 1192
1193 1193 def __init__(self, node, match, priority):
1194 1194 self.node = node
1195 1195 self._match = match
1196 1196 self.priority = priority
1197 1197
1198 1198 @property
1199 1199 def path(self):
1200 1200 return self.node.path
1201 1201
1202 1202 def __repr__(self):
1203 1203 return f'<ReadmeMatch {self.path} priority={self.priority}>'