feat(caches): make sure commit-caches propagate to parent repo groups
super-admin
r5486:a9fbe41d default
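
This change makes the cached last-commit data propagate to parent repo groups when a repository's commit cache is refreshed; the call sites changed below pass recursive=False to opt out where that propagation is unnecessary or handled separately (for example sync_last_update_for_objects, which refreshes group caches itself). The snippet below is a minimal, self-contained sketch of the intended semantics only; FakeRepository and FakeRepoGroup are illustrative stand-ins, not the actual RhodeCode models.

# Hedged sketch (illustrative stand-ins, not the real RhodeCode models) of how
# updating a repository's commit cache can bubble the newest change date up
# through its parent repo groups, unless recursive=False is passed.
import datetime


class FakeRepoGroup:
    def __init__(self, name, parent=None):
        self.name = name
        self.parent_group = parent
        self.changeset_cache = {}  # e.g. {'updated_on': <datetime>}

    def update_commit_cache(self, last_change):
        # keep only the newest change date seen so far
        current = self.changeset_cache.get('updated_on')
        if current is None or last_change > current:
            self.changeset_cache['updated_on'] = last_change


class FakeRepository:
    def __init__(self, name, group=None):
        self.name = name
        self.group = group
        self.changeset_cache = {}

    def update_commit_cache(self, recursive=True):
        last_change = datetime.datetime.now()  # stand-in for the latest commit date
        self.changeset_cache['updated_on'] = last_change
        if recursive and self.group:
            # bubble the fresh value up through every parent group
            group = self.group
            while group:
                group.update_commit_cache(last_change)
                group = group.parent_group


root = FakeRepoGroup('projects')
child = FakeRepoGroup('projects/backend', parent=root)
repo = FakeRepository('projects/backend/api', group=child)

repo.update_commit_cache()                 # parent groups are refreshed as well
repo.update_commit_cache(recursive=False)  # only the repository itself is touched
print(root.changeset_cache, child.changeset_cache)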
@@ -1,454 +1,454 @@
1 1 # Copyright (C) 2012-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 RhodeCode task modules, containing all tasks that are supposed to be run
21 21 by the celery daemon
22 22 """
23 23
24 24 import os
25 25 import time
26 26
27 27 from pyramid_mailer.mailer import Mailer
28 28 from pyramid_mailer.message import Message
29 29 from email.utils import formatdate
30 30
31 31 import rhodecode
32 32 from rhodecode.lib import audit_logger
33 33 from rhodecode.lib.celerylib import get_logger, async_task, RequestContextTask, run_task
34 34 from rhodecode.lib import hooks_base
35 35 from rhodecode.lib.utils import adopt_for_celery
36 36 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
37 37 from rhodecode.lib.statsd_client import StatsdClient
38 38 from rhodecode.model.db import (
39 39 true, null, Session, IntegrityError, Repository, RepoGroup, User)
40 40 from rhodecode.model.permission import PermissionModel
41 41
42 42
43 43 @async_task(ignore_result=True, base=RequestContextTask)
44 44 def send_email(recipients, subject, body='', html_body='', email_config=None,
45 45 extra_headers=None):
46 46 """
47 47 Sends an email with defined parameters from the .ini files.
48 48
49 49 :param recipients: list of recipients, if this is empty the defined email
50 50 address from field 'email_to' is used instead
51 51 :param subject: subject of the mail
52 52 :param body: body of the mail
53 53 :param html_body: html version of body
54 54 :param email_config: specify custom configuration for mailer
55 55 :param extra_headers: specify custom headers
56 56 """
57 57 log = get_logger(send_email)
58 58
59 59 email_config = email_config or rhodecode.CONFIG
60 60
61 61 mail_server = email_config.get('smtp_server') or None
62 62 if mail_server is None:
63 63 log.error("SMTP server information missing. Sending email failed. "
64 64 "Make sure that `smtp_server` variable is configured "
65 65 "inside the .ini file")
66 66 return False
67 67
68 68 subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
69 69
70 70 if recipients:
71 71 if isinstance(recipients, str):
72 72 recipients = recipients.split(',')
73 73 else:
74 74 # if recipients are not defined we send to email_config + all admins
75 75 admins = []
76 76 for u in User.query().filter(User.admin == true()).all():
77 77 if u.email:
78 78 admins.append(u.email)
79 79 recipients = []
80 80 config_email = email_config.get('email_to')
81 81 if config_email:
82 82 recipients += [config_email]
83 83 recipients += admins
84 84
85 85 # translate our LEGACY config into the one that pyramid_mailer supports
86 86 email_conf = dict(
87 87 host=mail_server,
88 88 port=email_config.get('smtp_port', 25),
89 89 username=email_config.get('smtp_username'),
90 90 password=email_config.get('smtp_password'),
91 91
92 92 tls=str2bool(email_config.get('smtp_use_tls')),
93 93 ssl=str2bool(email_config.get('smtp_use_ssl')),
94 94
95 95 # SSL key file
96 96 # keyfile='',
97 97
98 98 # SSL certificate file
99 99 # certfile='',
100 100
101 101 # Location of maildir
102 102 # queue_path='',
103 103
104 104 default_sender=email_config.get('app_email_from', 'RhodeCode-noreply@rhodecode.com'),
105 105
106 106 debug=str2bool(email_config.get('smtp_debug')),
107 107 # /usr/sbin/sendmail Sendmail executable
108 108 # sendmail_app='',
109 109
110 110 # {sendmail_app} -t -i -f {sender} Template for sendmail execution
111 111 # sendmail_template='',
112 112 )
113 113
114 114 if extra_headers is None:
115 115 extra_headers = {}
116 116
117 117 extra_headers.setdefault('Date', formatdate(time.time()))
118 118
119 119 if 'thread_ids' in extra_headers:
120 120 thread_ids = extra_headers.pop('thread_ids')
121 121 extra_headers['References'] = ' '.join('<{}>'.format(t) for t in thread_ids)
122 122
123 123 try:
124 124 mailer = Mailer(**email_conf)
125 125
126 126 message = Message(subject=subject,
127 127 sender=email_conf['default_sender'],
128 128 recipients=recipients,
129 129 body=body, html=html_body,
130 130 extra_headers=extra_headers)
131 131 mailer.send_immediately(message)
132 132 statsd = StatsdClient.statsd
133 133 if statsd:
134 134 statsd.incr('rhodecode_email_sent_total')
135 135
136 136 except Exception:
137 137 log.exception('Mail sending failed')
138 138 return False
139 139 return True
140 140
141 141
142 142 @async_task(ignore_result=True, base=RequestContextTask)
143 143 def create_repo(form_data, cur_user):
144 144 from rhodecode.model.repo import RepoModel
145 145 from rhodecode.model.user import UserModel
146 146 from rhodecode.model.scm import ScmModel
147 147 from rhodecode.model.settings import SettingsModel
148 148
149 149 log = get_logger(create_repo)
150 150
151 151 cur_user = UserModel()._get_user(cur_user)
152 152 owner = cur_user
153 153
154 154 repo_name = form_data['repo_name']
155 155 repo_name_full = form_data['repo_name_full']
156 156 repo_type = form_data['repo_type']
157 157 description = form_data['repo_description']
158 158 private = form_data['repo_private']
159 159 clone_uri = form_data.get('clone_uri')
160 160 repo_group = safe_int(form_data['repo_group'])
161 161 copy_fork_permissions = form_data.get('copy_permissions')
162 162 copy_group_permissions = form_data.get('repo_copy_permissions')
163 163 fork_of = form_data.get('fork_parent_id')
164 164 state = form_data.get('repo_state', Repository.STATE_PENDING)
165 165
166 166 # repo creation defaults, private and repo_type are filled in form
167 167 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
168 168 enable_statistics = form_data.get(
169 169 'enable_statistics', defs.get('repo_enable_statistics'))
170 170 enable_locking = form_data.get(
171 171 'enable_locking', defs.get('repo_enable_locking'))
172 172 enable_downloads = form_data.get(
173 173 'enable_downloads', defs.get('repo_enable_downloads'))
174 174
175 175 # set landing rev based on default branches for SCM
176 176 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
177 177
178 178 try:
179 179 RepoModel()._create_repo(
180 180 repo_name=repo_name_full,
181 181 repo_type=repo_type,
182 182 description=description,
183 183 owner=owner,
184 184 private=private,
185 185 clone_uri=clone_uri,
186 186 repo_group=repo_group,
187 187 landing_rev=landing_ref,
188 188 fork_of=fork_of,
189 189 copy_fork_permissions=copy_fork_permissions,
190 190 copy_group_permissions=copy_group_permissions,
191 191 enable_statistics=enable_statistics,
192 192 enable_locking=enable_locking,
193 193 enable_downloads=enable_downloads,
194 194 state=state
195 195 )
196 196
197 197 Session().commit()
198 198
199 199 # now create this repo on Filesystem
200 200 RepoModel()._create_filesystem_repo(
201 201 repo_name=repo_name,
202 202 repo_type=repo_type,
203 203 repo_group=RepoModel()._get_repo_group(repo_group),
204 204 clone_uri=clone_uri,
205 205 )
206 206 repo = Repository.get_by_repo_name(repo_name_full)
207 207 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
208 208
209 209 # update repo commit caches initially
210 repo.update_commit_cache()
210 repo.update_commit_cache(recursive=False)
211 211
212 212 # set new created state
213 213 repo.set_state(Repository.STATE_CREATED)
214 214 repo_id = repo.repo_id
215 215 repo_data = repo.get_api_data()
216 216
217 217 audit_logger.store(
218 218 'repo.create', action_data={'data': repo_data},
219 219 user=cur_user,
220 220 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
221 221
222 222 Session().commit()
223 223
224 224 PermissionModel().trigger_permission_flush()
225 225
226 226 except Exception as e:
227 227 log.warning('Exception occurred when creating repository, '
228 228 'doing cleanup...', exc_info=True)
229 229 if isinstance(e, IntegrityError):
230 230 Session().rollback()
231 231
232 232 # rollback things manually !
233 233 repo = Repository.get_by_repo_name(repo_name_full)
234 234 if repo:
235 235 Repository.delete(repo.repo_id)
236 236 Session().commit()
237 237 RepoModel()._delete_filesystem_repo(repo)
238 238 log.info('Cleanup of repo %s finished', repo_name_full)
239 239 raise
240 240
241 241 return True
242 242
243 243
244 244 @async_task(ignore_result=True, base=RequestContextTask)
245 245 def create_repo_fork(form_data, cur_user):
246 246 """
247 247 Creates a fork of repository using internal VCS methods
248 248 """
249 249 from rhodecode.model.repo import RepoModel
250 250 from rhodecode.model.user import UserModel
251 251
252 252 log = get_logger(create_repo_fork)
253 253
254 254 cur_user = UserModel()._get_user(cur_user)
255 255 owner = cur_user
256 256
257 257 repo_name = form_data['repo_name'] # fork in this case
258 258 repo_name_full = form_data['repo_name_full']
259 259 repo_type = form_data['repo_type']
260 260 description = form_data['description']
261 261 private = form_data['private']
262 262 clone_uri = form_data.get('clone_uri')
263 263 repo_group = safe_int(form_data['repo_group'])
264 264 landing_ref = form_data['landing_rev']
265 265 copy_fork_permissions = form_data.get('copy_permissions')
266 266 fork_id = safe_int(form_data.get('fork_parent_id'))
267 267
268 268 try:
269 269 fork_of = RepoModel()._get_repo(fork_id)
270 270 RepoModel()._create_repo(
271 271 repo_name=repo_name_full,
272 272 repo_type=repo_type,
273 273 description=description,
274 274 owner=owner,
275 275 private=private,
276 276 clone_uri=clone_uri,
277 277 repo_group=repo_group,
278 278 landing_rev=landing_ref,
279 279 fork_of=fork_of,
280 280 copy_fork_permissions=copy_fork_permissions
281 281 )
282 282
283 283 Session().commit()
284 284
285 285 base_path = Repository.base_path()
286 286 source_repo_path = os.path.join(base_path, fork_of.repo_name)
287 287
288 288 # now create this repo on Filesystem
289 289 RepoModel()._create_filesystem_repo(
290 290 repo_name=repo_name,
291 291 repo_type=repo_type,
292 292 repo_group=RepoModel()._get_repo_group(repo_group),
293 293 clone_uri=source_repo_path,
294 294 )
295 295 repo = Repository.get_by_repo_name(repo_name_full)
296 296 hooks_base.create_repository(created_by=owner.username, **repo.get_dict())
297 297
298 298 # update repo commit caches initially
299 299 config = repo._config
300 300 config.set('extensions', 'largefiles', '')
301 repo.update_commit_cache(config=config)
301 repo.update_commit_cache(config=config, recursive=False)
302 302
303 303 # set new created state
304 304 repo.set_state(Repository.STATE_CREATED)
305 305
306 306 repo_id = repo.repo_id
307 307 repo_data = repo.get_api_data()
308 308 audit_logger.store(
309 309 'repo.fork', action_data={'data': repo_data},
310 310 user=cur_user,
311 311 repo=audit_logger.RepoWrap(repo_name=repo_name, repo_id=repo_id))
312 312
313 313 Session().commit()
314 314 except Exception as e:
315 315 log.warning('Exception occurred when forking repository, '
316 316 'doing cleanup...', exc_info=True)
317 317 if isinstance(e, IntegrityError):
318 318 Session().rollback()
319 319
320 320 # rollback things manually !
321 321 repo = Repository.get_by_repo_name(repo_name_full)
322 322 if repo:
323 323 Repository.delete(repo.repo_id)
324 324 Session().commit()
325 325 RepoModel()._delete_filesystem_repo(repo)
326 326 log.info('Cleanup of repo %s finished', repo_name_full)
327 327 raise
328 328
329 329 return True
330 330
331 331
332 332 @async_task(ignore_result=True, base=RequestContextTask)
333 333 def repo_maintenance(repoid):
334 334 from rhodecode.lib import repo_maintenance as repo_maintenance_lib
335 335 log = get_logger(repo_maintenance)
336 336 repo = Repository.get_by_id_or_repo_name(repoid)
337 337 if repo:
338 338 maintenance = repo_maintenance_lib.RepoMaintenance()
339 339 tasks = maintenance.get_tasks_for_repo(repo)
340 340 log.debug('Executing %s tasks on repo `%s`', tasks, repoid)
341 341 executed_types = maintenance.execute(repo)
342 342 log.debug('Got execution results %s', executed_types)
343 343 else:
344 344 log.debug('Repo `%s` not found or without a clone_url', repoid)
345 345
346 346
347 347 @async_task(ignore_result=True, base=RequestContextTask)
348 348 def check_for_update(send_email_notification=True, email_recipients=None):
349 349 from rhodecode.model.update import UpdateModel
350 350 from rhodecode.model.notification import EmailNotificationModel
351 351
352 352 log = get_logger(check_for_update)
353 353 update_url = UpdateModel().get_update_url()
354 354 cur_ver = rhodecode.__version__
355 355
356 356 try:
357 357 data = UpdateModel().get_update_data(update_url)
358 358
359 359 current_ver = UpdateModel().get_stored_version(fallback=cur_ver)
360 360 latest_ver = data['versions'][0]['version']
361 361 UpdateModel().store_version(latest_ver)
362 362
363 363 if send_email_notification:
364 364 log.debug('Send email notification is enabled. '
365 365 'Current RhodeCode version: %s, latest known: %s', current_ver, latest_ver)
366 366 if UpdateModel().is_outdated(current_ver, latest_ver):
367 367
368 368 email_kwargs = {
369 369 'current_ver': current_ver,
370 370 'latest_ver': latest_ver,
371 371 }
372 372
373 373 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
374 374 EmailNotificationModel.TYPE_UPDATE_AVAILABLE, **email_kwargs)
375 375
376 376 email_recipients = aslist(email_recipients, sep=',') or \
377 377 [user.email for user in User.get_all_super_admins()]
378 378 run_task(send_email, email_recipients, subject,
379 379 email_body_plaintext, email_body)
380 380
381 381 except Exception:
382 382 log.exception('Failed to check for update')
383 383 raise
384 384
385 385
386 386 def sync_last_update_for_objects(*args, **kwargs):
387 387 skip_repos = kwargs.get('skip_repos')
388 388 if not skip_repos:
389 389 repos = Repository.query() \
390 390 .order_by(Repository.group_id.asc())
391 391
392 392 for repo in repos:
393 repo.update_commit_cache()
393 repo.update_commit_cache(recursive=False)
394 394
395 395 skip_groups = kwargs.get('skip_groups')
396 396 if not skip_groups:
397 397 repo_groups = RepoGroup.query() \
398 398 .filter(RepoGroup.group_parent_id == null())
399 399
400 400 for root_gr in repo_groups:
401 401 for repo_gr in reversed(root_gr.recursive_groups()):
402 402 repo_gr.update_commit_cache()
403 403
404 404
405 405 @async_task(ignore_result=True, base=RequestContextTask)
406 406 def test_celery_exception(msg):
407 407 raise Exception(f'Test exception: {msg}')
408 408
409 409
410 410 @async_task(ignore_result=True, base=RequestContextTask)
411 411 def sync_last_update(*args, **kwargs):
412 412 sync_last_update_for_objects(*args, **kwargs)
413 413
414 414
415 415 @async_task(ignore_result=False)
416 416 def beat_check(*args, **kwargs):
417 417 log = get_logger(beat_check)
418 418 log.info('%r: Got args: %r and kwargs %r', beat_check, args, kwargs)
419 419 return time.time()
420 420
421 421
422 422 @async_task
423 423 @adopt_for_celery
424 424 def repo_size(extras):
425 425 from rhodecode.lib.hooks_base import repo_size
426 426 return repo_size(extras)
427 427
428 428
429 429 @async_task
430 430 @adopt_for_celery
431 431 def pre_pull(extras):
432 432 from rhodecode.lib.hooks_base import pre_pull
433 433 return pre_pull(extras)
434 434
435 435
436 436 @async_task
437 437 @adopt_for_celery
438 438 def post_pull(extras):
439 439 from rhodecode.lib.hooks_base import post_pull
440 440 return post_pull(extras)
441 441
442 442
443 443 @async_task
444 444 @adopt_for_celery
445 445 def pre_push(extras):
446 446 from rhodecode.lib.hooks_base import pre_push
447 447 return pre_push(extras)
448 448
449 449
450 450 @async_task
451 451 @adopt_for_celery
452 452 def post_push(extras):
453 453 from rhodecode.lib.hooks_base import post_push
454 454 return post_push(extras)
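
In the tasks above, every per-repository update_commit_cache() call now passes recursive=False, since sync_last_update_for_objects already refreshes group caches explicitly: it iterates the root groups and updates nested groups in reversed order. Below is a simplified, assumed illustration of why that leaves-first order matters; recursive_groups() is taken to yield groups top-down, as its use above suggests.

# Standalone illustration (assumed, simplified) of the leaves-first ordering used
# in sync_last_update_for_objects: a parent group is refreshed only after all of
# its children, so its cached value can be derived from up-to-date children.
def recursive_groups(group):
    """Yield the group first, then its nested groups (top-down)."""
    yield group
    for child in group['children']:
        yield from recursive_groups(child)


root_gr = {
    'name': 'projects',
    'children': [
        {'name': 'projects/backend', 'children': [
            {'name': 'projects/backend/internal', 'children': []},
        ]},
        {'name': 'projects/frontend', 'children': []},
    ],
}

# reversed() turns the top-down listing into a leaves-first one
for repo_gr in reversed(list(recursive_groups(root_gr))):
    print('update commit cache for', repo_gr['name'])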
@@ -1,824 +1,824 @@
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Utilities library for RhodeCode
21 21 """
22 22
23 23 import datetime
24 24
25 25 import decorator
26 26 import logging
27 27 import os
28 28 import re
29 29 import sys
30 30 import shutil
31 31 import socket
32 32 import tempfile
33 33 import traceback
34 34 import tarfile
35 35
36 36 from functools import wraps
37 37 from os.path import join as jn
38 38
39 39 import paste
40 40 import pkg_resources
41 41 from webhelpers2.text import collapse, strip_tags, convert_accented_entities, convert_misc_entities
42 42
43 43 from mako import exceptions
44 44
45 45 from rhodecode.lib.hash_utils import sha256_safe, md5, sha1
46 46 from rhodecode.lib.type_utils import AttributeDict
47 47 from rhodecode.lib.str_utils import safe_bytes, safe_str
48 48 from rhodecode.lib.vcs.backends.base import Config
49 49 from rhodecode.lib.vcs.exceptions import VCSError
50 50 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
51 51 from rhodecode.lib.ext_json import sjson as json
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import (
54 54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 55 from rhodecode.model.meta import Session
56 56
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 # String which contains characters that are not allowed in slug names for
63 63 # repositories or repository groups. It is properly escaped to use it in
64 64 # regular expressions.
65 65 SLUG_BAD_CHARS = re.escape(r'`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66 66
67 67 # Regex that matches forbidden characters in repo/group slugs.
68 68 SLUG_BAD_CHAR_RE = re.compile(r'[{}\x00-\x08\x0b-\x0c\x0e-\x1f]'.format(SLUG_BAD_CHARS))
69 69
70 70 # Regex that matches allowed characters in repo/group slugs.
71 71 SLUG_GOOD_CHAR_RE = re.compile(r'[^{}]'.format(SLUG_BAD_CHARS))
72 72
73 73 # Regex that matches whole repo/group slugs.
74 74 SLUG_RE = re.compile(r'[^{}]+'.format(SLUG_BAD_CHARS))
75 75
76 76 _license_cache = None
77 77
78 78
79 79 def adopt_for_celery(func):
80 80 """
81 81 Decorator designed to adopt hooks (from rhodecode.lib.hooks_base)
82 82 for further usage as a celery tasks.
83 83 """
84 84 @wraps(func)
85 85 def wrapper(extras):
86 86 extras = AttributeDict(extras)
87 87 # HooksResponse implements to_json method which must be used there.
88 88 return func(extras).to_json()
89 89 return wrapper
90 90
91 91
92 92 def repo_name_slug(value):
93 93 """
94 94 Return a slug of the repository name.
95 95 This function is called on each creation/modification
96 96 of a repository to prevent bad names.
97 97 """
98 98
99 99 replacement_char = '-'
100 100
101 101 slug = strip_tags(value)
102 102 slug = convert_accented_entities(slug)
103 103 slug = convert_misc_entities(slug)
104 104
105 105 slug = SLUG_BAD_CHAR_RE.sub('', slug)
106 106 slug = re.sub(r'[\s]+', '-', slug)
107 107 slug = collapse(slug, replacement_char)
108 108
109 109 return slug
110 110
111 111
112 112 #==============================================================================
113 113 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
114 114 #==============================================================================
115 115 def get_repo_slug(request):
116 116 _repo = ''
117 117
118 118 if hasattr(request, 'db_repo_name'):
119 119 # if our request has set a db reference use it for the name, this
120 120 # translates the example.com/_<id> into proper repo names
121 121 _repo = request.db_repo_name
122 122 elif getattr(request, 'matchdict', None):
123 123 # pyramid
124 124 _repo = request.matchdict.get('repo_name')
125 125
126 126 if _repo:
127 127 _repo = _repo.rstrip('/')
128 128 return _repo
129 129
130 130
131 131 def get_repo_group_slug(request):
132 132 _group = ''
133 133 if hasattr(request, 'db_repo_group'):
134 134 # if our request has set a db reference use it for the name, this
135 135 # translates the example.com/_<id> into proper repo group names
136 136 _group = request.db_repo_group.group_name
137 137 elif getattr(request, 'matchdict', None):
138 138 # pyramid
139 139 _group = request.matchdict.get('repo_group_name')
140 140
141 141 if _group:
142 142 _group = _group.rstrip('/')
143 143 return _group
144 144
145 145
146 146 def get_user_group_slug(request):
147 147 _user_group = ''
148 148
149 149 if hasattr(request, 'db_user_group'):
150 150 _user_group = request.db_user_group.users_group_name
151 151 elif getattr(request, 'matchdict', None):
152 152 # pyramid
153 153 _user_group = request.matchdict.get('user_group_id')
154 154 _user_group_name = request.matchdict.get('user_group_name')
155 155 try:
156 156 if _user_group:
157 157 _user_group = UserGroup.get(_user_group)
158 158 elif _user_group_name:
159 159 _user_group = UserGroup.get_by_group_name(_user_group_name)
160 160
161 161 if _user_group:
162 162 _user_group = _user_group.users_group_name
163 163 except Exception:
164 164 log.exception('Failed to get user group by id and name')
165 165 # catch all failures here
166 166 return None
167 167
168 168 return _user_group
169 169
170 170
171 171 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
172 172 """
173 173 Scans the given path for repos and returns (name, (type, path)) tuples
174 174
175 175 :param path: path to scan for repositories
176 176 :param recursive: recursive search and return names with subdirs in front
177 177 """
178 178
179 179 # remove ending slash for better results
180 180 path = path.rstrip(os.sep)
181 181 log.debug('now scanning in %s location recursive:%s...', path, recursive)
182 182
183 183 def _get_repos(p):
184 184 dirpaths = get_dirpaths(p)
185 185 if not _is_dir_writable(p):
186 186 log.warning('repo path without write access: %s', p)
187 187
188 188 for dirpath in dirpaths:
189 189 if os.path.isfile(os.path.join(p, dirpath)):
190 190 continue
191 191 cur_path = os.path.join(p, dirpath)
192 192
193 193 # skip removed repos
194 194 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
195 195 continue
196 196
197 197 # skip .<something> dirs
198 198 if dirpath.startswith('.'):
199 199 continue
200 200
201 201 try:
202 202 scm_info = get_scm(cur_path)
203 203 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
204 204 except VCSError:
205 205 if not recursive:
206 206 continue
207 207 # check if this dir contains other repos for recursive scan
208 208 rec_path = os.path.join(p, dirpath)
209 209 if os.path.isdir(rec_path):
210 210 yield from _get_repos(rec_path)
211 211
212 212 return _get_repos(path)
213 213
214 214
215 215 def get_dirpaths(p: str) -> list:
216 216 try:
217 217 # OS-independent way of checking if we have at least read-only
218 218 # access or not.
219 219 dirpaths = os.listdir(p)
220 220 except OSError:
221 221 log.warning('ignoring repo path without read access: %s', p)
222 222 return []
223 223
224 224 # os.listdir has a tweak: if a unicode path is passed into it, then it tries to
225 225 # decode paths and suddenly returns unicode objects itself. The items it
226 226 # cannot decode are returned as strings and cause issues.
227 227 #
228 228 # Those paths are ignored here until a solid solution for path handling has
229 229 # been built.
230 230 expected_type = type(p)
231 231
232 232 def _has_correct_type(item):
233 233 if type(item) is not expected_type:
234 234 log.error(
235 235 "Ignoring path %s since it cannot be decoded into str.",
236 236 # Using "repr" to make sure that we see the byte value in case
237 237 # of support.
238 238 repr(item))
239 239 return False
240 240 return True
241 241
242 242 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
243 243
244 244 return dirpaths
245 245
246 246
247 247 def _is_dir_writable(path):
248 248 """
249 249 Probe if `path` is writable.
250 250
251 251 Due to trouble on Cygwin / Windows, this is actually probing if it is
252 252 possible to create a file inside of `path`, stat does not produce reliable
253 253 results in this case.
254 254 """
255 255 try:
256 256 with tempfile.TemporaryFile(dir=path):
257 257 pass
258 258 except OSError:
259 259 return False
260 260 return True
261 261
262 262
263 263 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
264 264 """
265 265 Returns True if the given path is a valid repository, False otherwise.
266 266 If expect_scm is given, also compare whether the detected scm matches
267 267 the expected one. If explicit_scm is given, don't try to detect the
268 268 scm; just use the given one to check if the repo is valid.
269 269
270 270 :param repo_name:
271 271 :param base_path:
272 272 :param expect_scm:
273 273 :param explicit_scm:
274 274 :param config:
275 275
276 276 :return True: if given path is a valid repository
277 277 """
278 278 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
279 279 log.debug('Checking if `%s` is a valid path for repository. '
280 280 'Explicit type: %s', repo_name, explicit_scm)
281 281
282 282 try:
283 283 if explicit_scm:
284 284 detected_scms = [get_scm_backend(explicit_scm)(
285 285 full_path, config=config).alias]
286 286 else:
287 287 detected_scms = get_scm(full_path)
288 288
289 289 if expect_scm:
290 290 return detected_scms[0] == expect_scm
291 291 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
292 292 return True
293 293 except VCSError:
294 294 log.debug('path: %s is not a valid repo !', full_path)
295 295 return False
296 296
297 297
298 298 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
299 299 """
300 300 Returns True if a given path is a repository group, False otherwise
301 301
302 302 :param repo_group_name:
303 303 :param base_path:
304 304 """
305 305 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
306 306 log.debug('Checking if `%s` is a valid path for repository group',
307 307 repo_group_name)
308 308
309 309 # check if it's not a repo
310 310 if is_valid_repo(repo_group_name, base_path):
311 311 log.debug('Repo called %s exists, it is not a valid repo group', repo_group_name)
312 312 return False
313 313
314 314 try:
315 315 # we need to check bare git repos at higher level
316 316 # since we might match branches/hooks/info/objects or possibly
317 317 # other things inside a bare git repo
318 318 maybe_repo = os.path.dirname(full_path)
319 319 if maybe_repo == base_path:
320 320 # skip root level repo check; we know root location CANNOT BE a repo group
321 321 return False
322 322
323 323 scm_ = get_scm(maybe_repo)
324 324 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
325 325 return False
326 326 except VCSError:
327 327 pass
328 328
329 329 # check if it's a valid path
330 330 if skip_path_check or os.path.isdir(full_path):
331 331 log.debug('path: %s is a valid repo group !', full_path)
332 332 return True
333 333
334 334 log.debug('path: %s is not a valid repo group !', full_path)
335 335 return False
336 336
337 337
338 338 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
339 339 while True:
340 340 ok = input(prompt)
341 341 if ok.lower() in ('y', 'ye', 'yes'):
342 342 return True
343 343 if ok.lower() in ('n', 'no', 'nop', 'nope'):
344 344 return False
345 345 retries = retries - 1
346 346 if retries < 0:
347 347 raise OSError
348 348 print(complaint)
349 349
350 350 # propagated from mercurial documentation
351 351 ui_sections = [
352 352 'alias', 'auth',
353 353 'decode/encode', 'defaults',
354 354 'diff', 'email',
355 355 'extensions', 'format',
356 356 'merge-patterns', 'merge-tools',
357 357 'hooks', 'http_proxy',
358 358 'smtp', 'patch',
359 359 'paths', 'profiling',
360 360 'server', 'trusted',
361 361 'ui', 'web', ]
362 362
363 363
364 364 def config_data_from_db(clear_session=True, repo=None):
365 365 """
366 366 Read the configuration data from the database and return configuration
367 367 tuples.
368 368 """
369 369 from rhodecode.model.settings import VcsSettingsModel
370 370
371 371 config = []
372 372
373 373 sa = meta.Session()
374 374 settings_model = VcsSettingsModel(repo=repo, sa=sa)
375 375
376 376 ui_settings = settings_model.get_ui_settings()
377 377
378 378 ui_data = []
379 379 for setting in ui_settings:
380 380 if setting.active:
381 381 ui_data.append((setting.section, setting.key, setting.value))
382 382 config.append((
383 383 safe_str(setting.section), safe_str(setting.key),
384 384 safe_str(setting.value)))
385 385 if setting.key == 'push_ssl':
386 386 # force set push_ssl requirement to False, rhodecode
387 387 # handles that
388 388 config.append((
389 389 safe_str(setting.section), safe_str(setting.key), False))
390 390 log.debug(
391 391 'settings ui from db@repo[%s]: %s',
392 392 repo,
393 393 ','.join(['[{}] {}={}'.format(*s) for s in ui_data]))
394 394 if clear_session:
395 395 meta.Session.remove()
396 396
397 397 # TODO: mikhail: probably it makes no sense to re-read hooks information.
398 398 # It's already there and activated/deactivated
399 399 skip_entries = []
400 400 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
401 401 if 'pull' not in enabled_hook_classes:
402 402 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
403 403 if 'push' not in enabled_hook_classes:
404 404 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
405 405 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
406 406 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
407 407
408 408 config = [entry for entry in config if entry[:2] not in skip_entries]
409 409
410 410 return config
411 411
412 412
413 413 def make_db_config(clear_session=True, repo=None):
414 414 """
415 415 Create a :class:`Config` instance based on the values in the database.
416 416 """
417 417 config = Config()
418 418 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
419 419 for section, option, value in config_data:
420 420 config.set(section, option, value)
421 421 return config
422 422
423 423
424 424 def get_enabled_hook_classes(ui_settings):
425 425 """
426 426 Return the enabled hook classes.
427 427
428 428 :param ui_settings: List of ui_settings as returned
429 429 by :meth:`VcsSettingsModel.get_ui_settings`
430 430
431 431 :return: a list with the enabled hook classes. The order is not guaranteed.
432 432 :rtype: list
433 433 """
434 434 enabled_hooks = []
435 435 active_hook_keys = [
436 436 key for section, key, value, active in ui_settings
437 437 if section == 'hooks' and active]
438 438
439 439 hook_names = {
440 440 RhodeCodeUi.HOOK_PUSH: 'push',
441 441 RhodeCodeUi.HOOK_PULL: 'pull',
442 442 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
443 443 }
444 444
445 445 for key in active_hook_keys:
446 446 hook = hook_names.get(key)
447 447 if hook:
448 448 enabled_hooks.append(hook)
449 449
450 450 return enabled_hooks
451 451
452 452
453 453 def set_rhodecode_config(config):
454 454 """
455 455 Updates pyramid config with new settings from database
456 456
457 457 :param config:
458 458 """
459 459 from rhodecode.model.settings import SettingsModel
460 460 app_settings = SettingsModel().get_all_settings()
461 461
462 462 for k, v in list(app_settings.items()):
463 463 config[k] = v
464 464
465 465
466 466 def get_rhodecode_realm():
467 467 """
468 468 Return the rhodecode realm from database.
469 469 """
470 470 from rhodecode.model.settings import SettingsModel
471 471 realm = SettingsModel().get_setting_by_name('realm')
472 472 return safe_str(realm.app_settings_value)
473 473
474 474
475 475 def get_rhodecode_repo_store_path():
476 476 """
477 477 Returns the base path. The base path is the filesystem path which points
478 478 to the repository store.
479 479 """
480 480
481 481 import rhodecode
482 482 return rhodecode.CONFIG['repo_store.path']
483 483
484 484
485 485 def map_groups(path):
486 486 """
487 487 Given a full path to a repository, create all nested groups that this
488 488 repo is inside. This function creates parent-child relationships between
489 489 groups and creates default perms for all new groups.
490 490
491 491 :param path: full path to repository
492 492 """
493 493 from rhodecode.model.repo_group import RepoGroupModel
494 494 sa = meta.Session()
495 495 groups = path.split(Repository.NAME_SEP)
496 496 parent = None
497 497 group = None
498 498
499 499 # last element is repo in nested groups structure
500 500 groups = groups[:-1]
501 501 rgm = RepoGroupModel(sa)
502 502 owner = User.get_first_super_admin()
503 503 for lvl, group_name in enumerate(groups):
504 504 group_name = '/'.join(groups[:lvl] + [group_name])
505 505 group = RepoGroup.get_by_group_name(group_name)
506 506 desc = '%s group' % group_name
507 507
508 508 # skip folders that are now removed repos
509 509 if REMOVED_REPO_PAT.match(group_name):
510 510 break
511 511
512 512 if group is None:
513 513 log.debug('creating group level: %s group_name: %s',
514 514 lvl, group_name)
515 515 group = RepoGroup(group_name, parent)
516 516 group.group_description = desc
517 517 group.user = owner
518 518 sa.add(group)
519 519 perm_obj = rgm._create_default_perms(group)
520 520 sa.add(perm_obj)
521 521 sa.flush()
522 522
523 523 parent = group
524 524 return group
525 525
526 526
527 527 def repo2db_mapper(initial_repo_list, remove_obsolete=False, force_hooks_rebuild=False):
528 528 """
529 529 Maps all repos given in initial_repo_list; non-existing repositories
530 530 are created. If remove_obsolete is True it also checks for db entries
531 531 that are not in initial_repo_list and removes them.
532 532
533 533 :param initial_repo_list: list of repositories found by scanning methods
534 534 :param remove_obsolete: check for obsolete entries in database
535 535 """
536 536 from rhodecode.model.repo import RepoModel
537 537 from rhodecode.model.repo_group import RepoGroupModel
538 538 from rhodecode.model.settings import SettingsModel
539 539
540 540 sa = meta.Session()
541 541 repo_model = RepoModel()
542 542 user = User.get_first_super_admin()
543 543 added = []
544 544
545 545 # creation defaults
546 546 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
547 547 enable_statistics = defs.get('repo_enable_statistics')
548 548 enable_locking = defs.get('repo_enable_locking')
549 549 enable_downloads = defs.get('repo_enable_downloads')
550 550 private = defs.get('repo_private')
551 551
552 552 for name, repo in list(initial_repo_list.items()):
553 553 group = map_groups(name)
554 554 str_name = safe_str(name)
555 555 db_repo = repo_model.get_by_repo_name(str_name)
556 556
557 557 # found repo that is on filesystem not in RhodeCode database
558 558 if not db_repo:
559 559 log.info('repository `%s` not found in the database, creating now', name)
560 560 added.append(name)
561 561 desc = (repo.description
562 562 if repo.description != 'unknown'
563 563 else '%s repository' % name)
564 564
565 565 db_repo = repo_model._create_repo(
566 566 repo_name=name,
567 567 repo_type=repo.alias,
568 568 description=desc,
569 569 repo_group=getattr(group, 'group_id', None),
570 570 owner=user,
571 571 enable_locking=enable_locking,
572 572 enable_downloads=enable_downloads,
573 573 enable_statistics=enable_statistics,
574 574 private=private,
575 575 state=Repository.STATE_CREATED
576 576 )
577 577 sa.commit()
578 578 # we just added that repo, so make sure the server info is updated
579 579 if db_repo.repo_type == 'git':
580 580 git_repo = db_repo.scm_instance()
581 581 # update repository server-info
582 582 log.debug('Running update server info')
583 583 git_repo._update_server_info(force=True)
584 584
585 db_repo.update_commit_cache()
585 db_repo.update_commit_cache(recursive=False)
586 586
587 587 config = db_repo._config
588 588 config.set('extensions', 'largefiles', '')
589 589 repo = db_repo.scm_instance(config=config)
590 590 repo.install_hooks(force=force_hooks_rebuild)
591 591
592 592 removed = []
593 593 if remove_obsolete:
594 594 # remove from database those repositories that are not in the filesystem
595 595 for repo in sa.query(Repository).all():
596 596 if repo.repo_name not in list(initial_repo_list.keys()):
597 597 log.debug("Removing non-existing repository found in db `%s`",
598 598 repo.repo_name)
599 599 try:
600 600 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
601 601 sa.commit()
602 602 removed.append(repo.repo_name)
603 603 except Exception:
604 604 # don't hold further removals on error
605 605 log.error(traceback.format_exc())
606 606 sa.rollback()
607 607
608 608 def splitter(full_repo_name):
609 609 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
610 610 gr_name = None
611 611 if len(_parts) == 2:
612 612 gr_name = _parts[0]
613 613 return gr_name
614 614
615 615 initial_repo_group_list = [splitter(x) for x in
616 616 list(initial_repo_list.keys()) if splitter(x)]
617 617
618 618 # remove from database those repository groups that are not on the
619 619 # filesystem; due to parent-child relationships we need to delete them
620 620 # in a specific order, most nested first
621 621 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
622 622 def nested_sort(gr):
623 623 return len(gr.split('/'))
624 624 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
625 625 if group_name not in initial_repo_group_list:
626 626 repo_group = RepoGroup.get_by_group_name(group_name)
627 627 if (repo_group.children.all() or
628 628 not RepoGroupModel().check_exist_filesystem(
629 629 group_name=group_name, exc_on_failure=False)):
630 630 continue
631 631
632 632 log.info(
633 633 'Removing non-existing repository group found in db `%s`',
634 634 group_name)
635 635 try:
636 636 RepoGroupModel(sa).delete(group_name, fs_remove=False)
637 637 sa.commit()
638 638 removed.append(group_name)
639 639 except Exception:
640 640 # don't hold further removals on error
641 641 log.exception(
642 642 'Unable to remove repository group `%s`',
643 643 group_name)
644 644 sa.rollback()
645 645 raise
646 646
647 647 return added, removed
648 648
649 649
650 650 def load_rcextensions(root_path):
651 651 import rhodecode
652 652 from rhodecode.config import conf
653 653
654 654 path = os.path.join(root_path)
655 655 sys.path.append(path)
656 656
657 657 try:
658 658 rcextensions = __import__('rcextensions')
659 659 except ImportError:
660 660 if os.path.isdir(os.path.join(path, 'rcextensions')):
661 661 log.warning('Unable to load rcextensions from %s', path)
662 662 rcextensions = None
663 663
664 664 if rcextensions:
665 665 log.info('Loaded rcextensions from %s...', rcextensions)
666 666 rhodecode.EXTENSIONS = rcextensions
667 667
668 668 # Additional mappings that are not present in the pygments lexers
669 669 conf.LANGUAGES_EXTENSIONS_MAP.update(
670 670 getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
671 671
672 672
673 673 def get_custom_lexer(extension):
674 674 """
675 675 returns a custom lexer if it is defined in rcextensions module, or None
676 676 if there's no custom lexer defined
677 677 """
678 678 import rhodecode
679 679 from pygments import lexers
680 680
681 681 # custom override made by RhodeCode
682 682 if extension in ['mako']:
683 683 return lexers.get_lexer_by_name('html+mako')
684 684
685 685 # check if we didn't define this extension as other lexer
686 686 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
687 687 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
688 688 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
689 689 return lexers.get_lexer_by_name(_lexer_name)
690 690
691 691
692 692 #==============================================================================
693 693 # TEST FUNCTIONS AND CREATORS
694 694 #==============================================================================
695 695 def create_test_index(repo_location, config):
696 696 """
697 697 Makes default test index.
698 698 """
699 699 try:
700 700 import rc_testdata
701 701 except ImportError:
702 702 raise ImportError('Failed to import rc_testdata, '
703 703 'please make sure this package is installed from requirements_test.txt')
704 704 rc_testdata.extract_search_index(
705 705 'vcs_search_index', os.path.dirname(config['search.location']))
706 706
707 707
708 708 def create_test_directory(test_path):
709 709 """
710 710 Create test directory if it doesn't exist.
711 711 """
712 712 if not os.path.isdir(test_path):
713 713 log.debug('Creating testdir %s', test_path)
714 714 os.makedirs(test_path)
715 715
716 716
717 717 def create_test_database(test_path, config):
718 718 """
719 719 Makes a fresh database.
720 720 """
721 721 from rhodecode.lib.db_manage import DbManage
722 722 from rhodecode.lib.utils2 import get_encryption_key
723 723
724 724 # PART ONE create db
725 725 dbconf = config['sqlalchemy.db1.url']
726 726 enc_key = get_encryption_key(config)
727 727
728 728 log.debug('making test db %s', dbconf)
729 729
730 730 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
731 731 tests=True, cli_args={'force_ask': True}, enc_key=enc_key)
732 732 dbmanage.create_tables(override=True)
733 733 dbmanage.set_db_version()
734 734 # for tests dynamically set new root paths based on generated content
735 735 dbmanage.create_settings(dbmanage.config_prompt(test_path))
736 736 dbmanage.create_default_user()
737 737 dbmanage.create_test_admin_and_users()
738 738 dbmanage.create_permissions()
739 739 dbmanage.populate_default_permissions()
740 740 Session().commit()
741 741
742 742
743 743 def create_test_repositories(test_path, config):
744 744 """
745 745 Creates test repositories in the temporary directory. Repositories are
746 746 extracted from archives within the rc_testdata package.
747 747 """
748 748 import rc_testdata
749 749 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
750 750
751 751 log.debug('making test vcs repositories')
752 752
753 753 idx_path = config['search.location']
754 754 data_path = config['cache_dir']
755 755
756 756 # clean index and data
757 757 if idx_path and os.path.exists(idx_path):
758 758 log.debug('remove %s', idx_path)
759 759 shutil.rmtree(idx_path)
760 760
761 761 if data_path and os.path.exists(data_path):
762 762 log.debug('remove %s', data_path)
763 763 shutil.rmtree(data_path)
764 764
765 765 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
766 766 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
767 767
768 768 # Note: Subversion is in the process of being integrated with the system,
769 769 # until we have a properly packed version of the test svn repository, this
770 770 # tries to copy over the repo from a package "rc_testdata"
771 771 svn_repo_path = rc_testdata.get_svn_repo_archive()
772 772 with tarfile.open(svn_repo_path) as tar:
773 773 tar.extractall(jn(test_path, SVN_REPO))
774 774
775 775
776 776 def password_changed(auth_user, session):
777 777 # Never report password change in case of default user or anonymous user.
778 778 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
779 779 return False
780 780
781 781 password_hash = md5(safe_bytes(auth_user.password)) if auth_user.password else None
782 782 rhodecode_user = session.get('rhodecode_user', {})
783 783 session_password_hash = rhodecode_user.get('password', '')
784 784 return password_hash != session_password_hash
785 785
786 786
787 787 def read_opensource_licenses():
788 788 global _license_cache
789 789
790 790 if not _license_cache:
791 791 licenses = pkg_resources.resource_string(
792 792 'rhodecode', 'config/licenses.json')
793 793 _license_cache = json.loads(licenses)
794 794
795 795 return _license_cache
796 796
797 797
798 798 def generate_platform_uuid():
799 799 """
800 800 Generates a platform UUID based on its name
801 801 """
802 802 import platform
803 803
804 804 try:
805 805 uuid_list = [platform.platform()]
806 806 return sha256_safe(':'.join(uuid_list))
807 807 except Exception as e:
808 808 log.error('Failed to generate host uuid: %s', e)
809 809 return 'UNDEFINED'
810 810
811 811
812 812 def send_test_email(recipients, email_body='TEST EMAIL'):
813 813 """
814 814 Simple code for generating test emails.
815 815 Usage::
816 816
817 817 from rhodecode.lib import utils
818 818 utils.send_test_email()
819 819 """
820 820 from rhodecode.lib.celerylib import tasks, run_task
821 821
822 822 email_body = email_body_plaintext = email_body
823 823 subject = f'SUBJECT FROM: {socket.gethostname()}'
824 824 tasks.send_email(recipients, subject, email_body_plaintext, email_body)
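
For reference, the group-name derivation performed by the splitter helper inside repo2db_mapper above can be exercised on its own. The sketch below assumes '/' as the separator returned by RepoGroup.url_sep() in the real code.

# Minimal sketch of the `splitter` helper used in repo2db_mapper above,
# assuming RepoGroup.url_sep() returns '/'.
def splitter(full_repo_name, sep='/'):
    _parts = full_repo_name.rsplit(sep, 1)
    gr_name = None
    if len(_parts) == 2:
        gr_name = _parts[0]
    return gr_name


assert splitter('projects/backend/api') == 'projects/backend'
assert splitter('top-level-repo') is None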
@@ -1,6046 +1,6056 @@
1 1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 Database Models for RhodeCode Enterprise
21 21 """
22 22
23 23 import re
24 24 import os
25 25 import time
26 26 import string
27 27 import logging
28 28 import datetime
29 29 import uuid
30 30 import warnings
31 31 import ipaddress
32 32 import functools
33 33 import traceback
34 34 import collections
35 35
36 36 import pyotp
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, cast, TypeDecorator, event, select,
39 39 true, false, null, union_all,
40 40 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
41 41 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
42 42 Text, Float, PickleType, BigInteger)
43 43 from sqlalchemy.sql.expression import case
44 44 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
45 45 from sqlalchemy.orm import (
46 46 relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only)
47 47 from sqlalchemy.ext.declarative import declared_attr
48 48 from sqlalchemy.ext.hybrid import hybrid_property
49 49 from sqlalchemy.exc import IntegrityError # pragma: no cover
50 50 from sqlalchemy.dialects.mysql import LONGTEXT
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52 52 from pyramid.threadlocal import get_current_request
53 53 from webhelpers2.text import remove_formatting
54 54
55 55 from rhodecode import ConfigGet
56 56 from rhodecode.lib.str_utils import safe_bytes
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 59 from rhodecode.lib.vcs.backends.base import (
60 60 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
61 61 from rhodecode.lib.utils2 import (
62 62 str2bool, safe_str, get_commit_safe, sha1_safe,
63 63 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
64 64 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
65 65 from rhodecode.lib.jsonalchemy import (
66 66 MutationObj, MutationList, JsonType, JsonRaw)
67 67 from rhodecode.lib.hash_utils import sha1
68 68 from rhodecode.lib import ext_json
69 69 from rhodecode.lib import enc_utils
70 70 from rhodecode.lib.ext_json import json, str_json
71 71 from rhodecode.lib.caching_query import FromCache
72 72 from rhodecode.lib.exceptions import (
73 73 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
74 74 from rhodecode.model.meta import Base, Session
75 75
76 76 URL_SEP = '/'
77 77 log = logging.getLogger(__name__)
78 78
79 79 # =============================================================================
80 80 # BASE CLASSES
81 81 # =============================================================================
82 82
83 83 # this is propagated from .ini file rhodecode.encrypted_values.secret or
84 84 # beaker.session.secret if first is not set.
85 85 # and initialized at environment.py
86 86 ENCRYPTION_KEY: bytes = b''
87 87
88 88 # used to sort permissions by types, '#' used here is not allowed to be in
89 89 # usernames, and it's very early in sorted string.printable table.
90 90 PERMISSION_TYPE_SORT = {
91 91 'admin': '####',
92 92 'write': '###',
93 93 'read': '##',
94 94 'none': '#',
95 95 }
96 96
97 97
98 98 def display_user_sort(obj):
99 99 """
100 100 Sort function used to sort permissions in .permissions() function of
101 101 Repository, RepoGroup, UserGroup. Also it puts the default user in front
102 102 of all other resources
103 103 """
104 104
105 105 if obj.username == User.DEFAULT_USER:
106 106 return '#####'
107 107 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
108 108 extra_sort_num = '1' # default
109 109
110 110 # NOTE(dan): inactive duplicates goes last
111 111 if getattr(obj, 'duplicate_perm', None):
112 112 extra_sort_num = '9'
113 113 return prefix + extra_sort_num + obj.username
114 114
115 115
116 116 def display_user_group_sort(obj):
117 117 """
118 118 Sort function used to sort permissions in .permissions() function of
119 119 Repository, RepoGroup, UserGroup. Also it puts the default user in front
120 120 of all other resources
121 121 """
122 122
123 123 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
124 124 return prefix + obj.users_group_name
125 125
126 126
127 127 def _hash_key(k):
128 128 return sha1_safe(k)
129 129
130 130
131 131 def description_escaper(desc):
132 132 from rhodecode.lib import helpers as h
133 133 return h.escape(desc)
134 134
135 135
136 136 def in_filter_generator(qry, items, limit=500):
137 137 """
138 138 Splits IN() into multiple with OR
139 139 e.g.::
140 140 cnt = Repository.query().filter(
141 141 or_(
142 142 *in_filter_generator(Repository.repo_id, range(100000))
143 143 )).count()
144 144 """
145 145 if not items:
146 146 # empty list will cause empty query which might cause security issues
147 147 # this can lead to hidden unpleasant results
148 148 items = [-1]
149 149
150 150 parts = []
151 151 for chunk in range(0, len(items), limit):
152 152 parts.append(
153 153 qry.in_(items[chunk: chunk + limit])
154 154 )
155 155
156 156 return parts
157 157
158 158
159 159 base_table_args = {
160 160 'extend_existing': True,
161 161 'mysql_engine': 'InnoDB',
162 162 'mysql_charset': 'utf8',
163 163 'sqlite_autoincrement': True
164 164 }
165 165
166 166
167 167 class EncryptedTextValue(TypeDecorator):
168 168 """
169 169 Special column for encrypted long text data, use like::
170 170
171 171 value = Column("encrypted_value", EncryptedValue(), nullable=False)
172 172
173 173 This column is intelligent, so if the value is in unencrypted form it returns
174 174 the unencrypted form, but on save it always encrypts
175 175 """
176 176 cache_ok = True
177 177 impl = Text
178 178
179 179 def process_bind_param(self, value, dialect):
180 180 """
181 181 Setter for storing value
182 182 """
183 183 import rhodecode
184 184 if not value:
185 185 return value
186 186
187 187 # protect against double encrypting if values is already encrypted
188 188 if value.startswith('enc$aes$') \
189 189 or value.startswith('enc$aes_hmac$') \
190 190 or value.startswith('enc2$'):
191 191 raise ValueError('value needs to be in unencrypted format, '
192 192 'ie. not starting with enc$ or enc2$')
193 193
194 194 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
195 195 bytes_val = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
196 196 return safe_str(bytes_val)
197 197
198 198 def process_result_value(self, value, dialect):
199 199 """
200 200 Getter for retrieving value
201 201 """
202 202
203 203 import rhodecode
204 204 if not value:
205 205 return value
206 206
207 207 bytes_val = enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY)
208 208
209 209 return safe_str(bytes_val)
210 210
211 211
212 212 class BaseModel(object):
213 213 """
214 214 Base Model for all classes
215 215 """
216 216
217 217 @classmethod
218 218 def _get_keys(cls):
219 219 """return column names for this model """
220 220 return class_mapper(cls).c.keys()
221 221
222 222 def get_dict(self):
223 223 """
224 224 return dict with keys and values corresponding
225 225 to this model data """
226 226
227 227 d = {}
228 228 for k in self._get_keys():
229 229 d[k] = getattr(self, k)
230 230
231 231 # also use __json__() if present to get additional fields
232 232 _json_attr = getattr(self, '__json__', None)
233 233 if _json_attr:
234 234 # update with attributes from __json__
235 235 if callable(_json_attr):
236 236 _json_attr = _json_attr()
237 237 for k, val in _json_attr.items():
238 238 d[k] = val
239 239 return d
240 240
241 241 def get_appstruct(self):
242 242 """return list with keys and values tuples corresponding
243 243 to this model data """
244 244
245 245 lst = []
246 246 for k in self._get_keys():
247 247 lst.append((k, getattr(self, k),))
248 248 return lst
249 249
250 250 def populate_obj(self, populate_dict):
251 251 """populate model with data from given populate_dict"""
252 252
253 253 for k in self._get_keys():
254 254 if k in populate_dict:
255 255 setattr(self, k, populate_dict[k])
256 256
257 257 @classmethod
258 258 def query(cls):
259 259 return Session().query(cls)
260 260
261 261 @classmethod
262 262 def select(cls, custom_cls=None):
263 263 """
264 264 stmt = cls.select().where(cls.user_id==1)
265 265 # optionally
266 266 stmt = cls.select(User.user_id).where(cls.user_id==1)
267 267 result = cls.execute(stmt) | cls.scalars(stmt)
268 268 """
269 269
270 270 if custom_cls:
271 271 stmt = select(custom_cls)
272 272 else:
273 273 stmt = select(cls)
274 274 return stmt
275 275
276 276 @classmethod
277 277 def execute(cls, stmt):
278 278 return Session().execute(stmt)
279 279
280 280 @classmethod
281 281 def scalars(cls, stmt):
282 282 return Session().scalars(stmt)
283 283
284 284 @classmethod
285 285 def get(cls, id_):
286 286 if id_:
287 287 return cls.query().get(id_)
288 288
289 289 @classmethod
290 290 def get_or_404(cls, id_):
291 291 from pyramid.httpexceptions import HTTPNotFound
292 292
293 293 try:
294 294 id_ = int(id_)
295 295 except (TypeError, ValueError):
296 296 raise HTTPNotFound()
297 297
298 298 res = cls.query().get(id_)
299 299 if not res:
300 300 raise HTTPNotFound()
301 301 return res
302 302
303 303 @classmethod
304 304 def getAll(cls):
305 305 # deprecated and left for backward compatibility
306 306 return cls.get_all()
307 307
308 308 @classmethod
309 309 def get_all(cls):
310 310 return cls.query().all()
311 311
312 312 @classmethod
313 313 def delete(cls, id_):
314 314 obj = cls.query().get(id_)
315 315 Session().delete(obj)
316 316
317 317 @classmethod
318 318 def identity_cache(cls, session, attr_name, value):
319 319 exist_in_session = []
320 320 for (item_cls, pkey), instance in session.identity_map.items():
321 321 if cls == item_cls and getattr(instance, attr_name) == value:
322 322 exist_in_session.append(instance)
323 323 if exist_in_session:
324 324 if len(exist_in_session) == 1:
325 325 return exist_in_session[0]
326 326 log.exception(
327 327 'multiple objects with attr %s and '
328 328 'value %s found in the session: %r',
329 329 attr_name, value, exist_in_session)
330 330
331 331 @property
332 332 def cls_name(self):
333 333 return self.__class__.__name__
334 334
335 335 def __repr__(self):
336 336 return f'<DB:{self.cls_name}>'
337 337
338 338
339 339 class RhodeCodeSetting(Base, BaseModel):
340 340 __tablename__ = 'rhodecode_settings'
341 341 __table_args__ = (
342 342 UniqueConstraint('app_settings_name'),
343 343 base_table_args
344 344 )
345 345
346 346 SETTINGS_TYPES = {
347 347 'str': safe_str,
348 348 'int': safe_int,
349 349 'unicode': safe_str,
350 350 'bool': str2bool,
351 351 'list': functools.partial(aslist, sep=',')
352 352 }
353 353 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
354 354 GLOBAL_CONF_KEY = 'app_settings'
355 355
356 356 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
357 357 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
358 358 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
359 359 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
360 360
361 361 def __init__(self, key='', val='', type='unicode'):
362 362 self.app_settings_name = key
363 363 self.app_settings_type = type
364 364 self.app_settings_value = val
365 365
366 366 @validates('_app_settings_value')
367 367 def validate_settings_value(self, key, val):
368 368 assert type(val) == str
369 369 return val
370 370
371 371 @hybrid_property
372 372 def app_settings_value(self):
373 373 v = self._app_settings_value
374 374 _type = self.app_settings_type
375 375 if _type:
376 376 _type = self.app_settings_type.split('.')[0]
377 377 # decode the encrypted value
378 378 if 'encrypted' in self.app_settings_type:
379 379 cipher = EncryptedTextValue()
380 380 v = safe_str(cipher.process_result_value(v, None))
381 381
382 382 converter = self.SETTINGS_TYPES.get(_type) or \
383 383 self.SETTINGS_TYPES['unicode']
384 384 return converter(v)
385 385
386 386 @app_settings_value.setter
387 387 def app_settings_value(self, val):
388 388 """
389 389 Setter that will always make sure we use unicode in app_settings_value
390 390
391 391 :param val:
392 392 """
393 393 val = safe_str(val)
394 394 # encode the encrypted value
395 395 if 'encrypted' in self.app_settings_type:
396 396 cipher = EncryptedTextValue()
397 397 val = safe_str(cipher.process_bind_param(val, None))
398 398 self._app_settings_value = val
399 399
400 400 @hybrid_property
401 401 def app_settings_type(self):
402 402 return self._app_settings_type
403 403
404 404 @app_settings_type.setter
405 405 def app_settings_type(self, val):
406 406 if val.split('.')[0] not in self.SETTINGS_TYPES:
407 407 raise Exception('type must be one of %s got %s'
408 408 % (self.SETTINGS_TYPES.keys(), val))
409 409 self._app_settings_type = val
410 410
411 411 @classmethod
412 412 def get_by_prefix(cls, prefix):
413 413 return RhodeCodeSetting.query()\
414 414 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
415 415 .all()
416 416
417 417 def __repr__(self):
418 418 return "<%s('%s:%s[%s]')>" % (
419 419 self.cls_name,
420 420 self.app_settings_name, self.app_settings_value,
421 421 self.app_settings_type
422 422 )
423 423
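# NOTE (editor sketch): the app_settings_type value drives conversion on read;
# e.g. a hypothetical boolean setting goes through str2bool, and types
# containing 'encrypted' are decrypted via EncryptedTextValue first:
#
#   setting = RhodeCodeSetting(key='example_flag', val='True', type='bool')
#   assert setting.app_settings_value is True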
424 424
425 425 class RhodeCodeUi(Base, BaseModel):
426 426 __tablename__ = 'rhodecode_ui'
427 427 __table_args__ = (
428 428 UniqueConstraint('ui_key'),
429 429 base_table_args
430 430 )
431 431 # Sync those values with vcsserver.config.hooks
432 432
433 433 HOOK_REPO_SIZE = 'changegroup.repo_size'
434 434 # HG
435 435 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
436 436 HOOK_PULL = 'outgoing.pull_logger'
437 437 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
438 438 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
439 439 HOOK_PUSH = 'changegroup.push_logger'
440 440 HOOK_PUSH_KEY = 'pushkey.key_push'
441 441
442 442 HOOKS_BUILTIN = [
443 443 HOOK_PRE_PULL,
444 444 HOOK_PULL,
445 445 HOOK_PRE_PUSH,
446 446 HOOK_PRETX_PUSH,
447 447 HOOK_PUSH,
448 448 HOOK_PUSH_KEY,
449 449 ]
450 450
451 451 # TODO: johbo: Unify way how hooks are configured for git and hg,
452 452 # git part is currently hardcoded.
453 453
454 454 # SVN PATTERNS
455 455 SVN_BRANCH_ID = 'vcs_svn_branch'
456 456 SVN_TAG_ID = 'vcs_svn_tag'
457 457
458 458 ui_id = Column(
459 459 "ui_id", Integer(), nullable=False, unique=True, default=None,
460 460 primary_key=True)
461 461 ui_section = Column(
462 462 "ui_section", String(255), nullable=True, unique=None, default=None)
463 463 ui_key = Column(
464 464 "ui_key", String(255), nullable=True, unique=None, default=None)
465 465 ui_value = Column(
466 466 "ui_value", String(255), nullable=True, unique=None, default=None)
467 467 ui_active = Column(
468 468 "ui_active", Boolean(), nullable=True, unique=None, default=True)
469 469
470 470 def __repr__(self):
471 471 return '<%s[%s]%s=>%s>' % (self.cls_name, self.ui_section,
472 472 self.ui_key, self.ui_value)
473 473
474 474
475 475 class RepoRhodeCodeSetting(Base, BaseModel):
476 476 __tablename__ = 'repo_rhodecode_settings'
477 477 __table_args__ = (
478 478 UniqueConstraint(
479 479 'app_settings_name', 'repository_id',
480 480 name='uq_repo_rhodecode_setting_name_repo_id'),
481 481 base_table_args
482 482 )
483 483
484 484 repository_id = Column(
485 485 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
486 486 nullable=False)
487 487 app_settings_id = Column(
488 488 "app_settings_id", Integer(), nullable=False, unique=True,
489 489 default=None, primary_key=True)
490 490 app_settings_name = Column(
491 491 "app_settings_name", String(255), nullable=True, unique=None,
492 492 default=None)
493 493 _app_settings_value = Column(
494 494 "app_settings_value", String(4096), nullable=True, unique=None,
495 495 default=None)
496 496 _app_settings_type = Column(
497 497 "app_settings_type", String(255), nullable=True, unique=None,
498 498 default=None)
499 499
500 500 repository = relationship('Repository', viewonly=True)
501 501
502 502 def __init__(self, repository_id, key='', val='', type='unicode'):
503 503 self.repository_id = repository_id
504 504 self.app_settings_name = key
505 505 self.app_settings_type = type
506 506 self.app_settings_value = val
507 507
508 508 @validates('_app_settings_value')
509 509 def validate_settings_value(self, key, val):
510 510 assert type(val) == str
511 511 return val
512 512
513 513 @hybrid_property
514 514 def app_settings_value(self):
515 515 v = self._app_settings_value
516 516 type_ = self.app_settings_type
517 517 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
518 518 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
519 519 return converter(v)
520 520
521 521 @app_settings_value.setter
522 522 def app_settings_value(self, val):
523 523 """
524 524 Setter that will always make sure we use unicode in app_settings_value
525 525
526 526 :param val:
527 527 """
528 528 self._app_settings_value = safe_str(val)
529 529
530 530 @hybrid_property
531 531 def app_settings_type(self):
532 532 return self._app_settings_type
533 533
534 534 @app_settings_type.setter
535 535 def app_settings_type(self, val):
536 536 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
537 537 if val not in SETTINGS_TYPES:
538 538 raise Exception('type must be one of %s got %s'
539 539 % (SETTINGS_TYPES.keys(), val))
540 540 self._app_settings_type = val
541 541
542 542 def __repr__(self):
543 543 return "<%s('%s:%s:%s[%s]')>" % (
544 544 self.cls_name, self.repository.repo_name,
545 545 self.app_settings_name, self.app_settings_value,
546 546 self.app_settings_type
547 547 )
548 548
549 549
550 550 class RepoRhodeCodeUi(Base, BaseModel):
551 551 __tablename__ = 'repo_rhodecode_ui'
552 552 __table_args__ = (
553 553 UniqueConstraint(
554 554 'repository_id', 'ui_section', 'ui_key',
555 555 name='uq_repo_rhodecode_ui_repository_id_section_key'),
556 556 base_table_args
557 557 )
558 558
559 559 repository_id = Column(
560 560 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
561 561 nullable=False)
562 562 ui_id = Column(
563 563 "ui_id", Integer(), nullable=False, unique=True, default=None,
564 564 primary_key=True)
565 565 ui_section = Column(
566 566 "ui_section", String(255), nullable=True, unique=None, default=None)
567 567 ui_key = Column(
568 568 "ui_key", String(255), nullable=True, unique=None, default=None)
569 569 ui_value = Column(
570 570 "ui_value", String(255), nullable=True, unique=None, default=None)
571 571 ui_active = Column(
572 572 "ui_active", Boolean(), nullable=True, unique=None, default=True)
573 573
574 574 repository = relationship('Repository', viewonly=True)
575 575
576 576 def __repr__(self):
577 577 return '<%s[%s:%s]%s=>%s>' % (
578 578 self.cls_name, self.repository.repo_name,
579 579 self.ui_section, self.ui_key, self.ui_value)
580 580
581 581
582 582 class User(Base, BaseModel):
583 583 __tablename__ = 'users'
584 584 __table_args__ = (
585 585 UniqueConstraint('username'), UniqueConstraint('email'),
586 586 Index('u_username_idx', 'username'),
587 587 Index('u_email_idx', 'email'),
588 588 base_table_args
589 589 )
590 590
591 591 DEFAULT_USER = 'default'
592 592 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
593 593 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
594 594 RECOVERY_CODES_COUNT = 10
595 595
596 596 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
597 597 username = Column("username", String(255), nullable=True, unique=None, default=None)
598 598 password = Column("password", String(255), nullable=True, unique=None, default=None)
599 599 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
600 600 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
601 601 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
602 602 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
603 603 _email = Column("email", String(255), nullable=True, unique=None, default=None)
604 604 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
605 605 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
606 606 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
607 607
608 608 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
609 609 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
610 610 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
611 611 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
612 612 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
613 613 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
614 614
615 615 user_log = relationship('UserLog', back_populates='user')
616 616 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
617 617
618 618 repositories = relationship('Repository', back_populates='user')
619 619 repository_groups = relationship('RepoGroup', back_populates='user')
620 620 user_groups = relationship('UserGroup', back_populates='user')
621 621
622 622 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user')
623 623 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user')
624 624
625 625 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
626 626 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
627 627 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user')
628 628
629 629 group_member = relationship('UserGroupMember', cascade='all', back_populates='user')
630 630
631 631 notifications = relationship('UserNotification', cascade='all', back_populates='user')
632 632 # notifications assigned to this user
633 633 user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user')
634 634 # comments created by this user
635 635 user_comments = relationship('ChangesetComment', cascade='all', back_populates='author')
636 636 # user profile extra info
637 637 user_emails = relationship('UserEmailMap', cascade='all', back_populates='user')
638 638 user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user')
639 639 user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user')
640 640 user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user')
641 641
642 642 # gists
643 643 user_gists = relationship('Gist', cascade='all', back_populates='owner')
644 644 # user pull requests
645 645 user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author')
646 646
647 647 # external identities
648 648 external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all')
649 649 # review rules
650 650 user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user')
651 651
652 652 # artifacts owned
653 653 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user')
654 654
655 655 # no cascade, set NULL
656 656 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user')
657 657
658 658 def __repr__(self):
659 659 return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>"
660 660
661 661 @hybrid_property
662 662 def email(self):
663 663 return self._email
664 664
665 665 @email.setter
666 666 def email(self, val):
667 667 self._email = val.lower() if val else None
668 668
669 669 @hybrid_property
670 670 def first_name(self):
671 671 if self.name:
672 672 return description_escaper(self.name)
673 673 return self.name
674 674
675 675 @hybrid_property
676 676 def last_name(self):
677 677 if self.lastname:
678 678 return description_escaper(self.lastname)
679 679 return self.lastname
680 680
681 681 @hybrid_property
682 682 def api_key(self):
683 683 """
684 684 Fetch an auth-token with role ALL connected to this user, if one exists
685 685 """
686 686 user_auth_token = UserApiKeys.query()\
687 687 .filter(UserApiKeys.user_id == self.user_id)\
688 688 .filter(or_(UserApiKeys.expires == -1,
689 689 UserApiKeys.expires >= time.time()))\
690 690 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
691 691 if user_auth_token:
692 692 user_auth_token = user_auth_token.api_key
693 693
694 694 return user_auth_token
695 695
696 696 @api_key.setter
697 697 def api_key(self, val):
698 698 # setting the API key directly is deprecated for now; don't allow it
699 699 self._api_key = None
700 700
701 701 @property
702 702 def reviewer_pull_requests(self):
703 703 return PullRequestReviewers.query() \
704 704 .options(joinedload(PullRequestReviewers.pull_request)) \
705 705 .filter(PullRequestReviewers.user_id == self.user_id) \
706 706 .all()
707 707
708 708 @property
709 709 def firstname(self):
710 710 # alias for future
711 711 return self.name
712 712
713 713 @property
714 714 def emails(self):
715 715 other = UserEmailMap.query()\
716 716 .filter(UserEmailMap.user == self) \
717 717 .order_by(UserEmailMap.email_id.asc()) \
718 718 .all()
719 719 return [self.email] + [x.email for x in other]
720 720
721 721 def emails_cached(self):
722 722 emails = []
723 723 if self.user_id != self.get_default_user_id():
724 724 emails = UserEmailMap.query()\
725 725 .filter(UserEmailMap.user == self) \
726 726 .order_by(UserEmailMap.email_id.asc())
727 727
728 728 emails = emails.options(
729 729 FromCache("sql_cache_short", f"get_user_{self.user_id}_emails")
730 730 )
731 731
732 732 return [self.email] + [x.email for x in emails]
733 733
734 734 @property
735 735 def auth_tokens(self):
736 736 auth_tokens = self.get_auth_tokens()
737 737 return [x.api_key for x in auth_tokens]
738 738
739 739 def get_auth_tokens(self):
740 740 return UserApiKeys.query()\
741 741 .filter(UserApiKeys.user == self)\
742 742 .order_by(UserApiKeys.user_api_key_id.asc())\
743 743 .all()
744 744
745 745 @LazyProperty
746 746 def feed_token(self):
747 747 return self.get_feed_token()
748 748
749 749 def get_feed_token(self, cache=True):
750 750 feed_tokens = UserApiKeys.query()\
751 751 .filter(UserApiKeys.user == self)\
752 752 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
753 753 if cache:
754 754 feed_tokens = feed_tokens.options(
755 755 FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))
756 756
757 757 feed_tokens = feed_tokens.all()
758 758 if feed_tokens:
759 759 return feed_tokens[0].api_key
760 760 return 'NO_FEED_TOKEN_AVAILABLE'
761 761
762 762 @LazyProperty
763 763 def artifact_token(self):
764 764 return self.get_artifact_token()
765 765
766 766 def get_artifact_token(self, cache=True):
767 767 artifacts_tokens = UserApiKeys.query()\
768 768 .filter(UserApiKeys.user == self) \
769 769 .filter(or_(UserApiKeys.expires == -1,
770 770 UserApiKeys.expires >= time.time())) \
771 771 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
772 772
773 773 if cache:
774 774 artifacts_tokens = artifacts_tokens.options(
775 775 FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}"))
776 776
777 777 artifacts_tokens = artifacts_tokens.all()
778 778 if artifacts_tokens:
779 779 return artifacts_tokens[0].api_key
780 780 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
781 781
782 782 def get_or_create_artifact_token(self):
783 783 artifacts_tokens = UserApiKeys.query()\
784 784 .filter(UserApiKeys.user == self) \
785 785 .filter(or_(UserApiKeys.expires == -1,
786 786 UserApiKeys.expires >= time.time())) \
787 787 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
788 788
789 789 artifacts_tokens = artifacts_tokens.all()
790 790 if artifacts_tokens:
791 791 return artifacts_tokens[0].api_key
792 792 else:
793 793 from rhodecode.model.auth_token import AuthTokenModel
794 794 artifact_token = AuthTokenModel().create(
795 795 self, 'auto-generated-artifact-token',
796 796 lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
797 797 Session().commit()
798 798 return artifact_token.api_key
799 799
800 800 def is_totp_valid(self, received_code, secret):
801 801 totp = pyotp.TOTP(secret)
802 802 return totp.verify(received_code)
803 803
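# NOTE (editor sketch): is_totp_valid() simply delegates to pyotp (imported at
# module level, as its use here implies); an illustrative check with a freshly
# generated secret:
#
#   secret = pyotp.random_base32()
#   code = pyotp.TOTP(secret).now()
#   assert user.is_totp_valid(code, secret)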
804 804 def is_2fa_recovery_code_valid(self, received_code, secret):
805 805 encrypted_recovery_codes = self.user_data.get('recovery_codes_2fa', [])
806 806 recovery_codes = self.get_2fa_recovery_codes()
807 807 if received_code in recovery_codes:
808 808 encrypted_recovery_codes.pop(recovery_codes.index(received_code))
809 809 self.update_userdata(recovery_codes_2fa=encrypted_recovery_codes)
810 810 return True
811 811 return False
812 812
813 813 @hybrid_property
814 814 def has_forced_2fa(self):
815 815 """
816 816 Checks if 2fa was forced for current user
817 817 """
818 818 from rhodecode.model.settings import SettingsModel
819 819 if value := SettingsModel().get_setting_by_name(f'auth_{self.extern_type}_global_2fa'):
820 820 return value.app_settings_value
821 821 return False
822 822
823 823 @hybrid_property
824 824 def has_enabled_2fa(self):
825 825 """
826 826 Checks if user enabled 2fa
827 827 """
828 828 if value := self.has_forced_2fa:
829 829 return value
830 830 return self.user_data.get('enabled_2fa', False)
831 831
832 832 @has_enabled_2fa.setter
833 833 def has_enabled_2fa(self, val):
834 834 val = str2bool(val)
835 835 self.update_userdata(enabled_2fa=val)
836 836 if not val:
837 837 # NOTE: setting to false we clear the user_data to not store any 2fa artifacts
838 838 self.update_userdata(secret_2fa=None, recovery_codes_2fa=[], check_2fa=False)
839 839 Session().commit()
840 840
841 841 @hybrid_property
842 842 def check_2fa_required(self):
843 843 """
844 844 Check if the 'check 2fa' flag is set for this user
845 845 """
846 846 value = self.user_data.get('check_2fa', False)
847 847 return value
848 848
849 849 @check_2fa_required.setter
850 850 def check_2fa_required(self, val):
851 851 val = str2bool(val)
852 852 self.update_userdata(check_2fa=val)
853 853 Session().commit()
854 854
855 855 @hybrid_property
856 856 def has_seen_2fa_codes(self):
857 857 """
858 858 get the flag indicating whether the user has seen the 2fa recovery codes
859 859 """
860 860 value = self.user_data.get('recovery_codes_2fa_seen', False)
861 861 return value
862 862
863 863 @has_seen_2fa_codes.setter
864 864 def has_seen_2fa_codes(self, val):
865 865 val = str2bool(val)
866 866 self.update_userdata(recovery_codes_2fa_seen=val)
867 867 Session().commit()
868 868
869 869 @hybrid_property
870 870 def needs_2fa_configure(self):
871 871 """
872 872 Determines whether this user still needs to configure 2fa, i.e. whether 2fa setup has not been completed yet.
873 873 
874 874 Currently this means 2fa is enabled but no secret has been generated yet
875 875 """
876 876 if self.has_enabled_2fa:
877 877 return not self.user_data.get('secret_2fa')
878 878 return False
879 879
880 880 def init_2fa_recovery_codes(self, persist=True, force=False):
881 881 """
882 882 Creates 2fa recovery codes
883 883 """
884 884 recovery_codes = self.user_data.get('recovery_codes_2fa', [])
885 885 encrypted_codes = []
886 886 if not recovery_codes or force:
887 887 for _ in range(self.RECOVERY_CODES_COUNT):
888 888 recovery_code = pyotp.random_base32()
889 889 recovery_codes.append(recovery_code)
890 890 encrypted_code = enc_utils.encrypt_value(safe_bytes(recovery_code), enc_key=ENCRYPTION_KEY)
891 891 encrypted_codes.append(safe_str(encrypted_code))
892 892 if persist:
893 893 self.update_userdata(recovery_codes_2fa=encrypted_codes, recovery_codes_2fa_seen=False)
894 894 return recovery_codes
895 895 # existing recovery codes are not returned again; the user should only see them once
896 896 return []
897 897
898 898 def get_2fa_recovery_codes(self):
899 899 encrypted_recovery_codes = self.user_data.get('recovery_codes_2fa', [])
900 900
901 901 recovery_codes = list(map(
902 902 lambda val: safe_str(
903 903 enc_utils.decrypt_value(
904 904 val,
905 905 enc_key=ENCRYPTION_KEY
906 906 )),
907 907 encrypted_recovery_codes))
908 908 return recovery_codes
909 909
910 910 def init_secret_2fa(self, persist=True, force=False):
911 911 secret_2fa = self.user_data.get('secret_2fa')
912 912 if not secret_2fa or force:
913 913 secret = pyotp.random_base32()
914 914 if persist:
915 915 self.update_userdata(secret_2fa=safe_str(enc_utils.encrypt_value(safe_bytes(secret), enc_key=ENCRYPTION_KEY)))
916 916 return secret
917 917 return ''
918 918
919 919 @hybrid_property
920 920 def secret_2fa(self) -> str:
921 921 """
922 922 get stored secret for 2fa
923 923 """
924 924 secret_2fa = self.user_data.get('secret_2fa')
925 925 if secret_2fa:
926 926 return safe_str(
927 927 enc_utils.decrypt_value(secret_2fa, enc_key=ENCRYPTION_KEY))
928 928 return ''
929 929
930 930 @secret_2fa.setter
931 931 def secret_2fa(self, value: str) -> None:
932 932 encrypted_value = enc_utils.encrypt_value(safe_bytes(value), enc_key=ENCRYPTION_KEY)
933 933 self.update_userdata(secret_2fa=safe_str(encrypted_value))
934 934
935 935 def regenerate_2fa_recovery_codes(self):
936 936 """
937 937 Regenerates 2fa recovery codes upon request
938 938 """
939 939 new_recovery_codes = self.init_2fa_recovery_codes(force=True)
940 940 Session().commit()
941 941 return new_recovery_codes
942 942
943 943 @classmethod
944 944 def extra_valid_auth_tokens(cls, user, role=None):
945 945 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
946 946 .filter(or_(UserApiKeys.expires == -1,
947 947 UserApiKeys.expires >= time.time()))
948 948 if role:
949 949 tokens = tokens.filter(or_(UserApiKeys.role == role,
950 950 UserApiKeys.role == UserApiKeys.ROLE_ALL))
951 951 return tokens.all()
952 952
953 953 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
954 954 from rhodecode.lib import auth
955 955
956 956 log.debug('Trying to authenticate user: %s via auth-token, '
957 957 'and roles: %s', self, roles)
958 958
959 959 if not auth_token:
960 960 return False
961 961
962 962 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
963 963 tokens_q = UserApiKeys.query()\
964 964 .filter(UserApiKeys.user_id == self.user_id)\
965 965 .filter(or_(UserApiKeys.expires == -1,
966 966 UserApiKeys.expires >= time.time()))
967 967
968 968 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
969 969
970 970 crypto_backend = auth.crypto_backend()
971 971 enc_token_map = {}
972 972 plain_token_map = {}
973 973 for token in tokens_q:
974 974 if token.api_key.startswith(crypto_backend.ENC_PREF):
975 975 enc_token_map[token.api_key] = token
976 976 else:
977 977 plain_token_map[token.api_key] = token
978 978 log.debug(
979 979 'Found %s plain and %s encrypted tokens to check for authentication for this user',
980 980 len(plain_token_map), len(enc_token_map))
981 981
982 982 # plain token match comes first
983 983 match = plain_token_map.get(auth_token)
984 984
985 985 # check encrypted tokens now
986 986 if not match:
987 987 for token_hash, token in enc_token_map.items():
988 988 # NOTE(marcink): this is expensive to calculate, but most secure
989 989 if crypto_backend.hash_check(auth_token, token_hash):
990 990 match = token
991 991 break
992 992
993 993 if match:
994 994 log.debug('Found matching token %s', match)
995 995 if match.repo_id:
996 996 log.debug('Found scope, checking for scope match of token %s', match)
997 997 if match.repo_id == scope_repo_id:
998 998 return True
999 999 else:
1000 1000 log.debug(
1001 1001 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
1002 1002 'and calling scope is: %s, skipping further checks',
1003 1003 match.repo, scope_repo_id)
1004 1004 return False
1005 1005 else:
1006 1006 return True
1007 1007
1008 1008 return False
1009 1009
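# NOTE (editor sketch): hypothetical authenticate_by_token() call; ROLE_ALL is
# always accepted in addition to the requested roles, and repo-scoped tokens
# only match when scope_repo_id equals the token's repo_id:
#
#   ok = user.authenticate_by_token(
#       received_token, roles=[UserApiKeys.ROLE_API], scope_repo_id=None)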
1010 1010 @property
1011 1011 def ip_addresses(self):
1012 1012 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
1013 1013 return [x.ip_addr for x in ret]
1014 1014
1015 1015 @property
1016 1016 def username_and_name(self):
1017 1017 return f'{self.username} ({self.first_name} {self.last_name})'
1018 1018
1019 1019 @property
1020 1020 def username_or_name_or_email(self):
1021 1021 full_name = self.full_name if self.full_name != ' ' else None
1022 1022 return self.username or full_name or self.email
1023 1023
1024 1024 @property
1025 1025 def full_name(self):
1026 1026 return f'{self.first_name} {self.last_name}'
1027 1027
1028 1028 @property
1029 1029 def full_name_or_username(self):
1030 1030 return (f'{self.first_name} {self.last_name}'
1031 1031 if (self.first_name and self.last_name) else self.username)
1032 1032
1033 1033 @property
1034 1034 def full_contact(self):
1035 1035 return f'{self.first_name} {self.last_name} <{self.email}>'
1036 1036
1037 1037 @property
1038 1038 def short_contact(self):
1039 1039 return f'{self.first_name} {self.last_name}'
1040 1040
1041 1041 @property
1042 1042 def is_admin(self):
1043 1043 return self.admin
1044 1044
1045 1045 @property
1046 1046 def language(self):
1047 1047 return self.user_data.get('language')
1048 1048
1049 1049 def AuthUser(self, **kwargs):
1050 1050 """
1051 1051 Returns instance of AuthUser for this user
1052 1052 """
1053 1053 from rhodecode.lib.auth import AuthUser
1054 1054 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
1055 1055
1056 1056 @hybrid_property
1057 1057 def user_data(self):
1058 1058 if not self._user_data:
1059 1059 return {}
1060 1060
1061 1061 try:
1062 1062 return json.loads(self._user_data) or {}
1063 1063 except TypeError:
1064 1064 return {}
1065 1065
1066 1066 @user_data.setter
1067 1067 def user_data(self, val):
1068 1068 if not isinstance(val, dict):
1069 1069 raise Exception(f'user_data must be dict, got {type(val)}')
1070 1070 try:
1071 1071 self._user_data = safe_bytes(json.dumps(val))
1072 1072 except Exception:
1073 1073 log.error(traceback.format_exc())
1074 1074
1075 1075 @classmethod
1076 1076 def get(cls, user_id, cache=False):
1077 1077 if not user_id:
1078 1078 return
1079 1079
1080 1080 user = cls.query()
1081 1081 if cache:
1082 1082 user = user.options(
1083 1083 FromCache("sql_cache_short", f"get_users_{user_id}"))
1084 1084 return user.get(user_id)
1085 1085
1086 1086 @classmethod
1087 1087 def get_by_username(cls, username, case_insensitive=False,
1088 1088 cache=False):
1089 1089
1090 1090 if case_insensitive:
1091 1091 q = cls.select().where(
1092 1092 func.lower(cls.username) == func.lower(username))
1093 1093 else:
1094 1094 q = cls.select().where(cls.username == username)
1095 1095
1096 1096 if cache:
1097 1097 hash_key = _hash_key(username)
1098 1098 q = q.options(
1099 1099 FromCache("sql_cache_short", f"get_user_by_name_{hash_key}"))
1100 1100
1101 1101 return cls.execute(q).scalar_one_or_none()
1102 1102
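# NOTE (editor sketch): cached lookups rely on the FromCache query option with
# the "sql_cache_short" region that rhodecode configures elsewhere; e.g.:
#
#   user = User.get_by_username(User.DEFAULT_USER, cache=True)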
1103 1103 @classmethod
1104 1104 def get_by_username_or_primary_email(cls, user_identifier):
1105 1105 qs = union_all(cls.select().where(func.lower(cls.username) == func.lower(user_identifier)),
1106 1106 cls.select().where(func.lower(cls.email) == func.lower(user_identifier)))
1107 1107 return cls.execute(cls.select(User).from_statement(qs)).scalar_one_or_none()
1108 1108
1109 1109 @classmethod
1110 1110 def get_by_auth_token(cls, auth_token, cache=False):
1111 1111
1112 1112 q = cls.select(User)\
1113 1113 .join(UserApiKeys)\
1114 1114 .where(UserApiKeys.api_key == auth_token)\
1115 1115 .where(or_(UserApiKeys.expires == -1,
1116 1116 UserApiKeys.expires >= time.time()))
1117 1117
1118 1118 if cache:
1119 1119 q = q.options(
1120 1120 FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))
1121 1121
1122 1122 matched_user = cls.execute(q).scalar_one_or_none()
1123 1123
1124 1124 return matched_user
1125 1125
1126 1126 @classmethod
1127 1127 def get_by_email(cls, email, case_insensitive=False, cache=False):
1128 1128
1129 1129 if case_insensitive:
1130 1130 q = cls.select().where(func.lower(cls.email) == func.lower(email))
1131 1131 else:
1132 1132 q = cls.select().where(cls.email == email)
1133 1133
1134 1134 if cache:
1135 1135 email_key = _hash_key(email)
1136 1136 q = q.options(
1137 1137 FromCache("sql_cache_short", f"get_email_key_{email_key}"))
1138 1138
1139 1139 ret = cls.execute(q).scalar_one_or_none()
1140 1140
1141 1141 if ret is None:
1142 1142 q = cls.select(UserEmailMap)
1143 1143 # try fetching in alternate email map
1144 1144 if case_insensitive:
1145 1145 q = q.where(func.lower(UserEmailMap.email) == func.lower(email))
1146 1146 else:
1147 1147 q = q.where(UserEmailMap.email == email)
1148 1148 q = q.options(joinedload(UserEmailMap.user))
1149 1149 if cache:
1150 1150 q = q.options(
1151 1151 FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))
1152 1152
1153 1153 result = cls.execute(q).scalar_one_or_none()
1154 1154 ret = getattr(result, 'user', None)
1155 1155
1156 1156 return ret
1157 1157
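# NOTE (editor sketch): get_by_email() also falls back to the UserEmailMap
# table, so alternate addresses registered for a user resolve to the same
# account (hypothetical address shown):
#
#   user = User.get_by_email('someone@example.com', case_insensitive=True)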
1158 1158 @classmethod
1159 1159 def get_from_cs_author(cls, author):
1160 1160 """
1161 1161 Tries to get User objects out of commit author string
1162 1162
1163 1163 :param author:
1164 1164 """
1165 1165 from rhodecode.lib.helpers import email, author_name
1166 1166 # if a valid email is present in the passed author string, see if it's in the system
1167 1167 _email = email(author)
1168 1168 if _email:
1169 1169 user = cls.get_by_email(_email, case_insensitive=True)
1170 1170 if user:
1171 1171 return user
1172 1172 # Maybe we can match by username?
1173 1173 _author = author_name(author)
1174 1174 user = cls.get_by_username(_author, case_insensitive=True)
1175 1175 if user:
1176 1176 return user
1177 1177
1178 1178 def update_userdata(self, **kwargs):
1179 1179 usr = self
1180 1180 old = usr.user_data
1181 1181 old.update(**kwargs)
1182 1182 usr.user_data = old
1183 1183 Session().add(usr)
1184 1184 log.debug('updated userdata with %s', kwargs)
1185 1185
1186 1186 def update_lastlogin(self):
1187 1187 """Update user lastlogin"""
1188 1188 self.last_login = datetime.datetime.now()
1189 1189 Session().add(self)
1190 1190 log.debug('updated user %s lastlogin', self.username)
1191 1191
1192 1192 def update_password(self, new_password):
1193 1193 from rhodecode.lib.auth import get_crypt_password
1194 1194
1195 1195 self.password = get_crypt_password(new_password)
1196 1196 Session().add(self)
1197 1197
1198 1198 @classmethod
1199 1199 def get_first_super_admin(cls):
1200 1200 stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc())
1201 1201 user = cls.scalars(stmt).first()
1202 1202
1203 1203 if user is None:
1204 1204 raise Exception('FATAL: Missing administrative account!')
1205 1205 return user
1206 1206
1207 1207 @classmethod
1208 1208 def get_all_super_admins(cls, only_active=False):
1209 1209 """
1210 1210 Returns all admin accounts sorted by username
1211 1211 """
1212 1212 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1213 1213 if only_active:
1214 1214 qry = qry.filter(User.active == true())
1215 1215 return qry.all()
1216 1216
1217 1217 @classmethod
1218 1218 def get_all_user_ids(cls, only_active=True):
1219 1219 """
1220 1220 Returns all user IDs
1221 1221 """
1222 1222 qry = Session().query(User.user_id)
1223 1223
1224 1224 if only_active:
1225 1225 qry = qry.filter(User.active == true())
1226 1226 return [x.user_id for x in qry]
1227 1227
1228 1228 @classmethod
1229 1229 def get_default_user(cls, cache=False, refresh=False):
1230 1230 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1231 1231 if user is None:
1232 1232 raise Exception('FATAL: Missing default account!')
1233 1233 if refresh:
1234 1234 # The default user might be based on outdated state which
1235 1235 # has been loaded from the cache.
1236 1236 # A call to refresh() ensures that the
1237 1237 # latest state from the database is used.
1238 1238 Session().refresh(user)
1239 1239
1240 1240 return user
1241 1241
1242 1242 @classmethod
1243 1243 def get_default_user_id(cls):
1244 1244 import rhodecode
1245 1245 return rhodecode.CONFIG['default_user_id']
1246 1246
1247 1247 def _get_default_perms(self, user, suffix=''):
1248 1248 from rhodecode.model.permission import PermissionModel
1249 1249 return PermissionModel().get_default_perms(user.user_perms, suffix)
1250 1250
1251 1251 def get_default_perms(self, suffix=''):
1252 1252 return self._get_default_perms(self, suffix)
1253 1253
1254 1254 def get_api_data(self, include_secrets=False, details='full'):
1255 1255 """
1256 1256 Common function for generating user related data for API
1257 1257
1258 1258 :param include_secrets: By default secrets in the API data will be replaced
1259 1259 by a placeholder value to prevent exposing this data by accident. In case
1260 1260 this data shall be exposed, set this flag to ``True``.
1261 1261
1262 1262 :param details: can be 'basic' or 'full'. 'basic' gives only a subset of
1263 1263 the available user information, including user_id, name and emails.
1264 1264 """
1265 1265 user = self
1266 1266 user_data = self.user_data
1267 1267 data = {
1268 1268 'user_id': user.user_id,
1269 1269 'username': user.username,
1270 1270 'firstname': user.name,
1271 1271 'lastname': user.lastname,
1272 1272 'description': user.description,
1273 1273 'email': user.email,
1274 1274 'emails': user.emails,
1275 1275 }
1276 1276 if details == 'basic':
1277 1277 return data
1278 1278
1279 1279 auth_token_length = 40
1280 1280 auth_token_replacement = '*' * auth_token_length
1281 1281
1282 1282 extras = {
1283 1283 'auth_tokens': [auth_token_replacement],
1284 1284 'active': user.active,
1285 1285 'admin': user.admin,
1286 1286 'extern_type': user.extern_type,
1287 1287 'extern_name': user.extern_name,
1288 1288 'last_login': user.last_login,
1289 1289 'last_activity': user.last_activity,
1290 1290 'ip_addresses': user.ip_addresses,
1291 1291 'language': user_data.get('language')
1292 1292 }
1293 1293 data.update(extras)
1294 1294
1295 1295 if include_secrets:
1296 1296 data['auth_tokens'] = user.auth_tokens
1297 1297 return data
1298 1298
1299 1299 def __json__(self):
1300 1300 data = {
1301 1301 'full_name': self.full_name,
1302 1302 'full_name_or_username': self.full_name_or_username,
1303 1303 'short_contact': self.short_contact,
1304 1304 'full_contact': self.full_contact,
1305 1305 }
1306 1306 data.update(self.get_api_data())
1307 1307 return data
1308 1308
1309 1309
1310 1310 class UserApiKeys(Base, BaseModel):
1311 1311 __tablename__ = 'user_api_keys'
1312 1312 __table_args__ = (
1313 1313 Index('uak_api_key_idx', 'api_key'),
1314 1314 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1315 1315 base_table_args
1316 1316 )
1317 1317
1318 1318 # ApiKey role
1319 1319 ROLE_ALL = 'token_role_all'
1320 1320 ROLE_VCS = 'token_role_vcs'
1321 1321 ROLE_API = 'token_role_api'
1322 1322 ROLE_HTTP = 'token_role_http'
1323 1323 ROLE_FEED = 'token_role_feed'
1324 1324 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1325 1325 # The last role is excluded from the ROLES list below, as it is only
1326 1326 # used for a single action and cannot be created by users
1327 1327 ROLE_PASSWORD_RESET = 'token_password_reset'
1328 1328
1329 1329 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1330 1330
1331 1331 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1332 1332 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1333 1333 api_key = Column("api_key", String(255), nullable=False, unique=True)
1334 1334 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1335 1335 expires = Column('expires', Float(53), nullable=False)
1336 1336 role = Column('role', String(255), nullable=True)
1337 1337 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1338 1338
1339 1339 # scope columns
1340 1340 repo_id = Column(
1341 1341 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1342 1342 nullable=True, unique=None, default=None)
1343 1343 repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens')
1344 1344
1345 1345 repo_group_id = Column(
1346 1346 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1347 1347 nullable=True, unique=None, default=None)
1348 1348 repo_group = relationship('RepoGroup', lazy='joined')
1349 1349
1350 1350 user = relationship('User', lazy='joined', back_populates='user_auth_tokens')
1351 1351
1352 1352 def __repr__(self):
1353 1353 return f"<{self.cls_name}('{self.role}')>"
1354 1354
1355 1355 def __json__(self):
1356 1356 data = {
1357 1357 'auth_token': self.api_key,
1358 1358 'role': self.role,
1359 1359 'scope': self.scope_humanized,
1360 1360 'expired': self.expired
1361 1361 }
1362 1362 return data
1363 1363
1364 1364 def get_api_data(self, include_secrets=False):
1365 1365 data = self.__json__()
1366 1366 if include_secrets:
1367 1367 return data
1368 1368 else:
1369 1369 data['auth_token'] = self.token_obfuscated
1370 1370 return data
1371 1371
1372 1372 @hybrid_property
1373 1373 def description_safe(self):
1374 1374 return description_escaper(self.description)
1375 1375
1376 1376 @property
1377 1377 def expired(self):
1378 1378 if self.expires == -1:
1379 1379 return False
1380 1380 return time.time() > self.expires
1381 1381
1382 1382 @classmethod
1383 1383 def _get_role_name(cls, role):
1384 1384 return {
1385 1385 cls.ROLE_ALL: _('all'),
1386 1386 cls.ROLE_HTTP: _('http/web interface'),
1387 1387 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1388 1388 cls.ROLE_API: _('api calls'),
1389 1389 cls.ROLE_FEED: _('feed access'),
1390 1390 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1391 1391 }.get(role, role)
1392 1392
1393 1393 @classmethod
1394 1394 def _get_role_description(cls, role):
1395 1395 return {
1396 1396 cls.ROLE_ALL: _('Token for all actions.'),
1397 1397 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1398 1398 'login using `api_access_controllers_whitelist` functionality.'),
1399 1399 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1400 1400 'Requires auth_token authentication plugin to be active. <br/>'
1401 1401 'Such a token should then be used instead of a password to '
1402 1402 'interact with a repository, and can additionally be '
1403 1403 'limited to a single repository using repo scope.'),
1404 1404 cls.ROLE_API: _('Token limited to api calls.'),
1405 1405 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1406 1406 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1407 1407 }.get(role, role)
1408 1408
1409 1409 @property
1410 1410 def role_humanized(self):
1411 1411 return self._get_role_name(self.role)
1412 1412
1413 1413 def _get_scope(self):
1414 1414 if self.repo:
1415 1415 return 'Repository: {}'.format(self.repo.repo_name)
1416 1416 if self.repo_group:
1417 1417 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1418 1418 return 'Global'
1419 1419
1420 1420 @property
1421 1421 def scope_humanized(self):
1422 1422 return self._get_scope()
1423 1423
1424 1424 @property
1425 1425 def token_obfuscated(self):
1426 1426 if self.api_key:
1427 1427 return self.api_key[:4] + "****"
1428 1428
1429 1429
1430 1430 class UserEmailMap(Base, BaseModel):
1431 1431 __tablename__ = 'user_email_map'
1432 1432 __table_args__ = (
1433 1433 Index('uem_email_idx', 'email'),
1434 1434 Index('uem_user_id_idx', 'user_id'),
1435 1435 UniqueConstraint('email'),
1436 1436 base_table_args
1437 1437 )
1438 1438
1439 1439 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1440 1440 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1441 1441 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1442 1442 user = relationship('User', lazy='joined', back_populates='user_emails')
1443 1443
1444 1444 @validates('_email')
1445 1445 def validate_email(self, key, email):
1446 1446 # check if this email is not main one
1447 1447 main_email = Session().query(User).filter(User.email == email).scalar()
1448 1448 if main_email is not None:
1449 1449 raise AttributeError('email %s is present in user table' % email)
1450 1450 return email
1451 1451
1452 1452 @hybrid_property
1453 1453 def email(self):
1454 1454 return self._email
1455 1455
1456 1456 @email.setter
1457 1457 def email(self, val):
1458 1458 self._email = val.lower() if val else None
1459 1459
1460 1460
1461 1461 class UserIpMap(Base, BaseModel):
1462 1462 __tablename__ = 'user_ip_map'
1463 1463 __table_args__ = (
1464 1464 UniqueConstraint('user_id', 'ip_addr'),
1465 1465 base_table_args
1466 1466 )
1467 1467
1468 1468 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1469 1469 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1470 1470 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1471 1471 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1472 1472 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1473 1473 user = relationship('User', lazy='joined', back_populates='user_ip_map')
1474 1474
1475 1475 @hybrid_property
1476 1476 def description_safe(self):
1477 1477 return description_escaper(self.description)
1478 1478
1479 1479 @classmethod
1480 1480 def _get_ip_range(cls, ip_addr):
1481 1481 net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
1482 1482 return [str(net.network_address), str(net.broadcast_address)]
1483 1483
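# NOTE (editor sketch): _get_ip_range() expands a CIDR into its boundary
# addresses using the stdlib ipaddress module, e.g.:
#
#   UserIpMap._get_ip_range('10.0.0.0/24')   # -> ['10.0.0.0', '10.0.0.255']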
1484 1484 def __json__(self):
1485 1485 return {
1486 1486 'ip_addr': self.ip_addr,
1487 1487 'ip_range': self._get_ip_range(self.ip_addr),
1488 1488 }
1489 1489
1490 1490 def __repr__(self):
1491 1491 return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
1492 1492
1493 1493
1494 1494 class UserSshKeys(Base, BaseModel):
1495 1495 __tablename__ = 'user_ssh_keys'
1496 1496 __table_args__ = (
1497 1497 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1498 1498
1499 1499 UniqueConstraint('ssh_key_fingerprint'),
1500 1500
1501 1501 base_table_args
1502 1502 )
1503 1503
1504 1504 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1505 1505 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1506 1506 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1507 1507
1508 1508 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1509 1509
1510 1510 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1511 1511 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1512 1512 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1513 1513
1514 1514 user = relationship('User', lazy='joined', back_populates='user_ssh_keys')
1515 1515
1516 1516 def __json__(self):
1517 1517 data = {
1518 1518 'ssh_fingerprint': self.ssh_key_fingerprint,
1519 1519 'description': self.description,
1520 1520 'created_on': self.created_on
1521 1521 }
1522 1522 return data
1523 1523
1524 1524 def get_api_data(self):
1525 1525 data = self.__json__()
1526 1526 return data
1527 1527
1528 1528
1529 1529 class UserLog(Base, BaseModel):
1530 1530 __tablename__ = 'user_logs'
1531 1531 __table_args__ = (
1532 1532 base_table_args,
1533 1533 )
1534 1534
1535 1535 VERSION_1 = 'v1'
1536 1536 VERSION_2 = 'v2'
1537 1537 VERSIONS = [VERSION_1, VERSION_2]
1538 1538
1539 1539 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1540 1540 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1541 1541 username = Column("username", String(255), nullable=True, unique=None, default=None)
1542 1542 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1543 1543 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1544 1544 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1545 1545 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1546 1546 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1547 1547
1548 1548 version = Column("version", String(255), nullable=True, default=VERSION_1)
1549 1549 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1550 1550 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1551 1551 user = relationship('User', cascade='', back_populates='user_log')
1552 1552 repository = relationship('Repository', cascade='', back_populates='logs')
1553 1553
1554 1554 def __repr__(self):
1555 1555 return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"
1556 1556
1557 1557 def __json__(self):
1558 1558 return {
1559 1559 'user_id': self.user_id,
1560 1560 'username': self.username,
1561 1561 'repository_id': self.repository_id,
1562 1562 'repository_name': self.repository_name,
1563 1563 'user_ip': self.user_ip,
1564 1564 'action_date': self.action_date,
1565 1565 'action': self.action,
1566 1566 }
1567 1567
1568 1568 @hybrid_property
1569 1569 def entry_id(self):
1570 1570 return self.user_log_id
1571 1571
1572 1572 @property
1573 1573 def action_as_day(self):
1574 1574 return datetime.date(*self.action_date.timetuple()[:3])
1575 1575
1576 1576
1577 1577 class UserGroup(Base, BaseModel):
1578 1578 __tablename__ = 'users_groups'
1579 1579 __table_args__ = (
1580 1580 base_table_args,
1581 1581 )
1582 1582
1583 1583 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1584 1584 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1585 1585 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1586 1586 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1587 1587 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1588 1588 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1589 1589 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1590 1590 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1591 1591
1592 1592 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
1593 1593 users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
1594 1594 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
1595 1595 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
1596 1596 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')
1597 1597
1598 1598 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')
1599 1599
1600 1600 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
1601 1601 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')
1602 1602
1603 1603 @classmethod
1604 1604 def _load_group_data(cls, column):
1605 1605 if not column:
1606 1606 return {}
1607 1607
1608 1608 try:
1609 1609 return json.loads(column) or {}
1610 1610 except TypeError:
1611 1611 return {}
1612 1612
1613 1613 @hybrid_property
1614 1614 def description_safe(self):
1615 1615 return description_escaper(self.user_group_description)
1616 1616
1617 1617 @hybrid_property
1618 1618 def group_data(self):
1619 1619 return self._load_group_data(self._group_data)
1620 1620
1621 1621 @group_data.expression
1622 1622 def group_data(self, **kwargs):
1623 1623 return self._group_data
1624 1624
1625 1625 @group_data.setter
1626 1626 def group_data(self, val):
1627 1627 try:
1628 1628 self._group_data = json.dumps(val)
1629 1629 except Exception:
1630 1630 log.error(traceback.format_exc())
1631 1631
1632 1632 @classmethod
1633 1633 def _load_sync(cls, group_data):
1634 1634 if group_data:
1635 1635 return group_data.get('extern_type')
1636 1636
1637 1637 @property
1638 1638 def sync(self):
1639 1639 return self._load_sync(self.group_data)
1640 1640
1641 1641 def __repr__(self):
1642 1642 return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>"
1643 1643
1644 1644 @classmethod
1645 1645 def get_by_group_name(cls, group_name, cache=False,
1646 1646 case_insensitive=False):
1647 1647 if case_insensitive:
1648 1648 q = cls.query().filter(func.lower(cls.users_group_name) ==
1649 1649 func.lower(group_name))
1650 1650
1651 1651 else:
1652 1652 q = cls.query().filter(cls.users_group_name == group_name)
1653 1653 if cache:
1654 1654 name_key = _hash_key(group_name)
1655 1655 q = q.options(
1656 1656 FromCache("sql_cache_short", f"get_group_{name_key}"))
1657 1657 return q.scalar()
1658 1658
1659 1659 @classmethod
1660 1660 def get(cls, user_group_id, cache=False):
1661 1661 if not user_group_id:
1662 1662 return
1663 1663
1664 1664 user_group = cls.query()
1665 1665 if cache:
1666 1666 user_group = user_group.options(
1667 1667 FromCache("sql_cache_short", f"get_users_group_{user_group_id}"))
1668 1668 return user_group.get(user_group_id)
1669 1669
1670 1670 def permissions(self, with_admins=True, with_owner=True,
1671 1671 expand_from_user_groups=False):
1672 1672 """
1673 1673 Permissions for user groups
1674 1674 """
1675 1675 _admin_perm = 'usergroup.admin'
1676 1676
1677 1677 owner_row = []
1678 1678 if with_owner:
1679 1679 usr = AttributeDict(self.user.get_dict())
1680 1680 usr.owner_row = True
1681 1681 usr.permission = _admin_perm
1682 1682 owner_row.append(usr)
1683 1683
1684 1684 super_admin_ids = []
1685 1685 super_admin_rows = []
1686 1686 if with_admins:
1687 1687 for usr in User.get_all_super_admins():
1688 1688 super_admin_ids.append(usr.user_id)
1689 1689 # if this admin is also owner, don't double the record
1690 1690 if usr.user_id == owner_row[0].user_id:
1691 1691 owner_row[0].admin_row = True
1692 1692 else:
1693 1693 usr = AttributeDict(usr.get_dict())
1694 1694 usr.admin_row = True
1695 1695 usr.permission = _admin_perm
1696 1696 super_admin_rows.append(usr)
1697 1697
1698 1698 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1699 1699 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1700 1700 joinedload(UserUserGroupToPerm.user),
1701 1701 joinedload(UserUserGroupToPerm.permission),)
1702 1702
1703 1703 # get owners, admins and their permissions. We re-write the sqlalchemy
1704 1704 # objects into AttributeDicts because the sqlalchemy session holds a
1705 1705 # global reference, so changing one object would propagate to all
1706 1706 # others. Otherwise, if an admin is also an owner, setting admin_row
1707 1707 # on one record would propagate to both objects
1708 1708 perm_rows = []
1709 1709 for _usr in q.all():
1710 1710 usr = AttributeDict(_usr.user.get_dict())
1711 1711 # if this user is also owner/admin, mark as duplicate record
1712 1712 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1713 1713 usr.duplicate_perm = True
1714 1714 usr.permission = _usr.permission.permission_name
1715 1715 perm_rows.append(usr)
1716 1716
1717 1717 # sort the perm rows: the 'default' user first, then by
1718 1718 # admin, write, read, none permission, sorted again
1719 1719 # alphabetically within each group
1720 1720 perm_rows = sorted(perm_rows, key=display_user_sort)
1721 1721
1722 1722 user_groups_rows = []
1723 1723 if expand_from_user_groups:
1724 1724 for ug in self.permission_user_groups(with_members=True):
1725 1725 for user_data in ug.members:
1726 1726 user_groups_rows.append(user_data)
1727 1727
1728 1728 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1729 1729
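# NOTE (editor sketch): permissions() returns detached AttributeDict rows, so
# callers can annotate or render them without mutating session-bound objects:
#
#   for row in user_group.permissions(with_admins=True, with_owner=True):
#       print(row.username, row.permission)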
1730 1730 def permission_user_groups(self, with_members=False):
1731 1731 q = UserGroupUserGroupToPerm.query()\
1732 1732 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1733 1733 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1734 1734 joinedload(UserGroupUserGroupToPerm.target_user_group),
1735 1735 joinedload(UserGroupUserGroupToPerm.permission),)
1736 1736
1737 1737 perm_rows = []
1738 1738 for _user_group in q.all():
1739 1739 entry = AttributeDict(_user_group.user_group.get_dict())
1740 1740 entry.permission = _user_group.permission.permission_name
1741 1741 if with_members:
1742 1742 entry.members = [x.user.get_dict()
1743 1743 for x in _user_group.user_group.members]
1744 1744 perm_rows.append(entry)
1745 1745
1746 1746 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1747 1747 return perm_rows
1748 1748
1749 1749 def _get_default_perms(self, user_group, suffix=''):
1750 1750 from rhodecode.model.permission import PermissionModel
1751 1751 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1752 1752
1753 1753 def get_default_perms(self, suffix=''):
1754 1754 return self._get_default_perms(self, suffix)
1755 1755
1756 1756 def get_api_data(self, with_group_members=True, include_secrets=False):
1757 1757 """
1758 1758 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1759 1759 basically forwarded.
1760 1760
1761 1761 """
1762 1762 user_group = self
1763 1763 data = {
1764 1764 'users_group_id': user_group.users_group_id,
1765 1765 'group_name': user_group.users_group_name,
1766 1766 'group_description': user_group.user_group_description,
1767 1767 'active': user_group.users_group_active,
1768 1768 'owner': user_group.user.username,
1769 1769 'sync': user_group.sync,
1770 1770 'owner_email': user_group.user.email,
1771 1771 }
1772 1772
1773 1773 if with_group_members:
1774 1774 users = []
1775 1775 for user in user_group.members:
1776 1776 user = user.user
1777 1777 users.append(user.get_api_data(include_secrets=include_secrets))
1778 1778 data['users'] = users
1779 1779
1780 1780 return data
1781 1781
1782 1782
1783 1783 class UserGroupMember(Base, BaseModel):
1784 1784 __tablename__ = 'users_groups_members'
1785 1785 __table_args__ = (
1786 1786 base_table_args,
1787 1787 )
1788 1788
1789 1789 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1790 1790 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1791 1791 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1792 1792
1793 1793 user = relationship('User', lazy='joined', back_populates='group_member')
1794 1794 users_group = relationship('UserGroup', back_populates='members')
1795 1795
1796 1796 def __init__(self, gr_id='', u_id=''):
1797 1797 self.users_group_id = gr_id
1798 1798 self.user_id = u_id
1799 1799
1800 1800
1801 1801 class RepositoryField(Base, BaseModel):
1802 1802 __tablename__ = 'repositories_fields'
1803 1803 __table_args__ = (
1804 1804 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1805 1805 base_table_args,
1806 1806 )
1807 1807
1808 1808 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1809 1809
1810 1810 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1811 1811 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1812 1812 field_key = Column("field_key", String(250))
1813 1813 field_label = Column("field_label", String(1024), nullable=False)
1814 1814 field_value = Column("field_value", String(10000), nullable=False)
1815 1815 field_desc = Column("field_desc", String(1024), nullable=False)
1816 1816 field_type = Column("field_type", String(255), nullable=False, unique=None)
1817 1817 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1818 1818
1819 1819 repository = relationship('Repository', back_populates='extra_fields')
1820 1820
1821 1821 @property
1822 1822 def field_key_prefixed(self):
1823 1823 return 'ex_%s' % self.field_key
1824 1824
1825 1825 @classmethod
1826 1826 def un_prefix_key(cls, key):
1827 1827 if key.startswith(cls.PREFIX):
1828 1828 return key[len(cls.PREFIX):]
1829 1829 return key
1830 1830
1831 1831 @classmethod
1832 1832 def get_by_key_name(cls, key, repo):
1833 1833 row = cls.query()\
1834 1834 .filter(cls.repository == repo)\
1835 1835 .filter(cls.field_key == key).scalar()
1836 1836 return row
1837 1837
1838 1838
1839 1839 class Repository(Base, BaseModel):
1840 1840 __tablename__ = 'repositories'
1841 1841 __table_args__ = (
1842 1842 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1843 1843 base_table_args,
1844 1844 )
1845 1845 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1846 1846 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1847 1847 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1848 1848
1849 1849 STATE_CREATED = 'repo_state_created'
1850 1850 STATE_PENDING = 'repo_state_pending'
1851 1851 STATE_ERROR = 'repo_state_error'
1852 1852
1853 1853 LOCK_AUTOMATIC = 'lock_auto'
1854 1854 LOCK_API = 'lock_api'
1855 1855 LOCK_WEB = 'lock_web'
1856 1856 LOCK_PULL = 'lock_pull'
1857 1857
1858 1858 NAME_SEP = URL_SEP
1859 1859
1860 1860 repo_id = Column(
1861 1861 "repo_id", Integer(), nullable=False, unique=True, default=None,
1862 1862 primary_key=True)
1863 1863 _repo_name = Column(
1864 1864 "repo_name", Text(), nullable=False, default=None)
1865 1865 repo_name_hash = Column(
1866 1866 "repo_name_hash", String(255), nullable=False, unique=True)
1867 1867 repo_state = Column("repo_state", String(255), nullable=True)
1868 1868
1869 1869 clone_uri = Column(
1870 1870 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1871 1871 default=None)
1872 1872 push_uri = Column(
1873 1873 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1874 1874 default=None)
1875 1875 repo_type = Column(
1876 1876 "repo_type", String(255), nullable=False, unique=False, default=None)
1877 1877 user_id = Column(
1878 1878 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1879 1879 unique=False, default=None)
1880 1880 private = Column(
1881 1881 "private", Boolean(), nullable=True, unique=None, default=None)
1882 1882 archived = Column(
1883 1883 "archived", Boolean(), nullable=True, unique=None, default=None)
1884 1884 enable_statistics = Column(
1885 1885 "statistics", Boolean(), nullable=True, unique=None, default=True)
1886 1886 enable_downloads = Column(
1887 1887 "downloads", Boolean(), nullable=True, unique=None, default=True)
1888 1888 description = Column(
1889 1889 "description", String(10000), nullable=True, unique=None, default=None)
1890 1890 created_on = Column(
1891 1891 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1892 1892 default=datetime.datetime.now)
1893 1893 updated_on = Column(
1894 1894 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1895 1895 default=datetime.datetime.now)
1896 1896 _landing_revision = Column(
1897 1897 "landing_revision", String(255), nullable=False, unique=False,
1898 1898 default=None)
1899 1899 enable_locking = Column(
1900 1900 "enable_locking", Boolean(), nullable=False, unique=None,
1901 1901 default=False)
1902 1902 _locked = Column(
1903 1903 "locked", String(255), nullable=True, unique=False, default=None)
1904 1904 _changeset_cache = Column(
1905 1905 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1906 1906
1907 1907 fork_id = Column(
1908 1908 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1909 1909 nullable=True, unique=False, default=None)
1910 1910 group_id = Column(
1911 1911 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1912 1912 unique=False, default=None)
1913 1913
1914 1914 user = relationship('User', lazy='joined', back_populates='repositories')
1915 1915 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1916 1916 group = relationship('RepoGroup', lazy='joined')
1917 1917 repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
1918 1918 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
1919 1919 stats = relationship('Statistics', cascade='all', uselist=False)
1920 1920
1921 1921 followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
1922 1922 extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')
1923 1923
1924 1924 logs = relationship('UserLog', back_populates='repository')
1925 1925
1926 1926 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')
1927 1927
1928 1928 pull_requests_source = relationship(
1929 1929 'PullRequest',
1930 1930 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1931 1931 cascade="all, delete-orphan",
1932 1932 overlaps="source_repo"
1933 1933 )
1934 1934 pull_requests_target = relationship(
1935 1935 'PullRequest',
1936 1936 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1937 1937 cascade="all, delete-orphan",
1938 1938 overlaps="target_repo"
1939 1939 )
1940 1940
1941 1941 ui = relationship('RepoRhodeCodeUi', cascade="all")
1942 1942 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1943 1943 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')
1944 1944
1945 1945 scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')
1946 1946
1947 1947 # no cascade, set NULL
1948 1948 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)
1949 1949
1950 1950 review_rules = relationship('RepoReviewRule')
1951 1951 user_branch_perms = relationship('UserToRepoBranchPermission')
1952 1952 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
1953 1953
1954 1954 def __repr__(self):
1955 1955 return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name)
1956 1956
1957 1957 @hybrid_property
1958 1958 def description_safe(self):
1959 1959 return description_escaper(self.description)
1960 1960
1961 1961 @hybrid_property
1962 1962 def landing_rev(self):
1963 1963 # should always return [rev_type, rev], e.g. ['branch', 'master']
1964 1964 if self._landing_revision:
1965 1965 _rev_info = self._landing_revision.split(':')
1966 1966 if len(_rev_info) < 2:
1967 1967 _rev_info.insert(0, 'rev')
1968 1968 return [_rev_info[0], _rev_info[1]]
1969 1969 return [None, None]
1970 1970
1971 1971 @property
1972 1972 def landing_ref_type(self):
1973 1973 return self.landing_rev[0]
1974 1974
1975 1975 @property
1976 1976 def landing_ref_name(self):
1977 1977 return self.landing_rev[1]
1978 1978
1979 1979 @landing_rev.setter
1980 1980 def landing_rev(self, val):
1981 1981 if ':' not in val:
1982 1982 raise ValueError('value must be delimited with `:` and consist '
1983 1983 'of <rev_type>:<rev>, got %s instead' % val)
1984 1984 self._landing_revision = val
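# Illustrative sketch (not part of the changeset): `landing_rev` is persisted as a
# single '<rev_type>:<rev>' string, e.g. 'branch:master', and a bare value falls
# back to the 'rev' type. A standalone re-implementation of the parsing done by
# the property above:
def parse_landing_rev(raw):
    """Split 'branch:master' into ['branch', 'master']; bare values get 'rev'."""
    if not raw:
        return [None, None]
    parts = raw.split(':')
    if len(parts) < 2:
        parts.insert(0, 'rev')
    return [parts[0], parts[1]]

assert parse_landing_rev('branch:master') == ['branch', 'master']
assert parse_landing_rev('deadbeef') == ['rev', 'deadbeef']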
1985 1985
1986 1986 @hybrid_property
1987 1987 def locked(self):
1988 1988 if self._locked:
1989 1989 user_id, timelocked, reason = self._locked.split(':')
1990 1990 lock_values = int(user_id), timelocked, reason
1991 1991 else:
1992 1992 lock_values = [None, None, None]
1993 1993 return lock_values
1994 1994
1995 1995 @locked.setter
1996 1996 def locked(self, val):
1997 1997 if val and isinstance(val, (list, tuple)):
1998 1998 self._locked = ':'.join(map(str, val))
1999 1999 else:
2000 2000 self._locked = None
2001 2001
2002 2002 @classmethod
2003 2003 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2004 2004 from rhodecode.lib.vcs.backends.base import EmptyCommit
2005 2005 dummy = EmptyCommit().__json__()
2006 2006 if not changeset_cache_raw:
2007 2007 dummy['source_repo_id'] = repo_id
2008 2008 return json.loads(json.dumps(dummy))
2009 2009
2010 2010 try:
2011 2011 return json.loads(changeset_cache_raw)
2012 2012 except TypeError:
2013 2013 return dummy
2014 2014 except Exception:
2015 2015 log.error(traceback.format_exc())
2016 2016 return dummy
2017 2017
2018 2018 @hybrid_property
2019 2019 def changeset_cache(self):
2020 2020 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
2021 2021
2022 2022 @changeset_cache.setter
2023 2023 def changeset_cache(self, val):
2024 2024 try:
2025 2025 self._changeset_cache = json.dumps(val)
2026 2026 except Exception:
2027 2027 log.error(traceback.format_exc())
2028 2028
2029 2029 @hybrid_property
2030 2030 def repo_name(self):
2031 2031 return self._repo_name
2032 2032
2033 2033 @repo_name.setter
2034 2034 def repo_name(self, value):
2035 2035 self._repo_name = value
2036 2036 self.repo_name_hash = sha1(safe_bytes(value))
2037 2037
2038 2038 @classmethod
2039 2039 def normalize_repo_name(cls, repo_name):
2040 2040 """
2041 2041 Normalizes an OS-specific repo_name to the format stored internally
2042 2042 in the database, using URL_SEP
2043 2043
2044 2044 :param cls:
2045 2045 :param repo_name:
2046 2046 """
2047 2047 return cls.NAME_SEP.join(repo_name.split(os.sep))
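# Illustrative sketch (assumes NAME_SEP/URL_SEP is '/'): the normalization above
# only swaps the OS path separator for the canonical URL separator, so an
# OS-specific path maps onto the repo name stored in the database.
def normalize_repo_name(repo_name, os_sep='\\'):
    # equivalent of cls.NAME_SEP.join(repo_name.split(os.sep)) for a Windows path
    return '/'.join(repo_name.split(os_sep))

assert normalize_repo_name(r'group\subgroup\repo') == 'group/subgroup/repo'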
2048 2048
2049 2049 @classmethod
2050 2050 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
2051 2051 session = Session()
2052 2052 q = session.query(cls).filter(cls.repo_name == repo_name)
2053 2053
2054 2054 if cache:
2055 2055 if identity_cache:
2056 2056 val = cls.identity_cache(session, 'repo_name', repo_name)
2057 2057 if val:
2058 2058 return val
2059 2059 else:
2060 2060 cache_key = f"get_repo_by_name_{_hash_key(repo_name)}"
2061 2061 q = q.options(
2062 2062 FromCache("sql_cache_short", cache_key))
2063 2063
2064 2064 return q.scalar()
2065 2065
2066 2066 @classmethod
2067 2067 def get_by_id_or_repo_name(cls, repoid):
2068 2068 if isinstance(repoid, int):
2069 2069 try:
2070 2070 repo = cls.get(repoid)
2071 2071 except ValueError:
2072 2072 repo = None
2073 2073 else:
2074 2074 repo = cls.get_by_repo_name(repoid)
2075 2075 return repo
2076 2076
2077 2077 @classmethod
2078 2078 def get_by_full_path(cls, repo_full_path):
2079 2079 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
2080 2080 repo_name = cls.normalize_repo_name(repo_name)
2081 2081 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
2082 2082
2083 2083 @classmethod
2084 2084 def get_repo_forks(cls, repo_id):
2085 2085 return cls.query().filter(Repository.fork_id == repo_id)
2086 2086
2087 2087 @classmethod
2088 2088 def base_path(cls):
2089 2089 """
2090 2090 Returns the base path where all repos are stored
2091 2091
2092 2092 :param cls:
2093 2093 """
2094 2094 from rhodecode.lib.utils import get_rhodecode_repo_store_path
2095 2095 return get_rhodecode_repo_store_path()
2096 2096
2097 2097 @classmethod
2098 2098 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
2099 2099 case_insensitive=True, archived=False):
2100 2100 q = Repository.query()
2101 2101
2102 2102 if not archived:
2103 2103 q = q.filter(Repository.archived.isnot(true()))
2104 2104
2105 2105 if not isinstance(user_id, Optional):
2106 2106 q = q.filter(Repository.user_id == user_id)
2107 2107
2108 2108 if not isinstance(group_id, Optional):
2109 2109 q = q.filter(Repository.group_id == group_id)
2110 2110
2111 2111 if case_insensitive:
2112 2112 q = q.order_by(func.lower(Repository.repo_name))
2113 2113 else:
2114 2114 q = q.order_by(Repository.repo_name)
2115 2115
2116 2116 return q.all()
2117 2117
2118 2118 @property
2119 2119 def repo_uid(self):
2120 2120 return '_{}'.format(self.repo_id)
2121 2121
2122 2122 @property
2123 2123 def forks(self):
2124 2124 """
2125 2125 Return forks of this repo
2126 2126 """
2127 2127 return Repository.get_repo_forks(self.repo_id)
2128 2128
2129 2129 @property
2130 2130 def parent(self):
2131 2131 """
2132 2132 Returns fork parent
2133 2133 """
2134 2134 return self.fork
2135 2135
2136 2136 @property
2137 2137 def just_name(self):
2138 2138 return self.repo_name.split(self.NAME_SEP)[-1]
2139 2139
2140 2140 @property
2141 2141 def groups_with_parents(self):
2142 2142 groups = []
2143 2143 if self.group is None:
2144 2144 return groups
2145 2145
2146 2146 cur_gr = self.group
2147 2147 groups.insert(0, cur_gr)
2148 2148 while 1:
2149 2149 gr = getattr(cur_gr, 'parent_group', None)
2150 2150 cur_gr = cur_gr.parent_group
2151 2151 if gr is None:
2152 2152 break
2153 2153 groups.insert(0, gr)
2154 2154
2155 2155 return groups
2156 2156
2157 2157 @property
2158 2158 def groups_and_repo(self):
2159 2159 return self.groups_with_parents, self
2160 2160
2161 2161 @property
2162 2162 def repo_path(self):
2163 2163 """
2164 2164 Returns the full base path for this repository, i.e. where it actually
2165 2165 exists on the filesystem
2166 2166 """
2167 2167 return self.base_path()
2168 2168
2169 2169 @property
2170 2170 def repo_full_path(self):
2171 2171 p = [self.repo_path]
2172 2172 # we need to split the name by / since this is how we store the
2173 2173 # names in the database, but that eventually needs to be converted
2174 2174 # into a valid system path
2175 2175 p += self.repo_name.split(self.NAME_SEP)
2176 2176 return os.path.join(*map(safe_str, p))
2177 2177
2178 2178 @property
2179 2179 def cache_keys(self):
2180 2180 """
2181 2181 Returns associated cache keys for that repo
2182 2182 """
2183 2183 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2184 2184 return CacheKey.query()\
2185 2185 .filter(CacheKey.cache_key == repo_namespace_key)\
2186 2186 .order_by(CacheKey.cache_key)\
2187 2187 .all()
2188 2188
2189 2189 @property
2190 2190 def cached_diffs_relative_dir(self):
2191 2191 """
2192 2192 Return the path of cached diffs relative to the repository store,
2193 2193 safe to display to users, who shouldn't know the absolute store
2194 2194 path
2195 2195 """
2196 2196 return os.path.join(
2197 2197 os.path.dirname(self.repo_name),
2198 2198 self.cached_diffs_dir.split(os.path.sep)[-1])
2199 2199
2200 2200 @property
2201 2201 def cached_diffs_dir(self):
2202 2202 path = self.repo_full_path
2203 2203 return os.path.join(
2204 2204 os.path.dirname(path),
2205 2205 f'.__shadow_diff_cache_repo_{self.repo_id}')
2206 2206
2207 2207 def cached_diffs(self):
2208 2208 diff_cache_dir = self.cached_diffs_dir
2209 2209 if os.path.isdir(diff_cache_dir):
2210 2210 return os.listdir(diff_cache_dir)
2211 2211 return []
2212 2212
2213 2213 def shadow_repos(self):
2214 2214 shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}'
2215 2215 return [
2216 2216 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2217 2217 if x.startswith(shadow_repos_pattern)
2218 2218 ]
2219 2219
2220 2220 def get_new_name(self, repo_name):
2221 2221 """
2222 2222 returns the new full repository name based on the assigned group and the new name
2223 2223
2224 2224 :param repo_name:
2225 2225 """
2226 2226 path_prefix = self.group.full_path_splitted if self.group else []
2227 2227 return self.NAME_SEP.join(path_prefix + [repo_name])
2228 2228
2229 2229 @property
2230 2230 def _config(self):
2231 2231 """
2232 2232 Returns db based config object.
2233 2233 """
2234 2234 from rhodecode.lib.utils import make_db_config
2235 2235 return make_db_config(clear_session=False, repo=self)
2236 2236
2237 2237 def permissions(self, with_admins=True, with_owner=True,
2238 2238 expand_from_user_groups=False):
2239 2239 """
2240 2240 Permissions for repositories
2241 2241 """
2242 2242 _admin_perm = 'repository.admin'
2243 2243
2244 2244 owner_row = []
2245 2245 if with_owner:
2246 2246 usr = AttributeDict(self.user.get_dict())
2247 2247 usr.owner_row = True
2248 2248 usr.permission = _admin_perm
2249 2249 usr.permission_id = None
2250 2250 owner_row.append(usr)
2251 2251
2252 2252 super_admin_ids = []
2253 2253 super_admin_rows = []
2254 2254 if with_admins:
2255 2255 for usr in User.get_all_super_admins():
2256 2256 super_admin_ids.append(usr.user_id)
2257 2257 # if this admin is also owner, don't double the record
2258 2258 if usr.user_id == owner_row[0].user_id:
2259 2259 owner_row[0].admin_row = True
2260 2260 else:
2261 2261 usr = AttributeDict(usr.get_dict())
2262 2262 usr.admin_row = True
2263 2263 usr.permission = _admin_perm
2264 2264 usr.permission_id = None
2265 2265 super_admin_rows.append(usr)
2266 2266
2267 2267 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2268 2268 q = q.options(joinedload(UserRepoToPerm.repository),
2269 2269 joinedload(UserRepoToPerm.user),
2270 2270 joinedload(UserRepoToPerm.permission),)
2271 2271
2272 2272 # get owners and admins and their permissions. We re-write the sqlalchemy
2273 2273 # objects into plain AttributeDict rows, because the sqlalchemy session
2274 2274 # keeps a global reference and changing one object would propagate to all
2275 2275 # others. E.g. if the admin is also the owner, setting admin_row on one
2276 2276 # row would otherwise change both objects.
2277 2277 perm_rows = []
2278 2278 for _usr in q.all():
2279 2279 usr = AttributeDict(_usr.user.get_dict())
2280 2280 # if this user is also owner/admin, mark as duplicate record
2281 2281 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2282 2282 usr.duplicate_perm = True
2283 2283 # also check if this permission might be used by branch_permissions
2284 2284 if _usr.branch_perm_entry:
2285 2285 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2286 2286
2287 2287 usr.permission = _usr.permission.permission_name
2288 2288 usr.permission_id = _usr.repo_to_perm_id
2289 2289 perm_rows.append(usr)
2290 2290
2291 2291 # order the perm rows: entries for the 'default' user first, then by
2292 2292 # admin, write, read, none permission, and alphabetically within
2293 2293 # each group
2294 2294 perm_rows = sorted(perm_rows, key=display_user_sort)
2295 2295
2296 2296 user_groups_rows = []
2297 2297 if expand_from_user_groups:
2298 2298 for ug in self.permission_user_groups(with_members=True):
2299 2299 for user_data in ug.members:
2300 2300 user_groups_rows.append(user_data)
2301 2301
2302 2302 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2303 2303
2304 2304 def permission_user_groups(self, with_members=True):
2305 2305 q = UserGroupRepoToPerm.query()\
2306 2306 .filter(UserGroupRepoToPerm.repository == self)
2307 2307 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2308 2308 joinedload(UserGroupRepoToPerm.users_group),
2309 2309 joinedload(UserGroupRepoToPerm.permission),)
2310 2310
2311 2311 perm_rows = []
2312 2312 for _user_group in q.all():
2313 2313 entry = AttributeDict(_user_group.users_group.get_dict())
2314 2314 entry.permission = _user_group.permission.permission_name
2315 2315 if with_members:
2316 2316 entry.members = [x.user.get_dict()
2317 2317 for x in _user_group.users_group.members]
2318 2318 perm_rows.append(entry)
2319 2319
2320 2320 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2321 2321 return perm_rows
2322 2322
2323 2323 def get_api_data(self, include_secrets=False):
2324 2324 """
2325 2325 Common function for generating repo api data
2326 2326
2327 2327 :param include_secrets: See :meth:`User.get_api_data`.
2328 2328
2329 2329 """
2330 2330 # TODO: mikhail: there is an anti-pattern here; we probably need to
2331 2331 # move these methods to the model level.
2332 2332 from rhodecode.model.settings import SettingsModel
2333 2333 from rhodecode.model.repo import RepoModel
2334 2334
2335 2335 repo = self
2336 2336 _user_id, _time, _reason = self.locked
2337 2337
2338 2338 data = {
2339 2339 'repo_id': repo.repo_id,
2340 2340 'repo_name': repo.repo_name,
2341 2341 'repo_type': repo.repo_type,
2342 2342 'clone_uri': repo.clone_uri or '',
2343 2343 'push_uri': repo.push_uri or '',
2344 2344 'url': RepoModel().get_url(self),
2345 2345 'private': repo.private,
2346 2346 'created_on': repo.created_on,
2347 2347 'description': repo.description_safe,
2348 2348 'landing_rev': repo.landing_rev,
2349 2349 'owner': repo.user.username,
2350 2350 'fork_of': repo.fork.repo_name if repo.fork else None,
2351 2351 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2352 2352 'enable_statistics': repo.enable_statistics,
2353 2353 'enable_locking': repo.enable_locking,
2354 2354 'enable_downloads': repo.enable_downloads,
2355 2355 'last_changeset': repo.changeset_cache,
2356 2356 'locked_by': User.get(_user_id).get_api_data(
2357 2357 include_secrets=include_secrets) if _user_id else None,
2358 2358 'locked_date': time_to_datetime(_time) if _time else None,
2359 2359 'lock_reason': _reason if _reason else None,
2360 2360 }
2361 2361
2362 2362 # TODO: mikhail: should be per-repo settings here
2363 2363 rc_config = SettingsModel().get_all_settings()
2364 2364 repository_fields = str2bool(
2365 2365 rc_config.get('rhodecode_repository_fields'))
2366 2366 if repository_fields:
2367 2367 for f in self.extra_fields:
2368 2368 data[f.field_key_prefixed] = f.field_value
2369 2369
2370 2370 return data
2371 2371
2372 2372 @classmethod
2373 2373 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2374 2374 if not lock_time:
2375 2375 lock_time = time.time()
2376 2376 if not lock_reason:
2377 2377 lock_reason = cls.LOCK_AUTOMATIC
2378 2378 repo.locked = [user_id, lock_time, lock_reason]
2379 2379 Session().add(repo)
2380 2380 Session().commit()
2381 2381
2382 2382 @classmethod
2383 2383 def unlock(cls, repo):
2384 2384 repo.locked = None
2385 2385 Session().add(repo)
2386 2386 Session().commit()
2387 2387
2388 2388 @classmethod
2389 2389 def getlock(cls, repo):
2390 2390 return repo.locked
2391 2391
2392 2392 def get_locking_state(self, action, user_id, only_when_enabled=True):
2393 2393 """
2394 2394 Checks locking on this repository. If locking is enabled and a lock is
2395 2395 present, returns a tuple of make_lock, locked, locked_by.
2396 2396 make_lock has 3 states: None (do nothing), True (make a lock) and
2397 2397 False (release the lock). This value is later propagated to the hooks,
2398 2398 which do the actual locking; think of it as a signal telling the hooks what to do.
2399 2399
2400 2400 """
2401 2401 # TODO: johbo: This is part of the business logic and should be moved
2402 2402 # into the RepositoryModel.
2403 2403
2404 2404 if action not in ('push', 'pull'):
2405 2405 raise ValueError("Invalid action value: %s" % repr(action))
2406 2406
2407 2407 # defines if locked error should be thrown to user
2408 2408 currently_locked = False
2409 2409 # defines if new lock should be made, tri-state
2410 2410 make_lock = None
2411 2411 repo = self
2412 2412 user = User.get(user_id)
2413 2413
2414 2414 lock_info = repo.locked
2415 2415
2416 2416 if repo and (repo.enable_locking or not only_when_enabled):
2417 2417 if action == 'push':
2418 2418 # check if it's already locked; if it is, compare users
2419 2419 locked_by_user_id = lock_info[0]
2420 2420 if user.user_id == locked_by_user_id:
2421 2421 log.debug(
2422 2422 'Got `push` action from user %s, now unlocking', user)
2423 2423 # unlock if we have push from user who locked
2424 2424 make_lock = False
2425 2425 else:
2426 2426 # we're not the same user who locked, ban with
2427 2427 # code defined in settings (default is 423 HTTP Locked) !
2428 2428 log.debug('Repo %s is currently locked by %s', repo, user)
2429 2429 currently_locked = True
2430 2430 elif action == 'pull':
2431 2431 # [0] user [1] date
2432 2432 if lock_info[0] and lock_info[1]:
2433 2433 log.debug('Repo %s is currently locked by %s', repo, user)
2434 2434 currently_locked = True
2435 2435 else:
2436 2436 log.debug('Setting lock on repo %s by %s', repo, user)
2437 2437 make_lock = True
2438 2438
2439 2439 else:
2440 2440 log.debug('Repository %s does not have locking enabled', repo)
2441 2441
2442 2442 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2443 2443 make_lock, currently_locked, lock_info)
2444 2444
2445 2445 from rhodecode.lib.auth import HasRepoPermissionAny
2446 2446 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2447 2447 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2448 2448 # if we don't have at least write permission we cannot make a lock
2449 2449 log.debug('lock state reset back to FALSE due to lack '
2450 2450 'of at least write permission')
2451 2451 make_lock = False
2452 2452
2453 2453 return make_lock, currently_locked, lock_info
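# Illustrative sketch (standalone and simplified, not part of the changeset): the
# push/pull decision table of get_locking_state() above, reduced to plain values;
# the permission check and `only_when_enabled` handling are omitted.
# `locked_by_user_id` of None means "not locked".
def locking_decision(action, acting_user_id, locked_by_user_id):
    """Return (make_lock, currently_locked); make_lock is tri-state."""
    make_lock = None          # None = do nothing
    currently_locked = False
    if action == 'push':
        if acting_user_id == locked_by_user_id:
            make_lock = False          # push from the lock holder releases the lock
        else:
            currently_locked = True    # locked by somebody else, report as locked
    elif action == 'pull':
        if locked_by_user_id is not None:
            currently_locked = True    # already locked
        else:
            make_lock = True           # the first pull takes the lock
    return make_lock, currently_locked

assert locking_decision('pull', acting_user_id=2, locked_by_user_id=None) == (True, False)
assert locking_decision('push', acting_user_id=2, locked_by_user_id=2) == (False, False)
assert locking_decision('push', acting_user_id=3, locked_by_user_id=2) == (None, True)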
2454 2454
2455 2455 @property
2456 2456 def last_commit_cache_update_diff(self):
2457 2457 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2458 2458
2459 2459 @classmethod
2460 2460 def _load_commit_change(cls, last_commit_cache):
2461 2461 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2462 2462 empty_date = datetime.datetime.fromtimestamp(0)
2463 2463 date_latest = last_commit_cache.get('date', empty_date)
2464 2464 try:
2465 2465 return parse_datetime(date_latest)
2466 2466 except Exception:
2467 2467 return empty_date
2468 2468
2469 2469 @property
2470 2470 def last_commit_change(self):
2471 2471 return self._load_commit_change(self.changeset_cache)
2472 2472
2473 2473 @property
2474 2474 def last_db_change(self):
2475 2475 return self.updated_on
2476 2476
2477 2477 @property
2478 2478 def clone_uri_hidden(self):
2479 2479 clone_uri = self.clone_uri
2480 2480 if clone_uri:
2481 2481 import urlobject
2482 2482 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2483 2483 if url_obj.password:
2484 2484 clone_uri = url_obj.with_password('*****')
2485 2485 return clone_uri
2486 2486
2487 2487 @property
2488 2488 def push_uri_hidden(self):
2489 2489 push_uri = self.push_uri
2490 2490 if push_uri:
2491 2491 import urlobject
2492 2492 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2493 2493 if url_obj.password:
2494 2494 push_uri = url_obj.with_password('*****')
2495 2495 return push_uri
2496 2496
2497 2497 def clone_url(self, **override):
2498 2498 from rhodecode.model.settings import SettingsModel
2499 2499
2500 2500 uri_tmpl = None
2501 2501 if 'with_id' in override:
2502 2502 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2503 2503 del override['with_id']
2504 2504
2505 2505 if 'uri_tmpl' in override:
2506 2506 uri_tmpl = override['uri_tmpl']
2507 2507 del override['uri_tmpl']
2508 2508
2509 2509 ssh = False
2510 2510 if 'ssh' in override:
2511 2511 ssh = True
2512 2512 del override['ssh']
2513 2513
2514 2514 # we didn't override our tmpl from **overrides
2515 2515 request = get_current_request()
2516 2516 if not uri_tmpl:
2517 2517 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2518 2518 rc_config = request.call_context.rc_config
2519 2519 else:
2520 2520 rc_config = SettingsModel().get_all_settings(cache=True)
2521 2521
2522 2522 if ssh:
2523 2523 uri_tmpl = rc_config.get(
2524 2524 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2525 2525
2526 2526 else:
2527 2527 uri_tmpl = rc_config.get(
2528 2528 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2529 2529
2530 2530 return get_clone_url(request=request,
2531 2531 uri_tmpl=uri_tmpl,
2532 2532 repo_name=self.repo_name,
2533 2533 repo_id=self.repo_id,
2534 2534 repo_type=self.repo_type,
2535 2535 **override)
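# Illustrative sketch (not part of the changeset): the DEFAULT_CLONE_URI* class
# attributes used by clone_url() above are plain format strings, so expanding one
# is a single .format() call; the real get_clone_url() helper additionally handles
# the request context, SSH templates and per-call overrides.
tmpl = '{scheme}://{user}@{netloc}/{repo}'          # Repository.DEFAULT_CLONE_URI
tmpl_id = '{scheme}://{user}@{netloc}/_{repoid}'    # Repository.DEFAULT_CLONE_URI_ID

print(tmpl.format(scheme='https', user='someuser',
                  netloc='code.example.com', repo='group/my-repo'))
# https://someuser@code.example.com/group/my-repo
print(tmpl_id.format(scheme='https', user='someuser',
                     netloc='code.example.com', repoid=42))
# https://someuser@code.example.com/_42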
2536 2536
2537 2537 def set_state(self, state):
2538 2538 self.repo_state = state
2539 2539 Session().add(self)
2540 2540 #==========================================================================
2541 2541 # SCM PROPERTIES
2542 2542 #==========================================================================
2543 2543
2544 2544 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2545 2545 return get_commit_safe(
2546 2546 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2547 2547 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2548 2548
2549 2549 def get_changeset(self, rev=None, pre_load=None):
2550 2550 warnings.warn("Use get_commit", DeprecationWarning)
2551 2551 commit_id = None
2552 2552 commit_idx = None
2553 2553 if isinstance(rev, str):
2554 2554 commit_id = rev
2555 2555 else:
2556 2556 commit_idx = rev
2557 2557 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2558 2558 pre_load=pre_load)
2559 2559
2560 2560 def get_landing_commit(self):
2561 2561 """
2562 2562 Returns the landing commit, or the tip if the landing commit doesn't exist
2563 2563 """
2564 2564 _rev_type, _rev = self.landing_rev
2565 2565 commit = self.get_commit(_rev)
2566 2566 if isinstance(commit, EmptyCommit):
2567 2567 return self.get_commit()
2568 2568 return commit
2569 2569
2570 2570 def flush_commit_cache(self):
2571 2571 self.update_commit_cache(cs_cache={'raw_id':'0'})
2572 2572 self.update_commit_cache()
2573 2573
2574 def update_commit_cache(self, cs_cache=None, config=None):
2574 def update_commit_cache(self, cs_cache=None, config=None, recursive=True):
2575 2575 """
2576 2576 Update cache of last commit for repository
2577 2577 cache_keys should be::
2578 2578
2579 2579 source_repo_id
2580 2580 short_id
2581 2581 raw_id
2582 2582 revision
2583 2583 parents
2584 2584 message
2585 2585 date
2586 2586 author
2587 2587 updated_on
2588 2588
2589 2589 """
2590 2590 from rhodecode.lib.vcs.backends.base import BaseCommit
2591 2591 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2592 2592 empty_date = datetime.datetime.fromtimestamp(0)
2593 2593 repo_commit_count = 0
2594 2594
2595 2595 if cs_cache is None:
2596 2596 # use no-cache version here
2597 2597 try:
2598 2598 scm_repo = self.scm_instance(cache=False, config=config)
2599 2599 except VCSError:
2600 2600 scm_repo = None
2601 2601 empty = scm_repo is None or scm_repo.is_empty()
2602 2602
2603 2603 if not empty:
2604 2604 cs_cache = scm_repo.get_commit(
2605 2605 pre_load=["author", "date", "message", "parents", "branch"])
2606 2606 repo_commit_count = scm_repo.count()
2607 2607 else:
2608 2608 cs_cache = EmptyCommit()
2609 2609
2610 2610 if isinstance(cs_cache, BaseCommit):
2611 2611 cs_cache = cs_cache.__json__()
2612 2612
2613 def maybe_update_recursive(instance, _config, _recursive, _cs_cache, _last_change):
2614 if _recursive:
2615 repo_id = instance.repo_id
2616 _cs_cache['source_repo_id'] = repo_id
2617 for gr in instance.groups_with_parents:
2618 gr.changeset_cache = _cs_cache
2619 gr.updated_on = _last_change
2620
2613 2621 def is_outdated(new_cs_cache):
2614 2622 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2615 2623 new_cs_cache['revision'] != self.changeset_cache['revision']):
2616 2624 return True
2617 2625 return False
2618 2626
2619 2627 # check if we have maybe already latest cached revision
2620 2628 if is_outdated(cs_cache) or not self.changeset_cache:
2621 2629 _current_datetime = datetime.datetime.utcnow()
2622 2630 last_change = cs_cache.get('date') or _current_datetime
2623 2631 # we check if last update is newer than the new value
2624 2632 # if yes, we use the current timestamp instead. Imagine you get
2625 2633 # old commit pushed 1y ago, we'd set last update to 1y ago.
2626 2634 last_change_timestamp = datetime_to_time(last_change)
2627 2635 current_timestamp = datetime_to_time(last_change)
2628 2636 if last_change_timestamp > current_timestamp and not empty:
2629 2637 cs_cache['date'] = _current_datetime
2630 2638
2631 2639 # also store size of repo
2632 2640 cs_cache['repo_commit_count'] = repo_commit_count
2633 2641
2634 2642 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2635 2643 cs_cache['updated_on'] = time.time()
2636 2644 self.changeset_cache = cs_cache
2637 2645 self.updated_on = last_change
2638 2646 Session().add(self)
2647 maybe_update_recursive(self, config, recursive, cs_cache, last_change)
2639 2648 Session().commit()
2640 2649
2641 2650 else:
2642 2651 if empty:
2643 2652 cs_cache = EmptyCommit().__json__()
2644 2653 else:
2645 2654 cs_cache = self.changeset_cache
2646 2655
2647 2656 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2648 2657
2649 2658 cs_cache['updated_on'] = time.time()
2650 2659 self.changeset_cache = cs_cache
2651 2660 self.updated_on = _date_latest
2652 2661 Session().add(self)
2662 maybe_update_recursive(self, config, recursive, cs_cache, _date_latest)
2653 2663 Session().commit()
2654 2664
2655 2665 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2656 2666 self.repo_name, cs_cache, _date_latest)
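# Illustrative sketch (toy objects instead of the ORM, not part of the changeset):
# what the new `recursive=True` behaviour of update_commit_cache() does. Once the
# repository's own changeset_cache is refreshed, the same cache dict (with
# source_repo_id set) and the new updated_on value are written onto every parent
# repo group, so the commit cache propagates up the group tree.
import datetime

class ToyGroup:
    def __init__(self, name, parent=None):
        self.name, self.parent_group = name, parent
        self.changeset_cache, self.updated_on = {}, None

class ToyRepo:
    def __init__(self, repo_id, group):
        self.repo_id, self.group = repo_id, group
        self.changeset_cache, self.updated_on = {}, None

    @property
    def groups_with_parents(self):
        groups, cur = [], self.group
        while cur is not None:          # walk up to the root group
            groups.insert(0, cur)
            cur = cur.parent_group
        return groups

    def update_commit_cache(self, cs_cache, last_change, recursive=True):
        self.changeset_cache, self.updated_on = cs_cache, last_change
        if recursive:
            cs_cache['source_repo_id'] = self.repo_id
            for gr in self.groups_with_parents:
                gr.changeset_cache = cs_cache
                gr.updated_on = last_change

root = ToyGroup('root')
child = ToyGroup('root/child', parent=root)
repo = ToyRepo(repo_id=42, group=child)
repo.update_commit_cache({'raw_id': 'abc123', 'message': 'fix'},
                         datetime.datetime(2024, 1, 1))
assert root.changeset_cache['source_repo_id'] == 42
assert child.updated_on == datetime.datetime(2024, 1, 1)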
2657 2667
2658 2668 @property
2659 2669 def tip(self):
2660 2670 return self.get_commit('tip')
2661 2671
2662 2672 @property
2663 2673 def author(self):
2664 2674 return self.tip.author
2665 2675
2666 2676 @property
2667 2677 def last_change(self):
2668 2678 return self.scm_instance().last_change
2669 2679
2670 2680 def get_comments(self, revisions=None):
2671 2681 """
2672 2682 Returns comments for this repository grouped by revisions
2673 2683
2674 2684 :param revisions: filter query by revisions only
2675 2685 """
2676 2686 cmts = ChangesetComment.query()\
2677 2687 .filter(ChangesetComment.repo == self)
2678 2688 if revisions:
2679 2689 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2680 2690 grouped = collections.defaultdict(list)
2681 2691 for cmt in cmts.all():
2682 2692 grouped[cmt.revision].append(cmt)
2683 2693 return grouped
2684 2694
2685 2695 def statuses(self, revisions=None):
2686 2696 """
2687 2697 Returns statuses for this repository
2688 2698
2689 2699 :param revisions: list of revisions to get statuses for
2690 2700 """
2691 2701 statuses = ChangesetStatus.query()\
2692 2702 .filter(ChangesetStatus.repo == self)\
2693 2703 .filter(ChangesetStatus.version == 0)
2694 2704
2695 2705 if revisions:
2696 2706 # Try doing the filtering in chunks to avoid hitting limits
2697 2707 size = 500
2698 2708 status_results = []
2699 2709 for chunk in range(0, len(revisions), size):
2700 2710 status_results += statuses.filter(
2701 2711 ChangesetStatus.revision.in_(
2702 2712 revisions[chunk: chunk+size])
2703 2713 ).all()
2704 2714 else:
2705 2715 status_results = statuses.all()
2706 2716
2707 2717 grouped = {}
2708 2718
2709 2719 # maybe we have open new pullrequest without a status?
2710 2720 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2711 2721 status_lbl = ChangesetStatus.get_status_lbl(stat)
2712 2722 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2713 2723 for rev in pr.revisions:
2714 2724 pr_id = pr.pull_request_id
2715 2725 pr_repo = pr.target_repo.repo_name
2716 2726 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2717 2727
2718 2728 for stat in status_results:
2719 2729 pr_id = pr_repo = None
2720 2730 if stat.pull_request:
2721 2731 pr_id = stat.pull_request.pull_request_id
2722 2732 pr_repo = stat.pull_request.target_repo.repo_name
2723 2733 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2724 2734 pr_id, pr_repo]
2725 2735 return grouped
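# Illustrative sketch (plain Python, no SQLAlchemy): the chunking trick used by
# statuses() above. A very large IN (...) clause can hit backend limits, so the
# revision list is filtered in slices of 500 and the partial results concatenated.
def in_chunks(items, size=500):
    for start in range(0, len(items), size):
        yield items[start:start + size]

revisions = ['rev-%d' % i for i in range(1234)]
results = []
for chunk in in_chunks(revisions):
    # stand-in for: statuses.filter(ChangesetStatus.revision.in_(chunk)).all()
    results += [rev for rev in chunk if rev.endswith('0')]

assert len(list(in_chunks(revisions))) == 3  # 500 + 500 + 234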
2726 2736
2727 2737 # ==========================================================================
2728 2738 # SCM CACHE INSTANCE
2729 2739 # ==========================================================================
2730 2740
2731 2741 def scm_instance(self, **kwargs):
2732 2742 import rhodecode
2733 2743
2734 2744 # Passing a config will not hit the cache; currently this is only used
2735 2745 # for repo2dbmapper
2736 2746 config = kwargs.pop('config', None)
2737 2747 cache = kwargs.pop('cache', None)
2738 2748 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2739 2749 if vcs_full_cache is not None:
2740 2750 # allows override global config
2741 2751 full_cache = vcs_full_cache
2742 2752 else:
2743 2753 full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache')
2744 2754 # if cache is NOT defined use the global default, else we have full
2745 2755 # control over cache behaviour
2746 2756 if cache is None and full_cache and not config:
2747 2757 log.debug('Initializing pure cached instance for %s', self.repo_path)
2748 2758 return self._get_instance_cached()
2749 2759
2750 2760 # cache here is sent to the "vcs server"
2751 2761 return self._get_instance(cache=bool(cache), config=config)
2752 2762
2753 2763 def _get_instance_cached(self):
2754 2764 from rhodecode.lib import rc_cache
2755 2765
2756 2766 cache_namespace_uid = f'repo_instance.{self.repo_id}'
2757 2767 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2758 2768
2759 2769 # we must use thread scoped cache here,
2760 2770 # because each gevent thread needs its own, not shared, connection and cache
2761 2771 # we also alter `args` so the cache key is individual for every green thread.
2762 2772 repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
2763 2773 inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key, thread_scoped=True)
2764 2774
2765 2775 # our wrapped caching function that takes state_uid to save the previous state in
2766 2776 def cache_generator(_state_uid):
2767 2777
2768 2778 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2769 2779 def get_instance_cached(_repo_id, _process_context_id):
2770 2780 # we save in cached func the generation state so we can detect a change and invalidate caches
2771 2781 return _state_uid, self._get_instance(repo_state_uid=_state_uid)
2772 2782
2773 2783 return get_instance_cached
2774 2784
2775 2785 with inv_context_manager as invalidation_context:
2776 2786 cache_state_uid = invalidation_context.state_uid
2777 2787 cache_func = cache_generator(cache_state_uid)
2778 2788
2779 2789 args = self.repo_id, inv_context_manager.proc_key
2780 2790
2781 2791 previous_state_uid, instance = cache_func(*args)
2782 2792
2783 2793 # now compare keys, the "cache" state vs expected state.
2784 2794 if previous_state_uid != cache_state_uid:
2785 2795 log.warning('Cached state uid %s is different than current state uid %s',
2786 2796 previous_state_uid, cache_state_uid)
2787 2797 _, instance = cache_func.refresh(*args)
2788 2798
2789 2799 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2790 2800 return instance
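# Illustrative sketch (a plain dict instead of the dogpile cache region): the
# pattern used by _get_instance_cached() above. The cached value stores the state
# uid that was current when it was computed; if the namespace's state uid has
# moved on, the entry is treated as stale and rebuilt.
_toy_cache = {}

def expensive_instance(repo_id):
    return 'instance-of-%s' % repo_id

def get_cached_instance(repo_id, current_state_uid):
    key = ('repo_instance', repo_id)
    cached = _toy_cache.get(key)
    if cached is None or cached[0] != current_state_uid:
        # missing or stale: recompute and remember the state uid alongside it
        cached = (current_state_uid, expensive_instance(repo_id))
        _toy_cache[key] = cached
    return cached[1]

assert get_cached_instance(1, 'uid-a') == 'instance-of-1'
_toy_cache[('repo_instance', 1)] = ('uid-a', 'stale-instance')
assert get_cached_instance(1, 'uid-b') == 'instance-of-1'  # uid mismatch forces refresh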
2791 2801
2792 2802 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2793 2803 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2794 2804 self.repo_type, self.repo_path, cache)
2795 2805 config = config or self._config
2796 2806 custom_wire = {
2797 2807 'cache': cache, # controls the vcs.remote cache
2798 2808 'repo_state_uid': repo_state_uid
2799 2809 }
2800 2810
2801 2811 repo = get_vcs_instance(
2802 2812 repo_path=safe_str(self.repo_full_path),
2803 2813 config=config,
2804 2814 with_wire=custom_wire,
2805 2815 create=False,
2806 2816 _vcs_alias=self.repo_type)
2807 2817 if repo is not None:
2808 2818 repo.count() # cache rebuild
2809 2819
2810 2820 return repo
2811 2821
2812 2822 def get_shadow_repository_path(self, workspace_id):
2813 2823 from rhodecode.lib.vcs.backends.base import BaseRepository
2814 2824 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2815 2825 self.repo_full_path, self.repo_id, workspace_id)
2816 2826 return shadow_repo_path
2817 2827
2818 2828 def __json__(self):
2819 2829 return {'landing_rev': self.landing_rev}
2820 2830
2821 2831 def get_dict(self):
2822 2832
2823 2833 # Since we transformed `repo_name` to a hybrid property, we need to
2824 2834 # keep compatibility with the code which uses `repo_name` field.
2825 2835
2826 2836 result = super(Repository, self).get_dict()
2827 2837 result['repo_name'] = result.pop('_repo_name', None)
2828 2838 result.pop('_changeset_cache', '')
2829 2839 return result
2830 2840
2831 2841
2832 2842 class RepoGroup(Base, BaseModel):
2833 2843 __tablename__ = 'groups'
2834 2844 __table_args__ = (
2835 2845 UniqueConstraint('group_name', 'group_parent_id'),
2836 2846 base_table_args,
2837 2847 )
2838 2848
2839 2849 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2840 2850
2841 2851 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2842 2852 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2843 2853 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2844 2854 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2845 2855 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2846 2856 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2847 2857 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2848 2858 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2849 2859 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2850 2860 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2851 2861 _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data
2852 2862
2853 2863 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id', back_populates='group')
2854 2864 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='group')
2855 2865 parent_group = relationship('RepoGroup', remote_side=group_id)
2856 2866 user = relationship('User', back_populates='repository_groups')
2857 2867 integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo_group')
2858 2868
2859 2869 # no cascade, set NULL
2860 2870 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id', viewonly=True)
2861 2871
2862 2872 def __init__(self, group_name='', parent_group=None):
2863 2873 self.group_name = group_name
2864 2874 self.parent_group = parent_group
2865 2875
2866 2876 def __repr__(self):
2867 2877 return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>"
2868 2878
2869 2879 @hybrid_property
2870 2880 def group_name(self):
2871 2881 return self._group_name
2872 2882
2873 2883 @group_name.setter
2874 2884 def group_name(self, value):
2875 2885 self._group_name = value
2876 2886 self.group_name_hash = self.hash_repo_group_name(value)
2877 2887
2878 2888 @classmethod
2879 2889 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
2880 2890 from rhodecode.lib.vcs.backends.base import EmptyCommit
2881 2891 dummy = EmptyCommit().__json__()
2882 2892 if not changeset_cache_raw:
2883 2893 dummy['source_repo_id'] = repo_id
2884 2894 return json.loads(json.dumps(dummy))
2885 2895
2886 2896 try:
2887 2897 return json.loads(changeset_cache_raw)
2888 2898 except TypeError:
2889 2899 return dummy
2890 2900 except Exception:
2891 2901 log.error(traceback.format_exc())
2892 2902 return dummy
2893 2903
2894 2904 @hybrid_property
2895 2905 def changeset_cache(self):
2896 2906 return self._load_changeset_cache('', self._changeset_cache)
2897 2907
2898 2908 @changeset_cache.setter
2899 2909 def changeset_cache(self, val):
2900 2910 try:
2901 2911 self._changeset_cache = json.dumps(val)
2902 2912 except Exception:
2903 2913 log.error(traceback.format_exc())
2904 2914
2905 2915 @validates('group_parent_id')
2906 2916 def validate_group_parent_id(self, key, val):
2907 2917 """
2908 2918 Check for cyclic references from a parent group to self
2909 2919 """
2910 2920 if self.group_id and val:
2911 2921 assert val != self.group_id
2912 2922
2913 2923 return val
2914 2924
2915 2925 @hybrid_property
2916 2926 def description_safe(self):
2917 2927 return description_escaper(self.group_description)
2918 2928
2919 2929 @classmethod
2920 2930 def hash_repo_group_name(cls, repo_group_name):
2921 2931 val = remove_formatting(repo_group_name)
2922 2932 val = safe_str(val).lower()
2923 2933 chars = []
2924 2934 for c in val:
2925 2935 if c not in string.ascii_letters:
2926 2936 c = str(ord(c))
2927 2937 chars.append(c)
2928 2938
2929 2939 return ''.join(chars)
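# Illustrative sketch (remove_formatting()/safe_str() omitted): the scheme above
# keeps the ASCII letters of the lower-cased name and replaces every other
# character with its decimal code point, e.g. '/' becomes '47'.
import string

def toy_hash_repo_group_name(repo_group_name):
    val = repo_group_name.lower()
    chars = []
    for c in val:
        if c not in string.ascii_letters:
            c = str(ord(c))
        chars.append(c)
    return ''.join(chars)

assert toy_hash_repo_group_name('Group/sub') == 'group47sub'
assert toy_hash_repo_group_name('a b') == 'a32b'   # space is code point 32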
2930 2940
2931 2941 @classmethod
2932 2942 def _generate_choice(cls, repo_group):
2933 2943 from webhelpers2.html import literal as _literal
2934 2944
2935 2945 def _name(k):
2936 2946 return _literal(cls.CHOICES_SEPARATOR.join(k))
2937 2947
2938 2948 return repo_group.group_id, _name(repo_group.full_path_splitted)
2939 2949
2940 2950 @classmethod
2941 2951 def groups_choices(cls, groups=None, show_empty_group=True):
2942 2952 if not groups:
2943 2953 groups = cls.query().all()
2944 2954
2945 2955 repo_groups = []
2946 2956 if show_empty_group:
2947 2957 repo_groups = [(-1, '-- %s --' % _('No parent'))]
2948 2958
2949 2959 repo_groups.extend([cls._generate_choice(x) for x in groups])
2950 2960
2951 2961 repo_groups = sorted(
2952 2962 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2953 2963 return repo_groups
2954 2964
2955 2965 @classmethod
2956 2966 def url_sep(cls):
2957 2967 return URL_SEP
2958 2968
2959 2969 @classmethod
2960 2970 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2961 2971 if case_insensitive:
2962 2972 gr = cls.query().filter(func.lower(cls.group_name)
2963 2973 == func.lower(group_name))
2964 2974 else:
2965 2975 gr = cls.query().filter(cls.group_name == group_name)
2966 2976 if cache:
2967 2977 name_key = _hash_key(group_name)
2968 2978 gr = gr.options(
2969 2979 FromCache("sql_cache_short", f"get_group_{name_key}"))
2970 2980 return gr.scalar()
2971 2981
2972 2982 @classmethod
2973 2983 def get_user_personal_repo_group(cls, user_id):
2974 2984 user = User.get(user_id)
2975 2985 if user.username == User.DEFAULT_USER:
2976 2986 return None
2977 2987
2978 2988 return cls.query()\
2979 2989 .filter(cls.personal == true()) \
2980 2990 .filter(cls.user == user) \
2981 2991 .order_by(cls.group_id.asc()) \
2982 2992 .first()
2983 2993
2984 2994 @classmethod
2985 2995 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2986 2996 case_insensitive=True):
2987 2997 q = RepoGroup.query()
2988 2998
2989 2999 if not isinstance(user_id, Optional):
2990 3000 q = q.filter(RepoGroup.user_id == user_id)
2991 3001
2992 3002 if not isinstance(group_id, Optional):
2993 3003 q = q.filter(RepoGroup.group_parent_id == group_id)
2994 3004
2995 3005 if case_insensitive:
2996 3006 q = q.order_by(func.lower(RepoGroup.group_name))
2997 3007 else:
2998 3008 q = q.order_by(RepoGroup.group_name)
2999 3009 return q.all()
3000 3010
3001 3011 @property
3002 3012 def parents(self, parents_recursion_limit=10):
3003 3013 groups = []
3004 3014 if self.parent_group is None:
3005 3015 return groups
3006 3016 cur_gr = self.parent_group
3007 3017 groups.insert(0, cur_gr)
3008 3018 cnt = 0
3009 3019 while 1:
3010 3020 cnt += 1
3011 3021 gr = getattr(cur_gr, 'parent_group', None)
3012 3022 cur_gr = cur_gr.parent_group
3013 3023 if gr is None:
3014 3024 break
3015 3025 if cnt == parents_recursion_limit:
3016 3026 # this will prevent accidental infinite loops
3017 3027 log.error('more than %s parents found for group %s, stopping '
3018 3028 'recursive parent fetching', parents_recursion_limit, self)
3019 3029 break
3020 3030
3021 3031 groups.insert(0, gr)
3022 3032 return groups
3023 3033
3024 3034 @property
3025 3035 def last_commit_cache_update_diff(self):
3026 3036 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
3027 3037
3028 3038 @classmethod
3029 3039 def _load_commit_change(cls, last_commit_cache):
3030 3040 from rhodecode.lib.vcs.utils.helpers import parse_datetime
3031 3041 empty_date = datetime.datetime.fromtimestamp(0)
3032 3042 date_latest = last_commit_cache.get('date', empty_date)
3033 3043 try:
3034 3044 return parse_datetime(date_latest)
3035 3045 except Exception:
3036 3046 return empty_date
3037 3047
3038 3048 @property
3039 3049 def last_commit_change(self):
3040 3050 return self._load_commit_change(self.changeset_cache)
3041 3051
3042 3052 @property
3043 3053 def last_db_change(self):
3044 3054 return self.updated_on
3045 3055
3046 3056 @property
3047 3057 def children(self):
3048 3058 return RepoGroup.query().filter(RepoGroup.parent_group == self)
3049 3059
3050 3060 @property
3051 3061 def name(self):
3052 3062 return self.group_name.split(RepoGroup.url_sep())[-1]
3053 3063
3054 3064 @property
3055 3065 def full_path(self):
3056 3066 return self.group_name
3057 3067
3058 3068 @property
3059 3069 def full_path_splitted(self):
3060 3070 return self.group_name.split(RepoGroup.url_sep())
3061 3071
3062 3072 @property
3063 3073 def repositories(self):
3064 3074 return Repository.query()\
3065 3075 .filter(Repository.group == self)\
3066 3076 .order_by(Repository.repo_name)
3067 3077
3068 3078 @property
3069 3079 def repositories_recursive_count(self):
3070 3080 cnt = self.repositories.count()
3071 3081
3072 3082 def children_count(group):
3073 3083 cnt = 0
3074 3084 for child in group.children:
3075 3085 cnt += child.repositories.count()
3076 3086 cnt += children_count(child)
3077 3087 return cnt
3078 3088
3079 3089 return cnt + children_count(self)
3080 3090
3081 3091 def _recursive_objects(self, include_repos=True, include_groups=True):
3082 3092 all_ = []
3083 3093
3084 3094 def _get_members(root_gr):
3085 3095 if include_repos:
3086 3096 for r in root_gr.repositories:
3087 3097 all_.append(r)
3088 3098 childs = root_gr.children.all()
3089 3099 if childs:
3090 3100 for gr in childs:
3091 3101 if include_groups:
3092 3102 all_.append(gr)
3093 3103 _get_members(gr)
3094 3104
3095 3105 root_group = []
3096 3106 if include_groups:
3097 3107 root_group = [self]
3098 3108
3099 3109 _get_members(self)
3100 3110 return root_group + all_
3101 3111
3102 3112 def recursive_groups_and_repos(self):
3103 3113 """
3104 3114 Recursively returns all groups, with the repositories in those groups
3105 3115 """
3106 3116 return self._recursive_objects()
3107 3117
3108 3118 def recursive_groups(self):
3109 3119 """
3110 3120 Returns all children groups for this group including children of children
3111 3121 """
3112 3122 return self._recursive_objects(include_repos=False)
3113 3123
3114 3124 def recursive_repos(self):
3115 3125 """
3116 3126 Returns all child repositories of this group
3117 3127 """
3118 3128 return self._recursive_objects(include_groups=False)
3119 3129
3120 3130 def get_new_name(self, group_name):
3121 3131 """
3122 3132 returns new full group name based on parent and new name
3123 3133
3124 3134 :param group_name:
3125 3135 """
3126 3136 path_prefix = (self.parent_group.full_path_splitted if
3127 3137 self.parent_group else [])
3128 3138 return RepoGroup.url_sep().join(path_prefix + [group_name])
3129 3139
3130 3140 def update_commit_cache(self, config=None):
3131 3141 """
3132 3142 Update cache of last commit for newest repository inside this repository group.
3133 3143 cache_keys should be::
3134 3144
3135 3145 source_repo_id
3136 3146 short_id
3137 3147 raw_id
3138 3148 revision
3139 3149 parents
3140 3150 message
3141 3151 date
3142 3152 author
3143 3153
3144 3154 """
3145 3155 from rhodecode.lib.vcs.utils.helpers import parse_datetime
3146 3156 empty_date = datetime.datetime.fromtimestamp(0)
3147 3157
3148 3158 def repo_groups_and_repos(root_gr):
3149 3159 for _repo in root_gr.repositories:
3150 3160 yield _repo
3151 3161 for child_group in root_gr.children.all():
3152 3162 yield child_group
3153 3163
3154 3164 latest_repo_cs_cache = {}
3155 3165 for obj in repo_groups_and_repos(self):
3156 3166 repo_cs_cache = obj.changeset_cache
3157 3167 date_latest = latest_repo_cs_cache.get('date', empty_date)
3158 3168 date_current = repo_cs_cache.get('date', empty_date)
3159 3169 current_timestamp = datetime_to_time(parse_datetime(date_latest))
3160 3170 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
3161 3171 latest_repo_cs_cache = repo_cs_cache
3162 3172 if hasattr(obj, 'repo_id'):
3163 3173 latest_repo_cs_cache['source_repo_id'] = obj.repo_id
3164 3174 else:
3165 3175 latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')
3166 3176
3167 3177 _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)
3168 3178
3169 3179 latest_repo_cs_cache['updated_on'] = time.time()
3170 3180 self.changeset_cache = latest_repo_cs_cache
3171 3181 self.updated_on = _date_latest
3172 3182 Session().add(self)
3173 3183 Session().commit()
3174 3184
3175 3185 log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
3176 3186 self.group_name, latest_repo_cs_cache, _date_latest)
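# Illustrative sketch (plain dicts and datetimes instead of ORM rows and parsed
# date strings): how the group-level update_commit_cache() above chooses its
# cache, i.e. the entry with the newest 'date' among the direct repositories and
# child groups.
import datetime

def newest_commit_cache(caches):
    empty_date = datetime.datetime.fromtimestamp(0)
    latest = {}
    for cache in caches:
        if cache.get('date', empty_date) > latest.get('date', empty_date):
            latest = cache
    return latest

caches = [
    {'raw_id': 'aaa', 'date': datetime.datetime(2023, 5, 1)},
    {'raw_id': 'bbb', 'date': datetime.datetime(2024, 2, 1)},
    {'raw_id': 'ccc', 'date': datetime.datetime(2022, 1, 1)},
]
assert newest_commit_cache(caches)['raw_id'] == 'bbb'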
3177 3187
3178 3188 def permissions(self, with_admins=True, with_owner=True,
3179 3189 expand_from_user_groups=False):
3180 3190 """
3181 3191 Permissions for repository groups
3182 3192 """
3183 3193 _admin_perm = 'group.admin'
3184 3194
3185 3195 owner_row = []
3186 3196 if with_owner:
3187 3197 usr = AttributeDict(self.user.get_dict())
3188 3198 usr.owner_row = True
3189 3199 usr.permission = _admin_perm
3190 3200 owner_row.append(usr)
3191 3201
3192 3202 super_admin_ids = []
3193 3203 super_admin_rows = []
3194 3204 if with_admins:
3195 3205 for usr in User.get_all_super_admins():
3196 3206 super_admin_ids.append(usr.user_id)
3197 3207 # if this admin is also owner, don't double the record
3198 3208 if usr.user_id == owner_row[0].user_id:
3199 3209 owner_row[0].admin_row = True
3200 3210 else:
3201 3211 usr = AttributeDict(usr.get_dict())
3202 3212 usr.admin_row = True
3203 3213 usr.permission = _admin_perm
3204 3214 super_admin_rows.append(usr)
3205 3215
3206 3216 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
3207 3217 q = q.options(joinedload(UserRepoGroupToPerm.group),
3208 3218 joinedload(UserRepoGroupToPerm.user),
3209 3219 joinedload(UserRepoGroupToPerm.permission),)
3210 3220
3211 3221 # get owners, admins and their permissions. We re-write the SQLAlchemy
3212 3222 # objects into plain AttributeDict copies because the SQLAlchemy session
3213 3223 # holds a global reference, and changing one object would propagate to
3214 3224 # all others. Without the copy, marking an owner who is also an admin
3215 3225 # with admin_row would modify both records.
3216 3226 perm_rows = []
3217 3227 for _usr in q.all():
3218 3228 usr = AttributeDict(_usr.user.get_dict())
3219 3229 # if this user is also owner/admin, mark as duplicate record
3220 3230 if (owner_row and usr.user_id == owner_row[0].user_id) or usr.user_id in super_admin_ids:
3221 3231 usr.duplicate_perm = True
3222 3232 usr.permission = _usr.permission.permission_name
3223 3233 perm_rows.append(usr)
3224 3234
3225 3235 # sort the perm rows: the 'default' user first, then by
3226 3236 # admin/write/read/none permission, alphabetically within
3227 3237 # each permission group
3228 3238 perm_rows = sorted(perm_rows, key=display_user_sort)
3229 3239
3230 3240 user_groups_rows = []
3231 3241 if expand_from_user_groups:
3232 3242 for ug in self.permission_user_groups(with_members=True):
3233 3243 for user_data in ug.members:
3234 3244 user_groups_rows.append(user_data)
3235 3245
3236 3246 return super_admin_rows + owner_row + perm_rows + user_groups_rows
3237 3247
3238 3248 def permission_user_groups(self, with_members=False):
3239 3249 q = UserGroupRepoGroupToPerm.query()\
3240 3250 .filter(UserGroupRepoGroupToPerm.group == self)
3241 3251 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
3242 3252 joinedload(UserGroupRepoGroupToPerm.users_group),
3243 3253 joinedload(UserGroupRepoGroupToPerm.permission),)
3244 3254
3245 3255 perm_rows = []
3246 3256 for _user_group in q.all():
3247 3257 entry = AttributeDict(_user_group.users_group.get_dict())
3248 3258 entry.permission = _user_group.permission.permission_name
3249 3259 if with_members:
3250 3260 entry.members = [x.user.get_dict()
3251 3261 for x in _user_group.users_group.members]
3252 3262 perm_rows.append(entry)
3253 3263
3254 3264 perm_rows = sorted(perm_rows, key=display_user_group_sort)
3255 3265 return perm_rows
3256 3266
3257 3267 def get_api_data(self):
3258 3268 """
3259 3269 Common function for generating api data
3260 3270
3261 3271 """
3262 3272 group = self
3263 3273 data = {
3264 3274 'group_id': group.group_id,
3265 3275 'group_name': group.group_name,
3266 3276 'group_description': group.description_safe,
3267 3277 'parent_group': group.parent_group.group_name if group.parent_group else None,
3268 3278 'repositories': [x.repo_name for x in group.repositories],
3269 3279 'owner': group.user.username,
3270 3280 }
3271 3281 return data
3272 3282
3273 3283 def get_dict(self):
3274 3284 # Since we transformed `group_name` to a hybrid property, we need to
3275 3285 # keep compatibility with the code which uses `group_name` field.
3276 3286 result = super(RepoGroup, self).get_dict()
3277 3287 result['group_name'] = result.pop('_group_name', None)
3278 3288 result.pop('_changeset_cache', '')
3279 3289 return result
3280 3290
3281 3291
3282 3292 class Permission(Base, BaseModel):
3283 3293 __tablename__ = 'permissions'
3284 3294 __table_args__ = (
3285 3295 Index('p_perm_name_idx', 'permission_name'),
3286 3296 base_table_args,
3287 3297 )
3288 3298
3289 3299 PERMS = [
3290 3300 ('hg.admin', _('RhodeCode Super Administrator')),
3291 3301
3292 3302 ('repository.none', _('Repository no access')),
3293 3303 ('repository.read', _('Repository read access')),
3294 3304 ('repository.write', _('Repository write access')),
3295 3305 ('repository.admin', _('Repository admin access')),
3296 3306
3297 3307 ('group.none', _('Repository group no access')),
3298 3308 ('group.read', _('Repository group read access')),
3299 3309 ('group.write', _('Repository group write access')),
3300 3310 ('group.admin', _('Repository group admin access')),
3301 3311
3302 3312 ('usergroup.none', _('User group no access')),
3303 3313 ('usergroup.read', _('User group read access')),
3304 3314 ('usergroup.write', _('User group write access')),
3305 3315 ('usergroup.admin', _('User group admin access')),
3306 3316
3307 3317 ('branch.none', _('Branch no permissions')),
3308 3318 ('branch.merge', _('Branch access by web merge')),
3309 3319 ('branch.push', _('Branch access by push')),
3310 3320 ('branch.push_force', _('Branch access by push with force')),
3311 3321
3312 3322 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
3313 3323 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
3314 3324
3315 3325 ('hg.usergroup.create.false', _('User Group creation disabled')),
3316 3326 ('hg.usergroup.create.true', _('User Group creation enabled')),
3317 3327
3318 3328 ('hg.create.none', _('Repository creation disabled')),
3319 3329 ('hg.create.repository', _('Repository creation enabled')),
3320 3330 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
3321 3331 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
3322 3332
3323 3333 ('hg.fork.none', _('Repository forking disabled')),
3324 3334 ('hg.fork.repository', _('Repository forking enabled')),
3325 3335
3326 3336 ('hg.register.none', _('Registration disabled')),
3327 3337 ('hg.register.manual_activate', _('User Registration with manual account activation')),
3328 3338 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3329 3339
3330 3340 ('hg.password_reset.enabled', _('Password reset enabled')),
3331 3341 ('hg.password_reset.hidden', _('Password reset hidden')),
3332 3342 ('hg.password_reset.disabled', _('Password reset disabled')),
3333 3343
3334 3344 ('hg.extern_activate.manual', _('Manual activation of external account')),
3335 3345 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3336 3346
3337 3347 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3338 3348 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3339 3349 ]
3340 3350
3341 3351 # definition of system default permissions for DEFAULT user, created on
3342 3352 # system setup
3343 3353 DEFAULT_USER_PERMISSIONS = [
3344 3354 # object perms
3345 3355 'repository.read',
3346 3356 'group.read',
3347 3357 'usergroup.read',
3348 3358 # branch; for backward compatibility we keep the same value as before, i.e. forced push
3349 3359 'branch.push_force',
3350 3360 # global
3351 3361 'hg.create.repository',
3352 3362 'hg.repogroup.create.false',
3353 3363 'hg.usergroup.create.false',
3354 3364 'hg.create.write_on_repogroup.true',
3355 3365 'hg.fork.repository',
3356 3366 'hg.register.manual_activate',
3357 3367 'hg.password_reset.enabled',
3358 3368 'hg.extern_activate.auto',
3359 3369 'hg.inherit_default_perms.true',
3360 3370 ]
3361 3371
3362 3372 # Weight defines which permissions are more important;
3363 3373 # the higher the number, the more important the permission.
3365 3375 PERM_WEIGHTS = {
3366 3376 'repository.none': 0,
3367 3377 'repository.read': 1,
3368 3378 'repository.write': 3,
3369 3379 'repository.admin': 4,
3370 3380
3371 3381 'group.none': 0,
3372 3382 'group.read': 1,
3373 3383 'group.write': 3,
3374 3384 'group.admin': 4,
3375 3385
3376 3386 'usergroup.none': 0,
3377 3387 'usergroup.read': 1,
3378 3388 'usergroup.write': 3,
3379 3389 'usergroup.admin': 4,
3380 3390
3381 3391 'branch.none': 0,
3382 3392 'branch.merge': 1,
3383 3393 'branch.push': 3,
3384 3394 'branch.push_force': 4,
3385 3395
3386 3396 'hg.repogroup.create.false': 0,
3387 3397 'hg.repogroup.create.true': 1,
3388 3398
3389 3399 'hg.usergroup.create.false': 0,
3390 3400 'hg.usergroup.create.true': 1,
3391 3401
3392 3402 'hg.fork.none': 0,
3393 3403 'hg.fork.repository': 1,
3394 3404 'hg.create.none': 0,
3395 3405 'hg.create.repository': 1
3396 3406 }
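# Illustrative sketch: PERM_WEIGHTS can be used to resolve the strongest of
# several granted permissions, e.g.
#
#   perms = ['repository.read', 'repository.write']
#   strongest = max(perms, key=Permission.PERM_WEIGHTS.get)  # 'repository.write'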
3397 3407
3398 3408 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3399 3409 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3400 3410 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3401 3411
3402 3412 def __repr__(self):
3403 3413 return "<%s('%s:%s')>" % (
3404 3414 self.cls_name, self.permission_id, self.permission_name
3405 3415 )
3406 3416
3407 3417 @classmethod
3408 3418 def get_by_key(cls, key):
3409 3419 return cls.query().filter(cls.permission_name == key).scalar()
3410 3420
3411 3421 @classmethod
3412 3422 def get_default_repo_perms(cls, user_id, repo_id=None):
3413 3423 q = Session().query(UserRepoToPerm, Repository, Permission)\
3414 3424 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3415 3425 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3416 3426 .filter(UserRepoToPerm.user_id == user_id)
3417 3427 if repo_id:
3418 3428 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3419 3429 return q.all()
3420 3430
3421 3431 @classmethod
3422 3432 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3423 3433 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3424 3434 .join(
3425 3435 Permission,
3426 3436 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3427 3437 .join(
3428 3438 UserRepoToPerm,
3429 3439 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3430 3440 .filter(UserRepoToPerm.user_id == user_id)
3431 3441
3432 3442 if repo_id:
3433 3443 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3434 3444 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3435 3445
3436 3446 @classmethod
3437 3447 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3438 3448 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3439 3449 .join(
3440 3450 Permission,
3441 3451 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3442 3452 .join(
3443 3453 Repository,
3444 3454 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3445 3455 .join(
3446 3456 UserGroup,
3447 3457 UserGroupRepoToPerm.users_group_id ==
3448 3458 UserGroup.users_group_id)\
3449 3459 .join(
3450 3460 UserGroupMember,
3451 3461 UserGroupRepoToPerm.users_group_id ==
3452 3462 UserGroupMember.users_group_id)\
3453 3463 .filter(
3454 3464 UserGroupMember.user_id == user_id,
3455 3465 UserGroup.users_group_active == true())
3456 3466 if repo_id:
3457 3467 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3458 3468 return q.all()
3459 3469
3460 3470 @classmethod
3461 3471 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3462 3472 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3463 3473 .join(
3464 3474 Permission,
3465 3475 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3466 3476 .join(
3467 3477 UserGroupRepoToPerm,
3468 3478 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3469 3479 .join(
3470 3480 UserGroup,
3471 3481 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3472 3482 .join(
3473 3483 UserGroupMember,
3474 3484 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3475 3485 .filter(
3476 3486 UserGroupMember.user_id == user_id,
3477 3487 UserGroup.users_group_active == true())
3478 3488
3479 3489 if repo_id:
3480 3490 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3481 3491 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3482 3492
3483 3493 @classmethod
3484 3494 def get_default_group_perms(cls, user_id, repo_group_id=None):
3485 3495 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3486 3496 .join(
3487 3497 Permission,
3488 3498 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3489 3499 .join(
3490 3500 RepoGroup,
3491 3501 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3492 3502 .filter(UserRepoGroupToPerm.user_id == user_id)
3493 3503 if repo_group_id:
3494 3504 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3495 3505 return q.all()
3496 3506
3497 3507 @classmethod
3498 3508 def get_default_group_perms_from_user_group(
3499 3509 cls, user_id, repo_group_id=None):
3500 3510 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3501 3511 .join(
3502 3512 Permission,
3503 3513 UserGroupRepoGroupToPerm.permission_id ==
3504 3514 Permission.permission_id)\
3505 3515 .join(
3506 3516 RepoGroup,
3507 3517 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3508 3518 .join(
3509 3519 UserGroup,
3510 3520 UserGroupRepoGroupToPerm.users_group_id ==
3511 3521 UserGroup.users_group_id)\
3512 3522 .join(
3513 3523 UserGroupMember,
3514 3524 UserGroupRepoGroupToPerm.users_group_id ==
3515 3525 UserGroupMember.users_group_id)\
3516 3526 .filter(
3517 3527 UserGroupMember.user_id == user_id,
3518 3528 UserGroup.users_group_active == true())
3519 3529 if repo_group_id:
3520 3530 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3521 3531 return q.all()
3522 3532
3523 3533 @classmethod
3524 3534 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3525 3535 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3526 3536 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3527 3537 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3528 3538 .filter(UserUserGroupToPerm.user_id == user_id)
3529 3539 if user_group_id:
3530 3540 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3531 3541 return q.all()
3532 3542
3533 3543 @classmethod
3534 3544 def get_default_user_group_perms_from_user_group(
3535 3545 cls, user_id, user_group_id=None):
3536 3546 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3537 3547 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3538 3548 .join(
3539 3549 Permission,
3540 3550 UserGroupUserGroupToPerm.permission_id ==
3541 3551 Permission.permission_id)\
3542 3552 .join(
3543 3553 TargetUserGroup,
3544 3554 UserGroupUserGroupToPerm.target_user_group_id ==
3545 3555 TargetUserGroup.users_group_id)\
3546 3556 .join(
3547 3557 UserGroup,
3548 3558 UserGroupUserGroupToPerm.user_group_id ==
3549 3559 UserGroup.users_group_id)\
3550 3560 .join(
3551 3561 UserGroupMember,
3552 3562 UserGroupUserGroupToPerm.user_group_id ==
3553 3563 UserGroupMember.users_group_id)\
3554 3564 .filter(
3555 3565 UserGroupMember.user_id == user_id,
3556 3566 UserGroup.users_group_active == true())
3557 3567 if user_group_id:
3558 3568 q = q.filter(
3559 3569 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3560 3570
3561 3571 return q.all()
3562 3572
3563 3573
3564 3574 class UserRepoToPerm(Base, BaseModel):
3565 3575 __tablename__ = 'repo_to_perm'
3566 3576 __table_args__ = (
3567 3577 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3568 3578 base_table_args
3569 3579 )
3570 3580
3571 3581 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3572 3582 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3573 3583 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3574 3584 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3575 3585
3576 3586 user = relationship('User', back_populates="repo_to_perm")
3577 3587 repository = relationship('Repository', back_populates="repo_to_perm")
3578 3588 permission = relationship('Permission')
3579 3589
3580 3590 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined', back_populates='user_repo_to_perm')
3581 3591
3582 3592 @classmethod
3583 3593 def create(cls, user, repository, permission):
3584 3594 n = cls()
3585 3595 n.user = user
3586 3596 n.repository = repository
3587 3597 n.permission = permission
3588 3598 Session().add(n)
3589 3599 return n
3590 3600
3591 3601 def __repr__(self):
3592 3602 return f'<{self.user} => {self.repository} >'
3593 3603
3594 3604
3595 3605 class UserUserGroupToPerm(Base, BaseModel):
3596 3606 __tablename__ = 'user_user_group_to_perm'
3597 3607 __table_args__ = (
3598 3608 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3599 3609 base_table_args
3600 3610 )
3601 3611
3602 3612 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3603 3613 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3604 3614 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3605 3615 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3606 3616
3607 3617 user = relationship('User', back_populates='user_group_to_perm')
3608 3618 user_group = relationship('UserGroup', back_populates='user_user_group_to_perm')
3609 3619 permission = relationship('Permission')
3610 3620
3611 3621 @classmethod
3612 3622 def create(cls, user, user_group, permission):
3613 3623 n = cls()
3614 3624 n.user = user
3615 3625 n.user_group = user_group
3616 3626 n.permission = permission
3617 3627 Session().add(n)
3618 3628 return n
3619 3629
3620 3630 def __repr__(self):
3621 3631 return f'<{self.user} => {self.user_group} >'
3622 3632
3623 3633
3624 3634 class UserToPerm(Base, BaseModel):
3625 3635 __tablename__ = 'user_to_perm'
3626 3636 __table_args__ = (
3627 3637 UniqueConstraint('user_id', 'permission_id'),
3628 3638 base_table_args
3629 3639 )
3630 3640
3631 3641 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3632 3642 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3633 3643 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3634 3644
3635 3645 user = relationship('User', back_populates='user_perms')
3636 3646 permission = relationship('Permission', lazy='joined')
3637 3647
3638 3648 def __repr__(self):
3639 3649 return f'<{self.user} => {self.permission} >'
3640 3650
3641 3651
3642 3652 class UserGroupRepoToPerm(Base, BaseModel):
3643 3653 __tablename__ = 'users_group_repo_to_perm'
3644 3654 __table_args__ = (
3645 3655 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3646 3656 base_table_args
3647 3657 )
3648 3658
3649 3659 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3650 3660 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3651 3661 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3652 3662 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3653 3663
3654 3664 users_group = relationship('UserGroup', back_populates='users_group_repo_to_perm')
3655 3665 permission = relationship('Permission')
3656 3666 repository = relationship('Repository', back_populates='users_group_to_perm')
3657 3667 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all', back_populates='user_group_repo_to_perm')
3658 3668
3659 3669 @classmethod
3660 3670 def create(cls, users_group, repository, permission):
3661 3671 n = cls()
3662 3672 n.users_group = users_group
3663 3673 n.repository = repository
3664 3674 n.permission = permission
3665 3675 Session().add(n)
3666 3676 return n
3667 3677
3668 3678 def __repr__(self):
3669 3679 return f'<UserGroupRepoToPerm:{self.users_group} => {self.repository} >'
3670 3680
3671 3681
3672 3682 class UserGroupUserGroupToPerm(Base, BaseModel):
3673 3683 __tablename__ = 'user_group_user_group_to_perm'
3674 3684 __table_args__ = (
3675 3685 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3676 3686 CheckConstraint('target_user_group_id != user_group_id'),
3677 3687 base_table_args
3678 3688 )
3679 3689
3680 3690 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3681 3691 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3682 3692 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3683 3693 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3684 3694
3685 3695 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id', back_populates='user_group_user_group_to_perm')
3686 3696 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3687 3697 permission = relationship('Permission')
3688 3698
3689 3699 @classmethod
3690 3700 def create(cls, target_user_group, user_group, permission):
3691 3701 n = cls()
3692 3702 n.target_user_group = target_user_group
3693 3703 n.user_group = user_group
3694 3704 n.permission = permission
3695 3705 Session().add(n)
3696 3706 return n
3697 3707
3698 3708 def __repr__(self):
3699 3709 return f'<UserGroupUserGroup:{self.target_user_group} => {self.user_group} >'
3700 3710
3701 3711
3702 3712 class UserGroupToPerm(Base, BaseModel):
3703 3713 __tablename__ = 'users_group_to_perm'
3704 3714 __table_args__ = (
3705 3715 UniqueConstraint('users_group_id', 'permission_id',),
3706 3716 base_table_args
3707 3717 )
3708 3718
3709 3719 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3710 3720 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3711 3721 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3712 3722
3713 3723 users_group = relationship('UserGroup', back_populates='users_group_to_perm')
3714 3724 permission = relationship('Permission')
3715 3725
3716 3726
3717 3727 class UserRepoGroupToPerm(Base, BaseModel):
3718 3728 __tablename__ = 'user_repo_group_to_perm'
3719 3729 __table_args__ = (
3720 3730 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3721 3731 base_table_args
3722 3732 )
3723 3733
3724 3734 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3725 3735 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3726 3736 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3727 3737 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3728 3738
3729 3739 user = relationship('User', back_populates='repo_group_to_perm')
3730 3740 group = relationship('RepoGroup', back_populates='repo_group_to_perm')
3731 3741 permission = relationship('Permission')
3732 3742
3733 3743 @classmethod
3734 3744 def create(cls, user, repository_group, permission):
3735 3745 n = cls()
3736 3746 n.user = user
3737 3747 n.group = repository_group
3738 3748 n.permission = permission
3739 3749 Session().add(n)
3740 3750 return n
3741 3751
3742 3752
3743 3753 class UserGroupRepoGroupToPerm(Base, BaseModel):
3744 3754 __tablename__ = 'users_group_repo_group_to_perm'
3745 3755 __table_args__ = (
3746 3756 UniqueConstraint('users_group_id', 'group_id'),
3747 3757 base_table_args
3748 3758 )
3749 3759
3750 3760 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3751 3761 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3752 3762 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3753 3763 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3754 3764
3755 3765 users_group = relationship('UserGroup', back_populates='users_group_repo_group_to_perm')
3756 3766 permission = relationship('Permission')
3757 3767 group = relationship('RepoGroup', back_populates='users_group_to_perm')
3758 3768
3759 3769 @classmethod
3760 3770 def create(cls, user_group, repository_group, permission):
3761 3771 n = cls()
3762 3772 n.users_group = user_group
3763 3773 n.group = repository_group
3764 3774 n.permission = permission
3765 3775 Session().add(n)
3766 3776 return n
3767 3777
3768 3778 def __repr__(self):
3769 3779 return '<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3770 3780
3771 3781
3772 3782 class Statistics(Base, BaseModel):
3773 3783 __tablename__ = 'statistics'
3774 3784 __table_args__ = (
3775 3785 base_table_args
3776 3786 )
3777 3787
3778 3788 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3779 3789 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3780 3790 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3781 3791 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False) #JSON data
3782 3792 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False) #JSON data
3783 3793 languages = Column("languages", LargeBinary(1000000), nullable=False) #JSON data
3784 3794
3785 3795 repository = relationship('Repository', single_parent=True, viewonly=True)
3786 3796
3787 3797
3788 3798 class UserFollowing(Base, BaseModel):
3789 3799 __tablename__ = 'user_followings'
3790 3800 __table_args__ = (
3791 3801 UniqueConstraint('user_id', 'follows_repository_id'),
3792 3802 UniqueConstraint('user_id', 'follows_user_id'),
3793 3803 base_table_args
3794 3804 )
3795 3805
3796 3806 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3797 3807 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3798 3808 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3799 3809 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3800 3810 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3801 3811
3802 3812 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id', back_populates='followings')
3803 3813
3804 3814 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3805 3815 follows_repository = relationship('Repository', order_by='Repository.repo_name', back_populates='followers')
3806 3816
3807 3817 @classmethod
3808 3818 def get_repo_followers(cls, repo_id):
3809 3819 return cls.query().filter(cls.follows_repo_id == repo_id)
3810 3820
3811 3821
3812 3822 class CacheKey(Base, BaseModel):
3813 3823 __tablename__ = 'cache_invalidation'
3814 3824 __table_args__ = (
3815 3825 UniqueConstraint('cache_key'),
3816 3826 Index('key_idx', 'cache_key'),
3817 3827 Index('cache_args_idx', 'cache_args'),
3818 3828 base_table_args,
3819 3829 )
3820 3830
3821 3831 CACHE_TYPE_FEED = 'FEED'
3822 3832
3823 3833 # namespaces used to register process/thread aware caches
3824 3834 REPO_INVALIDATION_NAMESPACE = 'repo_cache.v1:{repo_id}'
3825 3835
3826 3836 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3827 3837 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3828 3838 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3829 3839 cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
3830 3840 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3831 3841
3832 3842 def __init__(self, cache_key, cache_args='', cache_state_uid=None, cache_active=False):
3833 3843 self.cache_key = cache_key
3834 3844 self.cache_args = cache_args
3835 3845 self.cache_active = cache_active
3836 3846 # the initial cache_state_uid should be the same for all entries, since all workers share it
3837 3847 self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()
3838 3848
3839 3849 def __repr__(self):
3840 3850 return "<%s('%s:%s[%s]')>" % (
3841 3851 self.cls_name,
3842 3852 self.cache_id, self.cache_key, self.cache_active)
3843 3853
3844 3854 def _cache_key_partition(self):
3845 3855 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3846 3856 return prefix, repo_name, suffix
3847 3857
3848 3858 def get_prefix(self):
3849 3859 """
3850 3860 Try to extract the prefix from an existing cache key. The key may
3851 3861 consist of a prefix, repo_name and suffix.
3852 3862 """
3853 3863 # this returns prefix, repo_name, suffix
3854 3864 return self._cache_key_partition()[0]
3855 3865
3856 3866 def get_suffix(self):
3857 3867 """
3858 3868 get suffix that might have been used in _get_cache_key to
3859 3869 generate self.cache_key. Only used for informational purposes
3860 3870 in repo_edit.mako.
3861 3871 """
3862 3872 # prefix, repo_name, suffix
3863 3873 return self._cache_key_partition()[2]
3864 3874
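# Illustrative sketch (hypothetical key and args): for cache_key
# 'prefix_some-repo_suffix' and cache_args 'some-repo',
# _cache_key_partition() yields ('prefix_', 'some-repo', '_suffix'),
# so get_prefix() returns 'prefix_' and get_suffix() returns '_suffix'.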
3865 3875 @classmethod
3866 3876 def generate_new_state_uid(cls, based_on=None):
3867 3877 if based_on:
3868 3878 return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
3869 3879 else:
3870 3880 return str(uuid.uuid4())
3871 3881
3872 3882 @classmethod
3873 3883 def delete_all_cache(cls):
3874 3884 """
3875 3885 Delete all cache keys from database.
3876 3886 Should only be run when all instances are down and all entries
3877 3887 thus stale.
3878 3888 """
3879 3889 cls.query().delete()
3880 3890 Session().commit()
3881 3891
3882 3892 @classmethod
3883 3893 def set_invalidate(cls, cache_uid, delete=False):
3884 3894 """
3885 3895 Mark all caches of a repo as invalid in the database.
3886 3896 """
3887 3897 try:
3888 3898 qry = Session().query(cls).filter(cls.cache_key == cache_uid)
3889 3899 if delete:
3890 3900 qry.delete()
3891 3901 log.debug('cache objects deleted for cache args %s',
3892 3902 safe_str(cache_uid))
3893 3903 else:
3894 3904 new_uid = cls.generate_new_state_uid()
3895 3905 qry.update({"cache_state_uid": new_uid,
3896 3906 "cache_args": f"repo_state:{time.time()}"})
3897 3907 log.debug('cache object %s set new UID %s',
3898 3908 safe_str(cache_uid), new_uid)
3899 3909
3900 3910 Session().commit()
3901 3911 except Exception:
3902 3912 log.exception(
3903 3913 'Cache key invalidation failed for cache args %s',
3904 3914 safe_str(cache_uid))
3905 3915 Session().rollback()
3906 3916
3907 3917 @classmethod
3908 3918 def get_active_cache(cls, cache_key):
3909 3919 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3910 3920 if inv_obj:
3911 3921 return inv_obj
3912 3922 return None
3913 3923
3914 3924 @classmethod
3915 3925 def get_namespace_map(cls, namespace):
3916 3926 return {
3917 3927 x.cache_key: x
3918 3928 for x in cls.query().filter(cls.cache_args == namespace)}
3919 3929
3920 3930
3921 3931 class ChangesetComment(Base, BaseModel):
3922 3932 __tablename__ = 'changeset_comments'
3923 3933 __table_args__ = (
3924 3934 Index('cc_revision_idx', 'revision'),
3925 3935 base_table_args,
3926 3936 )
3927 3937
3928 3938 COMMENT_OUTDATED = 'comment_outdated'
3929 3939 COMMENT_TYPE_NOTE = 'note'
3930 3940 COMMENT_TYPE_TODO = 'todo'
3931 3941 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3932 3942
3933 3943 OP_IMMUTABLE = 'immutable'
3934 3944 OP_CHANGEABLE = 'changeable'
3935 3945
3936 3946 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3937 3947 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3938 3948 revision = Column('revision', String(40), nullable=True)
3939 3949 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3940 3950 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3941 3951 line_no = Column('line_no', Unicode(10), nullable=True)
3942 3952 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3943 3953 f_path = Column('f_path', Unicode(1000), nullable=True)
3944 3954 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3945 3955 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3946 3956 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3947 3957 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3948 3958 renderer = Column('renderer', Unicode(64), nullable=True)
3949 3959 display_state = Column('display_state', Unicode(128), nullable=True)
3950 3960 immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
3951 3961 draft = Column('draft', Boolean(), nullable=True, default=False)
3952 3962
3953 3963 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3954 3964 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3955 3965
3956 3966 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3957 3967 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3958 3968
3959 3969 author = relationship('User', lazy='select', back_populates='user_comments')
3960 3970 repo = relationship('Repository', back_populates='comments')
3961 3971 status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select', back_populates='comment')
3962 3972 pull_request = relationship('PullRequest', lazy='select', back_populates='comments')
3963 3973 pull_request_version = relationship('PullRequestVersion', lazy='select')
3964 3974 history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version', back_populates="comment")
3965 3975
3966 3976 @classmethod
3967 3977 def get_users(cls, revision=None, pull_request_id=None):
3968 3978 """
3969 3979 Returns users associated with this ChangesetComment, i.e. those
3970 3980 who actually commented
3971 3981
3972 3982 :param cls:
3973 3983 :param revision:
3974 3984 """
3975 3985 q = Session().query(User).join(ChangesetComment.author)
3976 3986 if revision:
3977 3987 q = q.filter(cls.revision == revision)
3978 3988 elif pull_request_id:
3979 3989 q = q.filter(cls.pull_request_id == pull_request_id)
3980 3990 return q.all()
3981 3991
3982 3992 @classmethod
3983 3993 def get_index_from_version(cls, pr_version, versions=None, num_versions=None) -> int:
3984 3994 if pr_version is None:
3985 3995 return 0
3986 3996
3987 3997 if versions is not None:
3988 3998 num_versions = [x.pull_request_version_id for x in versions]
3989 3999
3990 4000 num_versions = num_versions or []
3991 4001 try:
3992 4002 return num_versions.index(pr_version) + 1
3993 4003 except (IndexError, ValueError):
3994 4004 return 0
3995 4005
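# Illustrative sketch (hypothetical version ids): with versions whose ids are
# [10, 12, 15], get_index_from_version(12, num_versions=[10, 12, 15]) returns 2
# (a 1-based index); an unknown pr_version or None returns 0.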
3996 4006 @property
3997 4007 def outdated(self):
3998 4008 return self.display_state == self.COMMENT_OUTDATED
3999 4009
4000 4010 @property
4001 4011 def outdated_js(self):
4002 4012 return str_json(self.display_state == self.COMMENT_OUTDATED)
4003 4013
4004 4014 @property
4005 4015 def immutable(self):
4006 4016 return self.immutable_state == self.OP_IMMUTABLE
4007 4017
4008 4018 def outdated_at_version(self, version: int) -> bool:
4009 4019 """
4010 4020 Checks if comment is outdated for given pull request version
4011 4021 """
4012 4022
4013 4023 def version_check():
4014 4024 return self.pull_request_version_id and self.pull_request_version_id != version
4015 4025
4016 4026 if self.is_inline:
4017 4027 return self.outdated and version_check()
4018 4028 else:
4019 4029 # general comments don't have .outdated set; comments made on the latest version also have no version id
4020 4030 return version_check()
4021 4031
4022 4032 def outdated_at_version_js(self, version):
4023 4033 """
4024 4034 Checks if comment is outdated for given pull request version
4025 4035 """
4026 4036 return str_json(self.outdated_at_version(version))
4027 4037
4028 4038 def older_than_version(self, version: int) -> bool:
4029 4039 """
4030 4040 Checks if the comment was made on an earlier version than the given one.
4031 4041 Assumes self.pull_request_version.pull_request_version_id is an integer if not None.
4032 4042 """
4033 4043
4034 4044 # If version is None, return False as the current version cannot be less than None
4035 4045 if version is None:
4036 4046 return False
4037 4047
4038 4048 # Ensure that the version is an integer to prevent TypeError on comparison
4039 4049 if not isinstance(version, int):
4040 4050 raise ValueError("The provided version must be an integer.")
4041 4051
4042 4052 # Initialize current version to 0 or pull_request_version_id if it's available
4043 4053 cur_ver = 0
4044 4054 if self.pull_request_version and self.pull_request_version.pull_request_version_id is not None:
4045 4055 cur_ver = self.pull_request_version.pull_request_version_id
4046 4056
4047 4057 # Return True if the current version is less than the given version
4048 4058 return cur_ver < version
4049 4059
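# Illustrative sketch (hypothetical ids): a comment attached to pull request
# version 3 gives older_than_version(5) == True and older_than_version(3) == False;
# a comment with no version uses cur_ver = 0 and is older than any positive version.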
4050 4060 def older_than_version_js(self, version):
4051 4061 """
4052 4062 Checks if the comment was made on an earlier version than the given one
4053 4063 """
4054 4064 return str_json(self.older_than_version(version))
4055 4065
4056 4066 @property
4057 4067 def commit_id(self):
4058 4068 """New style naming to stop using .revision"""
4059 4069 return self.revision
4060 4070
4061 4071 @property
4062 4072 def resolved(self):
4063 4073 return self.resolved_by[0] if self.resolved_by else None
4064 4074
4065 4075 @property
4066 4076 def is_todo(self):
4067 4077 return self.comment_type == self.COMMENT_TYPE_TODO
4068 4078
4069 4079 @property
4070 4080 def is_inline(self):
4071 4081 if self.line_no and self.f_path:
4072 4082 return True
4073 4083 return False
4074 4084
4075 4085 @property
4076 4086 def last_version(self):
4077 4087 version = 0
4078 4088 if self.history:
4079 4089 version = self.history[-1].version
4080 4090 return version
4081 4091
4082 4092 def get_index_version(self, versions):
4083 4093 return self.get_index_from_version(
4084 4094 self.pull_request_version_id, versions)
4085 4095
4086 4096 @property
4087 4097 def review_status(self):
4088 4098 if self.status_change:
4089 4099 return self.status_change[0].status
4090 4100
4091 4101 @property
4092 4102 def review_status_lbl(self):
4093 4103 if self.status_change:
4094 4104 return self.status_change[0].status_lbl
4095 4105
4096 4106 def __repr__(self):
4097 4107 if self.comment_id:
4098 4108 return f'<DB:Comment #{self.comment_id}>'
4099 4109 else:
4100 4110 return f'<DB:Comment at {id(self)!r}>'
4101 4111
4102 4112 def get_api_data(self):
4103 4113 comment = self
4104 4114
4105 4115 data = {
4106 4116 'comment_id': comment.comment_id,
4107 4117 'comment_type': comment.comment_type,
4108 4118 'comment_text': comment.text,
4109 4119 'comment_status': comment.status_change,
4110 4120 'comment_f_path': comment.f_path,
4111 4121 'comment_lineno': comment.line_no,
4112 4122 'comment_author': comment.author,
4113 4123 'comment_created_on': comment.created_on,
4114 4124 'comment_resolved_by': self.resolved,
4115 4125 'comment_commit_id': comment.revision,
4116 4126 'comment_pull_request_id': comment.pull_request_id,
4117 4127 'comment_last_version': self.last_version
4118 4128 }
4119 4129 return data
4120 4130
4121 4131 def __json__(self):
4122 4132 data = dict()
4123 4133 data.update(self.get_api_data())
4124 4134 return data
4125 4135
4126 4136
4127 4137 class ChangesetCommentHistory(Base, BaseModel):
4128 4138 __tablename__ = 'changeset_comments_history'
4129 4139 __table_args__ = (
4130 4140 Index('cch_comment_id_idx', 'comment_id'),
4131 4141 base_table_args,
4132 4142 )
4133 4143
4134 4144 comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
4135 4145 comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
4136 4146 version = Column("version", Integer(), nullable=False, default=0)
4137 4147 created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
4138 4148 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
4139 4149 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4140 4150 deleted = Column('deleted', Boolean(), default=False)
4141 4151
4142 4152 author = relationship('User', lazy='joined')
4143 4153 comment = relationship('ChangesetComment', cascade="all, delete", back_populates="history")
4144 4154
4145 4155 @classmethod
4146 4156 def get_version(cls, comment_id):
4147 4157 q = Session().query(ChangesetCommentHistory).filter(
4148 4158 ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
4149 4159 if q.count() == 0:
4150 4160 return 1
4151 4161 elif q.count() >= q[0].version:
4152 4162 return q.count() + 1
4153 4163 else:
4154 4164 return q[0].version + 1
4155 4165
4156 4166
4157 4167 class ChangesetStatus(Base, BaseModel):
4158 4168 __tablename__ = 'changeset_statuses'
4159 4169 __table_args__ = (
4160 4170 Index('cs_revision_idx', 'revision'),
4161 4171 Index('cs_version_idx', 'version'),
4162 4172 UniqueConstraint('repo_id', 'revision', 'version'),
4163 4173 base_table_args
4164 4174 )
4165 4175
4166 4176 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
4167 4177 STATUS_APPROVED = 'approved'
4168 4178 STATUS_REJECTED = 'rejected'
4169 4179 STATUS_UNDER_REVIEW = 'under_review'
4170 4180
4171 4181 STATUSES = [
4172 4182 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
4173 4183 (STATUS_APPROVED, _("Approved")),
4174 4184 (STATUS_REJECTED, _("Rejected")),
4175 4185 (STATUS_UNDER_REVIEW, _("Under Review")),
4176 4186 ]
4177 4187
4178 4188 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
4179 4189 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
4180 4190 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
4181 4191 revision = Column('revision', String(40), nullable=False)
4182 4192 status = Column('status', String(128), nullable=False, default=DEFAULT)
4183 4193 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
4184 4194 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
4185 4195 version = Column('version', Integer(), nullable=False, default=0)
4186 4196 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
4187 4197
4188 4198 author = relationship('User', lazy='select')
4189 4199 repo = relationship('Repository', lazy='select')
4190 4200 comment = relationship('ChangesetComment', lazy='select', back_populates='status_change')
4191 4201 pull_request = relationship('PullRequest', lazy='select', back_populates='statuses')
4192 4202
4193 4203 def __repr__(self):
4194 4204 return f"<{self.cls_name}('{self.status}[v{self.version}]:{self.author}')>"
4195 4205
4196 4206 @classmethod
4197 4207 def get_status_lbl(cls, value):
4198 4208 return dict(cls.STATUSES).get(value)
4199 4209
4200 4210 @property
4201 4211 def status_lbl(self):
4202 4212 return ChangesetStatus.get_status_lbl(self.status)
4203 4213
4204 4214 def get_api_data(self):
4205 4215 status = self
4206 4216 data = {
4207 4217 'status_id': status.changeset_status_id,
4208 4218 'status': status.status,
4209 4219 }
4210 4220 return data
4211 4221
4212 4222 def __json__(self):
4213 4223 data = dict()
4214 4224 data.update(self.get_api_data())
4215 4225 return data
4216 4226
4217 4227
4218 4228 class _SetState(object):
4219 4229 """
4220 4230 Context manager allowing state changes for sensitive operations such as
4221 4231 pull request update or merge
4222 4232 """
4223 4233
4224 4234 def __init__(self, pull_request, pr_state, back_state=None):
4225 4235 self._pr = pull_request
4226 4236 self._org_state = back_state or pull_request.pull_request_state
4227 4237 self._pr_state = pr_state
4228 4238 self._current_state = None
4229 4239
4230 4240 def __enter__(self):
4231 4241 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
4232 4242 self._pr, self._pr_state)
4233 4243 self.set_pr_state(self._pr_state)
4234 4244 return self
4235 4245
4236 4246 def __exit__(self, exc_type, exc_val, exc_tb):
4237 4247 if exc_val is not None or exc_type is not None:
4238 4248 log.error(traceback.format_tb(exc_tb))
4239 4249 return None
4240 4250
4241 4251 self.set_pr_state(self._org_state)
4242 4252 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
4243 4253 self._pr, self._org_state)
4244 4254
4245 4255 @property
4246 4256 def state(self):
4247 4257 return self._current_state
4248 4258
4249 4259 def set_pr_state(self, pr_state):
4250 4260 try:
4251 4261 self._pr.pull_request_state = pr_state
4252 4262 Session().add(self._pr)
4253 4263 Session().commit()
4254 4264 self._current_state = pr_state
4255 4265 except Exception:
4256 4266 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
4257 4267 raise
4258 4268
4259 4269
4260 4270 class _PullRequestBase(BaseModel):
4261 4271 """
4262 4272 Common attributes of pull request and version entries.
4263 4273 """
4264 4274
4265 4275 # .status values
4266 4276 STATUS_NEW = 'new'
4267 4277 STATUS_OPEN = 'open'
4268 4278 STATUS_CLOSED = 'closed'
4269 4279
4270 4280 # available states
4271 4281 STATE_CREATING = 'creating'
4272 4282 STATE_UPDATING = 'updating'
4273 4283 STATE_MERGING = 'merging'
4274 4284 STATE_CREATED = 'created'
4275 4285
4276 4286 title = Column('title', Unicode(255), nullable=True)
4277 4287 description = Column(
4278 4288 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
4279 4289 nullable=True)
4280 4290 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
4281 4291
4282 4292 # new/open/closed status of pull request (not approve/reject/etc)
4283 4293 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
4284 4294 created_on = Column(
4285 4295 'created_on', DateTime(timezone=False), nullable=False,
4286 4296 default=datetime.datetime.now)
4287 4297 updated_on = Column(
4288 4298 'updated_on', DateTime(timezone=False), nullable=False,
4289 4299 default=datetime.datetime.now)
4290 4300
4291 4301 pull_request_state = Column("pull_request_state", String(255), nullable=True)
4292 4302
4293 4303 @declared_attr
4294 4304 def user_id(cls):
4295 4305 return Column(
4296 4306 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
4297 4307 unique=None)
4298 4308
4299 4309 # 500 revisions max
4300 4310 _revisions = Column(
4301 4311 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
4302 4312
4303 4313 common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)
4304 4314
4305 4315 @declared_attr
4306 4316 def source_repo_id(cls):
4307 4317 # TODO: dan: rename column to source_repo_id
4308 4318 return Column(
4309 4319 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4310 4320 nullable=False)
4311 4321
4312 4322 @declared_attr
4313 4323 def pr_source(cls):
4314 4324 return relationship(
4315 4325 'Repository',
4316 4326 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4317 4327 overlaps="pull_requests_source"
4318 4328 )
4319 4329
4320 4330 _source_ref = Column('org_ref', Unicode(255), nullable=False)
4321 4331
4322 4332 @hybrid_property
4323 4333 def source_ref(self):
4324 4334 return self._source_ref
4325 4335
4326 4336 @source_ref.setter
4327 4337 def source_ref(self, val):
4328 4338 parts = (val or '').split(':')
4329 4339 if len(parts) != 3:
4330 4340 raise ValueError(
4331 4341 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4332 4342 self._source_ref = safe_str(val)
4333 4343
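# Illustrative note (a sketch of the expected format): the X:Y:Z reference
# appears to be <type>:<name>:<commit_id>, e.g. 'branch:default:<40-char sha>'
# (hypothetical values); target_ref below follows the same convention.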
4334 4344 _target_ref = Column('other_ref', Unicode(255), nullable=False)
4335 4345
4336 4346 @hybrid_property
4337 4347 def target_ref(self):
4338 4348 return self._target_ref
4339 4349
4340 4350 @target_ref.setter
4341 4351 def target_ref(self, val):
4342 4352 parts = (val or '').split(':')
4343 4353 if len(parts) != 3:
4344 4354 raise ValueError(
4345 4355 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
4346 4356 self._target_ref = safe_str(val)
4347 4357
4348 4358 @declared_attr
4349 4359 def target_repo_id(cls):
4350 4360 # TODO: dan: rename column to target_repo_id
4351 4361 return Column(
4352 4362 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
4353 4363 nullable=False)
4354 4364
4355 4365 @declared_attr
4356 4366 def pr_target(cls):
4357 4367 return relationship(
4358 4368 'Repository',
4359 4369 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4360 4370 overlaps="pull_requests_target"
4361 4371 )
4362 4372
4363 4373 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
4364 4374
4365 4375 # TODO: dan: rename column to last_merge_source_rev
4366 4376 _last_merge_source_rev = Column(
4367 4377 'last_merge_org_rev', String(40), nullable=True)
4368 4378 # TODO: dan: rename column to last_merge_target_rev
4369 4379 _last_merge_target_rev = Column(
4370 4380 'last_merge_other_rev', String(40), nullable=True)
4371 4381 _last_merge_status = Column('merge_status', Integer(), nullable=True)
4372 4382 last_merge_metadata = Column(
4373 4383 'last_merge_metadata', MutationObj.as_mutable(
4374 4384 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4375 4385
4376 4386 merge_rev = Column('merge_rev', String(40), nullable=True)
4377 4387
4378 4388 reviewer_data = Column(
4379 4389 'reviewer_data_json', MutationObj.as_mutable(
4380 4390 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4381 4391
4382 4392 @property
4383 4393 def reviewer_data_json(self):
4384 4394 return str_json(self.reviewer_data)
4385 4395
4386 4396 @property
4387 4397 def last_merge_metadata_parsed(self):
4388 4398 metadata = {}
4389 4399 if not self.last_merge_metadata:
4390 4400 return metadata
4391 4401
4392 4402 if hasattr(self.last_merge_metadata, 'de_coerce'):
4393 4403 for k, v in self.last_merge_metadata.de_coerce().items():
4394 4404 if k in ['target_ref', 'source_ref']:
4395 4405 metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
4396 4406 else:
4397 4407 if hasattr(v, 'de_coerce'):
4398 4408 metadata[k] = v.de_coerce()
4399 4409 else:
4400 4410 metadata[k] = v
4401 4411 return metadata
4402 4412
4403 4413 @property
4404 4414 def work_in_progress(self):
4405 4415 """checks if pull request is work in progress by checking the title"""
4406 4416 title = self.title.upper()
4407 4417 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4408 4418 return True
4409 4419 return False
4410 4420
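# Illustrative sketch (hypothetical titles): '[WIP] Add feature', 'WIP: fix bug'
# and 'WIP cleanup' all match the pattern above and mark the pull request as
# work in progress; 'Add WIP support' does not, since re.match anchors at the
# start of the title.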
4411 4421 @property
4412 4422 def title_safe(self):
4413 4423 return self.title\
4414 4424 .replace('{', '{{')\
4415 4425 .replace('}', '}}')
4416 4426
4417 4427 @hybrid_property
4418 4428 def description_safe(self):
4419 4429 return description_escaper(self.description)
4420 4430
4421 4431 @hybrid_property
4422 4432 def revisions(self):
4423 4433 return self._revisions.split(':') if self._revisions else []
4424 4434
4425 4435 @revisions.setter
4426 4436 def revisions(self, val):
4427 4437 self._revisions = ':'.join(val)
4428 4438
4429 4439 @hybrid_property
4430 4440 def last_merge_status(self):
4431 4441 return safe_int(self._last_merge_status)
4432 4442
4433 4443 @last_merge_status.setter
4434 4444 def last_merge_status(self, val):
4435 4445 self._last_merge_status = val
4436 4446
4437 4447 @declared_attr
4438 4448 def author(cls):
4439 4449 return relationship(
4440 4450 'User', lazy='joined',
4441 4451 # TODO: enabling the back_populates below causes a problem here for
4442 4452 # some reason, so it stays disabled: back_populates='user_pull_requests'
4443 4453 )
4444 4454
4445 4455 @declared_attr
4446 4456 def source_repo(cls):
4447 4457 return relationship(
4448 4458 'Repository',
4449 4459 primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
4450 4460 overlaps="pr_source"
4451 4461 )
4452 4462
4453 4463 @property
4454 4464 def source_ref_parts(self):
4455 4465 return self.unicode_to_reference(self.source_ref)
4456 4466
4457 4467 @declared_attr
4458 4468 def target_repo(cls):
4459 4469 return relationship(
4460 4470 'Repository',
4461 4471 primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
4462 4472 overlaps="pr_target"
4463 4473 )
4464 4474
4465 4475 @property
4466 4476 def target_ref_parts(self):
4467 4477 return self.unicode_to_reference(self.target_ref)
4468 4478
4469 4479 @property
4470 4480 def shadow_merge_ref(self):
4471 4481 return self.unicode_to_reference(self._shadow_merge_ref)
4472 4482
4473 4483 @shadow_merge_ref.setter
4474 4484 def shadow_merge_ref(self, ref):
4475 4485 self._shadow_merge_ref = self.reference_to_unicode(ref)
4476 4486
4477 4487 @staticmethod
4478 4488 def unicode_to_reference(raw):
4479 4489 return unicode_to_reference(raw)
4480 4490
4481 4491 @staticmethod
4482 4492 def reference_to_unicode(ref):
4483 4493 return reference_to_unicode(ref)
4484 4494
4485 4495 def get_api_data(self, with_merge_state=True):
4486 4496 from rhodecode.model.pull_request import PullRequestModel
4487 4497
4488 4498 pull_request = self
4489 4499 if with_merge_state:
4490 4500 merge_response, merge_status, msg = \
4491 4501 PullRequestModel().merge_status(pull_request)
4492 4502 merge_state = {
4493 4503 'status': merge_status,
4494 4504 'message': safe_str(msg),
4495 4505 }
4496 4506 else:
4497 4507 merge_state = {'status': 'not_available',
4498 4508 'message': 'not_available'}
4499 4509
4500 4510 merge_data = {
4501 4511 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4502 4512 'reference': (
4503 4513 pull_request.shadow_merge_ref.asdict()
4504 4514 if pull_request.shadow_merge_ref else None),
4505 4515 }
4506 4516
4507 4517 data = {
4508 4518 'pull_request_id': pull_request.pull_request_id,
4509 4519 'url': PullRequestModel().get_url(pull_request),
4510 4520 'title': pull_request.title,
4511 4521 'description': pull_request.description,
4512 4522 'status': pull_request.status,
4513 4523 'state': pull_request.pull_request_state,
4514 4524 'created_on': pull_request.created_on,
4515 4525 'updated_on': pull_request.updated_on,
4516 4526 'commit_ids': pull_request.revisions,
4517 4527 'review_status': pull_request.calculated_review_status(),
4518 4528 'mergeable': merge_state,
4519 4529 'source': {
4520 4530 'clone_url': pull_request.source_repo.clone_url(),
4521 4531 'repository': pull_request.source_repo.repo_name,
4522 4532 'reference': {
4523 4533 'name': pull_request.source_ref_parts.name,
4524 4534 'type': pull_request.source_ref_parts.type,
4525 4535 'commit_id': pull_request.source_ref_parts.commit_id,
4526 4536 },
4527 4537 },
4528 4538 'target': {
4529 4539 'clone_url': pull_request.target_repo.clone_url(),
4530 4540 'repository': pull_request.target_repo.repo_name,
4531 4541 'reference': {
4532 4542 'name': pull_request.target_ref_parts.name,
4533 4543 'type': pull_request.target_ref_parts.type,
4534 4544 'commit_id': pull_request.target_ref_parts.commit_id,
4535 4545 },
4536 4546 },
4537 4547 'merge': merge_data,
4538 4548 'author': pull_request.author.get_api_data(include_secrets=False,
4539 4549 details='basic'),
4540 4550 'reviewers': [
4541 4551 {
4542 4552 'user': reviewer.get_api_data(include_secrets=False,
4543 4553 details='basic'),
4544 4554 'reasons': reasons,
4545 4555 'review_status': st[0][1].status if st else 'not_reviewed',
4546 4556 }
4547 4557 for obj, reviewer, reasons, mandatory, st in
4548 4558 pull_request.reviewers_statuses()
4549 4559 ]
4550 4560 }
4551 4561
4552 4562 return data
4553 4563
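# Illustrative sketch (hypothetical consumer; assumes the basic user payload
# exposes 'username'): reading the API payload built above.
#
#   data = pull_request.get_api_data(with_merge_state=False)
#   source_branch = data['source']['reference']['name']
#   reviewer_names = [r['user']['username'] for r in data['reviewers']]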
4554 4564 def set_state(self, pull_request_state, final_state=None):
4555 4565 """
4556 4566 # goes from initial state to updating to initial state.
4557 4567 # initial state can be changed by specifying back_state=
4558 4568 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4559 4569 pull_request.merge()
4560 4570
4561 4571 :param pull_request_state:
4562 4572 :param final_state:
4563 4573
4564 4574 """
4565 4575
4566 4576 return _SetState(self, pull_request_state, back_state=final_state)
4567 4577
4568 4578
4569 4579 class PullRequest(Base, _PullRequestBase):
4570 4580 __tablename__ = 'pull_requests'
4571 4581 __table_args__ = (
4572 4582 base_table_args,
4573 4583 )
4574 4584 LATEST_VER = 'latest'
4575 4585
4576 4586 pull_request_id = Column(
4577 4587 'pull_request_id', Integer(), nullable=False, primary_key=True)
4578 4588
4579 4589 def __repr__(self):
4580 4590 if self.pull_request_id:
4581 4591 return f'<DB:PullRequest #{self.pull_request_id}>'
4582 4592 else:
4583 4593 return f'<DB:PullRequest at {id(self)!r}>'
4584 4594
4585 4595 def __str__(self):
4586 4596 if self.pull_request_id:
4587 4597 return f'#{self.pull_request_id}'
4588 4598 else:
4589 4599 return f'#{id(self)!r}'
4590 4600
4591 4601 reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
4592 4602 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
4593 4603 comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
4594 4604 versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')
4595 4605
4596 4606 @classmethod
4597 4607 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4598 4608 internal_methods=None):
4599 4609
4600 4610 class PullRequestDisplay(object):
4601 4611 """
4602 4612 Special object wrapper for showing PullRequest data via Versions
4603 4613 It mimics PR object as close as possible. This is read only object
4604 4614 just for display
4605 4615 """
4606 4616
4607 4617 def __init__(self, attrs, internal=None):
4608 4618 self.attrs = attrs
4609 4619 # internal attributes have priority over the ones given via attrs
4610 4620 self.internal = internal or ['versions']
4611 4621
4612 4622 def __getattr__(self, item):
4613 4623 if item in self.internal:
4614 4624 return getattr(self, item)
4615 4625 try:
4616 4626 return self.attrs[item]
4617 4627 except KeyError:
4618 4628 raise AttributeError(
4619 4629 '%s object has no attribute %s' % (self, item))
4620 4630
4621 4631 def __repr__(self):
4622 4632 pr_id = self.attrs.get('pull_request_id')
4623 4633 return f'<DB:PullRequestDisplay #{pr_id}>'
4624 4634
4625 4635 def versions(self):
4626 4636 return pull_request_obj.versions.order_by(
4627 4637 PullRequestVersion.pull_request_version_id).all()
4628 4638
4629 4639 def is_closed(self):
4630 4640 return pull_request_obj.is_closed()
4631 4641
4632 4642 def is_state_changing(self):
4633 4643 return pull_request_obj.is_state_changing()
4634 4644
4635 4645 @property
4636 4646 def pull_request_version_id(self):
4637 4647 return getattr(pull_request_obj, 'pull_request_version_id', None)
4638 4648
4639 4649 @property
4640 4650 def pull_request_last_version(self):
4641 4651 return pull_request_obj.pull_request_last_version
4642 4652
4643 4653 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4644 4654
4645 4655 attrs.author = StrictAttributeDict(
4646 4656 pull_request_obj.author.get_api_data())
4647 4657 if pull_request_obj.target_repo:
4648 4658 attrs.target_repo = StrictAttributeDict(
4649 4659 pull_request_obj.target_repo.get_api_data())
4650 4660 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4651 4661
4652 4662 if pull_request_obj.source_repo:
4653 4663 attrs.source_repo = StrictAttributeDict(
4654 4664 pull_request_obj.source_repo.get_api_data())
4655 4665 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4656 4666
4657 4667 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4658 4668 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4659 4669 attrs.revisions = pull_request_obj.revisions
4660 4670 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4661 4671 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4662 4672 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4663 4673 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4664 4674
4665 4675 return PullRequestDisplay(attrs, internal=internal_methods)
4666 4676
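# Illustrative sketch (assumed usage): building a read-only display object
# for an older version of a pull request.
#
#   version = pull_request.versions.order_by(
#       PullRequestVersion.pull_request_version_id).first()
#   display_pr = PullRequest.get_pr_display_object(version, pull_request)
#   display_pr.title       # resolved from the version's attrs dict
#   display_pr.versions()  # "internal" method, proxied to the live PR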
4667 4677 def is_closed(self):
4668 4678 return self.status == self.STATUS_CLOSED
4669 4679
4670 4680 def is_state_changing(self):
4671 4681 return self.pull_request_state != PullRequest.STATE_CREATED
4672 4682
4673 4683 def __json__(self):
4674 4684 return {
4675 4685 'revisions': self.revisions,
4676 4686 'versions': self.versions_count
4677 4687 }
4678 4688
4679 4689 def calculated_review_status(self):
4680 4690 from rhodecode.model.changeset_status import ChangesetStatusModel
4681 4691 return ChangesetStatusModel().calculated_review_status(self)
4682 4692
4683 4693 def reviewers_statuses(self, user=None):
4684 4694 from rhodecode.model.changeset_status import ChangesetStatusModel
4685 4695 return ChangesetStatusModel().reviewers_statuses(self, user=user)
4686 4696
4687 4697 def get_pull_request_reviewers(self, role=None):
4688 4698 qry = PullRequestReviewers.query()\
4689 4699 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4690 4700 if role:
4691 4701 qry = qry.filter(PullRequestReviewers.role == role)
4692 4702
4693 4703 return qry.all()
4694 4704
4695 4705 @property
4696 4706 def reviewers_count(self):
4697 4707 qry = PullRequestReviewers.query()\
4698 4708 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4699 4709 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4700 4710 return qry.count()
4701 4711
4702 4712 @property
4703 4713 def observers_count(self):
4704 4714 qry = PullRequestReviewers.query()\
4705 4715 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4706 4716 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4707 4717 return qry.count()
4708 4718
4709 4719 def observers(self):
4710 4720 qry = PullRequestReviewers.query()\
4711 4721 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4712 4722 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4713 4723 .all()
4714 4724
4715 4725 for entry in qry:
4716 4726 yield entry, entry.user
4717 4727
4718 4728 @property
4719 4729 def workspace_id(self):
4720 4730 from rhodecode.model.pull_request import PullRequestModel
4721 4731 return PullRequestModel()._workspace_id(self)
4722 4732
4723 4733 def get_shadow_repo(self):
4724 4734 workspace_id = self.workspace_id
4725 4735 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4726 4736 if os.path.isdir(shadow_repository_path):
4727 4737 vcs_obj = self.target_repo.scm_instance()
4728 4738 return vcs_obj.get_shadow_instance(shadow_repository_path)
4729 4739
4730 4740 @property
4731 4741 def versions_count(self):
4732 4742 """
4733 4743 Return the number of versions this PR has, e.g. a PR that has been
4734 4744 updated once will have 2 versions
4735 4745 """
4736 4746 return self.versions.count() + 1
4737 4747
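# For example (a sketch, not from the original sources): a PR that has been
# updated twice keeps two PullRequestVersion rows, so versions_count (and
# therefore pull_request_last_version) evaluates to 3.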
4738 4748 @property
4739 4749 def pull_request_last_version(self):
4740 4750 return self.versions_count
4741 4751
4742 4752
4743 4753 class PullRequestVersion(Base, _PullRequestBase):
4744 4754 __tablename__ = 'pull_request_versions'
4745 4755 __table_args__ = (
4746 4756 base_table_args,
4747 4757 )
4748 4758
4749 4759 pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
4750 4760 pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
4751 4761 pull_request = relationship('PullRequest', back_populates='versions')
4752 4762
4753 4763 def __repr__(self):
4754 4764 if self.pull_request_version_id:
4755 4765 return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
4756 4766 else:
4757 4767 return f'<DB:PullRequestVersion at {id(self)!r}>'
4758 4768
4759 4769 @property
4760 4770 def reviewers(self):
4761 4771 return self.pull_request.reviewers
4762 4772
4763 4773 @property
4764 4774 def versions(self):
4765 4775 return self.pull_request.versions
4766 4776
4767 4777 def is_closed(self):
4768 4778 # calculate from original
4769 4779 return self.pull_request.status == self.STATUS_CLOSED
4770 4780
4771 4781 def is_state_changing(self):
4772 4782 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4773 4783
4774 4784 def calculated_review_status(self):
4775 4785 return self.pull_request.calculated_review_status()
4776 4786
4777 4787 def reviewers_statuses(self):
4778 4788 return self.pull_request.reviewers_statuses()
4779 4789
4780 4790 def observers(self):
4781 4791 return self.pull_request.observers()
4782 4792
4783 4793
4784 4794 class PullRequestReviewers(Base, BaseModel):
4785 4795 __tablename__ = 'pull_request_reviewers'
4786 4796 __table_args__ = (
4787 4797 base_table_args,
4788 4798 )
4789 4799 ROLE_REVIEWER = 'reviewer'
4790 4800 ROLE_OBSERVER = 'observer'
4791 4801 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4792 4802
4793 4803 @hybrid_property
4794 4804 def reasons(self):
4795 4805 if not self._reasons:
4796 4806 return []
4797 4807 return self._reasons
4798 4808
4799 4809 @reasons.setter
4800 4810 def reasons(self, val):
4801 4811 val = val or []
4802 4812 if any(not isinstance(x, str) for x in val):
4803 4813 raise Exception('invalid reasons type, must be list of strings')
4804 4814 self._reasons = val
4805 4815
4806 4816 pull_requests_reviewers_id = Column(
4807 4817 'pull_requests_reviewers_id', Integer(), nullable=False,
4808 4818 primary_key=True)
4809 4819 pull_request_id = Column(
4810 4820 "pull_request_id", Integer(),
4811 4821 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4812 4822 user_id = Column(
4813 4823 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4814 4824 _reasons = Column(
4815 4825 'reason', MutationList.as_mutable(
4816 4826 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4817 4827
4818 4828 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4819 4829 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4820 4830
4821 4831 user = relationship('User')
4822 4832 pull_request = relationship('PullRequest', back_populates='reviewers')
4823 4833
4824 4834 rule_data = Column(
4825 4835 'rule_data_json',
4826 4836 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4827 4837
4828 4838 def rule_user_group_data(self):
4829 4839 """
4830 4840 Returns the voting user group rule data for this reviewer
4831 4841 """
4832 4842
4833 4843 if self.rule_data and 'vote_rule' in self.rule_data:
4834 4844 user_group_data = {}
4835 4845 if 'rule_user_group_entry_id' in self.rule_data:
4836 4846 # means a group with voting rules !
4837 4847 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4838 4848 user_group_data['name'] = self.rule_data['rule_name']
4839 4849 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4840 4850
4841 4851 return user_group_data
4842 4852
4843 4853 @classmethod
4844 4854 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4845 4855 qry = PullRequestReviewers.query()\
4846 4856 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4847 4857 if role:
4848 4858 qry = qry.filter(PullRequestReviewers.role == role)
4849 4859
4850 4860 return qry.all()
4851 4861
4852 4862 def __repr__(self):
4853 4863 return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
4854 4864
4855 4865
4856 4866 class Notification(Base, BaseModel):
4857 4867 __tablename__ = 'notifications'
4858 4868 __table_args__ = (
4859 4869 Index('notification_type_idx', 'type'),
4860 4870 base_table_args,
4861 4871 )
4862 4872
4863 4873 TYPE_CHANGESET_COMMENT = 'cs_comment'
4864 4874 TYPE_MESSAGE = 'message'
4865 4875 TYPE_MENTION = 'mention'
4866 4876 TYPE_REGISTRATION = 'registration'
4867 4877 TYPE_PULL_REQUEST = 'pull_request'
4868 4878 TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
4869 4879 TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'
4870 4880
4871 4881 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4872 4882 subject = Column('subject', Unicode(512), nullable=True)
4873 4883 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4874 4884 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4875 4885 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4876 4886 type_ = Column('type', Unicode(255))
4877 4887
4878 4888 created_by_user = relationship('User', back_populates='user_created_notifications')
4879 4889 notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')
4880 4890
4881 4891 @property
4882 4892 def recipients(self):
4883 4893 return [x.user for x in UserNotification.query()\
4884 4894 .filter(UserNotification.notification == self)\
4885 4895 .order_by(UserNotification.user_id.asc()).all()]
4886 4896
4887 4897 @classmethod
4888 4898 def create(cls, created_by, subject, body, recipients, type_=None):
4889 4899 if type_ is None:
4890 4900 type_ = Notification.TYPE_MESSAGE
4891 4901
4892 4902 notification = cls()
4893 4903 notification.created_by_user = created_by
4894 4904 notification.subject = subject
4895 4905 notification.body = body
4896 4906 notification.type_ = type_
4897 4907 notification.created_on = datetime.datetime.now()
4898 4908
4899 4909 # For each recipient, link the created notification to their account
4900 4910 for u in recipients:
4901 4911 assoc = UserNotification()
4902 4912 assoc.user_id = u.user_id
4903 4913 assoc.notification = notification
4904 4914
4905 4915 # if created_by is among the recipients, mark their notification
4906 4916 # as read
4907 4917 if u.user_id == created_by.user_id:
4908 4918 assoc.read = True
4909 4919 Session().add(assoc)
4910 4920
4911 4921 Session().add(notification)
4912 4922
4913 4923 return notification
4914 4924
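# Illustrative sketch (hypothetical users): creating a plain message
# notification; Notification.create() only adds to the session, the caller
# commits.
#
#   notification = Notification.create(
#       created_by=admin_user, subject='maintenance window',
#       body='the server restarts at 22:00 UTC',
#       recipients=[user_a, user_b])
#   Session().commit()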
4915 4925
4916 4926 class UserNotification(Base, BaseModel):
4917 4927 __tablename__ = 'user_to_notification'
4918 4928 __table_args__ = (
4919 4929 UniqueConstraint('user_id', 'notification_id'),
4920 4930 base_table_args
4921 4931 )
4922 4932
4923 4933 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4924 4934 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4925 4935 read = Column('read', Boolean, default=False)
4926 4936 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4927 4937
4928 4938 user = relationship('User', lazy="joined", back_populates='notifications')
4929 4939 notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')
4930 4940
4931 4941 def mark_as_read(self):
4932 4942 self.read = True
4933 4943 Session().add(self)
4934 4944
4935 4945
4936 4946 class UserNotice(Base, BaseModel):
4937 4947 __tablename__ = 'user_notices'
4938 4948 __table_args__ = (
4939 4949 base_table_args
4940 4950 )
4941 4951
4942 4952 NOTIFICATION_TYPE_MESSAGE = 'message'
4943 4953 NOTIFICATION_TYPE_NOTICE = 'notice'
4944 4954
4945 4955 NOTIFICATION_LEVEL_INFO = 'info'
4946 4956 NOTIFICATION_LEVEL_WARNING = 'warning'
4947 4957 NOTIFICATION_LEVEL_ERROR = 'error'
4948 4958
4949 4959 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4950 4960
4951 4961 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4952 4962 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4953 4963
4954 4964 notice_read = Column('notice_read', Boolean, default=False)
4955 4965
4956 4966 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4957 4967 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4958 4968
4959 4969 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4960 4970 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4961 4971
4962 4972 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4963 4973 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4964 4974
4965 4975 @classmethod
4966 4976 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4967 4977
4968 4978 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4969 4979 cls.NOTIFICATION_LEVEL_WARNING,
4970 4980 cls.NOTIFICATION_LEVEL_INFO]:
4971 4981 return
4972 4982
4973 4983 from rhodecode.model.user import UserModel
4974 4984 user = UserModel().get_user(user)
4975 4985
4976 4986 new_notice = UserNotice()
4977 4987 if not allow_duplicate:
4978 4988 existing_msg = UserNotice().query() \
4979 4989 .filter(UserNotice.user == user) \
4980 4990 .filter(UserNotice.notice_body == body) \
4981 4991 .filter(UserNotice.notice_read == false()) \
4982 4992 .scalar()
4983 4993 if existing_msg:
4984 4994 log.warning('Ignoring duplicate notice for user %s', user)
4985 4995 return
4986 4996
4987 4997 new_notice.user = user
4988 4998 new_notice.notice_subject = subject
4989 4999 new_notice.notice_body = body
4990 5000 new_notice.notification_level = notice_level
4991 5001 Session().add(new_notice)
4992 5002 Session().commit()
4993 5003
4994 5004
4995 5005 class Gist(Base, BaseModel):
4996 5006 __tablename__ = 'gists'
4997 5007 __table_args__ = (
4998 5008 Index('g_gist_access_id_idx', 'gist_access_id'),
4999 5009 Index('g_created_on_idx', 'created_on'),
5000 5010 base_table_args
5001 5011 )
5002 5012
5003 5013 GIST_PUBLIC = 'public'
5004 5014 GIST_PRIVATE = 'private'
5005 5015 DEFAULT_FILENAME = 'gistfile1.txt'
5006 5016
5007 5017 ACL_LEVEL_PUBLIC = 'acl_public'
5008 5018 ACL_LEVEL_PRIVATE = 'acl_private'
5009 5019
5010 5020 gist_id = Column('gist_id', Integer(), primary_key=True)
5011 5021 gist_access_id = Column('gist_access_id', Unicode(250))
5012 5022 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
5013 5023 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
5014 5024 gist_expires = Column('gist_expires', Float(53), nullable=False)
5015 5025 gist_type = Column('gist_type', Unicode(128), nullable=False)
5016 5026 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5017 5027 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5018 5028 acl_level = Column('acl_level', Unicode(128), nullable=True)
5019 5029
5020 5030 owner = relationship('User', back_populates='user_gists')
5021 5031
5022 5032 def __repr__(self):
5023 5033 return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'
5024 5034
5025 5035 @hybrid_property
5026 5036 def description_safe(self):
5027 5037 return description_escaper(self.gist_description)
5028 5038
5029 5039 @classmethod
5030 5040 def get_or_404(cls, id_):
5031 5041 from pyramid.httpexceptions import HTTPNotFound
5032 5042
5033 5043 res = cls.query().filter(cls.gist_access_id == id_).scalar()
5034 5044 if not res:
5035 5045 log.debug('WARN: No DB entry with id %s', id_)
5036 5046 raise HTTPNotFound()
5037 5047 return res
5038 5048
5039 5049 @classmethod
5040 5050 def get_by_access_id(cls, gist_access_id):
5041 5051 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
5042 5052
5043 5053 def gist_url(self):
5044 5054 from rhodecode.model.gist import GistModel
5045 5055 return GistModel().get_url(self)
5046 5056
5047 5057 @classmethod
5048 5058 def base_path(cls):
5049 5059 """
5050 5060 Returns the base path where all gists are stored
5051 5061
5052 5062 :param cls:
5053 5063 """
5054 5064 from rhodecode.model.gist import GIST_STORE_LOC
5055 5065 from rhodecode.lib.utils import get_rhodecode_repo_store_path
5056 5066 repo_store_path = get_rhodecode_repo_store_path()
5057 5067 return os.path.join(repo_store_path, GIST_STORE_LOC)
5058 5068
5059 5069 def get_api_data(self):
5060 5070 """
5061 5071 Common function for generating gist related data for API
5062 5072 """
5063 5073 gist = self
5064 5074 data = {
5065 5075 'gist_id': gist.gist_id,
5066 5076 'type': gist.gist_type,
5067 5077 'access_id': gist.gist_access_id,
5068 5078 'description': gist.gist_description,
5069 5079 'url': gist.gist_url(),
5070 5080 'expires': gist.gist_expires,
5071 5081 'created_on': gist.created_on,
5072 5082 'modified_at': gist.modified_at,
5073 5083 'content': None,
5074 5084 'acl_level': gist.acl_level,
5075 5085 }
5076 5086 return data
5077 5087
5078 5088 def __json__(self):
5079 5089 data = dict()
5080 5090 data.update(self.get_api_data())
5081 5091 return data
5082 5092 # SCM functions
5083 5093
5084 5094 def scm_instance(self, **kwargs):
5085 5095 """
5086 5096 Get an instance of VCS Repository
5087 5097
5088 5098 :param kwargs:
5089 5099 """
5090 5100 from rhodecode.model.gist import GistModel
5091 5101 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
5092 5102 return get_vcs_instance(
5093 5103 repo_path=safe_str(full_repo_path), create=False,
5094 5104 _vcs_alias=GistModel.vcs_backend)
5095 5105
5096 5106
5097 5107 class ExternalIdentity(Base, BaseModel):
5098 5108 __tablename__ = 'external_identities'
5099 5109 __table_args__ = (
5100 5110 Index('local_user_id_idx', 'local_user_id'),
5101 5111 Index('external_id_idx', 'external_id'),
5102 5112 base_table_args
5103 5113 )
5104 5114
5105 5115 external_id = Column('external_id', Unicode(255), default='', primary_key=True)
5106 5116 external_username = Column('external_username', Unicode(1024), default='')
5107 5117 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
5108 5118 provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
5109 5119 access_token = Column('access_token', String(1024), default='')
5110 5120 alt_token = Column('alt_token', String(1024), default='')
5111 5121 token_secret = Column('token_secret', String(1024), default='')
5112 5122
5113 5123 @classmethod
5114 5124 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
5115 5125 """
5116 5126 Returns ExternalIdentity instance based on search params
5117 5127
5118 5128 :param external_id:
5119 5129 :param provider_name:
5120 5130 :return: ExternalIdentity
5121 5131 """
5122 5132 query = cls.query()
5123 5133 query = query.filter(cls.external_id == external_id)
5124 5134 query = query.filter(cls.provider_name == provider_name)
5125 5135 if local_user_id:
5126 5136 query = query.filter(cls.local_user_id == local_user_id)
5127 5137 return query.first()
5128 5138
5129 5139 @classmethod
5130 5140 def user_by_external_id_and_provider(cls, external_id, provider_name):
5131 5141 """
5132 5142 Returns User instance based on search params
5133 5143
5134 5144 :param external_id:
5135 5145 :param provider_name:
5136 5146 :return: User
5137 5147 """
5138 5148 query = User.query()
5139 5149 query = query.filter(cls.external_id == external_id)
5140 5150 query = query.filter(cls.provider_name == provider_name)
5141 5151 query = query.filter(User.user_id == cls.local_user_id)
5142 5152 return query.first()
5143 5153
5144 5154 @classmethod
5145 5155 def by_local_user_id(cls, local_user_id):
5146 5156 """
5147 5157 Returns all external identities for the given user
5148 5158
5149 5159 :param local_user_id:
5150 5160 :return: ExternalIdentity
5151 5161 """
5152 5162 query = cls.query()
5153 5163 query = query.filter(cls.local_user_id == local_user_id)
5154 5164 return query
5155 5165
5156 5166 @classmethod
5157 5167 def load_provider_plugin(cls, plugin_id):
5158 5168 from rhodecode.authentication.base import loadplugin
5159 5169 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
5160 5170 auth_plugin = loadplugin(_plugin_id)
5161 5171 return auth_plugin
5162 5172
5163 5173
5164 5174 class Integration(Base, BaseModel):
5165 5175 __tablename__ = 'integrations'
5166 5176 __table_args__ = (
5167 5177 base_table_args
5168 5178 )
5169 5179
5170 5180 integration_id = Column('integration_id', Integer(), primary_key=True)
5171 5181 integration_type = Column('integration_type', String(255))
5172 5182 enabled = Column('enabled', Boolean(), nullable=False)
5173 5183 name = Column('name', String(255), nullable=False)
5174 5184 child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)
5175 5185
5176 5186 settings = Column(
5177 5187 'settings_json', MutationObj.as_mutable(
5178 5188 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
5179 5189 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
5180 5190 repo = relationship('Repository', lazy='joined', back_populates='integrations')
5181 5191
5182 5192 repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
5183 5193 repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')
5184 5194
5185 5195 @property
5186 5196 def scope(self):
5187 5197 if self.repo:
5188 5198 return repr(self.repo)
5189 5199 if self.repo_group:
5190 5200 if self.child_repos_only:
5191 5201 return repr(self.repo_group) + ' (child repos only)'
5192 5202 else:
5193 5203 return repr(self.repo_group) + ' (recursive)'
5194 5204 if self.child_repos_only:
5195 5205 return 'root_repos'
5196 5206 return 'global'
5197 5207
5198 5208 def __repr__(self):
5199 5209 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5200 5210
5201 5211
5202 5212 class RepoReviewRuleUser(Base, BaseModel):
5203 5213 __tablename__ = 'repo_review_rules_users'
5204 5214 __table_args__ = (
5205 5215 base_table_args
5206 5216 )
5207 5217 ROLE_REVIEWER = 'reviewer'
5208 5218 ROLE_OBSERVER = 'observer'
5209 5219 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5210 5220
5211 5221 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5212 5222 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5213 5223 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5214 5224 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5215 5225 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5216 5226 user = relationship('User', back_populates='user_review_rules')
5217 5227
5218 5228 def rule_data(self):
5219 5229 return {
5220 5230 'mandatory': self.mandatory,
5221 5231 'role': self.role,
5222 5232 }
5223 5233
5224 5234
5225 5235 class RepoReviewRuleUserGroup(Base, BaseModel):
5226 5236 __tablename__ = 'repo_review_rules_users_groups'
5227 5237 __table_args__ = (
5228 5238 base_table_args
5229 5239 )
5230 5240
5231 5241 VOTE_RULE_ALL = -1
5232 5242 ROLE_REVIEWER = 'reviewer'
5233 5243 ROLE_OBSERVER = 'observer'
5234 5244 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5235 5245
5236 5246 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5237 5247 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5238 5248 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5239 5249 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5240 5250 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5241 5251 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5242 5252 users_group = relationship('UserGroup')
5243 5253
5244 5254 def rule_data(self):
5245 5255 return {
5246 5256 'mandatory': self.mandatory,
5247 5257 'role': self.role,
5248 5258 'vote_rule': self.vote_rule
5249 5259 }
5250 5260
5251 5261 @property
5252 5262 def vote_rule_label(self):
5253 5263 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5254 5264 return 'all must vote'
5255 5265 else:
5256 5266 return 'min. vote {}'.format(self.vote_rule)
5257 5267
5258 5268
5259 5269 class RepoReviewRule(Base, BaseModel):
5260 5270 __tablename__ = 'repo_review_rules'
5261 5271 __table_args__ = (
5262 5272 base_table_args
5263 5273 )
5264 5274
5265 5275 repo_review_rule_id = Column(
5266 5276 'repo_review_rule_id', Integer(), primary_key=True)
5267 5277 repo_id = Column(
5268 5278 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5269 5279 repo = relationship('Repository', back_populates='review_rules')
5270 5280
5271 5281 review_rule_name = Column('review_rule_name', String(255))
5272 5282 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5273 5283 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5274 5284 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*') # glob
5275 5285
5276 5286 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5277 5287
5278 5288 # Legacy fields, just for backward compat
5279 5289 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5280 5290 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5281 5291
5282 5292 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5283 5293 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5284 5294
5285 5295 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5286 5296
5287 5297 rule_users = relationship('RepoReviewRuleUser')
5288 5298 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5289 5299
5290 5300 def _validate_pattern(self, value):
5291 5301 re.compile('^' + glob2re(value) + '$')
5292 5302
5293 5303 @hybrid_property
5294 5304 def source_branch_pattern(self):
5295 5305 return self._branch_pattern or '*'
5296 5306
5297 5307 @source_branch_pattern.setter
5298 5308 def source_branch_pattern(self, value):
5299 5309 self._validate_pattern(value)
5300 5310 self._branch_pattern = value or '*'
5301 5311
5302 5312 @hybrid_property
5303 5313 def target_branch_pattern(self):
5304 5314 return self._target_branch_pattern or '*'
5305 5315
5306 5316 @target_branch_pattern.setter
5307 5317 def target_branch_pattern(self, value):
5308 5318 self._validate_pattern(value)
5309 5319 self._target_branch_pattern = value or '*'
5310 5320
5311 5321 @hybrid_property
5312 5322 def file_pattern(self):
5313 5323 return self._file_pattern or '*'
5314 5324
5315 5325 @file_pattern.setter
5316 5326 def file_pattern(self, value):
5317 5327 self._validate_pattern(value)
5318 5328 self._file_pattern = value or '*'
5319 5329
5320 5330 @hybrid_property
5321 5331 def forbid_pr_author_to_review(self):
5322 5332 return self.pr_author == 'forbid_pr_author'
5323 5333
5324 5334 @hybrid_property
5325 5335 def include_pr_author_to_review(self):
5326 5336 return self.pr_author == 'include_pr_author'
5327 5337
5328 5338 @hybrid_property
5329 5339 def forbid_commit_author_to_review(self):
5330 5340 return self.commit_author == 'forbid_commit_author'
5331 5341
5332 5342 @hybrid_property
5333 5343 def include_commit_author_to_review(self):
5334 5344 return self.commit_author == 'include_commit_author'
5335 5345
5336 5346 def matches(self, source_branch, target_branch, files_changed):
5337 5347 """
5338 5348 Check if this review rule matches a branch/files in a pull request
5339 5349
5340 5350 :param source_branch: source branch name for the commit
5341 5351 :param target_branch: target branch name for the commit
5342 5352 :param files_changed: list of file paths changed in the pull request
5343 5353 """
5344 5354
5345 5355 source_branch = source_branch or ''
5346 5356 target_branch = target_branch or ''
5347 5357 files_changed = files_changed or []
5348 5358
5349 5359 branch_matches = True
5350 5360 if source_branch or target_branch:
5351 5361 if self.source_branch_pattern == '*':
5352 5362 source_branch_match = True
5353 5363 else:
5354 5364 if self.source_branch_pattern.startswith('re:'):
5355 5365 source_pattern = self.source_branch_pattern[3:]
5356 5366 else:
5357 5367 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5358 5368 source_branch_regex = re.compile(source_pattern)
5359 5369 source_branch_match = bool(source_branch_regex.search(source_branch))
5360 5370 if self.target_branch_pattern == '*':
5361 5371 target_branch_match = True
5362 5372 else:
5363 5373 if self.target_branch_pattern.startswith('re:'):
5364 5374 target_pattern = self.target_branch_pattern[3:]
5365 5375 else:
5366 5376 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5367 5377 target_branch_regex = re.compile(target_pattern)
5368 5378 target_branch_match = bool(target_branch_regex.search(target_branch))
5369 5379
5370 5380 branch_matches = source_branch_match and target_branch_match
5371 5381
5372 5382 files_matches = True
5373 5383 if self.file_pattern != '*':
5374 5384 files_matches = False
5375 5385 if self.file_pattern.startswith('re:'):
5376 5386 file_pattern = self.file_pattern[3:]
5377 5387 else:
5378 5388 file_pattern = glob2re(self.file_pattern)
5379 5389 file_regex = re.compile(file_pattern)
5380 5390 for file_data in files_changed:
5381 5391 filename = file_data.get('filename')
5382 5392
5383 5393 if file_regex.search(filename):
5384 5394 files_matches = True
5385 5395 break
5386 5396
5387 5397 return branch_matches and files_matches
5388 5398
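# Illustrative sketch (hypothetical patterns and file list): globs are
# translated via glob2re(), while a 're:' prefix switches a pattern to a raw
# regular expression.
#
#   rule.source_branch_pattern = 'feature/*'
#   rule.target_branch_pattern = 're:^(master|develop)$'
#   rule.file_pattern = '*.py'
#   rule.matches('feature/login', 'develop',
#                [{'filename': 'rhodecode/model/db.py'}])  # -> True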
5389 5399 @property
5390 5400 def review_users(self):
5391 5401 """ Returns the users which this rule applies to """
5392 5402
5393 5403 users = collections.OrderedDict()
5394 5404
5395 5405 for rule_user in self.rule_users:
5396 5406 if rule_user.user.active:
5397 5407 if rule_user.user not in users:
5398 5408 users[rule_user.user.username] = {
5399 5409 'user': rule_user.user,
5400 5410 'source': 'user',
5401 5411 'source_data': {},
5402 5412 'data': rule_user.rule_data()
5403 5413 }
5404 5414
5405 5415 for rule_user_group in self.rule_user_groups:
5406 5416 source_data = {
5407 5417 'user_group_id': rule_user_group.users_group.users_group_id,
5408 5418 'name': rule_user_group.users_group.users_group_name,
5409 5419 'members': len(rule_user_group.users_group.members)
5410 5420 }
5411 5421 for member in rule_user_group.users_group.members:
5412 5422 if member.user.active:
5413 5423 key = member.user.username
5414 5424 if key in users:
5415 5425 # skip this member as we already have them
5416 5426 # this prevents overriding the "first" matched
5417 5427 # users with duplicates from multiple groups
5418 5428 continue
5419 5429
5420 5430 users[key] = {
5421 5431 'user': member.user,
5422 5432 'source': 'user_group',
5423 5433 'source_data': source_data,
5424 5434 'data': rule_user_group.rule_data()
5425 5435 }
5426 5436
5427 5437 return users
5428 5438
5429 5439 def user_group_vote_rule(self, user_id):
5430 5440
5431 5441 rules = []
5432 5442 if not self.rule_user_groups:
5433 5443 return rules
5434 5444
5435 5445 for user_group in self.rule_user_groups:
5436 5446 user_group_members = [x.user_id for x in user_group.users_group.members]
5437 5447 if user_id in user_group_members:
5438 5448 rules.append(user_group)
5439 5449 return rules
5440 5450
5441 5451 def __repr__(self):
5442 5452 return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>'
5443 5453
5444 5454
5445 5455 class ScheduleEntry(Base, BaseModel):
5446 5456 __tablename__ = 'schedule_entries'
5447 5457 __table_args__ = (
5448 5458 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5449 5459 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5450 5460 base_table_args,
5451 5461 )
5452 5462 SCHEDULE_TYPE_INTEGER = "integer"
5453 5463 SCHEDULE_TYPE_CRONTAB = "crontab"
5454 5464
5455 5465 schedule_types = [SCHEDULE_TYPE_CRONTAB, SCHEDULE_TYPE_INTEGER]
5456 5466 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5457 5467
5458 5468 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5459 5469 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5460 5470 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5461 5471
5462 5472 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5463 5473 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5464 5474
5465 5475 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5466 5476 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5467 5477
5468 5478 # task
5469 5479 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5470 5480 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5471 5481 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5472 5482 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5473 5483
5474 5484 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5475 5485 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5476 5486
5477 5487 @hybrid_property
5478 5488 def schedule_type(self):
5479 5489 return self._schedule_type
5480 5490
5481 5491 @schedule_type.setter
5482 5492 def schedule_type(self, val):
5483 5493 if val not in self.schedule_types:
5484 5494 raise ValueError(f'Value must be one of `{self.schedule_types}` and got `{val}`')
5485 5495
5486 5496 self._schedule_type = val
5487 5497
5488 5498 @classmethod
5489 5499 def get_uid(cls, obj):
5490 5500 args = obj.task_args
5491 5501 kwargs = obj.task_kwargs
5492 5502
5493 5503 if isinstance(args, JsonRaw):
5494 5504 try:
5495 5505 args = json.loads(str(args))
5496 5506 except ValueError:
5497 5507 log.exception('json.loads of args failed...')
5498 5508 args = tuple()
5499 5509
5500 5510 if isinstance(kwargs, JsonRaw):
5501 5511 try:
5502 5512 kwargs = json.loads(str(kwargs))
5503 5513 except ValueError:
5504 5514 log.exception('json.loads of kwargs failed...')
5505 5515 kwargs = dict()
5506 5516
5507 5517 dot_notation = obj.task_dot_notation
5508 5518 val = '.'.join(map(safe_str, [dot_notation, args, sorted(kwargs.items())]))
5509 5519 log.debug('calculating task uid using id:`%s`', val)
5510 5520
5511 5521 return sha1(safe_bytes(val))
5512 5522
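# Illustrative sketch (hypothetical entry; the task path is made up): the uid
# is a sha1 over the dotted task path plus its args/kwargs, so two entries
# pointing at the same task with the same arguments collide on the
# s_task_uid_idx unique constraint.
#
#   entry = ScheduleEntry(schedule_name='nightly-maintenance')
#   entry.task_dot_notation = 'rhodecode.lib.celerylib.tasks.some_task'
#   entry.task_args, entry.task_kwargs = [], {}
#   ScheduleEntry.get_uid(entry)  # -> sha1 hex digest string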
5513 5523 @classmethod
5514 5524 def get_by_schedule_name(cls, schedule_name):
5515 5525 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5516 5526
5517 5527 @classmethod
5518 5528 def get_by_schedule_id(cls, schedule_id):
5519 5529 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5520 5530
5521 5531 @classmethod
5522 5532 def get_by_task_uid(cls, task_uid):
5523 5533 return cls.query().filter(cls.task_uid == task_uid).scalar()
5524 5534
5525 5535 @property
5526 5536 def task(self):
5527 5537 return self.task_dot_notation
5528 5538
5529 5539 @property
5530 5540 def schedule(self):
5531 5541 from rhodecode.lib.celerylib.utils import raw_2_schedule
5532 5542 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5533 5543 return schedule
5534 5544
5535 5545 @property
5536 5546 def args(self):
5537 5547 try:
5538 5548 return list(self.task_args or [])
5539 5549 except ValueError:
5540 5550 return list()
5541 5551
5542 5552 @property
5543 5553 def kwargs(self):
5544 5554 try:
5545 5555 return dict(self.task_kwargs or {})
5546 5556 except ValueError:
5547 5557 return dict()
5548 5558
5549 5559 def _as_raw(self, val, indent=False):
5550 5560 if hasattr(val, 'de_coerce'):
5551 5561 val = val.de_coerce()
5552 5562 if val:
5553 5563 if indent:
5554 5564 val = ext_json.formatted_str_json(val)
5555 5565 else:
5556 5566 val = ext_json.str_json(val)
5557 5567
5558 5568 return val
5559 5569
5560 5570 @property
5561 5571 def schedule_definition_raw(self):
5562 5572 return self._as_raw(self.schedule_definition)
5563 5573
5564 5574 def args_raw(self, indent=False):
5565 5575 return self._as_raw(self.task_args, indent)
5566 5576
5567 5577 def kwargs_raw(self, indent=False):
5568 5578 return self._as_raw(self.task_kwargs, indent)
5569 5579
5570 5580 def __repr__(self):
5571 5581 return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
5572 5582
5573 5583
5574 5584 @event.listens_for(ScheduleEntry, 'before_update')
5575 5585 def update_task_uid(mapper, connection, target):
5576 5586 target.task_uid = ScheduleEntry.get_uid(target)
5577 5587
5578 5588
5579 5589 @event.listens_for(ScheduleEntry, 'before_insert')
5580 5590 def set_task_uid(mapper, connection, target):
5581 5591 target.task_uid = ScheduleEntry.get_uid(target)
5582 5592
5583 5593
5584 5594 class _BaseBranchPerms(BaseModel):
5585 5595 @classmethod
5586 5596 def compute_hash(cls, value):
5587 5597 return sha1_safe(value)
5588 5598
5589 5599 @hybrid_property
5590 5600 def branch_pattern(self):
5591 5601 return self._branch_pattern or '*'
5592 5602
5593 5603 @hybrid_property
5594 5604 def branch_hash(self):
5595 5605 return self._branch_hash
5596 5606
5597 5607 def _validate_glob(self, value):
5598 5608 re.compile('^' + glob2re(value) + '$')
5599 5609
5600 5610 @branch_pattern.setter
5601 5611 def branch_pattern(self, value):
5602 5612 self._validate_glob(value)
5603 5613 self._branch_pattern = value or '*'
5604 5614 # set the Hash when setting the branch pattern
5605 5615 self._branch_hash = self.compute_hash(self._branch_pattern)
5606 5616
5607 5617 def matches(self, branch):
5608 5618 """
5610 5620 Check if the given branch matches this entry
5610 5620
5611 5621 :param branch: branch name for the commit
5612 5622 """
5613 5623
5614 5624 branch = branch or ''
5615 5625
5616 5626 branch_matches = True
5617 5627 if branch:
5618 5628 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5619 5629 branch_matches = bool(branch_regex.search(branch))
5620 5630
5621 5631 return branch_matches
5622 5632
5623 5633
5624 5634 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5625 5635 __tablename__ = 'user_to_repo_branch_permissions'
5626 5636 __table_args__ = (
5627 5637 base_table_args
5628 5638 )
5629 5639
5630 5640 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5631 5641
5632 5642 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5633 5643 repo = relationship('Repository', back_populates='user_branch_perms')
5634 5644
5635 5645 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5636 5646 permission = relationship('Permission')
5637 5647
5638 5648 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5639 5649 user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')
5640 5650
5641 5651 rule_order = Column('rule_order', Integer(), nullable=False)
5642 5652 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5643 5653 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5644 5654
5645 5655 def __repr__(self):
5646 5656 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5647 5657
5648 5658
5649 5659 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5650 5660 __tablename__ = 'user_group_to_repo_branch_permissions'
5651 5661 __table_args__ = (
5652 5662 base_table_args
5653 5663 )
5654 5664
5655 5665 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5656 5666
5657 5667 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5658 5668 repo = relationship('Repository', back_populates='user_group_branch_perms')
5659 5669
5660 5670 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5661 5671 permission = relationship('Permission')
5662 5672
5663 5673 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5664 5674 user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')
5665 5675
5666 5676 rule_order = Column('rule_order', Integer(), nullable=False)
5667 5677 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*') # glob
5668 5678 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5669 5679
5670 5680 def __repr__(self):
5671 5681 return f'<UserBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5672 5682
5673 5683
5674 5684 class UserBookmark(Base, BaseModel):
5675 5685 __tablename__ = 'user_bookmarks'
5676 5686 __table_args__ = (
5677 5687 UniqueConstraint('user_id', 'bookmark_repo_id'),
5678 5688 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5679 5689 UniqueConstraint('user_id', 'bookmark_position'),
5680 5690 base_table_args
5681 5691 )
5682 5692
5683 5693 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5684 5694 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5685 5695 position = Column("bookmark_position", Integer(), nullable=False)
5686 5696 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5687 5697 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5688 5698 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5689 5699
5690 5700 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5691 5701 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5692 5702
5693 5703 user = relationship("User")
5694 5704
5695 5705 repository = relationship("Repository")
5696 5706 repository_group = relationship("RepoGroup")
5697 5707
5698 5708 @classmethod
5699 5709 def get_by_position_for_user(cls, position, user_id):
5700 5710 return cls.query() \
5701 5711 .filter(UserBookmark.user_id == user_id) \
5702 5712 .filter(UserBookmark.position == position).scalar()
5703 5713
5704 5714 @classmethod
5705 5715 def get_bookmarks_for_user(cls, user_id, cache=True):
5706 5716 bookmarks = select(
5707 5717 UserBookmark.title,
5708 5718 UserBookmark.position,
5709 5719 ) \
5710 5720 .add_columns(Repository.repo_id, Repository.repo_type, Repository.repo_name) \
5711 5721 .add_columns(RepoGroup.group_id, RepoGroup.group_name) \
5712 5722 .where(UserBookmark.user_id == user_id) \
5713 5723 .outerjoin(Repository, Repository.repo_id == UserBookmark.bookmark_repo_id) \
5714 5724 .outerjoin(RepoGroup, RepoGroup.group_id == UserBookmark.bookmark_repo_group_id) \
5715 5725 .order_by(UserBookmark.position.asc())
5716 5726
5717 5727 if cache:
5718 5728 bookmarks = bookmarks.options(
5719 5729 FromCache("sql_cache_short", f"get_user_{user_id}_bookmarks")
5720 5730 )
5721 5731
5722 5732 return Session().execute(bookmarks).all()
5723 5733
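# Illustrative sketch (assumed call site): rows come back in the column
# order selected above, so they can be unpacked directly.
#
#   for title, position, repo_id, repo_type, repo_name, group_id, group_name \
#           in UserBookmark.get_bookmarks_for_user(user.user_id, cache=False):
#       ...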
5724 5734 def __repr__(self):
5725 5735 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5726 5736
5727 5737
5728 5738 class FileStore(Base, BaseModel):
5729 5739 __tablename__ = 'file_store'
5730 5740 __table_args__ = (
5731 5741 base_table_args
5732 5742 )
5733 5743
5734 5744 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5735 5745 file_uid = Column('file_uid', String(1024), nullable=False)
5736 5746 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5737 5747 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5738 5748 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5739 5749
5740 5750 # sha256 hash
5741 5751 file_hash = Column('file_hash', String(512), nullable=False)
5742 5752 file_size = Column('file_size', BigInteger(), nullable=False)
5743 5753
5744 5754 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5745 5755 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5746 5756 accessed_count = Column('accessed_count', Integer(), default=0)
5747 5757
5748 5758 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5749 5759
5750 5760 # if repo/repo_group reference is set, check for permissions
5751 5761 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5752 5762
5753 5763 # hidden marks an attachment that should not show up in the artifact listing
5754 5764 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5755 5765
5756 5766 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5757 5767 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')
5758 5768
5759 5769 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5760 5770
5761 5771 # scope limited to a user which the requester has access to
5762 5772 scope_user_id = Column(
5763 5773 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5764 5774 nullable=True, unique=None, default=None)
5765 5775 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')
5766 5776
5767 5777 # scope limited to a user group which the requester has access to
5768 5778 scope_user_group_id = Column(
5769 5779 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5770 5780 nullable=True, unique=None, default=None)
5771 5781 user_group = relationship('UserGroup', lazy='joined')
5772 5782
5773 5783 # scope limited to a repo which the requester has access to
5774 5784 scope_repo_id = Column(
5775 5785 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5776 5786 nullable=True, unique=None, default=None)
5777 5787 repo = relationship('Repository', lazy='joined')
5778 5788
5779 5789 # scope limited to a repo group which the requester has access to
5780 5790 scope_repo_group_id = Column(
5781 5791 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5782 5792 nullable=True, unique=None, default=None)
5783 5793 repo_group = relationship('RepoGroup', lazy='joined')
5784 5794
5785 5795 @classmethod
5786 5796 def get_scope(cls, scope_type, scope_id):
5787 5797 if scope_type == 'repo':
5788 5798 return f'repo:{scope_id}'
5789 5799 elif scope_type == 'repo-group':
5790 5800 return f'repo-group:{scope_id}'
5791 5801 elif scope_type == 'user':
5792 5802 return f'user:{scope_id}'
5793 5803 elif scope_type == 'user-group':
5794 5804 return f'user-group:{scope_id}'
5795 5805 else:
5796 5806 return scope_type
5797 5807
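# For example (sketch): FileStore.get_scope('repo', 42) returns 'repo:42',
# while an unrecognized scope_type is passed through unchanged.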
5798 5808 @classmethod
5799 5809 def get_by_store_uid(cls, file_store_uid, safe=False):
5800 5810 if safe:
5801 5811 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5802 5812 else:
5803 5813 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5804 5814
5805 5815 @classmethod
5806 5816 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5807 5817 file_description='', enabled=True, hidden=False, check_acl=True,
5808 5818 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5809 5819
5810 5820 store_entry = FileStore()
5811 5821 store_entry.file_uid = file_uid
5812 5822 store_entry.file_display_name = file_display_name
5813 5823 store_entry.file_org_name = filename
5814 5824 store_entry.file_size = file_size
5815 5825 store_entry.file_hash = file_hash
5816 5826 store_entry.file_description = file_description
5817 5827
5818 5828 store_entry.check_acl = check_acl
5819 5829 store_entry.enabled = enabled
5820 5830 store_entry.hidden = hidden
5821 5831
5822 5832 store_entry.user_id = user_id
5823 5833 store_entry.scope_user_id = scope_user_id
5824 5834 store_entry.scope_repo_id = scope_repo_id
5825 5835 store_entry.scope_repo_group_id = scope_repo_group_id
5826 5836
5827 5837 return store_entry
5828 5838
5829 5839 @classmethod
5830 5840 def store_metadata(cls, file_store_id, args, commit=True):
5831 5841 file_store = FileStore.get(file_store_id)
5832 5842 if file_store is None:
5833 5843 return
5834 5844
5835 5845 for section, key, value, value_type in args:
5836 5846 has_key = FileStoreMetadata().query() \
5837 5847 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5838 5848 .filter(FileStoreMetadata.file_store_meta_section == section) \
5839 5849 .filter(FileStoreMetadata.file_store_meta_key == key) \
5840 5850 .scalar()
5841 5851 if has_key:
5842 5852 msg = 'key `{}` already defined under section `{}` for this file.'\
5843 5853 .format(key, section)
5844 5854 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5845 5855
5846 5856 # NOTE(marcink): raises ArtifactMetadataBadValueType
5847 5857 FileStoreMetadata.valid_value_type(value_type)
5848 5858
5849 5859 meta_entry = FileStoreMetadata()
5850 5860 meta_entry.file_store = file_store
5851 5861 meta_entry.file_store_meta_section = section
5852 5862 meta_entry.file_store_meta_key = key
5853 5863 meta_entry.file_store_meta_value_type = value_type
5854 5864 meta_entry.file_store_meta_value = value
5855 5865
5856 5866 Session().add(meta_entry)
5857 5867
5858 5868 try:
5859 5869 if commit:
5860 5870 Session().commit()
5861 5871 except IntegrityError:
5862 5872 Session().rollback()
5863 5873 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5864 5874
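# Illustrative sketch (hypothetical values; `store_entry` is an existing
# FileStore row): `args` is an iterable of (section, key, value, value_type)
# tuples, where value_type must map to a key in FileStoreMetadata.SETTINGS_TYPES.
#
#   FileStore.store_metadata(
#       store_entry.file_store_id,
#       [('scan', 'virus-checked', 'true', 'bool'),
#        ('scan', 'engine', 'clamav', 'str')])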
    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        FileStore().query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __json__(self):
        data = {
            'filename': self.file_display_name,
            'filename_org': self.file_org_name,
            'file_uid': self.file_uid,
            'description': self.file_description,
            'hidden': self.hidden,
            'size': self.file_size,
            'created_on': self.created_on,
            'uploaded_by': self.upload_user.get_api_data(details='basic'),
            'downloaded_times': self.accessed_count,
            'sha256': self.file_hash,
            'metadata': self.file_metadata,
        }

        return data

    def __repr__(self):
        return f'<FileStore({self.file_store_id})>'


class FileStoreMetadata(Base, BaseModel):
    __tablename__ = 'file_store_metadata'
    __table_args__ = (
        UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
        Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
        Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
        base_table_args
    )
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_str,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }

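    # Clarifying note, added (not part of the original source): the keys above are
    # the base value types accepted by valid_value_type(); a '.encrypted' suffix
    # such as 'unicode.encrypted' is additionally handled by the value property below.
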
    file_store_meta_id = Column(
        "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _file_store_meta_section = Column(
        "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_section_hash = Column(
        "file_store_meta_section_hash", String(255),
        nullable=True, unique=None, default=None)
    _file_store_meta_key = Column(
        "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_key_hash = Column(
        "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
    _file_store_meta_value = Column(
        "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_value_type = Column(
        "file_store_meta_value_type", String(255), nullable=True, unique=None,
        default='unicode')

    file_store_id = Column(
        'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
        nullable=True, unique=None, default=None)

    file_store = relationship('FileStore', lazy='joined', viewonly=True)

    @classmethod
    def valid_value_type(cls, value):
        if value.split('.')[0] not in cls.SETTINGS_TYPES:
            raise ArtifactMetadataBadValueType(
                'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))

    @hybrid_property
    def file_store_meta_section(self):
        return self._file_store_meta_section

    @file_store_meta_section.setter
    def file_store_meta_section(self, value):
        self._file_store_meta_section = value
        self._file_store_meta_section_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_key(self):
        return self._file_store_meta_key

    @file_store_meta_key.setter
    def file_store_meta_key(self, value):
        self._file_store_meta_key = value
        self._file_store_meta_key_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_value(self):
        val = self._file_store_meta_value

        if self._file_store_meta_value_type:
            # e.g. unicode.encrypted == unicode
            _type = self._file_store_meta_value_type.split('.')[0]
            # decode the encrypted value if it's an encrypted field type
            if '.encrypted' in self._file_store_meta_value_type:
                cipher = EncryptedTextValue()
                val = safe_str(cipher.process_result_value(val, None))
            # do final type conversion
            converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
            val = converter(val)

        return val

    @file_store_meta_value.setter
    def file_store_meta_value(self, val):
        val = safe_str(val)
        # encode the encrypted value
        if '.encrypted' in self.file_store_meta_value_type:
            cipher = EncryptedTextValue()
            val = safe_str(cipher.process_bind_param(val, None))
        self._file_store_meta_value = val

    @hybrid_property
    def file_store_meta_value_type(self):
        return self._file_store_meta_value_type

    @file_store_meta_value_type.setter
    def file_store_meta_value_type(self, val):
        # e.g. unicode.encrypted
        self.valid_value_type(val)
        self._file_store_meta_value_type = val

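    # Illustrative sketch, added (not part of the original source): with a value
    # type of 'unicode.encrypted' the raw value is encrypted on write and
    # transparently decrypted and type-converted on read, e.g.
    #   meta = FileStoreMetadata()
    #   meta.file_store_meta_value_type = 'unicode.encrypted'
    #   meta.file_store_meta_value = 'secret-token'   # stored encrypted
    #   meta.file_store_meta_value                    # -> 'secret-token'
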
    def __json__(self):
        data = {
            'artifact': self.file_store.file_uid,
            'section': self.file_store_meta_section,
            'key': self.file_store_meta_key,
            'value': self.file_store_meta_value,
        }

        return data

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.cls_name, self.file_store_meta_section,
                                    self.file_store_meta_key, self.file_store_meta_value)


class DbMigrateVersion(Base, BaseModel):
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()

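# Illustrative note, added (not part of the original source):
# DbMigrateVersion.set_version() is typically invoked from an interactive
# (ishell) session, e.g. DbMigrateVersion.set_version(116), where 116 stands
# for a placeholder schema version number.
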
class DbSession(Base, BaseModel):
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return f'<DB:DbSession({self.id})>'

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)