##// END OF EJS Templates
shadow-repos: skip init of full repo to generate shadow repo path.
marcink -
r3931:2a9f3cf9 default
parent child Browse files
Show More
@@ -1,678 +1,675 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 23 It's implemented with basic auth function
24 24 """
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import importlib
30 30 from functools import wraps
31 31 from StringIO import StringIO
32 32 from lxml import etree
33 33
34 34 import time
35 35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36 36
37 37 from pyramid.httpexceptions import (
38 38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 39 from zope.cachedescriptors.property import Lazy as LazyProperty
40 40
41 41 import rhodecode
42 42 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
43 43 from rhodecode.lib import rc_cache
44 44 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 45 from rhodecode.lib.base import (
46 46 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 47 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
48 48 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
49 49 from rhodecode.lib.middleware import appenlight
50 50 from rhodecode.lib.middleware.utils import scm_app_http
51 51 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
52 52 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
53 53 from rhodecode.lib.vcs.conf import settings as vcs_settings
54 54 from rhodecode.lib.vcs.backends import base
55 55
56 56 from rhodecode.model import meta
57 57 from rhodecode.model.db import User, Repository, PullRequest
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.pull_request import PullRequestModel
60 60 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64
def extract_svn_txn_id(acl_repo_name, data):
    """
    Extract the svn txn_id from XML data POSTed by an svn client during a
    MERGE request and turn it into a cache key scoped to *acl_repo_name*.

    Returns ``None`` when no txn_id is present or parsing fails; errors are
    logged, never raised.
    """
    txn_pattern = re.compile(r'/txn/(?P<txn_id>.*)')
    try:
        document = etree.fromstring(data)
        # walk <{DAV:}source>/<{DAV:}href> elements looking for a /txn/ path
        for element in document:
            if element.tag != '{DAV:}source':
                continue
            for child in element:
                if child.tag != '{DAV:}href':
                    continue
                found = txn_pattern.search(child.text)
                if found:
                    raw_txn_id = found.groupdict()['txn_id']
                    return rc_cache.utils.compute_key_from_params(
                        acl_repo_name, raw_txn_id)
    except Exception:
        log.exception('Failed to extract txn_id')
85 85
86 86
def initialize_generator(factory):
    """
    Initializes the returned generator by draining its first element.

    This can be used to give a generator an initializer, which is the code
    up to the first yield statement. This decorator enforces that the first
    produced element has the value ``"__init__"`` to make its special
    purpose very explicit in the using code.

    :param factory: generator function to wrap
    :raises ValueError: if the generator yields nothing, or its first
        yielded element is not ``"__init__"``
    """

    @wraps(factory)
    def wrapper(*args, **kwargs):
        gen = factory(*args, **kwargs)
        try:
            # builtin next() works on both py2.6+ and py3, unlike gen.next()
            init = next(gen)
        except StopIteration:
            raise ValueError('Generator must yield at least one element.')
        if init != "__init__":
            raise ValueError('First yielded element must be "__init__".')
        return gen
    return wrapper
108 108
109 109
class SimpleVCS(object):
    """Common functionality for SCM HTTP handlers."""

    # VCS type identifier, overridden by subclasses; passed along to
    # vcs_operation_context() as `scm`
    SCM = 'unknown'

    # repo name used for ACL/permission checks
    acl_repo_name = None
    # repo name exactly as extracted from the request URL
    url_repo_name = None
    # repo name handed to the VCS backend; for shadow repos this is a
    # filesystem path rather than a plain name
    vcs_repo_name = None
    rc_extras = {}

    # We have to handle requests to shadow repositories different than requests
    # to normal repositories. Therefore we have to distinguish them. To do this
    # we use this regex which will match only on URLs pointing to shadow
    # repositories. Raw strings are used so that `\d` is a regex escape, not
    # an (invalid) python string escape.
    shadow_repo_re = re.compile(
        r'(?P<groups>(?:{slug_pat}/)*)'  # repo groups
        r'(?P<target>{slug_pat})/'  # target repo
        r'pull-request/(?P<pr_id>\d+)/'  # pull request
        r'repository$'  # shadow repo
        .format(slug_pat=SLUG_RE.pattern))
130 130
131 131 def __init__(self, config, registry):
132 132 self.registry = registry
133 133 self.config = config
134 134 # re-populated by specialized middleware
135 135 self.repo_vcs_config = base.Config()
136 136 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
137 137
138 138 registry.rhodecode_settings = self.rhodecode_settings
139 139 # authenticate this VCS request using authfunc
140 140 auth_ret_code_detection = \
141 141 str2bool(self.config.get('auth_ret_code_detection', False))
142 142 self.authenticate = BasicAuth(
143 143 '', authenticate, registry, config.get('auth_ret_code'),
144 144 auth_ret_code_detection)
145 145 self.ip_addr = '0.0.0.0'
146 146
147 147 @LazyProperty
148 148 def global_vcs_config(self):
149 149 try:
150 150 return VcsSettingsModel().get_ui_settings_as_config_obj()
151 151 except Exception:
152 152 return base.Config()
153 153
154 154 @property
155 155 def base_path(self):
156 156 settings_path = self.repo_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
157 157
158 158 if not settings_path:
159 159 settings_path = self.global_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
160 160
161 161 if not settings_path:
162 162 # try, maybe we passed in explicitly as config option
163 163 settings_path = self.config.get('base_path')
164 164
165 165 if not settings_path:
166 166 raise ValueError('FATAL: base_path is empty')
167 167 return settings_path
168 168
    def set_repo_names(self, environ):
        """
        This will populate the attributes acl_repo_name, url_repo_name,
        vcs_repo_name and is_shadow_repo. In case of requests to normal (non
        shadow) repositories all names are equal. In case of requests to a
        shadow repository the acl-name points to the target repo of the pull
        request and the vcs-name points to the shadow repo file system path.
        The url-name is always the URL used by the vcs client program.

        Example in case of a shadow repo:
            acl_repo_name = RepoGroup/MyRepo
            url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
            vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
        """
        # First we set the repo name from URL for all attributes. This is the
        # default if handling normal (non shadow) repo requests.
        self.url_repo_name = self._get_repository_name(environ)
        self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
        self.is_shadow_repo = False

        # Check if this is a request to a shadow repository.
        match = self.shadow_repo_re.match(self.url_repo_name)
        if match:
            match_dict = match.groupdict()

            # Build acl repo name from regex match.
            acl_repo_name = safe_unicode('{groups}{target}'.format(
                groups=match_dict['groups'] or '',
                target=match_dict['target']))

            # Retrieve pull request instance by ID from regex match.
            pull_request = PullRequest.get(match_dict['pr_id'])

            # Only proceed if we got a pull request and if acl repo name from
            # URL equals the target repo name of the pull request.
            if pull_request and (acl_repo_name == pull_request.target_repo.repo_name):

                # Get file system path to shadow repository. This avoids a
                # full scm_instance() init of the target repo just to build
                # the shadow path.
                workspace_id = PullRequestModel()._workspace_id(pull_request)
                vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)

                # Store names for later usage.
                self.vcs_repo_name = vcs_repo_name
                self.acl_repo_name = acl_repo_name
                self.is_shadow_repo = True

        log.debug('Setting all VCS repository names: %s', {
            'acl_repo_name': self.acl_repo_name,
            'url_repo_name': self.url_repo_name,
            'vcs_repo_name': self.vcs_repo_name,
        })
223 220
224 221 @property
225 222 def scm_app(self):
226 223 custom_implementation = self.config['vcs.scm_app_implementation']
227 224 if custom_implementation == 'http':
228 225 log.debug('Using HTTP implementation of scm app.')
229 226 scm_app_impl = scm_app_http
230 227 else:
231 228 log.debug('Using custom implementation of scm_app: "{}"'.format(
232 229 custom_implementation))
233 230 scm_app_impl = importlib.import_module(custom_implementation)
234 231 return scm_app_impl
235 232
236 233 def _get_by_id(self, repo_name):
237 234 """
238 235 Gets a special pattern _<ID> from clone url and tries to replace it
239 236 with a repository_name for support of _<ID> non changeable urls
240 237 """
241 238
242 239 data = repo_name.split('/')
243 240 if len(data) >= 2:
244 241 from rhodecode.model.repo import RepoModel
245 242 by_id_match = RepoModel().get_repo_by_id(repo_name)
246 243 if by_id_match:
247 244 data[1] = by_id_match.repo_name
248 245
249 246 return safe_str('/'.join(data))
250 247
    def _invalidate_cache(self, repo_name):
        """
        Sets cache for this repository for invalidation on next access

        :param repo_name: full repo name, also a cache key
        """
        ScmModel().mark_for_invalidation(repo_name)
258 255
259 256 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
260 257 db_repo = Repository.get_by_repo_name(repo_name)
261 258 if not db_repo:
262 259 log.debug('Repository `%s` not found inside the database.',
263 260 repo_name)
264 261 return False
265 262
266 263 if db_repo.repo_type != scm_type:
267 264 log.warning(
268 265 'Repository `%s` have incorrect scm_type, expected %s got %s',
269 266 repo_name, db_repo.repo_type, scm_type)
270 267 return False
271 268
272 269 config = db_repo._config
273 270 config.set('extensions', 'largefiles', '')
274 271 return is_valid_repo(
275 272 repo_name, base_path,
276 273 explicit_scm=scm_type, expect_scm=scm_type, config=config)
277 274
278 275 def valid_and_active_user(self, user):
279 276 """
280 277 Checks if that user is not empty, and if it's actually object it checks
281 278 if he's active.
282 279
283 280 :param user: user object or None
284 281 :return: boolean
285 282 """
286 283 if user is None:
287 284 return False
288 285
289 286 elif user.active:
290 287 return True
291 288
292 289 return False
293 290
    @property
    def is_shadow_repo_dir(self):
        # True when the shadow repository directory (vcs_repo_name holds a
        # filesystem path for shadow repos) actually exists on disk
        return os.path.isdir(self.vcs_repo_name)
297 294
    def _check_permission(self, action, user, auth_user, repo_name, ip_addr=None,
                          plugin_id='', plugin_cache_active=False, cache_ttl=0):
        """
        Checks permissions using action (push/pull) user and repository
        name. If plugin_cache and ttl is set it will use the plugin which
        authenticated the user to store the cached permissions result for N
        amount of seconds as in cache_ttl

        :param action: push or pull action
        :param user: user instance
        :param repo_name: repository name
        :return: boolean, True when access is allowed
        """

        log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
                  plugin_id, plugin_cache_active, cache_ttl)

        user_id = user.user_id
        cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        # NOTE(review): compute_perm_vcs closes over `user` and `auth_user`;
        # only the explicit arguments below take part in the cache key —
        # confirm that is intended when caching is active.
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
                                               expiration_time=cache_ttl,
                                               condition=plugin_cache_active)
        def compute_perm_vcs(
                cache_name, plugin_id, action, user_id, repo_name, ip_addr):

            log.debug('auth: calculating permission access now...')
            # check IP
            inherit = user.inherit_default_permissions
            ip_allowed = AuthUser.check_ip_allowed(
                user_id, ip_addr, inherit_from_default=inherit)
            if ip_allowed:
                log.info('Access for IP:%s allowed', ip_addr)
            else:
                return False

            if action == 'push':
                # push requires write or admin level
                perms = ('repository.write', 'repository.admin')
                if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
                    return False

            else:
                # any other action need at least read permission
                perms = (
                    'repository.read', 'repository.write', 'repository.admin')
                if not HasPermissionAnyMiddleware(*perms)(auth_user, repo_name):
                    return False

            return True

        start = time.time()
        log.debug('Running plugin `%s` permissions check', plugin_id)

        # for environ based auth, password can be empty, but then the validation is
        # on the server that fills in the env data needed for authentication
        perm_result = compute_perm_vcs(
            'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)

        auth_time = time.time() - start
        log.debug('Permissions for plugin `%s` completed in %.4fs, '
                  'expiration time of fetched cache %.1fs.',
                  plugin_id, auth_time, cache_ttl)

        return perm_result
362 359
363 360 def _get_http_scheme(self, environ):
364 361 try:
365 362 return environ['wsgi.url_scheme']
366 363 except Exception:
367 364 log.exception('Failed to read http scheme')
368 365 return 'http'
369 366
370 367 def _check_ssl(self, environ, start_response):
371 368 """
372 369 Checks the SSL check flag and returns False if SSL is not present
373 370 and required True otherwise
374 371 """
375 372 org_proto = environ['wsgi._org_proto']
376 373 # check if we have SSL required ! if not it's a bad request !
377 374 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
378 375 if require_ssl and org_proto == 'http':
379 376 log.debug(
380 377 'Bad request: detected protocol is `%s` and '
381 378 'SSL/HTTPS is required.', org_proto)
382 379 return False
383 380 return True
384 381
385 382 def _get_default_cache_ttl(self):
386 383 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
387 384 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
388 385 plugin_settings = plugin.get_settings()
389 386 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
390 387 plugin_settings) or (False, 0)
391 388 return plugin_cache_active, cache_ttl
392 389
    def __call__(self, environ, start_response):
        """
        WSGI entry point. Delegates to :meth:`_handle_request`; any
        unhandled exception is logged, tracked via appenlight and turned
        into a 500 response. The DB session is always removed afterwards.
        """
        try:
            return self._handle_request(environ, start_response)
        except Exception:
            log.exception("Exception while handling request")
            appenlight.track_exception(environ)
            return HTTPInternalServerError()(environ, start_response)
        finally:
            meta.Session.remove()
402 399
403 400 def _handle_request(self, environ, start_response):
404 401 if not self._check_ssl(environ, start_response):
405 402 reason = ('SSL required, while RhodeCode was unable '
406 403 'to detect this as SSL request')
407 404 log.debug('User not allowed to proceed, %s', reason)
408 405 return HTTPNotAcceptable(reason)(environ, start_response)
409 406
410 407 if not self.url_repo_name:
411 408 log.warning('Repository name is empty: %s', self.url_repo_name)
412 409 # failed to get repo name, we fail now
413 410 return HTTPNotFound()(environ, start_response)
414 411 log.debug('Extracted repo name is %s', self.url_repo_name)
415 412
416 413 ip_addr = get_ip_addr(environ)
417 414 user_agent = get_user_agent(environ)
418 415 username = None
419 416
420 417 # skip passing error to error controller
421 418 environ['pylons.status_code_redirect'] = True
422 419
423 420 # ======================================================================
424 421 # GET ACTION PULL or PUSH
425 422 # ======================================================================
426 423 action = self._get_action(environ)
427 424
428 425 # ======================================================================
429 426 # Check if this is a request to a shadow repository of a pull request.
430 427 # In this case only pull action is allowed.
431 428 # ======================================================================
432 429 if self.is_shadow_repo and action != 'pull':
433 430 reason = 'Only pull action is allowed for shadow repositories.'
434 431 log.debug('User not allowed to proceed, %s', reason)
435 432 return HTTPNotAcceptable(reason)(environ, start_response)
436 433
437 434 # Check if the shadow repo actually exists, in case someone refers
438 435 # to it, and it has been deleted because of successful merge.
439 436 if self.is_shadow_repo and not self.is_shadow_repo_dir:
440 437 log.debug(
441 438 'Shadow repo detected, and shadow repo dir `%s` is missing',
442 439 self.is_shadow_repo_dir)
443 440 return HTTPNotFound()(environ, start_response)
444 441
445 442 # ======================================================================
446 443 # CHECK ANONYMOUS PERMISSION
447 444 # ======================================================================
448 445 detect_force_push = False
449 446 check_branch_perms = False
450 447 if action in ['pull', 'push']:
451 448 user_obj = anonymous_user = User.get_default_user()
452 449 auth_user = user_obj.AuthUser()
453 450 username = anonymous_user.username
454 451 if anonymous_user.active:
455 452 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
456 453 # ONLY check permissions if the user is activated
457 454 anonymous_perm = self._check_permission(
458 455 action, anonymous_user, auth_user, self.acl_repo_name, ip_addr,
459 456 plugin_id='anonymous_access',
460 457 plugin_cache_active=plugin_cache_active,
461 458 cache_ttl=cache_ttl,
462 459 )
463 460 else:
464 461 anonymous_perm = False
465 462
466 463 if not anonymous_user.active or not anonymous_perm:
467 464 if not anonymous_user.active:
468 465 log.debug('Anonymous access is disabled, running '
469 466 'authentication')
470 467
471 468 if not anonymous_perm:
472 469 log.debug('Not enough credentials to access this '
473 470 'repository as anonymous user')
474 471
475 472 username = None
476 473 # ==============================================================
477 474 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
478 475 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
479 476 # ==============================================================
480 477
481 478 # try to auth based on environ, container auth methods
482 479 log.debug('Running PRE-AUTH for container based authentication')
483 480 pre_auth = authenticate(
484 481 '', '', environ, VCS_TYPE, registry=self.registry,
485 482 acl_repo_name=self.acl_repo_name)
486 483 if pre_auth and pre_auth.get('username'):
487 484 username = pre_auth['username']
488 485 log.debug('PRE-AUTH got %s as username', username)
489 486 if pre_auth:
490 487 log.debug('PRE-AUTH successful from %s',
491 488 pre_auth.get('auth_data', {}).get('_plugin'))
492 489
493 490 # If not authenticated by the container, running basic auth
494 491 # before inject the calling repo_name for special scope checks
495 492 self.authenticate.acl_repo_name = self.acl_repo_name
496 493
497 494 plugin_cache_active, cache_ttl = False, 0
498 495 plugin = None
499 496 if not username:
500 497 self.authenticate.realm = self.authenticate.get_rc_realm()
501 498
502 499 try:
503 500 auth_result = self.authenticate(environ)
504 501 except (UserCreationError, NotAllowedToCreateUserError) as e:
505 502 log.error(e)
506 503 reason = safe_str(e)
507 504 return HTTPNotAcceptable(reason)(environ, start_response)
508 505
509 506 if isinstance(auth_result, dict):
510 507 AUTH_TYPE.update(environ, 'basic')
511 508 REMOTE_USER.update(environ, auth_result['username'])
512 509 username = auth_result['username']
513 510 plugin = auth_result.get('auth_data', {}).get('_plugin')
514 511 log.info(
515 512 'MAIN-AUTH successful for user `%s` from %s plugin',
516 513 username, plugin)
517 514
518 515 plugin_cache_active, cache_ttl = auth_result.get(
519 516 'auth_data', {}).get('_ttl_cache') or (False, 0)
520 517 else:
521 518 return auth_result.wsgi_application(environ, start_response)
522 519
523 520 # ==============================================================
524 521 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
525 522 # ==============================================================
526 523 user = User.get_by_username(username)
527 524 if not self.valid_and_active_user(user):
528 525 return HTTPForbidden()(environ, start_response)
529 526 username = user.username
530 527 user_id = user.user_id
531 528
532 529 # check user attributes for password change flag
533 530 user_obj = user
534 531 auth_user = user_obj.AuthUser()
535 532 if user_obj and user_obj.username != User.DEFAULT_USER and \
536 533 user_obj.user_data.get('force_password_change'):
537 534 reason = 'password change required'
538 535 log.debug('User not allowed to authenticate, %s', reason)
539 536 return HTTPNotAcceptable(reason)(environ, start_response)
540 537
541 538 # check permissions for this repository
542 539 perm = self._check_permission(
543 540 action, user, auth_user, self.acl_repo_name, ip_addr,
544 541 plugin, plugin_cache_active, cache_ttl)
545 542 if not perm:
546 543 return HTTPForbidden()(environ, start_response)
547 544 environ['rc_auth_user_id'] = user_id
548 545
549 546 if action == 'push':
550 547 perms = auth_user.get_branch_permissions(self.acl_repo_name)
551 548 if perms:
552 549 check_branch_perms = True
553 550 detect_force_push = True
554 551
555 552 # extras are injected into UI object and later available
556 553 # in hooks executed by RhodeCode
557 554 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
558 555
559 556 extras = vcs_operation_context(
560 557 environ, repo_name=self.acl_repo_name, username=username,
561 558 action=action, scm=self.SCM, check_locking=check_locking,
562 559 is_shadow_repo=self.is_shadow_repo, check_branch_perms=check_branch_perms,
563 560 detect_force_push=detect_force_push
564 561 )
565 562
566 563 # ======================================================================
567 564 # REQUEST HANDLING
568 565 # ======================================================================
569 566 repo_path = os.path.join(
570 567 safe_str(self.base_path), safe_str(self.vcs_repo_name))
571 568 log.debug('Repository path is %s', repo_path)
572 569
573 570 fix_PATH()
574 571
575 572 log.info(
576 573 '%s action on %s repo "%s" by "%s" from %s %s',
577 574 action, self.SCM, safe_str(self.url_repo_name),
578 575 safe_str(username), ip_addr, user_agent)
579 576
580 577 return self._generate_vcs_response(
581 578 environ, start_response, repo_path, extras, action)
582 579
    @initialize_generator
    def _generate_vcs_response(
            self, environ, start_response, repo_path, extras, action):
        """
        Returns a generator for the response content.

        This method is implemented as a generator, so that it can trigger
        the cache validation after all content sent back to the client. It
        also handles the locking exceptions which will be triggered when
        the first chunk is produced by the underlying WSGI application.
        """
        txn_id = ''
        if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
            # case for SVN, we want to re-use the callback daemon port
            # so we use the txn_id, for this we peek the body, and still save
            # it as wsgi.input
            # NOTE(review): this reads the whole request body into memory —
            # presumably SVN MERGE bodies are small; confirm for large commits.
            data = environ['wsgi.input'].read()
            environ['wsgi.input'] = StringIO(data)
            txn_id = extract_svn_txn_id(self.acl_repo_name, data)

        callback_daemon, extras = self._prepare_callback_daemon(
            extras, environ, action, txn_id=txn_id)
        log.debug('HOOKS extras is %s', extras)

        http_scheme = self._get_http_scheme(environ)

        config = self._create_config(extras, self.acl_repo_name, scheme=http_scheme)
        app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
        with callback_daemon:
            app.rc_extras = extras

            try:
                response = app(environ, start_response)
            finally:
                # This statement works together with the decorator
                # "initialize_generator" above. The decorator ensures that
                # we hit the first yield statement before the generator is
                # returned back to the WSGI server. This is needed to
                # ensure that the call to "app" above triggers the
                # needed callback to "start_response" before the
                # generator is actually used.
                yield "__init__"

            # iter content
            for chunk in response:
                yield chunk

        try:
            # invalidate cache on push
            if action == 'push':
                self._invalidate_cache(self.url_repo_name)
        finally:
            meta.Session.remove()
636 633
    def _get_repository_name(self, environ):
        """Get repository name out of the environment

        :param environ: WSGI environment
        :return: repository name as used in the request URL
        """
        raise NotImplementedError()
643 640
    def _get_action(self, environ):
        """Map request commands into a pull or push command.

        :param environ: WSGI environment
        :return: 'pull' or 'push'
        """
        raise NotImplementedError()
650 647
    def _create_wsgi_app(self, repo_path, repo_name, config):
        """Return the WSGI app that will finally handle the request.

        Implemented by the scm-specific subclass.
        """
        raise NotImplementedError()
654 651
    def _create_config(self, extras, repo_name, scheme='http'):
        """Create a safe config representation.

        Implemented by the scm-specific subclass.
        """
        raise NotImplementedError()
658 655
    def _should_use_callback_daemon(self, extras, environ, action):
        # hook point for subclasses: return False to skip starting the
        # hooks callback daemon for actions that don't require it
        return True
661 658
662 659 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
663 660 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
664 661 if not self._should_use_callback_daemon(extras, environ, action):
665 662 # disable callback daemon for actions that don't require it
666 663 direct_calls = True
667 664
668 665 return prepare_callback_daemon(
669 666 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
670 667 host=vcs_settings.HOOKS_HOST, use_direct_calls=direct_calls, txn_id=txn_id)
671 668
672 669
673 670 def _should_check_locking(query_string):
674 671 # this is kind of hacky, but due to how mercurial handles client-server
675 672 # server see all operation on commit; bookmarks, phases and
676 673 # obsolescence marker in different transaction, we don't want to check
677 674 # locking on those
678 675 return query_string not in ['cmd=listkeys']
@@ -1,1896 +1,1899 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from pyramid import compat
38 38
39 39 import rhodecode
40 40 from rhodecode.translation import lazy_ugettext
41 41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 42 from rhodecode.lib.vcs import connection
43 43 from rhodecode.lib.vcs.utils import author_name, author_email
44 44 from rhodecode.lib.vcs.conf import settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 50 RepositoryError)
51 51
52 52
# Module-wide logger.
log = logging.getLogger(__name__)


# Default git/hg style file modes (octal): regular file and executable file.
FILEMODE_DEFAULT = 0o100644
FILEMODE_EXECUTABLE = 0o100755
# 40-character null sha used to denote the empty/initial commit.
EMPTY_COMMIT_ID = '0' * 40

# Lightweight descriptor of a VCS reference (branch/tag/bookmark + commit id).
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61 61
62 62
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked.
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
114 114
115 115
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
144 144
145 145
class MergeResponse(object):
    """
    Outcome of a server-side merge attempt.

    Carries whether the merge is possible / was executed, the resulting merge
    reference (if any), a ``MergeFailureReason`` code and optional metadata
    that is interpolated into the user-facing status message.
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        """
        :param possible: whether the merge can be (or could have been) performed
        :param executed: whether the merge was actually executed
        :param merge_ref: reference of the merge result, if any
        :param failure_reason: one of the ``MergeFailureReason`` codes
        :param metadata: optional dict interpolated into the status message
        """
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        # NOTE: merge_ref and metadata are not part of the comparison.
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    @property
    def label(self):
        """Symbolic name of ``failure_reason``, e.g. ``'MERGE_FAILED'``."""
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
        try:
            return msg.format(**self.metadata)
        except Exception:
            # missing/odd metadata keys must never break rendering;
            # fall back to the unformatted template
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        """Serialize the response (including the rendered message) to a dict."""
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
231 231
232 232
233 233 class BaseRepository(object):
234 234 """
235 235 Base Repository for final backends
236 236
237 237 .. attribute:: DEFAULT_BRANCH_NAME
238 238
239 239 name of default branch (i.e. "trunk" for svn, "master" for git etc.
240 240
241 241 .. attribute:: commit_ids
242 242
243 243 list of all available commit ids, in ascending order
244 244
245 245 .. attribute:: path
246 246
247 247 absolute path to the repository
248 248
249 249 .. attribute:: bookmarks
250 250
251 251 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
252 252 there are no bookmarks or the backend implementation does not support
253 253 bookmarks.
254 254
255 255 .. attribute:: tags
256 256
257 257 Mapping from name to :term:`Commit ID` of the tag.
258 258
259 259 """
260 260
261 261 DEFAULT_BRANCH_NAME = None
262 262 DEFAULT_CONTACT = u"Unknown"
263 263 DEFAULT_DESCRIPTION = u"unknown"
264 264 EMPTY_COMMIT_ID = '0' * 40
265 265
266 266 path = None
267 267
268 268 _is_empty = None
269 269 _commit_ids = {}
270 270
    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        raise NotImplementedError
286 286
287 287 def __repr__(self):
288 288 return '<%s at %s>' % (self.__class__.__name__, self.path)
289 289
290 290 def __len__(self):
291 291 return self.count()
292 292
293 293 def __eq__(self, other):
294 294 same_instance = isinstance(other, self.__class__)
295 295 return same_instance and other.path == self.path
296 296
297 297 def __ne__(self, other):
298 298 return not self.__eq__(other)
299 299
300 300 def get_create_shadow_cache_pr_path(self, db_repo):
301 301 path = db_repo.cached_diffs_dir
302 302 if not os.path.exists(path):
303 303 os.makedirs(path, 0o755)
304 304 return path
305 305
306 306 @classmethod
307 307 def get_default_config(cls, default=None):
308 308 config = Config()
309 309 if default and isinstance(default, list):
310 310 for section, key, val in default:
311 311 config.set(section, key, val)
312 312 return config
313 313
314 314 @LazyProperty
315 315 def _remote(self):
316 316 raise NotImplementedError
317 317
318 318 def _heads(self, branch=None):
319 319 return []
320 320
321 321 @LazyProperty
322 322 def EMPTY_COMMIT(self):
323 323 return EmptyCommit(self.EMPTY_COMMIT_ID)
324 324
325 325 @LazyProperty
326 326 def alias(self):
327 327 for k, v in settings.BACKENDS.items():
328 328 if v.split('.')[-1] == str(self.__class__.__name__):
329 329 return k
330 330
331 331 @LazyProperty
332 332 def name(self):
333 333 return safe_unicode(os.path.basename(self.path))
334 334
335 335 @LazyProperty
336 336 def description(self):
337 337 raise NotImplementedError
338 338
339 339 def refs(self):
340 340 """
341 341 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 342 for this repository
343 343 """
344 344 return dict(
345 345 branches=self.branches,
346 346 branches_closed=self.branches_closed,
347 347 tags=self.tags,
348 348 bookmarks=self.bookmarks
349 349 )
350 350
351 351 @LazyProperty
352 352 def branches(self):
353 353 """
354 354 A `dict` which maps branch names to commit ids.
355 355 """
356 356 raise NotImplementedError
357 357
358 358 @LazyProperty
359 359 def branches_closed(self):
360 360 """
361 361 A `dict` which maps tags names to commit ids.
362 362 """
363 363 raise NotImplementedError
364 364
365 365 @LazyProperty
366 366 def bookmarks(self):
367 367 """
368 368 A `dict` which maps tags names to commit ids.
369 369 """
370 370 raise NotImplementedError
371 371
372 372 @LazyProperty
373 373 def tags(self):
374 374 """
375 375 A `dict` which maps tags names to commit ids.
376 376 """
377 377 raise NotImplementedError
378 378
379 379 @LazyProperty
380 380 def size(self):
381 381 """
382 382 Returns combined size in bytes for all repository files
383 383 """
384 384 tip = self.get_commit()
385 385 return tip.size
386 386
387 387 def size_at_commit(self, commit_id):
388 388 commit = self.get_commit(commit_id)
389 389 return commit.size
390 390
    def _check_for_empty(self):
        """Return True when the repository has no commits.

        An empty local commit-id cache is confirmed against the remote,
        since the cache alone may simply not be populated yet.
        """
        no_commits = len(self._commit_ids) == 0
        if no_commits:
            # check on remote to be sure
            return self._remote.is_empty()
        else:
            return False
398 398
    def is_empty(self):
        """Return True when the repository has no commits.

        The answer is cached per instance in production; under tests the
        check always re-runs because test repos mutate frequently.
        """
        if rhodecode.is_test:
            return self._check_for_empty()

        if self._is_empty is None:
            # cache empty for production, but not tests
            self._is_empty = self._check_for_empty()

        return self._is_empty
408 408
409 409 @staticmethod
410 410 def check_url(url, config):
411 411 """
412 412 Function will check given url and try to verify if it's a valid
413 413 link.
414 414 """
415 415 raise NotImplementedError
416 416
417 417 @staticmethod
418 418 def is_valid_repository(path):
419 419 """
420 420 Check if given `path` contains a valid repository of this backend
421 421 """
422 422 raise NotImplementedError
423 423
424 424 # ==========================================================================
425 425 # COMMITS
426 426 # ==========================================================================
427 427
428 428 @CachedProperty
429 429 def commit_ids(self):
430 430 raise NotImplementedError
431 431
    def append_commit_id(self, commit_id):
        """Register a newly created *commit_id* and refresh cached state."""
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])

        # clear cache
        self._invalidate_prop_cache('commit_ids')
        self._is_empty = False
439 439
440 440 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
441 441 """
442 442 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
443 443 are both None, most recent commit is returned.
444 444
445 445 :param pre_load: Optional. List of commit attributes to load.
446 446
447 447 :raises ``EmptyRepositoryError``: if there are no commits
448 448 """
449 449 raise NotImplementedError
450 450
451 451 def __iter__(self):
452 452 for commit_id in self.commit_ids:
453 453 yield self.get_commit(commit_id=commit_id)
454 454
455 455 def get_commits(
456 456 self, start_id=None, end_id=None, start_date=None, end_date=None,
457 457 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
458 458 """
459 459 Returns iterator of `BaseCommit` objects from start to end
460 460 not inclusive. This should behave just like a list, ie. end is not
461 461 inclusive.
462 462
463 463 :param start_id: None or str, must be a valid commit id
464 464 :param end_id: None or str, must be a valid commit id
465 465 :param start_date:
466 466 :param end_date:
467 467 :param branch_name:
468 468 :param show_hidden:
469 469 :param pre_load:
470 470 :param translate_tags:
471 471 """
472 472 raise NotImplementedError
473 473
474 474 def __getitem__(self, key):
475 475 """
476 476 Allows index based access to the commit objects of this repository.
477 477 """
478 478 pre_load = ["author", "branch", "date", "message", "parents"]
479 479 if isinstance(key, slice):
480 480 return self._get_range(key, pre_load)
481 481 return self.get_commit(commit_idx=key, pre_load=pre_load)
482 482
483 483 def _get_range(self, slice_obj, pre_load):
484 484 for commit_id in self.commit_ids.__getitem__(slice_obj):
485 485 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
486 486
487 487 def count(self):
488 488 return len(self.commit_ids)
489 489
490 490 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
491 491 """
492 492 Creates and returns a tag for the given ``commit_id``.
493 493
494 494 :param name: name for new tag
495 495 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
496 496 :param commit_id: commit id for which new tag would be created
497 497 :param message: message of the tag's commit
498 498 :param date: date of tag's commit
499 499
500 500 :raises TagAlreadyExistError: if tag with same name already exists
501 501 """
502 502 raise NotImplementedError
503 503
504 504 def remove_tag(self, name, user, message=None, date=None):
505 505 """
506 506 Removes tag with the given ``name``.
507 507
508 508 :param name: name of the tag to be removed
509 509 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
510 510 :param message: message of the tag's removal commit
511 511 :param date: date of tag's removal commit
512 512
513 513 :raises TagDoesNotExistError: if tag with given name does not exists
514 514 """
515 515 raise NotImplementedError
516 516
517 517 def get_diff(
518 518 self, commit1, commit2, path=None, ignore_whitespace=False,
519 519 context=3, path1=None):
520 520 """
521 521 Returns (git like) *diff*, as plain text. Shows changes introduced by
522 522 `commit2` since `commit1`.
523 523
524 524 :param commit1: Entry point from which diff is shown. Can be
525 525 ``self.EMPTY_COMMIT`` - in this case, patch showing all
526 526 the changes since empty state of the repository until `commit2`
527 527 :param commit2: Until which commit changes should be shown.
528 528 :param path: Can be set to a path of a file to create a diff of that
529 529 file. If `path1` is also set, this value is only associated to
530 530 `commit2`.
531 531 :param ignore_whitespace: If set to ``True``, would not show whitespace
532 532 changes. Defaults to ``False``.
533 533 :param context: How many lines before/after changed lines should be
534 534 shown. Defaults to ``3``.
535 535 :param path1: Can be set to a path to associate with `commit1`. This
536 536 parameter works only for backends which support diff generation for
537 537 different paths. Other backends will raise a `ValueError` if `path1`
538 538 is set and has a different value than `path`.
539 539 :param file_path: filter this diff by given path pattern
540 540 """
541 541 raise NotImplementedError
542 542
543 543 def strip(self, commit_id, branch=None):
544 544 """
545 545 Strip given commit_id from the repository
546 546 """
547 547 raise NotImplementedError
548 548
549 549 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
550 550 """
551 551 Return a latest common ancestor commit if one exists for this repo
552 552 `commit_id1` vs `commit_id2` from `repo2`.
553 553
554 554 :param commit_id1: Commit it from this repository to use as a
555 555 target for the comparison.
556 556 :param commit_id2: Source commit id to use for comparison.
557 557 :param repo2: Source repository to use for comparison.
558 558 """
559 559 raise NotImplementedError
560 560
561 561 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
562 562 """
563 563 Compare this repository's revision `commit_id1` with `commit_id2`.
564 564
565 565 Returns a tuple(commits, ancestor) that would be merged from
566 566 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
567 567 will be returned as ancestor.
568 568
569 569 :param commit_id1: Commit it from this repository to use as a
570 570 target for the comparison.
571 571 :param commit_id2: Source commit id to use for comparison.
572 572 :param repo2: Source repository to use for comparison.
573 573 :param merge: If set to ``True`` will do a merge compare which also
574 574 returns the common ancestor.
575 575 :param pre_load: Optional. List of commit attributes to load.
576 576 """
577 577 raise NotImplementedError
578 578
    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
            the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
            merged.
        :param source_ref: `source_ref` points to the topmost commit from
            the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
            on top of the target instead of being merged.
        :param close_branch: If `True` branch will be closed before merging it
        """
        if dry_run:
            # dry runs may omit author/message; substitute harmless defaults
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError as exc:
            log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
            # backend errors are reported as a failed merge, never propagated
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN,
                metadata={'exception': str(exc)})
631 631
632 632 def _merge_repo(self, repo_id, workspace_id, target_ref,
633 633 source_repo, source_ref, merge_message,
634 634 merger_name, merger_email, dry_run=False,
635 635 use_rebase=False, close_branch=False):
636 636 """Internal implementation of merge."""
637 637 raise NotImplementedError
638 638
639 639 def _maybe_prepare_merge_workspace(
640 640 self, repo_id, workspace_id, target_ref, source_ref):
641 641 """
642 642 Create the merge workspace.
643 643
644 644 :param workspace_id: `workspace_id` unique identifier.
645 645 """
646 646 raise NotImplementedError
647 647
648 def _get_legacy_shadow_repository_path(self, workspace_id):
648 @classmethod
649 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
649 650 """
650 651 Legacy version that was used before. We still need it for
651 652 backward compat
652 653 """
653 654 return os.path.join(
654 os.path.dirname(self.path),
655 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
655 os.path.dirname(repo_path),
656 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
656 657
657 def _get_shadow_repository_path(self, repo_id, workspace_id):
658 @classmethod
659 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
658 660 # The name of the shadow repository must start with '.', so it is
659 661 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
660 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
662 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
661 663 if os.path.exists(legacy_repository_path):
662 664 return legacy_repository_path
663 665 else:
664 666 return os.path.join(
665 os.path.dirname(self.path),
667 os.path.dirname(repo_path),
666 668 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
667 669
668 670 def cleanup_merge_workspace(self, repo_id, workspace_id):
669 671 """
670 672 Remove merge workspace.
671 673
672 674 This function MUST not fail in case there is no workspace associated to
673 675 the given `workspace_id`.
674 676
675 677 :param workspace_id: `workspace_id` unique identifier.
676 678 """
677 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
679 shadow_repository_path = self._get_shadow_repository_path(
680 self.path, repo_id, workspace_id)
678 681 shadow_repository_path_del = '{}.{}.delete'.format(
679 682 shadow_repository_path, time.time())
680 683
681 684 # move the shadow repo, so it never conflicts with the one used.
682 685 # we use this method because shutil.rmtree had some edge case problems
683 686 # removing symlinked repositories
684 687 if not os.path.isdir(shadow_repository_path):
685 688 return
686 689
687 690 shutil.move(shadow_repository_path, shadow_repository_path_del)
688 691 try:
689 692 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
690 693 except Exception:
691 694 log.exception('Failed to gracefully remove shadow repo under %s',
692 695 shadow_repository_path_del)
693 696 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
694 697
695 698 # ========== #
696 699 # COMMIT API #
697 700 # ========== #
698 701
699 702 @LazyProperty
700 703 def in_memory_commit(self):
701 704 """
702 705 Returns :class:`InMemoryCommit` object for this repository.
703 706 """
704 707 raise NotImplementedError
705 708
706 709 # ======================== #
707 710 # UTILITIES FOR SUBCLASSES #
708 711 # ======================== #
709 712
710 713 def _validate_diff_commits(self, commit1, commit2):
711 714 """
712 715 Validates that the given commits are related to this repository.
713 716
714 717 Intended as a utility for sub classes to have a consistent validation
715 718 of input parameters in methods like :meth:`get_diff`.
716 719 """
717 720 self._validate_commit(commit1)
718 721 self._validate_commit(commit2)
719 722 if (isinstance(commit1, EmptyCommit) and
720 723 isinstance(commit2, EmptyCommit)):
721 724 raise ValueError("Cannot compare two empty commits")
722 725
723 726 def _validate_commit(self, commit):
724 727 if not isinstance(commit, BaseCommit):
725 728 raise TypeError(
726 729 "%s is not of type BaseCommit" % repr(commit))
727 730 if commit.repository != self and not isinstance(commit, EmptyCommit):
728 731 raise ValueError(
729 732 "Commit %s must be a valid commit from this repository %s, "
730 733 "related to this repository instead %s." %
731 734 (commit, self, commit.repository))
732 735
733 736 def _validate_commit_id(self, commit_id):
734 737 if not isinstance(commit_id, compat.string_types):
735 738 raise TypeError("commit_id must be a string value")
736 739
737 740 def _validate_commit_idx(self, commit_idx):
738 741 if not isinstance(commit_idx, (int, long)):
739 742 raise TypeError("commit_idx must be a numeric value")
740 743
741 744 def _validate_branch_name(self, branch_name):
742 745 if branch_name and branch_name not in self.branches_all:
743 746 msg = ("Branch %s not found in %s" % (branch_name, self))
744 747 raise BranchDoesNotExistError(msg)
745 748
746 749 #
747 750 # Supporting deprecated API parts
748 751 # TODO: johbo: consider to move this into a mixin
749 752 #
750 753
751 754 @property
752 755 def EMPTY_CHANGESET(self):
753 756 warnings.warn(
754 757 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
755 758 return self.EMPTY_COMMIT_ID
756 759
757 760 @property
758 761 def revisions(self):
759 762 warnings.warn("Use commits attribute instead", DeprecationWarning)
760 763 return self.commit_ids
761 764
762 765 @revisions.setter
763 766 def revisions(self, value):
764 767 warnings.warn("Use commits attribute instead", DeprecationWarning)
765 768 self.commit_ids = value
766 769
767 770 def get_changeset(self, revision=None, pre_load=None):
768 771 warnings.warn("Use get_commit instead", DeprecationWarning)
769 772 commit_id = None
770 773 commit_idx = None
771 774 if isinstance(revision, compat.string_types):
772 775 commit_id = revision
773 776 else:
774 777 commit_idx = revision
775 778 return self.get_commit(
776 779 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
777 780
778 781 def get_changesets(
779 782 self, start=None, end=None, start_date=None, end_date=None,
780 783 branch_name=None, pre_load=None):
781 784 warnings.warn("Use get_commits instead", DeprecationWarning)
782 785 start_id = self._revision_to_commit(start)
783 786 end_id = self._revision_to_commit(end)
784 787 return self.get_commits(
785 788 start_id=start_id, end_id=end_id, start_date=start_date,
786 789 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
787 790
788 791 def _revision_to_commit(self, revision):
789 792 """
790 793 Translates a revision to a commit_id
791 794
792 795 Helps to support the old changeset based API which allows to use
793 796 commit ids and commit indices interchangeable.
794 797 """
795 798 if revision is None:
796 799 return revision
797 800
798 801 if isinstance(revision, compat.string_types):
799 802 commit_id = revision
800 803 else:
801 804 commit_id = self.commit_ids[revision]
802 805 return commit_id
803 806
804 807 @property
805 808 def in_memory_changeset(self):
806 809 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
807 810 return self.in_memory_commit
808 811
809 812 def get_path_permissions(self, username):
810 813 """
811 814 Returns a path permission checker or None if not supported
812 815
813 816 :param username: session user name
814 817 :return: an instance of BasePathPermissionChecker or None
815 818 """
816 819 return None
817 820
818 821 def install_hooks(self, force=False):
819 822 return self._remote.install_hooks(force)
820 823
821 824 def get_hooks_info(self):
822 825 return self._remote.get_hooks_info()
823 826
824 827
825 828 class BaseCommit(object):
826 829 """
827 830 Each backend should implement it's commit representation.
828 831
829 832 **Attributes**
830 833
831 834 ``repository``
832 835 repository object within which commit exists
833 836
834 837 ``id``
835 838 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
836 839 just ``tip``.
837 840
838 841 ``raw_id``
839 842 raw commit representation (i.e. full 40 length sha for git
840 843 backend)
841 844
842 845 ``short_id``
843 846 shortened (if apply) version of ``raw_id``; it would be simple
844 847 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
845 848 as ``raw_id`` for subversion
846 849
847 850 ``idx``
848 851 commit index
849 852
850 853 ``files``
851 854 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
852 855
853 856 ``dirs``
854 857 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
855 858
856 859 ``nodes``
857 860 combined list of ``Node`` objects
858 861
859 862 ``author``
860 863 author of the commit, as unicode
861 864
862 865 ``message``
863 866 message of the commit, as unicode
864 867
865 868 ``parents``
866 869 list of parent commits
867 870
868 871 """
869 872
870 873 branch = None
871 874 """
872 875 Depending on the backend this should be set to the branch name of the
873 876 commit. Backends not supporting branches on commits should leave this
874 877 value as ``None``.
875 878 """
876 879
877 880 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
878 881 """
879 882 This template is used to generate a default prefix for repository archives
880 883 if no prefix has been specified.
881 884 """
882 885
883 886 def __str__(self):
884 887 return '<%s at %s:%s>' % (
885 888 self.__class__.__name__, self.idx, self.short_id)
886 889
887 890 def __repr__(self):
888 891 return self.__str__()
889 892
890 893 def __unicode__(self):
891 894 return u'%s:%s' % (self.idx, self.short_id)
892 895
893 896 def __eq__(self, other):
894 897 same_instance = isinstance(other, self.__class__)
895 898 return same_instance and self.raw_id == other.raw_id
896 899
897 900 def __json__(self):
898 901 parents = []
899 902 try:
900 903 for parent in self.parents:
901 904 parents.append({'raw_id': parent.raw_id})
902 905 except NotImplementedError:
903 906 # empty commit doesn't have parents implemented
904 907 pass
905 908
906 909 return {
907 910 'short_id': self.short_id,
908 911 'raw_id': self.raw_id,
909 912 'revision': self.idx,
910 913 'message': self.message,
911 914 'date': self.date,
912 915 'author': self.author,
913 916 'parents': parents,
914 917 'branch': self.branch
915 918 }
916 919
917 920 def __getstate__(self):
918 921 d = self.__dict__.copy()
919 922 d.pop('_remote', None)
920 923 d.pop('repository', None)
921 924 return d
922 925
923 926 def _get_refs(self):
924 927 return {
925 928 'branches': [self.branch] if self.branch else [],
926 929 'bookmarks': getattr(self, 'bookmarks', []),
927 930 'tags': self.tags
928 931 }
929 932
930 933 @LazyProperty
931 934 def last(self):
932 935 """
933 936 ``True`` if this is last commit in repository, ``False``
934 937 otherwise; trying to access this attribute while there is no
935 938 commits would raise `EmptyRepositoryError`
936 939 """
937 940 if self.repository is None:
938 941 raise CommitError("Cannot check if it's most recent commit")
939 942 return self.raw_id == self.repository.commit_ids[-1]
940 943
941 944 @LazyProperty
942 945 def parents(self):
943 946 """
944 947 Returns list of parent commits.
945 948 """
946 949 raise NotImplementedError
947 950
948 951 @LazyProperty
949 952 def first_parent(self):
950 953 """
951 954 Returns list of parent commits.
952 955 """
953 956 return self.parents[0] if self.parents else EmptyCommit()
954 957
955 958 @property
956 959 def merge(self):
957 960 """
958 961 Returns boolean if commit is a merge.
959 962 """
960 963 return len(self.parents) > 1
961 964
962 965 @LazyProperty
963 966 def children(self):
964 967 """
965 968 Returns list of child commits.
966 969 """
967 970 raise NotImplementedError
968 971
969 972 @LazyProperty
970 973 def id(self):
971 974 """
972 975 Returns string identifying this commit.
973 976 """
974 977 raise NotImplementedError
975 978
976 979 @LazyProperty
977 980 def raw_id(self):
978 981 """
979 982 Returns raw string identifying this commit.
980 983 """
981 984 raise NotImplementedError
982 985
983 986 @LazyProperty
984 987 def short_id(self):
985 988 """
986 989 Returns shortened version of ``raw_id`` attribute, as string,
987 990 identifying this commit, useful for presentation to users.
988 991 """
989 992 raise NotImplementedError
990 993
991 994 @LazyProperty
992 995 def idx(self):
993 996 """
994 997 Returns integer identifying this commit.
995 998 """
996 999 raise NotImplementedError
997 1000
998 1001 @LazyProperty
999 1002 def committer(self):
1000 1003 """
1001 1004 Returns committer for this commit
1002 1005 """
1003 1006 raise NotImplementedError
1004 1007
1005 1008 @LazyProperty
1006 1009 def committer_name(self):
1007 1010 """
1008 1011 Returns committer name for this commit
1009 1012 """
1010 1013
1011 1014 return author_name(self.committer)
1012 1015
1013 1016 @LazyProperty
1014 1017 def committer_email(self):
1015 1018 """
1016 1019 Returns committer email address for this commit
1017 1020 """
1018 1021
1019 1022 return author_email(self.committer)
1020 1023
1021 1024 @LazyProperty
1022 1025 def author(self):
1023 1026 """
1024 1027 Returns author for this commit
1025 1028 """
1026 1029
1027 1030 raise NotImplementedError
1028 1031
1029 1032 @LazyProperty
1030 1033 def author_name(self):
1031 1034 """
1032 1035 Returns author name for this commit
1033 1036 """
1034 1037
1035 1038 return author_name(self.author)
1036 1039
1037 1040 @LazyProperty
1038 1041 def author_email(self):
1039 1042 """
1040 1043 Returns author email address for this commit
1041 1044 """
1042 1045
1043 1046 return author_email(self.author)
1044 1047
1045 1048 def get_file_mode(self, path):
1046 1049 """
1047 1050 Returns stat mode of the file at `path`.
1048 1051 """
1049 1052 raise NotImplementedError
1050 1053
1051 1054 def is_link(self, path):
1052 1055 """
1053 1056 Returns ``True`` if given `path` is a symlink
1054 1057 """
1055 1058 raise NotImplementedError
1056 1059
1057 1060 def is_node_binary(self, path):
1058 1061 """
1059 1062 Returns ``True`` is given path is a binary file
1060 1063 """
1061 1064 raise NotImplementedError
1062 1065
1063 1066 def get_file_content(self, path):
1064 1067 """
1065 1068 Returns content of the file at the given `path`.
1066 1069 """
1067 1070 raise NotImplementedError
1068 1071
1069 1072 def get_file_content_streamed(self, path):
1070 1073 """
1071 1074 returns a streaming response from vcsserver with file content
1072 1075 """
1073 1076 raise NotImplementedError
1074 1077
1075 1078 def get_file_size(self, path):
1076 1079 """
1077 1080 Returns size of the file at the given `path`.
1078 1081 """
1079 1082 raise NotImplementedError
1080 1083
1081 1084 def get_path_commit(self, path, pre_load=None):
1082 1085 """
1083 1086 Returns last commit of the file at the given `path`.
1084 1087
1085 1088 :param pre_load: Optional. List of commit attributes to load.
1086 1089 """
1087 1090 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1088 1091 if not commits:
1089 1092 raise RepositoryError(
1090 1093 'Failed to fetch history for path {}. '
1091 1094 'Please check if such path exists in your repository'.format(
1092 1095 path))
1093 1096 return commits[0]
1094 1097
1095 1098 def get_path_history(self, path, limit=None, pre_load=None):
1096 1099 """
1097 1100 Returns history of file as reversed list of :class:`BaseCommit`
1098 1101 objects for which file at given `path` has been modified.
1099 1102
1100 1103 :param limit: Optional. Allows to limit the size of the returned
1101 1104 history. This is intended as a hint to the underlying backend, so
1102 1105 that it can apply optimizations depending on the limit.
1103 1106 :param pre_load: Optional. List of commit attributes to load.
1104 1107 """
1105 1108 raise NotImplementedError
1106 1109
1107 1110 def get_file_annotate(self, path, pre_load=None):
1108 1111 """
1109 1112 Returns a generator of four element tuples with
1110 1113 lineno, sha, commit lazy loader and line
1111 1114
1112 1115 :param pre_load: Optional. List of commit attributes to load.
1113 1116 """
1114 1117 raise NotImplementedError
1115 1118
1116 1119 def get_nodes(self, path):
1117 1120 """
1118 1121 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1119 1122 state of commit at the given ``path``.
1120 1123
1121 1124 :raises ``CommitError``: if node at the given ``path`` is not
1122 1125 instance of ``DirNode``
1123 1126 """
1124 1127 raise NotImplementedError
1125 1128
1126 1129 def get_node(self, path):
1127 1130 """
1128 1131 Returns ``Node`` object from the given ``path``.
1129 1132
1130 1133 :raises ``NodeDoesNotExistError``: if there is no node at the given
1131 1134 ``path``
1132 1135 """
1133 1136 raise NotImplementedError
1134 1137
1135 1138 def get_largefile_node(self, path):
1136 1139 """
1137 1140 Returns the path to largefile from Mercurial/Git-lfs storage.
1138 1141 or None if it's not a largefile node
1139 1142 """
1140 1143 return None
1141 1144
1142 1145 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1143 1146 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1144 1147 """
1145 1148 Creates an archive containing the contents of the repository.
1146 1149
1147 1150 :param archive_dest_path: path to the file which to create the archive.
1148 1151 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1149 1152 :param prefix: name of root directory in archive.
1150 1153 Default is repository name and commit's short_id joined with dash:
1151 1154 ``"{repo_name}-{short_id}"``.
1152 1155 :param write_metadata: write a metadata file into archive.
1153 1156 :param mtime: custom modification time for archive creation, defaults
1154 1157 to time.time() if not given.
1155 1158 :param archive_at_path: pack files at this path (default '/')
1156 1159
1157 1160 :raise VCSError: If prefix has a problem.
1158 1161 """
1159 1162 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1160 1163 if kind not in allowed_kinds:
1161 1164 raise ImproperArchiveTypeError(
1162 1165 'Archive kind (%s) not supported use one of %s' %
1163 1166 (kind, allowed_kinds))
1164 1167
1165 1168 prefix = self._validate_archive_prefix(prefix)
1166 1169
1167 1170 mtime = mtime is not None or time.mktime(self.date.timetuple())
1168 1171
1169 1172 file_info = []
1170 1173 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1171 1174 for _r, _d, files in cur_rev.walk(archive_at_path):
1172 1175 for f in files:
1173 1176 f_path = os.path.join(prefix, f.path)
1174 1177 file_info.append(
1175 1178 (f_path, f.mode, f.is_link(), f.raw_bytes))
1176 1179
1177 1180 if write_metadata:
1178 1181 metadata = [
1179 1182 ('repo_name', self.repository.name),
1180 1183 ('commit_id', self.raw_id),
1181 1184 ('mtime', mtime),
1182 1185 ('branch', self.branch),
1183 1186 ('tags', ','.join(self.tags)),
1184 1187 ]
1185 1188 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1186 1189 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1187 1190
1188 1191 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1189 1192
1190 1193 def _validate_archive_prefix(self, prefix):
1191 1194 if prefix is None:
1192 1195 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1193 1196 repo_name=safe_str(self.repository.name),
1194 1197 short_id=self.short_id)
1195 1198 elif not isinstance(prefix, str):
1196 1199 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1197 1200 elif prefix.startswith('/'):
1198 1201 raise VCSError("Prefix cannot start with leading slash")
1199 1202 elif prefix.strip() == '':
1200 1203 raise VCSError("Prefix cannot be empty")
1201 1204 return prefix
1202 1205
1203 1206 @LazyProperty
1204 1207 def root(self):
1205 1208 """
1206 1209 Returns ``RootNode`` object for this commit.
1207 1210 """
1208 1211 return self.get_node('')
1209 1212
1210 1213 def next(self, branch=None):
1211 1214 """
1212 1215 Returns next commit from current, if branch is gives it will return
1213 1216 next commit belonging to this branch
1214 1217
1215 1218 :param branch: show commits within the given named branch
1216 1219 """
1217 1220 indexes = xrange(self.idx + 1, self.repository.count())
1218 1221 return self._find_next(indexes, branch)
1219 1222
1220 1223 def prev(self, branch=None):
1221 1224 """
1222 1225 Returns previous commit from current, if branch is gives it will
1223 1226 return previous commit belonging to this branch
1224 1227
1225 1228 :param branch: show commit within the given named branch
1226 1229 """
1227 1230 indexes = xrange(self.idx - 1, -1, -1)
1228 1231 return self._find_next(indexes, branch)
1229 1232
1230 1233 def _find_next(self, indexes, branch=None):
1231 1234 if branch and self.branch != branch:
1232 1235 raise VCSError('Branch option used on commit not belonging '
1233 1236 'to that branch')
1234 1237
1235 1238 for next_idx in indexes:
1236 1239 commit = self.repository.get_commit(commit_idx=next_idx)
1237 1240 if branch and branch != commit.branch:
1238 1241 continue
1239 1242 return commit
1240 1243 raise CommitDoesNotExistError
1241 1244
1242 1245 def diff(self, ignore_whitespace=True, context=3):
1243 1246 """
1244 1247 Returns a `Diff` object representing the change made by this commit.
1245 1248 """
1246 1249 parent = self.first_parent
1247 1250 diff = self.repository.get_diff(
1248 1251 parent, self,
1249 1252 ignore_whitespace=ignore_whitespace,
1250 1253 context=context)
1251 1254 return diff
1252 1255
1253 1256 @LazyProperty
1254 1257 def added(self):
1255 1258 """
1256 1259 Returns list of added ``FileNode`` objects.
1257 1260 """
1258 1261 raise NotImplementedError
1259 1262
1260 1263 @LazyProperty
1261 1264 def changed(self):
1262 1265 """
1263 1266 Returns list of modified ``FileNode`` objects.
1264 1267 """
1265 1268 raise NotImplementedError
1266 1269
1267 1270 @LazyProperty
1268 1271 def removed(self):
1269 1272 """
1270 1273 Returns list of removed ``FileNode`` objects.
1271 1274 """
1272 1275 raise NotImplementedError
1273 1276
1274 1277 @LazyProperty
1275 1278 def size(self):
1276 1279 """
1277 1280 Returns total number of bytes from contents of all filenodes.
1278 1281 """
1279 1282 return sum((node.size for node in self.get_filenodes_generator()))
1280 1283
1281 1284 def walk(self, topurl=''):
1282 1285 """
1283 1286 Similar to os.walk method. Insted of filesystem it walks through
1284 1287 commit starting at given ``topurl``. Returns generator of tuples
1285 1288 (topnode, dirnodes, filenodes).
1286 1289 """
1287 1290 topnode = self.get_node(topurl)
1288 1291 if not topnode.is_dir():
1289 1292 return
1290 1293 yield (topnode, topnode.dirs, topnode.files)
1291 1294 for dirnode in topnode.dirs:
1292 1295 for tup in self.walk(dirnode.path):
1293 1296 yield tup
1294 1297
1295 1298 def get_filenodes_generator(self):
1296 1299 """
1297 1300 Returns generator that yields *all* file nodes.
1298 1301 """
1299 1302 for topnode, dirs, files in self.walk():
1300 1303 for node in files:
1301 1304 yield node
1302 1305
1303 1306 #
1304 1307 # Utilities for sub classes to support consistent behavior
1305 1308 #
1306 1309
1307 1310 def no_node_at_path(self, path):
1308 1311 return NodeDoesNotExistError(
1309 1312 u"There is no file nor directory at the given path: "
1310 1313 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1311 1314
1312 1315 def _fix_path(self, path):
1313 1316 """
1314 1317 Paths are stored without trailing slash so we need to get rid off it if
1315 1318 needed.
1316 1319 """
1317 1320 return path.rstrip('/')
1318 1321
1319 1322 #
1320 1323 # Deprecated API based on changesets
1321 1324 #
1322 1325
1323 1326 @property
1324 1327 def revision(self):
1325 1328 warnings.warn("Use idx instead", DeprecationWarning)
1326 1329 return self.idx
1327 1330
1328 1331 @revision.setter
1329 1332 def revision(self, value):
1330 1333 warnings.warn("Use idx instead", DeprecationWarning)
1331 1334 self.idx = value
1332 1335
1333 1336 def get_file_changeset(self, path):
1334 1337 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1335 1338 return self.get_path_commit(path)
1336 1339
1337 1340
1338 1341 class BaseChangesetClass(type):
1339 1342
1340 1343 def __instancecheck__(self, instance):
1341 1344 return isinstance(instance, BaseCommit)
1342 1345
1343 1346
1344 1347 class BaseChangeset(BaseCommit):
1345 1348
1346 1349 __metaclass__ = BaseChangesetClass
1347 1350
1348 1351 def __new__(cls, *args, **kwargs):
1349 1352 warnings.warn(
1350 1353 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1351 1354 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1352 1355
1353 1356
1354 1357 class BaseInMemoryCommit(object):
1355 1358 """
1356 1359 Represents differences between repository's state (most recent head) and
1357 1360 changes made *in place*.
1358 1361
1359 1362 **Attributes**
1360 1363
1361 1364 ``repository``
1362 1365 repository object for this in-memory-commit
1363 1366
1364 1367 ``added``
1365 1368 list of ``FileNode`` objects marked as *added*
1366 1369
1367 1370 ``changed``
1368 1371 list of ``FileNode`` objects marked as *changed*
1369 1372
1370 1373 ``removed``
1371 1374 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1372 1375 *removed*
1373 1376
1374 1377 ``parents``
1375 1378 list of :class:`BaseCommit` instances representing parents of
1376 1379 in-memory commit. Should always be 2-element sequence.
1377 1380
1378 1381 """
1379 1382
1380 1383 def __init__(self, repository):
1381 1384 self.repository = repository
1382 1385 self.added = []
1383 1386 self.changed = []
1384 1387 self.removed = []
1385 1388 self.parents = []
1386 1389
1387 1390 def add(self, *filenodes):
1388 1391 """
1389 1392 Marks given ``FileNode`` objects as *to be committed*.
1390 1393
1391 1394 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1392 1395 latest commit
1393 1396 :raises ``NodeAlreadyAddedError``: if node with same path is already
1394 1397 marked as *added*
1395 1398 """
1396 1399 # Check if not already marked as *added* first
1397 1400 for node in filenodes:
1398 1401 if node.path in (n.path for n in self.added):
1399 1402 raise NodeAlreadyAddedError(
1400 1403 "Such FileNode %s is already marked for addition"
1401 1404 % node.path)
1402 1405 for node in filenodes:
1403 1406 self.added.append(node)
1404 1407
    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        # Reject any node already staged for removal before touching state.
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        # A *change* needs at least one commit to change against.
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        # Stage the nodes; duplicates within `filenodes` also raise, since
        # earlier iterations append to self.changed.
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)
1433 1436
1434 1437 def remove(self, *filenodes):
1435 1438 """
1436 1439 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1437 1440 *removed* in next commit.
1438 1441
1439 1442 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1440 1443 be *removed*
1441 1444 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1442 1445 be *changed*
1443 1446 """
1444 1447 for node in filenodes:
1445 1448 if node.path in (n.path for n in self.removed):
1446 1449 raise NodeAlreadyRemovedError(
1447 1450 "Node is already marked to for removal at %s" % node.path)
1448 1451 if node.path in (n.path for n in self.changed):
1449 1452 raise NodeAlreadyChangedError(
1450 1453 "Node is already marked to be changed at %s" % node.path)
1451 1454 # We only mark node as *removed* - real removal is done by
1452 1455 # commit method
1453 1456 self.removed.append(node)
1454 1457
1455 1458 def reset(self):
1456 1459 """
1457 1460 Resets this instance to initial state (cleans ``added``, ``changed``
1458 1461 and ``removed`` lists).
1459 1462 """
1460 1463 self.added = []
1461 1464 self.changed = []
1462 1465 self.removed = []
1463 1466 self.parents = []
1464 1467
1465 1468 def get_ipaths(self):
1466 1469 """
1467 1470 Returns generator of paths from nodes marked as added, changed or
1468 1471 removed.
1469 1472 """
1470 1473 for node in itertools.chain(self.added, self.changed, self.removed):
1471 1474 yield node.path
1472 1475
1473 1476 def get_paths(self):
1474 1477 """
1475 1478 Returns list of paths from nodes marked as added, changed or removed.
1476 1479 """
1477 1480 return list(self.get_ipaths())
1478 1481
1479 1482 def check_integrity(self, parents=None):
1480 1483 """
1481 1484 Checks in-memory commit's integrity. Also, sets parents if not
1482 1485 already set.
1483 1486
1484 1487 :raises CommitError: if any error occurs (i.e.
1485 1488 ``NodeDoesNotExistError``).
1486 1489 """
1487 1490 if not self.parents:
1488 1491 parents = parents or []
1489 1492 if len(parents) == 0:
1490 1493 try:
1491 1494 parents = [self.repository.get_commit(), None]
1492 1495 except EmptyRepositoryError:
1493 1496 parents = [None, None]
1494 1497 elif len(parents) == 1:
1495 1498 parents += [None]
1496 1499 self.parents = parents
1497 1500
1498 1501 # Local parents, only if not None
1499 1502 parents = [p for p in self.parents if p]
1500 1503
1501 1504 # Check nodes marked as added
1502 1505 for p in parents:
1503 1506 for node in self.added:
1504 1507 try:
1505 1508 p.get_node(node.path)
1506 1509 except NodeDoesNotExistError:
1507 1510 pass
1508 1511 else:
1509 1512 raise NodeAlreadyExistsError(
1510 1513 "Node `%s` already exists at %s" % (node.path, p))
1511 1514
1512 1515 # Check nodes marked as changed
1513 1516 missing = set(self.changed)
1514 1517 not_changed = set(self.changed)
1515 1518 if self.changed and not parents:
1516 1519 raise NodeDoesNotExistError(str(self.changed[0].path))
1517 1520 for p in parents:
1518 1521 for node in self.changed:
1519 1522 try:
1520 1523 old = p.get_node(node.path)
1521 1524 missing.remove(node)
1522 1525 # if content actually changed, remove node from not_changed
1523 1526 if old.content != node.content:
1524 1527 not_changed.remove(node)
1525 1528 except NodeDoesNotExistError:
1526 1529 pass
1527 1530 if self.changed and missing:
1528 1531 raise NodeDoesNotExistError(
1529 1532 "Node `%s` marked as modified but missing in parents: %s"
1530 1533 % (node.path, parents))
1531 1534
1532 1535 if self.changed and not_changed:
1533 1536 raise NodeNotChangedError(
1534 1537 "Node `%s` wasn't actually changed (parents: %s)"
1535 1538 % (not_changed.pop().path, parents))
1536 1539
1537 1540 # Check nodes marked as removed
1538 1541 if self.removed and not parents:
1539 1542 raise NodeDoesNotExistError(
1540 1543 "Cannot remove node at %s as there "
1541 1544 "were no parents specified" % self.removed[0].path)
1542 1545 really_removed = set()
1543 1546 for p in parents:
1544 1547 for node in self.removed:
1545 1548 try:
1546 1549 p.get_node(node.path)
1547 1550 really_removed.add(node)
1548 1551 except CommitError:
1549 1552 pass
1550 1553 not_removed = set(self.removed) - really_removed
1551 1554 if not_removed:
1552 1555 # TODO: johbo: This code branch does not seem to be covered
1553 1556 raise NodeDoesNotExistError(
1554 1557 "Cannot remove node at %s from "
1555 1558 "following parents: %s" % (not_removed, parents))
1556 1559
1557 1560 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1558 1561 """
1559 1562 Performs in-memory commit (doesn't check workdir in any way) and
1560 1563 returns newly created :class:`BaseCommit`. Updates repository's
1561 1564 attribute `commits`.
1562 1565
1563 1566 .. note::
1564 1567
1565 1568 While overriding this method each backend's should call
1566 1569 ``self.check_integrity(parents)`` in the first place.
1567 1570
1568 1571 :param message: message of the commit
1569 1572 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1570 1573 :param parents: single parent or sequence of parents from which commit
1571 1574 would be derived
1572 1575 :param date: ``datetime.datetime`` instance. Defaults to
1573 1576 ``datetime.datetime.now()``.
1574 1577 :param branch: branch name, as string. If none given, default backend's
1575 1578 branch would be used.
1576 1579
1577 1580 :raises ``CommitError``: if any error occurs while committing
1578 1581 """
1579 1582 raise NotImplementedError
1580 1583
1581 1584
1582 1585 class BaseInMemoryChangesetClass(type):
1583 1586
1584 1587 def __instancecheck__(self, instance):
1585 1588 return isinstance(instance, BaseInMemoryCommit)
1586 1589
1587 1590
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """Deprecated alias for :class:`BaseInMemoryCommit`."""

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        # BUGFIX: the old message was copy-pasted and named the wrong pair
        # of classes ("Use BaseCommit instead of BaseInMemoryCommit").
        warnings.warn(
            "Use BaseInMemoryCommit instead of BaseInMemoryChangeset",
            DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1596 1599
1597 1600
class EmptyCommit(BaseCommit):
    """
    A dummy empty commit. It's possible to pass a hash when creating
    an EmptyCommit
    """

    def __init__(
            self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        # Default to the epoch so date-based code never sees None.
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """

        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # Resolve the backend's default branch name; None when no alias set.
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path, pre_load=None):
        # CONSISTENCY: accept `pre_load` like BaseCommit.get_path_commit so
        # callers can use either interchangeably; it is ignored here.
        return self

    def get_file_content(self, path):
        return u''

    def get_file_content_streamed(self, path):
        yield self.get_file_content()

    def get_file_size(self, path):
        return 0
1651 1654
1652 1655
1653 1656 class EmptyChangesetClass(type):
1654 1657
1655 1658 def __instancecheck__(self, instance):
1656 1659 return isinstance(instance, EmptyCommit)
1657 1660
1658 1661
class EmptyChangeset(EmptyCommit):
    """Deprecated alias for :class:`EmptyCommit`."""

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # BUGFIX: super() must be anchored at this class; the previous
        # `super(EmptyCommit, cls)` skipped EmptyCommit in the MRO.
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        # Old changeset API spelled `idx` as `revision`.
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1687 1690
1688 1691
1689 1692 class EmptyRepository(BaseRepository):
1690 1693 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1691 1694 pass
1692 1695
1693 1696 def get_diff(self, *args, **kwargs):
1694 1697 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1695 1698 return GitDiff('')
1696 1699
1697 1700
class CollectionGenerator(object):
    """
    Lazy, sliceable collection of commits identified by `commit_ids`,
    materialized one at a time through the repository backend.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None,
                 translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # BUGFIX: the constructor previously discarded `collection_size`
        # (always setting None; see old TODO "this isn't currently hooked
        # up"). Honor it so __len__ can report an externally-known size.
        self.collection_size = collection_size
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        # An explicitly provided size wins over counting commit_ids.
        if self.collection_size is not None:
            return self.collection_size
        return self.commit_ids.__len__()

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository
        """
        commit_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, commit_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1737 1740
1738 1741
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # Mapping: section name -> {option: value}
        self._values = {}

    def copy(self):
        """Return a deep-enough copy (sections are copied, values shared)."""
        duplicate = Config()
        for section, options in self._values.items():
            duplicate._values[section] = options.copy()
        return duplicate

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate (option, value) pairs of `section` (empty if missing)."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the value for option, or None when section/option absent."""
        return self._values.get(section, {}).get(option)

    def set(self, section, option, value):
        """Store `value`, creating the section on first use."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop every option of `section` (section itself stays present)."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        serialized = []
        for section, options in self._values.items():
            for option, value in options.items():
                serialized.append(
                    (safe_str(section), safe_str(option), safe_str(value)))
        return serialized
1783 1786 return items
1784 1787
1785 1788
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    # Backend-specific regexes; set by subclasses (e.g. GitDiff).
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        # Full raw diff text as produced by the backend.
        self.raw = raw_diff

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule
        """

        # Prepending '\n' guarantees the first chunk splits like the rest.
        diff_parts = ('\n' + self.raw).split('\ndiff --git')
        header = diff_parts[0]

        if self._meta_re:
            # NOTE(review): `match` is never used below; _meta_re matching
            # appears vestigial here -- confirm before removing.
            match = self._meta_re.match(header)

        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        # Lazily wrap each per-file part; the last chunk is flagged so
        # DiffChunk does not re-append the trailing newline.
        return (
            DiffChunk(chunk, self, cur_chunk == total_chunks)
            for cur_chunk, chunk in enumerate(chunks, start=1))
1818 1821
1819 1822
class DiffChunk(object):
    """A single per-file chunk split out of a backend :class:`Diff`."""

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk += '\n'

        # `_header_re` is backend specific and must match at chunk start;
        # its named groups become the parsed header dict.
        match = self._diff._header_re.match(chunk)
        self.header = match.groupdict()
        self.diff = chunk[match.end():]
        self.raw = chunk
1834 1837
1835 1838
class BasePathPermissionChecker(object):
    """Abstract base for per-path (file ACL) permission checkers."""

    @staticmethod
    def create_from_patterns(includes, excludes):
        """Build the cheapest checker satisfying the given fnmatch patterns."""
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # BUGFIX: `raise NotImplemented()` raised a TypeError, because
        # NotImplemented is a constant, not an exception type.
        raise NotImplementedError()

    def has_access(self, path):
        raise NotImplementedError()
1853 1856
1854 1857
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker that grants access to every path."""

    @property
    def has_full_access(self):
        # Unconditional full access by definition.
        return True

    def has_access(self, path):
        return True
1863 1866
1864 1867
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker that denies access to every path."""

    @property
    def has_full_access(self):
        # No access at all by definition.
        return False

    def has_access(self, path):
        return False
1873 1876
1874 1877
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """
    Checker driven by fnmatch include/exclude patterns; an exclude match
    always wins over an include match.
    """

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # Pre-compile patterns once; a falsy pattern list compiles to [].
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,1004 +1,1004 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
46 46
47 47
48 48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'master'
58 58
59 59 contact = BaseRepository.DEFAULT_CONTACT
60 60
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        :param repo_path: filesystem path of the repository
        :param config: vcs Config object; defaults to ``get_default_config()``
        :param create: create the repository on disk when it does not exist
        :param src_url: optional URL to clone/fetch from after creation
        :param do_workspace_checkout: also check out a working copy on pull
        :param with_wire: remote-wire options dict
        :param bare: initialize as a bare repository (no working copy)
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire or {"cache": False} # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout, bare)

        # caches
        self._commit_ids = {}  # commit_id -> index into self.commit_ids
72 72
    @LazyProperty
    def _remote(self):
        """Lazily created connection to the remote git backend for this repo."""
        # the repo path doubles as the remote-side repository identifier
        repo_id = self.path
        return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 77
    @LazyProperty
    def bare(self):
        """True when the repository is bare (no working copy); asked remotely."""
        return self._remote.bare()
81 81
    @LazyProperty
    def head(self):
        """Commit id the repository HEAD currently points at; asked remotely."""
        return self._remote.head()
85 85
    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the commit_id -> index lookup table in sync with the list
        self._rebuild_cache(commit_ids)
        return commit_ids
95 95
96 96 def _rebuild_cache(self, commit_ids):
97 97 self._commit_ids = dict((commit_id, index)
98 98 for index, commit_id in enumerate(commit_ids))
99 99
    def run_git_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as git command and returns tuple
        (stdout, stderr).

        :param cmd: git command to be executed
        :param opts: env options to pass into Subprocess command
        :raises ValueError: when ``cmd`` is not a list
        """
        if not isinstance(cmd, list):
            raise ValueError('cmd must be a list, got %s instead' % type(cmd))

        # callers may silence expected stderr noise (e.g. progress output)
        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_git_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of git command "%s":\n%s', cmd, err)
        return out, err
116 116
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that git will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # strip scheme prefixes such as 'git+' from e.g. git+http://host
        if '+' in url.split('://', 1)[0]:
            url = url.split('+', 1)[1]

        # Request the _remote to verify the url
        return connection.Git.check_url(url, config.serialize())
137 137
    @staticmethod
    def is_valid_repository(path):
        """Return True when *path* contains a git repository (bare or not)."""
        if os.path.isdir(os.path.join(path, '.git')):
            return True
        # check case of bare repository
        try:
            # instantiation runs path validation and raises VCSError otherwise
            GitRepository(path)
            return True
        except VCSError:
            pass
        return False
149 149
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        """
        Validate, optionally create, and optionally clone/pull the repository
        located at ``self.path``.

        :raises RepositoryError: when creating over an existing location,
            when combining ``bare`` with a workspace checkout, when the path
            is not a git repository, or when an OSError occurs underneath.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                os.makedirs(self.path, mode=0o755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # bare repository only allows a fetch and checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                # not creating: the path must already be a valid git repo
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
189 189
    def _get_all_commit_ids(self):
        """Fetch the full list of commit ids from the remote backend."""
        return self._remote.get_all_commit_ids()
192 192
    def _get_commit_ids(self, filters=None):
        """
        Return commit ids via ``git rev-list``, optionally narrowed by
        ``since``/``until`` date filters or a ``branch_name`` filter.
        """
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors

        head = self._remote.head(show_exc=False)

        if not head:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                # a branch filter replaces the default all-branches/tags scope
                rev_filter = []
                extra_filter.append(filters['branch_name'])
        rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
230 230
    def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
        """
        Resolve a commit reference — numeric index, short/full sha, 'tip',
        'HEAD' or a ref name — into a full commit id.

        :raises CommitDoesNotExistError: when the reference cannot be resolved
        """
        def is_null(value):
            # NOTE(review): counts '0' chars of the outer argument; truthy
            # when the value appears to be an all-zero (null) sha — confirm
            return len(value) == commit_id_or_idx.count('0')

        if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
            return self.commit_ids[-1]
        commit_missing_err = "Commit {} does not exist for `{}`".format(
            *map(safe_str, [commit_id_or_idx, self.name]))

        is_bstr = isinstance(commit_id_or_idx, (str, unicode))
        if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
            or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
            # short digit strings / ints are treated as positions in history
            try:
                commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
            except Exception:
                raise CommitDoesNotExistError(commit_missing_err)

        elif is_bstr:
            # Need to call remote to translate id for tagging scenario
            try:
                remote_data = self._remote.get_object(commit_id_or_idx)
                commit_id_or_idx = remote_data["commit_id"]
            except (CommitDoesNotExistError,):
                raise CommitDoesNotExistError(commit_missing_err)

        # Ensure we return full id
        if not SHA_PATTERN.match(str(commit_id_or_idx)):
            raise CommitDoesNotExistError(
                "Given commit id %s not recognized" % commit_id_or_idx)
        return commit_id_or_idx
261 261
262 262 def get_hook_location(self):
263 263 """
264 264 returns absolute path to location where hooks are stored
265 265 """
266 266 loc = os.path.join(self.path, 'hooks')
267 267 if not self.bare:
268 268 loc = os.path.join(self.path, '.git', 'hooks')
269 269 return loc
270 270
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # no commits yet: fall back to filesystem mtime of control files
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
282 282
283 283 def _get_fs_mtime(self):
284 284 idx_loc = '' if self.bare else '.git'
285 285 # fallback to filesystem
286 286 in_path = os.path.join(self.path, idx_loc, "index")
287 287 he_path = os.path.join(self.path, idx_loc, "HEAD")
288 288 if os.path.exists(in_path):
289 289 return os.stat(in_path).st_mtime
290 290 else:
291 291 return os.stat(he_path).st_mtime
292 292
    @LazyProperty
    def description(self):
        """Repository description text, or the default when none is set."""
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
297 297
298 298 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
299 299 if self.is_empty():
300 300 return OrderedDict()
301 301
302 302 result = []
303 303 for ref, sha in self._refs.iteritems():
304 304 if ref.startswith(prefix):
305 305 ref_name = ref
306 306 if strip_prefix:
307 307 ref_name = ref[len(prefix):]
308 308 result.append((safe_unicode(ref_name), sha))
309 309
310 310 def get_name(entry):
311 311 return entry[0]
312 312
313 313 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
314 314
    def _get_branches(self):
        """Return branch name -> sha mapping built from refs/heads/."""
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
317 317
    @CachedProperty
    def branches(self):
        # cached branch name -> sha mapping
        return self._get_branches()
321 321
    @CachedProperty
    def branches_closed(self):
        # git has no closed branches (a Mercurial concept); kept for interface parity
        return {}
325 325
    @CachedProperty
    def bookmarks(self):
        # git has no bookmarks (a Mercurial concept); kept for interface parity
        return {}
329 329
    @CachedProperty
    def branches_all(self):
        """Open and closed branches combined into one mapping."""
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches
336 336
    @CachedProperty
    def tags(self):
        # cached tag name -> sha mapping
        return self._get_tags()
340 340
    def _get_tags(self):
        """Return tag name -> sha mapping from refs/tags/, reverse name order."""
        return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
343 343
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (name, commit.raw_id)

        # NOTE(review): creates a lightweight tag ref only — user/date/message
        # are not recorded in a tag object (see TODO above)
        self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)

        # drop cached tag/ref mappings so the new tag becomes visible
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')

        return commit
369 369
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        self._remote.tag_remove(name)
        # drop cached tag/ref mappings so the removal becomes visible
        self._invalidate_prop_cache('tags')
        self._invalidate_prop_cache('_refs')
387 387
    def _get_refs(self):
        """Fetch the full ref name -> sha mapping from the remote backend."""
        return self._remote.get_refs()
390 390
    @CachedProperty
    def _refs(self):
        # cached ref mapping; invalidated by methods that mutate refs
        return self._get_refs()
394 394
395 395 @property
396 396 def _ref_tree(self):
397 397 node = tree = {}
398 398 for ref, sha in self._refs.iteritems():
399 399 path = ref.split('/')
400 400 for bit in path[:-1]:
401 401 node = node.setdefault(bit, {})
402 402 node[path[-1]] = sha
403 403 node = tree
404 404 return tree
405 405
    def get_remote_ref(self, ref_name):
        """Return sha of refs/remotes/origin/<ref_name>, or None when absent."""
        ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
        try:
            return self._refs[ref_key]
        except Exception:
            # broad on purpose: a missing key or a refs-lookup failure both
            # mean "no such remote ref" here
            return
412 412
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.

        :raises EmptyRepositoryError: when the repository has no commits
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return GitCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative indices to their positive position
                    commit_idx = self.commit_ids.index(_commit_id)
                return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if translate_tag:
            # resolve tags / short ids / symbolic names into a full commit id
            commit_id = self._lookup_commit(commit_id)

        try:
            idx = self._commit_ids[commit_id]
        except KeyError:
            # index unknown without a full history scan
            idx = -1

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
451 451
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
            `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # resolve ids to positions in the full ascending history
        start_raw_id = self._lookup_commit(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._lookup_commit(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end position inclusive when slicing below
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            # any date/branch constraint requires a filtered rev-list call
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_commit_ids(filters=revfilters)

        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                                   translate_tag=translate_tags)
531 531
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commits changes should be shown.
        :param path: limit the diff to this path (all paths when empty)
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: must equal ``path`` when given; differing paths are
          not supported
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = path
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_ignorews=ignore_whitespace,
            context=context)
        return GitDiff(diff)
562 562
    def strip(self, commit_id, branch_name):
        """
        Reset ``branch_name`` to the first parent of ``commit_id``, removing
        the commit from the branch head. Returns the new commit count.

        :raises Exception: when ``commit_id`` is a merge commit
        """
        commit = self.get_commit(commit_id=commit_id)
        if commit.merge:
            raise Exception('Cannot reset to merge commit')

        # parent is going to be the new head now
        commit = commit.parents[0]
        self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

        # clear cached properties
        self._invalidate_prop_cache('commit_ids')
        self._invalidate_prop_cache('_refs')
        self._invalidate_prop_cache('branches')

        return len(self.commit_ids)
578 578
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the common ancestor commit id of the two given commits —
        via ``merge-base`` when both live in this repository, otherwise via
        the remote missing-revs lookup. May return None when the ancestor
        cannot be determined.
        """
        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            # cross-repository: walk the commits repo2 has on top of us
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

        return ancestor_id
601 601
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the commits reachable from ``commit_id2`` but not from
        ``commit_id1``, in ascending order.

        :param repo2: repository holding ``commit_id2``; may equal this repo
        :param merge: accepted for interface parity; not used in this backend
        """
        repo1 = self
        # NOTE(review): ancestor_id is assigned but never used here
        ancestor_id = None

        if commit_id1 == commit_id2:
            commits = []
        elif repo1 != repo2:
            # cross-repository: ask the remote which revs repo2 has on top
            missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                        repo2.path)
            commits = [
                repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in reversed(missing_ids)]
        else:
            # same repository: a plain git log range suffices
            output, __ = repo1.run_git_command(
                ['log', '--reverse', '--pretty=format: %H', '-s',
                 '%s..%s' % (commit_id1, commit_id2)])
            commits = [
                repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]

        return commits
623 623
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        return GitInMemoryCommit(self)
630 630
    def pull(self, url, commit_ids=None, update_after=False):
        """
        Pull changes from external location. Pull is different in GIT
        that fetch since it's doing a checkout

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        refs = None
        if commit_ids is not None:
            # narrow the pull to only the refs resolving to the wanted commits
            remote_refs = self._remote.get_remote_refs(url)
            refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
        self._remote.pull(url, refs=refs, update_after=update_after)
        self._remote.invalidate_vcs_cache()
645 645
    def fetch(self, url, commit_ids=None):
        """
        Fetch all git objects from external location.
        """
        self._remote.sync_fetch(url, refs=commit_ids)
        self._remote.invalidate_vcs_cache()
652 652
    def push(self, url):
        """Push to the external location ``url`` (refs=None: backend default set)."""
        refs = None
        self._remote.sync_push(url, refs=refs)
656 656
    def set_refs(self, ref_name, commit_id):
        """Point ``ref_name`` at ``commit_id`` and drop the cached refs."""
        self._remote.set_refs(ref_name, commit_id)
        self._invalidate_prop_cache('_refs')
660 660
    def remove_ref(self, ref_name):
        """Delete ``ref_name`` and drop the cached refs."""
        self._remote.remove_ref(ref_name)
        self._invalidate_prop_cache('_refs')
664 664
    def _update_server_info(self):
        """
        runs gits update-server-info command in this repo instance
        """
        self._remote.update_server_info()
670 670
    def _current_branch(self):
        """
        Return the name of the current branch.

        It only works for non bare repositories (i.e. repositories with a
        working copy)

        :raises RepositoryError: for bare repositories
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
        return stdout.strip()
686 686
687 687 def _checkout(self, branch_name, create=False, force=False):
688 688 """
689 689 Checkout a branch in the working directory.
690 690
691 691 It tries to create the branch if create is True, failing if the branch
692 692 already exists.
693 693
694 694 It only works for non bare repositories (i.e. repositories with a
695 695 working copy)
696 696 """
697 697 if self.bare:
698 698 raise RepositoryError('Cannot checkout branches in a bare git repo')
699 699
700 700 cmd = ['checkout']
701 701 if force:
702 702 cmd.append('-f')
703 703 if create:
704 704 cmd.append('-b')
705 705 cmd.append(branch_name)
706 706 self.run_git_command(cmd, fail_on_stderr=False)
707 707
    def _create_branch(self, branch_name, commit_id):
        """
        creates a branch in a GIT repo
        """
        self._remote.create_branch(branch_name, commit_id)
713 713
    def _identify(self):
        """
        Return the current state of the working directory.

        :raises RepositoryError: for bare repositories
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        # commit id the working directory HEAD resolves to
        stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
        return stdout.strip()
726 726
    def _local_clone(self, clone_path, branch_name, source_branch=None):
        """
        Create a local clone of the current repo.

        :param branch_name: branch to clone (becomes the active branch)
        :param source_branch: optionally also fetch this branch for merges
        """
        # N.B.(skreft): the --branch option is required as otherwise the shallow
        # clone will only fetch the active branch.
        cmd = ['clone', '--branch', branch_name,
               self.path, os.path.abspath(clone_path)]

        self.run_git_command(cmd, fail_on_stderr=False)

        # if we get the different source branch, make sure we also fetch it for
        # merge conditions
        if source_branch and source_branch != branch_name:
            # check if the ref exists.
            shadow_repo = GitRepository(os.path.abspath(clone_path))
            if shadow_repo.get_remote_ref(source_branch):
                cmd = ['fetch', self.path, source_branch]
                self.run_git_command(cmd, fail_on_stderr=False)
746 746
    def _local_fetch(self, repository_path, branch_name, use_origin=False):
        """
        Fetch a branch from a local repository.

        :param use_origin: force-update the local branch ref of the same name
        :raises ValueError: when fetching from this repository itself
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot fetch from the same repository')

        if use_origin:
            # '+' prefix allows a non-fast-forward ref update
            branch_name = '+{branch}:refs/heads/{branch}'.format(
                branch=branch_name)

        cmd = ['fetch', '--no-tags', '--update-head-ok',
               repository_path, branch_name]
        self.run_git_command(cmd, fail_on_stderr=False)
762 762
    def _local_reset(self, branch_name):
        """Hard-reset the working directory to ``branch_name``."""
        # coerce to str; '--' guards against branch names that look like paths
        branch_name = '{}'.format(branch_name)
        cmd = ['reset', '--hard', branch_name, '--']
        self.run_git_command(cmd, fail_on_stderr=False)
767 767
    def _last_fetch_heads(self):
        """
        Return the last fetched heads that need merging.

        The algorithm is defined at
        https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
        """
        if not self.bare:
            fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
        else:
            fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')

        heads = []
        with open(fetch_heads_path) as f:
            for line in f:
                if ' not-for-merge ' in line:
                    continue
                # keep only the sha part before the first tab
                line = re.sub('\t.*', '', line, flags=re.DOTALL)
                heads.append(line)

        return heads
789 789
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        """Return a plain GitRepository pointing at an existing shadow repo path."""
        # NOTE(review): enable_hooks is accepted for interface parity but unused here
        return GitRepository(shadow_repository_path, with_wire={"cache": cache})
792 792
    def _local_pull(self, repository_path, branch_name, ff_only=True):
        """
        Pull a branch from a local repository.

        :raises RepositoryError: when called on a bare repository
        """
        if self.bare:
            raise RepositoryError('Cannot pull into a bare git repository')
        # N.B.(skreft): The --ff-only option is to make sure this is a
        # fast-forward (i.e., we are only pulling new changes and there are no
        # conflicts with our current branch)
        # Additionally, that option needs to go before --no-tags, otherwise git
        # pull complains about it being an unknown flag.
        cmd = ['pull']
        if ff_only:
            cmd.append('--ff-only')
        cmd.extend(['--no-tags', repository_path, branch_name])
        self.run_git_command(cmd, fail_on_stderr=False)
809 809
810 810 def _local_merge(self, merge_message, user_name, user_email, heads):
811 811 """
812 812 Merge the given head into the checked out branch.
813 813
814 814 It will force a merge commit.
815 815
816 816 Currently it raises an error if the repo is empty, as it is not possible
817 817 to create a merge commit in an empty repo.
818 818
819 819 :param merge_message: The message to use for the merge commit.
820 820 :param heads: the heads to merge.
821 821 """
822 822 if self.bare:
823 823 raise RepositoryError('Cannot merge into a bare git repository')
824 824
825 825 if not heads:
826 826 return
827 827
828 828 if self.is_empty():
829 829 # TODO(skreft): do somehting more robust in this case.
830 830 raise RepositoryError(
831 831 'Do not know how to merge into empty repositories yet')
832 832
833 833 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
834 834 # commit message. We also specify the user who is doing the merge.
835 835 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
836 836 '-c', 'user.email=%s' % safe_str(user_email),
837 837 'merge', '--no-ff', '-m', safe_str(merge_message)]
838 838 cmd.extend(heads)
839 839 try:
840 840 output = self.run_git_command(cmd, fail_on_stderr=False)
841 841 except RepositoryError:
842 842 # Cleanup any merge leftovers
843 843 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
844 844 raise
845 845
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently it if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).

        It does not run the hooks in the target repo.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   '%s:%s' % (source_branch, target_branch)]
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                # flag consumed by the RhodeCode hook wrappers to skip hooks
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
877 877
878 878 def _get_new_pr_branch(self, source_branch, target_branch):
879 879 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
880 880 pr_branches = []
881 881 for branch in self.branches:
882 882 if branch.startswith(prefix):
883 883 pr_branches.append(int(branch[len(prefix):]))
884 884
885 885 if not pr_branches:
886 886 branch_id = 0
887 887 else:
888 888 branch_id = max(pr_branches) + 1
889 889
890 890 return '%s%d' % (prefix, branch_id)
891 891
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Return the path of the shadow repository used for merge testing,
        creating it as a local clone of this repo when it does not exist yet.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(
                shadow_repository_path, target_ref.name, source_ref.name)
            log.debug('Prepared %s shadow repository in %s',
                      self.alias, shadow_repository_path)

        return shadow_repository_path
903 903
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` from ``source_repo`` into ``target_ref`` of this
        repository. The merge is prepared and tested inside a shadow
        repository; only on success (and when not ``dry_run``) is the result
        pushed back into this repository.

        :returns: MergeResponse carrying possibility/success flags, the merge
            reference (or None) and failure metadata.

        NOTE(review): ``use_rebase`` and ``close_branch`` only affect logging
        here — this git implementation always creates a merge commit; confirm
        against the base class contract.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # refuse to merge when the caller's view of the target head is stale
        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it is changed
        # this handles even force changes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self.get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on '
                          'shadow repo: %s', shadow_repo)
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError:
            log.exception('Failure when doing local merge on git shadow repo')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                # push back to the real target repository, with hooks enabled
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'git shadow repo'
                metadata['merge_commit'] = pr_branch
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
@@ -1,952 +1,952 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, exceptions
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
46 46 from rhodecode.lib.vcs.compat import configparser
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be found at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
            making a clone
        :param with_wire: options passed to the remote connection wire
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches: commit_id -> index mapping, filled by _rebuild_cache
        self._commit_ids = {}
86 86 # caches
87 87 self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository
126 126 Returns only not closed active branches by default
127 127
128 128 :param active: return also active branches
129 129 :param closed: return also closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        # `local` may arrive via kwargs; defaults to a regular (non-local) tag
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
193 193
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # tagging the null revision records the tag removal in mercurial
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (safe_unicode(n), hexlify(h)) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
    def _get_all_commit_ids(self):
        # 'visible' presumably selects mercurial's visible repo view
        # (excluding hidden/obsolete changesets) — confirm against vcsserver
        return self._remote.get_all_commit_ids('visible')
240 240
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: optional path to restrict the diff to
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: must equal ``path`` when given; diffing two different
          paths is not supported

        :raises ValueError: when ``path1`` is given and differs from ``path``
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            # the remote takes the repo path plus the in-repo path as filter
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
271 271
    def strip(self, commit_id, branch=None):
        """
        Strip ``commit_id`` from the repository (no working-copy update,
        no backup bundle).

        :param commit_id: commit to strip
        :param branch: unused here; kept for signature compatibility with
            other backends
        :returns: number of commits remaining after the strip
        """
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # clear cache
        self._invalidate_prop_cache('commit_ids')

        return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
294 294 if commit_id1 == commit_id2:
295 295 return commit_id1
296 296
297 297 ancestors = self._remote.revs_from_revspec(
298 298 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
299 299 other_path=repo2.path)
300 300 return repo2[ancestors[0]].raw_id if ancestors else None
301 301
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the commits of ``repo2`` reachable from ``commit_id2`` but not
        from ``commit_id1``.

        :param merge: when True use ancestor-set difference (everything that
            would be merged); when False use a linear revision range
        :param pre_load: optional list of commit attributes to pre-load
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                # all ancestors of commit_id2 minus ancestors of commit_id1
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                # linear range commit_id1..commit_id2, excluding commit_id1
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
319 319
320 320 @staticmethod
321 321 def check_url(url, config):
322 322 """
323 323 Function will check given url and try to verify if it's a valid
324 324 link. Sometimes it may happened that mercurial will issue basic
325 325 auth request that can cause whole API to hang when used from python
326 326 or other external calls.
327 327
328 328 On failures it'll raise urllib2.HTTPError, exception is also thrown
329 329 when the return code is non 200
330 330 """
331 331 # check first if it's not an local url
332 332 if os.path.isdir(url) or url.startswith('file:'):
333 333 return True
334 334
335 335 # Request the _remote to verify the url
336 336 return connection.Hg.check_url(url, config.serialize())
337 337
338 338 @staticmethod
339 339 def is_valid_repository(path):
340 340 return os.path.isdir(os.path.join(path, '.hg'))
341 341
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.

        :raises RepositoryError: when `create` is requested but the path
            already exists, or when the url check/clone fails
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            # validate the remote url before attempting the clone
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)
            self._remote.localrepository(create)
370 370
371 371 @LazyProperty
372 372 def in_memory_commit(self):
373 373 return MercurialInMemoryCommit(self)
374 374
375 375 @LazyProperty
376 376 def description(self):
377 377 description = self._remote.get_config_value(
378 378 'web', 'description', untrusted=True)
379 379 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
380 380
381 381 @LazyProperty
382 382 def contact(self):
383 383 contact = (
384 384 self._remote.get_config_value("web", "contact") or
385 385 self._remote.get_config_value("ui", "username"))
386 386 return safe_unicode(contact or self.DEFAULT_CONTACT)
387 387
388 388 @LazyProperty
389 389 def last_change(self):
390 390 """
391 391 Returns last change made on this repository as
392 392 `datetime.datetime` object.
393 393 """
394 394 try:
395 395 return self.get_commit().date
396 396 except RepositoryError:
397 397 tzoffset = makedate()[1]
398 398 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
399 399
400 400 def _get_fs_mtime(self):
401 401 # fallback to filesystem
402 402 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
403 403 st_path = os.path.join(self.path, '.hg', "store")
404 404 if os.path.exists(cl_path):
405 405 return os.stat(cl_path).st_mtime
406 406 else:
407 407 return os.stat(st_path).st_mtime
408 408
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        # NOTE: python2 urllib; the url is byte-encoded before inspection
        url = url.encode('utf8')
        # 'default' is mercurial's symbolic name for the default remote path
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
419 419
420 420 def get_hook_location(self):
421 421 """
422 422 returns absolute path to location where hooks are stored
423 423 """
424 424 return os.path.join(self.path, '.hg', '.hgrc')
425 425
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit id (hash); takes precedence over commit_idx
        :param commit_idx: numeric commit index; negative values are
            normalized to their positive position
        :param pre_load: optional list of commit attributes to pre-load
        :param translate_tag: unused here; kept for backend API compatibility

        :raises EmptyRepositoryError: when the repository has no commits
        :raises CommitDoesNotExistError: when the commit cannot be resolved
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # out-of-range index: fall through to the remote lookup below
                commit_id = commit_idx
        else:
            # no arguments given: resolve the repository tip
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
467 467
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :param pre_load: optional list of commit attributes to pre-load
        :param translate_tags: unused here; kept for backend API compatibility
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # NOTE(review): ancestor-based branch filtering is hard-disabled;
        # the `elif branch_name and branch_ancestors` below is unreachable
        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end of the slice inclusive
            end_pos += 1

        # build a mercurial revset from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # resolve the revset on the remote; iterate results by index
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
550 550
551 551 def pull(self, url, commit_ids=None):
552 552 """
553 553 Pull changes from external location.
554 554
555 555 :param commit_ids: Optional. Can be set to a list of commit ids
556 556 which shall be pulled from the other repository.
557 557 """
558 558 url = self._get_url(url)
559 559 self._remote.pull(url, commit_ids=commit_ids)
560 560 self._remote.invalidate_vcs_cache()
561 561
562 562 def fetch(self, url, commit_ids=None):
563 563 """
564 564 Backward compatibility with GIT fetch==pull
565 565 """
566 566 return self.pull(url, commit_ids=commit_ids)
567 567
568 568 def push(self, url):
569 569 url = self._get_url(url)
570 570 self._remote.sync_push(url)
571 571
572 572 def _local_clone(self, clone_path):
573 573 """
574 574 Create a local clone of the current repo.
575 575 """
576 576 self._remote.clone(self.path, clone_path, update_after_clone=True,
577 577 hooks=False)
578 578
579 579 def _update(self, revision, clean=False):
580 580 """
581 581 Update the working copy to the specified revision.
582 582 """
583 583 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
584 584 self._remote.update(revision, clean=clean)
585 585
586 586 def _identify(self):
587 587 """
588 588 Return the current state of the working directory.
589 589 """
590 590 return self._remote.identify().strip().rstrip('+')
591 591
592 592 def _heads(self, branch=None):
593 593 """
594 594 Return the commit ids of the repository heads.
595 595 """
596 596 return self._remote.heads(branch=branch).strip().split(' ')
597 597
598 598 def _ancestor(self, revision1, revision2):
599 599 """
600 600 Return the common ancestor of the two revisions.
601 601 """
602 602 return self._remote.ancestor(revision1, revision2)
603 603
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param revision: revision id to push
        :param repository_path: filesystem path of the target repository
        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: when False, hooks are skipped in the target repo
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
615 615
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit
        """
        # start from a clean checkout of the merge target
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # bookmark the source head so we can update to the rebase result
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
671 671
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param close_message: optional message for the branch-closing commit
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            # a commit with close_branch=True marks the branch as closed
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
693 693
694 694 def _is_the_same_branch(self, target_ref, source_ref):
695 695 return (
696 696 self._get_branch_name(target_ref) ==
697 697 self._get_branch_name(source_ref))
698 698
699 699 def _get_branch_name(self, ref):
700 700 if ref.type == 'branch':
701 701 return ref.name
702 702 return self._remote.ctx_branch(ref.commit_id)
703 703
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        """
        Return the path of the shadow repository used for merge operations,
        creating it as a local clone of this repo when missing. The ref
        arguments are unused here; kept for API compatibility with git.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
714 714
715 715 def _merge_repo(self, repo_id, workspace_id, target_ref,
716 716 source_repo, source_ref, merge_message,
717 717 merger_name, merger_email, dry_run=False,
718 718 use_rebase=False, close_branch=False):
719 719
720 720 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
721 721 'rebase' if use_rebase else 'merge', dry_run)
722 722 if target_ref.commit_id not in self._heads():
723 723 return MergeResponse(
724 724 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
725 725 metadata={'target_ref': target_ref})
726 726
727 727 try:
728 728 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
729 729 heads = '\n,'.join(self._heads(target_ref.name))
730 730 metadata = {
731 731 'target_ref': target_ref,
732 732 'source_ref': source_ref,
733 733 'heads': heads
734 734 }
735 735 return MergeResponse(
736 736 False, False, None,
737 737 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
738 738 metadata=metadata)
739 739 except CommitDoesNotExistError:
740 740 log.exception('Failure when looking up branch heads on hg target')
741 741 return MergeResponse(
742 742 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
743 743 metadata={'target_ref': target_ref})
744 744
745 745 shadow_repository_path = self._maybe_prepare_merge_workspace(
746 746 repo_id, workspace_id, target_ref, source_ref)
747 747 shadow_repo = self.get_shadow_instance(shadow_repository_path)
748 748
749 749 log.debug('Pulling in target reference %s', target_ref)
750 750 self._validate_pull_reference(target_ref)
751 751 shadow_repo._local_pull(self.path, target_ref)
752 752
753 753 try:
754 754 log.debug('Pulling in source reference %s', source_ref)
755 755 source_repo._validate_pull_reference(source_ref)
756 756 shadow_repo._local_pull(source_repo.path, source_ref)
757 757 except CommitDoesNotExistError:
758 758 log.exception('Failure when doing local pull on hg shadow repo')
759 759 return MergeResponse(
760 760 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
761 761 metadata={'source_ref': source_ref})
762 762
763 763 merge_ref = None
764 764 merge_commit_id = None
765 765 close_commit_id = None
766 766 merge_failure_reason = MergeFailureReason.NONE
767 767 metadata = {}
768 768
769 769 # enforce that close branch should be used only in case we source from
770 770 # an actual Branch
771 771 close_branch = close_branch and source_ref.type == 'branch'
772 772
773 773 # don't allow to close branch if source and target are the same
774 774 close_branch = close_branch and source_ref.name != target_ref.name
775 775
776 776 needs_push_on_close = False
777 777 if close_branch and not use_rebase and not dry_run:
778 778 try:
779 779 close_commit_id, needs_push_on_close = shadow_repo._local_close(
780 780 target_ref, merger_name, merger_email, source_ref)
781 781 merge_possible = True
782 782 except RepositoryError:
783 783 log.exception('Failure when doing close branch on '
784 784 'shadow repo: %s', shadow_repo)
785 785 merge_possible = False
786 786 merge_failure_reason = MergeFailureReason.MERGE_FAILED
787 787 else:
788 788 merge_possible = True
789 789
790 790 needs_push = False
791 791 if merge_possible:
792 792 try:
793 793 merge_commit_id, needs_push = shadow_repo._local_merge(
794 794 target_ref, merge_message, merger_name, merger_email,
795 795 source_ref, use_rebase=use_rebase, dry_run=dry_run)
796 796 merge_possible = True
797 797
798 798 # read the state of the close action, if it
799 799 # maybe required a push
800 800 needs_push = needs_push or needs_push_on_close
801 801
802 802 # Set a bookmark pointing to the merge commit. This bookmark
803 803 # may be used to easily identify the last successful merge
804 804 # commit in the shadow repository.
805 805 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
806 806 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
807 807 except SubrepoMergeError:
808 808 log.exception(
809 809 'Subrepo merge error during local merge on hg shadow repo.')
810 810 merge_possible = False
811 811 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
812 812 needs_push = False
813 813 except RepositoryError:
814 814 log.exception('Failure when doing local merge on hg shadow repo')
815 815 merge_possible = False
816 816 merge_failure_reason = MergeFailureReason.MERGE_FAILED
817 817 needs_push = False
818 818
819 819 if merge_possible and not dry_run:
820 820 if needs_push:
821 821 # In case the target is a bookmark, update it, so after pushing
822 822 # the bookmarks is also updated in the target.
823 823 if target_ref.type == 'book':
824 824 shadow_repo.bookmark(
825 825 target_ref.name, revision=merge_commit_id)
826 826 try:
827 827 shadow_repo_with_hooks = self.get_shadow_instance(
828 828 shadow_repository_path,
829 829 enable_hooks=True)
830 830 # This is the actual merge action, we push from shadow
831 831 # into origin.
832 832 # Note: the push_branches option will push any new branch
833 833 # defined in the source repository to the target. This may
834 834 # be dangerous as branches are permanent in Mercurial.
835 835 # This feature was requested in issue #441.
836 836 shadow_repo_with_hooks._local_push(
837 837 merge_commit_id, self.path, push_branches=True,
838 838 enable_hooks=True)
839 839
840 840 # maybe we also need to push the close_commit_id
841 841 if close_commit_id:
842 842 shadow_repo_with_hooks._local_push(
843 843 close_commit_id, self.path, push_branches=True,
844 844 enable_hooks=True)
845 845 merge_succeeded = True
846 846 except RepositoryError:
847 847 log.exception(
848 848 'Failure when doing local push from the shadow '
849 849 'repository to the target repository at %s.', self.path)
850 850 merge_succeeded = False
851 851 merge_failure_reason = MergeFailureReason.PUSH_FAILED
852 852 metadata['target'] = 'hg shadow repo'
853 853 metadata['merge_commit'] = merge_commit_id
854 854 else:
855 855 merge_succeeded = True
856 856 else:
857 857 merge_succeeded = False
858 858
859 859 return MergeResponse(
860 860 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
861 861 metadata=metadata)
862 862
863 863 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
864 864 config = self.config.copy()
865 865 if not enable_hooks:
866 866 config.clear_section('hooks')
867 867 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
868 868
869 869 def _validate_pull_reference(self, reference):
870 870 if not (reference.name in self.bookmarks or
871 871 reference.name in self.branches or
872 872 self.get_commit(reference.commit_id)):
873 873 raise CommitDoesNotExistError(
874 874 'Unknown branch, bookmark or commit id')
875 875
876 876 def _local_pull(self, repository_path, reference):
877 877 """
878 878 Fetch a branch, bookmark or commit from a local repository.
879 879 """
880 880 repository_path = os.path.abspath(repository_path)
881 881 if repository_path == self.path:
882 882 raise ValueError('Cannot pull from the same repository')
883 883
884 884 reference_type_to_option_name = {
885 885 'book': 'bookmark',
886 886 'branch': 'branch',
887 887 }
888 888 option_name = reference_type_to_option_name.get(
889 889 reference.type, 'revision')
890 890
891 891 if option_name == 'revision':
892 892 ref = reference.commit_id
893 893 else:
894 894 ref = reference.name
895 895
896 896 options = {option_name: [ref]}
897 897 self._remote.pull_cmd(repository_path, hooks=False, **options)
898 898 self._remote.invalidate_vcs_cache()
899 899
900 900 def bookmark(self, bookmark, revision=None):
901 901 if isinstance(bookmark, unicode):
902 902 bookmark = safe_str(bookmark)
903 903 self._remote.bookmark(bookmark, revision=revision)
904 904 self._remote.invalidate_vcs_cache()
905 905
906 906 def get_path_permissions(self, username):
907 907 hgacl_file = os.path.join(self.path, '.hg/hgacl')
908 908
909 909 def read_patterns(suffix):
910 910 svalue = None
911 911 for section, option in [
912 912 ('narrowacl', username + suffix),
913 913 ('narrowacl', 'default' + suffix),
914 914 ('narrowhgacl', username + suffix),
915 915 ('narrowhgacl', 'default' + suffix)
916 916 ]:
917 917 try:
918 918 svalue = hgacl.get(section, option)
919 919 break # stop at the first value we find
920 920 except configparser.NoOptionError:
921 921 pass
922 922 if not svalue:
923 923 return None
924 924 result = ['/']
925 925 for pattern in svalue.split():
926 926 result.append(pattern)
927 927 if '*' not in pattern and '?' not in pattern:
928 928 result.append(pattern + '/*')
929 929 return result
930 930
931 931 if os.path.exists(hgacl_file):
932 932 try:
933 933 hgacl = configparser.RawConfigParser()
934 934 hgacl.read(hgacl_file)
935 935
936 936 includes = read_patterns('.includes')
937 937 excludes = read_patterns('.excludes')
938 938 return BasePathPermissionChecker.create_from_patterns(
939 939 includes, excludes)
940 940 except BaseException as e:
941 941 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
942 942 hgacl_file, self.name, e)
943 943 raise exceptions.RepositoryRequirementError(msg)
944 944 else:
945 945 return None
946 946
947 947
948 948 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
949 949
950 950 def _commit_factory(self, commit_id):
951 951 return self.repo.get_commit(
952 952 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,5208 +1,5213 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 40 or_, and_, not_, func, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers.text import collapse, remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance
59 59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
60 60 from rhodecode.lib.utils2 import (
61 61 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 65 JsonRaw
66 66 from rhodecode.lib.ext_json import json
67 67 from rhodecode.lib.caching_query import FromCache
68 68 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 69 from rhodecode.lib.encrypt2 import Encryptor
70 70 from rhodecode.model.meta import Base, Session
71 71
72 72 URL_SEP = '/'
73 73 log = logging.getLogger(__name__)
74 74
75 75 # =============================================================================
76 76 # BASE CLASSES
77 77 # =============================================================================
78 78
79 79 # this is propagated from .ini file rhodecode.encrypted_values.secret or
80 80 # beaker.session.secret if first is not set.
81 81 # and initialized at environment.py
82 82 ENCRYPTION_KEY = None
83 83
84 84 # used to sort permissions by types, '#' used here is not allowed to be in
85 85 # usernames, and it's very early in sorted string.printable table.
86 86 PERMISSION_TYPE_SORT = {
87 87 'admin': '####',
88 88 'write': '###',
89 89 'read': '##',
90 90 'none': '#',
91 91 }
92 92
93 93
94 94 def display_user_sort(obj):
95 95 """
96 96 Sort function used to sort permissions in .permissions() function of
97 97 Repository, RepoGroup, UserGroup. Also it put the default user in front
98 98 of all other resources
99 99 """
100 100
101 101 if obj.username == User.DEFAULT_USER:
102 102 return '#####'
103 103 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
104 104 return prefix + obj.username
105 105
106 106
107 107 def display_user_group_sort(obj):
108 108 """
109 109 Sort function used to sort permissions in .permissions() function of
110 110 Repository, RepoGroup, UserGroup. Also it put the default user in front
111 111 of all other resources
112 112 """
113 113
114 114 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
115 115 return prefix + obj.users_group_name
116 116
117 117
118 118 def _hash_key(k):
119 119 return sha1_safe(k)
120 120
121 121
122 122 def in_filter_generator(qry, items, limit=500):
123 123 """
124 124 Splits IN() into multiple with OR
125 125 e.g.::
126 126 cnt = Repository.query().filter(
127 127 or_(
128 128 *in_filter_generator(Repository.repo_id, range(100000))
129 129 )).count()
130 130 """
131 131 if not items:
132 132 # empty list will cause empty query which might cause security issues
133 133 # this can lead to hidden unpleasant results
134 134 items = [-1]
135 135
136 136 parts = []
137 137 for chunk in xrange(0, len(items), limit):
138 138 parts.append(
139 139 qry.in_(items[chunk: chunk + limit])
140 140 )
141 141
142 142 return parts
143 143
144 144
145 145 base_table_args = {
146 146 'extend_existing': True,
147 147 'mysql_engine': 'InnoDB',
148 148 'mysql_charset': 'utf8',
149 149 'sqlite_autoincrement': True
150 150 }
151 151
152 152
153 153 class EncryptedTextValue(TypeDecorator):
154 154 """
155 155 Special column for encrypted long text data, use like::
156 156
157 157 value = Column("encrypted_value", EncryptedValue(), nullable=False)
158 158
159 159 This column is intelligent so if value is in unencrypted form it return
160 160 unencrypted form, but on save it always encrypts
161 161 """
162 162 impl = Text
163 163
164 164 def process_bind_param(self, value, dialect):
165 165 """
166 166 Setter for storing value
167 167 """
168 168 import rhodecode
169 169 if not value:
170 170 return value
171 171
172 172 # protect against double encrypting if values is already encrypted
173 173 if value.startswith('enc$aes$') \
174 174 or value.startswith('enc$aes_hmac$') \
175 175 or value.startswith('enc2$'):
176 176 raise ValueError('value needs to be in unencrypted format, '
177 177 'ie. not starting with enc$ or enc2$')
178 178
179 179 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
180 180 if algo == 'aes':
181 181 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
182 182 elif algo == 'fernet':
183 183 return Encryptor(ENCRYPTION_KEY).encrypt(value)
184 184 else:
185 185 ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
186 186
187 187 def process_result_value(self, value, dialect):
188 188 """
189 189 Getter for retrieving value
190 190 """
191 191
192 192 import rhodecode
193 193 if not value:
194 194 return value
195 195
196 196 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
197 197 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
198 198 if algo == 'aes':
199 199 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
200 200 elif algo == 'fernet':
201 201 return Encryptor(ENCRYPTION_KEY).decrypt(value)
202 202 else:
203 203 ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
204 204 return decrypted_data
205 205
206 206
207 207 class BaseModel(object):
208 208 """
209 209 Base Model for all classes
210 210 """
211 211
212 212 @classmethod
213 213 def _get_keys(cls):
214 214 """return column names for this model """
215 215 return class_mapper(cls).c.keys()
216 216
217 217 def get_dict(self):
218 218 """
219 219 return dict with keys and values corresponding
220 220 to this model data """
221 221
222 222 d = {}
223 223 for k in self._get_keys():
224 224 d[k] = getattr(self, k)
225 225
226 226 # also use __json__() if present to get additional fields
227 227 _json_attr = getattr(self, '__json__', None)
228 228 if _json_attr:
229 229 # update with attributes from __json__
230 230 if callable(_json_attr):
231 231 _json_attr = _json_attr()
232 232 for k, val in _json_attr.iteritems():
233 233 d[k] = val
234 234 return d
235 235
236 236 def get_appstruct(self):
237 237 """return list with keys and values tuples corresponding
238 238 to this model data """
239 239
240 240 lst = []
241 241 for k in self._get_keys():
242 242 lst.append((k, getattr(self, k),))
243 243 return lst
244 244
245 245 def populate_obj(self, populate_dict):
246 246 """populate model with data from given populate_dict"""
247 247
248 248 for k in self._get_keys():
249 249 if k in populate_dict:
250 250 setattr(self, k, populate_dict[k])
251 251
252 252 @classmethod
253 253 def query(cls):
254 254 return Session().query(cls)
255 255
256 256 @classmethod
257 257 def get(cls, id_):
258 258 if id_:
259 259 return cls.query().get(id_)
260 260
261 261 @classmethod
262 262 def get_or_404(cls, id_):
263 263 from pyramid.httpexceptions import HTTPNotFound
264 264
265 265 try:
266 266 id_ = int(id_)
267 267 except (TypeError, ValueError):
268 268 raise HTTPNotFound()
269 269
270 270 res = cls.query().get(id_)
271 271 if not res:
272 272 raise HTTPNotFound()
273 273 return res
274 274
275 275 @classmethod
276 276 def getAll(cls):
277 277 # deprecated and left for backward compatibility
278 278 return cls.get_all()
279 279
280 280 @classmethod
281 281 def get_all(cls):
282 282 return cls.query().all()
283 283
284 284 @classmethod
285 285 def delete(cls, id_):
286 286 obj = cls.query().get(id_)
287 287 Session().delete(obj)
288 288
289 289 @classmethod
290 290 def identity_cache(cls, session, attr_name, value):
291 291 exist_in_session = []
292 292 for (item_cls, pkey), instance in session.identity_map.items():
293 293 if cls == item_cls and getattr(instance, attr_name) == value:
294 294 exist_in_session.append(instance)
295 295 if exist_in_session:
296 296 if len(exist_in_session) == 1:
297 297 return exist_in_session[0]
298 298 log.exception(
299 299 'multiple objects with attr %s and '
300 300 'value %s found with same name: %r',
301 301 attr_name, value, exist_in_session)
302 302
303 303 def __repr__(self):
304 304 if hasattr(self, '__unicode__'):
305 305 # python repr needs to return str
306 306 try:
307 307 return safe_str(self.__unicode__())
308 308 except UnicodeDecodeError:
309 309 pass
310 310 return '<DB:%s>' % (self.__class__.__name__)
311 311
312 312
313 313 class RhodeCodeSetting(Base, BaseModel):
314 314 __tablename__ = 'rhodecode_settings'
315 315 __table_args__ = (
316 316 UniqueConstraint('app_settings_name'),
317 317 base_table_args
318 318 )
319 319
320 320 SETTINGS_TYPES = {
321 321 'str': safe_str,
322 322 'int': safe_int,
323 323 'unicode': safe_unicode,
324 324 'bool': str2bool,
325 325 'list': functools.partial(aslist, sep=',')
326 326 }
327 327 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
328 328 GLOBAL_CONF_KEY = 'app_settings'
329 329
330 330 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
331 331 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
332 332 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
333 333 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
334 334
335 335 def __init__(self, key='', val='', type='unicode'):
336 336 self.app_settings_name = key
337 337 self.app_settings_type = type
338 338 self.app_settings_value = val
339 339
340 340 @validates('_app_settings_value')
341 341 def validate_settings_value(self, key, val):
342 342 assert type(val) == unicode
343 343 return val
344 344
345 345 @hybrid_property
346 346 def app_settings_value(self):
347 347 v = self._app_settings_value
348 348 _type = self.app_settings_type
349 349 if _type:
350 350 _type = self.app_settings_type.split('.')[0]
351 351 # decode the encrypted value
352 352 if 'encrypted' in self.app_settings_type:
353 353 cipher = EncryptedTextValue()
354 354 v = safe_unicode(cipher.process_result_value(v, None))
355 355
356 356 converter = self.SETTINGS_TYPES.get(_type) or \
357 357 self.SETTINGS_TYPES['unicode']
358 358 return converter(v)
359 359
360 360 @app_settings_value.setter
361 361 def app_settings_value(self, val):
362 362 """
363 363 Setter that will always make sure we use unicode in app_settings_value
364 364
365 365 :param val:
366 366 """
367 367 val = safe_unicode(val)
368 368 # encode the encrypted value
369 369 if 'encrypted' in self.app_settings_type:
370 370 cipher = EncryptedTextValue()
371 371 val = safe_unicode(cipher.process_bind_param(val, None))
372 372 self._app_settings_value = val
373 373
374 374 @hybrid_property
375 375 def app_settings_type(self):
376 376 return self._app_settings_type
377 377
378 378 @app_settings_type.setter
379 379 def app_settings_type(self, val):
380 380 if val.split('.')[0] not in self.SETTINGS_TYPES:
381 381 raise Exception('type must be one of %s got %s'
382 382 % (self.SETTINGS_TYPES.keys(), val))
383 383 self._app_settings_type = val
384 384
385 385 @classmethod
386 386 def get_by_prefix(cls, prefix):
387 387 return RhodeCodeSetting.query()\
388 388 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
389 389 .all()
390 390
391 391 def __unicode__(self):
392 392 return u"<%s('%s:%s[%s]')>" % (
393 393 self.__class__.__name__,
394 394 self.app_settings_name, self.app_settings_value,
395 395 self.app_settings_type
396 396 )
397 397
398 398
399 399 class RhodeCodeUi(Base, BaseModel):
400 400 __tablename__ = 'rhodecode_ui'
401 401 __table_args__ = (
402 402 UniqueConstraint('ui_key'),
403 403 base_table_args
404 404 )
405 405
406 406 HOOK_REPO_SIZE = 'changegroup.repo_size'
407 407 # HG
408 408 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
409 409 HOOK_PULL = 'outgoing.pull_logger'
410 410 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
411 411 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
412 412 HOOK_PUSH = 'changegroup.push_logger'
413 413 HOOK_PUSH_KEY = 'pushkey.key_push'
414 414
415 415 HOOKS_BUILTIN = [
416 416 HOOK_PRE_PULL,
417 417 HOOK_PULL,
418 418 HOOK_PRE_PUSH,
419 419 HOOK_PRETX_PUSH,
420 420 HOOK_PUSH,
421 421 HOOK_PUSH_KEY,
422 422 ]
423 423
424 424 # TODO: johbo: Unify way how hooks are configured for git and hg,
425 425 # git part is currently hardcoded.
426 426
427 427 # SVN PATTERNS
428 428 SVN_BRANCH_ID = 'vcs_svn_branch'
429 429 SVN_TAG_ID = 'vcs_svn_tag'
430 430
431 431 ui_id = Column(
432 432 "ui_id", Integer(), nullable=False, unique=True, default=None,
433 433 primary_key=True)
434 434 ui_section = Column(
435 435 "ui_section", String(255), nullable=True, unique=None, default=None)
436 436 ui_key = Column(
437 437 "ui_key", String(255), nullable=True, unique=None, default=None)
438 438 ui_value = Column(
439 439 "ui_value", String(255), nullable=True, unique=None, default=None)
440 440 ui_active = Column(
441 441 "ui_active", Boolean(), nullable=True, unique=None, default=True)
442 442
443 443 def __repr__(self):
444 444 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
445 445 self.ui_key, self.ui_value)
446 446
447 447
448 448 class RepoRhodeCodeSetting(Base, BaseModel):
449 449 __tablename__ = 'repo_rhodecode_settings'
450 450 __table_args__ = (
451 451 UniqueConstraint(
452 452 'app_settings_name', 'repository_id',
453 453 name='uq_repo_rhodecode_setting_name_repo_id'),
454 454 base_table_args
455 455 )
456 456
457 457 repository_id = Column(
458 458 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
459 459 nullable=False)
460 460 app_settings_id = Column(
461 461 "app_settings_id", Integer(), nullable=False, unique=True,
462 462 default=None, primary_key=True)
463 463 app_settings_name = Column(
464 464 "app_settings_name", String(255), nullable=True, unique=None,
465 465 default=None)
466 466 _app_settings_value = Column(
467 467 "app_settings_value", String(4096), nullable=True, unique=None,
468 468 default=None)
469 469 _app_settings_type = Column(
470 470 "app_settings_type", String(255), nullable=True, unique=None,
471 471 default=None)
472 472
473 473 repository = relationship('Repository')
474 474
475 475 def __init__(self, repository_id, key='', val='', type='unicode'):
476 476 self.repository_id = repository_id
477 477 self.app_settings_name = key
478 478 self.app_settings_type = type
479 479 self.app_settings_value = val
480 480
481 481 @validates('_app_settings_value')
482 482 def validate_settings_value(self, key, val):
483 483 assert type(val) == unicode
484 484 return val
485 485
486 486 @hybrid_property
487 487 def app_settings_value(self):
488 488 v = self._app_settings_value
489 489 type_ = self.app_settings_type
490 490 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
491 491 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
492 492 return converter(v)
493 493
494 494 @app_settings_value.setter
495 495 def app_settings_value(self, val):
496 496 """
497 497 Setter that will always make sure we use unicode in app_settings_value
498 498
499 499 :param val:
500 500 """
501 501 self._app_settings_value = safe_unicode(val)
502 502
503 503 @hybrid_property
504 504 def app_settings_type(self):
505 505 return self._app_settings_type
506 506
507 507 @app_settings_type.setter
508 508 def app_settings_type(self, val):
509 509 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
510 510 if val not in SETTINGS_TYPES:
511 511 raise Exception('type must be one of %s got %s'
512 512 % (SETTINGS_TYPES.keys(), val))
513 513 self._app_settings_type = val
514 514
515 515 def __unicode__(self):
516 516 return u"<%s('%s:%s:%s[%s]')>" % (
517 517 self.__class__.__name__, self.repository.repo_name,
518 518 self.app_settings_name, self.app_settings_value,
519 519 self.app_settings_type
520 520 )
521 521
522 522
523 523 class RepoRhodeCodeUi(Base, BaseModel):
524 524 __tablename__ = 'repo_rhodecode_ui'
525 525 __table_args__ = (
526 526 UniqueConstraint(
527 527 'repository_id', 'ui_section', 'ui_key',
528 528 name='uq_repo_rhodecode_ui_repository_id_section_key'),
529 529 base_table_args
530 530 )
531 531
532 532 repository_id = Column(
533 533 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
534 534 nullable=False)
535 535 ui_id = Column(
536 536 "ui_id", Integer(), nullable=False, unique=True, default=None,
537 537 primary_key=True)
538 538 ui_section = Column(
539 539 "ui_section", String(255), nullable=True, unique=None, default=None)
540 540 ui_key = Column(
541 541 "ui_key", String(255), nullable=True, unique=None, default=None)
542 542 ui_value = Column(
543 543 "ui_value", String(255), nullable=True, unique=None, default=None)
544 544 ui_active = Column(
545 545 "ui_active", Boolean(), nullable=True, unique=None, default=True)
546 546
547 547 repository = relationship('Repository')
548 548
549 549 def __repr__(self):
550 550 return '<%s[%s:%s]%s=>%s]>' % (
551 551 self.__class__.__name__, self.repository.repo_name,
552 552 self.ui_section, self.ui_key, self.ui_value)
553 553
554 554
555 555 class User(Base, BaseModel):
556 556 __tablename__ = 'users'
557 557 __table_args__ = (
558 558 UniqueConstraint('username'), UniqueConstraint('email'),
559 559 Index('u_username_idx', 'username'),
560 560 Index('u_email_idx', 'email'),
561 561 base_table_args
562 562 )
563 563
564 564 DEFAULT_USER = 'default'
565 565 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
566 566 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
567 567
568 568 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
569 569 username = Column("username", String(255), nullable=True, unique=None, default=None)
570 570 password = Column("password", String(255), nullable=True, unique=None, default=None)
571 571 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
572 572 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
573 573 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
574 574 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
575 575 _email = Column("email", String(255), nullable=True, unique=None, default=None)
576 576 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)

    # external authentication plugin bookkeeping; extern_name is the
    # identifier of this account inside the external system
    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    # legacy single api-key column; superseded by UserApiKeys rows (the
    # api_key property setter below always stores None here)
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data, see user_data property

    # audit log entries and direct permission rows of this user
    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')

    # objects owned by this user
    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    # who follows this user / whom this user follows
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    # per-object permission rows granted to this user
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    user_ssh_keys = relationship('UserSshKeys', cascade='all')

    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities; NOTE(review): attribute name carries a historical
    # typo ('extenal') -- kept as-is, callers may rely on it
    extenal_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
625 625 def __unicode__(self):
626 626 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
627 627 self.user_id, self.username)
628 628
629 629 @hybrid_property
630 630 def email(self):
631 631 return self._email
632 632
633 633 @email.setter
634 634 def email(self, val):
635 635 self._email = val.lower() if val else None
636 636
637 637 @hybrid_property
638 638 def first_name(self):
639 639 from rhodecode.lib import helpers as h
640 640 if self.name:
641 641 return h.escape(self.name)
642 642 return self.name
643 643
644 644 @hybrid_property
645 645 def last_name(self):
646 646 from rhodecode.lib import helpers as h
647 647 if self.lastname:
648 648 return h.escape(self.lastname)
649 649 return self.lastname
650 650
    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user
        and not yet expired; returns the token string, or None.
        """
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key, this is deprecated for now; the legacy
        # column is always cleared -- tokens live in UserApiKeys rows instead
        self._api_key = None

    @property
    def reviewer_pull_requests(self):
        # all reviewer entries of this user; pull requests eagerly joined
        # to avoid N+1 queries on access
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()
677 677
678 678 @property
679 679 def firstname(self):
680 680 # alias for future
681 681 return self.name
682 682
683 683 @property
684 684 def emails(self):
685 685 other = UserEmailMap.query()\
686 686 .filter(UserEmailMap.user == self) \
687 687 .order_by(UserEmailMap.email_id.asc()) \
688 688 .all()
689 689 return [self.email] + [x.email for x in other]
690 690
691 691 @property
692 692 def auth_tokens(self):
693 693 auth_tokens = self.get_auth_tokens()
694 694 return [x.api_key for x in auth_tokens]
695 695
696 696 def get_auth_tokens(self):
697 697 return UserApiKeys.query()\
698 698 .filter(UserApiKeys.user == self)\
699 699 .order_by(UserApiKeys.user_api_key_id.asc())\
700 700 .all()
701 701
    @LazyProperty
    def feed_token(self):
        # cached per-instance via LazyProperty; see get_feed_token()
        return self.get_feed_token()

    def get_feed_token(self, cache=True):
        """
        Return this user's first FEED-role token, optionally through the
        short SQL cache; a sentinel string is returned when none exists.
        """
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'
718 718
719 719 @classmethod
720 720 def get(cls, user_id, cache=False):
721 721 if not user_id:
722 722 return
723 723
724 724 user = cls.query()
725 725 if cache:
726 726 user = user.options(
727 727 FromCache("sql_cache_short", "get_users_%s" % user_id))
728 728 return user.get(user_id)
729 729
    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        """
        Return ``user``'s non-expired tokens; when ``role`` is given only
        tokens with that role (or the ALL role) are returned.
        """
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()
739 739
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check ``auth_token`` against this user's non-expired tokens.

        :param auth_token: plain-text token supplied by the caller
        :param roles: additional acceptable token roles; the ALL role is
            always accepted on top of these
        :param scope_repo_id: repository id the token is used against; when
            the matched token has a repo scope set, the scopes must match
        :return: True when a token (and its scope, if any) matches
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        # the ALL role is always an acceptable role for any check
        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens into plain-text and hashed ones; hashed tokens
        # are recognized by the crypto backend's prefix
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted user tokens to check for authentication',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False
796 796
797 797 @property
798 798 def ip_addresses(self):
799 799 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
800 800 return [x.ip_addr for x in ret]
801 801
802 802 @property
803 803 def username_and_name(self):
804 804 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
805 805
806 806 @property
807 807 def username_or_name_or_email(self):
808 808 full_name = self.full_name if self.full_name is not ' ' else None
809 809 return self.username or full_name or self.email
810 810
811 811 @property
812 812 def full_name(self):
813 813 return '%s %s' % (self.first_name, self.last_name)
814 814
815 815 @property
816 816 def full_name_or_username(self):
817 817 return ('%s %s' % (self.first_name, self.last_name)
818 818 if (self.first_name and self.last_name) else self.username)
819 819
820 820 @property
821 821 def full_contact(self):
822 822 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
823 823
824 824 @property
825 825 def short_contact(self):
826 826 return '%s %s' % (self.first_name, self.last_name)
827 827
828 828 @property
829 829 def is_admin(self):
830 830 return self.admin
831 831
    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user

        :param kwargs: extra keyword arguments forwarded to AuthUser()
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
838 838
839 839 @hybrid_property
840 840 def user_data(self):
841 841 if not self._user_data:
842 842 return {}
843 843
844 844 try:
845 845 return json.loads(self._user_data)
846 846 except TypeError:
847 847 return {}
848 848
    @user_data.setter
    def user_data(self, val):
        """Serialize ``val`` (must be a dict) into the user_data column."""
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            # best-effort: keep the previous column value, only log failures
            log.error(traceback.format_exc())
857 857
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Fetch a user by username.

        :param case_insensitive: compare usernames lower-cased
        :param cache: route the query through the short SQL cache
        :param identity_cache: prefer the identity-map based cache; only
            consulted when ``cache`` is also set
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
880 880
    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        """
        Return the user owning a non-expired ``auth_token``, or None.

        NOTE(review): compares against the stored column value directly, so
        hashed tokens (see authenticate_by_token) will not match a
        plain-text input here -- confirm this is intended.
        """
        q = UserApiKeys.query()\
            .filter(UserApiKeys.api_key == auth_token)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))

        match = q.first()
        if match:
            return match.user
894 894
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Fetch a user by primary e-mail; when not found, fall back to the
        extra e-mail addresses stored in UserEmailMap.
        """

        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            # map row -> owning user; None when no map row was found
            ret = getattr(q.scalar(), 'user', None)

        return ret
924 924
    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        :param author: commit author line, e.g. "Joe Doe <joe@example.com>"
        :return: matching User, or None when nothing matched
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user
944 944
945 945 def update_userdata(self, **kwargs):
946 946 usr = self
947 947 old = usr.user_data
948 948 old.update(**kwargs)
949 949 usr.user_data = old
950 950 Session().add(usr)
951 951 log.debug('updated userdata with ', kwargs)
952 952
953 953 def update_lastlogin(self):
954 954 """Update user lastlogin"""
955 955 self.last_login = datetime.datetime.now()
956 956 Session().add(self)
957 957 log.debug('updated user %s lastlogin', self.username)
958 958
959 959 def update_password(self, new_password):
960 960 from rhodecode.lib.auth import get_crypt_password
961 961
962 962 self.password = get_crypt_password(new_password)
963 963 Session().add(self)
964 964
965 965 @classmethod
966 966 def get_first_super_admin(cls):
967 967 user = User.query()\
968 968 .filter(User.admin == true()) \
969 969 .order_by(User.user_id.asc()) \
970 970 .first()
971 971
972 972 if user is None:
973 973 raise Exception('FATAL: Missing administrative account!')
974 974 return user
975 975
976 976 @classmethod
977 977 def get_all_super_admins(cls, only_active=False):
978 978 """
979 979 Returns all admin accounts sorted by username
980 980 """
981 981 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
982 982 if only_active:
983 983 qry = qry.filter(User.active == true())
984 984 return qry.all()
985 985
    @classmethod
    def get_default_user(cls, cache=False, refresh=False):
        """
        Return the special DEFAULT_USER account.

        :param cache: fetch through the short SQL cache
        :param refresh: re-load the row from the database (see below)
        """
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        if refresh:
            # The default user might be based on outdated state which
            # has been loaded from the cache.
            # A call to refresh() ensures that the
            # latest state from the database is used.
            Session().refresh(user)
        return user
998 998
999 999 def _get_default_perms(self, user, suffix=''):
1000 1000 from rhodecode.model.permission import PermissionModel
1001 1001 return PermissionModel().get_default_perms(user.user_perms, suffix)
1002 1002
1003 1003 def get_default_perms(self, suffix=''):
1004 1004 return self._get_default_perms(self, suffix)
1005 1005
    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
            by a placeholder value to prevent exposing this data by accident. In case
            this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
            the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        # fixed-width star placeholder stands in for the real tokens
        auth_token_length = 40
        auth_token_replacement = '*' * auth_token_length

        extras = {
            'auth_tokens': [auth_token_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'last_activity': user.last_activity,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            data['auth_tokens'] = user.auth_tokens
        return data
1049 1049
1050 1050 def __json__(self):
1051 1051 data = {
1052 1052 'full_name': self.full_name,
1053 1053 'full_name_or_username': self.full_name_or_username,
1054 1054 'short_contact': self.short_contact,
1055 1055 'full_contact': self.full_contact,
1056 1056 }
1057 1057 data.update(self.get_api_data())
1058 1058 return data
1059 1059
1060 1060
class UserApiKeys(Base, BaseModel):
    """
    Authentication tokens of users.  Each token carries a role (what it may
    be used for) and may optionally be scoped to a single repository or
    repository group.  ``expires`` is a unix timestamp; -1 means "never".
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key', unique=True),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # NOTE(review): ROLE_PASSWORD_RESET is deliberately(?) absent from ROLES
    # -- confirm before adding it here
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # unix timestamp; -1 marks a token that never expires (see `expired`)
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        # repr shows only the role; never expose the token value
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        """Dict for the JSON renderer; exposes the raw token value."""
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """API payload; the token is obfuscated unless ``include_secrets``."""
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        """True when the expiry timestamp has passed; never for -1."""
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # translated display name; unknown roles fall back to the raw value
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        # most specific scope wins: repository, then repository group
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 chars + stars; None tokens yield None
        if self.api_key:
            return self.api_key[:4] + "****"
1161 1161
1162 1162
class UserEmailMap(Base, BaseModel):
    """
    Additional e-mail addresses of a user.  E-mails are globally unique and
    must not collide with any primary ``User.email`` (see the validator).
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one; raises AttributeError when
        # some user already has it as their primary address
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            raise AttributeError('email %s is present is user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lower-case; falsy values are stored as None
        self._email = val.lower() if val else None
1192 1192
1193 1193
class UserIpMap(Base, BaseModel):
    """
    Per-user IP whitelist entries; ``ip_addr`` may be a single address or a
    CIDR network (handled by :meth:`_get_ip_range` with strict=False).
    """
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return [network_address, broadcast_address] of ``ip_addr``."""
        # strict=False accepts host addresses with non-zero host bits
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1228 1228
1229 1229
class UserSshKeys(Base, BaseModel):
    """
    SSH public keys registered for a user; the key fingerprint is unique
    across the whole installation.
    """
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # presumably stamped when the key is used for a login -- confirm against
    # the ssh support code; nullable, so it may never have been set
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        # the raw key material (ssh_key_data) is not exposed here
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        data = self.__json__()
        return data
1264 1264
1265 1265
class UserLog(Base, BaseModel):
    """
    Audit/journal entries.  User and repository foreign keys use
    ``ondelete='SET NULL'`` so the denormalized ``username`` /
    ``repository_name`` snapshots survive deletion of the referenced rows.
    """
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # payload format versions of log entries (stored in ``version``)
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        # alias of the primary key
        return self.user_log_id

    @property
    def action_as_day(self):
        """Calendar day (datetime.date) this action happened on."""
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1314 1314
1315 1315
class UserGroup(Base, BaseModel):
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    # owner of the group (see the `user` relationship and permissions())
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # JSON data, decoded via the group_data hybrid property
    _group_data = Column("group_data", LargeBinary(), nullable=True)

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    # permissions granted on this group to other user groups
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1340 1340
1341 1341 @classmethod
1342 1342 def _load_group_data(cls, column):
1343 1343 if not column:
1344 1344 return {}
1345 1345
1346 1346 try:
1347 1347 return json.loads(column) or {}
1348 1348 except TypeError:
1349 1349 return {}
1350 1350
1351 1351 @hybrid_property
1352 1352 def description_safe(self):
1353 1353 from rhodecode.lib import helpers as h
1354 1354 return h.escape(self.user_group_description)
1355 1355
    @hybrid_property
    def group_data(self):
        # instance access: decoded dict from the JSON column
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        # SQL-expression access: the raw underlying column
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        # best-effort JSON serialization; failures are logged, old value kept
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
1370 1370
1371 1371 @classmethod
1372 1372 def _load_sync(cls, group_data):
1373 1373 if group_data:
1374 1374 return group_data.get('extern_type')
1375 1375
1376 1376 @property
1377 1377 def sync(self):
1378 1378 return self._load_sync(self.group_data)
1379 1379
1380 1380 def __unicode__(self):
1381 1381 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1382 1382 self.users_group_id,
1383 1383 self.users_group_name)
1384 1384
    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """
        Fetch a user group by name, optionally case-insensitively and/or
        through the short SQL cache.
        """
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()
1398 1398
1399 1399 @classmethod
1400 1400 def get(cls, user_group_id, cache=False):
1401 1401 if not user_group_id:
1402 1402 return
1403 1403
1404 1404 user_group = cls.query()
1405 1405 if cache:
1406 1406 user_group = user_group.options(
1407 1407 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1408 1408 return user_group.get(user_group_id)
1409 1409
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups.

        :param with_admins: include one row per super-admin user
        :param with_owner: include a row for the group owner
        :param expand_from_user_groups: also expand the member users of user
            groups holding permissions on this group
        :return: super-admin rows + owner row + direct user permission rows
            (+ expanded user-group member rows)
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                # NOTE(review): assumes owner_row is non-empty here; calling
                # with with_owner=False and with_admins=True would raise
                # IndexError — confirm all callers pass with_owner=True
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
1469 1469
1470 1470 def permission_user_groups(self, with_members=False):
1471 1471 q = UserGroupUserGroupToPerm.query()\
1472 1472 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1473 1473 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1474 1474 joinedload(UserGroupUserGroupToPerm.target_user_group),
1475 1475 joinedload(UserGroupUserGroupToPerm.permission),)
1476 1476
1477 1477 perm_rows = []
1478 1478 for _user_group in q.all():
1479 1479 entry = AttributeDict(_user_group.user_group.get_dict())
1480 1480 entry.permission = _user_group.permission.permission_name
1481 1481 if with_members:
1482 1482 entry.members = [x.user.get_dict()
1483 1483 for x in _user_group.user_group.members]
1484 1484 perm_rows.append(entry)
1485 1485
1486 1486 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1487 1487 return perm_rows
1488 1488
1489 1489 def _get_default_perms(self, user_group, suffix=''):
1490 1490 from rhodecode.model.permission import PermissionModel
1491 1491 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1492 1492
1493 1493 def get_default_perms(self, suffix=''):
1494 1494 return self._get_default_perms(self, suffix)
1495 1495
1496 1496 def get_api_data(self, with_group_members=True, include_secrets=False):
1497 1497 """
1498 1498 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1499 1499 basically forwarded.
1500 1500
1501 1501 """
1502 1502 user_group = self
1503 1503 data = {
1504 1504 'users_group_id': user_group.users_group_id,
1505 1505 'group_name': user_group.users_group_name,
1506 1506 'group_description': user_group.user_group_description,
1507 1507 'active': user_group.users_group_active,
1508 1508 'owner': user_group.user.username,
1509 1509 'sync': user_group.sync,
1510 1510 'owner_email': user_group.user.email,
1511 1511 }
1512 1512
1513 1513 if with_group_members:
1514 1514 users = []
1515 1515 for user in user_group.members:
1516 1516 user = user.user
1517 1517 users.append(user.get_api_data(include_secrets=include_secrets))
1518 1518 data['users'] = users
1519 1519
1520 1520 return data
1521 1521
1522 1522
class UserGroupMember(Base, BaseModel):
    """Association row linking a single user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    # surrogate primary key of the membership row
    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    # member user is eagerly joined since it is read in almost every use
    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are '' rather than None — presumably
        # historical; confirm before changing
        self.users_group_id = gr_id
        self.user_id = u_id
1539 1539
1540 1540
class RepositoryField(Base, BaseModel):
    """Custom extra key/value field attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Field key with the form prefix applied, e.g. ``ex_<key>``."""
        # use the PREFIX constant instead of a hard-coded 'ex_' so this
        # property always stays consistent with un_prefix_key()
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form prefix from *key* if present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for (*key*, *repo*) or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1577 1577
1578 1578
class Repository(Base, BaseModel):
    """
    A VCS repository known to RhodeCode: storage name/type, ownership,
    clone/push URIs, locking state and the related ORM relationships.
    """
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates used to render clone URLs (see clone_url())
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # lifecycle states stored in repo_state
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # sources that may create a lock (see lock()/get_locking_state())
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # exposed through the repo_name hybrid property; the hash column is kept
    # in sync by the repo_name setter
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    _repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    # remote URIs are stored encrypted at rest
    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # exposed through the landing_rev hybrid property ('<rev_type>:<rev>')
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # exposed through the locked hybrid property ('<user_id>:<time>:<reason>')
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    # exposed through the changeset_cache hybrid property
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")

    artifacts = relationship('FileStore', cascade="all")
1687 1687
1688 1688 def __unicode__(self):
1689 1689 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1690 1690 safe_unicode(self.repo_name))
1691 1691
1692 1692 @hybrid_property
1693 1693 def description_safe(self):
1694 1694 from rhodecode.lib import helpers as h
1695 1695 return h.escape(self.description)
1696 1696
1697 1697 @hybrid_property
1698 1698 def landing_rev(self):
1699 1699 # always should return [rev_type, rev]
1700 1700 if self._landing_revision:
1701 1701 _rev_info = self._landing_revision.split(':')
1702 1702 if len(_rev_info) < 2:
1703 1703 _rev_info.insert(0, 'rev')
1704 1704 return [_rev_info[0], _rev_info[1]]
1705 1705 return [None, None]
1706 1706
1707 1707 @landing_rev.setter
1708 1708 def landing_rev(self, val):
1709 1709 if ':' not in val:
1710 1710 raise ValueError('value must be delimited with `:` and consist '
1711 1711 'of <rev_type>:<rev>, got %s instead' % val)
1712 1712 self._landing_revision = val
1713 1713
1714 1714 @hybrid_property
1715 1715 def locked(self):
1716 1716 if self._locked:
1717 1717 user_id, timelocked, reason = self._locked.split(':')
1718 1718 lock_values = int(user_id), timelocked, reason
1719 1719 else:
1720 1720 lock_values = [None, None, None]
1721 1721 return lock_values
1722 1722
1723 1723 @locked.setter
1724 1724 def locked(self, val):
1725 1725 if val and isinstance(val, (list, tuple)):
1726 1726 self._locked = ':'.join(map(str, val))
1727 1727 else:
1728 1728 self._locked = None
1729 1729
    @hybrid_property
    def changeset_cache(self):
        """
        Cached metadata of the repository's last commit, decoded from JSON.

        Falls back to an :class:`EmptyCommit` payload when the cache is empty
        or unreadable; only the empty-cache branch tags the payload with this
        repo's id as ``source_repo_id``.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            dummy['source_repo_id'] = self.repo_id
            # round-trip through JSON so the fallback has the same shape
            # (e.g. serialized dates) as a real cached value
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
1745 1745
1746 1746 @changeset_cache.setter
1747 1747 def changeset_cache(self, val):
1748 1748 try:
1749 1749 self._changeset_cache = json.dumps(val)
1750 1750 except Exception:
1751 1751 log.error(traceback.format_exc())
1752 1752
    @hybrid_property
    def repo_name(self):
        # public accessor for the _repo_name column
        return self._repo_name
1756 1756
    @repo_name.setter
    def repo_name(self, value):
        self._repo_name = value
        # keep the unique sha1 column in sync; used as a stable short handle
        self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1761 1761
1762 1762 @classmethod
1763 1763 def normalize_repo_name(cls, repo_name):
1764 1764 """
1765 1765 Normalizes os specific repo_name to the format internally stored inside
1766 1766 database using URL_SEP
1767 1767
1768 1768 :param cls:
1769 1769 :param repo_name:
1770 1770 """
1771 1771 return cls.NAME_SEP.join(repo_name.split(os.sep))
1772 1772
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """
        Fetch a repository by its stored name.

        :param repo_name: repository name as stored in the database
        :param cache: use the short-lived SQL query cache
        :param identity_cache: prefer the identity-map cache; when it holds a
            value the query (and the query cache) is skipped entirely
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
1789 1789
1790 1790 @classmethod
1791 1791 def get_by_id_or_repo_name(cls, repoid):
1792 1792 if isinstance(repoid, (int, long)):
1793 1793 try:
1794 1794 repo = cls.get(repoid)
1795 1795 except ValueError:
1796 1796 repo = None
1797 1797 else:
1798 1798 repo = cls.get_by_repo_name(repoid)
1799 1799 return repo
1800 1800
1801 1801 @classmethod
1802 1802 def get_by_full_path(cls, repo_full_path):
1803 1803 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1804 1804 repo_name = cls.normalize_repo_name(repo_name)
1805 1805 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1806 1806
    @classmethod
    def get_repo_forks(cls, repo_id):
        # unexecuted query for all repositories forked from repo_id
        return cls.query().filter(Repository.fork_id == repo_id)
1810 1810
    @classmethod
    def base_path(cls):
        """
        Return the filesystem base path under which all repositories are stored.

        The value comes from the RhodeCodeUi entry keyed by NAME_SEP and is
        served through the short-lived SQL cache.
        """
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1822 1822
1823 1823 @classmethod
1824 1824 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1825 1825 case_insensitive=True, archived=False):
1826 1826 q = Repository.query()
1827 1827
1828 1828 if not archived:
1829 1829 q = q.filter(Repository.archived.isnot(true()))
1830 1830
1831 1831 if not isinstance(user_id, Optional):
1832 1832 q = q.filter(Repository.user_id == user_id)
1833 1833
1834 1834 if not isinstance(group_id, Optional):
1835 1835 q = q.filter(Repository.group_id == group_id)
1836 1836
1837 1837 if case_insensitive:
1838 1838 q = q.order_by(func.lower(Repository.repo_name))
1839 1839 else:
1840 1840 q = q.order_by(Repository.repo_name)
1841 1841
1842 1842 return q.all()
1843 1843
1844 1844 @property
1845 1845 def repo_uid(self):
1846 1846 return '_{}'.format(self.repo_id)
1847 1847
    @property
    def forks(self):
        """
        Return forks of this repo (unexecuted query, see get_repo_forks)
        """
        return Repository.get_repo_forks(self.repo_id)
1854 1854
    @property
    def parent(self):
        """
        Returns fork parent (the ``fork`` relationship; None when this
        repository is not a fork)
        """
        return self.fork
1861 1861
1862 1862 @property
1863 1863 def just_name(self):
1864 1864 return self.repo_name.split(self.NAME_SEP)[-1]
1865 1865
1866 1866 @property
1867 1867 def groups_with_parents(self):
1868 1868 groups = []
1869 1869 if self.group is None:
1870 1870 return groups
1871 1871
1872 1872 cur_gr = self.group
1873 1873 groups.insert(0, cur_gr)
1874 1874 while 1:
1875 1875 gr = getattr(cur_gr, 'parent_group', None)
1876 1876 cur_gr = cur_gr.parent_group
1877 1877 if gr is None:
1878 1878 break
1879 1879 groups.insert(0, gr)
1880 1880
1881 1881 return groups
1882 1882
    @property
    def groups_and_repo(self):
        # (chain of parent groups, this repository) — handy for breadcrumbs
        return self.groups_with_parents, self
1886 1886
    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem.

        Cached on first access (LazyProperty); the value comes from the
        RhodeCodeUi entry keyed by NAME_SEP via the short-lived SQL cache.
        """
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1897 1897
1898 1898 @property
1899 1899 def repo_full_path(self):
1900 1900 p = [self.repo_path]
1901 1901 # we need to split the name by / since this is how we store the
1902 1902 # names in the database, but that eventually needs to be converted
1903 1903 # into a valid system path
1904 1904 p += self.repo_name.split(self.NAME_SEP)
1905 1905 return os.path.join(*map(safe_unicode, p))
1906 1906
    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo

        :return: CacheKey rows in the per-repo invalidation namespace,
            ordered by key
        """
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        return CacheKey.query()\
            .filter(CacheKey.cache_args == invalidation_namespace)\
            .order_by(CacheKey.cache_key)\
            .all()
1918 1918
1919 1919 @property
1920 1920 def cached_diffs_relative_dir(self):
1921 1921 """
1922 1922 Return a relative to the repository store path of cached diffs
1923 1923 used for safe display for users, who shouldn't know the absolute store
1924 1924 path
1925 1925 """
1926 1926 return os.path.join(
1927 1927 os.path.dirname(self.repo_name),
1928 1928 self.cached_diffs_dir.split(os.path.sep)[-1])
1929 1929
1930 1930 @property
1931 1931 def cached_diffs_dir(self):
1932 1932 path = self.repo_full_path
1933 1933 return os.path.join(
1934 1934 os.path.dirname(path),
1935 1935 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1936 1936
1937 1937 def cached_diffs(self):
1938 1938 diff_cache_dir = self.cached_diffs_dir
1939 1939 if os.path.isdir(diff_cache_dir):
1940 1940 return os.listdir(diff_cache_dir)
1941 1941 return []
1942 1942
1943 1943 def shadow_repos(self):
1944 1944 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1945 1945 return [
1946 1946 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1947 1947 if x.startswith(shadow_repos_pattern)]
1948 1948
1949 1949 def get_new_name(self, repo_name):
1950 1950 """
1951 1951 returns new full repository name based on assigned group and new new
1952 1952
1953 1953 :param group_name:
1954 1954 """
1955 1955 path_prefix = self.group.full_path_splitted if self.group else []
1956 1956 return self.NAME_SEP.join(path_prefix + [repo_name])
1957 1957
    @property
    def _config(self):
        """
        Returns db based config object.

        Built fresh on every access (no caching); keeps the current SQLAlchemy
        session open (clear_session=False).
        """
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)
1965 1965
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repositories.

        :param with_admins: include one row per super-admin user
        :param with_owner: include a row for the repository owner
        :param expand_from_user_groups: also expand the member users of user
            groups holding permissions on this repository
        :return: super-admin rows + owner row + direct user permission rows
            (+ expanded user-group member rows)
        """
        _admin_perm = 'repository.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            usr.permission_id = None
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                # NOTE(review): assumes owner_row is non-empty; calling with
                # with_owner=False and with_admins=True would raise IndexError
                # — confirm all callers pass with_owner=True
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    usr.permission_id = None
                    super_admin_rows.append(usr)

        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            # also check if this permission is maybe used by branch_permissions
            if _usr.branch_perm_entry:
                usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]

            usr.permission = _usr.permission.permission_name
            usr.permission_id = _usr.repo_to_perm_id
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
2032 2032
2033 2033 def permission_user_groups(self, with_members=True):
2034 2034 q = UserGroupRepoToPerm.query()\
2035 2035 .filter(UserGroupRepoToPerm.repository == self)
2036 2036 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2037 2037 joinedload(UserGroupRepoToPerm.users_group),
2038 2038 joinedload(UserGroupRepoToPerm.permission),)
2039 2039
2040 2040 perm_rows = []
2041 2041 for _user_group in q.all():
2042 2042 entry = AttributeDict(_user_group.users_group.get_dict())
2043 2043 entry.permission = _user_group.permission.permission_name
2044 2044 if with_members:
2045 2045 entry.members = [x.user.get_dict()
2046 2046 for x in _user_group.users_group.members]
2047 2047 perm_rows.append(entry)
2048 2048
2049 2049 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2050 2050 return perm_rows
2051 2051
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # locked is (user_id, lock_time, reason) or [None, None, None]
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose custom extra fields as 'ex_<key>' entries
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2100 2100
2101 2101 @classmethod
2102 2102 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2103 2103 if not lock_time:
2104 2104 lock_time = time.time()
2105 2105 if not lock_reason:
2106 2106 lock_reason = cls.LOCK_AUTOMATIC
2107 2107 repo.locked = [user_id, lock_time, lock_reason]
2108 2108 Session().add(repo)
2109 2109 Session().commit()
2110 2110
    @classmethod
    def unlock(cls, repo):
        """Clear the lock on *repo* and commit immediately."""
        repo.locked = None
        Session().add(repo)
        Session().commit()
2116 2116
    @classmethod
    def getlock(cls, repo):
        """Return current lock info of *repo* (see the ``locked`` property)."""
        return repo.locked
2120 2120
2121 2121 def is_user_lock(self, user_id):
2122 2122 if self.lock[0]:
2123 2123 lock_user_id = safe_int(self.lock[0])
2124 2124 user_id = safe_int(user_id)
2125 2125 # both are ints, and they are equal
2126 2126 return all([lock_user_id, user_id]) and lock_user_id == user_id
2127 2127
2128 2128 return False
2129 2129
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: either 'push' or 'pull'
        :param user_id: id of the user performing the action
        :param only_when_enabled: when False, evaluate lock state even if
            locking is disabled on the repository
        :raises ValueError: on an unknown *action*
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2192 2192
2193 2193 @property
2194 2194 def last_commit_cache_update_diff(self):
2195 2195 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2196 2196
    @property
    def last_commit_change(self):
        """Datetime of the cached last commit; epoch 0 when missing/unparsable."""
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = self.changeset_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            # cached value unreadable — fall back to the epoch sentinel
            return empty_date
2206 2206
    @property
    def last_db_change(self):
        # timestamp of the last modification of this database row
        return self.updated_on
2210 2210
2211 2211 @property
2212 2212 def clone_uri_hidden(self):
2213 2213 clone_uri = self.clone_uri
2214 2214 if clone_uri:
2215 2215 import urlobject
2216 2216 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2217 2217 if url_obj.password:
2218 2218 clone_uri = url_obj.with_password('*****')
2219 2219 return clone_uri
2220 2220
2221 2221 @property
2222 2222 def push_uri_hidden(self):
2223 2223 push_uri = self.push_uri
2224 2224 if push_uri:
2225 2225 import urlobject
2226 2226 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2227 2227 if url_obj.password:
2228 2228 push_uri = url_obj.with_password('*****')
2229 2229 return push_uri
2230 2230
    def clone_url(self, **override):
        """
        Render the clone URL for this repository.

        Recognized override keys (consumed here, not forwarded):
        ``with_id`` — use the id-based URI template,
        ``uri_tmpl`` — use an explicit template,
        ``ssh`` — use the SSH template.
        All remaining keys are forwarded to :func:`get_clone_url`.
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            # prefer the per-request cached config when available
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)
            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id, **override)
2266 2266
    def set_state(self, state):
        # set repo_state to one of the STATE_* constants; only add()s to the
        # session — the caller is responsible for committing
        self.repo_state = state
        Session().add(self)
2270 2270 #==========================================================================
2271 2271 # SCM PROPERTIES
2272 2272 #==========================================================================
2273 2273
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """Return a commit by id or index, guarded through get_commit_safe."""
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2277 2277
2278 2278 def get_changeset(self, rev=None, pre_load=None):
2279 2279 warnings.warn("Use get_commit", DeprecationWarning)
2280 2280 commit_id = None
2281 2281 commit_idx = None
2282 2282 if isinstance(rev, compat.string_types):
2283 2283 commit_id = rev
2284 2284 else:
2285 2285 commit_idx = rev
2286 2286 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2287 2287 pre_load=pre_load)
2288 2288
2289 2289 def get_landing_commit(self):
2290 2290 """
2291 2291 Returns landing commit, or if that doesn't exist returns the tip
2292 2292 """
2293 2293 _rev_type, _rev = self.landing_rev
2294 2294 commit = self.get_commit(_rev)
2295 2295 if isinstance(commit, EmptyCommit):
2296 2296 return self.get_commit()
2297 2297 return commit
2298 2298
2299 2299 def update_commit_cache(self, cs_cache=None, config=None):
2300 2300 """
2301 2301 Update cache of last commit for repository, keys should be::
2302 2302
2303 2303 source_repo_id
2304 2304 short_id
2305 2305 raw_id
2306 2306 revision
2307 2307 parents
2308 2308 message
2309 2309 date
2310 2310 author
2311 2311 updated_on
2312 2312
2313 2313 """
2314 2314 from rhodecode.lib.vcs.backends.base import BaseChangeset
2315 2315 if cs_cache is None:
2316 2316 # use no-cache version here
2317 2317 scm_repo = self.scm_instance(cache=False, config=config)
2318 2318
2319 2319 empty = scm_repo is None or scm_repo.is_empty()
2320 2320 if not empty:
2321 2321 cs_cache = scm_repo.get_commit(
2322 2322 pre_load=["author", "date", "message", "parents", "branch"])
2323 2323 else:
2324 2324 cs_cache = EmptyCommit()
2325 2325
2326 2326 if isinstance(cs_cache, BaseChangeset):
2327 2327 cs_cache = cs_cache.__json__()
2328 2328
2329 2329 def is_outdated(new_cs_cache):
2330 2330 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2331 2331 new_cs_cache['revision'] != self.changeset_cache['revision']):
2332 2332 return True
2333 2333 return False
2334 2334
2335 2335 # check if we have maybe already latest cached revision
2336 2336 if is_outdated(cs_cache) or not self.changeset_cache:
2337 2337 _default = datetime.datetime.utcnow()
2338 2338 last_change = cs_cache.get('date') or _default
2339 2339 # we check if last update is newer than the new value
2340 2340 # if yes, we use the current timestamp instead. Imagine you get
2341 2341 # old commit pushed 1y ago, we'd set last update 1y to ago.
2342 2342 last_change_timestamp = datetime_to_time(last_change)
2343 2343 current_timestamp = datetime_to_time(last_change)
2344 2344 if last_change_timestamp > current_timestamp:
2345 2345 cs_cache['date'] = _default
2346 2346
2347 2347 cs_cache['updated_on'] = time.time()
2348 2348 self.changeset_cache = cs_cache
2349 2349 Session().add(self)
2350 2350 Session().commit()
2351 2351
2352 2352 log.debug('updated repo %s with new commit cache %s',
2353 2353 self.repo_name, cs_cache)
2354 2354 else:
2355 2355 cs_cache = self.changeset_cache
2356 2356 cs_cache['updated_on'] = time.time()
2357 2357 self.changeset_cache = cs_cache
2358 2358 Session().add(self)
2359 2359 Session().commit()
2360 2360
2361 2361 log.debug('Skipping update_commit_cache for repo:`%s` '
2362 2362 'commit already with latest changes', self.repo_name)
2363 2363
    @property
    def tip(self):
        """Tip (latest) commit of this repository."""
        return self.get_commit('tip')

    @property
    def author(self):
        """Author of the tip commit."""
        return self.tip.author

    @property
    def last_change(self):
        """Last change datetime as reported by the VCS backend."""
        return self.scm_instance().last_change
2375 2375
2376 2376 def get_comments(self, revisions=None):
2377 2377 """
2378 2378 Returns comments for this repository grouped by revisions
2379 2379
2380 2380 :param revisions: filter query by revisions only
2381 2381 """
2382 2382 cmts = ChangesetComment.query()\
2383 2383 .filter(ChangesetComment.repo == self)
2384 2384 if revisions:
2385 2385 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2386 2386 grouped = collections.defaultdict(list)
2387 2387 for cmt in cmts.all():
2388 2388 grouped[cmt.revision].append(cmt)
2389 2389 return grouped
2390 2390
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :returns: dict of revision -> [status, status_label, pr_id, pr_repo]
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # explicit statuses run second so they override the implicit
        # "under review" defaults set above for PR revisions
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2432 2432
2433 2433 # ==========================================================================
2434 2434 # SCM CACHE INSTANCE
2435 2435 # ==========================================================================
2436 2436
    def scm_instance(self, **kwargs):
        """
        Return a VCS backend instance for this repository.

        ``cache``/``vcs_full_cache``/``config`` keyword args control whether
        the long-term cached instance is used; passing ``config`` always
        bypasses the instance cache.
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        vcs_full_cache = kwargs.pop('vcs_full_cache', None)
        if vcs_full_cache is not None:
            # allows override global config
            full_cache = vcs_full_cache
        else:
            full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            log.debug('Initializing pure cached instance for %s', self.repo_path)
            return self._get_instance_cached()

        # cache here is sent to the "vcs server"
        return self._get_instance(cache=bool(cache), config=config)
2458 2458
    def _get_instance_cached(self):
        """
        Return a VCS instance through the long-term dogpile cache region,
        recomputing it when an invalidation signal is pending.
        """
        from rhodecode.lib import rc_cache

        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id, _cache_state_uid):
            # args are only cache-key material; the instance itself comes
            # from _get_instance with the current cache-state uid
            return self._get_instance(repo_state_uid=_cache_state_uid)

        # we must use thread scoped cache here,
        # because each thread of gevent needs it's own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.cache_data['cache_state_uid']
            args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)

            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
            return instance
2489 2489
    def _get_instance(self, cache=True, config=None, repo_state_uid=None):
        """
        Create a fresh (uncached) VCS backend instance for this repository.

        :param cache: controls the vcs.remote (vcsserver-side) cache
        :param config: optional vcs config; defaults to the repo's own config
        :param repo_state_uid: cache-state uid forwarded over the wire
        """
        log.debug('Initializing %s instance `%s` with cache flag set to: %s',
                  self.repo_type, self.repo_path, cache)
        config = config or self._config
        custom_wire = {
            'cache': cache,  # controls the vcs.remote cache
            'repo_state_uid': repo_state_uid
        }
        repo = get_vcs_instance(
            repo_path=safe_str(self.repo_full_path),
            config=config,
            with_wire=custom_wire,
            create=False,
            _vcs_alias=self.repo_type)
        if repo is not None:
            repo.count()  # cache rebuild
        return repo
2507 2507
2508 def get_shadow_repository_path(self, workspace_id):
2509 from rhodecode.lib.vcs.backends.base import BaseRepository
2510 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2511 self.repo_full_path, self.repo_id, workspace_id)
2512 return shadow_repo_path
2513
    def __json__(self):
        # minimal JSON representation consumed by the API serializer
        return {'landing_rev': self.landing_rev}

    def get_dict(self):

        # Since we transformed `repo_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `repo_name` field.

        result = super(Repository, self).get_dict()
        result['repo_name'] = result.pop('_repo_name', None)
        return result
2519 2525
2520 2526
class RepoGroup(Base, BaseModel):
    """Repository group ("folder") that can nest repositories and other groups."""
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # raw column behind the `group_name` hybrid property
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    # marks the automatically created "personal" group of a user
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    # serialized newest-commit metadata (see `changeset_cache` property)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete, delete-orphan")

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group
2553 2559
    def __unicode__(self):
        # debug-friendly representation: <RepoGroup('id:1:some/group')>
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def group_name(self):
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        # keep the searchable name hash in sync with the name
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)
2566 2572
    @hybrid_property
    def changeset_cache(self):
        """Cached metadata dict of the newest commit inside this group."""
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            dummy['source_repo_id'] = ''
            # round-trip through JSON so the fallback matches the stored format
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @changeset_cache.setter
    def changeset_cache(self, val):
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            # best-effort: keep the previous cache on serialization failure
            log.error(traceback.format_exc())
2589 2595
    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        if self.group_id and val:
            # a persisted group can never be its own parent
            assert val != self.group_id

        return val

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for rendering in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)
2604 2610
2605 2611 @classmethod
2606 2612 def hash_repo_group_name(cls, repo_group_name):
2607 2613 val = remove_formatting(repo_group_name)
2608 2614 val = safe_str(val).lower()
2609 2615 chars = []
2610 2616 for c in val:
2611 2617 if c not in string.ascii_letters:
2612 2618 c = str(ord(c))
2613 2619 chars.append(c)
2614 2620
2615 2621 return ''.join(chars)
2616 2622
    @classmethod
    def _generate_choice(cls, repo_group):
        # build a single (group_id, "a/b/c") select2 choice tuple
        from webhelpers.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)
2622 2628
2623 2629 @classmethod
2624 2630 def groups_choices(cls, groups=None, show_empty_group=True):
2625 2631 if not groups:
2626 2632 groups = cls.query().all()
2627 2633
2628 2634 repo_groups = []
2629 2635 if show_empty_group:
2630 2636 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2631 2637
2632 2638 repo_groups.extend([cls._generate_choice(x) for x in groups])
2633 2639
2634 2640 repo_groups = sorted(
2635 2641 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2636 2642 return repo_groups
2637 2643
    @classmethod
    def url_sep(cls):
        # separator used inside full group paths/URLs
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Fetch a single group by name, optionally case-insensitive/SQL-cached."""
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()
2654 2660
2655 2661 @classmethod
2656 2662 def get_user_personal_repo_group(cls, user_id):
2657 2663 user = User.get(user_id)
2658 2664 if user.username == User.DEFAULT_USER:
2659 2665 return None
2660 2666
2661 2667 return cls.query()\
2662 2668 .filter(cls.personal == true()) \
2663 2669 .filter(cls.user == user) \
2664 2670 .order_by(cls.group_id.asc()) \
2665 2671 .first()
2666 2672
2667 2673 @classmethod
2668 2674 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2669 2675 case_insensitive=True):
2670 2676 q = RepoGroup.query()
2671 2677
2672 2678 if not isinstance(user_id, Optional):
2673 2679 q = q.filter(RepoGroup.user_id == user_id)
2674 2680
2675 2681 if not isinstance(group_id, Optional):
2676 2682 q = q.filter(RepoGroup.group_parent_id == group_id)
2677 2683
2678 2684 if case_insensitive:
2679 2685 q = q.order_by(func.lower(RepoGroup.group_name))
2680 2686 else:
2681 2687 q = q.order_by(RepoGroup.group_name)
2682 2688 return q.all()
2683 2689
    @property
    def parents(self, parents_recursion_limit = 10):
        """List of all parent groups, top-most first."""
        # NOTE(review): properties receive no call arguments, so
        # `parents_recursion_limit` is effectively always 10 — confirm intent.
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinit loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups
2706 2712
    @property
    def last_commit_cache_update_diff(self):
        # seconds since the commit cache of this group was last refreshed
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @property
    def last_commit_change(self):
        """Date of the newest cached commit; epoch when missing/unparsable."""
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = self.changeset_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_db_change(self):
        # timestamp of the last modification of the DB row itself
        return self.updated_on

    @property
    def children(self):
        # direct child groups (returns a query, not a list)
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        # last path segment of the full group name
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        # repositories directly inside this group, ordered by name
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)
2746 2752
2747 2753 @property
2748 2754 def repositories_recursive_count(self):
2749 2755 cnt = self.repositories.count()
2750 2756
2751 2757 def children_count(group):
2752 2758 cnt = 0
2753 2759 for child in group.children:
2754 2760 cnt += child.repositories.count()
2755 2761 cnt += children_count(child)
2756 2762 return cnt
2757 2763
2758 2764 return cnt + children_count(self)
2759 2765
    def _recursive_objects(self, include_repos=True, include_groups=True):
        """Collect this group's subtree (groups and/or repos), depth-first."""
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    if include_groups:
                        all_.append(gr)
                    # recurse into nested groups
                    _get_members(gr)

        # the group itself is part of the result only when groups are included
        root_group = []
        if include_groups:
            root_group = [self]

        _get_members(self)
        return root_group + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all children repositories for this group
        """
        return self._recursive_objects(include_groups=False)
2798 2804
2799 2805 def get_new_name(self, group_name):
2800 2806 """
2801 2807 returns new full group name based on parent and new name
2802 2808
2803 2809 :param group_name:
2804 2810 """
2805 2811 path_prefix = (self.parent_group.full_path_splitted if
2806 2812 self.parent_group else [])
2807 2813 return RepoGroup.url_sep().join(path_prefix + [group_name])
2808 2814
    def update_commit_cache(self, config=None):
        """
        Update cache of last changeset for newest repository inside this group, keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime

        def repo_groups_and_repos():
            # map each group (and all of its parents) to the repos it contains
            all_entries = OrderedDefaultDict(list)

            def _get_members(root_gr, pos=0):

                for repo in root_gr.repositories:
                    all_entries[root_gr].append(repo)

                # fill in all parent positions
                for parent_group in root_gr.parents:
                    all_entries[parent_group].extend(all_entries[root_gr])

                children_groups = root_gr.children.all()
                if children_groups:
                    for cnt, gr in enumerate(children_groups, 1):
                        _get_members(gr, pos=pos+cnt)

            _get_members(root_gr=self)
            return all_entries

        empty_date = datetime.datetime.fromtimestamp(0)
        for repo_group, repos in repo_groups_and_repos().items():

            # pick the repo with the newest cached commit date per group
            latest_repo_cs_cache = {}
            for repo in repos:
                repo_cs_cache = repo.changeset_cache
                date_latest = latest_repo_cs_cache.get('date', empty_date)
                date_current = repo_cs_cache.get('date', empty_date)
                current_timestamp = datetime_to_time(parse_datetime(date_latest))
                if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                    latest_repo_cs_cache = repo_cs_cache
                    latest_repo_cs_cache['source_repo_id'] = repo.repo_id

            latest_repo_cs_cache['updated_on'] = time.time()
            repo_group.changeset_cache = latest_repo_cs_cache
            Session().add(repo_group)
            Session().commit()

            log.debug('updated repo group %s with new commit cache %s',
                      repo_group.group_name, latest_repo_cs_cache)
2865 2871
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repository groups

        :param with_admins: include super-admin rows
        :param with_owner: include the owner row
        :param expand_from_user_groups: also include members of permitted
            user groups as individual rows
        """
        _admin_perm = 'group.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
2925 2931
    def permission_user_groups(self, with_members=False):
        """User-group permission rows for this repo group, sorted for display."""
        q = UserGroupRepoGroupToPerm.query()\
            .filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                # expand the member users so callers can flatten them out
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows
2944 2950
2945 2951 def get_api_data(self):
2946 2952 """
2947 2953 Common function for generating api data
2948 2954
2949 2955 """
2950 2956 group = self
2951 2957 data = {
2952 2958 'group_id': group.group_id,
2953 2959 'group_name': group.group_name,
2954 2960 'group_description': group.description_safe,
2955 2961 'parent_group': group.parent_group.group_name if group.parent_group else None,
2956 2962 'repositories': [x.repo_name for x in group.repositories],
2957 2963 'owner': group.user.username,
2958 2964 }
2959 2965 return data
2960 2966
2961 2967 def get_dict(self):
2962 2968 # Since we transformed `group_name` to a hybrid property, we need to
2963 2969 # keep compatibility with the code which uses `group_name` field.
2964 2970 result = super(RepoGroup, self).get_dict()
2965 2971 result['group_name'] = result.pop('_group_name', None)
2966 2972 return result
2967 2973
2968 2974
class Permission(Base, BaseModel):
    """Catalog of all permission names known to the system."""
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    # (permission_name, translated label) pairs seeded into the DB
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3088 3094
3089 3095 def __unicode__(self):
3090 3096 return u"<%s('%s:%s')>" % (
3091 3097 self.__class__.__name__, self.permission_id, self.permission_name
3092 3098 )
3093 3099
3094 3100 @classmethod
3095 3101 def get_by_key(cls, key):
3096 3102 return cls.query().filter(cls.permission_name == key).scalar()
3097 3103
3098 3104 @classmethod
3099 3105 def get_default_repo_perms(cls, user_id, repo_id=None):
3100 3106 q = Session().query(UserRepoToPerm, Repository, Permission)\
3101 3107 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3102 3108 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3103 3109 .filter(UserRepoToPerm.user_id == user_id)
3104 3110 if repo_id:
3105 3111 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3106 3112 return q.all()
3107 3113
    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        """
        Return (UserToRepoBranchPermission, UserRepoToPerm, Permission)
        tuples for branch rules attached to *user_id*'s direct repo
        permissions, ordered by rule_order; optionally limited to one repo.
        """
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()
3122 3128
    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Return (UserGroupRepoToPerm, Repository, Permission) tuples that
        *user_id* inherits through membership in *active* user groups;
        optionally limited to a single repository.
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()
3146 3152
    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Return (UserGroupToRepoBranchPermission, UserGroupRepoToPerm,
        Permission) tuples for branch rules *user_id* inherits via active
        user groups, ordered by rule_order; optionally limited to one repo.
        """
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3169 3175
3170 3176 @classmethod
3171 3177 def get_default_group_perms(cls, user_id, repo_group_id=None):
3172 3178 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3173 3179 .join(
3174 3180 Permission,
3175 3181 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3176 3182 .join(
3177 3183 RepoGroup,
3178 3184 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3179 3185 .filter(UserRepoGroupToPerm.user_id == user_id)
3180 3186 if repo_group_id:
3181 3187 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3182 3188 return q.all()
3183 3189
    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """
        Return (UserGroupRepoGroupToPerm, RepoGroup, Permission) tuples that
        *user_id* inherits through membership in *active* user groups;
        optionally limited to a single repository group.
        """
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()
3209 3215
3210 3216 @classmethod
3211 3217 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3212 3218 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3213 3219 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3214 3220 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3215 3221 .filter(UserUserGroupToPerm.user_id == user_id)
3216 3222 if user_group_id:
3217 3223 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3218 3224 return q.all()
3219 3225
    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """
        Return (UserGroupUserGroupToPerm, UserGroup, Permission) tuples for
        permissions on *target* user groups that *user_id* inherits via
        membership in *active* user groups; optionally limited to one group.
        """
        # alias so the target group and the granting group can both join UserGroup
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
3249 3255
3250 3256
class UserRepoToPerm(Base, BaseModel):
    """Permission on a single repository granted directly to a user."""

    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # branch rules hang off this grant; cascade removes them with the grant
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """Create a new grant and add it to the session (not committed)."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
3280 3286
3281 3287
class UserUserGroupToPerm(Base, BaseModel):
    """Permission on a user group granted directly to a user."""

    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Create a new grant and add it to the session (not committed)."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
3309 3315
3310 3316
class UserToPerm(Base, BaseModel):
    """Global (application-wide) permission granted directly to a user."""

    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # lazy='joined': the permission row is fetched together with the grant
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
3327 3333
3328 3334
class UserGroupRepoToPerm(Base, BaseModel):
    """Permission on a repository granted to a whole user group."""

    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    # branch rules attached to this group grant; removed together with it
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Create a new grant and add it to the session (not committed)."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3357 3363
3358 3364
class UserGroupUserGroupToPerm(Base, BaseModel):
    """Permission on a *target* user group granted to another user group."""

    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # a group may not grant permissions on itself
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # both FKs point at users_groups, so explicit primaryjoins are required
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Create a new grant and add it to the session (not committed)."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3387 3393
3388 3394
class UserGroupToPerm(Base, BaseModel):
    """Global (application-wide) permission granted to a user group."""

    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3402 3408
3403 3409
class UserRepoGroupToPerm(Base, BaseModel):
    """Permission on a repository group granted directly to a user."""

    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Create a new grant and add it to the session (not committed)."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3428 3434
3429 3435
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """Permission on a repository group granted to a user group."""

    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike sibling tables, permission_id is not part of
        # this constraint — so only one grant per (group, repo-group) pair
        # can exist; confirm this asymmetry is intentional.
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Create a new grant and add it to the session (not committed)."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3457 3463
3458 3464
class Statistics(Base, BaseModel):
    """Pre-computed commit/language statistics, one row per repository
    (repository_id is unique)."""

    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # last revision these statistics were computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data

    repository = relationship('Repository', single_parent=True)
3473 3479
3474 3480
class UserFollowing(Base, BaseModel):
    """A user following either a repository or another user (one of the two
    ``follows_*`` foreign keys is set, both are nullable)."""

    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # two FKs into users: disambiguate the follower from the followed user
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of followings that point at *repo_id*."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3497 3503
3498 3504
class CacheKey(Base, BaseModel):
    """
    Database-backed cache invalidation record.

    Each row tracks one cache key: its namespace (``cache_args``), whether it
    is currently valid (``cache_active``) and a state uid that changes on
    every invalidation so workers can detect stale entries.
    """

    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key on cache_args: (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """Return a new state uid: deterministic (uuid5) when *based_on* is
        given, random (uuid4) otherwise."""
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        With ``delete=True`` the rows are removed; otherwise they are kept
        but deactivated and given a fresh state uid. Failures are logged
        and rolled back, never raised.
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                qry.update({"cache_active": False,
                            "cache_state_uid": cls.generate_new_state_uid()})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the row for *cache_key* or None if it does not exist."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Return {cache_key: CacheKey} for all rows in *namespace*."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
3606 3612
3607 3613
class ChangesetComment(Base, BaseModel):
    """
    A comment on a commit (``revision``) or a pull request
    (``pull_request_id``); inline comments additionally carry ``line_no``
    and ``f_path``.
    """

    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state value marking a comment as outdated by a newer PR version
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    # self-referential FK: a TODO comment can be resolved by another comment
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """Return the 1-based index of *pr_version* in *versions*, or None
        when it is not present."""
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            # NOTE: list.index raises ValueError only; IndexError is caught
            # defensively and is effectively unreachable here
            return num_versions.index(pr_version) +1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        return self.display_state == self.COMMENT_OUTDATED

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # first resolving comment, or None if the TODO is unresolved
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments always carry both a line number and a file path
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        """Return this comment's 1-based position within *versions*."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return a dict of the comment fields exposed over the API."""
        comment = self
        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3732 3738
3733 3739
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) of a revision or pull request;
    ``version`` keeps the history of status changes per revision.
    """

    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the translated label for a raw status *value*."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return a dict of the status fields exposed over the API."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3796 3802
3797 3803
class _SetState(object):
    """
    Context processor allowing changing state for sensitive operation such as
    pull request update or merge.

    Sets the requested state on enter; on clean exit restores the original
    (or explicitly given back) state. On exception the state is NOT restored
    and the exception propagates.
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        """
        :param pull_request: PullRequest instance whose state is managed
        :param pr_state: state to hold while inside the context
        :param back_state: state restored on exit; defaults to the pull
            request's current state at construction time
        """
        self._pr = pull_request
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context, setting state to: `%s`',
                  self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_val is not None:
            # NOTE: traceback.format_exc() takes a `limit` argument, not a
            # traceback object — format the full (type, value, tb) triple
            # explicitly instead of the previous format_exc(exc_tb) misuse.
            log.error(''.join(
                traceback.format_exception(exc_type, exc_val, exc_tb)))
            # returning None (falsy) re-raises the exception; state is
            # deliberately left as-is so failures are visible
            return None

        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context, setting state to: `%s`',
                  self._org_state)

    @property
    def state(self):
        # state most recently persisted via set_pr_state()
        return self._current_state

    def set_pr_state(self, pr_state):
        """Persist *pr_state* on the pull request; re-raises on DB failure."""
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
            self._current_state = pr_state
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise
3837 3843
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Mixed into both ``PullRequest`` (live object) and ``PullRequestVersion``
    (immutable snapshots); relationships/FK columns use ``declared_attr`` so
    each concrete table gets its own copy.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    # available states
    STATE_CREATING = u'creating'
    STATE_UPDATING = u'updating'
    STATE_MERGING = u'merging'
    STATE_CREATED = u'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        # author of the pull request
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        # raw `type:name:commit_id` string of the source reference
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        # validate the X:Y:Z shape before persisting
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_unicode(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        # raw `type:name:commit_id` string of the target reference
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        # validate the X:Y:Z shape before persisting
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_unicode(val)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        # JSON-encoded form of the reviewer_data column
        return json.dumps(self.reviewer_data)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # stored as a single colon-separated string; exposed as a list
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # cls.__name__ is interpolated so each concrete class joins via its
        # own source_repo_id column
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        # source reference parsed into a Reference tuple
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        # target reference parsed into a Reference tuple
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        """
        Convert a unicode (or string) to a reference object.
        If unicode evaluates to False it returns None.
        """
        if raw:
            refs = raw.split(':')
            return Reference(*refs)
        else:
            return None

    @staticmethod
    def reference_to_unicode(ref):
        """
        Convert a reference object to unicode.
        If reference is None it returns None.
        """
        if ref:
            return u':'.join(ref)
        else:
            return None

    def get_api_data(self, with_merge_state=True):
        """
        Build the dict the API layer uses to describe this pull request.

        :param with_merge_state: when True, compute the (potentially
            expensive) merge status via PullRequestModel; when False report
            it as 'not_available'.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_status = PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status[0],
                'message': safe_unicode(merge_status[1]),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data

    def set_state(self, pull_request_state, final_state=None):
        """
        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

        :param pull_request_state: state to hold inside the context
        :param final_state: optional state to restore on exit instead of
            the current one

        """

        return _SetState(self, pull_request_state, back_state=final_state)
4099 4105
4100 4106
class PullRequest(Base, _PullRequestBase):
    """
    Live pull request record; snapshots live in ``PullRequestVersion``.
    """

    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            # not yet persisted; fall back to object identity
            return '<DB:PullRequest at %#x>' % id(self)

    # child rows are deleted together with the pull request
    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus',
                            cascade="all, delete, delete-orphan")
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Wrap *pull_request_obj* in a read-only display object built from its
        API data, taking shadow/reviewer fields from *org_pull_request_obj*.

        :param internal_methods: attribute names resolved on the wrapper
            itself instead of the attrs dict (defaults to ['versions'])
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # only reached when normal attribute lookup fails
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                # closes over the wrapped (live) pull request object
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            @property
            def pull_request_version_id(self):
                # only set when wrapping a PullRequestVersion
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        # skip merge state: it is expensive and not needed for display
        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # shadow/reviewer data always comes from the original (live) PR
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        # workspace id used to name this PR's shadow repository
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """
        Return the vcs instance of this pull request's shadow repository,
        or None if the shadow repo does not exist on disk yet.
        """
        workspace_id = self.workspace_id
        # resolve the path directly on the target repo, without initializing
        # a full vcs instance just to compute the shadow repo path
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)
4217 4222
4218 4223
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request, created each time the pull request
    is updated. Review-related accessors delegate to the live parent.
    """

    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            # not yet persisted; fall back to object identity
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # delegated to the live pull request
        return self.pull_request.reviewers

    @property
    def versions(self):
        # delegated to the live pull request
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
4255 4260
4256 4261
class PullRequestReviewers(Base, BaseModel):
    """
    Association of a reviewer (user) with a pull request, including the
    reasons for assignment and any voting-rule data.
    """

    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )

    @hybrid_property
    def reasons(self):
        # normalize a NULL/empty column to an empty list
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON list of human-readable reasons why this reviewer was added
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # JSON blob describing the review rule that added this reviewer
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer

        NOTE: implicitly returns None when rule_data has no 'vote_rule' key.
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
4314 4319
4315 4320
class Notification(Base, BaseModel):
    """
    A notification broadcast to one or more users; per-user read state lives
    in ``UserNotification``.
    """

    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    # notification type values stored in the `type` column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        # users linked to this notification, ordered by user_id
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to every user in *recipients*.

        :param created_by: User instance of the sender
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        :return: the new (uncommitted) Notification instance
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4374 4379
4375 4380
class UserNotification(Base, BaseModel):
    """
    Association table linking a user to a notification, carrying the
    per-user read flag and sent timestamp.
    """

    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        # flag as read; caller is responsible for committing the session
        self.read = True
        Session().add(self)
4395 4400
4396 4401
class Gist(Base, BaseModel):
    """
    A gist: a small snippet repository stored on disk under the gist store,
    addressed by its ``gist_access_id``.
    """

    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    # visibility types
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Return the gist with access id *id_* or raise HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        # returns None when no gist matches
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        # full URL of this gist, built by the gist model
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        # create=False: the on-disk repo must already exist
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4499 4504
4500 4505
class ExternalIdentity(Base, BaseModel):
    """
    Link between a local user account and an identity at an external
    authentication provider (composite PK: external_id + local_user_id +
    provider_name).
    """

    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :param local_user_id: optional extra filter on the local user
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity query (not yet executed)
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Load and return the EE auth plugin identified by *plugin_id*."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
4566 4571
4567 4572
class Integration(Base, BaseModel):
    """
    An integration configuration, scoped either globally, to a repo group
    (recursive or child-repos-only), or to a single repository.
    """

    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # when scoped to a group (or globally): apply only to direct child repos
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    # JSON settings specific to the integration type
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        # human-readable scope: repo > repo group > root repos > global
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4609 4614
4610 4615
class RepoReviewRuleUser(Base, BaseModel):
    """
    Single user entry of a repository review rule.
    """

    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        # rule attributes attached to reviewers generated from this entry
        return {
            'mandatory': self.mandatory
        }
4627 4632
4628 4633
class RepoReviewRuleUserGroup(Base, BaseModel):
    """
    User-group entry of a repository review rule, optionally carrying a
    minimum-votes rule for the group.
    """

    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel meaning every group member must vote
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        # rule attributes attached to reviewers generated from this entry
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        # human-readable form of the vote rule
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
4656 4661
4657 4662
class RepoReviewRule(Base, BaseModel):
    """
    Default-reviewer rule for a repository.

    A rule matches pull requests by source/target branch patterns and by
    changed-file patterns (glob by default, raw regex when prefixed with
    ``re:``), and yields the users / user groups to add as reviewers.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error if the glob translates into an invalid regex;
        # NOTE(review): 're:'-prefixed values also go through glob2re here,
        # unlike matches() below -- confirm that is intentional
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        # empty/None stored value falls back to match-everything
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        :returns: True when both the branch patterns and the file pattern match
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            # '*' matches anything; 're:' prefix means raw regex, anything
            # else is a glob anchored at both ends
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            # file pattern is NOT anchored: a match anywhere in any changed
            # path counts
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for filename in files_changed:
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        # keyed by username; first match wins for users in multiple groups
        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # NOTE(review): keys are usernames but this tests the User
                # object for membership, so it is always true -- probably
                # meant `rule_user.user.username not in users`; confirm
                if rule_user.user not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """
        Return the rule-group entries whose user group contains `user_id`.
        """
        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
4819 4824
4820 4825
class ScheduleEntry(Base, BaseModel):
    """
    A named, scheduled background task entry (celery-beat style).

    Stores the schedule definition (one of ``schedule_types``), the dotted
    path of the task plus its args/kwargs, and run bookkeeping. ``task_uid``
    is a content hash kept in sync by the before_insert/before_update
    listeners defined right below this class.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # allowed values for the `schedule_type` hybrid property
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        """Validate against `schedule_types` before storing."""
        if val not in self.schedule_types:
            # BUGFIX: message previously had a typo ("on of") and swapped
            # format arguments (it printed the rejected value where the
            # allowed values belong, and the stale old type as "got").
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Build a stable sha1 uid from the task dotted path + args/kwargs;
        raw JSON values are decoded first, falling back to empty containers.
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): sorted() on a string sorts its characters; kept as-is
        # because changing it would change every stored task_uid.
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        # de-coerce mutation-tracked JSON values back to a raw JSON string
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
4939 4944
4940 4945
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep the content-hash uid in sync with the task definition on update
    target.task_uid = ScheduleEntry.get_uid(target)
4944 4949
4945 4950
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute the content-hash uid when the entry is first persisted
    target.task_uid = ScheduleEntry.get_uid(target)
4949 4954
4950 4955
class _BaseBranchPerms(BaseModel):
    """
    Shared behaviour for branch-permission rules: a glob branch pattern,
    a derived hash of it, and glob-based branch matching.
    """

    @classmethod
    def compute_hash(cls, value):
        """Hash used to deduplicate/lookup identical branch patterns."""
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # empty/None stored value falls back to match-everything
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # raises re.error if the glob translates into an invalid regex
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # set the Hash when setting the branch pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if this the branch matches entry

        :param branch: branch name for the commit
        """

        branch = branch or ''

        # empty branch name matches any rule by design
        branch_matches = True
        if branch:
            branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
            branch_matches = bool(branch_regex.search(branch))

        return branch_matches
4989 4994
4990 4995
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Ordered branch-level permission rule attached to a user-to-repo
    permission grant.
    """
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm')

    # rules are evaluated in `rule_order` sequence
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_repo_to_perm, self.branch_pattern)
5016 5021
5017 5022
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Ordered branch-level permission rule attached to a user-group-to-repo
    permission grant.
    """
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine': 'InnoDB',
         'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')

    # rules are evaluated in `rule_order` sequence
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        # BUGFIX: label previously read 'UserBranchPermission' -- copy/paste
        # from the user-level class; use the correct class label here.
        return u'<UserGroupBranchPermission(%s => %r)>' % (
            self.user_group_repo_to_perm, self.branch_pattern)
5043 5048
5044 5049
class UserBookmark(Base, BaseModel):
    """
    Positioned bookmark on a user's navigation bar, pointing at a repository,
    a repository group, or a plain redirect URL. Position is unique per user.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        """Fetch the bookmark at `position` for `user_id`, or None."""
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id):
        """All bookmarks of a user, eagerly loading targets, ordered by position."""
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .options(joinedload(UserBookmark.repository)) \
            .options(joinedload(UserBookmark.repository_group)) \
            .order_by(UserBookmark.position.asc()) \
            .all()

    def __unicode__(self):
        return u'<UserBookmark(%d @ %r)>' % (self.position, self.redirect_url)
5086 5091
5087 5092
class FileStore(Base, BaseModel):
    """
    Metadata for an uploaded file (artifact) kept in the file store.

    An entry can optionally be scoped to a user, user group, repo or repo
    group; when ``check_acl`` is set, access is validated against that scope.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', Integer(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, check_acl=True, user_id=None,
               scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None,
               scope_user_group_id=None):
        """
        Build (but do not persist) a new FileStore entry.

        :param scope_user_group_id: optional user-group scope; added (with a
            default, so existing callers are unaffected) to cover the
            `scope_user_group_id` column that was previously not settable here.
        """
        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_user_group_id = scope_user_group_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id
        return store_entry

    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        """Increment access counter and stamp last-access time for `file_uid`."""
        # use cls.query() -- no need to instantiate FileStore just to query
        cls.query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __repr__(self):
        return '<FileStore({})>'.format(self.file_store_id)
5173 5178
5174 5179
class DbMigrateVersion(Base, BaseModel):
    """
    Bookkeeping table tracking the applied database schema version.
    """
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
5193 5198
5194 5199
class DbSession(Base, BaseModel):
    """
    Server-side web session data persisted in the database.
    """
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    # NOTE(review): pickled payload -- only ever load values this app wrote
    data = Column('data', PickleType, nullable=False)
@@ -1,475 +1,473 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import base64
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.lib.utils2 import AttributeDict
27 27 from rhodecode.tests.utils import CustomTestApp
28 28
29 29 from rhodecode.lib.caching_query import FromCache
30 30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
31 31 from rhodecode.lib.middleware import simplevcs
32 32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
33 33 from rhodecode.lib.middleware.utils import scm_app_http
34 34 from rhodecode.model.db import User, _hash_key
35 35 from rhodecode.model.meta import Session
36 36 from rhodecode.tests import (
37 37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
38 38 from rhodecode.tests.lib.middleware import mock_scm_app
39 39
40 40
class StubVCSController(simplevcs.SimpleVCS):
    """
    Minimal SimpleVCS subclass for tests: fixed repo name/action, and a
    fake WSGI app that answers 200 with `stub_response_body`.
    """

    SCM = 'hg'
    stub_response_body = tuple()

    def __init__(self, *args, **kwargs):
        super(StubVCSController, self).__init__(*args, **kwargs)
        # defaults that individual tests override directly on the instance
        self._action = 'pull'
        self._is_shadow_repo_dir = True
        self._name = HG_REPO
        self.set_repo_names(None)

    @property
    def is_shadow_repo_dir(self):
        return self._is_shadow_repo_dir

    def _get_repository_name(self, environ):
        return self._name

    def _get_action(self, environ):
        return self._action

    def _create_wsgi_app(self, repo_path, repo_name, config):
        # replaces the real scm app with a canned 200 response
        def fake_app(environ, start_response):
            headers = [
                ('Http-Accept', 'application/mercurial')
            ]
            start_response('200 OK', headers)
            return self.stub_response_body
        return fake_app

    def _create_config(self, extras, repo_name, scheme='http'):
        return None
74 74
75 75
@pytest.fixture
def vcscontroller(baseapp, config_stub, request_stub):
    """
    Test app fixture: StubVCSController wrapped in HttpsFixup and
    CustomTestApp, with auth plugins registered and caches cleared.
    The raw controller stays reachable as `app.controller`.
    """
    config_stub.testing_securitypolicy()
    config_stub.include('rhodecode.authentication')
    config_stub.include('rhodecode.authentication.plugins.auth_rhodecode')
    config_stub.include('rhodecode.authentication.plugins.auth_token')

    controller = StubVCSController(
        baseapp.config.get_settings(), request_stub.registry)
    app = HttpsFixup(controller, baseapp.config.get_settings())
    app = CustomTestApp(app)

    _remove_default_user_from_query_cache()

    # Sanity checks that things are set up correctly
    app.get('/' + HG_REPO, status=200)

    app.controller = controller
    return app
95 95
96 96
def _remove_default_user_from_query_cache():
    """Invalidate the cached default-user query so tests see fresh DB state."""
    user = User.get_default_user(cache=True)
    query = Session().query(User).filter(User.username == user.username)
    query = query.options(
        FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
    query.invalidate()
    Session().expire(user)
104 104
105 105
def test_handles_exceptions_during_permissions_checks(
        vcscontroller, disable_anonymous_user):
    """An exception raised during permission checks must surface as HTTP 500."""
    user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
    # NOTE(review): base64.encodestring is py2-only (removed in py3.9);
    # a py3 port needs base64.encodebytes -- confirm target interpreter
    auth_password = base64.encodestring(user_and_pass).strip()
    extra_environ = {
        'AUTH_TYPE': 'Basic',
        'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
        'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
    }

    # Verify that things are hooked up correctly
    vcscontroller.get('/', status=200, extra_environ=extra_environ)

    # Simulate trouble during permission checks
    with mock.patch('rhodecode.model.db.User.get_by_username',
                    side_effect=Exception) as get_user:
        # Verify that a correct 500 is returned and check that the expected
        # code path was hit.
        vcscontroller.get('/', status=500, extra_environ=extra_environ)
        assert get_user.called
126 126
127 127
def test_returns_forbidden_if_no_anonymous_access(
        vcscontroller, disable_anonymous_user):
    """With anonymous access disabled, an unauthenticated request gets 401."""
    vcscontroller.get('/', status=401)
131 131
132 132
class StubFailVCSController(simplevcs.SimpleVCS):
    """Controller that always blows up inside request handling."""
    def _handle_request(self, environ, start_response):
        raise Exception("BOOM")
136 136
137 137
@pytest.fixture(scope='module')
def fail_controller(baseapp):
    """Test app wrapping StubFailVCSController, to exercise error handling."""
    controller = StubFailVCSController(
        baseapp.config.get_settings(), baseapp.config)
    controller = HttpsFixup(controller, baseapp.config.get_settings())
    controller = CustomTestApp(controller)
    return controller
145 145
146 146
def test_handles_exceptions_as_internal_server_error(fail_controller):
    """Unhandled controller exceptions are converted to HTTP 500."""
    fail_controller.get('/', status=500)
149 149
150 150
def test_provides_traceback_for_appenlight(fail_controller):
    """When an appenlight client is attached, the traceback is exposed in environ."""
    response = fail_controller.get(
        '/', status=500, extra_environ={'appenlight.client': 'fake'})
    assert 'appenlight.__traceback' in response.request.environ
155 155
156 156
def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
    """Default scm_app implementation is the bundled scm_app_http module."""
    controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
    assert controller.scm_app is scm_app_http
160 160
161 161
def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
    """`vcs.scm_app_implementation` setting swaps in a custom scm_app module."""
    config = baseapp.config.get_settings().copy()
    config['vcs.scm_app_implementation'] = (
        'rhodecode.tests.lib.middleware.mock_scm_app')
    controller = StubVCSController(config, request_stub.registry)
    assert controller.scm_app is mock_scm_app
168 168
169 169
@pytest.mark.parametrize('query_string, expected', [
    ('cmd=stub_command', True),
    ('cmd=listkeys', False),
])
def test_should_check_locking(query_string, expected):
    """Locking checks run for generic commands but are skipped for `listkeys`."""
    result = simplevcs._should_check_locking(query_string)
    assert result == expected
177 177
178 178
class TestShadowRepoRegularExpression(object):
    """URL-matching cases for SimpleVCS.shadow_repo_re."""
    pr_segment = 'pull-request'
    shadow_segment = 'repository'

    @pytest.mark.parametrize('url, expected', [
        # repo with/without groups
        ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
        ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
        ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
        ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),

        # pull request ID
        ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
        ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
        ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
        ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),

        # unicode
        (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
        (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),

        # trailing/leading slash
        ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
        ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
        ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),

        # misc
        ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
        ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
    ])
    def test_shadow_repo_regular_expression(self, url, expected):
        """The regex accepts exactly the well-formed shadow-repo URLs."""
        from rhodecode.lib.middleware.simplevcs import SimpleVCS
        url = url.format(
            pr_segment=self.pr_segment,
            shadow_segment=self.shadow_segment)
        match_obj = SimpleVCS.shadow_repo_re.match(url)
        assert (match_obj is not None) == expected
216 216
217 217
@pytest.mark.backends('git', 'hg')
class TestShadowRepoExposure(object):
    """
    Tests around how SimpleVCS exposes (or refuses to expose) pull-request
    shadow repositories over the VCS protocol.
    """

    def _stub_environ(self):
        # Minimal WSGI environ for a mercurial-style GET request; a fresh
        # dict per call so tests cannot leak state into each other.
        return {
            'HTTP_HOST': 'test.example.com',
            'HTTP_ACCEPT': 'application/mercurial',
            'REQUEST_METHOD': 'GET',
            'wsgi.url_scheme': 'http',
        }

    def test_pull_on_shadow_repo_propagates_to_wsgi_app(
            self, baseapp, request_stub):
        """
        Check that a pull action to a shadow repo is propagated to the
        underlying wsgi app.
        """
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._check_ssl = mock.Mock()
        controller.is_shadow_repo = True
        controller._action = 'pull'
        controller._is_shadow_repo_dir = True
        controller.stub_response_body = 'dummy body value'
        # Disable response caching for this request.
        controller._get_default_cache_ttl = mock.Mock(
            return_value=(False, 0))

        response = controller(self._stub_environ(), mock.Mock())
        response_body = ''.join(response)

        # Assert that we got the response from the wsgi app.
        assert response_body == controller.stub_response_body

    def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
        """
        Check that a pull action to a shadow repo whose directory does not
        exist results in a 404 instead of reaching the wsgi app.
        (The original docstring was a copy-paste of the previous test's.)
        """
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._check_ssl = mock.Mock()
        controller.is_shadow_repo = True
        controller._action = 'pull'
        controller._is_shadow_repo_dir = False
        controller.stub_response_body = 'dummy body value'

        response = controller(self._stub_environ(), mock.Mock())
        response_body = ''.join(response)

        # The missing shadow directory must produce a 404.
        assert '404 Not Found' in response_body

    def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
        """
        Check that a push action to a shadow repo is aborted.
        """
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._check_ssl = mock.Mock()
        controller.is_shadow_repo = True
        controller._action = 'push'
        controller.stub_response_body = 'dummy body value'

        response = controller(self._stub_environ(), mock.Mock())
        response_body = ''.join(response)

        assert response_body != controller.stub_response_body
        # Pushes to shadow repos are rejected with a 406.
        assert '406 Not Acceptable' in response_body

    def test_set_repo_names_no_shadow(self, baseapp, request_stub):
        """
        Check that the set_repo_names method sets all names to the one returned
        by the _get_repository_name method on a request to a non shadow repo.
        """
        environ_stub = {}
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._name = 'RepoGroup/MyRepo'
        controller.set_repo_names(environ_stub)
        assert not controller.is_shadow_repo
        assert (controller.url_repo_name ==
                controller.acl_repo_name ==
                controller.vcs_repo_name ==
                controller._get_repository_name(environ_stub))

    def test_set_repo_names_with_shadow(
            self, baseapp, pr_util, config_stub, request_stub):
        """
        Check that the set_repo_names method sets correct names on a request
        to a shadow repo.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = pr_util.create_pull_request()
        shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
            target=pull_request.target_repo.repo_name,
            pr_id=pull_request.pull_request_id,
            pr_segment=TestShadowRepoRegularExpression.pr_segment,
            shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._name = shadow_url
        controller.set_repo_names({})

        # Get file system path to shadow repo for assertions.
        workspace_id = PullRequestModel()._workspace_id(pull_request)
        vcs_repo_name = pull_request.target_repo.get_shadow_repository_path(workspace_id)

        assert controller.vcs_repo_name == vcs_repo_name
        assert controller.url_repo_name == shadow_url
        assert controller.acl_repo_name == pull_request.target_repo.repo_name
        assert controller.is_shadow_repo

    def test_set_repo_names_with_shadow_but_missing_pr(
            self, baseapp, pr_util, config_stub, request_stub):
        """
        Checks that the set_repo_names method enforces matching target repos
        and pull request IDs.
        """
        pull_request = pr_util.create_pull_request()
        shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
            target=pull_request.target_repo.repo_name,
            pr_id=999999999,  # deliberately non-existent pull request
            pr_segment=TestShadowRepoRegularExpression.pr_segment,
            shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
        controller = StubVCSController(
            baseapp.config.get_settings(), request_stub.registry)
        controller._name = shadow_url
        controller.set_repo_names({})

        # An unknown PR id must fall back to plain (non-shadow) handling.
        assert not controller.is_shadow_repo
        assert (controller.url_repo_name ==
                controller.acl_repo_name ==
                controller.vcs_repo_name)
366 364
367 365
@pytest.mark.usefixtures('baseapp')
class TestGenerateVcsResponse(object):
    """Behaviour of SimpleVCS._generate_vcs_response (streaming, caching)."""

    def test_ensures_that_start_response_is_called_early_enough(self):
        self.call_controller_with_response_body(iter(['a', 'b']))
        assert self.start_response.called

    def test_invalidates_cache_after_body_is_consumed(self):
        result = self.call_controller_with_response_body(iter(['a', 'b']))
        assert not self.was_cache_invalidated()
        list(result)  # drain the streamed response body
        assert self.was_cache_invalidated()

    def test_raises_unknown_exceptions(self):
        result = self.call_controller_with_response_body(
            self.raise_result_iter(vcs_kind='unknown'))
        with pytest.raises(Exception):
            list(result)

    def test_prepare_callback_daemon_is_called(self):
        def fake_prepare(extras, environ, action, txn_id=None):
            return DummyHooksCallbackDaemon(), extras

        patcher = mock.patch.object(
            StubVCSController, '_prepare_callback_daemon')
        with patcher as prepare_mock:
            prepare_mock.side_effect = fake_prepare
            self.call_controller_with_response_body(iter(['a', 'b']))
            assert prepare_mock.called
            assert prepare_mock.call_count == 1

    def call_controller_with_response_body(self, response_body):
        # Builds a stub controller, triggers response generation and stores
        # the controller for later assertions.
        settings = {
            'base_path': 'fake_base_path',
            'vcs.hooks.protocol': 'http',
            'vcs.hooks.direct_calls': False,
        }
        controller = StubVCSController(settings, AttributeDict())
        controller._invalidate_cache = mock.Mock()
        controller.stub_response_body = response_body
        self.start_response = mock.Mock()
        result = controller._generate_vcs_response(
            environ={}, start_response=self.start_response,
            repo_path='fake_repo_path',
            extras={}, action='push')
        self.controller = controller
        return result

    def raise_result_iter(self, vcs_kind='repo_locked'):
        """
        Simulates an exception due to a vcs raised exception if kind vcs_kind
        """
        raise self.vcs_exception(vcs_kind=vcs_kind)
        yield "never_reached"  # keeps this a generator: raise happens on first next()

    def vcs_exception(self, vcs_kind='repo_locked'):
        error = Exception('TEST_MESSAGE')
        error._vcs_kind = vcs_kind
        return error

    def was_cache_invalidated(self):
        return self.controller._invalidate_cache.called
432 430
433 431
class TestInitializeGenerator(object):
    """Tests for the simplevcs.initialize_generator decorator."""

    def test_drains_first_element(self):
        # The leading '__init__' marker must be consumed by the decorator.
        gen = self.factory(['__init__', 1, 2])
        assert list(gen) == [1, 2]

    @pytest.mark.parametrize('values', [
        [],
        [1, 2],
    ])
    def test_raises_value_error(self, values):
        # Without the '__init__' marker the decorator must refuse the stream.
        with pytest.raises(ValueError):
            self.factory(values)

    @simplevcs.initialize_generator
    def factory(self, iterable):
        for item in iterable:
            yield item
453 451
454 452
class TestPrepareHooksDaemon(object):
    def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
        """
        Ensure _prepare_callback_daemon delegates to
        simplevcs.prepare_callback_daemon with the configured hook settings
        and returns that function's results unchanged.
        """
        expected_extras = {'extra1': 'value1'}
        daemon = DummyHooksCallbackDaemon()

        controller = StubVCSController(app_settings, request_stub.registry)
        prepare_patcher = mock.patch.object(
            simplevcs, 'prepare_callback_daemon',
            return_value=(daemon, expected_extras))
        with prepare_patcher as prepare_mock:
            callback_daemon, extras = controller._prepare_callback_daemon(
                expected_extras.copy(), {}, 'push')
            prepare_mock.assert_called_once_with(
                expected_extras,
                protocol=app_settings['vcs.hooks.protocol'],
                host=app_settings['vcs.hooks.host'],
                txn_id=None,
                use_direct_calls=app_settings['vcs.hooks.direct_calls'])

        assert callback_daemon == daemon
        # Fix: the original asserted `extras == extras`, which is a tautology
        # and verifies nothing; compare against the expected value instead.
        assert extras == expected_extras
General Comments 0
You need to be logged in to leave comments. Login now