pr: Unify clone url generation of shadow repository.
Martin Bornhold
r897:b8b5fdac default
@@ -1,513 +1,510 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SimpleVCS middleware for handling protocol requests (push/clone etc.)
23 23 It's implemented with a basic auth function
24 24 """
25 25
26 26 import os
27 27 import logging
28 28 import importlib
29 29 import re
30 30 from functools import wraps
31 31
32 32 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
33 33 from webob.exc import (
34 34 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
35 35
36 36 import rhodecode
37 37 from rhodecode.authentication.base import authenticate, VCS_TYPE
38 38 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
39 39 from rhodecode.lib.base import BasicAuth, get_ip_addr, vcs_operation_context
40 40 from rhodecode.lib.exceptions import (
41 41 HTTPLockedRC, HTTPRequirementError, UserCreationError,
42 42 NotAllowedToCreateUserError)
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.middleware import appenlight
45 45 from rhodecode.lib.middleware.utils import scm_app
46 46 from rhodecode.lib.utils import (
47 47 is_valid_repo, get_rhodecode_realm, get_rhodecode_base_path)
48 48 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool
49 49 from rhodecode.lib.vcs.conf import settings as vcs_settings
50 50 from rhodecode.lib.vcs.backends import base
51 51 from rhodecode.model import meta
52 52 from rhodecode.model.db import User, Repository, PullRequest
53 53 from rhodecode.model.scm import ScmModel
54 54 from rhodecode.model.pull_request import PullRequestModel
55 55
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 def initialize_generator(factory):
61 61 """
62 62 Initializes the returned generator by draining its first element.
63 63
64 64 This can be used to give a generator an initializer, which is the code
65 65 up to the first yield statement. This decorator enforces that the first
66 66 produced element has the value ``"__init__"`` to make its special
67 67 purpose very explicit in the using code.
68 68 """
69 69
70 70 @wraps(factory)
71 71 def wrapper(*args, **kwargs):
72 72 gen = factory(*args, **kwargs)
73 73 try:
74 74 init = gen.next()
75 75 except StopIteration:
76 76 raise ValueError('Generator must yield at least one element.')
77 77 if init != "__init__":
78 78 raise ValueError('First yielded element must be "__init__".')
79 79 return gen
80 80 return wrapper
81 81
82 82
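# Illustrative sketch of a factory decorated with initialize_generator; the
# function name and chunk values below are hypothetical, only the "__init__"
# protocol comes from the decorator above.
@initialize_generator
def example_response():
    chunks = ['first chunk', 'second chunk']  # hypothetical payload
    yield "__init__"  # drained immediately by the decorator
    for chunk in chunks:
        yield chunk

gen = example_response()  # setup code up to the first yield has already run
assert list(gen) == ['first chunk', 'second chunk']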
83 83 class SimpleVCS(object):
84 84 """Common functionality for SCM HTTP handlers."""
85 85
86 86 SCM = 'unknown'
87 87
88 88 acl_repo_name = None
89 89 url_repo_name = None
90 90 vcs_repo_name = None
91 91
92 92 def __init__(self, application, config, registry):
93 93 self.registry = registry
94 94 self.application = application
95 95 self.config = config
96 96 # re-populated by specialized middleware
97 97 self.repo_vcs_config = base.Config()
98 98
99 99 # base path of repo locations
100 100 self.basepath = get_rhodecode_base_path()
101 101 # authenticate this VCS request using authfunc
102 102 auth_ret_code_detection = \
103 103 str2bool(self.config.get('auth_ret_code_detection', False))
104 104 self.authenticate = BasicAuth(
105 105 '', authenticate, registry, config.get('auth_ret_code'),
106 106 auth_ret_code_detection)
107 107 self.ip_addr = '0.0.0.0'
108 108
109 109 def set_repo_names(self, environ):
110 110 """
111 111 This will populate the attributes acl_repo_name, url_repo_name,
112 112 vcs_repo_name and pr_id on the current instance.
113 113 """
114 # TODO: martinb: Unify generation/suffix of clone url. It is currently
115 # used here in the regex, in PullRequest in get_api_data() and
116 # indirectly in routing configuration.
117 114 # TODO: martinb: Move to class or module scope.
118 115 # TODO: martinb: Check if we have to use re.UNICODE.
119 116 # TODO: martinb: Check which chars are allowed for repo/group names.
120 117 # These chars are excluded: '`?=[]\;\'"<>,/~!@#$%^&*()+{}|: '
121 118 # Code from: rhodecode/lib/utils.py:repo_name_slug()
122 119 pr_regex = re.compile(
123 120 '(?P<base_name>(?:[\w-]+)(?:/[\w-]+)*)/' # repo groups
124 121 '(?P<repo_name>[\w-]+)' # target repo name
125 122 '/pull-request/(?P<pr_id>\d+)/repository') # pr suffix
126 123
127 124 # Get url repo name from environment.
128 125 self.url_repo_name = self._get_repository_name(environ)
129 126
130 127 # Check if this is a request to a shadow repository. In case of a
131 128 # shadow repo set vcs_repo_name to the file system path pointing to the
132 129 # shadow repo. And set acl_repo_name to the pull request target repo
133 130 # because we use the target repo for permission checks. Otherwise all
134 131 # names are equal.
135 132 match = pr_regex.match(self.url_repo_name)
136 133 if match:
137 134 # Get pull request instance.
138 135 match_dict = match.groupdict()
139 136 pr_id = match_dict['pr_id']
140 137 pull_request = PullRequest.get(pr_id)
141 138
142 139 # Get file system path to shadow repository.
143 140 workspace_id = PullRequestModel()._workspace_id(pull_request)
144 141 target_vcs = pull_request.target_repo.scm_instance()
145 142 vcs_repo_name = target_vcs._get_shadow_repository_path(
146 143 workspace_id)
147 144
148 145 # Store names for later usage.
149 146 self.pr_id = pr_id
150 147 self.vcs_repo_name = vcs_repo_name
151 148 self.acl_repo_name = pull_request.target_repo.repo_name
152 149 else:
153 150 # All names are equal for normal (non shadow) repositories.
154 151 self.acl_repo_name = self.url_repo_name
155 152 self.vcs_repo_name = self.url_repo_name
156 153 self.pr_id = None
157 154
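# Illustrative, self-contained sketch of how the shadow repository URL pattern
# above is decomposed; the repository path is hypothetical.
import re

pr_regex = re.compile(
    '(?P<base_name>(?:[\w-]+)(?:/[\w-]+)*)/'
    '(?P<repo_name>[\w-]+)'
    '/pull-request/(?P<pr_id>\d+)/repository')

match = pr_regex.match(
    'mygroup/subgroup/target-repo/pull-request/42/repository')
assert match.groupdict() == {
    'base_name': 'mygroup/subgroup',
    'repo_name': 'target-repo',
    'pr_id': '42',
}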
158 155 @property
159 156 def repo_name(self):
160 157 # TODO: johbo: Remove, switch to correct repo name attribute
161 158 return self.acl_repo_name
162 159
163 160 @property
164 161 def scm_app(self):
165 162 custom_implementation = self.config.get('vcs.scm_app_implementation')
166 163 if custom_implementation and custom_implementation != 'pyro4':
167 164 log.info(
168 165 "Using custom implementation of scm_app: %s",
169 166 custom_implementation)
170 167 scm_app_impl = importlib.import_module(custom_implementation)
171 168 else:
172 169 scm_app_impl = scm_app
173 170 return scm_app_impl
174 171
175 172 def _get_by_id(self, repo_name):
176 173 """
177 174 Gets a special pattern _<ID> from the clone url and tries to replace it
178 175 with a repository name to support non-changeable _<ID> urls
179 176 """
180 177
181 178 data = repo_name.split('/')
182 179 if len(data) >= 2:
183 180 from rhodecode.model.repo import RepoModel
184 181 by_id_match = RepoModel().get_repo_by_id(repo_name)
185 182 if by_id_match:
186 183 data[1] = by_id_match.repo_name
187 184
188 185 return safe_str('/'.join(data))
189 186
190 187 def _invalidate_cache(self, repo_name):
191 188 """
192 189 Sets cache for this repository for invalidation on next access
193 190
194 191 :param repo_name: full repo name, also a cache key
195 192 """
196 193 ScmModel().mark_for_invalidation(repo_name)
197 194
198 195 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
199 196 db_repo = Repository.get_by_repo_name(repo_name)
200 197 if not db_repo:
201 198 log.debug('Repository `%s` not found inside the database.',
202 199 repo_name)
203 200 return False
204 201
205 202 if db_repo.repo_type != scm_type:
206 203 log.warning(
207 204 'Repository `%s` has incorrect scm_type, expected %s got %s',
208 205 repo_name, db_repo.repo_type, scm_type)
209 206 return False
210 207
211 208 return is_valid_repo(repo_name, base_path, expect_scm=scm_type)
212 209
213 210 def valid_and_active_user(self, user):
214 211 """
215 212 Checks if the user is not empty, and if it's actually an object,
216 213 checks whether the user is active.
217 214
218 215 :param user: user object or None
219 216 :return: boolean
220 217 """
221 218 if user is None:
222 219 return False
223 220
224 221 elif user.active:
225 222 return True
226 223
227 224 return False
228 225
229 226 def _check_permission(self, action, user, repo_name, ip_addr=None):
230 227 """
231 228 Checks permissions using action (push/pull) user and repository
232 229 name
233 230
234 231 :param action: push or pull action
235 232 :param user: user instance
236 233 :param repo_name: repository name
237 234 """
238 235 # check IP
239 236 inherit = user.inherit_default_permissions
240 237 ip_allowed = AuthUser.check_ip_allowed(user.user_id, ip_addr,
241 238 inherit_from_default=inherit)
242 239 if ip_allowed:
243 240 log.info('Access for IP:%s allowed', ip_addr)
244 241 else:
245 242 return False
246 243
247 244 if action == 'push':
248 245 if not HasPermissionAnyMiddleware('repository.write',
249 246 'repository.admin')(user,
250 247 repo_name):
251 248 return False
252 249
253 250 else:
254 251 # any other action need at least read permission
255 252 if not HasPermissionAnyMiddleware('repository.read',
256 253 'repository.write',
257 254 'repository.admin')(user,
258 255 repo_name):
259 256 return False
260 257
261 258 return True
262 259
263 260 def _check_ssl(self, environ, start_response):
264 261 """
265 262 Checks the SSL flag and returns False if SSL is required but not
266 263 present, True otherwise
267 264 """
268 265 org_proto = environ['wsgi._org_proto']
269 266 # check if we have SSL required ! if not it's a bad request !
270 267 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
271 268 if require_ssl and org_proto == 'http':
272 269 log.debug('proto is %s and SSL is required BAD REQUEST !',
273 270 org_proto)
274 271 return False
275 272 return True
276 273
277 274 def __call__(self, environ, start_response):
278 275 try:
279 276 return self._handle_request(environ, start_response)
280 277 except Exception:
281 278 log.exception("Exception while handling request")
282 279 appenlight.track_exception(environ)
283 280 return HTTPInternalServerError()(environ, start_response)
284 281 finally:
285 282 meta.Session.remove()
286 283
287 284 def _handle_request(self, environ, start_response):
288 285
289 286 if not self._check_ssl(environ, start_response):
290 287 reason = ('SSL required, while RhodeCode was unable '
291 288 'to detect this as SSL request')
292 289 log.debug('User not allowed to proceed, %s', reason)
293 290 return HTTPNotAcceptable(reason)(environ, start_response)
294 291
295 292 if not self.repo_name:
296 293 log.warning('Repository name is empty: %s', self.repo_name)
297 294 # failed to get repo name, we fail now
298 295 return HTTPNotFound()(environ, start_response)
299 296 log.debug('Extracted repo name is %s', self.repo_name)
300 297
301 298 ip_addr = get_ip_addr(environ)
302 299 username = None
303 300
304 301 # skip passing error to error controller
305 302 environ['pylons.status_code_redirect'] = True
306 303
307 304 # ======================================================================
308 305 # GET ACTION PULL or PUSH
309 306 # ======================================================================
310 307 action = self._get_action(environ)
311 308
312 309 # ======================================================================
313 310 # Check if this is a request to a shadow repository of a pull request.
314 311 # In this case only pull action is allowed.
315 312 # ======================================================================
316 313 if self.pr_id is not None and action != 'pull':
317 314 reason = 'Only pull action is allowed for shadow repositories.'
318 315 log.debug('User not allowed to proceed, %s', reason)
319 316 return HTTPNotAcceptable(reason)(environ, start_response)
320 317
321 318 # ======================================================================
322 319 # CHECK ANONYMOUS PERMISSION
323 320 # ======================================================================
324 321 if action in ['pull', 'push']:
325 322 anonymous_user = User.get_default_user()
326 323 username = anonymous_user.username
327 324 if anonymous_user.active:
328 325 # ONLY check permissions if the user is activated
329 326 anonymous_perm = self._check_permission(
330 327 action, anonymous_user, self.repo_name, ip_addr)
331 328 else:
332 329 anonymous_perm = False
333 330
334 331 if not anonymous_user.active or not anonymous_perm:
335 332 if not anonymous_user.active:
336 333 log.debug('Anonymous access is disabled, running '
337 334 'authentication')
338 335
339 336 if not anonymous_perm:
340 337 log.debug('Not enough credentials to access this '
341 338 'repository as anonymous user')
342 339
343 340 username = None
344 341 # ==============================================================
345 342 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
346 343 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
347 344 # ==============================================================
348 345
349 346 # try to auth based on environ, container auth methods
350 347 log.debug('Running PRE-AUTH for container based authentication')
351 348 pre_auth = authenticate(
352 349 '', '', environ, VCS_TYPE, registry=self.registry)
353 350 if pre_auth and pre_auth.get('username'):
354 351 username = pre_auth['username']
355 352 log.debug('PRE-AUTH got %s as username', username)
356 353
357 354 # If not authenticated by the container, running basic auth
358 355 if not username:
359 356 self.authenticate.realm = get_rhodecode_realm()
360 357
361 358 try:
362 359 result = self.authenticate(environ)
363 360 except (UserCreationError, NotAllowedToCreateUserError) as e:
364 361 log.error(e)
365 362 reason = safe_str(e)
366 363 return HTTPNotAcceptable(reason)(environ, start_response)
367 364
368 365 if isinstance(result, str):
369 366 AUTH_TYPE.update(environ, 'basic')
370 367 REMOTE_USER.update(environ, result)
371 368 username = result
372 369 else:
373 370 return result.wsgi_application(environ, start_response)
374 371
375 372 # ==============================================================
376 373 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
377 374 # ==============================================================
378 375 user = User.get_by_username(username)
379 376 if not self.valid_and_active_user(user):
380 377 return HTTPForbidden()(environ, start_response)
381 378 username = user.username
382 379 user.update_lastactivity()
383 380 meta.Session().commit()
384 381
385 382 # check user attributes for password change flag
386 383 user_obj = user
387 384 if user_obj and user_obj.username != User.DEFAULT_USER and \
388 385 user_obj.user_data.get('force_password_change'):
389 386 reason = 'password change required'
390 387 log.debug('User not allowed to authenticate, %s', reason)
391 388 return HTTPNotAcceptable(reason)(environ, start_response)
392 389
393 390 # check permissions for this repository
394 391 perm = self._check_permission(
395 392 action, user, self.repo_name, ip_addr)
396 393 if not perm:
397 394 return HTTPForbidden()(environ, start_response)
398 395
399 396 # extras are injected into UI object and later available
400 397 # in hooks executed by rhodecode
401 398 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
402 399 extras = vcs_operation_context(
403 400 environ, repo_name=self.repo_name, username=username,
404 401 action=action, scm=self.SCM,
405 402 check_locking=check_locking)
406 403
407 404 # ======================================================================
408 405 # REQUEST HANDLING
409 406 # ======================================================================
410 407 str_repo_name = safe_str(self.repo_name)
411 408 repo_path = os.path.join(
412 409 safe_str(self.basepath), safe_str(self.vcs_repo_name))
413 410 log.debug('Repository path is %s', repo_path)
414 411
415 412 fix_PATH()
416 413
417 414 log.info(
418 415 '%s action on %s repo "%s" by "%s" from %s',
419 416 action, self.SCM, str_repo_name, safe_str(username), ip_addr)
420 417
421 418 return self._generate_vcs_response(
422 419 environ, start_response, repo_path, self.url_repo_name, extras, action)
423 420
424 421 @initialize_generator
425 422 def _generate_vcs_response(
426 423 self, environ, start_response, repo_path, repo_name, extras,
427 424 action):
428 425 """
429 426 Returns a generator for the response content.
430 427
431 428 This method is implemented as a generator, so that it can trigger
432 429 the cache validation after all content has been sent back to the client. It
433 430 also handles the locking exceptions which will be triggered when
434 431 the first chunk is produced by the underlying WSGI application.
435 432 """
436 433 callback_daemon, extras = self._prepare_callback_daemon(extras)
437 434 config = self._create_config(extras, self.acl_repo_name)
438 435 log.debug('HOOKS extras is %s', extras)
439 436 app = self._create_wsgi_app(repo_path, repo_name, config)
440 437
441 438 try:
442 439 with callback_daemon:
443 440 try:
444 441 response = app(environ, start_response)
445 442 finally:
446 443 # This statement works together with the decorator
447 444 # "initialize_generator" above. The decorator ensures that
448 445 # we hit the first yield statement before the generator is
449 446 # returned back to the WSGI server. This is needed to
450 447 # ensure that the call to "app" above triggers the
451 448 # needed callback to "start_response" before the
452 449 # generator is actually used.
453 450 yield "__init__"
454 451
455 452 for chunk in response:
456 453 yield chunk
457 454 except Exception as exc:
458 455 # TODO: johbo: Improve "translating" back the exception.
459 456 if getattr(exc, '_vcs_kind', None) == 'repo_locked':
460 457 exc = HTTPLockedRC(*exc.args)
461 458 _code = rhodecode.CONFIG.get('lock_ret_code')
462 459 log.debug('Repository LOCKED ret code %s!', (_code,))
463 460 elif getattr(exc, '_vcs_kind', None) == 'requirement':
464 461 log.debug(
465 462 'Repository requires features unknown to this Mercurial')
466 463 exc = HTTPRequirementError(*exc.args)
467 464 else:
468 465 raise
469 466
470 467 for chunk in exc(environ, start_response):
471 468 yield chunk
472 469 finally:
473 470 # invalidate cache on push
474 471 try:
475 472 if action == 'push':
476 473 self._invalidate_cache(repo_name)
477 474 finally:
478 475 meta.Session.remove()
479 476
480 477 def _get_repository_name(self, environ):
481 478 """Get repository name out of the environmnent
482 479
483 480 :param environ: WSGI environment
484 481 """
485 482 raise NotImplementedError()
486 483
487 484 def _get_action(self, environ):
488 485 """Map request commands into a pull or push command.
489 486
490 487 :param environ: WSGI environment
491 488 """
492 489 raise NotImplementedError()
493 490
494 491 def _create_wsgi_app(self, repo_path, repo_name, config):
495 492 """Return the WSGI app that will finally handle the request."""
496 493 raise NotImplementedError()
497 494
498 495 def _create_config(self, extras, repo_name):
499 496 """Create a Pyro safe config representation."""
500 497 raise NotImplementedError()
501 498
502 499 def _prepare_callback_daemon(self, extras):
503 500 return prepare_callback_daemon(
504 501 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
505 502 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
506 503
507 504
508 505 def _should_check_locking(query_string):
509 506 # this is kind of hacky, but due to how mercurial handles client-server
510 507 # communication, the server sees bookmarks, phases and obsolescence
511 508 # markers of a commit in separate transactions, and we don't want to
512 509 # check locking on those
513 510 return query_string not in ['cmd=listkeys']
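# Illustrative calls to the helper above; the query strings are examples,
# not an exhaustive list of Mercurial commands.
assert _should_check_locking('cmd=unbundle') is True   # push payload, check locking
assert _should_check_locking('cmd=listkeys') is False  # bookmark/phase lookup, skip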
@@ -1,3658 +1,3658 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37
38 38 from sqlalchemy import *
39 39 from sqlalchemy.ext.declarative import declared_attr
40 40 from sqlalchemy.ext.hybrid import hybrid_property
41 41 from sqlalchemy.orm import (
42 42 relationship, joinedload, class_mapper, validates, aliased)
43 43 from sqlalchemy.sql.expression import true
44 44 from beaker.cache import cache_region
45 45 from webob.exc import HTTPNotFound
46 46 from zope.cachedescriptors.property import Lazy as LazyProperty
47 47
48 48 from pylons import url
49 49 from pylons.i18n.translation import lazy_ugettext as _
50 50
51 51 from rhodecode.lib.vcs import get_vcs_instance
52 52 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
53 53 from rhodecode.lib.utils2 import (
54 54 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
55 55 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
56 56 glob2re)
57 57 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType
58 58 from rhodecode.lib.ext_json import json
59 59 from rhodecode.lib.caching_query import FromCache
60 60 from rhodecode.lib.encrypt import AESCipher
61 61
62 62 from rhodecode.model.meta import Base, Session
63 63
64 64 URL_SEP = '/'
65 65 log = logging.getLogger(__name__)
66 66
67 67 # =============================================================================
68 68 # BASE CLASSES
69 69 # =============================================================================
70 70
71 71 # this is propagated from the .ini file rhodecode.encrypted_values.secret or
72 72 # beaker.session.secret if the first is not set,
73 73 # and initialized at environment.py
74 74 ENCRYPTION_KEY = None
75 75
76 76 # used to sort permissions by types, '#' used here is not allowed to be in
77 77 # usernames, and it's very early in sorted string.printable table.
78 78 PERMISSION_TYPE_SORT = {
79 79 'admin': '####',
80 80 'write': '###',
81 81 'read': '##',
82 82 'none': '#',
83 83 }
84 84
85 85
86 86 def display_sort(obj):
87 87 """
88 88 Sort function used to sort permissions in .permissions() function of
89 89 Repository, RepoGroup, UserGroup. Also it put the default user in front
90 90 of all other resources
91 91 """
92 92
93 93 if obj.username == User.DEFAULT_USER:
94 94 return '#####'
95 95 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
96 96 return prefix + obj.username
97 97
98 98
99 99 def _hash_key(k):
100 100 return md5_safe(k)
101 101
102 102
103 103 class EncryptedTextValue(TypeDecorator):
104 104 """
105 105 Special column for encrypted long text data, use like::
106 106
107 107 value = Column("encrypted_value", EncryptedValue(), nullable=False)
108 108
109 109 This column is intelligent so if the value is in unencrypted form it returns
110 110 the unencrypted form, but on save it always encrypts
111 111 """
112 112 impl = Text
113 113
114 114 def process_bind_param(self, value, dialect):
115 115 if not value:
116 116 return value
117 117 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
118 118 # protect against double encrypting if someone manually starts
119 119 # doing so
120 120 raise ValueError('value needs to be in unencrypted format, ie. '
121 121 'not starting with enc$aes')
122 122 return 'enc$aes_hmac$%s' % AESCipher(
123 123 ENCRYPTION_KEY, hmac=True).encrypt(value)
124 124
125 125 def process_result_value(self, value, dialect):
126 126 import rhodecode
127 127
128 128 if not value:
129 129 return value
130 130
131 131 parts = value.split('$', 3)
132 132 if not len(parts) == 3:
133 133 # probably not encrypted values
134 134 return value
135 135 else:
136 136 if parts[0] != 'enc':
137 137 # parts ok but without our header ?
138 138 return value
139 139 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
140 140 'rhodecode.encrypted_values.strict') or True)
141 141 # at that stage we know it's our encryption
142 142 if parts[1] == 'aes':
143 143 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
144 144 elif parts[1] == 'aes_hmac':
145 145 decrypted_data = AESCipher(
146 146 ENCRYPTION_KEY, hmac=True,
147 147 strict_verification=enc_strict_mode).decrypt(parts[2])
148 148 else:
149 149 raise ValueError(
150 150 'Encryption type part is wrong, must be `aes` '
151 151 'or `aes_hmac`, got `%s` instead' % (parts[1]))
152 152 return decrypted_data
153 153
154 154
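# Hedged round-trip sketch for this column type; it assumes ENCRYPTION_KEY has
# been initialized at application start-up, and the value below is made up.
cipher_type = EncryptedTextValue()
stored = cipher_type.process_bind_param('my-secret', None)  # 'enc$aes_hmac$<ciphertext>'
plain = cipher_type.process_result_value(stored, None)      # decrypts back to 'my-secret'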
155 155 class BaseModel(object):
156 156 """
157 157 Base Model for all classes
158 158 """
159 159
160 160 @classmethod
161 161 def _get_keys(cls):
162 162 """return column names for this model """
163 163 return class_mapper(cls).c.keys()
164 164
165 165 def get_dict(self):
166 166 """
167 167 return dict with keys and values corresponding
168 168 to this model data """
169 169
170 170 d = {}
171 171 for k in self._get_keys():
172 172 d[k] = getattr(self, k)
173 173
174 174 # also use __json__() if present to get additional fields
175 175 _json_attr = getattr(self, '__json__', None)
176 176 if _json_attr:
177 177 # update with attributes from __json__
178 178 if callable(_json_attr):
179 179 _json_attr = _json_attr()
180 180 for k, val in _json_attr.iteritems():
181 181 d[k] = val
182 182 return d
183 183
184 184 def get_appstruct(self):
185 185 """return list with keys and values tuples corresponding
186 186 to this model data """
187 187
188 188 l = []
189 189 for k in self._get_keys():
190 190 l.append((k, getattr(self, k),))
191 191 return l
192 192
193 193 def populate_obj(self, populate_dict):
194 194 """populate model with data from given populate_dict"""
195 195
196 196 for k in self._get_keys():
197 197 if k in populate_dict:
198 198 setattr(self, k, populate_dict[k])
199 199
200 200 @classmethod
201 201 def query(cls):
202 202 return Session().query(cls)
203 203
204 204 @classmethod
205 205 def get(cls, id_):
206 206 if id_:
207 207 return cls.query().get(id_)
208 208
209 209 @classmethod
210 210 def get_or_404(cls, id_):
211 211 try:
212 212 id_ = int(id_)
213 213 except (TypeError, ValueError):
214 214 raise HTTPNotFound
215 215
216 216 res = cls.query().get(id_)
217 217 if not res:
218 218 raise HTTPNotFound
219 219 return res
220 220
221 221 @classmethod
222 222 def getAll(cls):
223 223 # deprecated and left for backward compatibility
224 224 return cls.get_all()
225 225
226 226 @classmethod
227 227 def get_all(cls):
228 228 return cls.query().all()
229 229
230 230 @classmethod
231 231 def delete(cls, id_):
232 232 obj = cls.query().get(id_)
233 233 Session().delete(obj)
234 234
235 235 @classmethod
236 236 def identity_cache(cls, session, attr_name, value):
237 237 exist_in_session = []
238 238 for (item_cls, pkey), instance in session.identity_map.items():
239 239 if cls == item_cls and getattr(instance, attr_name) == value:
240 240 exist_in_session.append(instance)
241 241 if exist_in_session:
242 242 if len(exist_in_session) == 1:
243 243 return exist_in_session[0]
244 244 log.exception(
245 245 'multiple objects with attr %s and '
246 246 'value %s found with same name: %r',
247 247 attr_name, value, exist_in_session)
248 248
249 249 def __repr__(self):
250 250 if hasattr(self, '__unicode__'):
251 251 # python repr needs to return str
252 252 try:
253 253 return safe_str(self.__unicode__())
254 254 except UnicodeDecodeError:
255 255 pass
256 256 return '<DB:%s>' % (self.__class__.__name__)
257 257
258 258
259 259 class RhodeCodeSetting(Base, BaseModel):
260 260 __tablename__ = 'rhodecode_settings'
261 261 __table_args__ = (
262 262 UniqueConstraint('app_settings_name'),
263 263 {'extend_existing': True, 'mysql_engine': 'InnoDB',
264 264 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
265 265 )
266 266
267 267 SETTINGS_TYPES = {
268 268 'str': safe_str,
269 269 'int': safe_int,
270 270 'unicode': safe_unicode,
271 271 'bool': str2bool,
272 272 'list': functools.partial(aslist, sep=',')
273 273 }
274 274 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
275 275 GLOBAL_CONF_KEY = 'app_settings'
276 276
277 277 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
278 278 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
279 279 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
280 280 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
281 281
282 282 def __init__(self, key='', val='', type='unicode'):
283 283 self.app_settings_name = key
284 284 self.app_settings_type = type
285 285 self.app_settings_value = val
286 286
287 287 @validates('_app_settings_value')
288 288 def validate_settings_value(self, key, val):
289 289 assert type(val) == unicode
290 290 return val
291 291
292 292 @hybrid_property
293 293 def app_settings_value(self):
294 294 v = self._app_settings_value
295 295 _type = self.app_settings_type
296 296 if _type:
297 297 _type = self.app_settings_type.split('.')[0]
298 298 # decode the encrypted value
299 299 if 'encrypted' in self.app_settings_type:
300 300 cipher = EncryptedTextValue()
301 301 v = safe_unicode(cipher.process_result_value(v, None))
302 302
303 303 converter = self.SETTINGS_TYPES.get(_type) or \
304 304 self.SETTINGS_TYPES['unicode']
305 305 return converter(v)
306 306
307 307 @app_settings_value.setter
308 308 def app_settings_value(self, val):
309 309 """
310 310 Setter that will always make sure we use unicode in app_settings_value
311 311
312 312 :param val:
313 313 """
314 314 val = safe_unicode(val)
315 315 # encode the encrypted value
316 316 if 'encrypted' in self.app_settings_type:
317 317 cipher = EncryptedTextValue()
318 318 val = safe_unicode(cipher.process_bind_param(val, None))
319 319 self._app_settings_value = val
320 320
321 321 @hybrid_property
322 322 def app_settings_type(self):
323 323 return self._app_settings_type
324 324
325 325 @app_settings_type.setter
326 326 def app_settings_type(self, val):
327 327 if val.split('.')[0] not in self.SETTINGS_TYPES:
328 328 raise Exception('type must be one of %s got %s'
329 329 % (self.SETTINGS_TYPES.keys(), val))
330 330 self._app_settings_type = val
331 331
332 332 def __unicode__(self):
333 333 return u"<%s('%s:%s[%s]')>" % (
334 334 self.__class__.__name__,
335 335 self.app_settings_name, self.app_settings_value,
336 336 self.app_settings_type
337 337 )
338 338
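# Small usage sketch of the typed settings above; the setting name and value
# are illustrative. Conversion happens through SETTINGS_TYPES on read.
setting = RhodeCodeSetting('example_flag', 'True', 'bool')
assert setting.app_settings_value is True  # str2bool applied by the getter
assert setting.app_settings_type == 'bool'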
339 339
340 340 class RhodeCodeUi(Base, BaseModel):
341 341 __tablename__ = 'rhodecode_ui'
342 342 __table_args__ = (
343 343 UniqueConstraint('ui_key'),
344 344 {'extend_existing': True, 'mysql_engine': 'InnoDB',
345 345 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
346 346 )
347 347
348 348 HOOK_REPO_SIZE = 'changegroup.repo_size'
349 349 # HG
350 350 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
351 351 HOOK_PULL = 'outgoing.pull_logger'
352 352 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
353 353 HOOK_PUSH = 'changegroup.push_logger'
354 354
355 355 # TODO: johbo: Unify way how hooks are configured for git and hg,
356 356 # git part is currently hardcoded.
357 357
358 358 # SVN PATTERNS
359 359 SVN_BRANCH_ID = 'vcs_svn_branch'
360 360 SVN_TAG_ID = 'vcs_svn_tag'
361 361
362 362 ui_id = Column(
363 363 "ui_id", Integer(), nullable=False, unique=True, default=None,
364 364 primary_key=True)
365 365 ui_section = Column(
366 366 "ui_section", String(255), nullable=True, unique=None, default=None)
367 367 ui_key = Column(
368 368 "ui_key", String(255), nullable=True, unique=None, default=None)
369 369 ui_value = Column(
370 370 "ui_value", String(255), nullable=True, unique=None, default=None)
371 371 ui_active = Column(
372 372 "ui_active", Boolean(), nullable=True, unique=None, default=True)
373 373
374 374 def __repr__(self):
375 375 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
376 376 self.ui_key, self.ui_value)
377 377
378 378
379 379 class RepoRhodeCodeSetting(Base, BaseModel):
380 380 __tablename__ = 'repo_rhodecode_settings'
381 381 __table_args__ = (
382 382 UniqueConstraint(
383 383 'app_settings_name', 'repository_id',
384 384 name='uq_repo_rhodecode_setting_name_repo_id'),
385 385 {'extend_existing': True, 'mysql_engine': 'InnoDB',
386 386 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
387 387 )
388 388
389 389 repository_id = Column(
390 390 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
391 391 nullable=False)
392 392 app_settings_id = Column(
393 393 "app_settings_id", Integer(), nullable=False, unique=True,
394 394 default=None, primary_key=True)
395 395 app_settings_name = Column(
396 396 "app_settings_name", String(255), nullable=True, unique=None,
397 397 default=None)
398 398 _app_settings_value = Column(
399 399 "app_settings_value", String(4096), nullable=True, unique=None,
400 400 default=None)
401 401 _app_settings_type = Column(
402 402 "app_settings_type", String(255), nullable=True, unique=None,
403 403 default=None)
404 404
405 405 repository = relationship('Repository')
406 406
407 407 def __init__(self, repository_id, key='', val='', type='unicode'):
408 408 self.repository_id = repository_id
409 409 self.app_settings_name = key
410 410 self.app_settings_type = type
411 411 self.app_settings_value = val
412 412
413 413 @validates('_app_settings_value')
414 414 def validate_settings_value(self, key, val):
415 415 assert type(val) == unicode
416 416 return val
417 417
418 418 @hybrid_property
419 419 def app_settings_value(self):
420 420 v = self._app_settings_value
421 421 type_ = self.app_settings_type
422 422 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
423 423 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
424 424 return converter(v)
425 425
426 426 @app_settings_value.setter
427 427 def app_settings_value(self, val):
428 428 """
429 429 Setter that will always make sure we use unicode in app_settings_value
430 430
431 431 :param val:
432 432 """
433 433 self._app_settings_value = safe_unicode(val)
434 434
435 435 @hybrid_property
436 436 def app_settings_type(self):
437 437 return self._app_settings_type
438 438
439 439 @app_settings_type.setter
440 440 def app_settings_type(self, val):
441 441 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
442 442 if val not in SETTINGS_TYPES:
443 443 raise Exception('type must be one of %s got %s'
444 444 % (SETTINGS_TYPES.keys(), val))
445 445 self._app_settings_type = val
446 446
447 447 def __unicode__(self):
448 448 return u"<%s('%s:%s:%s[%s]')>" % (
449 449 self.__class__.__name__, self.repository.repo_name,
450 450 self.app_settings_name, self.app_settings_value,
451 451 self.app_settings_type
452 452 )
453 453
454 454
455 455 class RepoRhodeCodeUi(Base, BaseModel):
456 456 __tablename__ = 'repo_rhodecode_ui'
457 457 __table_args__ = (
458 458 UniqueConstraint(
459 459 'repository_id', 'ui_section', 'ui_key',
460 460 name='uq_repo_rhodecode_ui_repository_id_section_key'),
461 461 {'extend_existing': True, 'mysql_engine': 'InnoDB',
462 462 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
463 463 )
464 464
465 465 repository_id = Column(
466 466 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
467 467 nullable=False)
468 468 ui_id = Column(
469 469 "ui_id", Integer(), nullable=False, unique=True, default=None,
470 470 primary_key=True)
471 471 ui_section = Column(
472 472 "ui_section", String(255), nullable=True, unique=None, default=None)
473 473 ui_key = Column(
474 474 "ui_key", String(255), nullable=True, unique=None, default=None)
475 475 ui_value = Column(
476 476 "ui_value", String(255), nullable=True, unique=None, default=None)
477 477 ui_active = Column(
478 478 "ui_active", Boolean(), nullable=True, unique=None, default=True)
479 479
480 480 repository = relationship('Repository')
481 481
482 482 def __repr__(self):
483 483 return '<%s[%s:%s]%s=>%s]>' % (
484 484 self.__class__.__name__, self.repository.repo_name,
485 485 self.ui_section, self.ui_key, self.ui_value)
486 486
487 487
488 488 class User(Base, BaseModel):
489 489 __tablename__ = 'users'
490 490 __table_args__ = (
491 491 UniqueConstraint('username'), UniqueConstraint('email'),
492 492 Index('u_username_idx', 'username'),
493 493 Index('u_email_idx', 'email'),
494 494 {'extend_existing': True, 'mysql_engine': 'InnoDB',
495 495 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
496 496 )
497 497 DEFAULT_USER = 'default'
498 498 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
499 499 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
500 500
501 501 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
502 502 username = Column("username", String(255), nullable=True, unique=None, default=None)
503 503 password = Column("password", String(255), nullable=True, unique=None, default=None)
504 504 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
505 505 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
506 506 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
507 507 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
508 508 _email = Column("email", String(255), nullable=True, unique=None, default=None)
509 509 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
510 510 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
511 511 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
512 512 api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
513 513 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
514 514 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
515 515 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
516 516
517 517 user_log = relationship('UserLog')
518 518 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
519 519
520 520 repositories = relationship('Repository')
521 521 repository_groups = relationship('RepoGroup')
522 522 user_groups = relationship('UserGroup')
523 523
524 524 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
525 525 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
526 526
527 527 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
528 528 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
529 529 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
530 530
531 531 group_member = relationship('UserGroupMember', cascade='all')
532 532
533 533 notifications = relationship('UserNotification', cascade='all')
534 534 # notifications assigned to this user
535 535 user_created_notifications = relationship('Notification', cascade='all')
536 536 # comments created by this user
537 537 user_comments = relationship('ChangesetComment', cascade='all')
538 538 # user profile extra info
539 539 user_emails = relationship('UserEmailMap', cascade='all')
540 540 user_ip_map = relationship('UserIpMap', cascade='all')
541 541 user_auth_tokens = relationship('UserApiKeys', cascade='all')
542 542 # gists
543 543 user_gists = relationship('Gist', cascade='all')
544 544 # user pull requests
545 545 user_pull_requests = relationship('PullRequest', cascade='all')
546 546 # external identities
547 547 extenal_identities = relationship(
548 548 'ExternalIdentity',
549 549 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
550 550 cascade='all')
551 551
552 552 def __unicode__(self):
553 553 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
554 554 self.user_id, self.username)
555 555
556 556 @hybrid_property
557 557 def email(self):
558 558 return self._email
559 559
560 560 @email.setter
561 561 def email(self, val):
562 562 self._email = val.lower() if val else None
563 563
564 564 @property
565 565 def firstname(self):
566 566 # alias for future
567 567 return self.name
568 568
569 569 @property
570 570 def emails(self):
571 571 other = UserEmailMap.query().filter(UserEmailMap.user==self).all()
572 572 return [self.email] + [x.email for x in other]
573 573
574 574 @property
575 575 def auth_tokens(self):
576 576 return [self.api_key] + [x.api_key for x in self.extra_auth_tokens]
577 577
578 578 @property
579 579 def extra_auth_tokens(self):
580 580 return UserApiKeys.query().filter(UserApiKeys.user == self).all()
581 581
582 582 @property
583 583 def feed_token(self):
584 584 feed_tokens = UserApiKeys.query()\
585 585 .filter(UserApiKeys.user == self)\
586 586 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)\
587 587 .all()
588 588 if feed_tokens:
589 589 return feed_tokens[0].api_key
590 590 else:
591 591 # use the main token so we don't end up with nothing...
592 592 return self.api_key
593 593
594 594 @classmethod
595 595 def extra_valid_auth_tokens(cls, user, role=None):
596 596 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
597 597 .filter(or_(UserApiKeys.expires == -1,
598 598 UserApiKeys.expires >= time.time()))
599 599 if role:
600 600 tokens = tokens.filter(or_(UserApiKeys.role == role,
601 601 UserApiKeys.role == UserApiKeys.ROLE_ALL))
602 602 return tokens.all()
603 603
604 604 @property
605 605 def ip_addresses(self):
606 606 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
607 607 return [x.ip_addr for x in ret]
608 608
609 609 @property
610 610 def username_and_name(self):
611 611 return '%s (%s %s)' % (self.username, self.firstname, self.lastname)
612 612
613 613 @property
614 614 def username_or_name_or_email(self):
615 615 full_name = self.full_name if self.full_name != ' ' else None
616 616 return self.username or full_name or self.email
617 617
618 618 @property
619 619 def full_name(self):
620 620 return '%s %s' % (self.firstname, self.lastname)
621 621
622 622 @property
623 623 def full_name_or_username(self):
624 624 return ('%s %s' % (self.firstname, self.lastname)
625 625 if (self.firstname and self.lastname) else self.username)
626 626
627 627 @property
628 628 def full_contact(self):
629 629 return '%s %s <%s>' % (self.firstname, self.lastname, self.email)
630 630
631 631 @property
632 632 def short_contact(self):
633 633 return '%s %s' % (self.firstname, self.lastname)
634 634
635 635 @property
636 636 def is_admin(self):
637 637 return self.admin
638 638
639 639 @property
640 640 def AuthUser(self):
641 641 """
642 642 Returns instance of AuthUser for this user
643 643 """
644 644 from rhodecode.lib.auth import AuthUser
645 645 return AuthUser(user_id=self.user_id, api_key=self.api_key,
646 646 username=self.username)
647 647
648 648 @hybrid_property
649 649 def user_data(self):
650 650 if not self._user_data:
651 651 return {}
652 652
653 653 try:
654 654 return json.loads(self._user_data)
655 655 except TypeError:
656 656 return {}
657 657
658 658 @user_data.setter
659 659 def user_data(self, val):
660 660 if not isinstance(val, dict):
661 661 raise Exception('user_data must be dict, got %s' % type(val))
662 662 try:
663 663 self._user_data = json.dumps(val)
664 664 except Exception:
665 665 log.error(traceback.format_exc())
666 666
667 667 @classmethod
668 668 def get_by_username(cls, username, case_insensitive=False,
669 669 cache=False, identity_cache=False):
670 670 session = Session()
671 671
672 672 if case_insensitive:
673 673 q = cls.query().filter(
674 674 func.lower(cls.username) == func.lower(username))
675 675 else:
676 676 q = cls.query().filter(cls.username == username)
677 677
678 678 if cache:
679 679 if identity_cache:
680 680 val = cls.identity_cache(session, 'username', username)
681 681 if val:
682 682 return val
683 683 else:
684 684 q = q.options(
685 685 FromCache("sql_cache_short",
686 686 "get_user_by_name_%s" % _hash_key(username)))
687 687
688 688 return q.scalar()
689 689
690 690 @classmethod
691 691 def get_by_auth_token(cls, auth_token, cache=False, fallback=True):
692 692 q = cls.query().filter(cls.api_key == auth_token)
693 693
694 694 if cache:
695 695 q = q.options(FromCache("sql_cache_short",
696 696 "get_auth_token_%s" % auth_token))
697 697 res = q.scalar()
698 698
699 699 if fallback and not res:
700 700 #fallback to additional keys
701 701 _res = UserApiKeys.query()\
702 702 .filter(UserApiKeys.api_key == auth_token)\
703 703 .filter(or_(UserApiKeys.expires == -1,
704 704 UserApiKeys.expires >= time.time()))\
705 705 .first()
706 706 if _res:
707 707 res = _res.user
708 708 return res
709 709
710 710 @classmethod
711 711 def get_by_email(cls, email, case_insensitive=False, cache=False):
712 712
713 713 if case_insensitive:
714 714 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
715 715
716 716 else:
717 717 q = cls.query().filter(cls.email == email)
718 718
719 719 if cache:
720 720 q = q.options(FromCache("sql_cache_short",
721 721 "get_email_key_%s" % _hash_key(email)))
722 722
723 723 ret = q.scalar()
724 724 if ret is None:
725 725 q = UserEmailMap.query()
726 726 # try fetching in alternate email map
727 727 if case_insensitive:
728 728 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
729 729 else:
730 730 q = q.filter(UserEmailMap.email == email)
731 731 q = q.options(joinedload(UserEmailMap.user))
732 732 if cache:
733 733 q = q.options(FromCache("sql_cache_short",
734 734 "get_email_map_key_%s" % email))
735 735 ret = getattr(q.scalar(), 'user', None)
736 736
737 737 return ret
738 738
739 739 @classmethod
740 740 def get_from_cs_author(cls, author):
741 741 """
742 742 Tries to get User objects out of commit author string
743 743
744 744 :param author:
745 745 """
746 746 from rhodecode.lib.helpers import email, author_name
747 747 # Valid email in the attribute passed, see if they're in the system
748 748 _email = email(author)
749 749 if _email:
750 750 user = cls.get_by_email(_email, case_insensitive=True)
751 751 if user:
752 752 return user
753 753 # Maybe we can match by username?
754 754 _author = author_name(author)
755 755 user = cls.get_by_username(_author, case_insensitive=True)
756 756 if user:
757 757 return user
758 758
759 759 def update_userdata(self, **kwargs):
760 760 usr = self
761 761 old = usr.user_data
762 762 old.update(**kwargs)
763 763 usr.user_data = old
764 764 Session().add(usr)
765 765 log.debug('updated userdata with %s', kwargs)
766 766
767 767 def update_lastlogin(self):
768 768 """Update user lastlogin"""
769 769 self.last_login = datetime.datetime.now()
770 770 Session().add(self)
771 771 log.debug('updated user %s lastlogin', self.username)
772 772
773 773 def update_lastactivity(self):
774 774 """Update user lastactivity"""
775 775 usr = self
776 776 old = usr.user_data
777 777 old.update({'last_activity': time.time()})
778 778 usr.user_data = old
779 779 Session().add(usr)
780 780 log.debug('updated user %s lastactivity', usr.username)
781 781
782 782 def update_password(self, new_password, change_api_key=False):
783 783 from rhodecode.lib.auth import get_crypt_password,generate_auth_token
784 784
785 785 self.password = get_crypt_password(new_password)
786 786 if change_api_key:
787 787 self.api_key = generate_auth_token(self.username)
788 788 Session().add(self)
789 789
790 790 @classmethod
791 791 def get_first_super_admin(cls):
792 792 user = User.query().filter(User.admin == true()).first()
793 793 if user is None:
794 794 raise Exception('FATAL: Missing administrative account!')
795 795 return user
796 796
797 797 @classmethod
798 798 def get_all_super_admins(cls):
799 799 """
800 800 Returns all admin accounts sorted by username
801 801 """
802 802 return User.query().filter(User.admin == true())\
803 803 .order_by(User.username.asc()).all()
804 804
805 805 @classmethod
806 806 def get_default_user(cls, cache=False):
807 807 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
808 808 if user is None:
809 809 raise Exception('FATAL: Missing default account!')
810 810 return user
811 811
812 812 def _get_default_perms(self, user, suffix=''):
813 813 from rhodecode.model.permission import PermissionModel
814 814 return PermissionModel().get_default_perms(user.user_perms, suffix)
815 815
816 816 def get_default_perms(self, suffix=''):
817 817 return self._get_default_perms(self, suffix)
818 818
819 819 def get_api_data(self, include_secrets=False, details='full'):
820 820 """
821 821 Common function for generating user related data for API
822 822
823 823 :param include_secrets: By default secrets in the API data will be replaced
824 824 by a placeholder value to prevent exposing this data by accident. In case
825 825 this data shall be exposed, set this flag to ``True``.
826 826
827 827 :param details: details can be 'basic' or 'full'; 'basic' gives only a subset of
828 828 the available user information that includes user_id, name and emails.
829 829 """
830 830 user = self
831 831 user_data = self.user_data
832 832 data = {
833 833 'user_id': user.user_id,
834 834 'username': user.username,
835 835 'firstname': user.name,
836 836 'lastname': user.lastname,
837 837 'email': user.email,
838 838 'emails': user.emails,
839 839 }
840 840 if details == 'basic':
841 841 return data
842 842
843 843 api_key_length = 40
844 844 api_key_replacement = '*' * api_key_length
845 845
846 846 extras = {
847 847 'api_key': api_key_replacement,
848 848 'api_keys': [api_key_replacement],
849 849 'active': user.active,
850 850 'admin': user.admin,
851 851 'extern_type': user.extern_type,
852 852 'extern_name': user.extern_name,
853 853 'last_login': user.last_login,
854 854 'ip_addresses': user.ip_addresses,
855 855 'language': user_data.get('language')
856 856 }
857 857 data.update(extras)
858 858
859 859 if include_secrets:
860 860 data['api_key'] = user.api_key
861 861 data['api_keys'] = user.auth_tokens
862 862 return data
863 863
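# Hedged usage sketch of get_api_data above; the username is hypothetical and
# an active database session is assumed.
user = User.get_by_username('some-user')
basic = user.get_api_data(details='basic')      # user_id, username, names, emails only
full = user.get_api_data(include_secrets=True)  # includes the real api_key values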
864 864 def __json__(self):
865 865 data = {
866 866 'full_name': self.full_name,
867 867 'full_name_or_username': self.full_name_or_username,
868 868 'short_contact': self.short_contact,
869 869 'full_contact': self.full_contact,
870 870 }
871 871 data.update(self.get_api_data())
872 872 return data
873 873
874 874
875 875 class UserApiKeys(Base, BaseModel):
876 876 __tablename__ = 'user_api_keys'
877 877 __table_args__ = (
878 878 Index('uak_api_key_idx', 'api_key'),
879 879 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
880 880 UniqueConstraint('api_key'),
881 881 {'extend_existing': True, 'mysql_engine': 'InnoDB',
882 882 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
883 883 )
884 884 __mapper_args__ = {}
885 885
886 886 # ApiKey role
887 887 ROLE_ALL = 'token_role_all'
888 888 ROLE_HTTP = 'token_role_http'
889 889 ROLE_VCS = 'token_role_vcs'
890 890 ROLE_API = 'token_role_api'
891 891 ROLE_FEED = 'token_role_feed'
892 892 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
893 893
894 894 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
895 895 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
896 896 api_key = Column("api_key", String(255), nullable=False, unique=True)
897 897 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
898 898 expires = Column('expires', Float(53), nullable=False)
899 899 role = Column('role', String(255), nullable=True)
900 900 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
901 901
902 902 user = relationship('User', lazy='joined')
903 903
904 904 @classmethod
905 905 def _get_role_name(cls, role):
906 906 return {
907 907 cls.ROLE_ALL: _('all'),
908 908 cls.ROLE_HTTP: _('http/web interface'),
909 909 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
910 910 cls.ROLE_API: _('api calls'),
911 911 cls.ROLE_FEED: _('feed access'),
912 912 }.get(role, role)
913 913
914 914 @property
915 915 def expired(self):
916 916 if self.expires == -1:
917 917 return False
918 918 return time.time() > self.expires
919 919
920 920 @property
921 921 def role_humanized(self):
922 922 return self._get_role_name(self.role)
923 923
924 924
925 925 class UserEmailMap(Base, BaseModel):
926 926 __tablename__ = 'user_email_map'
927 927 __table_args__ = (
928 928 Index('uem_email_idx', 'email'),
929 929 UniqueConstraint('email'),
930 930 {'extend_existing': True, 'mysql_engine': 'InnoDB',
931 931 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
932 932 )
933 933 __mapper_args__ = {}
934 934
935 935 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
936 936 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
937 937 _email = Column("email", String(255), nullable=True, unique=False, default=None)
938 938 user = relationship('User', lazy='joined')
939 939
940 940 @validates('_email')
941 941 def validate_email(self, key, email):
942 942 # check if this email is not main one
943 943 main_email = Session().query(User).filter(User.email == email).scalar()
944 944 if main_email is not None:
945 945 raise AttributeError('email %s is present in user table' % email)
946 946 return email
947 947
948 948 @hybrid_property
949 949 def email(self):
950 950 return self._email
951 951
952 952 @email.setter
953 953 def email(self, val):
954 954 self._email = val.lower() if val else None
955 955
956 956
957 957 class UserIpMap(Base, BaseModel):
958 958 __tablename__ = 'user_ip_map'
959 959 __table_args__ = (
960 960 UniqueConstraint('user_id', 'ip_addr'),
961 961 {'extend_existing': True, 'mysql_engine': 'InnoDB',
962 962 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
963 963 )
964 964 __mapper_args__ = {}
965 965
966 966 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
967 967 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
968 968 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
969 969 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
970 970 description = Column("description", String(10000), nullable=True, unique=None, default=None)
971 971 user = relationship('User', lazy='joined')
972 972
973 973 @classmethod
974 974 def _get_ip_range(cls, ip_addr):
975 975 net = ipaddress.ip_network(ip_addr, strict=False)
976 976 return [str(net.network_address), str(net.broadcast_address)]
977 977
978 978 def __json__(self):
979 979 return {
980 980 'ip_addr': self.ip_addr,
981 981 'ip_range': self._get_ip_range(self.ip_addr),
982 982 }
983 983
984 984 def __unicode__(self):
985 985 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
986 986 self.user_id, self.ip_addr)
987 987
988 988 class UserLog(Base, BaseModel):
989 989 __tablename__ = 'user_logs'
990 990 __table_args__ = (
991 991 {'extend_existing': True, 'mysql_engine': 'InnoDB',
992 992 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
993 993 )
994 994 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
995 995 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
996 996 username = Column("username", String(255), nullable=True, unique=None, default=None)
997 997 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
998 998 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
999 999 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1000 1000 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1001 1001 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1002 1002
1003 1003 def __unicode__(self):
1004 1004 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1005 1005 self.repository_name,
1006 1006 self.action)
1007 1007
1008 1008 @property
1009 1009 def action_as_day(self):
1010 1010 return datetime.date(*self.action_date.timetuple()[:3])
1011 1011
1012 1012 user = relationship('User')
1013 1013 repository = relationship('Repository', cascade='')
1014 1014
1015 1015
1016 1016 class UserGroup(Base, BaseModel):
1017 1017 __tablename__ = 'users_groups'
1018 1018 __table_args__ = (
1019 1019 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1020 1020 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1021 1021 )
1022 1022
1023 1023 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1024 1024 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1025 1025 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1026 1026 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1027 1027 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1028 1028 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1029 1029 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1030 1030 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1031 1031
1032 1032 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1033 1033 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1034 1034 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1035 1035 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1036 1036 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1037 1037 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1038 1038
1039 1039 user = relationship('User')
1040 1040
1041 1041 @hybrid_property
1042 1042 def group_data(self):
1043 1043 if not self._group_data:
1044 1044 return {}
1045 1045
1046 1046 try:
1047 1047 return json.loads(self._group_data)
1048 1048 except TypeError:
1049 1049 return {}
1050 1050
1051 1051 @group_data.setter
1052 1052 def group_data(self, val):
1053 1053 try:
1054 1054 self._group_data = json.dumps(val)
1055 1055 except Exception:
1056 1056 log.error(traceback.format_exc())
1057 1057
1058 1058 def __unicode__(self):
1059 1059 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1060 1060 self.users_group_id,
1061 1061 self.users_group_name)
1062 1062
1063 1063 @classmethod
1064 1064 def get_by_group_name(cls, group_name, cache=False,
1065 1065 case_insensitive=False):
1066 1066 if case_insensitive:
1067 1067 q = cls.query().filter(func.lower(cls.users_group_name) ==
1068 1068 func.lower(group_name))
1069 1069
1070 1070 else:
1071 1071 q = cls.query().filter(cls.users_group_name == group_name)
1072 1072 if cache:
1073 1073 q = q.options(FromCache(
1074 1074 "sql_cache_short",
1075 1075 "get_group_%s" % _hash_key(group_name)))
1076 1076 return q.scalar()
1077 1077
1078 1078 @classmethod
1079 1079 def get(cls, user_group_id, cache=False):
1080 1080 user_group = cls.query()
1081 1081 if cache:
1082 1082 user_group = user_group.options(FromCache("sql_cache_short",
1083 1083 "get_users_group_%s" % user_group_id))
1084 1084 return user_group.get(user_group_id)
1085 1085
1086 1086 def permissions(self, with_admins=True, with_owner=True):
1087 1087 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1088 1088 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1089 1089 joinedload(UserUserGroupToPerm.user),
1090 1090 joinedload(UserUserGroupToPerm.permission),)
1091 1091
1092 1092         # get owners, admins and their permissions. We do a trick of re-writing
1093 1093         # objects from sqlalchemy to named-tuples because the sqlalchemy session
1094 1094         # keeps a global reference and changing one object propagates to all
1095 1095         # others. This means that if an admin is also the owner, a change to the
1096 1096         # admin row would propagate to both objects.
1097 1097 perm_rows = []
1098 1098 for _usr in q.all():
1099 1099 usr = AttributeDict(_usr.user.get_dict())
1100 1100 usr.permission = _usr.permission.permission_name
1101 1101 perm_rows.append(usr)
1102 1102
1103 1103         # filter the perm rows by 'default' first, then sort them by
1104 1104         # permission (admin, write, read, none) and alphabetically
1105 1105         # within each permission group
1106 1106 perm_rows = sorted(perm_rows, key=display_sort)
1107 1107
1108 1108 _admin_perm = 'usergroup.admin'
1109 1109 owner_row = []
1110 1110 if with_owner:
1111 1111 usr = AttributeDict(self.user.get_dict())
1112 1112 usr.owner_row = True
1113 1113 usr.permission = _admin_perm
1114 1114 owner_row.append(usr)
1115 1115
1116 1116 super_admin_rows = []
1117 1117 if with_admins:
1118 1118 for usr in User.get_all_super_admins():
1119 1119 # if this admin is also owner, don't double the record
1120 1120 if usr.user_id == owner_row[0].user_id:
1121 1121 owner_row[0].admin_row = True
1122 1122 else:
1123 1123 usr = AttributeDict(usr.get_dict())
1124 1124 usr.admin_row = True
1125 1125 usr.permission = _admin_perm
1126 1126 super_admin_rows.append(usr)
1127 1127
1128 1128 return super_admin_rows + owner_row + perm_rows
1129 1129
1130 1130 def permission_user_groups(self):
1131 1131 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1132 1132 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1133 1133 joinedload(UserGroupUserGroupToPerm.target_user_group),
1134 1134 joinedload(UserGroupUserGroupToPerm.permission),)
1135 1135
1136 1136 perm_rows = []
1137 1137 for _user_group in q.all():
1138 1138 usr = AttributeDict(_user_group.user_group.get_dict())
1139 1139 usr.permission = _user_group.permission.permission_name
1140 1140 perm_rows.append(usr)
1141 1141
1142 1142 return perm_rows
1143 1143
1144 1144 def _get_default_perms(self, user_group, suffix=''):
1145 1145 from rhodecode.model.permission import PermissionModel
1146 1146 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1147 1147
1148 1148 def get_default_perms(self, suffix=''):
1149 1149 return self._get_default_perms(self, suffix)
1150 1150
1151 1151 def get_api_data(self, with_group_members=True, include_secrets=False):
1152 1152 """
1153 1153 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1154 1154 basically forwarded.
1155 1155
1156 1156 """
1157 1157 user_group = self
1158 1158
1159 1159 data = {
1160 1160 'users_group_id': user_group.users_group_id,
1161 1161 'group_name': user_group.users_group_name,
1162 1162 'group_description': user_group.user_group_description,
1163 1163 'active': user_group.users_group_active,
1164 1164 'owner': user_group.user.username,
1165 1165 }
1166 1166 if with_group_members:
1167 1167 users = []
1168 1168 for user in user_group.members:
1169 1169 user = user.user
1170 1170 users.append(user.get_api_data(include_secrets=include_secrets))
1171 1171 data['users'] = users
1172 1172
1173 1173 return data
1174 1174
1175 1175
1176 1176 class UserGroupMember(Base, BaseModel):
1177 1177 __tablename__ = 'users_groups_members'
1178 1178 __table_args__ = (
1179 1179 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1180 1180 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1181 1181 )
1182 1182
1183 1183 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1184 1184 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1185 1185 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1186 1186
1187 1187 user = relationship('User', lazy='joined')
1188 1188 users_group = relationship('UserGroup')
1189 1189
1190 1190 def __init__(self, gr_id='', u_id=''):
1191 1191 self.users_group_id = gr_id
1192 1192 self.user_id = u_id
1193 1193
1194 1194
1195 1195 class RepositoryField(Base, BaseModel):
1196 1196 __tablename__ = 'repositories_fields'
1197 1197 __table_args__ = (
1198 1198 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1199 1199 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1200 1200 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1201 1201 )
1202 1202 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1203 1203
1204 1204 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1205 1205 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1206 1206 field_key = Column("field_key", String(250))
1207 1207 field_label = Column("field_label", String(1024), nullable=False)
1208 1208 field_value = Column("field_value", String(10000), nullable=False)
1209 1209 field_desc = Column("field_desc", String(1024), nullable=False)
1210 1210 field_type = Column("field_type", String(255), nullable=False, unique=None)
1211 1211 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1212 1212
1213 1213 repository = relationship('Repository')
1214 1214
1215 1215 @property
1216 1216 def field_key_prefixed(self):
1217 1217 return 'ex_%s' % self.field_key
1218 1218
1219 1219 @classmethod
1220 1220 def un_prefix_key(cls, key):
1221 1221 if key.startswith(cls.PREFIX):
1222 1222 return key[len(cls.PREFIX):]
1223 1223 return key
1224 1224
1225 1225 @classmethod
1226 1226 def get_by_key_name(cls, key, repo):
1227 1227 row = cls.query()\
1228 1228 .filter(cls.repository == repo)\
1229 1229 .filter(cls.field_key == key).scalar()
1230 1230 return row
1231 1231
1232 1232
1233 1233 class Repository(Base, BaseModel):
1234 1234 __tablename__ = 'repositories'
1235 1235 __table_args__ = (
1236 1236 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1237 1237 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1238 1238 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1239 1239 )
1240 1240 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1241 1241 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1242 1242
1243 1243 STATE_CREATED = 'repo_state_created'
1244 1244 STATE_PENDING = 'repo_state_pending'
1245 1245 STATE_ERROR = 'repo_state_error'
1246 1246
1247 1247 LOCK_AUTOMATIC = 'lock_auto'
1248 1248 LOCK_API = 'lock_api'
1249 1249 LOCK_WEB = 'lock_web'
1250 1250 LOCK_PULL = 'lock_pull'
1251 1251
1252 1252 NAME_SEP = URL_SEP
1253 1253
1254 1254 repo_id = Column(
1255 1255 "repo_id", Integer(), nullable=False, unique=True, default=None,
1256 1256 primary_key=True)
1257 1257 _repo_name = Column(
1258 1258 "repo_name", Text(), nullable=False, default=None)
1259 1259 _repo_name_hash = Column(
1260 1260 "repo_name_hash", String(255), nullable=False, unique=True)
1261 1261 repo_state = Column("repo_state", String(255), nullable=True)
1262 1262
1263 1263 clone_uri = Column(
1264 1264 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1265 1265 default=None)
1266 1266 repo_type = Column(
1267 1267 "repo_type", String(255), nullable=False, unique=False, default=None)
1268 1268 user_id = Column(
1269 1269 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1270 1270 unique=False, default=None)
1271 1271 private = Column(
1272 1272 "private", Boolean(), nullable=True, unique=None, default=None)
1273 1273 enable_statistics = Column(
1274 1274 "statistics", Boolean(), nullable=True, unique=None, default=True)
1275 1275 enable_downloads = Column(
1276 1276 "downloads", Boolean(), nullable=True, unique=None, default=True)
1277 1277 description = Column(
1278 1278 "description", String(10000), nullable=True, unique=None, default=None)
1279 1279 created_on = Column(
1280 1280 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1281 1281 default=datetime.datetime.now)
1282 1282 updated_on = Column(
1283 1283 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1284 1284 default=datetime.datetime.now)
1285 1285 _landing_revision = Column(
1286 1286 "landing_revision", String(255), nullable=False, unique=False,
1287 1287 default=None)
1288 1288 enable_locking = Column(
1289 1289 "enable_locking", Boolean(), nullable=False, unique=None,
1290 1290 default=False)
1291 1291 _locked = Column(
1292 1292 "locked", String(255), nullable=True, unique=False, default=None)
1293 1293 _changeset_cache = Column(
1294 1294 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1295 1295
1296 1296 fork_id = Column(
1297 1297 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1298 1298 nullable=True, unique=False, default=None)
1299 1299 group_id = Column(
1300 1300 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1301 1301 unique=False, default=None)
1302 1302
1303 1303 user = relationship('User', lazy='joined')
1304 1304 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1305 1305 group = relationship('RepoGroup', lazy='joined')
1306 1306 repo_to_perm = relationship(
1307 1307 'UserRepoToPerm', cascade='all',
1308 1308 order_by='UserRepoToPerm.repo_to_perm_id')
1309 1309 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1310 1310 stats = relationship('Statistics', cascade='all', uselist=False)
1311 1311
1312 1312 followers = relationship(
1313 1313 'UserFollowing',
1314 1314 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1315 1315 cascade='all')
1316 1316 extra_fields = relationship(
1317 1317 'RepositoryField', cascade="all, delete, delete-orphan")
1318 1318 logs = relationship('UserLog')
1319 1319 comments = relationship(
1320 1320 'ChangesetComment', cascade="all, delete, delete-orphan")
1321 1321 pull_requests_source = relationship(
1322 1322 'PullRequest',
1323 1323 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1324 1324 cascade="all, delete, delete-orphan")
1325 1325 pull_requests_target = relationship(
1326 1326 'PullRequest',
1327 1327 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1328 1328 cascade="all, delete, delete-orphan")
1329 1329 ui = relationship('RepoRhodeCodeUi', cascade="all")
1330 1330 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1331 1331 integrations = relationship('Integration',
1332 1332 cascade="all, delete, delete-orphan")
1333 1333
1334 1334 def __unicode__(self):
1335 1335 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1336 1336 safe_unicode(self.repo_name))
1337 1337
1338 1338 @hybrid_property
1339 1339 def landing_rev(self):
1340 1340 # always should return [rev_type, rev]
1341 1341 if self._landing_revision:
1342 1342 _rev_info = self._landing_revision.split(':')
1343 1343 if len(_rev_info) < 2:
1344 1344 _rev_info.insert(0, 'rev')
1345 1345 return [_rev_info[0], _rev_info[1]]
1346 1346 return [None, None]
1347 1347
1348 1348 @landing_rev.setter
1349 1349 def landing_rev(self, val):
1350 1350 if ':' not in val:
1351 1351 raise ValueError('value must be delimited with `:` and consist '
1352 1352 'of <rev_type>:<rev>, got %s instead' % val)
1353 1353 self._landing_revision = val
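        # e.g. ``repo.landing_rev = 'branch:default'`` stores the raw string;
        # reading the property back yields ['branch', 'default'], and legacy
        # values without a type prefix are returned as ['rev', <value>]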
1354 1354
1355 1355 @hybrid_property
1356 1356 def locked(self):
1357 1357 if self._locked:
1358 1358 user_id, timelocked, reason = self._locked.split(':')
1359 1359 lock_values = int(user_id), timelocked, reason
1360 1360 else:
1361 1361 lock_values = [None, None, None]
1362 1362 return lock_values
1363 1363
1364 1364 @locked.setter
1365 1365 def locked(self, val):
1366 1366 if val and isinstance(val, (list, tuple)):
1367 1367 self._locked = ':'.join(map(str, val))
1368 1368 else:
1369 1369 self._locked = None
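        # e.g. ``repo.locked = [2, time.time(), Repository.LOCK_API]`` is stored
        # as '2:<timestamp>:lock_api'; reading it back gives (2, '<timestamp>',
        # 'lock_api') since only the user id part is cast back to int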
1370 1370
1371 1371 @hybrid_property
1372 1372 def changeset_cache(self):
1373 1373 from rhodecode.lib.vcs.backends.base import EmptyCommit
1374 1374 dummy = EmptyCommit().__json__()
1375 1375 if not self._changeset_cache:
1376 1376 return dummy
1377 1377 try:
1378 1378 return json.loads(self._changeset_cache)
1379 1379 except TypeError:
1380 1380 return dummy
1381 1381 except Exception:
1382 1382 log.error(traceback.format_exc())
1383 1383 return dummy
1384 1384
1385 1385 @changeset_cache.setter
1386 1386 def changeset_cache(self, val):
1387 1387 try:
1388 1388 self._changeset_cache = json.dumps(val)
1389 1389 except Exception:
1390 1390 log.error(traceback.format_exc())
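        # ``_changeset_cache`` keeps a JSON snapshot of the last commit
        # (raw_id, revision, author, date, message, ...); until something is
        # cached the getter falls back to the serialized EmptyCommit placeholder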
1391 1391
1392 1392 @hybrid_property
1393 1393 def repo_name(self):
1394 1394 return self._repo_name
1395 1395
1396 1396 @repo_name.setter
1397 1397 def repo_name(self, value):
1398 1398 self._repo_name = value
1399 1399 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
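        # assigning ``repo_name`` also refreshes ``_repo_name_hash`` with the
        # SHA1 hex digest of the name, which backs the unique repo_name_hash
        # column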
1400 1400
1401 1401 @classmethod
1402 1402 def normalize_repo_name(cls, repo_name):
1403 1403 """
1404 1404         Normalizes an OS-specific repo_name to the format stored internally in
1405 1405         the database, using URL_SEP
1406 1406
1407 1407 :param cls:
1408 1408 :param repo_name:
1409 1409 """
1410 1410 return cls.NAME_SEP.join(repo_name.split(os.sep))
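        # e.g. a Windows-style path 'group\\sub\\repo' becomes 'group/sub/repo',
        # matching the URL_SEP-joined form stored in the database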
1411 1411
1412 1412 @classmethod
1413 1413 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1414 1414 session = Session()
1415 1415 q = session.query(cls).filter(cls.repo_name == repo_name)
1416 1416
1417 1417 if cache:
1418 1418 if identity_cache:
1419 1419 val = cls.identity_cache(session, 'repo_name', repo_name)
1420 1420 if val:
1421 1421 return val
1422 1422 else:
1423 1423 q = q.options(
1424 1424 FromCache("sql_cache_short",
1425 1425 "get_repo_by_name_%s" % _hash_key(repo_name)))
1426 1426
1427 1427 return q.scalar()
1428 1428
1429 1429 @classmethod
1430 1430 def get_by_full_path(cls, repo_full_path):
1431 1431 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1432 1432 repo_name = cls.normalize_repo_name(repo_name)
1433 1433 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1434 1434
1435 1435 @classmethod
1436 1436 def get_repo_forks(cls, repo_id):
1437 1437 return cls.query().filter(Repository.fork_id == repo_id)
1438 1438
1439 1439 @classmethod
1440 1440 def base_path(cls):
1441 1441 """
1442 1442         Returns the base path where all repositories are stored
1443 1443
1444 1444 :param cls:
1445 1445 """
1446 1446 q = Session().query(RhodeCodeUi)\
1447 1447 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1448 1448 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1449 1449 return q.one().ui_value
1450 1450
1451 1451 @classmethod
1452 1452 def is_valid(cls, repo_name):
1453 1453 """
1454 1454 returns True if given repo name is a valid filesystem repository
1455 1455
1456 1456 :param cls:
1457 1457 :param repo_name:
1458 1458 """
1459 1459 from rhodecode.lib.utils import is_valid_repo
1460 1460
1461 1461 return is_valid_repo(repo_name, cls.base_path())
1462 1462
1463 1463 @classmethod
1464 1464 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1465 1465 case_insensitive=True):
1466 1466 q = Repository.query()
1467 1467
1468 1468 if not isinstance(user_id, Optional):
1469 1469 q = q.filter(Repository.user_id == user_id)
1470 1470
1471 1471 if not isinstance(group_id, Optional):
1472 1472 q = q.filter(Repository.group_id == group_id)
1473 1473
1474 1474 if case_insensitive:
1475 1475 q = q.order_by(func.lower(Repository.repo_name))
1476 1476 else:
1477 1477 q = q.order_by(Repository.repo_name)
1478 1478 return q.all()
1479 1479
1480 1480 @property
1481 1481 def forks(self):
1482 1482 """
1483 1483 Return forks of this repo
1484 1484 """
1485 1485 return Repository.get_repo_forks(self.repo_id)
1486 1486
1487 1487 @property
1488 1488 def parent(self):
1489 1489 """
1490 1490 Returns fork parent
1491 1491 """
1492 1492 return self.fork
1493 1493
1494 1494 @property
1495 1495 def just_name(self):
1496 1496 return self.repo_name.split(self.NAME_SEP)[-1]
1497 1497
1498 1498 @property
1499 1499 def groups_with_parents(self):
1500 1500 groups = []
1501 1501 if self.group is None:
1502 1502 return groups
1503 1503
1504 1504 cur_gr = self.group
1505 1505 groups.insert(0, cur_gr)
1506 1506 while 1:
1507 1507 gr = getattr(cur_gr, 'parent_group', None)
1508 1508 cur_gr = cur_gr.parent_group
1509 1509 if gr is None:
1510 1510 break
1511 1511 groups.insert(0, gr)
1512 1512
1513 1513 return groups
1514 1514
1515 1515 @property
1516 1516 def groups_and_repo(self):
1517 1517 return self.groups_with_parents, self
1518 1518
1519 1519 @LazyProperty
1520 1520 def repo_path(self):
1521 1521 """
1522 1522         Returns the full base path for this repository, i.e. where it actually
1523 1523         exists on the filesystem
1524 1524 """
1525 1525 q = Session().query(RhodeCodeUi).filter(
1526 1526 RhodeCodeUi.ui_key == self.NAME_SEP)
1527 1527 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1528 1528 return q.one().ui_value
1529 1529
1530 1530 @property
1531 1531 def repo_full_path(self):
1532 1532 p = [self.repo_path]
1533 1533 # we need to split the name by / since this is how we store the
1534 1534 # names in the database, but that eventually needs to be converted
1535 1535 # into a valid system path
1536 1536 p += self.repo_name.split(self.NAME_SEP)
1537 1537 return os.path.join(*map(safe_unicode, p))
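        # e.g. with a base path of '/srv/repos' (illustrative) and repo_name
        # 'web/apps/site', this yields '/srv/repos/web/apps/site'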
1538 1538
1539 1539 @property
1540 1540 def cache_keys(self):
1541 1541 """
1542 1542 Returns associated cache keys for that repo
1543 1543 """
1544 1544 return CacheKey.query()\
1545 1545 .filter(CacheKey.cache_args == self.repo_name)\
1546 1546 .order_by(CacheKey.cache_key)\
1547 1547 .all()
1548 1548
1549 1549 def get_new_name(self, repo_name):
1550 1550 """
1551 1551         returns new full repository name based on assigned group and new name
1552 1552
1553 1553         :param repo_name:
1554 1554 """
1555 1555 path_prefix = self.group.full_path_splitted if self.group else []
1556 1556 return self.NAME_SEP.join(path_prefix + [repo_name])
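        # e.g. for a repository inside the group 'web/apps',
        # get_new_name('site') returns 'web/apps/site'; for a top-level
        # repository it returns just 'site'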
1557 1557
1558 1558 @property
1559 1559 def _config(self):
1560 1560 """
1561 1561 Returns db based config object.
1562 1562 """
1563 1563 from rhodecode.lib.utils import make_db_config
1564 1564 return make_db_config(clear_session=False, repo=self)
1565 1565
1566 1566 def permissions(self, with_admins=True, with_owner=True):
1567 1567 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1568 1568 q = q.options(joinedload(UserRepoToPerm.repository),
1569 1569 joinedload(UserRepoToPerm.user),
1570 1570 joinedload(UserRepoToPerm.permission),)
1571 1571
1572 1572         # get owners, admins and their permissions. We do a trick of re-writing
1573 1573         # objects from sqlalchemy to named-tuples because the sqlalchemy session
1574 1574         # keeps a global reference and changing one object propagates to all
1575 1575         # others. This means that if an admin is also the owner, a change to the
1576 1576         # admin row would propagate to both objects.
1577 1577 perm_rows = []
1578 1578 for _usr in q.all():
1579 1579 usr = AttributeDict(_usr.user.get_dict())
1580 1580 usr.permission = _usr.permission.permission_name
1581 1581 perm_rows.append(usr)
1582 1582
1583 1583         # filter the perm rows by 'default' first, then sort them by
1584 1584         # permission (admin, write, read, none) and alphabetically
1585 1585         # within each permission group
1586 1586 perm_rows = sorted(perm_rows, key=display_sort)
1587 1587
1588 1588 _admin_perm = 'repository.admin'
1589 1589 owner_row = []
1590 1590 if with_owner:
1591 1591 usr = AttributeDict(self.user.get_dict())
1592 1592 usr.owner_row = True
1593 1593 usr.permission = _admin_perm
1594 1594 owner_row.append(usr)
1595 1595
1596 1596 super_admin_rows = []
1597 1597 if with_admins:
1598 1598 for usr in User.get_all_super_admins():
1599 1599 # if this admin is also owner, don't double the record
1600 1600 if usr.user_id == owner_row[0].user_id:
1601 1601 owner_row[0].admin_row = True
1602 1602 else:
1603 1603 usr = AttributeDict(usr.get_dict())
1604 1604 usr.admin_row = True
1605 1605 usr.permission = _admin_perm
1606 1606 super_admin_rows.append(usr)
1607 1607
1608 1608 return super_admin_rows + owner_row + perm_rows
1609 1609
1610 1610 def permission_user_groups(self):
1611 1611 q = UserGroupRepoToPerm.query().filter(
1612 1612 UserGroupRepoToPerm.repository == self)
1613 1613 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1614 1614 joinedload(UserGroupRepoToPerm.users_group),
1615 1615 joinedload(UserGroupRepoToPerm.permission),)
1616 1616
1617 1617 perm_rows = []
1618 1618 for _user_group in q.all():
1619 1619 usr = AttributeDict(_user_group.users_group.get_dict())
1620 1620 usr.permission = _user_group.permission.permission_name
1621 1621 perm_rows.append(usr)
1622 1622
1623 1623 return perm_rows
1624 1624
1625 1625 def get_api_data(self, include_secrets=False):
1626 1626 """
1627 1627 Common function for generating repo api data
1628 1628
1629 1629 :param include_secrets: See :meth:`User.get_api_data`.
1630 1630
1631 1631 """
1632 1632         # TODO: mikhail: there is an anti-pattern here, we probably need to
1633 1633         # move these methods to the model level.
1634 1634 from rhodecode.model.settings import SettingsModel
1635 1635
1636 1636 repo = self
1637 1637 _user_id, _time, _reason = self.locked
1638 1638
1639 1639 data = {
1640 1640 'repo_id': repo.repo_id,
1641 1641 'repo_name': repo.repo_name,
1642 1642 'repo_type': repo.repo_type,
1643 1643 'clone_uri': repo.clone_uri or '',
1644 1644 'url': url('summary_home', repo_name=self.repo_name, qualified=True),
1645 1645 'private': repo.private,
1646 1646 'created_on': repo.created_on,
1647 1647 'description': repo.description,
1648 1648 'landing_rev': repo.landing_rev,
1649 1649 'owner': repo.user.username,
1650 1650 'fork_of': repo.fork.repo_name if repo.fork else None,
1651 1651 'enable_statistics': repo.enable_statistics,
1652 1652 'enable_locking': repo.enable_locking,
1653 1653 'enable_downloads': repo.enable_downloads,
1654 1654 'last_changeset': repo.changeset_cache,
1655 1655 'locked_by': User.get(_user_id).get_api_data(
1656 1656 include_secrets=include_secrets) if _user_id else None,
1657 1657 'locked_date': time_to_datetime(_time) if _time else None,
1658 1658 'lock_reason': _reason if _reason else None,
1659 1659 }
1660 1660
1661 1661 # TODO: mikhail: should be per-repo settings here
1662 1662 rc_config = SettingsModel().get_all_settings()
1663 1663 repository_fields = str2bool(
1664 1664 rc_config.get('rhodecode_repository_fields'))
1665 1665 if repository_fields:
1666 1666 for f in self.extra_fields:
1667 1667 data[f.field_key_prefixed] = f.field_value
1668 1668
1669 1669 return data
1670 1670
1671 1671 @classmethod
1672 1672 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1673 1673 if not lock_time:
1674 1674 lock_time = time.time()
1675 1675 if not lock_reason:
1676 1676 lock_reason = cls.LOCK_AUTOMATIC
1677 1677 repo.locked = [user_id, lock_time, lock_reason]
1678 1678 Session().add(repo)
1679 1679 Session().commit()
1680 1680
1681 1681 @classmethod
1682 1682 def unlock(cls, repo):
1683 1683 repo.locked = None
1684 1684 Session().add(repo)
1685 1685 Session().commit()
1686 1686
1687 1687 @classmethod
1688 1688 def getlock(cls, repo):
1689 1689 return repo.locked
1690 1690
1691 1691 def is_user_lock(self, user_id):
1692 1692         if self.locked[0]:
1693 1693             lock_user_id = safe_int(self.locked[0])
1694 1694 user_id = safe_int(user_id)
1695 1695 # both are ints, and they are equal
1696 1696 return all([lock_user_id, user_id]) and lock_user_id == user_id
1697 1697
1698 1698 return False
1699 1699
1700 1700 def get_locking_state(self, action, user_id, only_when_enabled=True):
1701 1701 """
1702 1702         Checks locking on this repository. If locking is enabled and a lock is
1703 1703         present, returns a tuple of make_lock, locked, locked_by.
1704 1704         make_lock is tri-state: None means do nothing, True means make a lock,
1705 1705         False means release the lock. This value is later propagated to hooks,
1706 1706         which do the locking. Think of it as a signal telling the hooks what to do.
1707 1707
1708 1708 """
1709 1709 # TODO: johbo: This is part of the business logic and should be moved
1710 1710 # into the RepositoryModel.
1711 1711
1712 1712 if action not in ('push', 'pull'):
1713 1713 raise ValueError("Invalid action value: %s" % repr(action))
1714 1714
1715 1715 # defines if locked error should be thrown to user
1716 1716 currently_locked = False
1717 1717 # defines if new lock should be made, tri-state
1718 1718 make_lock = None
1719 1719 repo = self
1720 1720 user = User.get(user_id)
1721 1721
1722 1722 lock_info = repo.locked
1723 1723
1724 1724 if repo and (repo.enable_locking or not only_when_enabled):
1725 1725 if action == 'push':
1726 1726                 # check if it's already locked; if it is, compare users
1727 1727 locked_by_user_id = lock_info[0]
1728 1728 if user.user_id == locked_by_user_id:
1729 1729 log.debug(
1730 1730 'Got `push` action from user %s, now unlocking', user)
1731 1731 # unlock if we have push from user who locked
1732 1732 make_lock = False
1733 1733 else:
1734 1734                     # we're not the user who locked it, reject with the
1735 1735                     # code defined in settings (default is 423 HTTP Locked)
1736 1736 log.debug('Repo %s is currently locked by %s', repo, user)
1737 1737 currently_locked = True
1738 1738 elif action == 'pull':
1739 1739 # [0] user [1] date
1740 1740 if lock_info[0] and lock_info[1]:
1741 1741 log.debug('Repo %s is currently locked by %s', repo, user)
1742 1742 currently_locked = True
1743 1743 else:
1744 1744 log.debug('Setting lock on repo %s by %s', repo, user)
1745 1745 make_lock = True
1746 1746
1747 1747 else:
1748 1748             log.debug('Repository %s does not have locking enabled', repo)
1749 1749
1750 1750 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
1751 1751 make_lock, currently_locked, lock_info)
1752 1752
1753 1753 from rhodecode.lib.auth import HasRepoPermissionAny
1754 1754 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
1755 1755 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
1756 1756 # if we don't have at least write permission we cannot make a lock
1757 1757             log.debug('lock state reset back to FALSE due to lack '
1758 1758                       'of at least write permission')
1759 1759 make_lock = False
1760 1760
1761 1761 return make_lock, currently_locked, lock_info
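        # For instance (assuming the user has at least write permission): a
        # 'push' by the user currently holding the lock returns
        # (False, False, lock_info), i.e. release the lock, while a 'pull'
        # against an unlocked repository with locking enabled returns
        # (True, False, lock_info), telling the hooks to create a new lock.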
1762 1762
1763 1763 @property
1764 1764 def last_db_change(self):
1765 1765 return self.updated_on
1766 1766
1767 1767 @property
1768 1768 def clone_uri_hidden(self):
1769 1769 clone_uri = self.clone_uri
1770 1770 if clone_uri:
1771 1771 import urlobject
1772 1772 url_obj = urlobject.URLObject(clone_uri)
1773 1773 if url_obj.password:
1774 1774 clone_uri = url_obj.with_password('*****')
1775 1775 return clone_uri
1776 1776
1777 1777 def clone_url(self, **override):
1778 1778 qualified_home_url = url('home', qualified=True)
1779 1779
1780 1780 uri_tmpl = None
1781 1781 if 'with_id' in override:
1782 1782 uri_tmpl = self.DEFAULT_CLONE_URI_ID
1783 1783 del override['with_id']
1784 1784
1785 1785 if 'uri_tmpl' in override:
1786 1786 uri_tmpl = override['uri_tmpl']
1787 1787 del override['uri_tmpl']
1788 1788
1789 1789 # we didn't override our tmpl from **overrides
1790 1790 if not uri_tmpl:
1791 1791 uri_tmpl = self.DEFAULT_CLONE_URI
1792 1792 try:
1793 1793 from pylons import tmpl_context as c
1794 1794 uri_tmpl = c.clone_uri_tmpl
1795 1795 except Exception:
1796 1796 # in any case if we call this outside of request context,
1797 1797 # ie, not having tmpl_context set up
1798 1798 pass
1799 1799
1800 1800 return get_clone_url(uri_tmpl=uri_tmpl,
1801 1801 qualifed_home_url=qualified_home_url,
1802 1802 repo_name=self.repo_name,
1803 1803 repo_id=self.repo_id, **override)
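        # Rough sketch of the expansion (all values illustrative): with the
        # default template '{scheme}://{user}@{netloc}/{repo}', a repository
        # named 'group/repo' served at https://code.example.com for user 'bob'
        # renders as 'https://bob@code.example.com/group/repo'; with_id=True
        # switches to DEFAULT_CLONE_URI_ID and produces '/_<repoid>' instead.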
1804 1804
1805 1805 def set_state(self, state):
1806 1806 self.repo_state = state
1807 1807 Session().add(self)
1808 1808 #==========================================================================
1809 1809 # SCM PROPERTIES
1810 1810 #==========================================================================
1811 1811
1812 1812 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
1813 1813 return get_commit_safe(
1814 1814 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
1815 1815
1816 1816 def get_changeset(self, rev=None, pre_load=None):
1817 1817 warnings.warn("Use get_commit", DeprecationWarning)
1818 1818 commit_id = None
1819 1819 commit_idx = None
1820 1820 if isinstance(rev, basestring):
1821 1821 commit_id = rev
1822 1822 else:
1823 1823 commit_idx = rev
1824 1824 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
1825 1825 pre_load=pre_load)
1826 1826
1827 1827 def get_landing_commit(self):
1828 1828 """
1829 1829 Returns landing commit, or if that doesn't exist returns the tip
1830 1830 """
1831 1831 _rev_type, _rev = self.landing_rev
1832 1832 commit = self.get_commit(_rev)
1833 1833 if isinstance(commit, EmptyCommit):
1834 1834 return self.get_commit()
1835 1835 return commit
1836 1836
1837 1837 def update_commit_cache(self, cs_cache=None, config=None):
1838 1838 """
1839 1839 Update cache of last changeset for repository, keys should be::
1840 1840
1841 1841 short_id
1842 1842 raw_id
1843 1843 revision
1844 1844 parents
1845 1845 message
1846 1846 date
1847 1847 author
1848 1848
1849 1849 :param cs_cache:
1850 1850 """
1851 1851 from rhodecode.lib.vcs.backends.base import BaseChangeset
1852 1852 if cs_cache is None:
1853 1853 # use no-cache version here
1854 1854 scm_repo = self.scm_instance(cache=False, config=config)
1855 1855 if scm_repo:
1856 1856 cs_cache = scm_repo.get_commit(
1857 1857 pre_load=["author", "date", "message", "parents"])
1858 1858 else:
1859 1859 cs_cache = EmptyCommit()
1860 1860
1861 1861 if isinstance(cs_cache, BaseChangeset):
1862 1862 cs_cache = cs_cache.__json__()
1863 1863
1864 1864 def is_outdated(new_cs_cache):
1865 1865 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
1866 1866 new_cs_cache['revision'] != self.changeset_cache['revision']):
1867 1867 return True
1868 1868 return False
1869 1869
1870 1870 # check if we have maybe already latest cached revision
1871 1871 if is_outdated(cs_cache) or not self.changeset_cache:
1872 1872 _default = datetime.datetime.fromtimestamp(0)
1873 1873 last_change = cs_cache.get('date') or _default
1874 1874 log.debug('updated repo %s with new cs cache %s',
1875 1875 self.repo_name, cs_cache)
1876 1876 self.updated_on = last_change
1877 1877 self.changeset_cache = cs_cache
1878 1878 Session().add(self)
1879 1879 Session().commit()
1880 1880 else:
1881 1881 log.debug('Skipping update_commit_cache for repo:`%s` '
1882 1882 'commit already with latest changes', self.repo_name)
1883 1883
1884 1884 @property
1885 1885 def tip(self):
1886 1886 return self.get_commit('tip')
1887 1887
1888 1888 @property
1889 1889 def author(self):
1890 1890 return self.tip.author
1891 1891
1892 1892 @property
1893 1893 def last_change(self):
1894 1894 return self.scm_instance().last_change
1895 1895
1896 1896 def get_comments(self, revisions=None):
1897 1897 """
1898 1898 Returns comments for this repository grouped by revisions
1899 1899
1900 1900 :param revisions: filter query by revisions only
1901 1901 """
1902 1902 cmts = ChangesetComment.query()\
1903 1903 .filter(ChangesetComment.repo == self)
1904 1904 if revisions:
1905 1905 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
1906 1906 grouped = collections.defaultdict(list)
1907 1907 for cmt in cmts.all():
1908 1908 grouped[cmt.revision].append(cmt)
1909 1909 return grouped
1910 1910
1911 1911 def statuses(self, revisions=None):
1912 1912 """
1913 1913 Returns statuses for this repository
1914 1914
1915 1915 :param revisions: list of revisions to get statuses for
1916 1916 """
1917 1917 statuses = ChangesetStatus.query()\
1918 1918 .filter(ChangesetStatus.repo == self)\
1919 1919 .filter(ChangesetStatus.version == 0)
1920 1920
1921 1921 if revisions:
1922 1922 # Try doing the filtering in chunks to avoid hitting limits
1923 1923 size = 500
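            # e.g. 1,200 revisions are filtered via three IN(...) queries over
            # revisions[0:500], revisions[500:1000] and revisions[1000:1200]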
1924 1924 status_results = []
1925 1925 for chunk in xrange(0, len(revisions), size):
1926 1926 status_results += statuses.filter(
1927 1927 ChangesetStatus.revision.in_(
1928 1928 revisions[chunk: chunk+size])
1929 1929 ).all()
1930 1930 else:
1931 1931 status_results = statuses.all()
1932 1932
1933 1933 grouped = {}
1934 1934
1935 1935         # maybe we have an open pull request without a status yet?
1936 1936 stat = ChangesetStatus.STATUS_UNDER_REVIEW
1937 1937 status_lbl = ChangesetStatus.get_status_lbl(stat)
1938 1938 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
1939 1939 for rev in pr.revisions:
1940 1940 pr_id = pr.pull_request_id
1941 1941 pr_repo = pr.target_repo.repo_name
1942 1942 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
1943 1943
1944 1944 for stat in status_results:
1945 1945 pr_id = pr_repo = None
1946 1946 if stat.pull_request:
1947 1947 pr_id = stat.pull_request.pull_request_id
1948 1948 pr_repo = stat.pull_request.target_repo.repo_name
1949 1949 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
1950 1950 pr_id, pr_repo]
1951 1951 return grouped
1952 1952
1953 1953 # ==========================================================================
1954 1954 # SCM CACHE INSTANCE
1955 1955 # ==========================================================================
1956 1956
1957 1957 def scm_instance(self, **kwargs):
1958 1958 import rhodecode
1959 1959
1960 1960         # Passing a config will not hit the cache; currently this is only
1961 1961         # used for repo2dbmapper
1962 1962 config = kwargs.pop('config', None)
1963 1963 cache = kwargs.pop('cache', None)
1964 1964 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
1965 1965         # if cache is NOT defined, use the global default; otherwise we have
1966 1966         # full control over the cache behaviour
1967 1967 if cache is None and full_cache and not config:
1968 1968 return self._get_instance_cached()
1969 1969 return self._get_instance(cache=bool(cache), config=config)
1970 1970
1971 1971 def _get_instance_cached(self):
1972 1972 @cache_region('long_term')
1973 1973 def _get_repo(cache_key):
1974 1974 return self._get_instance()
1975 1975
1976 1976 invalidator_context = CacheKey.repo_context_cache(
1977 1977 _get_repo, self.repo_name, None, thread_scoped=True)
1978 1978
1979 1979 with invalidator_context as context:
1980 1980 context.invalidate()
1981 1981 repo = context.compute()
1982 1982
1983 1983 return repo
1984 1984
1985 1985 def _get_instance(self, cache=True, config=None):
1986 1986 config = config or self._config
1987 1987 custom_wire = {
1988 1988 'cache': cache # controls the vcs.remote cache
1989 1989 }
1990 1990
1991 1991 repo = get_vcs_instance(
1992 1992 repo_path=safe_str(self.repo_full_path),
1993 1993 config=config,
1994 1994 with_wire=custom_wire,
1995 1995 create=False)
1996 1996
1997 1997 return repo
1998 1998
1999 1999 def __json__(self):
2000 2000 return {'landing_rev': self.landing_rev}
2001 2001
2002 2002 def get_dict(self):
2003 2003
2004 2004 # Since we transformed `repo_name` to a hybrid property, we need to
2005 2005 # keep compatibility with the code which uses `repo_name` field.
2006 2006
2007 2007 result = super(Repository, self).get_dict()
2008 2008 result['repo_name'] = result.pop('_repo_name', None)
2009 2009 return result
2010 2010
2011 2011
2012 2012 class RepoGroup(Base, BaseModel):
2013 2013 __tablename__ = 'groups'
2014 2014 __table_args__ = (
2015 2015 UniqueConstraint('group_name', 'group_parent_id'),
2016 2016 CheckConstraint('group_id != group_parent_id'),
2017 2017 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2018 2018 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2019 2019 )
2020 2020 __mapper_args__ = {'order_by': 'group_name'}
2021 2021
2022 2022 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2023 2023
2024 2024 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2025 2025 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2026 2026 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2027 2027 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2028 2028 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2029 2029 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2030 2030 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2031 2031
2032 2032 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2033 2033 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2034 2034 parent_group = relationship('RepoGroup', remote_side=group_id)
2035 2035 user = relationship('User')
2036 2036 integrations = relationship('Integration',
2037 2037 cascade="all, delete, delete-orphan")
2038 2038
2039 2039 def __init__(self, group_name='', parent_group=None):
2040 2040 self.group_name = group_name
2041 2041 self.parent_group = parent_group
2042 2042
2043 2043 def __unicode__(self):
2044 2044 return u"<%s('id:%s:%s')>" % (self.__class__.__name__, self.group_id,
2045 2045 self.group_name)
2046 2046
2047 2047 @classmethod
2048 2048 def _generate_choice(cls, repo_group):
2049 2049 from webhelpers.html import literal as _literal
2050 2050 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2051 2051 return repo_group.group_id, _name(repo_group.full_path_splitted)
2052 2052
2053 2053 @classmethod
2054 2054 def groups_choices(cls, groups=None, show_empty_group=True):
2055 2055 if not groups:
2056 2056 groups = cls.query().all()
2057 2057
2058 2058 repo_groups = []
2059 2059 if show_empty_group:
2060 2060 repo_groups = [('-1', u'-- %s --' % _('No parent'))]
2061 2061
2062 2062 repo_groups.extend([cls._generate_choice(x) for x in groups])
2063 2063
2064 2064 repo_groups = sorted(
2065 2065 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2066 2066 return repo_groups
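        # Example output (ids and names illustrative): for nested groups 'web'
        # and 'web/apps' this returns roughly
        # [('-1', u'-- No parent --'), (1, u'web'), (2, u'web/apps')],
        # i.e. (id, label) pairs ready for a select2 widget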
2067 2067
2068 2068 @classmethod
2069 2069 def url_sep(cls):
2070 2070 return URL_SEP
2071 2071
2072 2072 @classmethod
2073 2073 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2074 2074 if case_insensitive:
2075 2075 gr = cls.query().filter(func.lower(cls.group_name)
2076 2076 == func.lower(group_name))
2077 2077 else:
2078 2078 gr = cls.query().filter(cls.group_name == group_name)
2079 2079 if cache:
2080 2080 gr = gr.options(FromCache(
2081 2081 "sql_cache_short",
2082 2082 "get_group_%s" % _hash_key(group_name)))
2083 2083 return gr.scalar()
2084 2084
2085 2085 @classmethod
2086 2086 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2087 2087 case_insensitive=True):
2088 2088 q = RepoGroup.query()
2089 2089
2090 2090 if not isinstance(user_id, Optional):
2091 2091 q = q.filter(RepoGroup.user_id == user_id)
2092 2092
2093 2093 if not isinstance(group_id, Optional):
2094 2094 q = q.filter(RepoGroup.group_parent_id == group_id)
2095 2095
2096 2096 if case_insensitive:
2097 2097 q = q.order_by(func.lower(RepoGroup.group_name))
2098 2098 else:
2099 2099 q = q.order_by(RepoGroup.group_name)
2100 2100 return q.all()
2101 2101
2102 2102 @property
2103 2103 def parents(self):
2104 2104 parents_recursion_limit = 10
2105 2105 groups = []
2106 2106 if self.parent_group is None:
2107 2107 return groups
2108 2108 cur_gr = self.parent_group
2109 2109 groups.insert(0, cur_gr)
2110 2110 cnt = 0
2111 2111 while 1:
2112 2112 cnt += 1
2113 2113 gr = getattr(cur_gr, 'parent_group', None)
2114 2114 cur_gr = cur_gr.parent_group
2115 2115 if gr is None:
2116 2116 break
2117 2117 if cnt == parents_recursion_limit:
2118 2118                 # this will prevent accidental infinite loops
2119 2119 log.error(('more than %s parents found for group %s, stopping '
2120 2120 'recursive parent fetching' % (parents_recursion_limit, self)))
2121 2121 break
2122 2122
2123 2123 groups.insert(0, gr)
2124 2124 return groups
2125 2125
2126 2126 @property
2127 2127 def children(self):
2128 2128 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2129 2129
2130 2130 @property
2131 2131 def name(self):
2132 2132 return self.group_name.split(RepoGroup.url_sep())[-1]
2133 2133
2134 2134 @property
2135 2135 def full_path(self):
2136 2136 return self.group_name
2137 2137
2138 2138 @property
2139 2139 def full_path_splitted(self):
2140 2140 return self.group_name.split(RepoGroup.url_sep())
2141 2141
2142 2142 @property
2143 2143 def repositories(self):
2144 2144 return Repository.query()\
2145 2145 .filter(Repository.group == self)\
2146 2146 .order_by(Repository.repo_name)
2147 2147
2148 2148 @property
2149 2149 def repositories_recursive_count(self):
2150 2150 cnt = self.repositories.count()
2151 2151
2152 2152 def children_count(group):
2153 2153 cnt = 0
2154 2154 for child in group.children:
2155 2155 cnt += child.repositories.count()
2156 2156 cnt += children_count(child)
2157 2157 return cnt
2158 2158
2159 2159 return cnt + children_count(self)
2160 2160
2161 2161 def _recursive_objects(self, include_repos=True):
2162 2162 all_ = []
2163 2163
2164 2164 def _get_members(root_gr):
2165 2165 if include_repos:
2166 2166 for r in root_gr.repositories:
2167 2167 all_.append(r)
2168 2168 childs = root_gr.children.all()
2169 2169 if childs:
2170 2170 for gr in childs:
2171 2171 all_.append(gr)
2172 2172 _get_members(gr)
2173 2173
2174 2174 _get_members(self)
2175 2175 return [self] + all_
2176 2176
2177 2177 def recursive_groups_and_repos(self):
2178 2178 """
2179 2179         Recursively return all groups, with the repositories in those groups
2180 2180 """
2181 2181 return self._recursive_objects()
2182 2182
2183 2183 def recursive_groups(self):
2184 2184 """
2185 2185 Returns all children groups for this group including children of children
2186 2186 """
2187 2187 return self._recursive_objects(include_repos=False)
2188 2188
2189 2189 def get_new_name(self, group_name):
2190 2190 """
2191 2191 returns new full group name based on parent and new name
2192 2192
2193 2193 :param group_name:
2194 2194 """
2195 2195 path_prefix = (self.parent_group.full_path_splitted if
2196 2196 self.parent_group else [])
2197 2197 return RepoGroup.url_sep().join(path_prefix + [group_name])
2198 2198
2199 2199 def permissions(self, with_admins=True, with_owner=True):
2200 2200 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2201 2201 q = q.options(joinedload(UserRepoGroupToPerm.group),
2202 2202 joinedload(UserRepoGroupToPerm.user),
2203 2203 joinedload(UserRepoGroupToPerm.permission),)
2204 2204
2205 2205         # get owners, admins and their permissions. We do a trick of re-writing
2206 2206         # objects from sqlalchemy to named-tuples because the sqlalchemy session
2207 2207         # keeps a global reference and changing one object propagates to all
2208 2208         # others. This means that if an admin is also the owner, a change to the
2209 2209         # admin row would propagate to both objects.
2210 2210 perm_rows = []
2211 2211 for _usr in q.all():
2212 2212 usr = AttributeDict(_usr.user.get_dict())
2213 2213 usr.permission = _usr.permission.permission_name
2214 2214 perm_rows.append(usr)
2215 2215
2216 2216         # filter the perm rows by 'default' first, then sort them by
2217 2217         # permission (admin, write, read, none) and alphabetically
2218 2218         # within each permission group
2219 2219 perm_rows = sorted(perm_rows, key=display_sort)
2220 2220
2221 2221 _admin_perm = 'group.admin'
2222 2222 owner_row = []
2223 2223 if with_owner:
2224 2224 usr = AttributeDict(self.user.get_dict())
2225 2225 usr.owner_row = True
2226 2226 usr.permission = _admin_perm
2227 2227 owner_row.append(usr)
2228 2228
2229 2229 super_admin_rows = []
2230 2230 if with_admins:
2231 2231 for usr in User.get_all_super_admins():
2232 2232 # if this admin is also owner, don't double the record
2233 2233 if usr.user_id == owner_row[0].user_id:
2234 2234 owner_row[0].admin_row = True
2235 2235 else:
2236 2236 usr = AttributeDict(usr.get_dict())
2237 2237 usr.admin_row = True
2238 2238 usr.permission = _admin_perm
2239 2239 super_admin_rows.append(usr)
2240 2240
2241 2241 return super_admin_rows + owner_row + perm_rows
2242 2242
2243 2243 def permission_user_groups(self):
2244 2244 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2245 2245 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2246 2246 joinedload(UserGroupRepoGroupToPerm.users_group),
2247 2247 joinedload(UserGroupRepoGroupToPerm.permission),)
2248 2248
2249 2249 perm_rows = []
2250 2250 for _user_group in q.all():
2251 2251 usr = AttributeDict(_user_group.users_group.get_dict())
2252 2252 usr.permission = _user_group.permission.permission_name
2253 2253 perm_rows.append(usr)
2254 2254
2255 2255 return perm_rows
2256 2256
2257 2257 def get_api_data(self):
2258 2258 """
2259 2259 Common function for generating api data
2260 2260
2261 2261 """
2262 2262 group = self
2263 2263 data = {
2264 2264 'group_id': group.group_id,
2265 2265 'group_name': group.group_name,
2266 2266 'group_description': group.group_description,
2267 2267 'parent_group': group.parent_group.group_name if group.parent_group else None,
2268 2268 'repositories': [x.repo_name for x in group.repositories],
2269 2269 'owner': group.user.username,
2270 2270 }
2271 2271 return data
2272 2272
2273 2273
2274 2274 class Permission(Base, BaseModel):
2275 2275 __tablename__ = 'permissions'
2276 2276 __table_args__ = (
2277 2277 Index('p_perm_name_idx', 'permission_name'),
2278 2278 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2279 2279 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2280 2280 )
2281 2281 PERMS = [
2282 2282 ('hg.admin', _('RhodeCode Super Administrator')),
2283 2283
2284 2284 ('repository.none', _('Repository no access')),
2285 2285 ('repository.read', _('Repository read access')),
2286 2286 ('repository.write', _('Repository write access')),
2287 2287 ('repository.admin', _('Repository admin access')),
2288 2288
2289 2289 ('group.none', _('Repository group no access')),
2290 2290 ('group.read', _('Repository group read access')),
2291 2291 ('group.write', _('Repository group write access')),
2292 2292 ('group.admin', _('Repository group admin access')),
2293 2293
2294 2294 ('usergroup.none', _('User group no access')),
2295 2295 ('usergroup.read', _('User group read access')),
2296 2296 ('usergroup.write', _('User group write access')),
2297 2297 ('usergroup.admin', _('User group admin access')),
2298 2298
2299 2299 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2300 2300 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2301 2301
2302 2302 ('hg.usergroup.create.false', _('User Group creation disabled')),
2303 2303 ('hg.usergroup.create.true', _('User Group creation enabled')),
2304 2304
2305 2305 ('hg.create.none', _('Repository creation disabled')),
2306 2306 ('hg.create.repository', _('Repository creation enabled')),
2307 2307 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2308 2308 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2309 2309
2310 2310 ('hg.fork.none', _('Repository forking disabled')),
2311 2311 ('hg.fork.repository', _('Repository forking enabled')),
2312 2312
2313 2313 ('hg.register.none', _('Registration disabled')),
2314 2314 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2315 2315 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2316 2316
2317 2317 ('hg.extern_activate.manual', _('Manual activation of external account')),
2318 2318 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2319 2319
2320 2320 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2321 2321 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2322 2322 ]
2323 2323
2324 2324 # definition of system default permissions for DEFAULT user
2325 2325 DEFAULT_USER_PERMISSIONS = [
2326 2326 'repository.read',
2327 2327 'group.read',
2328 2328 'usergroup.read',
2329 2329 'hg.create.repository',
2330 2330 'hg.repogroup.create.false',
2331 2331 'hg.usergroup.create.false',
2332 2332 'hg.create.write_on_repogroup.true',
2333 2333 'hg.fork.repository',
2334 2334 'hg.register.manual_activate',
2335 2335 'hg.extern_activate.auto',
2336 2336 'hg.inherit_default_perms.true',
2337 2337 ]
2338 2338
2339 2339     # Weight defines which permissions are more important.
2340 2340     # The higher the number, the more important the permission.
2342 2342 PERM_WEIGHTS = {
2343 2343 'repository.none': 0,
2344 2344 'repository.read': 1,
2345 2345 'repository.write': 3,
2346 2346 'repository.admin': 4,
2347 2347
2348 2348 'group.none': 0,
2349 2349 'group.read': 1,
2350 2350 'group.write': 3,
2351 2351 'group.admin': 4,
2352 2352
2353 2353 'usergroup.none': 0,
2354 2354 'usergroup.read': 1,
2355 2355 'usergroup.write': 3,
2356 2356 'usergroup.admin': 4,
2357 2357
2358 2358 'hg.repogroup.create.false': 0,
2359 2359 'hg.repogroup.create.true': 1,
2360 2360
2361 2361 'hg.usergroup.create.false': 0,
2362 2362 'hg.usergroup.create.true': 1,
2363 2363
2364 2364 'hg.fork.none': 0,
2365 2365 'hg.fork.repository': 1,
2366 2366 'hg.create.none': 0,
2367 2367 'hg.create.repository': 1
2368 2368 }
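    # e.g. 'repository.write' (weight 3) outranks 'repository.read' (weight 1),
    # so the higher-weighted permission is treated as the more important one
    # when several apply to the same user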
2369 2369
2370 2370 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2371 2371 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2372 2372 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2373 2373
2374 2374 def __unicode__(self):
2375 2375 return u"<%s('%s:%s')>" % (
2376 2376 self.__class__.__name__, self.permission_id, self.permission_name
2377 2377 )
2378 2378
2379 2379 @classmethod
2380 2380 def get_by_key(cls, key):
2381 2381 return cls.query().filter(cls.permission_name == key).scalar()
2382 2382
2383 2383 @classmethod
2384 2384 def get_default_repo_perms(cls, user_id, repo_id=None):
2385 2385 q = Session().query(UserRepoToPerm, Repository, Permission)\
2386 2386 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2387 2387 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2388 2388 .filter(UserRepoToPerm.user_id == user_id)
2389 2389 if repo_id:
2390 2390 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2391 2391 return q.all()
2392 2392
2393 2393 @classmethod
2394 2394 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2395 2395 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2396 2396 .join(
2397 2397 Permission,
2398 2398 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2399 2399 .join(
2400 2400 Repository,
2401 2401 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2402 2402 .join(
2403 2403 UserGroup,
2404 2404 UserGroupRepoToPerm.users_group_id ==
2405 2405 UserGroup.users_group_id)\
2406 2406 .join(
2407 2407 UserGroupMember,
2408 2408 UserGroupRepoToPerm.users_group_id ==
2409 2409 UserGroupMember.users_group_id)\
2410 2410 .filter(
2411 2411 UserGroupMember.user_id == user_id,
2412 2412 UserGroup.users_group_active == true())
2413 2413 if repo_id:
2414 2414 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2415 2415 return q.all()
2416 2416
2417 2417 @classmethod
2418 2418 def get_default_group_perms(cls, user_id, repo_group_id=None):
2419 2419 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2420 2420 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2421 2421 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2422 2422 .filter(UserRepoGroupToPerm.user_id == user_id)
2423 2423 if repo_group_id:
2424 2424 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2425 2425 return q.all()
2426 2426
2427 2427 @classmethod
2428 2428 def get_default_group_perms_from_user_group(
2429 2429 cls, user_id, repo_group_id=None):
2430 2430 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2431 2431 .join(
2432 2432 Permission,
2433 2433 UserGroupRepoGroupToPerm.permission_id ==
2434 2434 Permission.permission_id)\
2435 2435 .join(
2436 2436 RepoGroup,
2437 2437 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2438 2438 .join(
2439 2439 UserGroup,
2440 2440 UserGroupRepoGroupToPerm.users_group_id ==
2441 2441 UserGroup.users_group_id)\
2442 2442 .join(
2443 2443 UserGroupMember,
2444 2444 UserGroupRepoGroupToPerm.users_group_id ==
2445 2445 UserGroupMember.users_group_id)\
2446 2446 .filter(
2447 2447 UserGroupMember.user_id == user_id,
2448 2448 UserGroup.users_group_active == true())
2449 2449 if repo_group_id:
2450 2450 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2451 2451 return q.all()
2452 2452
2453 2453 @classmethod
2454 2454 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2455 2455 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2456 2456 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2457 2457 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2458 2458 .filter(UserUserGroupToPerm.user_id == user_id)
2459 2459 if user_group_id:
2460 2460 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2461 2461 return q.all()
2462 2462
2463 2463 @classmethod
2464 2464 def get_default_user_group_perms_from_user_group(
2465 2465 cls, user_id, user_group_id=None):
2466 2466 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2467 2467 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2468 2468 .join(
2469 2469 Permission,
2470 2470 UserGroupUserGroupToPerm.permission_id ==
2471 2471 Permission.permission_id)\
2472 2472 .join(
2473 2473 TargetUserGroup,
2474 2474 UserGroupUserGroupToPerm.target_user_group_id ==
2475 2475 TargetUserGroup.users_group_id)\
2476 2476 .join(
2477 2477 UserGroup,
2478 2478 UserGroupUserGroupToPerm.user_group_id ==
2479 2479 UserGroup.users_group_id)\
2480 2480 .join(
2481 2481 UserGroupMember,
2482 2482 UserGroupUserGroupToPerm.user_group_id ==
2483 2483 UserGroupMember.users_group_id)\
2484 2484 .filter(
2485 2485 UserGroupMember.user_id == user_id,
2486 2486 UserGroup.users_group_active == true())
2487 2487 if user_group_id:
2488 2488 q = q.filter(
2489 2489 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2490 2490
2491 2491 return q.all()
2492 2492
2493 2493
2494 2494 class UserRepoToPerm(Base, BaseModel):
2495 2495 __tablename__ = 'repo_to_perm'
2496 2496 __table_args__ = (
2497 2497 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2498 2498 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2499 2499 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2500 2500 )
2501 2501 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2502 2502 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2503 2503 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2504 2504 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2505 2505
2506 2506 user = relationship('User')
2507 2507 repository = relationship('Repository')
2508 2508 permission = relationship('Permission')
2509 2509
2510 2510 @classmethod
2511 2511 def create(cls, user, repository, permission):
2512 2512 n = cls()
2513 2513 n.user = user
2514 2514 n.repository = repository
2515 2515 n.permission = permission
2516 2516 Session().add(n)
2517 2517 return n
2518 2518
2519 2519 def __unicode__(self):
2520 2520 return u'<%s => %s >' % (self.user, self.repository)
2521 2521
2522 2522
2523 2523 class UserUserGroupToPerm(Base, BaseModel):
2524 2524 __tablename__ = 'user_user_group_to_perm'
2525 2525 __table_args__ = (
2526 2526 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2527 2527 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2528 2528 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2529 2529 )
2530 2530 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2531 2531 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2532 2532 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2533 2533 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2534 2534
2535 2535 user = relationship('User')
2536 2536 user_group = relationship('UserGroup')
2537 2537 permission = relationship('Permission')
2538 2538
2539 2539 @classmethod
2540 2540 def create(cls, user, user_group, permission):
2541 2541 n = cls()
2542 2542 n.user = user
2543 2543 n.user_group = user_group
2544 2544 n.permission = permission
2545 2545 Session().add(n)
2546 2546 return n
2547 2547
2548 2548 def __unicode__(self):
2549 2549 return u'<%s => %s >' % (self.user, self.user_group)
2550 2550
2551 2551
2552 2552 class UserToPerm(Base, BaseModel):
2553 2553 __tablename__ = 'user_to_perm'
2554 2554 __table_args__ = (
2555 2555 UniqueConstraint('user_id', 'permission_id'),
2556 2556 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2557 2557 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2558 2558 )
2559 2559 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2560 2560 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2561 2561 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2562 2562
2563 2563 user = relationship('User')
2564 2564 permission = relationship('Permission', lazy='joined')
2565 2565
2566 2566 def __unicode__(self):
2567 2567 return u'<%s => %s >' % (self.user, self.permission)
2568 2568
2569 2569
2570 2570 class UserGroupRepoToPerm(Base, BaseModel):
2571 2571 __tablename__ = 'users_group_repo_to_perm'
2572 2572 __table_args__ = (
2573 2573 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2574 2574 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2575 2575 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2576 2576 )
2577 2577 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2578 2578 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2579 2579 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2580 2580 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2581 2581
2582 2582 users_group = relationship('UserGroup')
2583 2583 permission = relationship('Permission')
2584 2584 repository = relationship('Repository')
2585 2585
2586 2586 @classmethod
2587 2587 def create(cls, users_group, repository, permission):
2588 2588 n = cls()
2589 2589 n.users_group = users_group
2590 2590 n.repository = repository
2591 2591 n.permission = permission
2592 2592 Session().add(n)
2593 2593 return n
2594 2594
2595 2595 def __unicode__(self):
2596 2596 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2597 2597
2598 2598
2599 2599 class UserGroupUserGroupToPerm(Base, BaseModel):
2600 2600 __tablename__ = 'user_group_user_group_to_perm'
2601 2601 __table_args__ = (
2602 2602 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2603 2603 CheckConstraint('target_user_group_id != user_group_id'),
2604 2604 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2605 2605 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2606 2606 )
2607 2607 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2608 2608 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2609 2609 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2610 2610 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2611 2611
2612 2612 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2613 2613 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2614 2614 permission = relationship('Permission')
2615 2615
2616 2616 @classmethod
2617 2617 def create(cls, target_user_group, user_group, permission):
2618 2618 n = cls()
2619 2619 n.target_user_group = target_user_group
2620 2620 n.user_group = user_group
2621 2621 n.permission = permission
2622 2622 Session().add(n)
2623 2623 return n
2624 2624
2625 2625 def __unicode__(self):
2626 2626 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2627 2627
2628 2628
2629 2629 class UserGroupToPerm(Base, BaseModel):
2630 2630 __tablename__ = 'users_group_to_perm'
2631 2631 __table_args__ = (
2632 2632 UniqueConstraint('users_group_id', 'permission_id',),
2633 2633 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2634 2634 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2635 2635 )
2636 2636 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2637 2637 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2638 2638 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2639 2639
2640 2640 users_group = relationship('UserGroup')
2641 2641 permission = relationship('Permission')
2642 2642
2643 2643
2644 2644 class UserRepoGroupToPerm(Base, BaseModel):
2645 2645 __tablename__ = 'user_repo_group_to_perm'
2646 2646 __table_args__ = (
2647 2647 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2648 2648 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2649 2649 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2650 2650 )
2651 2651
2652 2652 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2653 2653 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2654 2654 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2655 2655 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2656 2656
2657 2657 user = relationship('User')
2658 2658 group = relationship('RepoGroup')
2659 2659 permission = relationship('Permission')
2660 2660
2661 2661 @classmethod
2662 2662 def create(cls, user, repository_group, permission):
2663 2663 n = cls()
2664 2664 n.user = user
2665 2665 n.group = repository_group
2666 2666 n.permission = permission
2667 2667 Session().add(n)
2668 2668 return n
2669 2669
2670 2670
2671 2671 class UserGroupRepoGroupToPerm(Base, BaseModel):
2672 2672 __tablename__ = 'users_group_repo_group_to_perm'
2673 2673 __table_args__ = (
2674 2674 UniqueConstraint('users_group_id', 'group_id'),
2675 2675 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2676 2676 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2677 2677 )
2678 2678
2679 2679 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2680 2680 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2681 2681 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2682 2682 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2683 2683
2684 2684 users_group = relationship('UserGroup')
2685 2685 permission = relationship('Permission')
2686 2686 group = relationship('RepoGroup')
2687 2687
2688 2688 @classmethod
2689 2689 def create(cls, user_group, repository_group, permission):
2690 2690 n = cls()
2691 2691 n.users_group = user_group
2692 2692 n.group = repository_group
2693 2693 n.permission = permission
2694 2694 Session().add(n)
2695 2695 return n
2696 2696
2697 2697 def __unicode__(self):
2698 2698 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
2699 2699
2700 2700
2701 2701 class Statistics(Base, BaseModel):
2702 2702 __tablename__ = 'statistics'
2703 2703 __table_args__ = (
2704 2704 UniqueConstraint('repository_id'),
2705 2705 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2706 2706 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2707 2707 )
2708 2708 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2709 2709 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
2710 2710 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
2711 2711 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
2712 2712 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
2713 2713 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
2714 2714
2715 2715 repository = relationship('Repository', single_parent=True)
2716 2716
2717 2717
2718 2718 class UserFollowing(Base, BaseModel):
2719 2719 __tablename__ = 'user_followings'
2720 2720 __table_args__ = (
2721 2721 UniqueConstraint('user_id', 'follows_repository_id'),
2722 2722 UniqueConstraint('user_id', 'follows_user_id'),
2723 2723 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2724 2724 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2725 2725 )
2726 2726
2727 2727 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2728 2728 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2729 2729 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
2730 2730 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
2731 2731 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2732 2732
2733 2733 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
2734 2734
2735 2735 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
2736 2736 follows_repository = relationship('Repository', order_by='Repository.repo_name')
2737 2737
2738 2738 @classmethod
2739 2739 def get_repo_followers(cls, repo_id):
2740 2740 return cls.query().filter(cls.follows_repo_id == repo_id)
2741 2741
2742 2742
2743 2743 class CacheKey(Base, BaseModel):
2744 2744 __tablename__ = 'cache_invalidation'
2745 2745 __table_args__ = (
2746 2746 UniqueConstraint('cache_key'),
2747 2747 Index('key_idx', 'cache_key'),
2748 2748 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2749 2749 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2750 2750 )
2751 2751 CACHE_TYPE_ATOM = 'ATOM'
2752 2752 CACHE_TYPE_RSS = 'RSS'
2753 2753 CACHE_TYPE_README = 'README'
2754 2754
2755 2755 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2756 2756 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
2757 2757 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
2758 2758 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
2759 2759
2760 2760 def __init__(self, cache_key, cache_args=''):
2761 2761 self.cache_key = cache_key
2762 2762 self.cache_args = cache_args
2763 2763 self.cache_active = False
2764 2764
2765 2765 def __unicode__(self):
2766 2766 return u"<%s('%s:%s[%s]')>" % (
2767 2767 self.__class__.__name__,
2768 2768 self.cache_id, self.cache_key, self.cache_active)
2769 2769
2770 2770 def _cache_key_partition(self):
2771 2771 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
2772 2772 return prefix, repo_name, suffix
2773 2773
2774 2774 def get_prefix(self):
2775 2775 """
2776 2776 Try to extract the prefix from an existing cache key. The key may
2777 2777 consist of prefix, repo_name and suffix.
2778 2778 """
2779 2779 # this returns prefix, repo_name, suffix
2780 2780 return self._cache_key_partition()[0]
2781 2781
2782 2782 def get_suffix(self):
2783 2783 """
2784 2784 Get the suffix that might have been used in _get_cache_key to
2785 2785 generate self.cache_key. Only used for informational purposes
2786 2786 in repo_edit.html.
2787 2787 """
2788 2788 # prefix, repo_name, suffix
2789 2789 return self._cache_key_partition()[2]
2790 2790
2791 2791 @classmethod
2792 2792 def delete_all_cache(cls):
2793 2793 """
2794 2794 Delete all cache keys from database.
2795 2795 Should only be run when all instances are down and all entries
2796 2796 thus stale.
2797 2797 """
2798 2798 cls.query().delete()
2799 2799 Session().commit()
2800 2800
2801 2801 @classmethod
2802 2802 def get_cache_key(cls, repo_name, cache_type):
2803 2803 """
2804 2804
2805 2805 Generate a cache key for this RhodeCode instance. The prefix will
2806 2806 most likely be the process id, or an instance_id explicitly set
2807 2807 in the .ini file.
2808 2808 """
2809 2809 import rhodecode
2810 2810 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
2811 2811
2812 2812 repo_as_unicode = safe_unicode(repo_name)
2813 2813 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
2814 2814 if cache_type else repo_as_unicode
2815 2815
2816 2816 return u'{}{}'.format(prefix, key)
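
The composed key is simply an optional per-instance prefix, the repository name and, if given, the cache type. A standalone sketch mirroring the classmethod above; the function name is invented for illustration:

def make_cache_key(repo_name, cache_type, instance_prefix=u''):
    # Mirrors CacheKey.get_cache_key(): '<prefix><repo_name>_<cache_type>',
    # or just '<prefix><repo_name>' when no cache_type is given.
    key = u'{}_{}'.format(repo_name, cache_type) if cache_type else repo_name
    return u'{}{}'.format(instance_prefix, key)

assert make_cache_key(u'my-repo', u'README', u'instance1-') == u'instance1-my-repo_README'
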
2817 2817
2818 2818 @classmethod
2819 2819 def set_invalidate(cls, repo_name, delete=False):
2820 2820 """
2821 2821 Mark all caches of a repo as invalid in the database.
2822 2822 """
2823 2823
2824 2824 try:
2825 2825 qry = Session().query(cls).filter(cls.cache_args == repo_name)
2826 2826 if delete:
2827 2827 log.debug('cache objects deleted for repo %s',
2828 2828 safe_str(repo_name))
2829 2829 qry.delete()
2830 2830 else:
2831 2831 log.debug('cache objects marked as invalid for repo %s',
2832 2832 safe_str(repo_name))
2833 2833 qry.update({"cache_active": False})
2834 2834
2835 2835 Session().commit()
2836 2836 except Exception:
2837 2837 log.exception(
2838 2838 'Cache key invalidation failed for repository %s',
2839 2839 safe_str(repo_name))
2840 2840 Session().rollback()
2841 2841
2842 2842 @classmethod
2843 2843 def get_active_cache(cls, cache_key):
2844 2844 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
2845 2845 if inv_obj:
2846 2846 return inv_obj
2847 2847 return None
2848 2848
2849 2849 @classmethod
2850 2850 def repo_context_cache(cls, compute_func, repo_name, cache_type,
2851 2851 thread_scoped=False):
2852 2852 """
2853 2853 @cache_region('long_term')
2854 2854 def _heavy_calculation(cache_key):
2855 2855 return 'result'
2856 2856
2857 2857 cache_context = CacheKey.repo_context_cache(
2858 2858 _heavy_calculation, repo_name, cache_type)
2859 2859
2860 2860 with cache_context as context:
2861 2861 context.invalidate()
2862 2862 computed = context.compute()
2863 2863
2864 2864 assert computed == 'result'
2865 2865 """
2866 2866 from rhodecode.lib import caches
2867 2867 return caches.InvalidationContext(
2868 2868 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
2869 2869
2870 2870
2871 2871 class ChangesetComment(Base, BaseModel):
2872 2872 __tablename__ = 'changeset_comments'
2873 2873 __table_args__ = (
2874 2874 Index('cc_revision_idx', 'revision'),
2875 2875 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2876 2876 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2877 2877 )
2878 2878
2879 2879 COMMENT_OUTDATED = u'comment_outdated'
2880 2880
2881 2881 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
2882 2882 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2883 2883 revision = Column('revision', String(40), nullable=True)
2884 2884 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2885 2885 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
2886 2886 line_no = Column('line_no', Unicode(10), nullable=True)
2887 2887 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
2888 2888 f_path = Column('f_path', Unicode(1000), nullable=True)
2889 2889 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
2890 2890 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
2891 2891 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2892 2892 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2893 2893 renderer = Column('renderer', Unicode(64), nullable=True)
2894 2894 display_state = Column('display_state', Unicode(128), nullable=True)
2895 2895
2896 2896 author = relationship('User', lazy='joined')
2897 2897 repo = relationship('Repository')
2898 2898 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan")
2899 2899 pull_request = relationship('PullRequest', lazy='joined')
2900 2900 pull_request_version = relationship('PullRequestVersion')
2901 2901
2902 2902 @classmethod
2903 2903 def get_users(cls, revision=None, pull_request_id=None):
2904 2904 """
2905 2905 Returns the users associated with this ChangesetComment, i.e. those
2906 2906 who actually commented.
2907 2907
2908 2908 :param cls:
2909 2909 :param revision:
2910 2910 """
2911 2911 q = Session().query(User)\
2912 2912 .join(ChangesetComment.author)
2913 2913 if revision:
2914 2914 q = q.filter(cls.revision == revision)
2915 2915 elif pull_request_id:
2916 2916 q = q.filter(cls.pull_request_id == pull_request_id)
2917 2917 return q.all()
2918 2918
2919 2919 def render(self, mentions=False):
2920 2920 from rhodecode.lib import helpers as h
2921 2921 return h.render(self.text, renderer=self.renderer, mentions=mentions)
2922 2922
2923 2923 def __repr__(self):
2924 2924 if self.comment_id:
2925 2925 return '<DB:ChangesetComment #%s>' % self.comment_id
2926 2926 else:
2927 2927 return '<DB:ChangesetComment at %#x>' % id(self)
2928 2928
2929 2929
2930 2930 class ChangesetStatus(Base, BaseModel):
2931 2931 __tablename__ = 'changeset_statuses'
2932 2932 __table_args__ = (
2933 2933 Index('cs_revision_idx', 'revision'),
2934 2934 Index('cs_version_idx', 'version'),
2935 2935 UniqueConstraint('repo_id', 'revision', 'version'),
2936 2936 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2937 2937 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2938 2938 )
2939 2939 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
2940 2940 STATUS_APPROVED = 'approved'
2941 2941 STATUS_REJECTED = 'rejected'
2942 2942 STATUS_UNDER_REVIEW = 'under_review'
2943 2943
2944 2944 STATUSES = [
2945 2945 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
2946 2946 (STATUS_APPROVED, _("Approved")),
2947 2947 (STATUS_REJECTED, _("Rejected")),
2948 2948 (STATUS_UNDER_REVIEW, _("Under Review")),
2949 2949 ]
2950 2950
2951 2951 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
2952 2952 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
2953 2953 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
2954 2954 revision = Column('revision', String(40), nullable=False)
2955 2955 status = Column('status', String(128), nullable=False, default=DEFAULT)
2956 2956 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
2957 2957 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
2958 2958 version = Column('version', Integer(), nullable=False, default=0)
2959 2959 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
2960 2960
2961 2961 author = relationship('User', lazy='joined')
2962 2962 repo = relationship('Repository')
2963 2963 comment = relationship('ChangesetComment', lazy='joined')
2964 2964 pull_request = relationship('PullRequest', lazy='joined')
2965 2965
2966 2966 def __unicode__(self):
2967 2967 return u"<%s('%s[%s]:%s')>" % (
2968 2968 self.__class__.__name__,
2969 2969 self.status, self.version, self.author
2970 2970 )
2971 2971
2972 2972 @classmethod
2973 2973 def get_status_lbl(cls, value):
2974 2974 return dict(cls.STATUSES).get(value)
2975 2975
2976 2976 @property
2977 2977 def status_lbl(self):
2978 2978 return ChangesetStatus.get_status_lbl(self.status)
2979 2979
2980 2980
2981 2981 class _PullRequestBase(BaseModel):
2982 2982 """
2983 2983 Common attributes of pull request and version entries.
2984 2984 """
2985 2985
2986 2986 # .status values
2987 2987 STATUS_NEW = u'new'
2988 2988 STATUS_OPEN = u'open'
2989 2989 STATUS_CLOSED = u'closed'
2990 2990
2991 2991 title = Column('title', Unicode(255), nullable=True)
2992 2992 description = Column(
2993 2993 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
2994 2994 nullable=True)
2995 2995 # new/open/closed status of pull request (not approve/reject/etc)
2996 2996 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
2997 2997 created_on = Column(
2998 2998 'created_on', DateTime(timezone=False), nullable=False,
2999 2999 default=datetime.datetime.now)
3000 3000 updated_on = Column(
3001 3001 'updated_on', DateTime(timezone=False), nullable=False,
3002 3002 default=datetime.datetime.now)
3003 3003
3004 3004 @declared_attr
3005 3005 def user_id(cls):
3006 3006 return Column(
3007 3007 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3008 3008 unique=None)
3009 3009
3010 3010 # 500 revisions max
3011 3011 _revisions = Column(
3012 3012 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3013 3013
3014 3014 @declared_attr
3015 3015 def source_repo_id(cls):
3016 3016 # TODO: dan: rename column to source_repo_id
3017 3017 return Column(
3018 3018 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3019 3019 nullable=False)
3020 3020
3021 3021 source_ref = Column('org_ref', Unicode(255), nullable=False)
3022 3022
3023 3023 @declared_attr
3024 3024 def target_repo_id(cls):
3025 3025 # TODO: dan: rename column to target_repo_id
3026 3026 return Column(
3027 3027 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3028 3028 nullable=False)
3029 3029
3030 3030 target_ref = Column('other_ref', Unicode(255), nullable=False)
3031 3031
3032 3032 # TODO: dan: rename column to last_merge_source_rev
3033 3033 _last_merge_source_rev = Column(
3034 3034 'last_merge_org_rev', String(40), nullable=True)
3035 3035 # TODO: dan: rename column to last_merge_target_rev
3036 3036 _last_merge_target_rev = Column(
3037 3037 'last_merge_other_rev', String(40), nullable=True)
3038 3038 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3039 3039 merge_rev = Column('merge_rev', String(40), nullable=True)
3040 3040
3041 3041 @hybrid_property
3042 3042 def revisions(self):
3043 3043 return self._revisions.split(':') if self._revisions else []
3044 3044
3045 3045 @revisions.setter
3046 3046 def revisions(self, val):
3047 3047 self._revisions = ':'.join(val)
3048 3048
3049 3049 @declared_attr
3050 3050 def author(cls):
3051 3051 return relationship('User', lazy='joined')
3052 3052
3053 3053 @declared_attr
3054 3054 def source_repo(cls):
3055 3055 return relationship(
3056 3056 'Repository',
3057 3057 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3058 3058
3059 3059 @property
3060 3060 def source_ref_parts(self):
3061 3061 refs = self.source_ref.split(':')
3062 3062 return Reference(refs[0], refs[1], refs[2])
3063 3063
3064 3064 @declared_attr
3065 3065 def target_repo(cls):
3066 3066 return relationship(
3067 3067 'Repository',
3068 3068 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3069 3069
3070 3070 @property
3071 3071 def target_ref_parts(self):
3072 3072 refs = self.target_ref.split(':')
3073 3073 return Reference(refs[0], refs[1], refs[2])
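
Both *_ref columns store a single colon-separated string which the two properties above unpack into a Reference tuple. A small sketch of that round trip; the 'type:name:commit_id' layout of the string and the field order of the tuple are inferred from how the split result is indexed:

from collections import namedtuple

# Stand-in for rhodecode.lib.vcs.backends.base.Reference (assumed field order).
Reference = namedtuple('Reference', ['type', 'name', 'commit_id'])

def parse_ref(ref_string):
    # Mirrors source_ref_parts / target_ref_parts: split on ':' and wrap.
    refs = ref_string.split(':')
    return Reference(refs[0], refs[1], refs[2])

ref = parse_ref('branch:default:1a2b3c4d')
assert ref.name == 'default' and ref.commit_id == '1a2b3c4d'
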
3074 3074
3075 3075
3076 3076 class PullRequest(Base, _PullRequestBase):
3077 3077 __tablename__ = 'pull_requests'
3078 3078 __table_args__ = (
3079 3079 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3080 3080 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3081 3081 )
3082 3082
3083 3083 pull_request_id = Column(
3084 3084 'pull_request_id', Integer(), nullable=False, primary_key=True)
3085 3085
3086 3086 def __repr__(self):
3087 3087 if self.pull_request_id:
3088 3088 return '<DB:PullRequest #%s>' % self.pull_request_id
3089 3089 else:
3090 3090 return '<DB:PullRequest at %#x>' % id(self)
3091 3091
3092 3092 reviewers = relationship('PullRequestReviewers',
3093 3093 cascade="all, delete, delete-orphan")
3094 3094 statuses = relationship('ChangesetStatus')
3095 3095 comments = relationship('ChangesetComment',
3096 3096 cascade="all, delete, delete-orphan")
3097 3097 versions = relationship('PullRequestVersion',
3098 3098 cascade="all, delete, delete-orphan")
3099 3099
3100 3100 def is_closed(self):
3101 3101 return self.status == self.STATUS_CLOSED
3102 3102
3103 3103 def get_api_data(self):
3104 3104 from rhodecode.model.pull_request import PullRequestModel
3105 3105 pull_request = self
3106 3106 merge_status = PullRequestModel().merge_status(pull_request)
3107 3107 pull_request_url = url(
3108 3108 'pullrequest_show', repo_name=self.target_repo.repo_name,
3109 3109 pull_request_id=self.pull_request_id, qualified=True)
3110 3110 data = {
3111 3111 'pull_request_id': pull_request.pull_request_id,
3112 3112 'url': pull_request_url,
3113 3113 'title': pull_request.title,
3114 3114 'description': pull_request.description,
3115 3115 'status': pull_request.status,
3116 3116 'created_on': pull_request.created_on,
3117 3117 'updated_on': pull_request.updated_on,
3118 3118 'commit_ids': pull_request.revisions,
3119 3119 'review_status': pull_request.calculated_review_status(),
3120 3120 'mergeable': {
3121 3121 'status': merge_status[0],
3122 3122 'message': unicode(merge_status[1]),
3123 3123 },
3124 3124 'source': {
3125 3125 'clone_url': pull_request.source_repo.clone_url(),
3126 3126 'repository': pull_request.source_repo.repo_name,
3127 3127 'reference': {
3128 3128 'name': pull_request.source_ref_parts.name,
3129 3129 'type': pull_request.source_ref_parts.type,
3130 3130 'commit_id': pull_request.source_ref_parts.commit_id,
3131 3131 },
3132 3132 },
3133 3133 'target': {
3134 3134 'clone_url': pull_request.target_repo.clone_url(),
3135 3135 'repository': pull_request.target_repo.repo_name,
3136 3136 'reference': {
3137 3137 'name': pull_request.target_ref_parts.name,
3138 3138 'type': pull_request.target_ref_parts.type,
3139 3139 'commit_id': pull_request.target_ref_parts.commit_id,
3140 3140 },
3141 3141 },
3142 3142 'shadow': {
3143 # TODO: martinb: Unify generation/suffix of clone url.
3144 'clone_url': '{}/repository'.format(pull_request_url),
3143 'clone_url': PullRequestModel().get_shadow_clone_url(
3144 pull_request),
3145 3145 },
3146 3146 'author': pull_request.author.get_api_data(include_secrets=False,
3147 3147 details='basic'),
3148 3148 'reviewers': [
3149 3149 {
3150 3150 'user': reviewer.get_api_data(include_secrets=False,
3151 3151 details='basic'),
3152 3152 'reasons': reasons,
3153 3153 'review_status': st[0][1].status if st else 'not_reviewed',
3154 3154 }
3155 3155 for reviewer, reasons, st in pull_request.reviewers_statuses()
3156 3156 ]
3157 3157 }
3158 3158
3159 3159 return data
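
The 'shadow' entry is the part this change touches: instead of building the clone URL inline ('{pull_request_url}/repository', as the removed line did), get_api_data now delegates to PullRequestModel().get_shadow_clone_url() so the URL is produced in a single place. A hedged sketch of what such a helper boils down to, based only on the inline code that was removed:

def shadow_clone_url(pull_request_url):
    # Assumption: the unified helper keeps the '<pr_url>/repository' suffix
    # the old inline code produced; only where it is generated changes.
    return '{}/repository'.format(pull_request_url)

assert (shadow_clone_url('https://code.example.com/repo/pull-request/42')
        == 'https://code.example.com/repo/pull-request/42/repository')
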
3160 3160
3161 3161 def __json__(self):
3162 3162 return {
3163 3163 'revisions': self.revisions,
3164 3164 }
3165 3165
3166 3166 def calculated_review_status(self):
3167 3167 # TODO: anderson: 13.05.15 Used only on templates/my_account_pullrequests.html
3168 3168 # because it is tricky to use ChangesetStatusModel from there
3169 3169 warnings.warn("Use calculated_review_status from ChangesetStatusModel", DeprecationWarning)
3170 3170 from rhodecode.model.changeset_status import ChangesetStatusModel
3171 3171 return ChangesetStatusModel().calculated_review_status(self)
3172 3172
3173 3173 def reviewers_statuses(self):
3174 3174 warnings.warn("Use reviewers_statuses from ChangesetStatusModel", DeprecationWarning)
3175 3175 from rhodecode.model.changeset_status import ChangesetStatusModel
3176 3176 return ChangesetStatusModel().reviewers_statuses(self)
3177 3177
3178 3178
3179 3179 class PullRequestVersion(Base, _PullRequestBase):
3180 3180 __tablename__ = 'pull_request_versions'
3181 3181 __table_args__ = (
3182 3182 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3183 3183 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3184 3184 )
3185 3185
3186 3186 pull_request_version_id = Column(
3187 3187 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3188 3188 pull_request_id = Column(
3189 3189 'pull_request_id', Integer(),
3190 3190 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3191 3191 pull_request = relationship('PullRequest')
3192 3192
3193 3193 def __repr__(self):
3194 3194 if self.pull_request_version_id:
3195 3195 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3196 3196 else:
3197 3197 return '<DB:PullRequestVersion at %#x>' % id(self)
3198 3198
3199 3199
3200 3200 class PullRequestReviewers(Base, BaseModel):
3201 3201 __tablename__ = 'pull_request_reviewers'
3202 3202 __table_args__ = (
3203 3203 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3204 3204 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3205 3205 )
3206 3206
3207 3207 def __init__(self, user=None, pull_request=None, reasons=None):
3208 3208 self.user = user
3209 3209 self.pull_request = pull_request
3210 3210 self.reasons = reasons or []
3211 3211
3212 3212 @hybrid_property
3213 3213 def reasons(self):
3214 3214 if not self._reasons:
3215 3215 return []
3216 3216 return self._reasons
3217 3217
3218 3218 @reasons.setter
3219 3219 def reasons(self, val):
3220 3220 val = val or []
3221 3221 if any(not isinstance(x, basestring) for x in val):
3222 3222 raise Exception('invalid reasons type, must be list of strings')
3223 3223 self._reasons = val
3224 3224
3225 3225 pull_requests_reviewers_id = Column(
3226 3226 'pull_requests_reviewers_id', Integer(), nullable=False,
3227 3227 primary_key=True)
3228 3228 pull_request_id = Column(
3229 3229 "pull_request_id", Integer(),
3230 3230 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3231 3231 user_id = Column(
3232 3232 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3233 3233 _reasons = Column(
3234 3234 'reason', MutationList.as_mutable(
3235 3235 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3236 3236
3237 3237 user = relationship('User')
3238 3238 pull_request = relationship('PullRequest')
3239 3239
3240 3240
3241 3241 class Notification(Base, BaseModel):
3242 3242 __tablename__ = 'notifications'
3243 3243 __table_args__ = (
3244 3244 Index('notification_type_idx', 'type'),
3245 3245 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3246 3246 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3247 3247 )
3248 3248
3249 3249 TYPE_CHANGESET_COMMENT = u'cs_comment'
3250 3250 TYPE_MESSAGE = u'message'
3251 3251 TYPE_MENTION = u'mention'
3252 3252 TYPE_REGISTRATION = u'registration'
3253 3253 TYPE_PULL_REQUEST = u'pull_request'
3254 3254 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3255 3255
3256 3256 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3257 3257 subject = Column('subject', Unicode(512), nullable=True)
3258 3258 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3259 3259 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3260 3260 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3261 3261 type_ = Column('type', Unicode(255))
3262 3262
3263 3263 created_by_user = relationship('User')
3264 3264 notifications_to_users = relationship('UserNotification', lazy='joined',
3265 3265 cascade="all, delete, delete-orphan")
3266 3266
3267 3267 @property
3268 3268 def recipients(self):
3269 3269 return [x.user for x in UserNotification.query()\
3270 3270 .filter(UserNotification.notification == self)\
3271 3271 .order_by(UserNotification.user_id.asc()).all()]
3272 3272
3273 3273 @classmethod
3274 3274 def create(cls, created_by, subject, body, recipients, type_=None):
3275 3275 if type_ is None:
3276 3276 type_ = Notification.TYPE_MESSAGE
3277 3277
3278 3278 notification = cls()
3279 3279 notification.created_by_user = created_by
3280 3280 notification.subject = subject
3281 3281 notification.body = body
3282 3282 notification.type_ = type_
3283 3283 notification.created_on = datetime.datetime.now()
3284 3284
3285 3285 for u in recipients:
3286 3286 assoc = UserNotification()
3287 3287 assoc.notification = notification
3288 3288
3289 3289 # if created_by is among the recipients, mark their notification
3290 3290 # as read
3291 3291 if u.user_id == created_by.user_id:
3292 3292 assoc.read = True
3293 3293
3294 3294 u.notifications.append(assoc)
3295 3295 Session().add(notification)
3296 3296
3297 3297 return notification
3298 3298
3299 3299 @property
3300 3300 def description(self):
3301 3301 from rhodecode.model.notification import NotificationModel
3302 3302 return NotificationModel().make_description(self)
3303 3303
3304 3304
3305 3305 class UserNotification(Base, BaseModel):
3306 3306 __tablename__ = 'user_to_notification'
3307 3307 __table_args__ = (
3308 3308 UniqueConstraint('user_id', 'notification_id'),
3309 3309 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3310 3310 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3311 3311 )
3312 3312 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3313 3313 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3314 3314 read = Column('read', Boolean, default=False)
3315 3315 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3316 3316
3317 3317 user = relationship('User', lazy="joined")
3318 3318 notification = relationship('Notification', lazy="joined",
3319 3319 order_by=lambda: Notification.created_on.desc(),)
3320 3320
3321 3321 def mark_as_read(self):
3322 3322 self.read = True
3323 3323 Session().add(self)
3324 3324
3325 3325
3326 3326 class Gist(Base, BaseModel):
3327 3327 __tablename__ = 'gists'
3328 3328 __table_args__ = (
3329 3329 Index('g_gist_access_id_idx', 'gist_access_id'),
3330 3330 Index('g_created_on_idx', 'created_on'),
3331 3331 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3332 3332 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3333 3333 )
3334 3334 GIST_PUBLIC = u'public'
3335 3335 GIST_PRIVATE = u'private'
3336 3336 DEFAULT_FILENAME = u'gistfile1.txt'
3337 3337
3338 3338 ACL_LEVEL_PUBLIC = u'acl_public'
3339 3339 ACL_LEVEL_PRIVATE = u'acl_private'
3340 3340
3341 3341 gist_id = Column('gist_id', Integer(), primary_key=True)
3342 3342 gist_access_id = Column('gist_access_id', Unicode(250))
3343 3343 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3344 3344 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3345 3345 gist_expires = Column('gist_expires', Float(53), nullable=False)
3346 3346 gist_type = Column('gist_type', Unicode(128), nullable=False)
3347 3347 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3348 3348 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3349 3349 acl_level = Column('acl_level', Unicode(128), nullable=True)
3350 3350
3351 3351 owner = relationship('User')
3352 3352
3353 3353 def __repr__(self):
3354 3354 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3355 3355
3356 3356 @classmethod
3357 3357 def get_or_404(cls, id_):
3358 3358 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3359 3359 if not res:
3360 3360 raise HTTPNotFound
3361 3361 return res
3362 3362
3363 3363 @classmethod
3364 3364 def get_by_access_id(cls, gist_access_id):
3365 3365 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3366 3366
3367 3367 def gist_url(self):
3368 3368 import rhodecode
3369 3369 alias_url = rhodecode.CONFIG.get('gist_alias_url')
3370 3370 if alias_url:
3371 3371 return alias_url.replace('{gistid}', self.gist_access_id)
3372 3372
3373 3373 return url('gist', gist_id=self.gist_access_id, qualified=True)
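
gist_url() prefers a configured gist_alias_url and substitutes the gist access id for its '{gistid}' placeholder, falling back to the regular route helper otherwise. A tiny standalone sketch of the substitution step (the function name is invented for illustration):

def expand_gist_alias(alias_url, gist_access_id):
    # Same replacement gist_url() performs when 'gist_alias_url' is configured.
    return alias_url.replace('{gistid}', gist_access_id)

assert (expand_gist_alias('https://gist.example.com/{gistid}', 'aBc123')
        == 'https://gist.example.com/aBc123')
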
3374 3374
3375 3375 @classmethod
3376 3376 def base_path(cls):
3377 3377 """
3378 3378 Returns the base path where all gists are stored
3379 3379
3380 3380 :param cls:
3381 3381 """
3382 3382 from rhodecode.model.gist import GIST_STORE_LOC
3383 3383 q = Session().query(RhodeCodeUi)\
3384 3384 .filter(RhodeCodeUi.ui_key == URL_SEP)
3385 3385 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3386 3386 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3387 3387
3388 3388 def get_api_data(self):
3389 3389 """
3390 3390 Common function for generating gist-related data for the API
3391 3391 """
3392 3392 gist = self
3393 3393 data = {
3394 3394 'gist_id': gist.gist_id,
3395 3395 'type': gist.gist_type,
3396 3396 'access_id': gist.gist_access_id,
3397 3397 'description': gist.gist_description,
3398 3398 'url': gist.gist_url(),
3399 3399 'expires': gist.gist_expires,
3400 3400 'created_on': gist.created_on,
3401 3401 'modified_at': gist.modified_at,
3402 3402 'content': None,
3403 3403 'acl_level': gist.acl_level,
3404 3404 }
3405 3405 return data
3406 3406
3407 3407 def __json__(self):
3408 3408 data = dict(
3409 3409 )
3410 3410 data.update(self.get_api_data())
3411 3411 return data
3412 3412 # SCM functions
3413 3413
3414 3414 def scm_instance(self, **kwargs):
3415 3415 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
3416 3416 return get_vcs_instance(
3417 3417 repo_path=safe_str(full_repo_path), create=False)
3418 3418
3419 3419
3420 3420 class DbMigrateVersion(Base, BaseModel):
3421 3421 __tablename__ = 'db_migrate_version'
3422 3422 __table_args__ = (
3423 3423 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3424 3424 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3425 3425 )
3426 3426 repository_id = Column('repository_id', String(250), primary_key=True)
3427 3427 repository_path = Column('repository_path', Text)
3428 3428 version = Column('version', Integer)
3429 3429
3430 3430
3431 3431 class ExternalIdentity(Base, BaseModel):
3432 3432 __tablename__ = 'external_identities'
3433 3433 __table_args__ = (
3434 3434 Index('local_user_id_idx', 'local_user_id'),
3435 3435 Index('external_id_idx', 'external_id'),
3436 3436 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3437 3437 'mysql_charset': 'utf8'})
3438 3438
3439 3439 external_id = Column('external_id', Unicode(255), default=u'',
3440 3440 primary_key=True)
3441 3441 external_username = Column('external_username', Unicode(1024), default=u'')
3442 3442 local_user_id = Column('local_user_id', Integer(),
3443 3443 ForeignKey('users.user_id'), primary_key=True)
3444 3444 provider_name = Column('provider_name', Unicode(255), default=u'',
3445 3445 primary_key=True)
3446 3446 access_token = Column('access_token', String(1024), default=u'')
3447 3447 alt_token = Column('alt_token', String(1024), default=u'')
3448 3448 token_secret = Column('token_secret', String(1024), default=u'')
3449 3449
3450 3450 @classmethod
3451 3451 def by_external_id_and_provider(cls, external_id, provider_name,
3452 3452 local_user_id=None):
3453 3453 """
3454 3454 Returns ExternalIdentity instance based on search params
3455 3455
3456 3456 :param external_id:
3457 3457 :param provider_name:
3458 3458 :return: ExternalIdentity
3459 3459 """
3460 3460 query = cls.query()
3461 3461 query = query.filter(cls.external_id == external_id)
3462 3462 query = query.filter(cls.provider_name == provider_name)
3463 3463 if local_user_id:
3464 3464 query = query.filter(cls.local_user_id == local_user_id)
3465 3465 return query.first()
3466 3466
3467 3467 @classmethod
3468 3468 def user_by_external_id_and_provider(cls, external_id, provider_name):
3469 3469 """
3470 3470 Returns User instance based on search params
3471 3471
3472 3472 :param external_id:
3473 3473 :param provider_name:
3474 3474 :return: User
3475 3475 """
3476 3476 query = User.query()
3477 3477 query = query.filter(cls.external_id == external_id)
3478 3478 query = query.filter(cls.provider_name == provider_name)
3479 3479 query = query.filter(User.user_id == cls.local_user_id)
3480 3480 return query.first()
3481 3481
3482 3482 @classmethod
3483 3483 def by_local_user_id(cls, local_user_id):
3484 3484 """
3485 3485 Returns all external identities (and their tokens) for the given user
3486 3486
3487 3487 :param local_user_id:
3488 3488 :return: ExternalIdentity
3489 3489 """
3490 3490 query = cls.query()
3491 3491 query = query.filter(cls.local_user_id == local_user_id)
3492 3492 return query
3493 3493
3494 3494
3495 3495 class Integration(Base, BaseModel):
3496 3496 __tablename__ = 'integrations'
3497 3497 __table_args__ = (
3498 3498 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3499 3499 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3500 3500 )
3501 3501
3502 3502 integration_id = Column('integration_id', Integer(), primary_key=True)
3503 3503 integration_type = Column('integration_type', String(255))
3504 3504 enabled = Column('enabled', Boolean(), nullable=False)
3505 3505 name = Column('name', String(255), nullable=False)
3506 3506 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
3507 3507 default=False)
3508 3508
3509 3509 settings = Column(
3510 3510 'settings_json', MutationObj.as_mutable(
3511 3511 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3512 3512 repo_id = Column(
3513 3513 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
3514 3514 nullable=True, unique=None, default=None)
3515 3515 repo = relationship('Repository', lazy='joined')
3516 3516
3517 3517 repo_group_id = Column(
3518 3518 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
3519 3519 nullable=True, unique=None, default=None)
3520 3520 repo_group = relationship('RepoGroup', lazy='joined')
3521 3521
3522 3522 @property
3523 3523 def scope(self):
3524 3524 if self.repo:
3525 3525 return repr(self.repo)
3526 3526 if self.repo_group:
3527 3527 if self.child_repos_only:
3528 3528 return repr(self.repo_group) + ' (child repos only)'
3529 3529 else:
3530 3530 return repr(self.repo_group) + ' (recursive)'
3531 3531 if self.child_repos_only:
3532 3532 return 'root_repos'
3533 3533 return 'global'
3534 3534
3535 3535 def __repr__(self):
3536 3536 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
3537 3537
3538 3538
3539 3539 class RepoReviewRuleUser(Base, BaseModel):
3540 3540 __tablename__ = 'repo_review_rules_users'
3541 3541 __table_args__ = (
3542 3542 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3543 3543 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3544 3544 )
3545 3545 repo_review_rule_user_id = Column(
3546 3546 'repo_review_rule_user_id', Integer(), primary_key=True)
3547 3547 repo_review_rule_id = Column("repo_review_rule_id",
3548 3548 Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3549 3549 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'),
3550 3550 nullable=False)
3551 3551 user = relationship('User')
3552 3552
3553 3553
3554 3554 class RepoReviewRuleUserGroup(Base, BaseModel):
3555 3555 __tablename__ = 'repo_review_rules_users_groups'
3556 3556 __table_args__ = (
3557 3557 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3558 3558 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3559 3559 )
3560 3560 repo_review_rule_users_group_id = Column(
3561 3561 'repo_review_rule_users_group_id', Integer(), primary_key=True)
3562 3562 repo_review_rule_id = Column("repo_review_rule_id",
3563 3563 Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
3564 3564 users_group_id = Column("users_group_id", Integer(),
3565 3565 ForeignKey('users_groups.users_group_id'), nullable=False)
3566 3566 users_group = relationship('UserGroup')
3567 3567
3568 3568
3569 3569 class RepoReviewRule(Base, BaseModel):
3570 3570 __tablename__ = 'repo_review_rules'
3571 3571 __table_args__ = (
3572 3572 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3573 3573 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
3574 3574 )
3575 3575
3576 3576 repo_review_rule_id = Column(
3577 3577 'repo_review_rule_id', Integer(), primary_key=True)
3578 3578 repo_id = Column(
3579 3579 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
3580 3580 repo = relationship('Repository', backref='review_rules')
3581 3581
3582 3582 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'),
3583 3583 default=u'*') # glob
3584 3584 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'),
3585 3585 default=u'*') # glob
3586 3586
3587 3587 use_authors_for_review = Column("use_authors_for_review", Boolean(),
3588 3588 nullable=False, default=False)
3589 3589 rule_users = relationship('RepoReviewRuleUser')
3590 3590 rule_user_groups = relationship('RepoReviewRuleUserGroup')
3591 3591
3592 3592 @hybrid_property
3593 3593 def branch_pattern(self):
3594 3594 return self._branch_pattern or '*'
3595 3595
3596 3596 def _validate_glob(self, value):
3597 3597 re.compile('^' + glob2re(value) + '$')
3598 3598
3599 3599 @branch_pattern.setter
3600 3600 def branch_pattern(self, value):
3601 3601 self._validate_glob(value)
3602 3602 self._branch_pattern = value or '*'
3603 3603
3604 3604 @hybrid_property
3605 3605 def file_pattern(self):
3606 3606 return self._file_pattern or '*'
3607 3607
3608 3608 @file_pattern.setter
3609 3609 def file_pattern(self, value):
3610 3610 self._validate_glob(value)
3611 3611 self._file_pattern = value or '*'
3612 3612
3613 3613 def matches(self, branch, files_changed):
3614 3614 """
3615 3615 Check if this review rule matches a branch/files in a pull request
3616 3616
3617 3617 :param branch: branch name for the commit
3618 3618 :param files_changed: list of file paths changed in the pull request
3619 3619 """
3620 3620
3621 3621 branch = branch or ''
3622 3622 files_changed = files_changed or []
3623 3623
3624 3624 branch_matches = True
3625 3625 if branch:
3626 3626 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
3627 3627 branch_matches = bool(branch_regex.search(branch))
3628 3628
3629 3629 files_matches = True
3630 3630 if self.file_pattern != '*':
3631 3631 files_matches = False
3632 3632 file_regex = re.compile(glob2re(self.file_pattern))
3633 3633 for filename in files_changed:
3634 3634 if file_regex.search(filename):
3635 3635 files_matches = True
3636 3636 break
3637 3637
3638 3638 return branch_matches and files_matches
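
matches() compiles both glob patterns to regular expressions: the branch pattern is anchored ('^...$') and must match the whole branch name, while the file pattern only needs to match somewhere in at least one changed path, with '*' short-circuiting to a match. A standalone sketch of the same logic, using the stdlib fnmatch.translate as a stand-in for RhodeCode's glob2re helper (corner cases of the two translations may differ):

import fnmatch
import re

def rule_matches(branch_pattern, file_pattern, branch, files_changed):
    branch_ok = True
    if branch:
        # Full match against the translated branch glob.
        branch_ok = bool(re.match(fnmatch.translate(branch_pattern), branch))

    files_ok = True
    if file_pattern != '*':
        # At least one changed path has to match the file glob.
        file_regex = re.compile(fnmatch.translate(file_pattern))
        files_ok = any(file_regex.search(f) for f in files_changed or [])

    return branch_ok and files_ok

assert rule_matches('release/*', '*', 'release/1.0', [])
assert not rule_matches('release/*', '*.py', 'release/1.0', ['docs/index.rst'])
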
3639 3639
3640 3640 @property
3641 3641 def review_users(self):
3642 3642 """ Returns the users which this rule applies to """
3643 3643
3644 3644 users = set()
3645 3645 users |= set([
3646 3646 rule_user.user for rule_user in self.rule_users
3647 3647 if rule_user.user.active])
3648 3648 users |= set(
3649 3649 member.user
3650 3650 for rule_user_group in self.rule_user_groups
3651 3651 for member in rule_user_group.users_group.members
3652 3652 if member.user.active
3653 3653 )
3654 3654 return users
3655 3655
3656 3656 def __repr__(self):
3657 3657 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
3658 3658 self.repo_review_rule_id, self.repo)
@@ -1,1180 +1,1188 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30
31 31 from pylons.i18n.translation import _
32 32 from pylons.i18n.translation import lazy_ugettext
33 33
34 34 from rhodecode.lib import helpers as h, hooks_utils, diffs
35 35 from rhodecode.lib.compat import OrderedDict
36 36 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
37 37 from rhodecode.lib.markup_renderer import (
38 38 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
39 39 from rhodecode.lib.utils import action_logger
40 40 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
41 41 from rhodecode.lib.vcs.backends.base import (
42 42 Reference, MergeResponse, MergeFailureReason)
43 43 from rhodecode.lib.vcs.conf import settings as vcs_settings
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 CommitDoesNotExistError, EmptyRepositoryError)
46 46 from rhodecode.model import BaseModel
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import ChangesetCommentsModel
49 49 from rhodecode.model.db import (
50 50 PullRequest, PullRequestReviewers, ChangesetStatus,
51 51 PullRequestVersion, ChangesetComment)
52 52 from rhodecode.model.meta import Session
53 53 from rhodecode.model.notification import NotificationModel, \
54 54 EmailNotificationModel
55 55 from rhodecode.model.scm import ScmModel
56 56 from rhodecode.model.settings import VcsSettingsModel
57 57
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 class PullRequestModel(BaseModel):
63 63
64 64 cls = PullRequest
65 65
66 66 DIFF_CONTEXT = 3
67 67
68 68 MERGE_STATUS_MESSAGES = {
69 69 MergeFailureReason.NONE: lazy_ugettext(
70 70 'This pull request can be automatically merged.'),
71 71 MergeFailureReason.UNKNOWN: lazy_ugettext(
72 72 'This pull request cannot be merged because of an unhandled'
73 73 ' exception.'),
74 74 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
75 75 'This pull request cannot be merged because of conflicts.'),
76 76 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
77 77 'This pull request could not be merged because push to target'
78 78 ' failed.'),
79 79 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
80 80 'This pull request cannot be merged because the target is not a'
81 81 ' head.'),
82 82 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
83 83 'This pull request cannot be merged because the source contains'
84 84 ' more branches than the target.'),
85 85 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
86 86 'This pull request cannot be merged because the target has'
87 87 ' multiple heads.'),
88 88 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
89 89 'This pull request cannot be merged because the target repository'
90 90 ' is locked.'),
91 91 MergeFailureReason.MISSING_COMMIT: lazy_ugettext(
92 92 'This pull request cannot be merged because the target or the '
93 93 'source reference is missing.'),
94 94 }
95 95
96 96 def __get_pull_request(self, pull_request):
97 97 return self._get_instance(PullRequest, pull_request)
98 98
99 99 def _check_perms(self, perms, pull_request, user, api=False):
100 100 if not api:
101 101 return h.HasRepoPermissionAny(*perms)(
102 102 user=user, repo_name=pull_request.target_repo.repo_name)
103 103 else:
104 104 return h.HasRepoPermissionAnyApi(*perms)(
105 105 user=user, repo_name=pull_request.target_repo.repo_name)
106 106
107 107 def check_user_read(self, pull_request, user, api=False):
108 108 _perms = ('repository.admin', 'repository.write', 'repository.read',)
109 109 return self._check_perms(_perms, pull_request, user, api)
110 110
111 111 def check_user_merge(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_update(self, pull_request, user, api=False):
116 116 owner = user.user_id == pull_request.user_id
117 117 return self.check_user_merge(pull_request, user, api) or owner
118 118
119 119 def check_user_change_status(self, pull_request, user, api=False):
120 120 reviewer = user.user_id in [x.user_id for x in
121 121 pull_request.reviewers]
122 122 return self.check_user_update(pull_request, user, api) or reviewer
123 123
124 124 def get(self, pull_request):
125 125 return self.__get_pull_request(pull_request)
126 126
127 127 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
128 128 opened_by=None, order_by=None,
129 129 order_dir='desc'):
130 130 repo = self._get_repo(repo_name)
131 131 q = PullRequest.query()
132 132 # source or target
133 133 if source:
134 134 q = q.filter(PullRequest.source_repo == repo)
135 135 else:
136 136 q = q.filter(PullRequest.target_repo == repo)
137 137
138 138 # closed,opened
139 139 if statuses:
140 140 q = q.filter(PullRequest.status.in_(statuses))
141 141
142 142 # opened by filter
143 143 if opened_by:
144 144 q = q.filter(PullRequest.user_id.in_(opened_by))
145 145
146 146 if order_by:
147 147 order_map = {
148 148 'name_raw': PullRequest.pull_request_id,
149 149 'title': PullRequest.title,
150 150 'updated_on_raw': PullRequest.updated_on
151 151 }
152 152 if order_dir == 'asc':
153 153 q = q.order_by(order_map[order_by].asc())
154 154 else:
155 155 q = q.order_by(order_map[order_by].desc())
156 156
157 157 return q
158 158
159 159 def count_all(self, repo_name, source=False, statuses=None,
160 160 opened_by=None):
161 161 """
162 162 Count the number of pull requests for a specific repository.
163 163
164 164 :param repo_name: target or source repo
165 165 :param source: boolean flag to specify if repo_name refers to source
166 166 :param statuses: list of pull request statuses
167 167 :param opened_by: author user of the pull request
168 168 :returns: int number of pull requests
169 169 """
170 170 q = self._prepare_get_all_query(
171 171 repo_name, source=source, statuses=statuses, opened_by=opened_by)
172 172
173 173 return q.count()
174 174
175 175 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
176 176 offset=0, length=None, order_by=None, order_dir='desc'):
177 177 """
178 178 Get all pull requests for a specific repository.
179 179
180 180 :param repo_name: target or source repo
181 181 :param source: boolean flag to specify if repo_name refers to source
182 182 :param statuses: list of pull request statuses
183 183 :param opened_by: author user of the pull request
184 184 :param offset: pagination offset
185 185 :param length: length of returned list
186 186 :param order_by: order of the returned list
187 187 :param order_dir: 'asc' or 'desc' ordering direction
188 188 :returns: list of pull requests
189 189 """
190 190 q = self._prepare_get_all_query(
191 191 repo_name, source=source, statuses=statuses, opened_by=opened_by,
192 192 order_by=order_by, order_dir=order_dir)
193 193
194 194 if length:
195 195 pull_requests = q.limit(length).offset(offset).all()
196 196 else:
197 197 pull_requests = q.all()
198 198
199 199 return pull_requests
200 200
201 201 def count_awaiting_review(self, repo_name, source=False, statuses=None,
202 202 opened_by=None):
203 203 """
204 204 Count the number of pull requests for a specific repository that are
205 205 awaiting review.
206 206
207 207 :param repo_name: target or source repo
208 208 :param source: boolean flag to specify if repo_name refers to source
209 209 :param statuses: list of pull request statuses
210 210 :param opened_by: author user of the pull request
211 211 :returns: int number of pull requests
212 212 """
213 213 pull_requests = self.get_awaiting_review(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
215 215
216 216 return len(pull_requests)
217 217
218 218 def get_awaiting_review(self, repo_name, source=False, statuses=None,
219 219 opened_by=None, offset=0, length=None,
220 220 order_by=None, order_dir='desc'):
221 221 """
222 222 Get all pull requests for a specific repository that are awaiting
223 223 review.
224 224
225 225 :param repo_name: target or source repo
226 226 :param source: boolean flag to specify if repo_name refers to source
227 227 :param statuses: list of pull request statuses
228 228 :param opened_by: author user of the pull request
229 229 :param offset: pagination offset
230 230 :param length: length of returned list
231 231 :param order_by: order of the returned list
232 232 :param order_dir: 'asc' or 'desc' ordering direction
233 233 :returns: list of pull requests
234 234 """
235 235 pull_requests = self.get_all(
236 236 repo_name, source=source, statuses=statuses, opened_by=opened_by,
237 237 order_by=order_by, order_dir=order_dir)
238 238
239 239 _filtered_pull_requests = []
240 240 for pr in pull_requests:
241 241 status = pr.calculated_review_status()
242 242 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
243 243 ChangesetStatus.STATUS_UNDER_REVIEW]:
244 244 _filtered_pull_requests.append(pr)
245 245 if length:
246 246 return _filtered_pull_requests[offset:offset+length]
247 247 else:
248 248 return _filtered_pull_requests
249 249
250 250 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
251 251 opened_by=None, user_id=None):
252 252 """
253 253 Count the number of pull requests for a specific repository that are
254 254 awaiting review from a specific user.
255 255
256 256 :param repo_name: target or source repo
257 257 :param source: boolean flag to specify if repo_name refers to source
258 258 :param statuses: list of pull request statuses
259 259 :param opened_by: author user of the pull request
260 260 :param user_id: reviewer user of the pull request
261 261 :returns: int number of pull requests
262 262 """
263 263 pull_requests = self.get_awaiting_my_review(
264 264 repo_name, source=source, statuses=statuses, opened_by=opened_by,
265 265 user_id=user_id)
266 266
267 267 return len(pull_requests)
268 268
269 269 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
270 270 opened_by=None, user_id=None, offset=0,
271 271 length=None, order_by=None, order_dir='desc'):
272 272 """
273 273 Get all pull requests for a specific repository that are awaiting
274 274 review from a specific user.
275 275
276 276 :param repo_name: target or source repo
277 277 :param source: boolean flag to specify if repo_name refers to source
278 278 :param statuses: list of pull request statuses
279 279 :param opened_by: author user of the pull request
280 280 :param user_id: reviewer user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _my = PullRequestModel().get_not_reviewed(user_id)
292 292 my_participation = []
293 293 for pr in pull_requests:
294 294 if pr in _my:
295 295 my_participation.append(pr)
296 296 _filtered_pull_requests = my_participation
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
302 302 def get_not_reviewed(self, user_id):
303 303 return [
304 304 x.pull_request for x in PullRequestReviewers.query().filter(
305 305 PullRequestReviewers.user_id == user_id).all()
306 306 ]
307 307
308 308 def get_versions(self, pull_request):
309 309 """
310 310 returns versions of the pull request sorted by ID ascending
311 311 """
312 312 return PullRequestVersion.query()\
313 313 .filter(PullRequestVersion.pull_request == pull_request)\
314 314 .order_by(PullRequestVersion.pull_request_version_id.asc())\
315 315 .all()
316 316
317 317 def create(self, created_by, source_repo, source_ref, target_repo,
318 318 target_ref, revisions, reviewers, title, description=None):
319 319 created_by_user = self._get_user(created_by)
320 320 source_repo = self._get_repo(source_repo)
321 321 target_repo = self._get_repo(target_repo)
322 322
323 323 pull_request = PullRequest()
324 324 pull_request.source_repo = source_repo
325 325 pull_request.source_ref = source_ref
326 326 pull_request.target_repo = target_repo
327 327 pull_request.target_ref = target_ref
328 328 pull_request.revisions = revisions
329 329 pull_request.title = title
330 330 pull_request.description = description
331 331 pull_request.author = created_by_user
332 332
333 333 Session().add(pull_request)
334 334 Session().flush()
335 335
336 336 reviewer_ids = set()
337 337 # members / reviewers
338 338 for reviewer_object in reviewers:
339 339 if isinstance(reviewer_object, tuple):
340 340 user_id, reasons = reviewer_object
341 341 else:
342 342 user_id, reasons = reviewer_object, []
343 343
344 344 user = self._get_user(user_id)
345 345 reviewer_ids.add(user.user_id)
346 346
347 347 reviewer = PullRequestReviewers(user, pull_request, reasons)
348 348 Session().add(reviewer)
349 349
350 350 # Set approval status to "Under Review" for all commits which are
351 351 # part of this pull request.
352 352 ChangesetStatusModel().set_status(
353 353 repo=target_repo,
354 354 status=ChangesetStatus.STATUS_UNDER_REVIEW,
355 355 user=created_by_user,
356 356 pull_request=pull_request
357 357 )
358 358
359 359 self.notify_reviewers(pull_request, reviewer_ids)
360 360 self._trigger_pull_request_hook(
361 361 pull_request, created_by_user, 'create')
362 362
363 363 return pull_request
364 364
365 365 def _trigger_pull_request_hook(self, pull_request, user, action):
366 366 pull_request = self.__get_pull_request(pull_request)
367 367 target_scm = pull_request.target_repo.scm_instance()
368 368 if action == 'create':
369 369 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
370 370 elif action == 'merge':
371 371 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
372 372 elif action == 'close':
373 373 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
374 374 elif action == 'review_status_change':
375 375 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
376 376 elif action == 'update':
377 377 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
378 378 else:
379 379 return
380 380
381 381 trigger_hook(
382 382 username=user.username,
383 383 repo_name=pull_request.target_repo.repo_name,
384 384 repo_alias=target_scm.alias,
385 385 pull_request=pull_request)
386 386
387 387 def _get_commit_ids(self, pull_request):
388 388 """
389 389 Return the commit ids of the merged pull request.
390 390
391 391 This method does not yet deal correctly with the lack of autoupdates
392 392 or with implicit target updates.
393 393 For example: if a commit in the source repo is already in the target,
394 394 it will still be reported.
395 395 """
396 396 merge_rev = pull_request.merge_rev
397 397 if merge_rev is None:
398 398 raise ValueError('This pull request was not merged yet')
399 399
400 400 commit_ids = list(pull_request.revisions)
401 401 if merge_rev not in commit_ids:
402 402 commit_ids.append(merge_rev)
403 403
404 404 return commit_ids
405 405
406 406 def merge(self, pull_request, user, extras):
407 407 log.debug("Merging pull request %s", pull_request.pull_request_id)
408 408 merge_state = self._merge_pull_request(pull_request, user, extras)
409 409 if merge_state.executed:
410 410 log.debug(
411 411 "Merge was successful, updating the pull request comments.")
412 412 self._comment_and_close_pr(pull_request, user, merge_state)
413 413 self._log_action('user_merged_pull_request', user, pull_request)
414 414 else:
415 415 log.warn("Merge failed, not updating the pull request.")
416 416 return merge_state
417 417
418 418 def _merge_pull_request(self, pull_request, user, extras):
419 419 target_vcs = pull_request.target_repo.scm_instance()
420 420 source_vcs = pull_request.source_repo.scm_instance()
421 421 target_ref = self._refresh_reference(
422 422 pull_request.target_ref_parts, target_vcs)
423 423
424 424 message = _(
425 425 'Merge pull request #%(pr_id)s from '
426 426 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
427 427 'pr_id': pull_request.pull_request_id,
428 428 'source_repo': source_vcs.name,
429 429 'source_ref_name': pull_request.source_ref_parts.name,
430 430 'pr_title': pull_request.title
431 431 }
432 432
433 433 workspace_id = self._workspace_id(pull_request)
434 434 use_rebase = self._use_rebase_for_merging(pull_request)
435 435
436 436 callback_daemon, extras = prepare_callback_daemon(
437 437 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
438 438 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
439 439
440 440 with callback_daemon:
441 441 # TODO: johbo: Implement a clean way to run a config_override
442 442 # for a single call.
443 443 target_vcs.config.set(
444 444 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
445 445 merge_state = target_vcs.merge(
446 446 target_ref, source_vcs, pull_request.source_ref_parts,
447 447 workspace_id, user_name=user.username,
448 448 user_email=user.email, message=message, use_rebase=use_rebase)
449 449 return merge_state
450 450
451 451 def _comment_and_close_pr(self, pull_request, user, merge_state):
452 452 pull_request.merge_rev = merge_state.merge_commit_id
453 453 pull_request.updated_on = datetime.datetime.now()
454 454
455 455 ChangesetCommentsModel().create(
456 456 text=unicode(_('Pull request merged and closed')),
457 457 repo=pull_request.target_repo.repo_id,
458 458 user=user.user_id,
459 459 pull_request=pull_request.pull_request_id,
460 460 f_path=None,
461 461 line_no=None,
462 462 closing_pr=True
463 463 )
464 464
465 465 Session().add(pull_request)
466 466 Session().flush()
467 467 # TODO: paris: replace invalidation with less radical solution
468 468 ScmModel().mark_for_invalidation(
469 469 pull_request.target_repo.repo_name)
470 470 self._trigger_pull_request_hook(pull_request, user, 'merge')
471 471
472 472 def has_valid_update_type(self, pull_request):
473 473 source_ref_type = pull_request.source_ref_parts.type
474 474 return source_ref_type in ['book', 'branch', 'tag']
475 475
476 476 def update_commits(self, pull_request):
477 477 """
478 478 Get the updated list of commits for the pull request
479 479 and return the new pull request version and the list
480 480 of commits processed by this update action
481 481 """
482 482
483 483 pull_request = self.__get_pull_request(pull_request)
484 484 source_ref_type = pull_request.source_ref_parts.type
485 485 source_ref_name = pull_request.source_ref_parts.name
486 486 source_ref_id = pull_request.source_ref_parts.commit_id
487 487
488 488 if not self.has_valid_update_type(pull_request):
489 489 log.debug(
490 490 "Skipping update of pull request %s due to ref type: %s",
491 491 pull_request, source_ref_type)
492 492 return (None, None)
493 493
494 494 source_repo = pull_request.source_repo.scm_instance()
495 495 source_commit = source_repo.get_commit(commit_id=source_ref_name)
496 496 if source_ref_id == source_commit.raw_id:
497 497 log.debug("Nothing changed in pull request %s", pull_request)
498 498 return (None, None)
499 499
500 500 # Finally there is a need for an update
501 501 pull_request_version = self._create_version_from_snapshot(pull_request)
502 502 self._link_comments_to_version(pull_request_version)
503 503
504 504 target_ref_type = pull_request.target_ref_parts.type
505 505 target_ref_name = pull_request.target_ref_parts.name
506 506 target_ref_id = pull_request.target_ref_parts.commit_id
507 507 target_repo = pull_request.target_repo.scm_instance()
508 508
509 509 if target_ref_type in ('tag', 'branch', 'book'):
510 510 target_commit = target_repo.get_commit(target_ref_name)
511 511 else:
512 512 target_commit = target_repo.get_commit(target_ref_id)
513 513
514 514 # re-compute commit ids
515 515 old_commit_ids = set(pull_request.revisions)
516 516 pre_load = ["author", "branch", "date", "message"]
517 517 commit_ranges = target_repo.compare(
518 518 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
519 519 pre_load=pre_load)
520 520
521 521 ancestor = target_repo.get_common_ancestor(
522 522 target_commit.raw_id, source_commit.raw_id, source_repo)
523 523
524 524 pull_request.source_ref = '%s:%s:%s' % (
525 525 source_ref_type, source_ref_name, source_commit.raw_id)
526 526 pull_request.target_ref = '%s:%s:%s' % (
527 527 target_ref_type, target_ref_name, ancestor)
528 528 pull_request.revisions = [
529 529 commit.raw_id for commit in reversed(commit_ranges)]
530 530 pull_request.updated_on = datetime.datetime.now()
531 531 Session().add(pull_request)
532 532 new_commit_ids = set(pull_request.revisions)
533 533
534 534 changes = self._calculate_commit_id_changes(
535 535 old_commit_ids, new_commit_ids)
536 536
537 537 old_diff_data, new_diff_data = self._generate_update_diffs(
538 538 pull_request, pull_request_version)
539 539
540 540 ChangesetCommentsModel().outdate_comments(
541 541 pull_request, old_diff_data=old_diff_data,
542 542 new_diff_data=new_diff_data)
543 543
544 544 file_changes = self._calculate_file_changes(
545 545 old_diff_data, new_diff_data)
546 546
547 547 # Add an automatic comment to the pull request
548 548 update_comment = ChangesetCommentsModel().create(
549 549 text=self._render_update_message(changes, file_changes),
550 550 repo=pull_request.target_repo,
551 551 user=pull_request.author,
552 552 pull_request=pull_request,
553 553 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
554 554
555 555 # Update status to "Under Review" for added commits
556 556 for commit_id in changes.added:
557 557 ChangesetStatusModel().set_status(
558 558 repo=pull_request.source_repo,
559 559 status=ChangesetStatus.STATUS_UNDER_REVIEW,
560 560 comment=update_comment,
561 561 user=pull_request.author,
562 562 pull_request=pull_request,
563 563 revision=commit_id)
564 564
565 565 log.debug(
566 566 'Updated pull request %s, added_ids: %s, common_ids: %s, '
567 567 'removed_ids: %s', pull_request.pull_request_id,
568 568 changes.added, changes.common, changes.removed)
569 569 log.debug('Updated pull request with the following file changes: %s',
570 570 file_changes)
571 571
572 572 log.info(
573 573 "Updated pull request %s from commit %s to commit %s, "
574 574 "stored new version %s of this pull request.",
575 575 pull_request.pull_request_id, source_ref_id,
576 576 pull_request.source_ref_parts.commit_id,
577 577 pull_request_version.pull_request_version_id)
578 578 Session().commit()
579 579 self._trigger_pull_request_hook(pull_request, pull_request.author,
580 580 'update')
581 581
582 582 return (pull_request_version, changes)
583 583
584 584 def _create_version_from_snapshot(self, pull_request):
585 585 version = PullRequestVersion()
586 586 version.title = pull_request.title
587 587 version.description = pull_request.description
588 588 version.status = pull_request.status
589 589 version.created_on = pull_request.created_on
590 590 version.updated_on = pull_request.updated_on
591 591 version.user_id = pull_request.user_id
592 592 version.source_repo = pull_request.source_repo
593 593 version.source_ref = pull_request.source_ref
594 594 version.target_repo = pull_request.target_repo
595 595 version.target_ref = pull_request.target_ref
596 596
597 597 version._last_merge_source_rev = pull_request._last_merge_source_rev
598 598 version._last_merge_target_rev = pull_request._last_merge_target_rev
599 599 version._last_merge_status = pull_request._last_merge_status
600 600 version.merge_rev = pull_request.merge_rev
601 601
602 602 version.revisions = pull_request.revisions
603 603 version.pull_request = pull_request
604 604 Session().add(version)
605 605 Session().flush()
606 606
607 607 return version
608 608
609 609 def _generate_update_diffs(self, pull_request, pull_request_version):
610 610 diff_context = (
611 611 self.DIFF_CONTEXT +
612 612 ChangesetCommentsModel.needed_extra_diff_context())
613 613 old_diff = self._get_diff_from_pr_or_version(
614 614 pull_request_version, context=diff_context)
615 615 new_diff = self._get_diff_from_pr_or_version(
616 616 pull_request, context=diff_context)
617 617
618 618 old_diff_data = diffs.DiffProcessor(old_diff)
619 619 old_diff_data.prepare()
620 620 new_diff_data = diffs.DiffProcessor(new_diff)
621 621 new_diff_data.prepare()
622 622
623 623 return old_diff_data, new_diff_data
624 624
625 625 def _link_comments_to_version(self, pull_request_version):
626 626 """
627 627 Link all unlinked comments of this pull request to the given version.
628 628
629 629 :param pull_request_version: The `PullRequestVersion` to which
630 630 the comments shall be linked.
631 631
632 632 """
633 633 pull_request = pull_request_version.pull_request
634 634 comments = ChangesetComment.query().filter(
635 635 # TODO: johbo: Should we query for the repo at all here?
636 636 # Pending decision on how comments of PRs are to be related
637 637 # to either the source repo, the target repo or no repo at all.
638 638 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
639 639 ChangesetComment.pull_request == pull_request,
640 640 ChangesetComment.pull_request_version == None)
641 641
642 642 # TODO: johbo: Find out why this breaks if it is done in a bulk
643 643 # operation.
644 644 for comment in comments:
645 645 comment.pull_request_version_id = (
646 646 pull_request_version.pull_request_version_id)
647 647 Session().add(comment)
648 648
649 649 def _calculate_commit_id_changes(self, old_ids, new_ids):
650 650 added = new_ids.difference(old_ids)
651 651 common = old_ids.intersection(new_ids)
652 652 removed = old_ids.difference(new_ids)
653 653 return ChangeTuple(added, common, removed)
654 654
655 655 def _calculate_file_changes(self, old_diff_data, new_diff_data):
656 656
657 657 old_files = OrderedDict()
658 658 for diff_data in old_diff_data.parsed_diff:
659 659 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
660 660
661 661 added_files = []
662 662 modified_files = []
663 663 removed_files = []
664 664 for diff_data in new_diff_data.parsed_diff:
665 665 new_filename = diff_data['filename']
666 666 new_hash = md5_safe(diff_data['raw_diff'])
667 667
668 668 old_hash = old_files.get(new_filename)
669 669 if not old_hash:
670 670 # file is not present in old diff, means it's added
671 671 added_files.append(new_filename)
672 672 else:
673 673 if new_hash != old_hash:
674 674 modified_files.append(new_filename)
675 675 # now remove a file from old, since we have seen it already
676 676 del old_files[new_filename]
677 677
678 678 # removed files are those present in old but not in NEW; since files
679 679 # seen in the new diff were deleted from old_files, any left-overs
680 680 # are the removed files
681 681 removed_files.extend(old_files.keys())
682 682
683 683 return FileChangeTuple(added_files, modified_files, removed_files)
684 684
685 685 def _render_update_message(self, changes, file_changes):
686 686 """
687 687 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
688 688 so it always looks the same regardless of which default
689 689 renderer the system is using.
690 690
691 691 :param changes: changes named tuple
692 692 :param file_changes: file changes named tuple
693 693
694 694 """
695 695 new_status = ChangesetStatus.get_status_lbl(
696 696 ChangesetStatus.STATUS_UNDER_REVIEW)
697 697
698 698 changed_files = (
699 699 file_changes.added + file_changes.modified + file_changes.removed)
700 700
701 701 params = {
702 702 'under_review_label': new_status,
703 703 'added_commits': changes.added,
704 704 'removed_commits': changes.removed,
705 705 'changed_files': changed_files,
706 706 'added_files': file_changes.added,
707 707 'modified_files': file_changes.modified,
708 708 'removed_files': file_changes.removed,
709 709 }
710 710 renderer = RstTemplateRenderer()
711 711 return renderer.render('pull_request_update.mako', **params)
712 712
713 713 def edit(self, pull_request, title, description):
714 714 pull_request = self.__get_pull_request(pull_request)
715 715 if pull_request.is_closed():
716 716 raise ValueError('This pull request is closed')
717 717 if title:
718 718 pull_request.title = title
719 719 pull_request.description = description
720 720 pull_request.updated_on = datetime.datetime.now()
721 721 Session().add(pull_request)
722 722
723 723 def update_reviewers(self, pull_request, reviewer_data):
724 724 """
725 725 Update the reviewers in the pull request
726 726
727 727 :param pull_request: the pr to update
728 728 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
729 729 """
730 730
731 731 reviewers_reasons = {}
732 732 for user_id, reasons in reviewer_data:
733 733 if isinstance(user_id, (int, basestring)):
734 734 user_id = self._get_user(user_id).user_id
735 735 reviewers_reasons[user_id] = reasons
736 736
737 737 reviewers_ids = set(reviewers_reasons.keys())
738 738 pull_request = self.__get_pull_request(pull_request)
739 739 current_reviewers = PullRequestReviewers.query()\
740 740 .filter(PullRequestReviewers.pull_request ==
741 741 pull_request).all()
742 742 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
743 743
744 744 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
745 745 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
746 746
747 747 log.debug("Adding %s reviewers", ids_to_add)
748 748 log.debug("Removing %s reviewers", ids_to_remove)
749 749 changed = False
750 750 for uid in ids_to_add:
751 751 changed = True
752 752 _usr = self._get_user(uid)
753 753 reasons = reviewers_reasons[uid]
754 754 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
755 755 Session().add(reviewer)
756 756
757 757 self.notify_reviewers(pull_request, ids_to_add)
758 758
759 759 for uid in ids_to_remove:
760 760 changed = True
761 761 reviewer = PullRequestReviewers.query()\
762 762 .filter(PullRequestReviewers.user_id == uid,
763 763 PullRequestReviewers.pull_request == pull_request)\
764 764 .scalar()
765 765 if reviewer:
766 766 Session().delete(reviewer)
767 767 if changed:
768 768 pull_request.updated_on = datetime.datetime.now()
769 769 Session().add(pull_request)
770 770
771 771 return ids_to_add, ids_to_remove
772 772
773 773 def get_url(self, pull_request):
774 774 return h.url('pullrequest_show',
775 775 repo_name=safe_str(pull_request.target_repo.repo_name),
776 776 pull_request_id=pull_request.pull_request_id,
777 777 qualified=True)
778 778
779 779 def get_shadow_clone_url(self, pull_request):
780 return u'{url}/repository'.format(url=self.get_url(pull_request))
780 """
781 Returns a qualified url pointing to the shadow repository. If this pull
782 request is closed there is no shadow repository and ``None`` will be
783 returned.
784 """
785 if pull_request.is_closed():
786 return None
787 else:
788 return u'{url}/repository'.format(url=self.get_url(pull_request))
781 789
782 790 def notify_reviewers(self, pull_request, reviewers_ids):
783 791 # notification to reviewers
784 792 if not reviewers_ids:
785 793 return
786 794
787 795 pull_request_obj = pull_request
788 796 # get the current participants of this pull request
789 797 recipients = reviewers_ids
790 798 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
791 799
792 800 pr_source_repo = pull_request_obj.source_repo
793 801 pr_target_repo = pull_request_obj.target_repo
794 802
795 803 pr_url = h.url(
796 804 'pullrequest_show',
797 805 repo_name=pr_target_repo.repo_name,
798 806 pull_request_id=pull_request_obj.pull_request_id,
799 807 qualified=True,)
800 808
801 809 # set some variables for email notification
802 810 pr_target_repo_url = h.url(
803 811 'summary_home',
804 812 repo_name=pr_target_repo.repo_name,
805 813 qualified=True)
806 814
807 815 pr_source_repo_url = h.url(
808 816 'summary_home',
809 817 repo_name=pr_source_repo.repo_name,
810 818 qualified=True)
811 819
812 820 # pull request specifics
813 821 pull_request_commits = [
814 822 (x.raw_id, x.message)
815 823 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
816 824
817 825 kwargs = {
818 826 'user': pull_request.author,
819 827 'pull_request': pull_request_obj,
820 828 'pull_request_commits': pull_request_commits,
821 829
822 830 'pull_request_target_repo': pr_target_repo,
823 831 'pull_request_target_repo_url': pr_target_repo_url,
824 832
825 833 'pull_request_source_repo': pr_source_repo,
826 834 'pull_request_source_repo_url': pr_source_repo_url,
827 835
828 836 'pull_request_url': pr_url,
829 837 }
830 838
831 839 # pre-generate the subject for notification itself
832 840 (subject,
833 841 _h, _e, # we don't care about those
834 842 body_plaintext) = EmailNotificationModel().render_email(
835 843 notification_type, **kwargs)
836 844
837 845 # create notification objects, and emails
838 846 NotificationModel().create(
839 847 created_by=pull_request.author,
840 848 notification_subject=subject,
841 849 notification_body=body_plaintext,
842 850 notification_type=notification_type,
843 851 recipients=recipients,
844 852 email_kwargs=kwargs,
845 853 )
846 854
847 855 def delete(self, pull_request):
848 856 pull_request = self.__get_pull_request(pull_request)
849 857 self._cleanup_merge_workspace(pull_request)
850 858 Session().delete(pull_request)
851 859
852 860 def close_pull_request(self, pull_request, user):
853 861 pull_request = self.__get_pull_request(pull_request)
854 862 self._cleanup_merge_workspace(pull_request)
855 863 pull_request.status = PullRequest.STATUS_CLOSED
856 864 pull_request.updated_on = datetime.datetime.now()
857 865 Session().add(pull_request)
858 866 self._trigger_pull_request_hook(
859 867 pull_request, pull_request.author, 'close')
860 868 self._log_action('user_closed_pull_request', user, pull_request)
861 869
862 870 def close_pull_request_with_comment(self, pull_request, user, repo,
863 871 message=None):
864 872 status = ChangesetStatus.STATUS_REJECTED
865 873
866 874 if not message:
867 875 message = (
868 876 _('Status change %(transition_icon)s %(status)s') % {
869 877 'transition_icon': '>',
870 878 'status': ChangesetStatus.get_status_lbl(status)})
871 879
872 880 internal_message = _('Closing with') + ' ' + message
873 881
874 882 comm = ChangesetCommentsModel().create(
875 883 text=internal_message,
876 884 repo=repo.repo_id,
877 885 user=user.user_id,
878 886 pull_request=pull_request.pull_request_id,
879 887 f_path=None,
880 888 line_no=None,
881 889 status_change=ChangesetStatus.get_status_lbl(status),
882 890 status_change_type=status,
883 891 closing_pr=True
884 892 )
885 893
886 894 ChangesetStatusModel().set_status(
887 895 repo.repo_id,
888 896 status,
889 897 user.user_id,
890 898 comm,
891 899 pull_request=pull_request.pull_request_id
892 900 )
893 901 Session().flush()
894 902
895 903 PullRequestModel().close_pull_request(
896 904 pull_request.pull_request_id, user)
897 905
898 906 def merge_status(self, pull_request):
899 907 if not self._is_merge_enabled(pull_request):
900 908 return False, _('Server-side pull request merging is disabled.')
901 909 if pull_request.is_closed():
902 910 return False, _('This pull request is closed.')
903 911 merge_possible, msg = self._check_repo_requirements(
904 912 target=pull_request.target_repo, source=pull_request.source_repo)
905 913 if not merge_possible:
906 914 return merge_possible, msg
907 915
908 916 try:
909 917 resp = self._try_merge(pull_request)
910 918 status = resp.possible, self.merge_status_message(
911 919 resp.failure_reason)
912 920 except NotImplementedError:
913 921 status = False, _('Pull request merging is not supported.')
914 922
915 923 return status
916 924
917 925 def _check_repo_requirements(self, target, source):
918 926 """
919 927 Check if `target` and `source` have compatible requirements.
920 928
921 929 Currently this is just checking for largefiles.
922 930 """
923 931 target_has_largefiles = self._has_largefiles(target)
924 932 source_has_largefiles = self._has_largefiles(source)
925 933 merge_possible = True
926 934 message = u''
927 935
928 936 if target_has_largefiles != source_has_largefiles:
929 937 merge_possible = False
930 938 if source_has_largefiles:
931 939 message = _(
932 940 'Target repository large files support is disabled.')
933 941 else:
934 942 message = _(
935 943 'Source repository large files support is disabled.')
936 944
937 945 return merge_possible, message
938 946
939 947 def _has_largefiles(self, repo):
940 948 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
941 949 'extensions', 'largefiles')
942 950 return largefiles_ui and largefiles_ui[0].active
943 951
944 952 def _try_merge(self, pull_request):
945 953 """
946 954 Try to merge the pull request and return the merge status.
947 955 """
948 956 log.debug(
949 957 "Trying out if the pull request %s can be merged.",
950 958 pull_request.pull_request_id)
951 959 target_vcs = pull_request.target_repo.scm_instance()
952 960 target_ref = self._refresh_reference(
953 961 pull_request.target_ref_parts, target_vcs)
954 962
955 963 target_locked = pull_request.target_repo.locked
956 964 if target_locked and target_locked[0]:
957 965 log.debug("The target repository is locked.")
958 966 merge_state = MergeResponse(
959 967 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
960 968 elif self._needs_merge_state_refresh(pull_request, target_ref):
961 969 log.debug("Refreshing the merge status of the repository.")
962 970 merge_state = self._refresh_merge_state(
963 971 pull_request, target_vcs, target_ref)
964 972 else:
965 973 possible = pull_request.\
966 974 _last_merge_status == MergeFailureReason.NONE
967 975 merge_state = MergeResponse(
968 976 possible, False, None, pull_request._last_merge_status)
969 977 log.debug("Merge response: %s", merge_state)
970 978 return merge_state
971 979
972 980 def _refresh_reference(self, reference, vcs_repository):
973 981 if reference.type in ('branch', 'book'):
974 982 name_or_id = reference.name
975 983 else:
976 984 name_or_id = reference.commit_id
977 985 refreshed_commit = vcs_repository.get_commit(name_or_id)
978 986 refreshed_reference = Reference(
979 987 reference.type, reference.name, refreshed_commit.raw_id)
980 988 return refreshed_reference
981 989
982 990 def _needs_merge_state_refresh(self, pull_request, target_reference):
983 991 return not(
984 992 pull_request.revisions and
985 993 pull_request.revisions[0] == pull_request._last_merge_source_rev and
986 994 target_reference.commit_id == pull_request._last_merge_target_rev)
987 995
988 996 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
989 997 workspace_id = self._workspace_id(pull_request)
990 998 source_vcs = pull_request.source_repo.scm_instance()
991 999 use_rebase = self._use_rebase_for_merging(pull_request)
992 1000 merge_state = target_vcs.merge(
993 1001 target_reference, source_vcs, pull_request.source_ref_parts,
994 1002 workspace_id, dry_run=True, use_rebase=use_rebase)
995 1003
996 1004 # Do not store the response if there was an unknown error.
997 1005 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
998 1006 pull_request._last_merge_source_rev = pull_request.\
999 1007 source_ref_parts.commit_id
1000 1008 pull_request._last_merge_target_rev = target_reference.commit_id
1001 1009 pull_request._last_merge_status = (
1002 1010 merge_state.failure_reason)
1003 1011 Session().add(pull_request)
1004 1012 Session().flush()
1005 1013
1006 1014 return merge_state
1007 1015
1008 1016 def _workspace_id(self, pull_request):
1009 1017 workspace_id = 'pr-%s' % pull_request.pull_request_id
1010 1018 return workspace_id
1011 1019
1012 1020 def merge_status_message(self, status_code):
1013 1021 """
1014 1022 Return a human friendly error message for the given merge status code.
1015 1023 """
1016 1024 return self.MERGE_STATUS_MESSAGES[status_code]
1017 1025
1018 1026 def generate_repo_data(self, repo, commit_id=None, branch=None,
1019 1027 bookmark=None):
1020 1028 all_refs, selected_ref = \
1021 1029 self._get_repo_pullrequest_sources(
1022 1030 repo.scm_instance(), commit_id=commit_id,
1023 1031 branch=branch, bookmark=bookmark)
1024 1032
1025 1033 refs_select2 = []
1026 1034 for element in all_refs:
1027 1035 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1028 1036 refs_select2.append({'text': element[1], 'children': children})
1029 1037
1030 1038 return {
1031 1039 'user': {
1032 1040 'user_id': repo.user.user_id,
1033 1041 'username': repo.user.username,
1034 1042 'firstname': repo.user.firstname,
1035 1043 'lastname': repo.user.lastname,
1036 1044 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1037 1045 },
1038 1046 'description': h.chop_at_smart(repo.description, '\n'),
1039 1047 'refs': {
1040 1048 'all_refs': all_refs,
1041 1049 'selected_ref': selected_ref,
1042 1050 'select2_refs': refs_select2
1043 1051 }
1044 1052 }
1045 1053
1046 1054 def generate_pullrequest_title(self, source, source_ref, target):
1047 1055 return u'{source}#{at_ref} to {target}'.format(
1048 1056 source=source,
1049 1057 at_ref=source_ref,
1050 1058 target=target,
1051 1059 )
1052 1060
1053 1061 def _cleanup_merge_workspace(self, pull_request):
1054 1062 # Merging related cleanup
1055 1063 target_scm = pull_request.target_repo.scm_instance()
1056 1064 workspace_id = 'pr-%s' % pull_request.pull_request_id
1057 1065
1058 1066 try:
1059 1067 target_scm.cleanup_merge_workspace(workspace_id)
1060 1068 except NotImplementedError:
1061 1069 pass
1062 1070
1063 1071 def _get_repo_pullrequest_sources(
1064 1072 self, repo, commit_id=None, branch=None, bookmark=None):
1065 1073 """
1066 1074 Return a structure with repo's interesting commits, suitable for
1067 1075 the selectors in pullrequest controller
1068 1076
1069 1077 :param commit_id: a commit that must be in the list somehow
1070 1078 and selected by default
1071 1079 :param branch: a branch that must be in the list and selected
1072 1080 by default - even if closed
1073 1081 :param bookmark: a bookmark that must be in the list and selected
1074 1082 """
1075 1083
1076 1084 commit_id = safe_str(commit_id) if commit_id else None
1077 1085 branch = safe_str(branch) if branch else None
1078 1086 bookmark = safe_str(bookmark) if bookmark else None
1079 1087
1080 1088 selected = None
1081 1089
1082 1090 # order matters: first source that has commit_id in it will be selected
1083 1091 sources = []
1084 1092 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1085 1093 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1086 1094
1087 1095 if commit_id:
1088 1096 ref_commit = (h.short_id(commit_id), commit_id)
1089 1097 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1090 1098
1091 1099 sources.append(
1092 1100 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1093 1101 )
1094 1102
1095 1103 groups = []
1096 1104 for group_key, ref_list, group_name, match in sources:
1097 1105 group_refs = []
1098 1106 for ref_name, ref_id in ref_list:
1099 1107 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1100 1108 group_refs.append((ref_key, ref_name))
1101 1109
1102 1110 if not selected:
1103 1111 if set([commit_id, match]) & set([ref_id, ref_name]):
1104 1112 selected = ref_key
1105 1113
1106 1114 if group_refs:
1107 1115 groups.append((group_refs, group_name))
1108 1116
1109 1117 if not selected:
1110 1118 ref = commit_id or branch or bookmark
1111 1119 if ref:
1112 1120 raise CommitDoesNotExistError(
1113 1121 'No commit refs could be found matching: %s' % ref)
1114 1122 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1115 1123 selected = 'branch:%s:%s' % (
1116 1124 repo.DEFAULT_BRANCH_NAME,
1117 1125 repo.branches[repo.DEFAULT_BRANCH_NAME]
1118 1126 )
1119 1127 elif repo.commit_ids:
1120 1128 rev = repo.commit_ids[0]
1121 1129 selected = 'rev:%s:%s' % (rev, rev)
1122 1130 else:
1123 1131 raise EmptyRepositoryError()
1124 1132 return groups, selected
1125 1133
1126 1134 def get_diff(self, pull_request, context=DIFF_CONTEXT):
1127 1135 pull_request = self.__get_pull_request(pull_request)
1128 1136 return self._get_diff_from_pr_or_version(pull_request, context=context)
1129 1137
1130 1138 def _get_diff_from_pr_or_version(self, pr_or_version, context):
1131 1139 source_repo = pr_or_version.source_repo
1132 1140
1133 1141 # we swap org/other ref since we run a simple diff on one repo
1134 1142 target_ref_id = pr_or_version.target_ref_parts.commit_id
1135 1143 source_ref_id = pr_or_version.source_ref_parts.commit_id
1136 1144 target_commit = source_repo.get_commit(
1137 1145 commit_id=safe_str(target_ref_id))
1138 1146 source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
1139 1147 vcs_repo = source_repo.scm_instance()
1140 1148
1141 1149 # TODO: johbo: In the context of an update, we cannot reach
1142 1150 # the old commit anymore with our normal mechanisms. It needs
1143 1151 # some sort of special support in the vcs layer to avoid this
1144 1152 # workaround.
1145 1153 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1146 1154 vcs_repo.alias == 'git'):
1147 1155 source_commit.raw_id = safe_str(source_ref_id)
1148 1156
1149 1157 log.debug('calculating diff between '
1150 1158 'source_ref:%s and target_ref:%s for repo `%s`',
1151 1159 target_ref_id, source_ref_id,
1152 1160 safe_unicode(vcs_repo.path))
1153 1161
1154 1162 vcs_diff = vcs_repo.get_diff(
1155 1163 commit1=target_commit, commit2=source_commit, context=context)
1156 1164 return vcs_diff
1157 1165
1158 1166 def _is_merge_enabled(self, pull_request):
1159 1167 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1160 1168 settings = settings_model.get_general_settings()
1161 1169 return settings.get('rhodecode_pr_merge_enabled', False)
1162 1170
1163 1171 def _use_rebase_for_merging(self, pull_request):
1164 1172 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1165 1173 settings = settings_model.get_general_settings()
1166 1174 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1167 1175
1168 1176 def _log_action(self, action, user, pull_request):
1169 1177 action_logger(
1170 1178 user,
1171 1179 '{action}:{pr_id}'.format(
1172 1180 action=action, pr_id=pull_request.pull_request_id),
1173 1181 pull_request.target_repo)
1174 1182
1175 1183
1176 1184 ChangeTuple = namedtuple('ChangeTuple',
1177 1185 ['added', 'common', 'removed'])
1178 1186
1179 1187 FileChangeTuple = namedtuple('FileChangeTuple',
1180 1188 ['added', 'modified', 'removed'])
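
Taken together, the hunk above unifies shadow-repository clone url generation inside get_shadow_clone_url(): once a pull request is closed the shadow repository no longer exists and None is returned, otherwise '/repository' is appended to the qualified pull request url produced by get_url(). A minimal standalone sketch of that behaviour with a hypothetical url (the real method delegates to pylons routing via get_url):

    def shadow_clone_url(pull_request_url, closed):
        # No shadow repository exists for a closed pull request.
        if closed:
            return None
        return u'{url}/repository'.format(url=pull_request_url)

    pr_url = u'https://code.example.com/repo/pull-request/1'  # hypothetical url
    assert shadow_clone_url(pr_url, closed=False) == pr_url + u'/repository'
    assert shadow_clone_url(pr_url, closed=True) is None
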