Commit message: pull-requests: introduce operation state for pull requests to prevent from...
Author: marcink
Revision: r3371:e7214a9f (branch: default)
@@ -0,0 +1,4759 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 """
22 Database Models for RhodeCode Enterprise
23 """
24
25 import re
26 import os
27 import time
28 import hashlib
29 import logging
30 import datetime
31 import warnings
32 import ipaddress
33 import functools
34 import traceback
35 import collections
36
37 from sqlalchemy import (
38 or_, and_, not_, func, TypeDecorator, event,
39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 Text, Float, PickleType)
42 from sqlalchemy.sql.expression import true, false
43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 from sqlalchemy.orm import (
45 relationship, joinedload, class_mapper, validates, aliased)
46 from sqlalchemy.ext.declarative import declared_attr
47 from sqlalchemy.ext.hybrid import hybrid_property
48 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 from sqlalchemy.dialects.mysql import LONGTEXT
50 from zope.cachedescriptors.property import Lazy as LazyProperty
51
52 from pyramid.threadlocal import get_current_request
53
54 from rhodecode.translation import _
55 from rhodecode.lib.vcs import get_vcs_instance
56 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 from rhodecode.lib.utils2 import (
58 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 glob2re, StrictAttributeDict, cleaned_uri)
61 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 JsonRaw
63 from rhodecode.lib.ext_json import json
64 from rhodecode.lib.caching_query import FromCache
65 from rhodecode.lib.encrypt import AESCipher
66
67 from rhodecode.model.meta import Base, Session
68
69 URL_SEP = '/'
70 log = logging.getLogger(__name__)
71
72 # =============================================================================
73 # BASE CLASSES
74 # =============================================================================
75
76 # this is propagated from the .ini file setting rhodecode.encrypted_values.secret,
77 # or beaker.session.secret if the first is not set,
78 # and is initialized in environment.py
79 ENCRYPTION_KEY = None
80
81 # used to sort permissions by type; '#' is not allowed in usernames and
82 # sorts very early in the string.printable table.
83 PERMISSION_TYPE_SORT = {
84 'admin': '####',
85 'write': '###',
86 'read': '##',
87 'none': '#',
88 }
89
90
91 def display_user_sort(obj):
92 """
93 Sort function used to sort permissions in .permissions() function of
94 Repository, RepoGroup and UserGroup. It also puts the default user in front
95 of all other entries.
96 """
97
98 if obj.username == User.DEFAULT_USER:
99 return '#####'
100 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
101 return prefix + obj.username
102
103
104 def display_user_group_sort(obj):
105 """
106 Sort function used to sort permissions in the .permissions() function of
107 Repository, RepoGroup and UserGroup. It orders user group rows by
108 permission type, then by group name.
109 """
110
111 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
112 return prefix + obj.users_group_name
113
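# NOTE(editor): illustrative sketch, not part of the original module. It shows
# how the sort keys above order permission rows: the default user first, then
# rows grouped admin > write > read > none, alphabetically within each group.
# The sample rows are hypothetical AttributeDict objects.
def _example_display_user_sort():  # pragma: no cover - docs-only sketch
    rows = [
        AttributeDict(username='zoe', permission='repository.read'),
        AttributeDict(username='adam', permission='repository.admin'),
        AttributeDict(username=User.DEFAULT_USER, permission='repository.read'),
    ]
    # -> default user first, then adam (admin), then zoe (read)
    return sorted(rows, key=display_user_sort)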
114
115 def _hash_key(k):
116 return sha1_safe(k)
117
118
119 def in_filter_generator(qry, items, limit=500):
120 """
121 Splits a large IN() clause into multiple chunks combined with OR,
122 e.g.::
123 cnt = Repository.query().filter(
124 or_(
125 *in_filter_generator(Repository.repo_id, range(100000))
126 )).count()
127 """
128 if not items:
129 # an empty list would produce an empty IN() clause, which might cause
130 # security issues and hidden, unpleasant results
131 items = [-1]
132
133 parts = []
134 for chunk in xrange(0, len(items), limit):
135 parts.append(
136 qry.in_(items[chunk: chunk + limit])
137 )
138
139 return parts
140
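# NOTE(editor): illustrative sketch, not part of the original module. With the
# default limit of 500, a list of 1200 ids is split into three IN() clauses
# that the caller combines with or_(); an empty list is replaced by [-1] so the
# generated query never degenerates into an unfiltered one.
def _example_in_filter_generator():  # pragma: no cover - docs-only sketch
    ids = list(range(1200))
    clauses = in_filter_generator(Repository.repo_id, ids)
    assert len(clauses) == 3
    return Repository.query().filter(or_(*clauses)).count()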
141
142 base_table_args = {
143 'extend_existing': True,
144 'mysql_engine': 'InnoDB',
145 'mysql_charset': 'utf8',
146 'sqlite_autoincrement': True
147 }
148
149
150 class EncryptedTextValue(TypeDecorator):
151 """
152 Special column for encrypted long text data, use like::
153
154 value = Column("encrypted_value", EncryptedValue(), nullable=False)
155
156 This column is intelligent: if the stored value is in unencrypted form it is
157 returned as-is, but on save the value is always encrypted.
158 """
159 impl = Text
160
161 def process_bind_param(self, value, dialect):
162 if not value:
163 return value
164 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
165 # protect against double encryption if someone manually starts
166 # doing it
167 raise ValueError('value needs to be in unencrypted format, i.e. '
168 'not starting with enc$aes')
169 return 'enc$aes_hmac$%s' % AESCipher(
170 ENCRYPTION_KEY, hmac=True).encrypt(value)
171
172 def process_result_value(self, value, dialect):
173 import rhodecode
174
175 if not value:
176 return value
177
178 parts = value.split('$', 3)
179 if not len(parts) == 3:
180 # probably not encrypted values
181 return value
182 else:
183 if parts[0] != 'enc':
184 # parts ok but without our header ?
185 return value
186 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
187 'rhodecode.encrypted_values.strict') or True)
188 # at that stage we know it's our encryption
189 if parts[1] == 'aes':
190 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
191 elif parts[1] == 'aes_hmac':
192 decrypted_data = AESCipher(
193 ENCRYPTION_KEY, hmac=True,
194 strict_verification=enc_strict_mode).decrypt(parts[2])
195 else:
196 raise ValueError(
197 'Encryption type part is wrong, must be `aes` '
198 'or `aes_hmac`, got `%s` instead' % (parts[1]))
199 return decrypted_data
200
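# NOTE(editor): minimal sketch, not part of the original module. It illustrates
# how values round-trip through the column type above: plain text goes in, an
# 'enc$aes_hmac$<ciphertext>' payload is stored, and reads decrypt it again.
# ENCRYPTION_KEY must already be configured (see environment.py) for this to work.
def _example_encrypted_text_value():  # pragma: no cover - docs-only sketch
    col = EncryptedTextValue()
    stored = col.process_bind_param(u'https://user:secret@host/repo', None)
    assert stored.startswith('enc$aes_hmac$')
    return col.process_result_value(stored, None)  # -> the original plain value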
201
202 class BaseModel(object):
203 """
204 Base Model for all classes
205 """
206
207 @classmethod
208 def _get_keys(cls):
209 """return column names for this model """
210 return class_mapper(cls).c.keys()
211
212 def get_dict(self):
213 """
214 return dict with keys and values corresponding
215 to this model data """
216
217 d = {}
218 for k in self._get_keys():
219 d[k] = getattr(self, k)
220
221 # also use __json__() if present to get additional fields
222 _json_attr = getattr(self, '__json__', None)
223 if _json_attr:
224 # update with attributes from __json__
225 if callable(_json_attr):
226 _json_attr = _json_attr()
227 for k, val in _json_attr.iteritems():
228 d[k] = val
229 return d
230
231 def get_appstruct(self):
232 """return list with keys and values tuples corresponding
233 to this model data """
234
235 lst = []
236 for k in self._get_keys():
237 lst.append((k, getattr(self, k),))
238 return lst
239
240 def populate_obj(self, populate_dict):
241 """populate model with data from given populate_dict"""
242
243 for k in self._get_keys():
244 if k in populate_dict:
245 setattr(self, k, populate_dict[k])
246
247 @classmethod
248 def query(cls):
249 return Session().query(cls)
250
251 @classmethod
252 def get(cls, id_):
253 if id_:
254 return cls.query().get(id_)
255
256 @classmethod
257 def get_or_404(cls, id_):
258 from pyramid.httpexceptions import HTTPNotFound
259
260 try:
261 id_ = int(id_)
262 except (TypeError, ValueError):
263 raise HTTPNotFound()
264
265 res = cls.query().get(id_)
266 if not res:
267 raise HTTPNotFound()
268 return res
269
270 @classmethod
271 def getAll(cls):
272 # deprecated and left for backward compatibility
273 return cls.get_all()
274
275 @classmethod
276 def get_all(cls):
277 return cls.query().all()
278
279 @classmethod
280 def delete(cls, id_):
281 obj = cls.query().get(id_)
282 Session().delete(obj)
283
284 @classmethod
285 def identity_cache(cls, session, attr_name, value):
286 exist_in_session = []
287 for (item_cls, pkey), instance in session.identity_map.items():
288 if cls == item_cls and getattr(instance, attr_name) == value:
289 exist_in_session.append(instance)
290 if exist_in_session:
291 if len(exist_in_session) == 1:
292 return exist_in_session[0]
293 log.exception(
294 'multiple objects with attr %s and '
295 'value %s found with same name: %r',
296 attr_name, value, exist_in_session)
297
298 def __repr__(self):
299 if hasattr(self, '__unicode__'):
300 # python repr needs to return str
301 try:
302 return safe_str(self.__unicode__())
303 except UnicodeDecodeError:
304 pass
305 return '<DB:%s>' % (self.__class__.__name__)
306
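# NOTE(editor): illustrative sketch, not part of the original module. Every model
# inherits get_dict()/populate_obj() from BaseModel above: get_dict() serializes
# the mapped columns (plus any __json__() extras) and populate_obj() writes the
# matching keys back, ignoring everything else. The username is hypothetical.
def _example_base_model_roundtrip():  # pragma: no cover - docs-only sketch
    user = User.get_by_username('some-user')
    data = user.get_dict()
    data['active'] = False
    data['not_a_column'] = 'silently ignored'
    user.populate_obj(data)  # only keys matching real columns are applied
    return user.active  # -> False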
307
308 class RhodeCodeSetting(Base, BaseModel):
309 __tablename__ = 'rhodecode_settings'
310 __table_args__ = (
311 UniqueConstraint('app_settings_name'),
312 base_table_args
313 )
314
315 SETTINGS_TYPES = {
316 'str': safe_str,
317 'int': safe_int,
318 'unicode': safe_unicode,
319 'bool': str2bool,
320 'list': functools.partial(aslist, sep=',')
321 }
322 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
323 GLOBAL_CONF_KEY = 'app_settings'
324
325 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
326 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
327 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
328 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
329
330 def __init__(self, key='', val='', type='unicode'):
331 self.app_settings_name = key
332 self.app_settings_type = type
333 self.app_settings_value = val
334
335 @validates('_app_settings_value')
336 def validate_settings_value(self, key, val):
337 assert type(val) == unicode
338 return val
339
340 @hybrid_property
341 def app_settings_value(self):
342 v = self._app_settings_value
343 _type = self.app_settings_type
344 if _type:
345 _type = self.app_settings_type.split('.')[0]
346 # decode the encrypted value
347 if 'encrypted' in self.app_settings_type:
348 cipher = EncryptedTextValue()
349 v = safe_unicode(cipher.process_result_value(v, None))
350
351 converter = self.SETTINGS_TYPES.get(_type) or \
352 self.SETTINGS_TYPES['unicode']
353 return converter(v)
354
355 @app_settings_value.setter
356 def app_settings_value(self, val):
357 """
358 Setter that will always make sure we use unicode in app_settings_value
359
360 :param val:
361 """
362 val = safe_unicode(val)
363 # encode the encrypted value
364 if 'encrypted' in self.app_settings_type:
365 cipher = EncryptedTextValue()
366 val = safe_unicode(cipher.process_bind_param(val, None))
367 self._app_settings_value = val
368
369 @hybrid_property
370 def app_settings_type(self):
371 return self._app_settings_type
372
373 @app_settings_type.setter
374 def app_settings_type(self, val):
375 if val.split('.')[0] not in self.SETTINGS_TYPES:
376 raise Exception('type must be one of %s got %s'
377 % (self.SETTINGS_TYPES.keys(), val))
378 self._app_settings_type = val
379
380 @classmethod
381 def get_by_prefix(cls, prefix):
382 return RhodeCodeSetting.query()\
383 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
384 .all()
385
386 def __unicode__(self):
387 return u"<%s('%s:%s[%s]')>" % (
388 self.__class__.__name__,
389 self.app_settings_name, self.app_settings_value,
390 self.app_settings_type
391 )
392
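# NOTE(editor): minimal sketch, not part of the original module. The hybrid
# properties above store every value as unicode and coerce it back through
# SETTINGS_TYPES on read, so a 'bool' setting created from the string 'True'
# reads back as the Python value True. The setting name is hypothetical.
def _example_rhodecode_setting_types():  # pragma: no cover - docs-only sketch
    setting = RhodeCodeSetting(key='example_flag', val='True', type='bool')
    assert setting.app_settings_value is True  # coerced by str2bool
    setting.app_settings_value = 'False'
    return setting.app_settings_value  # -> False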
393
394 class RhodeCodeUi(Base, BaseModel):
395 __tablename__ = 'rhodecode_ui'
396 __table_args__ = (
397 UniqueConstraint('ui_key'),
398 base_table_args
399 )
400
401 HOOK_REPO_SIZE = 'changegroup.repo_size'
402 # HG
403 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
404 HOOK_PULL = 'outgoing.pull_logger'
405 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
406 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
407 HOOK_PUSH = 'changegroup.push_logger'
408 HOOK_PUSH_KEY = 'pushkey.key_push'
409
410 # TODO: johbo: Unify the way hooks are configured for git and hg;
411 # the git part is currently hardcoded.
412
413 # SVN PATTERNS
414 SVN_BRANCH_ID = 'vcs_svn_branch'
415 SVN_TAG_ID = 'vcs_svn_tag'
416
417 ui_id = Column(
418 "ui_id", Integer(), nullable=False, unique=True, default=None,
419 primary_key=True)
420 ui_section = Column(
421 "ui_section", String(255), nullable=True, unique=None, default=None)
422 ui_key = Column(
423 "ui_key", String(255), nullable=True, unique=None, default=None)
424 ui_value = Column(
425 "ui_value", String(255), nullable=True, unique=None, default=None)
426 ui_active = Column(
427 "ui_active", Boolean(), nullable=True, unique=None, default=True)
428
429 def __repr__(self):
430 return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section,
431 self.ui_key, self.ui_value)
432
433
434 class RepoRhodeCodeSetting(Base, BaseModel):
435 __tablename__ = 'repo_rhodecode_settings'
436 __table_args__ = (
437 UniqueConstraint(
438 'app_settings_name', 'repository_id',
439 name='uq_repo_rhodecode_setting_name_repo_id'),
440 base_table_args
441 )
442
443 repository_id = Column(
444 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
445 nullable=False)
446 app_settings_id = Column(
447 "app_settings_id", Integer(), nullable=False, unique=True,
448 default=None, primary_key=True)
449 app_settings_name = Column(
450 "app_settings_name", String(255), nullable=True, unique=None,
451 default=None)
452 _app_settings_value = Column(
453 "app_settings_value", String(4096), nullable=True, unique=None,
454 default=None)
455 _app_settings_type = Column(
456 "app_settings_type", String(255), nullable=True, unique=None,
457 default=None)
458
459 repository = relationship('Repository')
460
461 def __init__(self, repository_id, key='', val='', type='unicode'):
462 self.repository_id = repository_id
463 self.app_settings_name = key
464 self.app_settings_type = type
465 self.app_settings_value = val
466
467 @validates('_app_settings_value')
468 def validate_settings_value(self, key, val):
469 assert type(val) == unicode
470 return val
471
472 @hybrid_property
473 def app_settings_value(self):
474 v = self._app_settings_value
475 type_ = self.app_settings_type
476 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
477 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
478 return converter(v)
479
480 @app_settings_value.setter
481 def app_settings_value(self, val):
482 """
483 Setter that will always make sure we use unicode in app_settings_value
484
485 :param val:
486 """
487 self._app_settings_value = safe_unicode(val)
488
489 @hybrid_property
490 def app_settings_type(self):
491 return self._app_settings_type
492
493 @app_settings_type.setter
494 def app_settings_type(self, val):
495 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
496 if val not in SETTINGS_TYPES:
497 raise Exception('type must be one of %s got %s'
498 % (SETTINGS_TYPES.keys(), val))
499 self._app_settings_type = val
500
501 def __unicode__(self):
502 return u"<%s('%s:%s:%s[%s]')>" % (
503 self.__class__.__name__, self.repository.repo_name,
504 self.app_settings_name, self.app_settings_value,
505 self.app_settings_type
506 )
507
508
509 class RepoRhodeCodeUi(Base, BaseModel):
510 __tablename__ = 'repo_rhodecode_ui'
511 __table_args__ = (
512 UniqueConstraint(
513 'repository_id', 'ui_section', 'ui_key',
514 name='uq_repo_rhodecode_ui_repository_id_section_key'),
515 base_table_args
516 )
517
518 repository_id = Column(
519 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
520 nullable=False)
521 ui_id = Column(
522 "ui_id", Integer(), nullable=False, unique=True, default=None,
523 primary_key=True)
524 ui_section = Column(
525 "ui_section", String(255), nullable=True, unique=None, default=None)
526 ui_key = Column(
527 "ui_key", String(255), nullable=True, unique=None, default=None)
528 ui_value = Column(
529 "ui_value", String(255), nullable=True, unique=None, default=None)
530 ui_active = Column(
531 "ui_active", Boolean(), nullable=True, unique=None, default=True)
532
533 repository = relationship('Repository')
534
535 def __repr__(self):
536 return '<%s[%s:%s]%s=>%s>' % (
537 self.__class__.__name__, self.repository.repo_name,
538 self.ui_section, self.ui_key, self.ui_value)
539
540
541 class User(Base, BaseModel):
542 __tablename__ = 'users'
543 __table_args__ = (
544 UniqueConstraint('username'), UniqueConstraint('email'),
545 Index('u_username_idx', 'username'),
546 Index('u_email_idx', 'email'),
547 base_table_args
548 )
549
550 DEFAULT_USER = 'default'
551 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
552 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
553
554 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
555 username = Column("username", String(255), nullable=True, unique=None, default=None)
556 password = Column("password", String(255), nullable=True, unique=None, default=None)
557 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
558 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
559 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
560 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
561 _email = Column("email", String(255), nullable=True, unique=None, default=None)
562 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
563 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
564
565 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
566 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
567 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
568 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
569 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
570 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
571
572 user_log = relationship('UserLog')
573 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
574
575 repositories = relationship('Repository')
576 repository_groups = relationship('RepoGroup')
577 user_groups = relationship('UserGroup')
578
579 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
580 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
581
582 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
583 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
584 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
585
586 group_member = relationship('UserGroupMember', cascade='all')
587
588 notifications = relationship('UserNotification', cascade='all')
589 # notifications assigned to this user
590 user_created_notifications = relationship('Notification', cascade='all')
591 # comments created by this user
592 user_comments = relationship('ChangesetComment', cascade='all')
593 # user profile extra info
594 user_emails = relationship('UserEmailMap', cascade='all')
595 user_ip_map = relationship('UserIpMap', cascade='all')
596 user_auth_tokens = relationship('UserApiKeys', cascade='all')
597 user_ssh_keys = relationship('UserSshKeys', cascade='all')
598
599 # gists
600 user_gists = relationship('Gist', cascade='all')
601 # user pull requests
602 user_pull_requests = relationship('PullRequest', cascade='all')
603 # external identities
604 extenal_identities = relationship(
605 'ExternalIdentity',
606 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
607 cascade='all')
608 # review rules
609 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
610
611 def __unicode__(self):
612 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
613 self.user_id, self.username)
614
615 @hybrid_property
616 def email(self):
617 return self._email
618
619 @email.setter
620 def email(self, val):
621 self._email = val.lower() if val else None
622
623 @hybrid_property
624 def first_name(self):
625 from rhodecode.lib import helpers as h
626 if self.name:
627 return h.escape(self.name)
628 return self.name
629
630 @hybrid_property
631 def last_name(self):
632 from rhodecode.lib import helpers as h
633 if self.lastname:
634 return h.escape(self.lastname)
635 return self.lastname
636
637 @hybrid_property
638 def api_key(self):
639 """
640 Fetch an auth-token with the ALL role connected to this user, if one exists
641 """
642 user_auth_token = UserApiKeys.query()\
643 .filter(UserApiKeys.user_id == self.user_id)\
644 .filter(or_(UserApiKeys.expires == -1,
645 UserApiKeys.expires >= time.time()))\
646 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
647 if user_auth_token:
648 user_auth_token = user_auth_token.api_key
649
650 return user_auth_token
651
652 @api_key.setter
653 def api_key(self, val):
654 # don't allow setting the API key; this is deprecated for now
655 self._api_key = None
656
657 @property
658 def reviewer_pull_requests(self):
659 return PullRequestReviewers.query() \
660 .options(joinedload(PullRequestReviewers.pull_request)) \
661 .filter(PullRequestReviewers.user_id == self.user_id) \
662 .all()
663
664 @property
665 def firstname(self):
666 # alias for future
667 return self.name
668
669 @property
670 def emails(self):
671 other = UserEmailMap.query()\
672 .filter(UserEmailMap.user == self) \
673 .order_by(UserEmailMap.email_id.asc()) \
674 .all()
675 return [self.email] + [x.email for x in other]
676
677 @property
678 def auth_tokens(self):
679 auth_tokens = self.get_auth_tokens()
680 return [x.api_key for x in auth_tokens]
681
682 def get_auth_tokens(self):
683 return UserApiKeys.query()\
684 .filter(UserApiKeys.user == self)\
685 .order_by(UserApiKeys.user_api_key_id.asc())\
686 .all()
687
688 @LazyProperty
689 def feed_token(self):
690 return self.get_feed_token()
691
692 def get_feed_token(self, cache=True):
693 feed_tokens = UserApiKeys.query()\
694 .filter(UserApiKeys.user == self)\
695 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
696 if cache:
697 feed_tokens = feed_tokens.options(
698 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
699
700 feed_tokens = feed_tokens.all()
701 if feed_tokens:
702 return feed_tokens[0].api_key
703 return 'NO_FEED_TOKEN_AVAILABLE'
704
705 @classmethod
706 def get(cls, user_id, cache=False):
707 if not user_id:
708 return
709
710 user = cls.query()
711 if cache:
712 user = user.options(
713 FromCache("sql_cache_short", "get_users_%s" % user_id))
714 return user.get(user_id)
715
716 @classmethod
717 def extra_valid_auth_tokens(cls, user, role=None):
718 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
719 .filter(or_(UserApiKeys.expires == -1,
720 UserApiKeys.expires >= time.time()))
721 if role:
722 tokens = tokens.filter(or_(UserApiKeys.role == role,
723 UserApiKeys.role == UserApiKeys.ROLE_ALL))
724 return tokens.all()
725
726 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
727 from rhodecode.lib import auth
728
729 log.debug('Trying to authenticate user: %s via auth-token, '
730 'and roles: %s', self, roles)
731
732 if not auth_token:
733 return False
734
735 crypto_backend = auth.crypto_backend()
736
737 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
738 tokens_q = UserApiKeys.query()\
739 .filter(UserApiKeys.user_id == self.user_id)\
740 .filter(or_(UserApiKeys.expires == -1,
741 UserApiKeys.expires >= time.time()))
742
743 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
744
745 plain_tokens = []
746 hash_tokens = []
747
748 user_tokens = tokens_q.all()
749 log.debug('Found %s user tokens to check for authentication', len(user_tokens))
750 for token in user_tokens:
751 log.debug('AUTH_TOKEN: checking if user token with id `%s` matches',
752 token.user_api_key_id)
753 # verify scope first, since it's way faster than hash calculation of
754 # encrypted tokens
755 if token.repo_id:
756 # token has a scope, we need to verify it
757 if scope_repo_id != token.repo_id:
758 log.debug(
759 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
760 'and calling scope is:%s, skipping further checks',
761 token.repo, scope_repo_id)
762 # token has a scope, and it doesn't match, skip token
763 continue
764
765 if token.api_key.startswith(crypto_backend.ENC_PREF):
766 hash_tokens.append(token.api_key)
767 else:
768 plain_tokens.append(token.api_key)
769
770 is_plain_match = auth_token in plain_tokens
771 if is_plain_match:
772 return True
773
774 for hashed in hash_tokens:
775 # NOTE(marcink): this is expensive to calculate, but most secure
776 match = crypto_backend.hash_check(auth_token, hashed)
777 if match:
778 return True
779
780 return False
781
782 @property
783 def ip_addresses(self):
784 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
785 return [x.ip_addr for x in ret]
786
787 @property
788 def username_and_name(self):
789 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
790
791 @property
792 def username_or_name_or_email(self):
793 full_name = self.full_name if self.full_name != ' ' else None
794 return self.username or full_name or self.email
795
796 @property
797 def full_name(self):
798 return '%s %s' % (self.first_name, self.last_name)
799
800 @property
801 def full_name_or_username(self):
802 return ('%s %s' % (self.first_name, self.last_name)
803 if (self.first_name and self.last_name) else self.username)
804
805 @property
806 def full_contact(self):
807 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
808
809 @property
810 def short_contact(self):
811 return '%s %s' % (self.first_name, self.last_name)
812
813 @property
814 def is_admin(self):
815 return self.admin
816
817 def AuthUser(self, **kwargs):
818 """
819 Returns instance of AuthUser for this user
820 """
821 from rhodecode.lib.auth import AuthUser
822 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
823
824 @hybrid_property
825 def user_data(self):
826 if not self._user_data:
827 return {}
828
829 try:
830 return json.loads(self._user_data)
831 except TypeError:
832 return {}
833
834 @user_data.setter
835 def user_data(self, val):
836 if not isinstance(val, dict):
837 raise Exception('user_data must be dict, got %s' % type(val))
838 try:
839 self._user_data = json.dumps(val)
840 except Exception:
841 log.error(traceback.format_exc())
842
843 @classmethod
844 def get_by_username(cls, username, case_insensitive=False,
845 cache=False, identity_cache=False):
846 session = Session()
847
848 if case_insensitive:
849 q = cls.query().filter(
850 func.lower(cls.username) == func.lower(username))
851 else:
852 q = cls.query().filter(cls.username == username)
853
854 if cache:
855 if identity_cache:
856 val = cls.identity_cache(session, 'username', username)
857 if val:
858 return val
859 else:
860 cache_key = "get_user_by_name_%s" % _hash_key(username)
861 q = q.options(
862 FromCache("sql_cache_short", cache_key))
863
864 return q.scalar()
865
866 @classmethod
867 def get_by_auth_token(cls, auth_token, cache=False):
868 q = UserApiKeys.query()\
869 .filter(UserApiKeys.api_key == auth_token)\
870 .filter(or_(UserApiKeys.expires == -1,
871 UserApiKeys.expires >= time.time()))
872 if cache:
873 q = q.options(
874 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
875
876 match = q.first()
877 if match:
878 return match.user
879
880 @classmethod
881 def get_by_email(cls, email, case_insensitive=False, cache=False):
882
883 if case_insensitive:
884 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
885
886 else:
887 q = cls.query().filter(cls.email == email)
888
889 email_key = _hash_key(email)
890 if cache:
891 q = q.options(
892 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
893
894 ret = q.scalar()
895 if ret is None:
896 q = UserEmailMap.query()
897 # try fetching in alternate email map
898 if case_insensitive:
899 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
900 else:
901 q = q.filter(UserEmailMap.email == email)
902 q = q.options(joinedload(UserEmailMap.user))
903 if cache:
904 q = q.options(
905 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
906 ret = getattr(q.scalar(), 'user', None)
907
908 return ret
909
910 @classmethod
911 def get_from_cs_author(cls, author):
912 """
913 Tries to get User objects out of commit author string
914
915 :param author:
916 """
917 from rhodecode.lib.helpers import email, author_name
918 # Valid email in the attribute passed, see if they're in the system
919 _email = email(author)
920 if _email:
921 user = cls.get_by_email(_email, case_insensitive=True)
922 if user:
923 return user
924 # Maybe we can match by username?
925 _author = author_name(author)
926 user = cls.get_by_username(_author, case_insensitive=True)
927 if user:
928 return user
929
930 def update_userdata(self, **kwargs):
931 usr = self
932 old = usr.user_data
933 old.update(**kwargs)
934 usr.user_data = old
935 Session().add(usr)
936 log.debug('updated userdata with %s', kwargs)
937
938 def update_lastlogin(self):
939 """Update user lastlogin"""
940 self.last_login = datetime.datetime.now()
941 Session().add(self)
942 log.debug('updated user %s lastlogin', self.username)
943
944 def update_password(self, new_password):
945 from rhodecode.lib.auth import get_crypt_password
946
947 self.password = get_crypt_password(new_password)
948 Session().add(self)
949
950 @classmethod
951 def get_first_super_admin(cls):
952 user = User.query()\
953 .filter(User.admin == true()) \
954 .order_by(User.user_id.asc()) \
955 .first()
956
957 if user is None:
958 raise Exception('FATAL: Missing administrative account!')
959 return user
960
961 @classmethod
962 def get_all_super_admins(cls):
963 """
964 Returns all admin accounts sorted by username
965 """
966 return User.query().filter(User.admin == true())\
967 .order_by(User.username.asc()).all()
968
969 @classmethod
970 def get_default_user(cls, cache=False, refresh=False):
971 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
972 if user is None:
973 raise Exception('FATAL: Missing default account!')
974 if refresh:
975 # The default user might be based on outdated state which
976 # has been loaded from the cache.
977 # A call to refresh() ensures that the
978 # latest state from the database is used.
979 Session().refresh(user)
980 return user
981
982 def _get_default_perms(self, user, suffix=''):
983 from rhodecode.model.permission import PermissionModel
984 return PermissionModel().get_default_perms(user.user_perms, suffix)
985
986 def get_default_perms(self, suffix=''):
987 return self._get_default_perms(self, suffix)
988
989 def get_api_data(self, include_secrets=False, details='full'):
990 """
991 Common function for generating user related data for API
992
993 :param include_secrets: By default secrets in the API data will be replaced
994 by a placeholder value to prevent exposing this data by accident. In case
995 this data shall be exposed, set this flag to ``True``.
996
997 :param details: details can be 'basic' or 'full'; 'basic' gives only a subset
998 of the available user information, including user_id, name and emails.
999 """
1000 user = self
1001 user_data = self.user_data
1002 data = {
1003 'user_id': user.user_id,
1004 'username': user.username,
1005 'firstname': user.name,
1006 'lastname': user.lastname,
1007 'email': user.email,
1008 'emails': user.emails,
1009 }
1010 if details == 'basic':
1011 return data
1012
1013 auth_token_length = 40
1014 auth_token_replacement = '*' * auth_token_length
1015
1016 extras = {
1017 'auth_tokens': [auth_token_replacement],
1018 'active': user.active,
1019 'admin': user.admin,
1020 'extern_type': user.extern_type,
1021 'extern_name': user.extern_name,
1022 'last_login': user.last_login,
1023 'last_activity': user.last_activity,
1024 'ip_addresses': user.ip_addresses,
1025 'language': user_data.get('language')
1026 }
1027 data.update(extras)
1028
1029 if include_secrets:
1030 data['auth_tokens'] = user.auth_tokens
1031 return data
1032
1033 def __json__(self):
1034 data = {
1035 'full_name': self.full_name,
1036 'full_name_or_username': self.full_name_or_username,
1037 'short_contact': self.short_contact,
1038 'full_contact': self.full_contact,
1039 }
1040 data.update(self.get_api_data())
1041 return data
1042
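# NOTE(editor): illustrative sketch, not part of the original module. A VCS
# operation against one repository would validate an auth token roughly like
# this: the role narrows which tokens apply, scope_repo_id must match any
# repo-scoped token, and both plain and hashed tokens are checked. All
# arguments below are hypothetical.
def _example_authenticate_by_token(user, auth_token, repo_id):  # pragma: no cover - docs-only sketch
    allowed = user.authenticate_by_token(
        auth_token, roles=[UserApiKeys.ROLE_VCS], scope_repo_id=repo_id)
    return allowed  # -> True only if a matching, unexpired token exists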
1043
1044 class UserApiKeys(Base, BaseModel):
1045 __tablename__ = 'user_api_keys'
1046 __table_args__ = (
1047 Index('uak_api_key_idx', 'api_key', unique=True),
1048 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1049 base_table_args
1050 )
1051 __mapper_args__ = {}
1052
1053 # ApiKey role
1054 ROLE_ALL = 'token_role_all'
1055 ROLE_HTTP = 'token_role_http'
1056 ROLE_VCS = 'token_role_vcs'
1057 ROLE_API = 'token_role_api'
1058 ROLE_FEED = 'token_role_feed'
1059 ROLE_PASSWORD_RESET = 'token_password_reset'
1060
1061 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1062
1063 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1064 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1065 api_key = Column("api_key", String(255), nullable=False, unique=True)
1066 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1067 expires = Column('expires', Float(53), nullable=False)
1068 role = Column('role', String(255), nullable=True)
1069 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1070
1071 # scope columns
1072 repo_id = Column(
1073 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1074 nullable=True, unique=None, default=None)
1075 repo = relationship('Repository', lazy='joined')
1076
1077 repo_group_id = Column(
1078 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1079 nullable=True, unique=None, default=None)
1080 repo_group = relationship('RepoGroup', lazy='joined')
1081
1082 user = relationship('User', lazy='joined')
1083
1084 def __unicode__(self):
1085 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1086
1087 def __json__(self):
1088 data = {
1089 'auth_token': self.api_key,
1090 'role': self.role,
1091 'scope': self.scope_humanized,
1092 'expired': self.expired
1093 }
1094 return data
1095
1096 def get_api_data(self, include_secrets=False):
1097 data = self.__json__()
1098 if include_secrets:
1099 return data
1100 else:
1101 data['auth_token'] = self.token_obfuscated
1102 return data
1103
1104 @hybrid_property
1105 def description_safe(self):
1106 from rhodecode.lib import helpers as h
1107 return h.escape(self.description)
1108
1109 @property
1110 def expired(self):
1111 if self.expires == -1:
1112 return False
1113 return time.time() > self.expires
1114
1115 @classmethod
1116 def _get_role_name(cls, role):
1117 return {
1118 cls.ROLE_ALL: _('all'),
1119 cls.ROLE_HTTP: _('http/web interface'),
1120 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1121 cls.ROLE_API: _('api calls'),
1122 cls.ROLE_FEED: _('feed access'),
1123 }.get(role, role)
1124
1125 @property
1126 def role_humanized(self):
1127 return self._get_role_name(self.role)
1128
1129 def _get_scope(self):
1130 if self.repo:
1131 return repr(self.repo)
1132 if self.repo_group:
1133 return repr(self.repo_group) + ' (recursive)'
1134 return 'global'
1135
1136 @property
1137 def scope_humanized(self):
1138 return self._get_scope()
1139
1140 @property
1141 def token_obfuscated(self):
1142 if self.api_key:
1143 return self.api_key[:4] + "****"
1144
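# NOTE(editor): minimal sketch, not part of the original module. It shows how
# token expiry is encoded by the model above: expires == -1 means the token
# never expires, any other value is a unix timestamp compared against now.
def _example_token_expiry(token):  # pragma: no cover - docs-only sketch
    if token.expires == -1:
        return 'never expires'
    return 'expired' if token.expired else 'valid until %s' % token.expires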
1145
1146 class UserEmailMap(Base, BaseModel):
1147 __tablename__ = 'user_email_map'
1148 __table_args__ = (
1149 Index('uem_email_idx', 'email'),
1150 UniqueConstraint('email'),
1151 base_table_args
1152 )
1153 __mapper_args__ = {}
1154
1155 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1156 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1157 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1158 user = relationship('User', lazy='joined')
1159
1160 @validates('_email')
1161 def validate_email(self, key, email):
1162 # check that this email is not the user's main one
1163 main_email = Session().query(User).filter(User.email == email).scalar()
1164 if main_email is not None:
1165 raise AttributeError('email %s is present in user table' % email)
1166 return email
1167
1168 @hybrid_property
1169 def email(self):
1170 return self._email
1171
1172 @email.setter
1173 def email(self, val):
1174 self._email = val.lower() if val else None
1175
1176
1177 class UserIpMap(Base, BaseModel):
1178 __tablename__ = 'user_ip_map'
1179 __table_args__ = (
1180 UniqueConstraint('user_id', 'ip_addr'),
1181 base_table_args
1182 )
1183 __mapper_args__ = {}
1184
1185 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1186 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1187 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1188 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1189 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1190 user = relationship('User', lazy='joined')
1191
1192 @hybrid_property
1193 def description_safe(self):
1194 from rhodecode.lib import helpers as h
1195 return h.escape(self.description)
1196
1197 @classmethod
1198 def _get_ip_range(cls, ip_addr):
1199 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1200 return [str(net.network_address), str(net.broadcast_address)]
1201
1202 def __json__(self):
1203 return {
1204 'ip_addr': self.ip_addr,
1205 'ip_range': self._get_ip_range(self.ip_addr),
1206 }
1207
1208 def __unicode__(self):
1209 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1210 self.user_id, self.ip_addr)
1211
1212
1213 class UserSshKeys(Base, BaseModel):
1214 __tablename__ = 'user_ssh_keys'
1215 __table_args__ = (
1216 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1217
1218 UniqueConstraint('ssh_key_fingerprint'),
1219
1220 base_table_args
1221 )
1222 __mapper_args__ = {}
1223
1224 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1225 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1226 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1227
1228 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1229
1230 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1231 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1232 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1233
1234 user = relationship('User', lazy='joined')
1235
1236 def __json__(self):
1237 data = {
1238 'ssh_fingerprint': self.ssh_key_fingerprint,
1239 'description': self.description,
1240 'created_on': self.created_on
1241 }
1242 return data
1243
1244 def get_api_data(self):
1245 data = self.__json__()
1246 return data
1247
1248
1249 class UserLog(Base, BaseModel):
1250 __tablename__ = 'user_logs'
1251 __table_args__ = (
1252 base_table_args,
1253 )
1254
1255 VERSION_1 = 'v1'
1256 VERSION_2 = 'v2'
1257 VERSIONS = [VERSION_1, VERSION_2]
1258
1259 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1260 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1261 username = Column("username", String(255), nullable=True, unique=None, default=None)
1262 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1263 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1264 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1265 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1266 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1267
1268 version = Column("version", String(255), nullable=True, default=VERSION_1)
1269 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1270 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1271
1272 def __unicode__(self):
1273 return u"<%s('id:%s:%s')>" % (
1274 self.__class__.__name__, self.repository_name, self.action)
1275
1276 def __json__(self):
1277 return {
1278 'user_id': self.user_id,
1279 'username': self.username,
1280 'repository_id': self.repository_id,
1281 'repository_name': self.repository_name,
1282 'user_ip': self.user_ip,
1283 'action_date': self.action_date,
1284 'action': self.action,
1285 }
1286
1287 @hybrid_property
1288 def entry_id(self):
1289 return self.user_log_id
1290
1291 @property
1292 def action_as_day(self):
1293 return datetime.date(*self.action_date.timetuple()[:3])
1294
1295 user = relationship('User')
1296 repository = relationship('Repository', cascade='')
1297
1298
1299 class UserGroup(Base, BaseModel):
1300 __tablename__ = 'users_groups'
1301 __table_args__ = (
1302 base_table_args,
1303 )
1304
1305 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1306 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1307 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1308 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1309 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1310 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1311 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1312 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1313
1314 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1315 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1316 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1317 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1318 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1319 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1320
1321 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1322 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1323
1324 @classmethod
1325 def _load_group_data(cls, column):
1326 if not column:
1327 return {}
1328
1329 try:
1330 return json.loads(column) or {}
1331 except TypeError:
1332 return {}
1333
1334 @hybrid_property
1335 def description_safe(self):
1336 from rhodecode.lib import helpers as h
1337 return h.escape(self.user_group_description)
1338
1339 @hybrid_property
1340 def group_data(self):
1341 return self._load_group_data(self._group_data)
1342
1343 @group_data.expression
1344 def group_data(self, **kwargs):
1345 return self._group_data
1346
1347 @group_data.setter
1348 def group_data(self, val):
1349 try:
1350 self._group_data = json.dumps(val)
1351 except Exception:
1352 log.error(traceback.format_exc())
1353
1354 @classmethod
1355 def _load_sync(cls, group_data):
1356 if group_data:
1357 return group_data.get('extern_type')
1358
1359 @property
1360 def sync(self):
1361 return self._load_sync(self.group_data)
1362
1363 def __unicode__(self):
1364 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1365 self.users_group_id,
1366 self.users_group_name)
1367
1368 @classmethod
1369 def get_by_group_name(cls, group_name, cache=False,
1370 case_insensitive=False):
1371 if case_insensitive:
1372 q = cls.query().filter(func.lower(cls.users_group_name) ==
1373 func.lower(group_name))
1374
1375 else:
1376 q = cls.query().filter(cls.users_group_name == group_name)
1377 if cache:
1378 q = q.options(
1379 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1380 return q.scalar()
1381
1382 @classmethod
1383 def get(cls, user_group_id, cache=False):
1384 if not user_group_id:
1385 return
1386
1387 user_group = cls.query()
1388 if cache:
1389 user_group = user_group.options(
1390 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1391 return user_group.get(user_group_id)
1392
1393 def permissions(self, with_admins=True, with_owner=True):
1394 """
1395 Permissions for user groups
1396 """
1397 _admin_perm = 'usergroup.admin'
1398
1399 owner_row = []
1400 if with_owner:
1401 usr = AttributeDict(self.user.get_dict())
1402 usr.owner_row = True
1403 usr.permission = _admin_perm
1404 owner_row.append(usr)
1405
1406 super_admin_ids = []
1407 super_admin_rows = []
1408 if with_admins:
1409 for usr in User.get_all_super_admins():
1410 super_admin_ids.append(usr.user_id)
1411 # if this admin is also owner, don't double the record
1412 if usr.user_id == owner_row[0].user_id:
1413 owner_row[0].admin_row = True
1414 else:
1415 usr = AttributeDict(usr.get_dict())
1416 usr.admin_row = True
1417 usr.permission = _admin_perm
1418 super_admin_rows.append(usr)
1419
1420 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1421 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1422 joinedload(UserUserGroupToPerm.user),
1423 joinedload(UserUserGroupToPerm.permission),)
1424
1425 # get owners and admins and permissions. We do a trick of re-writing
1426 # sqlalchemy objects into AttributeDicts, because the sqlalchemy session
1427 # holds a global reference and changing one object propagates to all
1428 # others. Without this, if an admin is also an owner, setting admin_row
1429 # would propagate to both objects.
1430 perm_rows = []
1431 for _usr in q.all():
1432 usr = AttributeDict(_usr.user.get_dict())
1433 # if this user is also owner/admin, mark as duplicate record
1434 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1435 usr.duplicate_perm = True
1436 usr.permission = _usr.permission.permission_name
1437 perm_rows.append(usr)
1438
1439 # sort the perm rows: the 'default' user first, then by
1440 # admin, write, read, none permission type, alphabetically within
1441 # each group
1442 perm_rows = sorted(perm_rows, key=display_user_sort)
1443
1444 return super_admin_rows + owner_row + perm_rows
1445
1446 def permission_user_groups(self):
1447 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1448 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1449 joinedload(UserGroupUserGroupToPerm.target_user_group),
1450 joinedload(UserGroupUserGroupToPerm.permission),)
1451
1452 perm_rows = []
1453 for _user_group in q.all():
1454 usr = AttributeDict(_user_group.user_group.get_dict())
1455 usr.permission = _user_group.permission.permission_name
1456 perm_rows.append(usr)
1457
1458 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1459 return perm_rows
1460
1461 def _get_default_perms(self, user_group, suffix=''):
1462 from rhodecode.model.permission import PermissionModel
1463 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1464
1465 def get_default_perms(self, suffix=''):
1466 return self._get_default_perms(self, suffix)
1467
1468 def get_api_data(self, with_group_members=True, include_secrets=False):
1469 """
1470 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1471 basically forwarded.
1472
1473 """
1474 user_group = self
1475 data = {
1476 'users_group_id': user_group.users_group_id,
1477 'group_name': user_group.users_group_name,
1478 'group_description': user_group.user_group_description,
1479 'active': user_group.users_group_active,
1480 'owner': user_group.user.username,
1481 'sync': user_group.sync,
1482 'owner_email': user_group.user.email,
1483 }
1484
1485 if with_group_members:
1486 users = []
1487 for user in user_group.members:
1488 user = user.user
1489 users.append(user.get_api_data(include_secrets=include_secrets))
1490 data['users'] = users
1491
1492 return data
1493
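# NOTE(editor): illustrative sketch, not part of the original module. It shows the
# shape of the rows returned by UserGroup.permissions() above: AttributeDicts with
# the user's fields plus a permission name and optional owner/admin/duplicate flags.
def _example_user_group_permission_rows(user_group):  # pragma: no cover - docs-only sketch
    rows = user_group.permissions(with_admins=True, with_owner=True)
    return [
        (row.username, row.permission,
         getattr(row, 'owner_row', False), getattr(row, 'admin_row', False))
        for row in rows]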
1494
1495 class UserGroupMember(Base, BaseModel):
1496 __tablename__ = 'users_groups_members'
1497 __table_args__ = (
1498 base_table_args,
1499 )
1500
1501 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1502 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1503 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1504
1505 user = relationship('User', lazy='joined')
1506 users_group = relationship('UserGroup')
1507
1508 def __init__(self, gr_id='', u_id=''):
1509 self.users_group_id = gr_id
1510 self.user_id = u_id
1511
1512
1513 class RepositoryField(Base, BaseModel):
1514 __tablename__ = 'repositories_fields'
1515 __table_args__ = (
1516 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1517 base_table_args,
1518 )
1519
1520 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1521
1522 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1523 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1524 field_key = Column("field_key", String(250))
1525 field_label = Column("field_label", String(1024), nullable=False)
1526 field_value = Column("field_value", String(10000), nullable=False)
1527 field_desc = Column("field_desc", String(1024), nullable=False)
1528 field_type = Column("field_type", String(255), nullable=False, unique=None)
1529 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1530
1531 repository = relationship('Repository')
1532
1533 @property
1534 def field_key_prefixed(self):
1535 return 'ex_%s' % self.field_key
1536
1537 @classmethod
1538 def un_prefix_key(cls, key):
1539 if key.startswith(cls.PREFIX):
1540 return key[len(cls.PREFIX):]
1541 return key
1542
1543 @classmethod
1544 def get_by_key_name(cls, key, repo):
1545 row = cls.query()\
1546 .filter(cls.repository == repo)\
1547 .filter(cls.field_key == key).scalar()
1548 return row
1549
1550
1551 class Repository(Base, BaseModel):
1552 __tablename__ = 'repositories'
1553 __table_args__ = (
1554 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1555 base_table_args,
1556 )
1557 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1558 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1559 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1560
1561 STATE_CREATED = 'repo_state_created'
1562 STATE_PENDING = 'repo_state_pending'
1563 STATE_ERROR = 'repo_state_error'
1564
1565 LOCK_AUTOMATIC = 'lock_auto'
1566 LOCK_API = 'lock_api'
1567 LOCK_WEB = 'lock_web'
1568 LOCK_PULL = 'lock_pull'
1569
1570 NAME_SEP = URL_SEP
1571
1572 repo_id = Column(
1573 "repo_id", Integer(), nullable=False, unique=True, default=None,
1574 primary_key=True)
1575 _repo_name = Column(
1576 "repo_name", Text(), nullable=False, default=None)
1577 _repo_name_hash = Column(
1578 "repo_name_hash", String(255), nullable=False, unique=True)
1579 repo_state = Column("repo_state", String(255), nullable=True)
1580
1581 clone_uri = Column(
1582 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1583 default=None)
1584 push_uri = Column(
1585 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1586 default=None)
1587 repo_type = Column(
1588 "repo_type", String(255), nullable=False, unique=False, default=None)
1589 user_id = Column(
1590 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1591 unique=False, default=None)
1592 private = Column(
1593 "private", Boolean(), nullable=True, unique=None, default=None)
1594 archived = Column(
1595 "archived", Boolean(), nullable=True, unique=None, default=None)
1596 enable_statistics = Column(
1597 "statistics", Boolean(), nullable=True, unique=None, default=True)
1598 enable_downloads = Column(
1599 "downloads", Boolean(), nullable=True, unique=None, default=True)
1600 description = Column(
1601 "description", String(10000), nullable=True, unique=None, default=None)
1602 created_on = Column(
1603 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1604 default=datetime.datetime.now)
1605 updated_on = Column(
1606 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1607 default=datetime.datetime.now)
1608 _landing_revision = Column(
1609 "landing_revision", String(255), nullable=False, unique=False,
1610 default=None)
1611 enable_locking = Column(
1612 "enable_locking", Boolean(), nullable=False, unique=None,
1613 default=False)
1614 _locked = Column(
1615 "locked", String(255), nullable=True, unique=False, default=None)
1616 _changeset_cache = Column(
1617 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1618
1619 fork_id = Column(
1620 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1621 nullable=True, unique=False, default=None)
1622 group_id = Column(
1623 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1624 unique=False, default=None)
1625
1626 user = relationship('User', lazy='joined')
1627 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1628 group = relationship('RepoGroup', lazy='joined')
1629 repo_to_perm = relationship(
1630 'UserRepoToPerm', cascade='all',
1631 order_by='UserRepoToPerm.repo_to_perm_id')
1632 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1633 stats = relationship('Statistics', cascade='all', uselist=False)
1634
1635 followers = relationship(
1636 'UserFollowing',
1637 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1638 cascade='all')
1639 extra_fields = relationship(
1640 'RepositoryField', cascade="all, delete, delete-orphan")
1641 logs = relationship('UserLog')
1642 comments = relationship(
1643 'ChangesetComment', cascade="all, delete, delete-orphan")
1644 pull_requests_source = relationship(
1645 'PullRequest',
1646 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1647 cascade="all, delete, delete-orphan")
1648 pull_requests_target = relationship(
1649 'PullRequest',
1650 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1651 cascade="all, delete, delete-orphan")
1652 ui = relationship('RepoRhodeCodeUi', cascade="all")
1653 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1654 integrations = relationship('Integration',
1655 cascade="all, delete, delete-orphan")
1656
1657 scoped_tokens = relationship('UserApiKeys', cascade="all")
1658
1659 def __unicode__(self):
1660 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1661 safe_unicode(self.repo_name))
1662
1663 @hybrid_property
1664 def description_safe(self):
1665 from rhodecode.lib import helpers as h
1666 return h.escape(self.description)
1667
1668 @hybrid_property
1669 def landing_rev(self):
1670 # should always return [rev_type, rev]
1671 if self._landing_revision:
1672 _rev_info = self._landing_revision.split(':')
1673 if len(_rev_info) < 2:
1674 _rev_info.insert(0, 'rev')
1675 return [_rev_info[0], _rev_info[1]]
1676 return [None, None]
1677
1678 @landing_rev.setter
1679 def landing_rev(self, val):
1680 if ':' not in val:
1681 raise ValueError('value must be delimited with `:` and consist '
1682 'of <rev_type>:<rev>, got %s instead' % val)
1683 self._landing_revision = val
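        # a minimal usage sketch for the landing_rev setter, assuming `repo` is
        # a Repository instance:
        #   repo.landing_rev = 'branch:default'   # landing_rev -> ['branch', 'default']
        #   repo.landing_rev = 'default'          # raises ValueError, the ':' is required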
1684
1685 @hybrid_property
1686 def locked(self):
1687 if self._locked:
1688 user_id, timelocked, reason = self._locked.split(':')
1689 lock_values = int(user_id), timelocked, reason
1690 else:
1691 lock_values = [None, None, None]
1692 return lock_values
1693
1694 @locked.setter
1695 def locked(self, val):
1696 if val and isinstance(val, (list, tuple)):
1697 self._locked = ':'.join(map(str, val))
1698 else:
1699 self._locked = None
1700
1701 @hybrid_property
1702 def changeset_cache(self):
1703 from rhodecode.lib.vcs.backends.base import EmptyCommit
1704 dummy = EmptyCommit().__json__()
1705 if not self._changeset_cache:
1706 return dummy
1707 try:
1708 return json.loads(self._changeset_cache)
1709 except TypeError:
1710 return dummy
1711 except Exception:
1712 log.error(traceback.format_exc())
1713 return dummy
1714
1715 @changeset_cache.setter
1716 def changeset_cache(self, val):
1717 try:
1718 self._changeset_cache = json.dumps(val)
1719 except Exception:
1720 log.error(traceback.format_exc())
1721
1722 @hybrid_property
1723 def repo_name(self):
1724 return self._repo_name
1725
1726 @repo_name.setter
1727 def repo_name(self, value):
1728 self._repo_name = value
1729 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1730
1731 @classmethod
1732 def normalize_repo_name(cls, repo_name):
1733 """
1735 Normalizes OS-specific repo_name to the format stored internally in the
1736 database, using URL_SEP
1736
1737 :param cls:
1738 :param repo_name:
1739 """
1740 return cls.NAME_SEP.join(repo_name.split(os.sep))
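        # illustrative sketch, assuming NAME_SEP is '/' and the call happens on
        # a system where os.sep is '\\':
        #   Repository.normalize_repo_name('docs\\manuals')  # -> 'docs/manuals'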
1741
1742 @classmethod
1743 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1744 session = Session()
1745 q = session.query(cls).filter(cls.repo_name == repo_name)
1746
1747 if cache:
1748 if identity_cache:
1749 val = cls.identity_cache(session, 'repo_name', repo_name)
1750 if val:
1751 return val
1752 else:
1753 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1754 q = q.options(
1755 FromCache("sql_cache_short", cache_key))
1756
1757 return q.scalar()
1758
1759 @classmethod
1760 def get_by_id_or_repo_name(cls, repoid):
1761 if isinstance(repoid, (int, long)):
1762 try:
1763 repo = cls.get(repoid)
1764 except ValueError:
1765 repo = None
1766 else:
1767 repo = cls.get_by_repo_name(repoid)
1768 return repo
1769
1770 @classmethod
1771 def get_by_full_path(cls, repo_full_path):
1772 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1773 repo_name = cls.normalize_repo_name(repo_name)
1774 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1775
1776 @classmethod
1777 def get_repo_forks(cls, repo_id):
1778 return cls.query().filter(Repository.fork_id == repo_id)
1779
1780 @classmethod
1781 def base_path(cls):
1782 """
1783 Returns the base path where all repos are stored
1784
1785 :param cls:
1786 """
1787 q = Session().query(RhodeCodeUi)\
1788 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1789 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1790 return q.one().ui_value
1791
1792 @classmethod
1793 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1794 case_insensitive=True, archived=False):
1795 q = Repository.query()
1796
1797 if not archived:
1798 q = q.filter(Repository.archived.isnot(true()))
1799
1800 if not isinstance(user_id, Optional):
1801 q = q.filter(Repository.user_id == user_id)
1802
1803 if not isinstance(group_id, Optional):
1804 q = q.filter(Repository.group_id == group_id)
1805
1806 if case_insensitive:
1807 q = q.order_by(func.lower(Repository.repo_name))
1808 else:
1809 q = q.order_by(Repository.repo_name)
1810
1811 return q.all()
1812
1813 @property
1814 def forks(self):
1815 """
1816 Return forks of this repo
1817 """
1818 return Repository.get_repo_forks(self.repo_id)
1819
1820 @property
1821 def parent(self):
1822 """
1823 Returns fork parent
1824 """
1825 return self.fork
1826
1827 @property
1828 def just_name(self):
1829 return self.repo_name.split(self.NAME_SEP)[-1]
1830
1831 @property
1832 def groups_with_parents(self):
1833 groups = []
1834 if self.group is None:
1835 return groups
1836
1837 cur_gr = self.group
1838 groups.insert(0, cur_gr)
1839 while 1:
1840 gr = getattr(cur_gr, 'parent_group', None)
1841 cur_gr = cur_gr.parent_group
1842 if gr is None:
1843 break
1844 groups.insert(0, gr)
1845
1846 return groups
1847
1848 @property
1849 def groups_and_repo(self):
1850 return self.groups_with_parents, self
1851
1852 @LazyProperty
1853 def repo_path(self):
1854 """
1855 Returns the base full path for this repository, i.e. where it actually
1856 exists on the filesystem
1857 """
1858 q = Session().query(RhodeCodeUi).filter(
1859 RhodeCodeUi.ui_key == self.NAME_SEP)
1860 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1861 return q.one().ui_value
1862
1863 @property
1864 def repo_full_path(self):
1865 p = [self.repo_path]
1866 # we need to split the name by / since this is how we store the
1867 # names in the database, but that eventually needs to be converted
1868 # into a valid system path
1869 p += self.repo_name.split(self.NAME_SEP)
1870 return os.path.join(*map(safe_unicode, p))
1871
1872 @property
1873 def cache_keys(self):
1874 """
1875 Returns associated cache keys for that repo
1876 """
1877 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1878 repo_id=self.repo_id)
1879 return CacheKey.query()\
1880 .filter(CacheKey.cache_args == invalidation_namespace)\
1881 .order_by(CacheKey.cache_key)\
1882 .all()
1883
1884 @property
1885 def cached_diffs_relative_dir(self):
1886 """
1887 Return the path of cached diffs relative to the repository store,
1888 used for safe display to users, who shouldn't know the absolute store
1889 path
1890 """
1891 return os.path.join(
1892 os.path.dirname(self.repo_name),
1893 self.cached_diffs_dir.split(os.path.sep)[-1])
1894
1895 @property
1896 def cached_diffs_dir(self):
1897 path = self.repo_full_path
1898 return os.path.join(
1899 os.path.dirname(path),
1900 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1901
1902 def cached_diffs(self):
1903 diff_cache_dir = self.cached_diffs_dir
1904 if os.path.isdir(diff_cache_dir):
1905 return os.listdir(diff_cache_dir)
1906 return []
1907
1908 def shadow_repos(self):
1909 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1910 return [
1911 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1912 if x.startswith(shadow_repos_pattern)]
1913
1914 def get_new_name(self, repo_name):
1915 """
1916 returns new full repository name based on assigned group and new name
1917
1918 :param repo_name:
1919 """
1920 path_prefix = self.group.full_path_splitted if self.group else []
1921 return self.NAME_SEP.join(path_prefix + [repo_name])
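        # illustrative sketch, assuming this repository lives in a group named 'web':
        #   repo.get_new_name('tools')  # -> 'web/tools'; without a group just 'tools'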
1922
1923 @property
1924 def _config(self):
1925 """
1926 Returns db based config object.
1927 """
1928 from rhodecode.lib.utils import make_db_config
1929 return make_db_config(clear_session=False, repo=self)
1930
1931 def permissions(self, with_admins=True, with_owner=True):
1932 """
1933 Permissions for repositories
1934 """
1935 _admin_perm = 'repository.admin'
1936
1937 owner_row = []
1938 if with_owner:
1939 usr = AttributeDict(self.user.get_dict())
1940 usr.owner_row = True
1941 usr.permission = _admin_perm
1942 usr.permission_id = None
1943 owner_row.append(usr)
1944
1945 super_admin_ids = []
1946 super_admin_rows = []
1947 if with_admins:
1948 for usr in User.get_all_super_admins():
1949 super_admin_ids.append(usr.user_id)
1950 # if this admin is also owner, don't double the record
1951 if usr.user_id == owner_row[0].user_id:
1952 owner_row[0].admin_row = True
1953 else:
1954 usr = AttributeDict(usr.get_dict())
1955 usr.admin_row = True
1956 usr.permission = _admin_perm
1957 usr.permission_id = None
1958 super_admin_rows.append(usr)
1959
1960 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1961 q = q.options(joinedload(UserRepoToPerm.repository),
1962 joinedload(UserRepoToPerm.user),
1963 joinedload(UserRepoToPerm.permission),)
1964
1965 # get owners and admins and permissions. We re-write the sqlalchemy
1966 # objects into detached copies (AttributeDict) because the sqlalchemy
1967 # session keeps a global reference and changing one object propagates the
1968 # change to all others. This means if an admin is also an owner, an
1969 # admin_row change would otherwise propagate to both objects
1970 perm_rows = []
1971 for _usr in q.all():
1972 usr = AttributeDict(_usr.user.get_dict())
1973 # if this user is also owner/admin, mark as duplicate record
1974 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1975 usr.duplicate_perm = True
1976 # also check if this permission is maybe used by branch_permissions
1977 if _usr.branch_perm_entry:
1978 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
1979
1980 usr.permission = _usr.permission.permission_name
1981 usr.permission_id = _usr.repo_to_perm_id
1982 perm_rows.append(usr)
1983
1984 # filter the perm rows by 'default' first and then sort them by
1985 # admin,write,read,none permissions sorted again alphabetically in
1986 # each group
1987 perm_rows = sorted(perm_rows, key=display_user_sort)
1988
1989 return super_admin_rows + owner_row + perm_rows
1990
1991 def permission_user_groups(self):
1992 q = UserGroupRepoToPerm.query().filter(
1993 UserGroupRepoToPerm.repository == self)
1994 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1995 joinedload(UserGroupRepoToPerm.users_group),
1996 joinedload(UserGroupRepoToPerm.permission),)
1997
1998 perm_rows = []
1999 for _user_group in q.all():
2000 usr = AttributeDict(_user_group.users_group.get_dict())
2001 usr.permission = _user_group.permission.permission_name
2002 perm_rows.append(usr)
2003
2004 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2005 return perm_rows
2006
2007 def get_api_data(self, include_secrets=False):
2008 """
2009 Common function for generating repo api data
2010
2011 :param include_secrets: See :meth:`User.get_api_data`.
2012
2013 """
2014 # TODO: mikhail: this is an anti-pattern, we probably need to
2015 # move these methods to the model level.
2016 from rhodecode.model.settings import SettingsModel
2017 from rhodecode.model.repo import RepoModel
2018
2019 repo = self
2020 _user_id, _time, _reason = self.locked
2021
2022 data = {
2023 'repo_id': repo.repo_id,
2024 'repo_name': repo.repo_name,
2025 'repo_type': repo.repo_type,
2026 'clone_uri': repo.clone_uri or '',
2027 'push_uri': repo.push_uri or '',
2028 'url': RepoModel().get_url(self),
2029 'private': repo.private,
2030 'created_on': repo.created_on,
2031 'description': repo.description_safe,
2032 'landing_rev': repo.landing_rev,
2033 'owner': repo.user.username,
2034 'fork_of': repo.fork.repo_name if repo.fork else None,
2035 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2036 'enable_statistics': repo.enable_statistics,
2037 'enable_locking': repo.enable_locking,
2038 'enable_downloads': repo.enable_downloads,
2039 'last_changeset': repo.changeset_cache,
2040 'locked_by': User.get(_user_id).get_api_data(
2041 include_secrets=include_secrets) if _user_id else None,
2042 'locked_date': time_to_datetime(_time) if _time else None,
2043 'lock_reason': _reason if _reason else None,
2044 }
2045
2046 # TODO: mikhail: should be per-repo settings here
2047 rc_config = SettingsModel().get_all_settings()
2048 repository_fields = str2bool(
2049 rc_config.get('rhodecode_repository_fields'))
2050 if repository_fields:
2051 for f in self.extra_fields:
2052 data[f.field_key_prefixed] = f.field_value
2053
2054 return data
2055
2056 @classmethod
2057 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2058 if not lock_time:
2059 lock_time = time.time()
2060 if not lock_reason:
2061 lock_reason = cls.LOCK_AUTOMATIC
2062 repo.locked = [user_id, lock_time, lock_reason]
2063 Session().add(repo)
2064 Session().commit()
2065
2066 @classmethod
2067 def unlock(cls, repo):
2068 repo.locked = None
2069 Session().add(repo)
2070 Session().commit()
2071
2072 @classmethod
2073 def getlock(cls, repo):
2074 return repo.locked
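    # a minimal usage sketch for the lock()/unlock()/getlock() classmethods
    # above, assuming `repo` is a Repository instance and user id 2 exists:
    #   Repository.lock(repo, user_id=2, lock_reason='maintenance')
    #   Repository.getlock(repo)   # -> (2, <lock timestamp>, 'maintenance')
    #   Repository.unlock(repo)    # -> repo.locked == [None, None, None]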
2075
2076 def is_user_lock(self, user_id):
2077 if self.locked[0]:
2078 lock_user_id = safe_int(self.locked[0])
2079 user_id = safe_int(user_id)
2080 # both are ints, and they are equal
2081 return all([lock_user_id, user_id]) and lock_user_id == user_id
2082
2083 return False
2084
2085 def get_locking_state(self, action, user_id, only_when_enabled=True):
2086 """
2087 Checks locking on this repository. If locking is enabled and a lock is
2088 present, returns a tuple of make_lock, locked, locked_by.
2089 make_lock is tri-state: None (do nothing), True (make a lock),
2090 False (release the lock). This value is later propagated to hooks, which
2091 do the actual locking. Think of it as a signal telling the hooks what to do.
2092
2093 """
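        # illustrative outcomes of the logic below, assuming locking is enabled
        # on the repo (lock_info is repo.locked):
        #   pull on an unlocked repo (with write perm) -> (True, False, lock_info)   set a new lock
        #   push by the user holding the lock          -> (False, False, lock_info)  release the lock
        #   push by any other user                     -> (None, True, lock_info)    report repo as locked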
2094 # TODO: johbo: This is part of the business logic and should be moved
2095 # into the RepositoryModel.
2096
2097 if action not in ('push', 'pull'):
2098 raise ValueError("Invalid action value: %s" % repr(action))
2099
2100 # defines if a locked error should be thrown to the user
2101 currently_locked = False
2102 # defines if a new lock should be made, tri-state
2103 make_lock = None
2104 repo = self
2105 user = User.get(user_id)
2106
2107 lock_info = repo.locked
2108
2109 if repo and (repo.enable_locking or not only_when_enabled):
2110 if action == 'push':
2111 # check if it's already locked! if it is, compare users
2112 locked_by_user_id = lock_info[0]
2113 if user.user_id == locked_by_user_id:
2114 log.debug(
2115 'Got `push` action from user %s, now unlocking', user)
2116 # unlock if we have push from user who locked
2117 make_lock = False
2118 else:
2119 # we're not the same user who locked the repo, reject with the
2120 # code defined in settings (default is 423 HTTP Locked)
2121 log.debug('Repo %s is currently locked by %s', repo, user)
2122 currently_locked = True
2123 elif action == 'pull':
2124 # [0] user [1] date
2125 if lock_info[0] and lock_info[1]:
2126 log.debug('Repo %s is currently locked by %s', repo, user)
2127 currently_locked = True
2128 else:
2129 log.debug('Setting lock on repo %s by %s', repo, user)
2130 make_lock = True
2131
2132 else:
2133 log.debug('Repository %s does not have locking enabled', repo)
2134
2135 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2136 make_lock, currently_locked, lock_info)
2137
2138 from rhodecode.lib.auth import HasRepoPermissionAny
2139 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2140 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2141 # if we don't have at least write permission we cannot make a lock
2142 log.debug('lock state reset back to FALSE due to lack '
2143 'of at least write permission')
2144 make_lock = False
2145
2146 return make_lock, currently_locked, lock_info
2147
2148 @property
2149 def last_db_change(self):
2150 return self.updated_on
2151
2152 @property
2153 def clone_uri_hidden(self):
2154 clone_uri = self.clone_uri
2155 if clone_uri:
2156 import urlobject
2157 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2158 if url_obj.password:
2159 clone_uri = url_obj.with_password('*****')
2160 return clone_uri
2161
2162 @property
2163 def push_uri_hidden(self):
2164 push_uri = self.push_uri
2165 if push_uri:
2166 import urlobject
2167 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2168 if url_obj.password:
2169 push_uri = url_obj.with_password('*****')
2170 return push_uri
2171
2172 def clone_url(self, **override):
2173 from rhodecode.model.settings import SettingsModel
2174
2175 uri_tmpl = None
2176 if 'with_id' in override:
2177 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2178 del override['with_id']
2179
2180 if 'uri_tmpl' in override:
2181 uri_tmpl = override['uri_tmpl']
2182 del override['uri_tmpl']
2183
2184 ssh = False
2185 if 'ssh' in override:
2186 ssh = True
2187 del override['ssh']
2188
2189 # we didn't override our tmpl from **overrides
2190 if not uri_tmpl:
2191 rc_config = SettingsModel().get_all_settings(cache=True)
2192 if ssh:
2193 uri_tmpl = rc_config.get(
2194 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2195 else:
2196 uri_tmpl = rc_config.get(
2197 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2198
2199 request = get_current_request()
2200 return get_clone_url(request=request,
2201 uri_tmpl=uri_tmpl,
2202 repo_name=self.repo_name,
2203 repo_id=self.repo_id, **override)
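    # usage sketch for clone_url() above; the template string below is only an
    # example value, not one shipped with this module:
    #   repo.clone_url()           # rendered from the configured clone uri template
    #   repo.clone_url(ssh=True)   # rendered from the ssh clone uri template
    #   repo.clone_url(uri_tmpl='{scheme}://{user}@{netloc}/{repo}')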
2204
2205 def set_state(self, state):
2206 self.repo_state = state
2207 Session().add(self)
2208 #==========================================================================
2209 # SCM PROPERTIES
2210 #==========================================================================
2211
2212 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2213 return get_commit_safe(
2214 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2215
2216 def get_changeset(self, rev=None, pre_load=None):
2217 warnings.warn("Use get_commit", DeprecationWarning)
2218 commit_id = None
2219 commit_idx = None
2220 if isinstance(rev, basestring):
2221 commit_id = rev
2222 else:
2223 commit_idx = rev
2224 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2225 pre_load=pre_load)
2226
2227 def get_landing_commit(self):
2228 """
2229 Returns landing commit, or if that doesn't exist returns the tip
2230 """
2231 _rev_type, _rev = self.landing_rev
2232 commit = self.get_commit(_rev)
2233 if isinstance(commit, EmptyCommit):
2234 return self.get_commit()
2235 return commit
2236
2237 def update_commit_cache(self, cs_cache=None, config=None):
2238 """
2239 Update cache of last changeset for repository, keys should be::
2240
2241 short_id
2242 raw_id
2243 revision
2244 parents
2245 message
2246 date
2247 author
2248
2249 :param cs_cache:
2250 """
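        # when cs_cache is given it should be a dict with the keys listed
        # above, e.g. (illustrative values only):
        #   {'short_id': '1e4a2f8', 'raw_id': '1e4a2f8...', 'revision': 42,
        #    'parents': [], 'message': 'fix typo', 'date': <datetime>, 'author': 'dev'}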
2251 from rhodecode.lib.vcs.backends.base import BaseChangeset
2252 if cs_cache is None:
2253 # use no-cache version here
2254 scm_repo = self.scm_instance(cache=False, config=config)
2255
2256 empty = scm_repo.is_empty()
2257 if not empty:
2258 cs_cache = scm_repo.get_commit(
2259 pre_load=["author", "date", "message", "parents"])
2260 else:
2261 cs_cache = EmptyCommit()
2262
2263 if isinstance(cs_cache, BaseChangeset):
2264 cs_cache = cs_cache.__json__()
2265
2266 def is_outdated(new_cs_cache):
2267 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2268 new_cs_cache['revision'] != self.changeset_cache['revision']):
2269 return True
2270 return False
2271
2272 # check if we maybe already have the latest cached revision
2273 if is_outdated(cs_cache) or not self.changeset_cache:
2274 _default = datetime.datetime.utcnow()
2275 last_change = cs_cache.get('date') or _default
2276 if self.updated_on and self.updated_on > last_change:
2277 # we check if the last update is newer than the new value;
2278 # if yes, we use the current timestamp instead. Imagine you get an
2279 # old commit pushed 1 year ago: we'd otherwise set the last update to 1 year ago.
2280 last_change = _default
2281 log.debug('updated repo %s with new cs cache %s',
2282 self.repo_name, cs_cache)
2283 self.updated_on = last_change
2284 self.changeset_cache = cs_cache
2285 Session().add(self)
2286 Session().commit()
2287 else:
2288 log.debug('Skipping update_commit_cache for repo:`%s` '
2289 'commit already with latest changes', self.repo_name)
2290
2291 @property
2292 def tip(self):
2293 return self.get_commit('tip')
2294
2295 @property
2296 def author(self):
2297 return self.tip.author
2298
2299 @property
2300 def last_change(self):
2301 return self.scm_instance().last_change
2302
2303 def get_comments(self, revisions=None):
2304 """
2305 Returns comments for this repository grouped by revisions
2306
2307 :param revisions: filter query by revisions only
2308 """
2309 cmts = ChangesetComment.query()\
2310 .filter(ChangesetComment.repo == self)
2311 if revisions:
2312 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2313 grouped = collections.defaultdict(list)
2314 for cmt in cmts.all():
2315 grouped[cmt.revision].append(cmt)
2316 return grouped
2317
2318 def statuses(self, revisions=None):
2319 """
2320 Returns statuses for this repository
2321
2322 :param revisions: list of revisions to get statuses for
2323 """
2324 statuses = ChangesetStatus.query()\
2325 .filter(ChangesetStatus.repo == self)\
2326 .filter(ChangesetStatus.version == 0)
2327
2328 if revisions:
2329 # Try doing the filtering in chunks to avoid hitting limits
2330 size = 500
2331 status_results = []
2332 for chunk in xrange(0, len(revisions), size):
2333 status_results += statuses.filter(
2334 ChangesetStatus.revision.in_(
2335 revisions[chunk: chunk+size])
2336 ).all()
2337 else:
2338 status_results = statuses.all()
2339
2340 grouped = {}
2341
2342 # maybe we have an open pull request without a status yet?
2343 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2344 status_lbl = ChangesetStatus.get_status_lbl(stat)
2345 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2346 for rev in pr.revisions:
2347 pr_id = pr.pull_request_id
2348 pr_repo = pr.target_repo.repo_name
2349 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2350
2351 for stat in status_results:
2352 pr_id = pr_repo = None
2353 if stat.pull_request:
2354 pr_id = stat.pull_request.pull_request_id
2355 pr_repo = stat.pull_request.target_repo.repo_name
2356 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2357 pr_id, pr_repo]
2358 return grouped
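    # shape of the mapping returned by statuses() above (illustrative):
    #   {'<commit id>': ['<status>', '<status label>',
    #                    <pull_request_id> or None, '<target repo name>' or None]}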
2359
2360 # ==========================================================================
2361 # SCM CACHE INSTANCE
2362 # ==========================================================================
2363
2364 def scm_instance(self, **kwargs):
2365 import rhodecode
2366
2367 # Passing a config will not hit the cache; currently this is only used
2368 # for repo2dbmapper
2369 config = kwargs.pop('config', None)
2370 cache = kwargs.pop('cache', None)
2371 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2372 # if cache is NOT defined use the global default, else we have full
2373 # control over cache behaviour
2374 if cache is None and full_cache and not config:
2375 return self._get_instance_cached()
2376 return self._get_instance(cache=bool(cache), config=config)
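    # behaviour sketch for scm_instance() above:
    #   repo.scm_instance()             # long-term cached instance when vcs_full_cache is on
    #   repo.scm_instance(cache=False)  # fresh instance, bypassing the long-term cache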
2377
2378 def _get_instance_cached(self):
2379 from rhodecode.lib import rc_cache
2380
2381 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2382 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2383 repo_id=self.repo_id)
2384 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2385
2386 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2387 def get_instance_cached(repo_id, context_id):
2388 return self._get_instance()
2389
2390 # we must use a thread scoped cache here,
2391 # because each gevent thread needs its own non-shared connection and cache.
2392 # we also alter `args` so the cache key is individual for every green thread.
2393 inv_context_manager = rc_cache.InvalidationContext(
2394 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2395 thread_scoped=True)
2396 with inv_context_manager as invalidation_context:
2397 args = (self.repo_id, inv_context_manager.cache_key)
2398 # re-compute and store cache if we get invalidate signal
2399 if invalidation_context.should_invalidate():
2400 instance = get_instance_cached.refresh(*args)
2401 else:
2402 instance = get_instance_cached(*args)
2403
2404 log.debug(
2405 'Repo instance fetched in %.3fs', inv_context_manager.compute_time)
2406 return instance
2407
2408 def _get_instance(self, cache=True, config=None):
2409 config = config or self._config
2410 custom_wire = {
2411 'cache': cache # controls the vcs.remote cache
2412 }
2413 repo = get_vcs_instance(
2414 repo_path=safe_str(self.repo_full_path),
2415 config=config,
2416 with_wire=custom_wire,
2417 create=False,
2418 _vcs_alias=self.repo_type)
2419
2420 return repo
2421
2422 def __json__(self):
2423 return {'landing_rev': self.landing_rev}
2424
2425 def get_dict(self):
2426
2427 # Since we transformed `repo_name` to a hybrid property, we need to
2428 # keep compatibility with the code which uses the `repo_name` field.
2429
2430 result = super(Repository, self).get_dict()
2431 result['repo_name'] = result.pop('_repo_name', None)
2432 return result
2433
2434
2435 class RepoGroup(Base, BaseModel):
2436 __tablename__ = 'groups'
2437 __table_args__ = (
2438 UniqueConstraint('group_name', 'group_parent_id'),
2439 CheckConstraint('group_id != group_parent_id'),
2440 base_table_args,
2441 )
2442 __mapper_args__ = {'order_by': 'group_name'}
2443
2444 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2445
2446 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2447 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2448 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2449 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2450 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2451 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2452 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2453 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2454 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2455
2456 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2457 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2458 parent_group = relationship('RepoGroup', remote_side=group_id)
2459 user = relationship('User')
2460 integrations = relationship('Integration',
2461 cascade="all, delete, delete-orphan")
2462
2463 def __init__(self, group_name='', parent_group=None):
2464 self.group_name = group_name
2465 self.parent_group = parent_group
2466
2467 def __unicode__(self):
2468 return u"<%s('id:%s:%s')>" % (
2469 self.__class__.__name__, self.group_id, self.group_name)
2470
2471 @hybrid_property
2472 def description_safe(self):
2473 from rhodecode.lib import helpers as h
2474 return h.escape(self.group_description)
2475
2476 @classmethod
2477 def _generate_choice(cls, repo_group):
2478 from webhelpers.html import literal as _literal
2479 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2480 return repo_group.group_id, _name(repo_group.full_path_splitted)
2481
2482 @classmethod
2483 def groups_choices(cls, groups=None, show_empty_group=True):
2484 if not groups:
2485 groups = cls.query().all()
2486
2487 repo_groups = []
2488 if show_empty_group:
2489 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2490
2491 repo_groups.extend([cls._generate_choice(x) for x in groups])
2492
2493 repo_groups = sorted(
2494 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2495 return repo_groups
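    # illustrative output of groups_choices(), assuming two groups with the
    # hypothetical ids 4 and 7:
    #   [(-1, u'-- No parent --'), (4, u'libs'), (7, u'libs/web')]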
2496
2497 @classmethod
2498 def url_sep(cls):
2499 return URL_SEP
2500
2501 @classmethod
2502 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2503 if case_insensitive:
2504 gr = cls.query().filter(func.lower(cls.group_name)
2505 == func.lower(group_name))
2506 else:
2507 gr = cls.query().filter(cls.group_name == group_name)
2508 if cache:
2509 name_key = _hash_key(group_name)
2510 gr = gr.options(
2511 FromCache("sql_cache_short", "get_group_%s" % name_key))
2512 return gr.scalar()
2513
2514 @classmethod
2515 def get_user_personal_repo_group(cls, user_id):
2516 user = User.get(user_id)
2517 if user.username == User.DEFAULT_USER:
2518 return None
2519
2520 return cls.query()\
2521 .filter(cls.personal == true()) \
2522 .filter(cls.user == user) \
2523 .order_by(cls.group_id.asc()) \
2524 .first()
2525
2526 @classmethod
2527 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2528 case_insensitive=True):
2529 q = RepoGroup.query()
2530
2531 if not isinstance(user_id, Optional):
2532 q = q.filter(RepoGroup.user_id == user_id)
2533
2534 if not isinstance(group_id, Optional):
2535 q = q.filter(RepoGroup.group_parent_id == group_id)
2536
2537 if case_insensitive:
2538 q = q.order_by(func.lower(RepoGroup.group_name))
2539 else:
2540 q = q.order_by(RepoGroup.group_name)
2541 return q.all()
2542
2543 @property
2544 def parents(self):
2545 parents_recursion_limit = 10
2546 groups = []
2547 if self.parent_group is None:
2548 return groups
2549 cur_gr = self.parent_group
2550 groups.insert(0, cur_gr)
2551 cnt = 0
2552 while 1:
2553 cnt += 1
2554 gr = getattr(cur_gr, 'parent_group', None)
2555 cur_gr = cur_gr.parent_group
2556 if gr is None:
2557 break
2558 if cnt == parents_recursion_limit:
2559 # this will prevent accidental infinite loops
2560 log.error('more than %s parents found for group %s, stopping '
2561 'recursive parent fetching', parents_recursion_limit, self)
2562 break
2563
2564 groups.insert(0, gr)
2565 return groups
2566
2567 @property
2568 def last_db_change(self):
2569 return self.updated_on
2570
2571 @property
2572 def children(self):
2573 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2574
2575 @property
2576 def name(self):
2577 return self.group_name.split(RepoGroup.url_sep())[-1]
2578
2579 @property
2580 def full_path(self):
2581 return self.group_name
2582
2583 @property
2584 def full_path_splitted(self):
2585 return self.group_name.split(RepoGroup.url_sep())
2586
2587 @property
2588 def repositories(self):
2589 return Repository.query()\
2590 .filter(Repository.group == self)\
2591 .order_by(Repository.repo_name)
2592
2593 @property
2594 def repositories_recursive_count(self):
2595 cnt = self.repositories.count()
2596
2597 def children_count(group):
2598 cnt = 0
2599 for child in group.children:
2600 cnt += child.repositories.count()
2601 cnt += children_count(child)
2602 return cnt
2603
2604 return cnt + children_count(self)
2605
2606 def _recursive_objects(self, include_repos=True):
2607 all_ = []
2608
2609 def _get_members(root_gr):
2610 if include_repos:
2611 for r in root_gr.repositories:
2612 all_.append(r)
2613 childs = root_gr.children.all()
2614 if childs:
2615 for gr in childs:
2616 all_.append(gr)
2617 _get_members(gr)
2618
2619 _get_members(self)
2620 return [self] + all_
2621
2622 def recursive_groups_and_repos(self):
2623 """
2624 Recursively return all groups, with the repositories in those groups
2625 """
2626 return self._recursive_objects()
2627
2628 def recursive_groups(self):
2629 """
2630 Returns all child groups of this group, including children of children
2631 """
2632 return self._recursive_objects(include_repos=False)
2633
2634 def get_new_name(self, group_name):
2635 """
2636 returns new full group name based on parent and new name
2637
2638 :param group_name:
2639 """
2640 path_prefix = (self.parent_group.full_path_splitted if
2641 self.parent_group else [])
2642 return RepoGroup.url_sep().join(path_prefix + [group_name])
2643
2644 def permissions(self, with_admins=True, with_owner=True):
2645 """
2646 Permissions for repository groups
2647 """
2648 _admin_perm = 'group.admin'
2649
2650 owner_row = []
2651 if with_owner:
2652 usr = AttributeDict(self.user.get_dict())
2653 usr.owner_row = True
2654 usr.permission = _admin_perm
2655 owner_row.append(usr)
2656
2657 super_admin_ids = []
2658 super_admin_rows = []
2659 if with_admins:
2660 for usr in User.get_all_super_admins():
2661 super_admin_ids.append(usr.user_id)
2662 # if this admin is also owner, don't double the record
2663 if usr.user_id == owner_row[0].user_id:
2664 owner_row[0].admin_row = True
2665 else:
2666 usr = AttributeDict(usr.get_dict())
2667 usr.admin_row = True
2668 usr.permission = _admin_perm
2669 super_admin_rows.append(usr)
2670
2671 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2672 q = q.options(joinedload(UserRepoGroupToPerm.group),
2673 joinedload(UserRepoGroupToPerm.user),
2674 joinedload(UserRepoGroupToPerm.permission),)
2675
2676 # get owners and admins and permissions. We re-write the sqlalchemy
2677 # objects into detached copies (AttributeDict) because the sqlalchemy
2678 # session keeps a global reference and changing one object propagates the
2679 # change to all others. This means if an admin is also an owner, an
2680 # admin_row change would otherwise propagate to both objects
2681 perm_rows = []
2682 for _usr in q.all():
2683 usr = AttributeDict(_usr.user.get_dict())
2684 # if this user is also owner/admin, mark as duplicate record
2685 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2686 usr.duplicate_perm = True
2687 usr.permission = _usr.permission.permission_name
2688 perm_rows.append(usr)
2689
2690 # filter the perm rows by 'default' first and then sort them by
2691 # admin,write,read,none permissions sorted again alphabetically in
2692 # each group
2693 perm_rows = sorted(perm_rows, key=display_user_sort)
2694
2695 return super_admin_rows + owner_row + perm_rows
2696
2697 def permission_user_groups(self):
2698 q = UserGroupRepoGroupToPerm.query().filter(
2699 UserGroupRepoGroupToPerm.group == self)
2700 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2701 joinedload(UserGroupRepoGroupToPerm.users_group),
2702 joinedload(UserGroupRepoGroupToPerm.permission),)
2703
2704 perm_rows = []
2705 for _user_group in q.all():
2706 usr = AttributeDict(_user_group.users_group.get_dict())
2707 usr.permission = _user_group.permission.permission_name
2708 perm_rows.append(usr)
2709
2710 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2711 return perm_rows
2712
2713 def get_api_data(self):
2714 """
2715 Common function for generating api data
2716
2717 """
2718 group = self
2719 data = {
2720 'group_id': group.group_id,
2721 'group_name': group.group_name,
2722 'group_description': group.description_safe,
2723 'parent_group': group.parent_group.group_name if group.parent_group else None,
2724 'repositories': [x.repo_name for x in group.repositories],
2725 'owner': group.user.username,
2726 }
2727 return data
2728
2729
2730 class Permission(Base, BaseModel):
2731 __tablename__ = 'permissions'
2732 __table_args__ = (
2733 Index('p_perm_name_idx', 'permission_name'),
2734 base_table_args,
2735 )
2736
2737 PERMS = [
2738 ('hg.admin', _('RhodeCode Super Administrator')),
2739
2740 ('repository.none', _('Repository no access')),
2741 ('repository.read', _('Repository read access')),
2742 ('repository.write', _('Repository write access')),
2743 ('repository.admin', _('Repository admin access')),
2744
2745 ('group.none', _('Repository group no access')),
2746 ('group.read', _('Repository group read access')),
2747 ('group.write', _('Repository group write access')),
2748 ('group.admin', _('Repository group admin access')),
2749
2750 ('usergroup.none', _('User group no access')),
2751 ('usergroup.read', _('User group read access')),
2752 ('usergroup.write', _('User group write access')),
2753 ('usergroup.admin', _('User group admin access')),
2754
2755 ('branch.none', _('Branch no permissions')),
2756 ('branch.merge', _('Branch access by web merge')),
2757 ('branch.push', _('Branch access by push')),
2758 ('branch.push_force', _('Branch access by push with force')),
2759
2760 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2761 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2762
2763 ('hg.usergroup.create.false', _('User Group creation disabled')),
2764 ('hg.usergroup.create.true', _('User Group creation enabled')),
2765
2766 ('hg.create.none', _('Repository creation disabled')),
2767 ('hg.create.repository', _('Repository creation enabled')),
2768 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2769 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2770
2771 ('hg.fork.none', _('Repository forking disabled')),
2772 ('hg.fork.repository', _('Repository forking enabled')),
2773
2774 ('hg.register.none', _('Registration disabled')),
2775 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2776 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2777
2778 ('hg.password_reset.enabled', _('Password reset enabled')),
2779 ('hg.password_reset.hidden', _('Password reset hidden')),
2780 ('hg.password_reset.disabled', _('Password reset disabled')),
2781
2782 ('hg.extern_activate.manual', _('Manual activation of external account')),
2783 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2784
2785 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2786 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2787 ]
2788
2789 # definition of system default permissions for DEFAULT user, created on
2790 # system setup
2791 DEFAULT_USER_PERMISSIONS = [
2792 # object perms
2793 'repository.read',
2794 'group.read',
2795 'usergroup.read',
2796 # branch, for backward compat we need the same value as before, so force push
2797 'branch.push_force',
2798 # global
2799 'hg.create.repository',
2800 'hg.repogroup.create.false',
2801 'hg.usergroup.create.false',
2802 'hg.create.write_on_repogroup.true',
2803 'hg.fork.repository',
2804 'hg.register.manual_activate',
2805 'hg.password_reset.enabled',
2806 'hg.extern_activate.auto',
2807 'hg.inherit_default_perms.true',
2808 ]
2809
2811 # Weight defines which permissions are more important.
2812 # The higher the number, the more important the permission.
2813 PERM_WEIGHTS = {
2814 'repository.none': 0,
2815 'repository.read': 1,
2816 'repository.write': 3,
2817 'repository.admin': 4,
2818
2819 'group.none': 0,
2820 'group.read': 1,
2821 'group.write': 3,
2822 'group.admin': 4,
2823
2824 'usergroup.none': 0,
2825 'usergroup.read': 1,
2826 'usergroup.write': 3,
2827 'usergroup.admin': 4,
2828
2829 'branch.none': 0,
2830 'branch.merge': 1,
2831 'branch.push': 3,
2832 'branch.push_force': 4,
2833
2834 'hg.repogroup.create.false': 0,
2835 'hg.repogroup.create.true': 1,
2836
2837 'hg.usergroup.create.false': 0,
2838 'hg.usergroup.create.true': 1,
2839
2840 'hg.fork.none': 0,
2841 'hg.fork.repository': 1,
2842 'hg.create.none': 0,
2843 'hg.create.repository': 1
2844 }
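    # a minimal sketch of how the weights can be used, e.g. picking the
    # strongest of two permissions:
    #   max(['repository.read', 'repository.write'], key=PERM_WEIGHTS.get)
    #   # -> 'repository.write'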
2845
2846 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2847 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2848 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2849
2850 def __unicode__(self):
2851 return u"<%s('%s:%s')>" % (
2852 self.__class__.__name__, self.permission_id, self.permission_name
2853 )
2854
2855 @classmethod
2856 def get_by_key(cls, key):
2857 return cls.query().filter(cls.permission_name == key).scalar()
2858
2859 @classmethod
2860 def get_default_repo_perms(cls, user_id, repo_id=None):
2861 q = Session().query(UserRepoToPerm, Repository, Permission)\
2862 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2863 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2864 .filter(UserRepoToPerm.user_id == user_id)
2865 if repo_id:
2866 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2867 return q.all()
2868
2869 @classmethod
2870 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
2871 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
2872 .join(
2873 Permission,
2874 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
2875 .join(
2876 UserRepoToPerm,
2877 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
2878 .filter(UserRepoToPerm.user_id == user_id)
2879
2880 if repo_id:
2881 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
2882 return q.order_by(UserToRepoBranchPermission.rule_order).all()
2883
2884 @classmethod
2885 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2886 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2887 .join(
2888 Permission,
2889 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2890 .join(
2891 Repository,
2892 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2893 .join(
2894 UserGroup,
2895 UserGroupRepoToPerm.users_group_id ==
2896 UserGroup.users_group_id)\
2897 .join(
2898 UserGroupMember,
2899 UserGroupRepoToPerm.users_group_id ==
2900 UserGroupMember.users_group_id)\
2901 .filter(
2902 UserGroupMember.user_id == user_id,
2903 UserGroup.users_group_active == true())
2904 if repo_id:
2905 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2906 return q.all()
2907
2908 @classmethod
2909 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
2910 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
2911 .join(
2912 Permission,
2913 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
2914 .join(
2915 UserGroupRepoToPerm,
2916 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
2917 .join(
2918 UserGroup,
2919 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
2920 .join(
2921 UserGroupMember,
2922 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
2923 .filter(
2924 UserGroupMember.user_id == user_id,
2925 UserGroup.users_group_active == true())
2926
2927 if repo_id:
2928 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
2929 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
2930
2931 @classmethod
2932 def get_default_group_perms(cls, user_id, repo_group_id=None):
2933 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2934 .join(
2935 Permission,
2936 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
2937 .join(
2938 RepoGroup,
2939 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
2940 .filter(UserRepoGroupToPerm.user_id == user_id)
2941 if repo_group_id:
2942 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2943 return q.all()
2944
2945 @classmethod
2946 def get_default_group_perms_from_user_group(
2947 cls, user_id, repo_group_id=None):
2948 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2949 .join(
2950 Permission,
2951 UserGroupRepoGroupToPerm.permission_id ==
2952 Permission.permission_id)\
2953 .join(
2954 RepoGroup,
2955 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2956 .join(
2957 UserGroup,
2958 UserGroupRepoGroupToPerm.users_group_id ==
2959 UserGroup.users_group_id)\
2960 .join(
2961 UserGroupMember,
2962 UserGroupRepoGroupToPerm.users_group_id ==
2963 UserGroupMember.users_group_id)\
2964 .filter(
2965 UserGroupMember.user_id == user_id,
2966 UserGroup.users_group_active == true())
2967 if repo_group_id:
2968 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2969 return q.all()
2970
2971 @classmethod
2972 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2973 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2974 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2975 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2976 .filter(UserUserGroupToPerm.user_id == user_id)
2977 if user_group_id:
2978 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2979 return q.all()
2980
2981 @classmethod
2982 def get_default_user_group_perms_from_user_group(
2983 cls, user_id, user_group_id=None):
2984 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2985 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2986 .join(
2987 Permission,
2988 UserGroupUserGroupToPerm.permission_id ==
2989 Permission.permission_id)\
2990 .join(
2991 TargetUserGroup,
2992 UserGroupUserGroupToPerm.target_user_group_id ==
2993 TargetUserGroup.users_group_id)\
2994 .join(
2995 UserGroup,
2996 UserGroupUserGroupToPerm.user_group_id ==
2997 UserGroup.users_group_id)\
2998 .join(
2999 UserGroupMember,
3000 UserGroupUserGroupToPerm.user_group_id ==
3001 UserGroupMember.users_group_id)\
3002 .filter(
3003 UserGroupMember.user_id == user_id,
3004 UserGroup.users_group_active == true())
3005 if user_group_id:
3006 q = q.filter(
3007 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3008
3009 return q.all()
3010
3011
3012 class UserRepoToPerm(Base, BaseModel):
3013 __tablename__ = 'repo_to_perm'
3014 __table_args__ = (
3015 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3016 base_table_args
3017 )
3018
3019 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3020 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3021 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3022 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3023
3024 user = relationship('User')
3025 repository = relationship('Repository')
3026 permission = relationship('Permission')
3027
3028 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete, delete-orphan", lazy='joined')
3029
3030 @classmethod
3031 def create(cls, user, repository, permission):
3032 n = cls()
3033 n.user = user
3034 n.repository = repository
3035 n.permission = permission
3036 Session().add(n)
3037 return n
3038
3039 def __unicode__(self):
3040 return u'<%s => %s >' % (self.user, self.repository)
3041
3042
3043 class UserUserGroupToPerm(Base, BaseModel):
3044 __tablename__ = 'user_user_group_to_perm'
3045 __table_args__ = (
3046 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3047 base_table_args
3048 )
3049
3050 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3051 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3052 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3053 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3054
3055 user = relationship('User')
3056 user_group = relationship('UserGroup')
3057 permission = relationship('Permission')
3058
3059 @classmethod
3060 def create(cls, user, user_group, permission):
3061 n = cls()
3062 n.user = user
3063 n.user_group = user_group
3064 n.permission = permission
3065 Session().add(n)
3066 return n
3067
3068 def __unicode__(self):
3069 return u'<%s => %s >' % (self.user, self.user_group)
3070
3071
3072 class UserToPerm(Base, BaseModel):
3073 __tablename__ = 'user_to_perm'
3074 __table_args__ = (
3075 UniqueConstraint('user_id', 'permission_id'),
3076 base_table_args
3077 )
3078
3079 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3080 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3081 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3082
3083 user = relationship('User')
3084 permission = relationship('Permission', lazy='joined')
3085
3086 def __unicode__(self):
3087 return u'<%s => %s >' % (self.user, self.permission)
3088
3089
3090 class UserGroupRepoToPerm(Base, BaseModel):
3091 __tablename__ = 'users_group_repo_to_perm'
3092 __table_args__ = (
3093 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3094 base_table_args
3095 )
3096
3097 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3098 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3099 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3100 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3101
3102 users_group = relationship('UserGroup')
3103 permission = relationship('Permission')
3104 repository = relationship('Repository')
3105 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3106
3107 @classmethod
3108 def create(cls, users_group, repository, permission):
3109 n = cls()
3110 n.users_group = users_group
3111 n.repository = repository
3112 n.permission = permission
3113 Session().add(n)
3114 return n
3115
3116 def __unicode__(self):
3117 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3118
3119
3120 class UserGroupUserGroupToPerm(Base, BaseModel):
3121 __tablename__ = 'user_group_user_group_to_perm'
3122 __table_args__ = (
3123 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3124 CheckConstraint('target_user_group_id != user_group_id'),
3125 base_table_args
3126 )
3127
3128 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3129 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3130 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3131 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3132
3133 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3134 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3135 permission = relationship('Permission')
3136
3137 @classmethod
3138 def create(cls, target_user_group, user_group, permission):
3139 n = cls()
3140 n.target_user_group = target_user_group
3141 n.user_group = user_group
3142 n.permission = permission
3143 Session().add(n)
3144 return n
3145
3146 def __unicode__(self):
3147 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3148
3149
3150 class UserGroupToPerm(Base, BaseModel):
3151 __tablename__ = 'users_group_to_perm'
3152 __table_args__ = (
3153 UniqueConstraint('users_group_id', 'permission_id',),
3154 base_table_args
3155 )
3156
3157 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3158 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3159 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3160
3161 users_group = relationship('UserGroup')
3162 permission = relationship('Permission')
3163
3164
3165 class UserRepoGroupToPerm(Base, BaseModel):
3166 __tablename__ = 'user_repo_group_to_perm'
3167 __table_args__ = (
3168 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3169 base_table_args
3170 )
3171
3172 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3173 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3174 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3175 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3176
3177 user = relationship('User')
3178 group = relationship('RepoGroup')
3179 permission = relationship('Permission')
3180
3181 @classmethod
3182 def create(cls, user, repository_group, permission):
3183 n = cls()
3184 n.user = user
3185 n.group = repository_group
3186 n.permission = permission
3187 Session().add(n)
3188 return n
3189
3190
3191 class UserGroupRepoGroupToPerm(Base, BaseModel):
3192 __tablename__ = 'users_group_repo_group_to_perm'
3193 __table_args__ = (
3194 UniqueConstraint('users_group_id', 'group_id'),
3195 base_table_args
3196 )
3197
3198 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3199 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3200 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3201 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3202
3203 users_group = relationship('UserGroup')
3204 permission = relationship('Permission')
3205 group = relationship('RepoGroup')
3206
3207 @classmethod
3208 def create(cls, user_group, repository_group, permission):
3209 n = cls()
3210 n.users_group = user_group
3211 n.group = repository_group
3212 n.permission = permission
3213 Session().add(n)
3214 return n
3215
3216 def __unicode__(self):
3217 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3218
3219
3220 class Statistics(Base, BaseModel):
3221 __tablename__ = 'statistics'
3222 __table_args__ = (
3223 base_table_args
3224 )
3225
3226 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3227 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3228 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3229 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3230 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3231 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3232
3233 repository = relationship('Repository', single_parent=True)
3234
3235
3236 class UserFollowing(Base, BaseModel):
3237 __tablename__ = 'user_followings'
3238 __table_args__ = (
3239 UniqueConstraint('user_id', 'follows_repository_id'),
3240 UniqueConstraint('user_id', 'follows_user_id'),
3241 base_table_args
3242 )
3243
3244 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3245 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3246 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3247 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3248 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3249
3250 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3251
3252 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3253 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3254
3255 @classmethod
3256 def get_repo_followers(cls, repo_id):
3257 return cls.query().filter(cls.follows_repo_id == repo_id)
3258
3259
3260 class CacheKey(Base, BaseModel):
3261 __tablename__ = 'cache_invalidation'
3262 __table_args__ = (
3263 UniqueConstraint('cache_key'),
3264 Index('key_idx', 'cache_key'),
3265 base_table_args,
3266 )
3267
3268 CACHE_TYPE_FEED = 'FEED'
3269 CACHE_TYPE_README = 'README'
3270 # namespaces used to register process/thread aware caches
3271 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3272 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3273
3274 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3275 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3276 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3277 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3278
3279 def __init__(self, cache_key, cache_args=''):
3280 self.cache_key = cache_key
3281 self.cache_args = cache_args
3282 self.cache_active = False
3283
3284 def __unicode__(self):
3285 return u"<%s('%s:%s[%s]')>" % (
3286 self.__class__.__name__,
3287 self.cache_id, self.cache_key, self.cache_active)
3288
3289 def _cache_key_partition(self):
3290 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3291 return prefix, repo_name, suffix
3292
3293 def get_prefix(self):
3294 """
3295 Try to extract prefix from existing cache key. The key could consist
3296 of prefix, repo_name, suffix
3297 """
3298 # this returns prefix, repo_name, suffix
3299 return self._cache_key_partition()[0]
3300
3301 def get_suffix(self):
3302 """
3303 get suffix that might have been used in _get_cache_key to
3304 generate self.cache_key. Only used for informational purposes
3305 in repo_edit.mako.
3306 """
3307 # prefix, repo_name, suffix
3308 return self._cache_key_partition()[2]
3309
3310 @classmethod
3311 def delete_all_cache(cls):
3312 """
3313 Delete all cache keys from the database.
3314 Should only be run when all instances are down and all entries
3315 are thus stale.
3316 """
3317 cls.query().delete()
3318 Session().commit()
3319
3320 @classmethod
3321 def set_invalidate(cls, cache_uid, delete=False):
3322 """
3323 Mark all caches of a repo as invalid in the database.
3324 """
3325
3326 try:
3327 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3328 if delete:
3329 qry.delete()
3330 log.debug('cache objects deleted for cache args %s',
3331 safe_str(cache_uid))
3332 else:
3333 qry.update({"cache_active": False})
3334 log.debug('cache objects marked as invalid for cache args %s',
3335 safe_str(cache_uid))
3336
3337 Session().commit()
3338 except Exception:
3339 log.exception(
3340 'Cache key invalidation failed for cache args %s',
3341 safe_str(cache_uid))
3342 Session().rollback()
3343
3344 @classmethod
3345 def get_active_cache(cls, cache_key):
3346 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3347 if inv_obj:
3348 return inv_obj
3349 return None
3350
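def _example_invalidate_repo_caches(repo_id, delete=False):
    """
    Minimal usage sketch for the invalidation helpers above. It assumes
    `repo_id` belongs to an existing Repository and that the cache entries
    were registered with REPO_INVALIDATION_NAMESPACE as their cache_args.
    """
    namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
    # marks matching entries inactive, or drops them entirely with delete=True
    CacheKey.set_invalidate(namespace, delete=delete)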
3351
3352 class ChangesetComment(Base, BaseModel):
3353 __tablename__ = 'changeset_comments'
3354 __table_args__ = (
3355 Index('cc_revision_idx', 'revision'),
3356 base_table_args,
3357 )
3358
3359 COMMENT_OUTDATED = u'comment_outdated'
3360 COMMENT_TYPE_NOTE = u'note'
3361 COMMENT_TYPE_TODO = u'todo'
3362 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3363
3364 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3365 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3366 revision = Column('revision', String(40), nullable=True)
3367 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3368 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3369 line_no = Column('line_no', Unicode(10), nullable=True)
3370 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3371 f_path = Column('f_path', Unicode(1000), nullable=True)
3372 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3373 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3374 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3375 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3376 renderer = Column('renderer', Unicode(64), nullable=True)
3377 display_state = Column('display_state', Unicode(128), nullable=True)
3378
3379 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3380 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3381
3382 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3383 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3384
3385 author = relationship('User', lazy='joined')
3386 repo = relationship('Repository')
3387 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3388 pull_request = relationship('PullRequest', lazy='joined')
3389 pull_request_version = relationship('PullRequestVersion')
3390
3391 @classmethod
3392 def get_users(cls, revision=None, pull_request_id=None):
3393 """
3394 Returns users associated with this ChangesetComment, i.e. those
3395 who actually commented
3396
3397 :param revision:
3398 :param pull_request_id:
3399 """
3400 q = Session().query(User)\
3401 .join(ChangesetComment.author)
3402 if revision:
3403 q = q.filter(cls.revision == revision)
3404 elif pull_request_id:
3405 q = q.filter(cls.pull_request_id == pull_request_id)
3406 return q.all()
3407
3408 @classmethod
3409 def get_index_from_version(cls, pr_version, versions):
3410 num_versions = [x.pull_request_version_id for x in versions]
3411 try:
3412 return num_versions.index(pr_version) + 1
3413 except (IndexError, ValueError):
3414 return
3415
3416 @property
3417 def outdated(self):
3418 return self.display_state == self.COMMENT_OUTDATED
3419
3420 def outdated_at_version(self, version):
3421 """
3422 Checks if comment is outdated for given pull request version
3423 """
3424 return self.outdated and self.pull_request_version_id != version
3425
3426 def older_than_version(self, version):
3427 """
3428 Checks if the comment was made in an older version than the given one
3429 """
3430 if version is None:
3431 return self.pull_request_version_id is not None
3432
3433 return self.pull_request_version_id < version
3434
3435 @property
3436 def resolved(self):
3437 return self.resolved_by[0] if self.resolved_by else None
3438
3439 @property
3440 def is_todo(self):
3441 return self.comment_type == self.COMMENT_TYPE_TODO
3442
3443 @property
3444 def is_inline(self):
3445 return self.line_no and self.f_path
3446
3447 def get_index_version(self, versions):
3448 return self.get_index_from_version(
3449 self.pull_request_version_id, versions)
3450
3451 def __repr__(self):
3452 if self.comment_id:
3453 return '<DB:Comment #%s>' % self.comment_id
3454 else:
3455 return '<DB:Comment at %#x>' % id(self)
3456
3457 def get_api_data(self):
3458 comment = self
3459 data = {
3460 'comment_id': comment.comment_id,
3461 'comment_type': comment.comment_type,
3462 'comment_text': comment.text,
3463 'comment_status': comment.status_change,
3464 'comment_f_path': comment.f_path,
3465 'comment_lineno': comment.line_no,
3466 'comment_author': comment.author,
3467 'comment_created_on': comment.created_on
3468 }
3469 return data
3470
3471 def __json__(self):
3472 data = dict()
3473 data.update(self.get_api_data())
3474 return data
3475
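def _example_comment_version_checks(comment, version_id):
    """
    Illustrative sketch of the version helpers above. `version_id` is assumed
    to be the pull_request_version_id currently being displayed (None for the
    latest, un-versioned view of the pull request).
    """
    return {
        # flagged outdated and not made at the version we are looking at
        'outdated_here': comment.outdated_at_version(version_id),
        # made at an older version than the one given
        'from_older_version': comment.older_than_version(version_id),
    }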
3476
3477 class ChangesetStatus(Base, BaseModel):
3478 __tablename__ = 'changeset_statuses'
3479 __table_args__ = (
3480 Index('cs_revision_idx', 'revision'),
3481 Index('cs_version_idx', 'version'),
3482 UniqueConstraint('repo_id', 'revision', 'version'),
3483 base_table_args
3484 )
3485
3486 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3487 STATUS_APPROVED = 'approved'
3488 STATUS_REJECTED = 'rejected'
3489 STATUS_UNDER_REVIEW = 'under_review'
3490
3491 STATUSES = [
3492 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3493 (STATUS_APPROVED, _("Approved")),
3494 (STATUS_REJECTED, _("Rejected")),
3495 (STATUS_UNDER_REVIEW, _("Under Review")),
3496 ]
3497
3498 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3499 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3500 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3501 revision = Column('revision', String(40), nullable=False)
3502 status = Column('status', String(128), nullable=False, default=DEFAULT)
3503 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3504 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3505 version = Column('version', Integer(), nullable=False, default=0)
3506 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3507
3508 author = relationship('User', lazy='joined')
3509 repo = relationship('Repository')
3510 comment = relationship('ChangesetComment', lazy='joined')
3511 pull_request = relationship('PullRequest', lazy='joined')
3512
3513 def __unicode__(self):
3514 return u"<%s('%s[v%s]:%s')>" % (
3515 self.__class__.__name__,
3516 self.status, self.version, self.author
3517 )
3518
3519 @classmethod
3520 def get_status_lbl(cls, value):
3521 return dict(cls.STATUSES).get(value)
3522
3523 @property
3524 def status_lbl(self):
3525 return ChangesetStatus.get_status_lbl(self.status)
3526
3527 def get_api_data(self):
3528 status = self
3529 data = {
3530 'status_id': status.changeset_status_id,
3531 'status': status.status,
3532 }
3533 return data
3534
3535 def __json__(self):
3536 data = dict()
3537 data.update(self.get_api_data())
3538 return data
3539
3540
3541 class _PullRequestBase(BaseModel):
3542 """
3543 Common attributes of pull request and version entries.
3544 """
3545
3546 # .status values
3547 STATUS_NEW = u'new'
3548 STATUS_OPEN = u'open'
3549 STATUS_CLOSED = u'closed'
3550
3551 # available states
3552 STATE_CREATING = u'creating'
3553 STATE_UPDATING = u'updating'
3554 STATE_MERGING = u'merging'
3555 STATE_CREATED = u'created'
3556
3557 title = Column('title', Unicode(255), nullable=True)
3558 description = Column(
3559 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3560 nullable=True)
3561 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3562
3563 # new/open/closed status of pull request (not approve/reject/etc)
3564 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3565 created_on = Column(
3566 'created_on', DateTime(timezone=False), nullable=False,
3567 default=datetime.datetime.now)
3568 updated_on = Column(
3569 'updated_on', DateTime(timezone=False), nullable=False,
3570 default=datetime.datetime.now)
3571
3572 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3573
3574 @declared_attr
3575 def user_id(cls):
3576 return Column(
3577 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3578 unique=None)
3579
3580 # 500 revisions max
3581 _revisions = Column(
3582 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3583
3584 @declared_attr
3585 def source_repo_id(cls):
3586 # TODO: dan: rename column to source_repo_id
3587 return Column(
3588 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3589 nullable=False)
3590
3591 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3592
3593 @hybrid_property
3594 def source_ref(self):
3595 return self._source_ref
3596
3597 @source_ref.setter
3598 def source_ref(self, val):
3599 parts = (val or '').split(':')
3600 if len(parts) != 3:
3601 raise ValueError(
3602 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3603 self._source_ref = safe_unicode(val)
3604
3605 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3606
3607 @hybrid_property
3608 def target_ref(self):
3609 return self._target_ref
3610
3611 @target_ref.setter
3612 def target_ref(self, val):
3613 parts = (val or '').split(':')
3614 if len(parts) != 3:
3615 raise ValueError(
3616 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3617 self._target_ref = safe_unicode(val)
3618
3619 @declared_attr
3620 def target_repo_id(cls):
3621 # TODO: dan: rename column to target_repo_id
3622 return Column(
3623 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3624 nullable=False)
3625
3626 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3627
3628 # TODO: dan: rename column to last_merge_source_rev
3629 _last_merge_source_rev = Column(
3630 'last_merge_org_rev', String(40), nullable=True)
3631 # TODO: dan: rename column to last_merge_target_rev
3632 _last_merge_target_rev = Column(
3633 'last_merge_other_rev', String(40), nullable=True)
3634 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3635 merge_rev = Column('merge_rev', String(40), nullable=True)
3636
3637 reviewer_data = Column(
3638 'reviewer_data_json', MutationObj.as_mutable(
3639 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3640
3641 @property
3642 def reviewer_data_json(self):
3643 return json.dumps(self.reviewer_data)
3644
3645 @hybrid_property
3646 def description_safe(self):
3647 from rhodecode.lib import helpers as h
3648 return h.escape(self.description)
3649
3650 @hybrid_property
3651 def revisions(self):
3652 return self._revisions.split(':') if self._revisions else []
3653
3654 @revisions.setter
3655 def revisions(self, val):
3656 self._revisions = ':'.join(val)
3657
3658 @hybrid_property
3659 def last_merge_status(self):
3660 return safe_int(self._last_merge_status)
3661
3662 @last_merge_status.setter
3663 def last_merge_status(self, val):
3664 self._last_merge_status = val
3665
3666 @declared_attr
3667 def author(cls):
3668 return relationship('User', lazy='joined')
3669
3670 @declared_attr
3671 def source_repo(cls):
3672 return relationship(
3673 'Repository',
3674 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3675
3676 @property
3677 def source_ref_parts(self):
3678 return self.unicode_to_reference(self.source_ref)
3679
3680 @declared_attr
3681 def target_repo(cls):
3682 return relationship(
3683 'Repository',
3684 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3685
3686 @property
3687 def target_ref_parts(self):
3688 return self.unicode_to_reference(self.target_ref)
3689
3690 @property
3691 def shadow_merge_ref(self):
3692 return self.unicode_to_reference(self._shadow_merge_ref)
3693
3694 @shadow_merge_ref.setter
3695 def shadow_merge_ref(self, ref):
3696 self._shadow_merge_ref = self.reference_to_unicode(ref)
3697
3698 @staticmethod
3699 def unicode_to_reference(raw):
3700 """
3701 Convert a unicode (or string) to a reference object.
3702 If the given value evaluates to False, it returns None.
3703 """
3704 if raw:
3705 refs = raw.split(':')
3706 return Reference(*refs)
3707 else:
3708 return None
3709
3710 @staticmethod
3711 def reference_to_unicode(ref):
3712 """
3713 Convert a reference object to unicode.
3714 If reference is None it returns None.
3715 """
3716 if ref:
3717 return u':'.join(ref)
3718 else:
3719 return None
3720
3721 def get_api_data(self, with_merge_state=True):
3722 from rhodecode.model.pull_request import PullRequestModel
3723
3724 pull_request = self
3725 if with_merge_state:
3726 merge_status = PullRequestModel().merge_status(pull_request)
3727 merge_state = {
3728 'status': merge_status[0],
3729 'message': safe_unicode(merge_status[1]),
3730 }
3731 else:
3732 merge_state = {'status': 'not_available',
3733 'message': 'not_available'}
3734
3735 merge_data = {
3736 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3737 'reference': (
3738 pull_request.shadow_merge_ref._asdict()
3739 if pull_request.shadow_merge_ref else None),
3740 }
3741
3742 data = {
3743 'pull_request_id': pull_request.pull_request_id,
3744 'url': PullRequestModel().get_url(pull_request),
3745 'title': pull_request.title,
3746 'description': pull_request.description,
3747 'status': pull_request.status,
3748 'created_on': pull_request.created_on,
3749 'updated_on': pull_request.updated_on,
3750 'commit_ids': pull_request.revisions,
3751 'review_status': pull_request.calculated_review_status(),
3752 'mergeable': merge_state,
3753 'source': {
3754 'clone_url': pull_request.source_repo.clone_url(),
3755 'repository': pull_request.source_repo.repo_name,
3756 'reference': {
3757 'name': pull_request.source_ref_parts.name,
3758 'type': pull_request.source_ref_parts.type,
3759 'commit_id': pull_request.source_ref_parts.commit_id,
3760 },
3761 },
3762 'target': {
3763 'clone_url': pull_request.target_repo.clone_url(),
3764 'repository': pull_request.target_repo.repo_name,
3765 'reference': {
3766 'name': pull_request.target_ref_parts.name,
3767 'type': pull_request.target_ref_parts.type,
3768 'commit_id': pull_request.target_ref_parts.commit_id,
3769 },
3770 },
3771 'merge': merge_data,
3772 'author': pull_request.author.get_api_data(include_secrets=False,
3773 details='basic'),
3774 'reviewers': [
3775 {
3776 'user': reviewer.get_api_data(include_secrets=False,
3777 details='basic'),
3778 'reasons': reasons,
3779 'review_status': st[0][1].status if st else 'not_reviewed',
3780 }
3781 for obj, reviewer, reasons, mandatory, st in
3782 pull_request.reviewers_statuses()
3783 ]
3784 }
3785
3786 return data
3787
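def _example_set_source_ref(pull_request):
    """
    Minimal sketch of the reference format enforced by the source_ref and
    target_ref setters above: a "type:name:commit_id" string. The branch name
    and commit hash used here are made-up illustration values.
    """
    pull_request.source_ref = u'branch:feature-x:c1b2a3d4e5f60718293a4b5c6d7e8f9012345678'
    ref = pull_request.source_ref_parts  # Reference(type, name, commit_id)
    return ref.type, ref.name, ref.commit_id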
3788
3789 class PullRequest(Base, _PullRequestBase):
3790 __tablename__ = 'pull_requests'
3791 __table_args__ = (
3792 base_table_args,
3793 )
3794
3795 pull_request_id = Column(
3796 'pull_request_id', Integer(), nullable=False, primary_key=True)
3797
3798 def __repr__(self):
3799 if self.pull_request_id:
3800 return '<DB:PullRequest #%s>' % self.pull_request_id
3801 else:
3802 return '<DB:PullRequest at %#x>' % id(self)
3803
3804 reviewers = relationship('PullRequestReviewers',
3805 cascade="all, delete, delete-orphan")
3806 statuses = relationship('ChangesetStatus',
3807 cascade="all, delete, delete-orphan")
3808 comments = relationship('ChangesetComment',
3809 cascade="all, delete, delete-orphan")
3810 versions = relationship('PullRequestVersion',
3811 cascade="all, delete, delete-orphan",
3812 lazy='dynamic')
3813
3814 @classmethod
3815 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3816 internal_methods=None):
3817
3818 class PullRequestDisplay(object):
3819 """
3820 Special object wrapper for showing PullRequest data via Versions
3821 It mimics PR object as close as possible. This is read only object
3822 just for display
3823 """
3824
3825 def __init__(self, attrs, internal=None):
3826 self.attrs = attrs
3827 # internal attributes have priority over the ones given via attrs
3828 self.internal = internal or ['versions']
3829
3830 def __getattr__(self, item):
3831 if item in self.internal:
3832 return getattr(self, item)
3833 try:
3834 return self.attrs[item]
3835 except KeyError:
3836 raise AttributeError(
3837 '%s object has no attribute %s' % (self, item))
3838
3839 def __repr__(self):
3840 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3841
3842 def versions(self):
3843 return pull_request_obj.versions.order_by(
3844 PullRequestVersion.pull_request_version_id).all()
3845
3846 def is_closed(self):
3847 return pull_request_obj.is_closed()
3848
3849 @property
3850 def pull_request_version_id(self):
3851 return getattr(pull_request_obj, 'pull_request_version_id', None)
3852
3853 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3854
3855 attrs.author = StrictAttributeDict(
3856 pull_request_obj.author.get_api_data())
3857 if pull_request_obj.target_repo:
3858 attrs.target_repo = StrictAttributeDict(
3859 pull_request_obj.target_repo.get_api_data())
3860 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3861
3862 if pull_request_obj.source_repo:
3863 attrs.source_repo = StrictAttributeDict(
3864 pull_request_obj.source_repo.get_api_data())
3865 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3866
3867 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3868 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3869 attrs.revisions = pull_request_obj.revisions
3870
3871 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3872 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3873 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3874
3875 return PullRequestDisplay(attrs, internal=internal_methods)
3876
3877 def is_closed(self):
3878 return self.status == self.STATUS_CLOSED
3879
3880 def __json__(self):
3881 return {
3882 'revisions': self.revisions,
3883 }
3884
3885 def calculated_review_status(self):
3886 from rhodecode.model.changeset_status import ChangesetStatusModel
3887 return ChangesetStatusModel().calculated_review_status(self)
3888
3889 def reviewers_statuses(self):
3890 from rhodecode.model.changeset_status import ChangesetStatusModel
3891 return ChangesetStatusModel().reviewers_statuses(self)
3892
3893 @property
3894 def workspace_id(self):
3895 from rhodecode.model.pull_request import PullRequestModel
3896 return PullRequestModel()._workspace_id(self)
3897
3898 def get_shadow_repo(self):
3899 workspace_id = self.workspace_id
3900 vcs_obj = self.target_repo.scm_instance()
3901 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3902 self.target_repo.repo_id, workspace_id)
3903 if os.path.isdir(shadow_repository_path):
3904 return vcs_obj._get_shadow_instance(shadow_repository_path)
3905
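def _example_guarded_pull_request_update(pull_request):
    """
    Illustrative sketch of how the pull_request_state column can serialize
    operations on a pull request: only act when the PR is back in
    STATE_CREATED and flip the state around the critical section. The actual
    locking/retry policy is an assumption and is not defined in this module.
    """
    if pull_request.pull_request_state != PullRequest.STATE_CREATED:
        return False  # another operation (create/update/merge) is in progress
    pull_request.pull_request_state = PullRequest.STATE_UPDATING
    Session().add(pull_request)
    Session().commit()
    try:
        pass  # ... perform the update against source/target repositories ...
    finally:
        pull_request.pull_request_state = PullRequest.STATE_CREATED
        Session().add(pull_request)
        Session().commit()
    return True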
3906
3907 class PullRequestVersion(Base, _PullRequestBase):
3908 __tablename__ = 'pull_request_versions'
3909 __table_args__ = (
3910 base_table_args,
3911 )
3912
3913 pull_request_version_id = Column(
3914 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3915 pull_request_id = Column(
3916 'pull_request_id', Integer(),
3917 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3918 pull_request = relationship('PullRequest')
3919
3920 def __repr__(self):
3921 if self.pull_request_version_id:
3922 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3923 else:
3924 return '<DB:PullRequestVersion at %#x>' % id(self)
3925
3926 @property
3927 def reviewers(self):
3928 return self.pull_request.reviewers
3929
3930 @property
3931 def versions(self):
3932 return self.pull_request.versions
3933
3934 def is_closed(self):
3935 # calculate from original
3936 return self.pull_request.status == self.STATUS_CLOSED
3937
3938 def calculated_review_status(self):
3939 return self.pull_request.calculated_review_status()
3940
3941 def reviewers_statuses(self):
3942 return self.pull_request.reviewers_statuses()
3943
3944
3945 class PullRequestReviewers(Base, BaseModel):
3946 __tablename__ = 'pull_request_reviewers'
3947 __table_args__ = (
3948 base_table_args,
3949 )
3950
3951 @hybrid_property
3952 def reasons(self):
3953 if not self._reasons:
3954 return []
3955 return self._reasons
3956
3957 @reasons.setter
3958 def reasons(self, val):
3959 val = val or []
3960 if any(not isinstance(x, basestring) for x in val):
3961 raise Exception('invalid reasons type, must be list of strings')
3962 self._reasons = val
3963
3964 pull_requests_reviewers_id = Column(
3965 'pull_requests_reviewers_id', Integer(), nullable=False,
3966 primary_key=True)
3967 pull_request_id = Column(
3968 "pull_request_id", Integer(),
3969 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3970 user_id = Column(
3971 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3972 _reasons = Column(
3973 'reason', MutationList.as_mutable(
3974 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3975
3976 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3977 user = relationship('User')
3978 pull_request = relationship('PullRequest')
3979
3980 rule_data = Column(
3981 'rule_data_json',
3982 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3983
3984 def rule_user_group_data(self):
3985 """
3986 Returns the voting user group rule data for this reviewer
3987 """
3988
3989 if self.rule_data and 'vote_rule' in self.rule_data:
3990 user_group_data = {}
3991 if 'rule_user_group_entry_id' in self.rule_data:
3992 # means a group with voting rules!
3993 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3994 user_group_data['name'] = self.rule_data['rule_name']
3995 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3996
3997 return user_group_data
3998
3999 def __unicode__(self):
4000 return u"<%s('id:%s')>" % (self.__class__.__name__,
4001 self.pull_requests_reviewers_id)
4002
4003
4004 class Notification(Base, BaseModel):
4005 __tablename__ = 'notifications'
4006 __table_args__ = (
4007 Index('notification_type_idx', 'type'),
4008 base_table_args,
4009 )
4010
4011 TYPE_CHANGESET_COMMENT = u'cs_comment'
4012 TYPE_MESSAGE = u'message'
4013 TYPE_MENTION = u'mention'
4014 TYPE_REGISTRATION = u'registration'
4015 TYPE_PULL_REQUEST = u'pull_request'
4016 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4017
4018 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4019 subject = Column('subject', Unicode(512), nullable=True)
4020 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4021 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4022 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4023 type_ = Column('type', Unicode(255))
4024
4025 created_by_user = relationship('User')
4026 notifications_to_users = relationship('UserNotification', lazy='joined',
4027 cascade="all, delete, delete-orphan")
4028
4029 @property
4030 def recipients(self):
4031 return [x.user for x in UserNotification.query()\
4032 .filter(UserNotification.notification == self)\
4033 .order_by(UserNotification.user_id.asc()).all()]
4034
4035 @classmethod
4036 def create(cls, created_by, subject, body, recipients, type_=None):
4037 if type_ is None:
4038 type_ = Notification.TYPE_MESSAGE
4039
4040 notification = cls()
4041 notification.created_by_user = created_by
4042 notification.subject = subject
4043 notification.body = body
4044 notification.type_ = type_
4045 notification.created_on = datetime.datetime.now()
4046
4047 # For each recipient, link the created notification to their account
4048 for u in recipients:
4049 assoc = UserNotification()
4050 assoc.user_id = u.user_id
4051 assoc.notification = notification
4052
4053 # if created_by is among the recipients, mark their notification
4054 # as read
4055 if u.user_id == created_by.user_id:
4056 assoc.read = True
4057 Session().add(assoc)
4058
4059 Session().add(notification)
4060
4061 return notification
4062
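def _example_send_message_notification(author, recipient_users):
    """
    Sketch of Notification.create() above: it builds the notification plus one
    UserNotification row per recipient, and the author's own copy is pre-marked
    as read. Subject and body are placeholder values.
    """
    notification = Notification.create(
        created_by=author,
        subject=u'example subject',
        body=u'example body',
        recipients=recipient_users,
        type_=Notification.TYPE_MESSAGE)
    Session().commit()
    return notification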
4063
4064 class UserNotification(Base, BaseModel):
4065 __tablename__ = 'user_to_notification'
4066 __table_args__ = (
4067 UniqueConstraint('user_id', 'notification_id'),
4068 base_table_args
4069 )
4070
4071 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4072 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4073 read = Column('read', Boolean, default=False)
4074 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4075
4076 user = relationship('User', lazy="joined")
4077 notification = relationship('Notification', lazy="joined",
4078 order_by=lambda: Notification.created_on.desc(),)
4079
4080 def mark_as_read(self):
4081 self.read = True
4082 Session().add(self)
4083
4084
4085 class Gist(Base, BaseModel):
4086 __tablename__ = 'gists'
4087 __table_args__ = (
4088 Index('g_gist_access_id_idx', 'gist_access_id'),
4089 Index('g_created_on_idx', 'created_on'),
4090 base_table_args
4091 )
4092
4093 GIST_PUBLIC = u'public'
4094 GIST_PRIVATE = u'private'
4095 DEFAULT_FILENAME = u'gistfile1.txt'
4096
4097 ACL_LEVEL_PUBLIC = u'acl_public'
4098 ACL_LEVEL_PRIVATE = u'acl_private'
4099
4100 gist_id = Column('gist_id', Integer(), primary_key=True)
4101 gist_access_id = Column('gist_access_id', Unicode(250))
4102 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4103 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4104 gist_expires = Column('gist_expires', Float(53), nullable=False)
4105 gist_type = Column('gist_type', Unicode(128), nullable=False)
4106 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4107 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4108 acl_level = Column('acl_level', Unicode(128), nullable=True)
4109
4110 owner = relationship('User')
4111
4112 def __repr__(self):
4113 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4114
4115 @hybrid_property
4116 def description_safe(self):
4117 from rhodecode.lib import helpers as h
4118 return h.escape(self.gist_description)
4119
4120 @classmethod
4121 def get_or_404(cls, id_):
4122 from pyramid.httpexceptions import HTTPNotFound
4123
4124 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4125 if not res:
4126 raise HTTPNotFound()
4127 return res
4128
4129 @classmethod
4130 def get_by_access_id(cls, gist_access_id):
4131 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4132
4133 def gist_url(self):
4134 from rhodecode.model.gist import GistModel
4135 return GistModel().get_url(self)
4136
4137 @classmethod
4138 def base_path(cls):
4139 """
4140 Returns base path when all gists are stored
4141
4142 :param cls:
4143 """
4144 from rhodecode.model.gist import GIST_STORE_LOC
4145 q = Session().query(RhodeCodeUi)\
4146 .filter(RhodeCodeUi.ui_key == URL_SEP)
4147 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4148 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4149
4150 def get_api_data(self):
4151 """
4152 Common function for generating gist related data for API
4153 """
4154 gist = self
4155 data = {
4156 'gist_id': gist.gist_id,
4157 'type': gist.gist_type,
4158 'access_id': gist.gist_access_id,
4159 'description': gist.gist_description,
4160 'url': gist.gist_url(),
4161 'expires': gist.gist_expires,
4162 'created_on': gist.created_on,
4163 'modified_at': gist.modified_at,
4164 'content': None,
4165 'acl_level': gist.acl_level,
4166 }
4167 return data
4168
4169 def __json__(self):
4170 data = dict(
4171 )
4172 data.update(self.get_api_data())
4173 return data
4174 # SCM functions
4175
4176 def scm_instance(self, **kwargs):
4177 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4178 return get_vcs_instance(
4179 repo_path=safe_str(full_repo_path), create=False)
4180
4181
4182 class ExternalIdentity(Base, BaseModel):
4183 __tablename__ = 'external_identities'
4184 __table_args__ = (
4185 Index('local_user_id_idx', 'local_user_id'),
4186 Index('external_id_idx', 'external_id'),
4187 base_table_args
4188 )
4189
4190 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4191 external_username = Column('external_username', Unicode(1024), default=u'')
4192 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4193 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4194 access_token = Column('access_token', String(1024), default=u'')
4195 alt_token = Column('alt_token', String(1024), default=u'')
4196 token_secret = Column('token_secret', String(1024), default=u'')
4197
4198 @classmethod
4199 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4200 """
4201 Returns ExternalIdentity instance based on search params
4202
4203 :param external_id:
4204 :param provider_name:
4205 :return: ExternalIdentity
4206 """
4207 query = cls.query()
4208 query = query.filter(cls.external_id == external_id)
4209 query = query.filter(cls.provider_name == provider_name)
4210 if local_user_id:
4211 query = query.filter(cls.local_user_id == local_user_id)
4212 return query.first()
4213
4214 @classmethod
4215 def user_by_external_id_and_provider(cls, external_id, provider_name):
4216 """
4217 Returns User instance based on search params
4218
4219 :param external_id:
4220 :param provider_name:
4221 :return: User
4222 """
4223 query = User.query()
4224 query = query.filter(cls.external_id == external_id)
4225 query = query.filter(cls.provider_name == provider_name)
4226 query = query.filter(User.user_id == cls.local_user_id)
4227 return query.first()
4228
4229 @classmethod
4230 def by_local_user_id(cls, local_user_id):
4231 """
4232 Returns all tokens for user
4233
4234 :param local_user_id:
4235 :return: ExternalIdentity
4236 """
4237 query = cls.query()
4238 query = query.filter(cls.local_user_id == local_user_id)
4239 return query
4240
4241 @classmethod
4242 def load_provider_plugin(cls, plugin_id):
4243 from rhodecode.authentication.base import loadplugin
4244 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4245 auth_plugin = loadplugin(_plugin_id)
4246 return auth_plugin
4247
4248
4249 class Integration(Base, BaseModel):
4250 __tablename__ = 'integrations'
4251 __table_args__ = (
4252 base_table_args
4253 )
4254
4255 integration_id = Column('integration_id', Integer(), primary_key=True)
4256 integration_type = Column('integration_type', String(255))
4257 enabled = Column('enabled', Boolean(), nullable=False)
4258 name = Column('name', String(255), nullable=False)
4259 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4260 default=False)
4261
4262 settings = Column(
4263 'settings_json', MutationObj.as_mutable(
4264 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4265 repo_id = Column(
4266 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4267 nullable=True, unique=None, default=None)
4268 repo = relationship('Repository', lazy='joined')
4269
4270 repo_group_id = Column(
4271 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4272 nullable=True, unique=None, default=None)
4273 repo_group = relationship('RepoGroup', lazy='joined')
4274
4275 @property
4276 def scope(self):
4277 if self.repo:
4278 return repr(self.repo)
4279 if self.repo_group:
4280 if self.child_repos_only:
4281 return repr(self.repo_group) + ' (child repos only)'
4282 else:
4283 return repr(self.repo_group) + ' (recursive)'
4284 if self.child_repos_only:
4285 return 'root_repos'
4286 return 'global'
4287
4288 def __repr__(self):
4289 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4290
4291
4292 class RepoReviewRuleUser(Base, BaseModel):
4293 __tablename__ = 'repo_review_rules_users'
4294 __table_args__ = (
4295 base_table_args
4296 )
4297
4298 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4299 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4300 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4301 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4302 user = relationship('User')
4303
4304 def rule_data(self):
4305 return {
4306 'mandatory': self.mandatory
4307 }
4308
4309
4310 class RepoReviewRuleUserGroup(Base, BaseModel):
4311 __tablename__ = 'repo_review_rules_users_groups'
4312 __table_args__ = (
4313 base_table_args
4314 )
4315
4316 VOTE_RULE_ALL = -1
4317
4318 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4319 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4320 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
4321 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4322 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4323 users_group = relationship('UserGroup')
4324
4325 def rule_data(self):
4326 return {
4327 'mandatory': self.mandatory,
4328 'vote_rule': self.vote_rule
4329 }
4330
4331 @property
4332 def vote_rule_label(self):
4333 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4334 return 'all must vote'
4335 else:
4336 return 'min. vote {}'.format(self.vote_rule)
4337
4338
4339 class RepoReviewRule(Base, BaseModel):
4340 __tablename__ = 'repo_review_rules'
4341 __table_args__ = (
4342 base_table_args
4343 )
4344
4345 repo_review_rule_id = Column(
4346 'repo_review_rule_id', Integer(), primary_key=True)
4347 repo_id = Column(
4348 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4349 repo = relationship('Repository', backref='review_rules')
4350
4351 review_rule_name = Column('review_rule_name', String(255))
4352 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4353 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4354 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4355
4356 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4357 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4358 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4359 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4360
4361 rule_users = relationship('RepoReviewRuleUser')
4362 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4363
4364 def _validate_pattern(self, value):
4365 re.compile('^' + glob2re(value) + '$')
4366
4367 @hybrid_property
4368 def source_branch_pattern(self):
4369 return self._branch_pattern or '*'
4370
4371 @source_branch_pattern.setter
4372 def source_branch_pattern(self, value):
4373 self._validate_pattern(value)
4374 self._branch_pattern = value or '*'
4375
4376 @hybrid_property
4377 def target_branch_pattern(self):
4378 return self._target_branch_pattern or '*'
4379
4380 @target_branch_pattern.setter
4381 def target_branch_pattern(self, value):
4382 self._validate_pattern(value)
4383 self._target_branch_pattern = value or '*'
4384
4385 @hybrid_property
4386 def file_pattern(self):
4387 return self._file_pattern or '*'
4388
4389 @file_pattern.setter
4390 def file_pattern(self, value):
4391 self._validate_pattern(value)
4392 self._file_pattern = value or '*'
4393
4394 def matches(self, source_branch, target_branch, files_changed):
4395 """
4396 Check if this review rule matches a branch/files in a pull request
4397
4398 :param source_branch: source branch name for the commit
4399 :param target_branch: target branch name for the commit
4400 :param files_changed: list of file paths changed in the pull request
4401 """
4402
4403 source_branch = source_branch or ''
4404 target_branch = target_branch or ''
4405 files_changed = files_changed or []
4406
4407 branch_matches = True
4408 if source_branch or target_branch:
4409 if self.source_branch_pattern == '*':
4410 source_branch_match = True
4411 else:
4412 if self.source_branch_pattern.startswith('re:'):
4413 source_pattern = self.source_branch_pattern[3:]
4414 else:
4415 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4416 source_branch_regex = re.compile(source_pattern)
4417 source_branch_match = bool(source_branch_regex.search(source_branch))
4418 if self.target_branch_pattern == '*':
4419 target_branch_match = True
4420 else:
4421 if self.target_branch_pattern.startswith('re:'):
4422 target_pattern = self.target_branch_pattern[3:]
4423 else:
4424 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4425 target_branch_regex = re.compile(target_pattern)
4426 target_branch_match = bool(target_branch_regex.search(target_branch))
4427
4428 branch_matches = source_branch_match and target_branch_match
4429
4430 files_matches = True
4431 if self.file_pattern != '*':
4432 files_matches = False
4433 if self.file_pattern.startswith('re:'):
4434 file_pattern = self.file_pattern[3:]
4435 else:
4436 file_pattern = glob2re(self.file_pattern)
4437 file_regex = re.compile(file_pattern)
4438 for filename in files_changed:
4439 if file_regex.search(filename):
4440 files_matches = True
4441 break
4442
4443 return branch_matches and files_matches
4444
4445 @property
4446 def review_users(self):
4447 """ Returns the users which this rule applies to """
4448
4449 users = collections.OrderedDict()
4450
4451 for rule_user in self.rule_users:
4452 if rule_user.user.active:
4453 if rule_user.user not in users:
4454 users[rule_user.user.username] = {
4455 'user': rule_user.user,
4456 'source': 'user',
4457 'source_data': {},
4458 'data': rule_user.rule_data()
4459 }
4460
4461 for rule_user_group in self.rule_user_groups:
4462 source_data = {
4463 'user_group_id': rule_user_group.users_group.users_group_id,
4464 'name': rule_user_group.users_group.users_group_name,
4465 'members': len(rule_user_group.users_group.members)
4466 }
4467 for member in rule_user_group.users_group.members:
4468 if member.user.active:
4469 key = member.user.username
4470 if key in users:
4471 # skip this member as we have him already
4472 # this prevents overriding the "first" matched
4473 # users with duplicates from multiple groups
4474 continue
4475
4476 users[key] = {
4477 'user': member.user,
4478 'source': 'user_group',
4479 'source_data': source_data,
4480 'data': rule_user_group.rule_data()
4481 }
4482
4483 return users
4484
4485 def user_group_vote_rule(self, user_id):
4486
4487 rules = []
4488 if not self.rule_user_groups:
4489 return rules
4490
4491 for user_group in self.rule_user_groups:
4492 user_group_members = [x.user_id for x in user_group.users_group.members]
4493 if user_id in user_group_members:
4494 rules.append(user_group)
4495 return rules
4496
4497 def __repr__(self):
4498 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4499 self.repo_review_rule_id, self.repo)
4500
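def _example_review_rule_matching(rule):
    """
    Sketch of RepoReviewRule.matches(): patterns are globs by default and are
    treated as raw regular expressions when prefixed with 're:'. The pattern
    and file values below are illustrative only.
    """
    rule.source_branch_pattern = 'feature/*'
    rule.target_branch_pattern = 're:^(master|stable)$'
    rule.file_pattern = '*.py'
    return rule.matches(
        source_branch='feature/login',
        target_branch='master',
        files_changed=['rhodecode/model/db.py'])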
4501
4502 class ScheduleEntry(Base, BaseModel):
4503 __tablename__ = 'schedule_entries'
4504 __table_args__ = (
4505 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4506 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4507 base_table_args,
4508 )
4509
4510 schedule_types = ['crontab', 'timedelta', 'integer']
4511 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4512
4513 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4514 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4515 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4516
4517 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4518 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4519
4520 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4521 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4522
4523 # task
4524 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4525 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4526 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4527 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4528
4529 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4530 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4531
4532 @hybrid_property
4533 def schedule_type(self):
4534 return self._schedule_type
4535
4536 @schedule_type.setter
4537 def schedule_type(self, val):
4538 if val not in self.schedule_types:
4539 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4540 self.schedule_types, val))
4541
4542 self._schedule_type = val
4543
4544 @classmethod
4545 def get_uid(cls, obj):
4546 args = obj.task_args
4547 kwargs = obj.task_kwargs
4548 if isinstance(args, JsonRaw):
4549 try:
4550 args = json.loads(args)
4551 except ValueError:
4552 args = tuple()
4553
4554 if isinstance(kwargs, JsonRaw):
4555 try:
4556 kwargs = json.loads(kwargs)
4557 except ValueError:
4558 kwargs = dict()
4559
4560 dot_notation = obj.task_dot_notation
4561 val = '.'.join(map(safe_str, [
4562 sorted(dot_notation), args, sorted(kwargs.items())]))
4563 return hashlib.sha1(val).hexdigest()
4564
4565 @classmethod
4566 def get_by_schedule_name(cls, schedule_name):
4567 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4568
4569 @classmethod
4570 def get_by_schedule_id(cls, schedule_id):
4571 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4572
4573 @property
4574 def task(self):
4575 return self.task_dot_notation
4576
4577 @property
4578 def schedule(self):
4579 from rhodecode.lib.celerylib.utils import raw_2_schedule
4580 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4581 return schedule
4582
4583 @property
4584 def args(self):
4585 try:
4586 return list(self.task_args or [])
4587 except ValueError:
4588 return list()
4589
4590 @property
4591 def kwargs(self):
4592 try:
4593 return dict(self.task_kwargs or {})
4594 except ValueError:
4595 return dict()
4596
4597 def _as_raw(self, val):
4598 if hasattr(val, 'de_coerce'):
4599 val = val.de_coerce()
4600 if val:
4601 val = json.dumps(val)
4602
4603 return val
4604
4605 @property
4606 def schedule_definition_raw(self):
4607 return self._as_raw(self.schedule_definition)
4608
4609 @property
4610 def args_raw(self):
4611 return self._as_raw(self.task_args)
4612
4613 @property
4614 def kwargs_raw(self):
4615 return self._as_raw(self.task_kwargs)
4616
4617 def __repr__(self):
4618 return '<DB:ScheduleEntry({}:{})>'.format(
4619 self.schedule_entry_id, self.schedule_name)
4620
4621
4622 @event.listens_for(ScheduleEntry, 'before_update')
4623 def update_task_uid(mapper, connection, target):
4624 target.task_uid = ScheduleEntry.get_uid(target)
4625
4626
4627 @event.listens_for(ScheduleEntry, 'before_insert')
4628 def set_task_uid(mapper, connection, target):
4629 target.task_uid = ScheduleEntry.get_uid(target)
4630
4631
4632 class _BaseBranchPerms(BaseModel):
4633 @classmethod
4634 def compute_hash(cls, value):
4635 return sha1_safe(value)
4636
4637 @hybrid_property
4638 def branch_pattern(self):
4639 return self._branch_pattern or '*'
4640
4641 @hybrid_property
4642 def branch_hash(self):
4643 return self._branch_hash
4644
4645 def _validate_glob(self, value):
4646 re.compile('^' + glob2re(value) + '$')
4647
4648 @branch_pattern.setter
4649 def branch_pattern(self, value):
4650 self._validate_glob(value)
4651 self._branch_pattern = value or '*'
4652 # set the Hash when setting the branch pattern
4653 self._branch_hash = self.compute_hash(self._branch_pattern)
4654
4655 def matches(self, branch):
4656 """
4657 Check if this the branch matches entry
4658
4659 :param branch: branch name for the commit
4660 """
4661
4662 branch = branch or ''
4663
4664 branch_matches = True
4665 if branch:
4666 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
4667 branch_matches = bool(branch_regex.search(branch))
4668
4669 return branch_matches
4670
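def _example_branch_rule(branch_perm_entry):
    """
    Sketch of the _BaseBranchPerms contract: assigning branch_pattern validates
    the glob and derives branch_hash from it, and matches() then tests a
    concrete branch name. 'release/*' and 'release/1.2' are example values.
    """
    branch_perm_entry.branch_pattern = 'release/*'
    assert branch_perm_entry.branch_hash == _BaseBranchPerms.compute_hash('release/*')
    return branch_perm_entry.matches('release/1.2')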
4671
4672 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
4673 __tablename__ = 'user_to_repo_branch_permissions'
4674 __table_args__ = (
4675 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4676 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4677 )
4678
4679 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
4680
4681 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
4682 repo = relationship('Repository', backref='user_branch_perms')
4683
4684 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
4685 permission = relationship('Permission')
4686
4687 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
4688 user_repo_to_perm = relationship('UserRepoToPerm')
4689
4690 rule_order = Column('rule_order', Integer(), nullable=False)
4691 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
4692 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
4693
4694 def __unicode__(self):
4695 return u'<UserBranchPermission(%s => %r)>' % (
4696 self.user_repo_to_perm, self.branch_pattern)
4697
4698
4699 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
4700 __tablename__ = 'user_group_to_repo_branch_permissions'
4701 __table_args__ = (
4702 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4703 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4704 )
4705
4706 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
4707
4708 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
4709 repo = relationship('Repository', backref='user_group_branch_perms')
4710
4711 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
4712 permission = relationship('Permission')
4713
4714 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
4715 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
4716
4717 rule_order = Column('rule_order', Integer(), nullable=False)
4718 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
4719 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
4720
4721 def __unicode__(self):
4722 return u'<UserGroupBranchPermission(%s => %r)>' % (
4723 self.user_group_repo_to_perm, self.branch_pattern)
4724
4725
4726 class DbMigrateVersion(Base, BaseModel):
4727 __tablename__ = 'db_migrate_version'
4728 __table_args__ = (
4729 base_table_args,
4730 )
4731
4732 repository_id = Column('repository_id', String(250), primary_key=True)
4733 repository_path = Column('repository_path', Text)
4734 version = Column('version', Integer)
4735
4736 @classmethod
4737 def set_version(cls, version):
4738 """
4739 Helper for forcing a different version, usually for debugging purposes via ishell.
4740 """
4741 ver = DbMigrateVersion.query().first()
4742 ver.version = version
4743 Session().commit()
4744
4745
4746 class DbSession(Base, BaseModel):
4747 __tablename__ = 'db_session'
4748 __table_args__ = (
4749 base_table_args,
4750 )
4751
4752 def __repr__(self):
4753 return '<DB:DbSession({})>'.format(self.id)
4754
4755 id = Column('id', Integer())
4756 namespace = Column('namespace', String(255), primary_key=True)
4757 accessed = Column('accessed', DateTime, nullable=False)
4758 created = Column('created', DateTime, nullable=False)
4759 data = Column('data', PickleType, nullable=False)
@@ -0,0 +1,37 b''
1 import logging
2
3 from sqlalchemy import *
4
5 from rhodecode.model import meta
6 from rhodecode.lib.dbmigrate.versions import _reset_base, notify
7
8 log = logging.getLogger(__name__)
9
10
11 def upgrade(migrate_engine):
12 """
13 Upgrade operations go here.
14 Don't create your own engine; bind migrate_engine to your metadata
15 """
16 _reset_base(migrate_engine)
17 from rhodecode.lib.dbmigrate.schema import db_4_13_0_0 as db
18
19 pull_request = db.PullRequest.__table__
20 pull_request_version = db.PullRequestVersion.__table__
21
22 pr_state_1 = Column("pull_request_state", String(255), nullable=True)
23 pr_state_1.create(table=pull_request)
24
25 pr_state_2 = Column("pull_request_state", String(255), nullable=True)
26 pr_state_2.create(table=pull_request_version)
27
28 fixups(db, meta.Session)
29
30
31 def downgrade(migrate_engine):
32 meta = MetaData()
33 meta.bind = migrate_engine
34
35
36 def fixups(models, _SESSION):
37 pass
@@ -0,0 +1,41 b''
1 import logging
2
3 from sqlalchemy import *
4
5 from rhodecode.model import meta
6 from rhodecode.lib.dbmigrate.versions import _reset_base, notify
7
8 log = logging.getLogger(__name__)
9
10
11 def upgrade(migrate_engine):
12 """
13 Upgrade operations go here.
14 Don't create your own engine; bind migrate_engine to your metadata
15 """
16 _reset_base(migrate_engine)
17 from rhodecode.lib.dbmigrate.schema import db_4_16_0_0 as db
18
19 fixups(db, meta.Session)
20
21
22 def downgrade(migrate_engine):
23 meta = MetaData()
24 meta.bind = migrate_engine
25
26
27 def fixups(models, _SESSION):
28 # set the initial pull_request_state on all existing pull requests
29
30 log.info('Updating pull request pull_request_state to %s',
31 models.PullRequest.STATE_CREATED)
32 qry = _SESSION().query(models.PullRequest)
33 qry.update({"pull_request_state": models.PullRequest.STATE_CREATED})
34 _SESSION().commit()
35
36 log.info('Updating pull_request_version pull_request_state to %s',
37 models.PullRequest.STATE_CREATED)
38 qry = _SESSION().query(models.PullRequestVersion)
39 qry.update({"pull_request_state": models.PullRequest.STATE_CREATED})
40 _SESSION().commit()
41
@@ -1,63 +1,57 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 """
22
23 RhodeCode, a web based repository management software
24 versioning implementation: http://www.python.org/dev/peps/pep-0386/
25 """
26
27 21 import os
28 22 import sys
29 23 import platform
30 24
31 25 VERSION = tuple(open(os.path.join(
32 26 os.path.dirname(__file__), 'VERSION')).read().split('.'))
33 27
34 28 BACKENDS = {
35 29 'hg': 'Mercurial repository',
36 30 'git': 'Git repository',
37 31 'svn': 'Subversion repository',
38 32 }
39 33
40 34 CELERY_ENABLED = False
41 35 CELERY_EAGER = False
42 36
43 37 # link to config for pyramid
44 38 CONFIG = {}
45 39
46 40 # Populated with the settings dictionary from application init in
47 41 # rhodecode.conf.environment.load_pyramid_environment
48 42 PYRAMID_SETTINGS = {}
49 43
50 44 # Linked module for extensions
51 45 EXTENSIONS = {}
52 46
53 47 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
54 __dbversion__ = 91 # defines current db version for migrations
48 __dbversion__ = 93 # defines current db version for migrations
55 49 __platform__ = platform.system()
56 50 __license__ = 'AGPLv3, and Commercial License'
57 51 __author__ = 'RhodeCode GmbH'
58 52 __url__ = 'https://code.rhodecode.com'
59 53
60 54 is_windows = __platform__ in ['Windows']
61 55 is_unix = not is_windows
62 56 is_test = False
63 57 disable_error_handler = False
@@ -1,142 +1,143 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23 import urlobject
24 24
25 25 from rhodecode.api.tests.utils import (
26 26 build_data, api_call, assert_error, assert_ok)
27 27 from rhodecode.lib import helpers as h
28 28 from rhodecode.lib.utils2 import safe_unicode
29 29
30 30 pytestmark = pytest.mark.backends("git", "hg")
31 31
32 32
33 33 @pytest.mark.usefixtures("testuser_api", "app")
34 34 class TestGetPullRequest(object):
35 35
36 36 def test_api_get_pull_request(self, pr_util, http_host_only_stub):
37 37 from rhodecode.model.pull_request import PullRequestModel
38 38 pull_request = pr_util.create_pull_request(mergeable=True)
39 39 id_, params = build_data(
40 40 self.apikey, 'get_pull_request',
41 41 pullrequestid=pull_request.pull_request_id)
42 42
43 43 response = api_call(self.app, params)
44 44
45 45 assert response.status == '200 OK'
46 46
47 47 url_obj = urlobject.URLObject(
48 48 h.route_url(
49 49 'pullrequest_show',
50 50 repo_name=pull_request.target_repo.repo_name,
51 51 pull_request_id=pull_request.pull_request_id))
52 52
53 53 pr_url = safe_unicode(
54 54 url_obj.with_netloc(http_host_only_stub))
55 55 source_url = safe_unicode(
56 56 pull_request.source_repo.clone_url().with_netloc(http_host_only_stub))
57 57 target_url = safe_unicode(
58 58 pull_request.target_repo.clone_url().with_netloc(http_host_only_stub))
59 59 shadow_url = safe_unicode(
60 60 PullRequestModel().get_shadow_clone_url(pull_request))
61 61
62 62 expected = {
63 63 'pull_request_id': pull_request.pull_request_id,
64 64 'url': pr_url,
65 65 'title': pull_request.title,
66 66 'description': pull_request.description,
67 67 'status': pull_request.status,
68 'state': pull_request.pull_request_state,
68 69 'created_on': pull_request.created_on,
69 70 'updated_on': pull_request.updated_on,
70 71 'commit_ids': pull_request.revisions,
71 72 'review_status': pull_request.calculated_review_status(),
72 73 'mergeable': {
73 74 'status': True,
74 75 'message': 'This pull request can be automatically merged.',
75 76 },
76 77 'source': {
77 78 'clone_url': source_url,
78 79 'repository': pull_request.source_repo.repo_name,
79 80 'reference': {
80 81 'name': pull_request.source_ref_parts.name,
81 82 'type': pull_request.source_ref_parts.type,
82 83 'commit_id': pull_request.source_ref_parts.commit_id,
83 84 },
84 85 },
85 86 'target': {
86 87 'clone_url': target_url,
87 88 'repository': pull_request.target_repo.repo_name,
88 89 'reference': {
89 90 'name': pull_request.target_ref_parts.name,
90 91 'type': pull_request.target_ref_parts.type,
91 92 'commit_id': pull_request.target_ref_parts.commit_id,
92 93 },
93 94 },
94 95 'merge': {
95 96 'clone_url': shadow_url,
96 97 'reference': {
97 98 'name': pull_request.shadow_merge_ref.name,
98 99 'type': pull_request.shadow_merge_ref.type,
99 100 'commit_id': pull_request.shadow_merge_ref.commit_id,
100 101 },
101 102 },
102 103 'author': pull_request.author.get_api_data(include_secrets=False,
103 104 details='basic'),
104 105 'reviewers': [
105 106 {
106 107 'user': reviewer.get_api_data(include_secrets=False,
107 108 details='basic'),
108 109 'reasons': reasons,
109 110 'review_status': st[0][1].status if st else 'not_reviewed',
110 111 }
111 112 for obj, reviewer, reasons, mandatory, st in
112 113 pull_request.reviewers_statuses()
113 114 ]
114 115 }
115 116 assert_ok(id_, expected, response.body)
116 117
117 118 def test_api_get_pull_request_repo_error(self, pr_util):
118 119 pull_request = pr_util.create_pull_request()
119 120 id_, params = build_data(
120 121 self.apikey, 'get_pull_request',
121 122 repoid=666, pullrequestid=pull_request.pull_request_id)
122 123 response = api_call(self.app, params)
123 124
124 125 expected = 'repository `666` does not exist'
125 126 assert_error(id_, expected, given=response.body)
126 127
127 128 def test_api_get_pull_request_pull_request_error(self):
128 129 id_, params = build_data(
129 130 self.apikey, 'get_pull_request', pullrequestid=666)
130 131 response = api_call(self.app, params)
131 132
132 133 expected = 'pull request `666` does not exist'
133 134 assert_error(id_, expected, given=response.body)
134 135
135 136 def test_api_get_pull_request_pull_request_error_just_pr_id(self):
136 137 id_, params = build_data(
137 138 self.apikey, 'get_pull_request',
138 139 pullrequestid=666)
139 140 response = api_call(self.app, params)
140 141
141 142 expected = 'pull request `666` does not exist'
142 143 assert_error(id_, expected, given=response.body)
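
The expected payload in the test above now carries the new 'state' field alongside 'status'. For exercising this outside the test suite, a rough client-side sketch of the same call through the JSON-RPC endpoint is shown below; the envelope keys follow the conventions used by build_data() in these tests, but treat the exact URL and token handling as assumptions rather than a verified client.

import json
import urllib2

def fetch_pull_request_state(api_url, auth_token, pull_request_id):
    # api_url is typically https://<server>/_admin/api (assumption).
    payload = {
        'id': 1,
        'auth_token': auth_token,
        'method': 'get_pull_request',
        'args': {'pullrequestid': pull_request_id},
    }
    req = urllib2.Request(
        api_url, data=json.dumps(payload),
        headers={'Content-Type': 'application/json'})
    result = json.load(urllib2.urlopen(req))['result']
    # With this change the result includes the operation state,
    # e.g. 'created' while no update or merge is in flight.
    return result['state'], result['status']
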
@@ -1,137 +1,157 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.model.db import UserLog, PullRequest
24 24 from rhodecode.model.meta import Session
25 25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
26 26 from rhodecode.api.tests.utils import (
27 27 build_data, api_call, assert_error, assert_ok)
28 28
29 29
30 30 @pytest.mark.usefixtures("testuser_api", "app")
31 31 class TestMergePullRequest(object):
32
32 33 @pytest.mark.backends("git", "hg")
33 34 def test_api_merge_pull_request_merge_failed(self, pr_util, no_notifications):
34 35 pull_request = pr_util.create_pull_request(mergeable=True)
35 author = pull_request.user_id
36 repo = pull_request.target_repo.repo_id
37 36 pull_request_id = pull_request.pull_request_id
38 37 pull_request_repo = pull_request.target_repo.repo_name
39 38
40 39 id_, params = build_data(
41 40 self.apikey, 'merge_pull_request',
42 41 repoid=pull_request_repo,
43 42 pullrequestid=pull_request_id)
44 43
45 44 response = api_call(self.app, params)
46 45
47 46 # The above api call detaches the pull request DB object from the
48 47 # session because of an unconditional transaction rollback in our
49 # middleware. Therefore we need to add it back here if we want to use
50 # it.
48 # middleware. Therefore we need to add it back here if we want to use it.
51 49 Session().add(pull_request)
52 50
53 51 expected = 'merge not possible for following reasons: ' \
54 52 'Pull request reviewer approval is pending.'
55 53 assert_error(id_, expected, given=response.body)
56 54
57 55 @pytest.mark.backends("git", "hg")
56 def test_api_merge_pull_request_merge_failed_disallowed_state(
57 self, pr_util, no_notifications):
58 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
59 pull_request_id = pull_request.pull_request_id
60 pull_request_repo = pull_request.target_repo.repo_name
61
62 pr = PullRequest.get(pull_request_id)
63 pr.pull_request_state = pull_request.STATE_UPDATING
64 Session().add(pr)
65 Session().commit()
66
67 id_, params = build_data(
68 self.apikey, 'merge_pull_request',
69 repoid=pull_request_repo,
70 pullrequestid=pull_request_id)
71
72 response = api_call(self.app, params)
73 expected = 'Operation forbidden because pull request is in state {}, '\
74 'only state {} is allowed.'.format(PullRequest.STATE_UPDATING,
75 PullRequest.STATE_CREATED)
76 assert_error(id_, expected, given=response.body)
77
78 @pytest.mark.backends("git", "hg")
58 79 def test_api_merge_pull_request(self, pr_util, no_notifications):
59 80 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
60 81 author = pull_request.user_id
61 82 repo = pull_request.target_repo.repo_id
62 83 pull_request_id = pull_request.pull_request_id
63 84 pull_request_repo = pull_request.target_repo.repo_name
64 85
65 86 id_, params = build_data(
66 87 self.apikey, 'comment_pull_request',
67 88 repoid=pull_request_repo,
68 89 pullrequestid=pull_request_id,
69 90 status='approved')
70 91
71 92 response = api_call(self.app, params)
72 93 expected = {
73 94 'comment_id': response.json.get('result', {}).get('comment_id'),
74 95 'pull_request_id': pull_request_id,
75 96 'status': {'given': 'approved', 'was_changed': True}
76 97 }
77 98 assert_ok(id_, expected, given=response.body)
78 99
79 100 id_, params = build_data(
80 101 self.apikey, 'merge_pull_request',
81 102 repoid=pull_request_repo,
82 103 pullrequestid=pull_request_id)
83 104
84 105 response = api_call(self.app, params)
85 106
86 107 pull_request = PullRequest.get(pull_request_id)
87 108
88 109 expected = {
89 110 'executed': True,
90 111 'failure_reason': 0,
91 112 'possible': True,
92 113 'merge_commit_id': pull_request.shadow_merge_ref.commit_id,
93 114 'merge_ref': pull_request.shadow_merge_ref._asdict()
94 115 }
95 116
96 117 assert_ok(id_, expected, response.body)
97 118
98 119 journal = UserLog.query()\
99 120 .filter(UserLog.user_id == author)\
100 121 .filter(UserLog.repository_id == repo) \
101 122 .order_by('user_log_id') \
102 123 .all()
103 124 assert journal[-2].action == 'repo.pull_request.merge'
104 125 assert journal[-1].action == 'repo.pull_request.close'
105 126
106 127 id_, params = build_data(
107 128 self.apikey, 'merge_pull_request',
108 129 repoid=pull_request_repo, pullrequestid=pull_request_id)
109 130 response = api_call(self.app, params)
110 131
111 132 expected = 'merge not possible for following reasons: This pull request is closed.'
112 133 assert_error(id_, expected, given=response.body)
113 134
114 135 @pytest.mark.backends("git", "hg")
115 136 def test_api_merge_pull_request_repo_error(self, pr_util):
116 137 pull_request = pr_util.create_pull_request()
117 138 id_, params = build_data(
118 139 self.apikey, 'merge_pull_request',
119 140 repoid=666, pullrequestid=pull_request.pull_request_id)
120 141 response = api_call(self.app, params)
121 142
122 143 expected = 'repository `666` does not exist'
123 144 assert_error(id_, expected, given=response.body)
124 145
125 146 @pytest.mark.backends("git", "hg")
126 def test_api_merge_pull_request_non_admin_with_userid_error(self,
127 pr_util):
147 def test_api_merge_pull_request_non_admin_with_userid_error(self, pr_util):
128 148 pull_request = pr_util.create_pull_request(mergeable=True)
129 149 id_, params = build_data(
130 150 self.apikey_regular, 'merge_pull_request',
131 151 repoid=pull_request.target_repo.repo_name,
132 152 pullrequestid=pull_request.pull_request_id,
133 153 userid=TEST_USER_ADMIN_LOGIN)
134 154 response = api_call(self.app, params)
135 155
136 156 expected = 'userid is not the same as your user'
137 157 assert_error(id_, expected, given=response.body)
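
The new test_api_merge_pull_request_merge_failed_disallowed_state case above forces the pull request into STATE_UPDATING and expects the merge call to be rejected. The guard it exercises (visible in the API view further below) reduces to a single state check before any merge work starts; a condensed sketch, reusing the error wording asserted by the test:

def ensure_state_allows_operation(pull_request):
    # Only pull requests in the 'created' state may be merged or updated.
    if pull_request.pull_request_state != PullRequest.STATE_CREATED:
        raise JSONRPCError(
            'Operation forbidden because pull request is in state {}, '
            'only state {} is allowed.'.format(
                pull_request.pull_request_state,
                PullRequest.STATE_CREATED))
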
@@ -1,937 +1,957 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from rhodecode import events
25 25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 26 from rhodecode.api.utils import (
27 27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 29 validate_repo_permissions, resolve_ref_or_error)
30 30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 31 from rhodecode.lib.base import vcs_operation_context
32 32 from rhodecode.lib.utils2 import str2bool
33 33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 34 from rhodecode.model.comment import CommentsModel
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment, PullRequest
36 36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 37 from rhodecode.model.settings import SettingsModel
38 38 from rhodecode.model.validation_schema import Invalid
39 39 from rhodecode.model.validation_schema.schemas.reviewer_schema import(
40 40 ReviewerListSchema)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 @jsonrpc_method()
46 46 def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None)):
47 47 """
48 48 Get a pull request based on the given ID.
49 49
50 50 :param apiuser: This is filled automatically from the |authtoken|.
51 51 :type apiuser: AuthUser
52 52 :param repoid: Optional, repository name or repository ID from which
53 53 the pull request was opened.
54 54 :type repoid: str or int
55 55 :param pullrequestid: ID of the requested pull request.
56 56 :type pullrequestid: int
57 57
58 58 Example output:
59 59
60 60 .. code-block:: bash
61 61
62 62 "id": <id_given_in_input>,
63 63 "result":
64 64 {
65 65 "pull_request_id": "<pull_request_id>",
66 66 "url": "<url>",
67 67 "title": "<title>",
68 68 "description": "<description>",
69 69 "status" : "<status>",
70 70 "created_on": "<date_time_created>",
71 71 "updated_on": "<date_time_updated>",
72 72 "commit_ids": [
73 73 ...
74 74 "<commit_id>",
75 75 "<commit_id>",
76 76 ...
77 77 ],
78 78 "review_status": "<review_status>",
79 79 "mergeable": {
80 80 "status": "<bool>",
81 81 "message": "<message>",
82 82 },
83 83 "source": {
84 84 "clone_url": "<clone_url>",
85 85 "repository": "<repository_name>",
86 86 "reference":
87 87 {
88 88 "name": "<name>",
89 89 "type": "<type>",
90 90 "commit_id": "<commit_id>",
91 91 }
92 92 },
93 93 "target": {
94 94 "clone_url": "<clone_url>",
95 95 "repository": "<repository_name>",
96 96 "reference":
97 97 {
98 98 "name": "<name>",
99 99 "type": "<type>",
100 100 "commit_id": "<commit_id>",
101 101 }
102 102 },
103 103 "merge": {
104 104 "clone_url": "<clone_url>",
105 105 "reference":
106 106 {
107 107 "name": "<name>",
108 108 "type": "<type>",
109 109 "commit_id": "<commit_id>",
110 110 }
111 111 },
112 112 "author": <user_obj>,
113 113 "reviewers": [
114 114 ...
115 115 {
116 116 "user": "<user_obj>",
117 117 "review_status": "<review_status>",
118 118 }
119 119 ...
120 120 ]
121 121 },
122 122 "error": null
123 123 """
124 124
125 125 pull_request = get_pull_request_or_error(pullrequestid)
126 126 if Optional.extract(repoid):
127 127 repo = get_repo_or_error(repoid)
128 128 else:
129 129 repo = pull_request.target_repo
130 130
131 if not PullRequestModel().check_user_read(
132 pull_request, apiuser, api=True):
131 if not PullRequestModel().check_user_read(pull_request, apiuser, api=True):
133 132 raise JSONRPCError('repository `%s` or pull request `%s` '
134 133 'does not exist' % (repoid, pullrequestid))
135 data = pull_request.get_api_data()
134
135 # NOTE(marcink): only calculate and return merge state if the pr state is 'created'
136 # otherwise calculating the merge state could lock the repo while an update/merge
137 # is in progress.
138 merge_state = pull_request.pull_request_state == pull_request.STATE_CREATED
139 data = pull_request.get_api_data(with_merge_state=merge_state)
136 140 return data
137 141
138 142
139 143 @jsonrpc_method()
140 144 def get_pull_requests(request, apiuser, repoid, status=Optional('new')):
141 145 """
142 146 Get all pull requests from the repository specified in `repoid`.
143 147
144 148 :param apiuser: This is filled automatically from the |authtoken|.
145 149 :type apiuser: AuthUser
146 150 :param repoid: Optional repository name or repository ID.
147 151 :type repoid: str or int
148 152 :param status: Only return pull requests with the specified status.
149 153 Valid options are:
150 154 * ``new`` (default)
151 155 * ``open``
152 156 * ``closed``
153 157 :type status: str
154 158
155 159 Example output:
156 160
157 161 .. code-block:: bash
158 162
159 163 "id": <id_given_in_input>,
160 164 "result":
161 165 [
162 166 ...
163 167 {
164 168 "pull_request_id": "<pull_request_id>",
165 169 "url": "<url>",
166 170 "title" : "<title>",
167 171 "description": "<description>",
168 172 "status": "<status>",
169 173 "created_on": "<date_time_created>",
170 174 "updated_on": "<date_time_updated>",
171 175 "commit_ids": [
172 176 ...
173 177 "<commit_id>",
174 178 "<commit_id>",
175 179 ...
176 180 ],
177 181 "review_status": "<review_status>",
178 182 "mergeable": {
179 183 "status": "<bool>",
180 184 "message: "<message>",
181 185 },
182 186 "source": {
183 187 "clone_url": "<clone_url>",
184 188 "reference":
185 189 {
186 190 "name": "<name>",
187 191 "type": "<type>",
188 192 "commit_id": "<commit_id>",
189 193 }
190 194 },
191 195 "target": {
192 196 "clone_url": "<clone_url>",
193 197 "reference":
194 198 {
195 199 "name": "<name>",
196 200 "type": "<type>",
197 201 "commit_id": "<commit_id>",
198 202 }
199 203 },
200 204 "merge": {
201 205 "clone_url": "<clone_url>",
202 206 "reference":
203 207 {
204 208 "name": "<name>",
205 209 "type": "<type>",
206 210 "commit_id": "<commit_id>",
207 211 }
208 212 },
209 213 "author": <user_obj>,
210 214 "reviewers": [
211 215 ...
212 216 {
213 217 "user": "<user_obj>",
214 218 "review_status": "<review_status>",
215 219 }
216 220 ...
217 221 ]
218 222 }
219 223 ...
220 224 ],
221 225 "error": null
222 226
223 227 """
224 228 repo = get_repo_or_error(repoid)
225 229 if not has_superadmin_permission(apiuser):
226 230 _perms = (
227 231 'repository.admin', 'repository.write', 'repository.read',)
228 232 validate_repo_permissions(apiuser, repoid, repo, _perms)
229 233
230 234 status = Optional.extract(status)
231 235 pull_requests = PullRequestModel().get_all(repo, statuses=[status])
232 236 data = [pr.get_api_data() for pr in pull_requests]
233 237 return data
234 238
235 239
236 240 @jsonrpc_method()
237 241 def merge_pull_request(
238 242 request, apiuser, pullrequestid, repoid=Optional(None),
239 243 userid=Optional(OAttr('apiuser'))):
240 244 """
241 245 Merge the pull request specified by `pullrequestid` into its target
242 246 repository.
243 247
244 248 :param apiuser: This is filled automatically from the |authtoken|.
245 249 :type apiuser: AuthUser
246 250 :param repoid: Optional, repository name or repository ID of the
247 251 target repository to which the |pr| is to be merged.
248 252 :type repoid: str or int
249 253 :param pullrequestid: ID of the pull request which shall be merged.
250 254 :type pullrequestid: int
251 255 :param userid: Merge the pull request as this user.
252 256 :type userid: Optional(str or int)
253 257
254 258 Example output:
255 259
256 260 .. code-block:: bash
257 261
258 262 "id": <id_given_in_input>,
259 263 "result": {
260 264 "executed": "<bool>",
261 265 "failure_reason": "<int>",
262 266 "merge_commit_id": "<merge_commit_id>",
263 267 "possible": "<bool>",
264 268 "merge_ref": {
265 269 "commit_id": "<commit_id>",
266 270 "type": "<type>",
267 271 "name": "<name>"
268 272 }
269 273 },
270 274 "error": null
271 275 """
272 276 pull_request = get_pull_request_or_error(pullrequestid)
273 277 if Optional.extract(repoid):
274 278 repo = get_repo_or_error(repoid)
275 279 else:
276 280 repo = pull_request.target_repo
277 281
278 282 if not isinstance(userid, Optional):
279 283 if (has_superadmin_permission(apiuser) or
280 284 HasRepoPermissionAnyApi('repository.admin')(
281 285 user=apiuser, repo_name=repo.repo_name)):
282 286 apiuser = get_user_or_error(userid)
283 287 else:
284 288 raise JSONRPCError('userid is not the same as your user')
285 289
286 check = MergeCheck.validate(
287 pull_request, auth_user=apiuser, translator=request.translate)
290 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
291 raise JSONRPCError(
292 'Operation forbidden because pull request is in state {}, '
293 'only state {} is allowed.'.format(
294 pull_request.pull_request_state, PullRequest.STATE_CREATED))
295
296 with pull_request.set_state(PullRequest.STATE_UPDATING):
297 check = MergeCheck.validate(
298 pull_request, auth_user=apiuser,
299 translator=request.translate)
288 300 merge_possible = not check.failed
289 301
290 302 if not merge_possible:
291 303 error_messages = []
292 304 for err_type, error_msg in check.errors:
293 305 error_msg = request.translate(error_msg)
294 306 error_messages.append(error_msg)
295 307
296 308 reasons = ','.join(error_messages)
297 309 raise JSONRPCError(
298 310 'merge not possible for following reasons: {}'.format(reasons))
299 311
300 312 target_repo = pull_request.target_repo
301 313 extras = vcs_operation_context(
302 314 request.environ, repo_name=target_repo.repo_name,
303 315 username=apiuser.username, action='push',
304 316 scm=target_repo.repo_type)
305 merge_response = PullRequestModel().merge_repo(
306 pull_request, apiuser, extras=extras)
317 with pull_request.set_state(PullRequest.STATE_UPDATING):
318 merge_response = PullRequestModel().merge_repo(
319 pull_request, apiuser, extras=extras)
307 320 if merge_response.executed:
308 321 PullRequestModel().close_pull_request(
309 322 pull_request.pull_request_id, apiuser)
310 323
311 324 Session().commit()
312 325
313 326 # In previous versions the merge response directly contained the merge
314 327 # commit id. It is now contained in the merge reference object. To be
315 328 # backwards compatible we have to extract it again.
316 329 merge_response = merge_response.asdict()
317 330 merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id
318 331
319 332 return merge_response
320 333
321 334
322 335 @jsonrpc_method()
323 336 def get_pull_request_comments(
324 337 request, apiuser, pullrequestid, repoid=Optional(None)):
325 338 """
326 339 Get all comments of pull request specified with the `pullrequestid`
327 340
328 341 :param apiuser: This is filled automatically from the |authtoken|.
329 342 :type apiuser: AuthUser
330 343 :param repoid: Optional repository name or repository ID.
331 344 :type repoid: str or int
332 345 :param pullrequestid: The pull request ID.
333 346 :type pullrequestid: int
334 347
335 348 Example output:
336 349
337 350 .. code-block:: bash
338 351
339 352 id : <id_given_in_input>
340 353 result : [
341 354 {
342 355 "comment_author": {
343 356 "active": true,
344 357 "full_name_or_username": "Tom Gore",
345 358 "username": "admin"
346 359 },
347 360 "comment_created_on": "2017-01-02T18:43:45.533",
348 361 "comment_f_path": null,
349 362 "comment_id": 25,
350 363 "comment_lineno": null,
351 364 "comment_status": {
352 365 "status": "under_review",
353 366 "status_lbl": "Under Review"
354 367 },
355 368 "comment_text": "Example text",
356 369 "comment_type": null,
357 370 "pull_request_version": null
358 371 }
359 372 ],
360 373 error : null
361 374 """
362 375
363 376 pull_request = get_pull_request_or_error(pullrequestid)
364 377 if Optional.extract(repoid):
365 378 repo = get_repo_or_error(repoid)
366 379 else:
367 380 repo = pull_request.target_repo
368 381
369 382 if not PullRequestModel().check_user_read(
370 383 pull_request, apiuser, api=True):
371 384 raise JSONRPCError('repository `%s` or pull request `%s` '
372 385 'does not exist' % (repoid, pullrequestid))
373 386
374 387 (pull_request_latest,
375 388 pull_request_at_ver,
376 389 pull_request_display_obj,
377 390 at_version) = PullRequestModel().get_pr_version(
378 391 pull_request.pull_request_id, version=None)
379 392
380 393 versions = pull_request_display_obj.versions()
381 394 ver_map = {
382 395 ver.pull_request_version_id: cnt
383 396 for cnt, ver in enumerate(versions, 1)
384 397 }
385 398
386 399 # GENERAL COMMENTS with versions #
387 400 q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
388 401 q = q.order_by(ChangesetComment.comment_id.asc())
389 402 general_comments = q.all()
390 403
391 404 # INLINE COMMENTS with versions #
392 405 q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
393 406 q = q.order_by(ChangesetComment.comment_id.asc())
394 407 inline_comments = q.all()
395 408
396 409 data = []
397 410 for comment in inline_comments + general_comments:
398 411 full_data = comment.get_api_data()
399 412 pr_version_id = None
400 413 if comment.pull_request_version_id:
401 414 pr_version_id = 'v{}'.format(
402 415 ver_map[comment.pull_request_version_id])
403 416
404 417 # sanitize some entries
405 418
406 419 full_data['pull_request_version'] = pr_version_id
407 420 full_data['comment_author'] = {
408 421 'username': full_data['comment_author'].username,
409 422 'full_name_or_username': full_data['comment_author'].full_name_or_username,
410 423 'active': full_data['comment_author'].active,
411 424 }
412 425
413 426 if full_data['comment_status']:
414 427 full_data['comment_status'] = {
415 428 'status': full_data['comment_status'][0].status,
416 429 'status_lbl': full_data['comment_status'][0].status_lbl,
417 430 }
418 431 else:
419 432 full_data['comment_status'] = {}
420 433
421 434 data.append(full_data)
422 435 return data
423 436
424 437
425 438 @jsonrpc_method()
426 439 def comment_pull_request(
427 440 request, apiuser, pullrequestid, repoid=Optional(None),
428 441 message=Optional(None), commit_id=Optional(None), status=Optional(None),
429 442 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
430 443 resolves_comment_id=Optional(None),
431 444 userid=Optional(OAttr('apiuser'))):
432 445 """
433 446 Comment on the pull request specified with the `pullrequestid`,
434 447 in the |repo| specified by the `repoid`, and optionally change the
435 448 review status.
436 449
437 450 :param apiuser: This is filled automatically from the |authtoken|.
438 451 :type apiuser: AuthUser
439 452 :param repoid: Optional repository name or repository ID.
440 453 :type repoid: str or int
441 454 :param pullrequestid: The pull request ID.
442 455 :type pullrequestid: int
443 456 :param commit_id: Specify the commit_id for which to set a comment. If
444 457 the given commit_id is different from the latest commit in the PR,
445 458 the status change won't be performed.
446 459 :type commit_id: str
447 460 :param message: The text content of the comment.
448 461 :type message: str
449 462 :param status: (**Optional**) Set the approval status of the pull
450 463 request. One of: 'not_reviewed', 'approved', 'rejected',
451 464 'under_review'
452 465 :type status: str
453 466 :param comment_type: Comment type, one of: 'note', 'todo'
454 467 :type comment_type: Optional(str), default: 'note'
455 468 :param userid: Comment on the pull request as this user
456 469 :type userid: Optional(str or int)
457 470
458 471 Example output:
459 472
460 473 .. code-block:: bash
461 474
462 475 id : <id_given_in_input>
463 476 result : {
464 477 "pull_request_id": "<Integer>",
465 478 "comment_id": "<Integer>",
466 479 "status": {"given": <given_status>,
467 480 "was_changed": <bool status_was_actually_changed> },
468 481 },
469 482 error : null
470 483 """
471 484 pull_request = get_pull_request_or_error(pullrequestid)
472 485 if Optional.extract(repoid):
473 486 repo = get_repo_or_error(repoid)
474 487 else:
475 488 repo = pull_request.target_repo
476 489
477 490 if not isinstance(userid, Optional):
478 491 if (has_superadmin_permission(apiuser) or
479 492 HasRepoPermissionAnyApi('repository.admin')(
480 493 user=apiuser, repo_name=repo.repo_name)):
481 494 apiuser = get_user_or_error(userid)
482 495 else:
483 496 raise JSONRPCError('userid is not the same as your user')
484 497
485 498 if not PullRequestModel().check_user_read(
486 499 pull_request, apiuser, api=True):
487 500 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
488 501 message = Optional.extract(message)
489 502 status = Optional.extract(status)
490 503 commit_id = Optional.extract(commit_id)
491 504 comment_type = Optional.extract(comment_type)
492 505 resolves_comment_id = Optional.extract(resolves_comment_id)
493 506
494 507 if not message and not status:
495 508 raise JSONRPCError(
496 509 'Both message and status parameters are missing. '
497 510 'At least one is required.')
498 511
499 512 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
500 513 status is not None):
501 514 raise JSONRPCError('Unknown comment status: `%s`' % status)
502 515
503 516 if commit_id and commit_id not in pull_request.revisions:
504 517 raise JSONRPCError(
505 518 'Invalid commit_id `%s` for this pull request.' % commit_id)
506 519
507 520 allowed_to_change_status = PullRequestModel().check_user_change_status(
508 521 pull_request, apiuser)
509 522
510 523 # if commit_id is passed, re-validate if the user is allowed to change status
511 524 # based on latest commit_id from the PR
512 525 if commit_id:
513 526 commit_idx = pull_request.revisions.index(commit_id)
514 527 if commit_idx != 0:
515 528 allowed_to_change_status = False
516 529
517 530 if resolves_comment_id:
518 531 comment = ChangesetComment.get(resolves_comment_id)
519 532 if not comment:
520 533 raise JSONRPCError(
521 534 'Invalid resolves_comment_id `%s` for this pull request.'
522 535 % resolves_comment_id)
523 536 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
524 537 raise JSONRPCError(
525 538 'Comment `%s` is wrong type for setting status to resolved.'
526 539 % resolves_comment_id)
527 540
528 541 text = message
529 542 status_label = ChangesetStatus.get_status_lbl(status)
530 543 if status and allowed_to_change_status:
531 544 st_message = ('Status change %(transition_icon)s %(status)s'
532 545 % {'transition_icon': '>', 'status': status_label})
533 546 text = message or st_message
534 547
535 548 rc_config = SettingsModel().get_all_settings()
536 549 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
537 550
538 551 status_change = status and allowed_to_change_status
539 552 comment = CommentsModel().create(
540 553 text=text,
541 554 repo=pull_request.target_repo.repo_id,
542 555 user=apiuser.user_id,
543 556 pull_request=pull_request.pull_request_id,
544 557 f_path=None,
545 558 line_no=None,
546 559 status_change=(status_label if status_change else None),
547 560 status_change_type=(status if status_change else None),
548 561 closing_pr=False,
549 562 renderer=renderer,
550 563 comment_type=comment_type,
551 564 resolves_comment_id=resolves_comment_id,
552 565 auth_user=apiuser
553 566 )
554 567
555 568 if allowed_to_change_status and status:
556 569 ChangesetStatusModel().set_status(
557 570 pull_request.target_repo.repo_id,
558 571 status,
559 572 apiuser.user_id,
560 573 comment,
561 574 pull_request=pull_request.pull_request_id
562 575 )
563 576 Session().flush()
564 577
565 578 Session().commit()
566 579 data = {
567 580 'pull_request_id': pull_request.pull_request_id,
568 581 'comment_id': comment.comment_id if comment else None,
569 582 'status': {'given': status, 'was_changed': status_change},
570 583 }
571 584 return data
572 585
573 586
574 587 @jsonrpc_method()
575 588 def create_pull_request(
576 589 request, apiuser, source_repo, target_repo, source_ref, target_ref,
577 590 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
578 591 reviewers=Optional(None)):
579 592 """
580 593 Creates a new pull request.
581 594
582 595 Accepts refs in the following formats:
583 596
584 597 * branch:<branch_name>:<sha>
585 598 * branch:<branch_name>
586 599 * bookmark:<bookmark_name>:<sha> (Mercurial only)
587 600 * bookmark:<bookmark_name> (Mercurial only)
588 601
589 602 :param apiuser: This is filled automatically from the |authtoken|.
590 603 :type apiuser: AuthUser
591 604 :param source_repo: Set the source repository name.
592 605 :type source_repo: str
593 606 :param target_repo: Set the target repository name.
594 607 :type target_repo: str
595 608 :param source_ref: Set the source ref name.
596 609 :type source_ref: str
597 610 :param target_ref: Set the target ref name.
598 611 :type target_ref: str
599 612 :param title: Optionally set the pull request title; it's generated otherwise
600 613 :type title: str
601 614 :param description: Set the pull request description.
602 615 :type description: Optional(str)
603 616 :type description_renderer: Optional(str)
604 617 :param description_renderer: Set pull request renderer for the description.
605 618 It should be 'rst', 'markdown' or 'plain'. If not given, the default
606 619 system renderer will be used.
607 620 :param reviewers: Set the new pull request reviewers list.
608 621 Reviewers defined by review rules will be added automatically to the
609 622 defined list.
610 623 :type reviewers: Optional(list)
611 624 Accepts username strings or objects of the format:
612 625
613 626 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
614 627 """
615 628
616 629 source_db_repo = get_repo_or_error(source_repo)
617 630 target_db_repo = get_repo_or_error(target_repo)
618 631 if not has_superadmin_permission(apiuser):
619 632 _perms = ('repository.admin', 'repository.write', 'repository.read',)
620 633 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
621 634
622 635 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
623 636 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
624 637
625 638 source_scm = source_db_repo.scm_instance()
626 639 target_scm = target_db_repo.scm_instance()
627 640
628 641 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
629 642 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
630 643
631 644 ancestor = source_scm.get_common_ancestor(
632 645 source_commit.raw_id, target_commit.raw_id, target_scm)
633 646 if not ancestor:
634 647 raise JSONRPCError('no common ancestor found')
635 648
636 649 # recalculate target ref based on ancestor
637 650 target_ref_type, target_ref_name, __ = full_target_ref.split(':')
638 651 full_target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
639 652
640 653 commit_ranges = target_scm.compare(
641 654 target_commit.raw_id, source_commit.raw_id, source_scm,
642 655 merge=True, pre_load=[])
643 656
644 657 if not commit_ranges:
645 658 raise JSONRPCError('no commits found')
646 659
647 660 reviewer_objects = Optional.extract(reviewers) or []
648 661
649 662 # serialize and validate passed in given reviewers
650 663 if reviewer_objects:
651 664 schema = ReviewerListSchema()
652 665 try:
653 666 reviewer_objects = schema.deserialize(reviewer_objects)
654 667 except Invalid as err:
655 668 raise JSONRPCValidationError(colander_exc=err)
656 669
657 670 # validate users
658 671 for reviewer_object in reviewer_objects:
659 672 user = get_user_or_error(reviewer_object['username'])
660 673 reviewer_object['user_id'] = user.user_id
661 674
662 675 get_default_reviewers_data, validate_default_reviewers = \
663 676 PullRequestModel().get_reviewer_functions()
664 677
665 678 # recalculate reviewers logic, to make sure we can validate this
666 679 reviewer_rules = get_default_reviewers_data(
667 680 apiuser.get_instance(), source_db_repo,
668 681 source_commit, target_db_repo, target_commit)
669 682
670 683 # now MERGE our given with the calculated
671 684 reviewer_objects = reviewer_rules['reviewers'] + reviewer_objects
672 685
673 686 try:
674 687 reviewers = validate_default_reviewers(
675 688 reviewer_objects, reviewer_rules)
676 689 except ValueError as e:
677 690 raise JSONRPCError('Reviewers Validation: {}'.format(e))
678 691
679 692 title = Optional.extract(title)
680 693 if not title:
681 694 title_source_ref = source_ref.split(':', 2)[1]
682 695 title = PullRequestModel().generate_pullrequest_title(
683 696 source=source_repo,
684 697 source_ref=title_source_ref,
685 698 target=target_repo
686 699 )
687 700 # fetch renderer; if none is set, fall back to the system default ('plain') for PRs
688 701 rc_config = SettingsModel().get_all_settings()
689 702 default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain')
690 703 description = Optional.extract(description)
691 704 description_renderer = Optional.extract(description_renderer) or default_system_renderer
692 705
693 706 pull_request = PullRequestModel().create(
694 707 created_by=apiuser.user_id,
695 708 source_repo=source_repo,
696 709 source_ref=full_source_ref,
697 710 target_repo=target_repo,
698 711 target_ref=full_target_ref,
699 712 revisions=[commit.raw_id for commit in reversed(commit_ranges)],
700 713 reviewers=reviewers,
701 714 title=title,
702 715 description=description,
703 716 description_renderer=description_renderer,
704 717 reviewer_data=reviewer_rules,
705 718 auth_user=apiuser
706 719 )
707 720
708 721 Session().commit()
709 722 data = {
710 723 'msg': 'Created new pull request `{}`'.format(title),
711 724 'pull_request_id': pull_request.pull_request_id,
712 725 }
713 726 return data
714 727
715 728
716 729 @jsonrpc_method()
717 730 def update_pull_request(
718 731 request, apiuser, pullrequestid, repoid=Optional(None),
719 732 title=Optional(''), description=Optional(''), description_renderer=Optional(''),
720 733 reviewers=Optional(None), update_commits=Optional(None)):
721 734 """
722 735 Updates a pull request.
723 736
724 737 :param apiuser: This is filled automatically from the |authtoken|.
725 738 :type apiuser: AuthUser
726 739 :param repoid: Optional repository name or repository ID.
727 740 :type repoid: str or int
728 741 :param pullrequestid: The pull request ID.
729 742 :type pullrequestid: int
730 743 :param title: Set the pull request title.
731 744 :type title: str
732 745 :param description: Update pull request description.
733 746 :type description: Optional(str)
734 747 :type description_renderer: Optional(str)
735 748 :param description_renderer: Update pull request renderer for the description.
736 749 It should be 'rst', 'markdown' or 'plain'
737 750 :param reviewers: Update pull request reviewers list with new value.
738 751 :type reviewers: Optional(list)
739 752 Accepts username strings or objects of the format:
740 753
741 754 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
742 755
743 756 :param update_commits: Trigger update of commits for this pull request
744 757 :type update_commits: Optional(bool)
745 758
746 759 Example output:
747 760
748 761 .. code-block:: bash
749 762
750 763 id : <id_given_in_input>
751 764 result : {
752 765 "msg": "Updated pull request `63`",
753 766 "pull_request": <pull_request_object>,
754 767 "updated_reviewers": {
755 768 "added": [
756 769 "username"
757 770 ],
758 771 "removed": []
759 772 },
760 773 "updated_commits": {
761 774 "added": [
762 775 "<sha1_hash>"
763 776 ],
764 777 "common": [
765 778 "<sha1_hash>",
766 779 "<sha1_hash>",
767 780 ],
768 781 "removed": []
769 782 }
770 783 }
771 784 error : null
772 785 """
773 786
774 787 pull_request = get_pull_request_or_error(pullrequestid)
775 788 if Optional.extract(repoid):
776 789 repo = get_repo_or_error(repoid)
777 790 else:
778 791 repo = pull_request.target_repo
779 792
780 793 if not PullRequestModel().check_user_update(
781 794 pull_request, apiuser, api=True):
782 795 raise JSONRPCError(
783 796 'pull request `%s` update failed, no permission to update.' % (
784 797 pullrequestid,))
785 798 if pull_request.is_closed():
786 799 raise JSONRPCError(
787 800 'pull request `%s` update failed, pull request is closed' % (
788 801 pullrequestid,))
789 802
790 803 reviewer_objects = Optional.extract(reviewers) or []
791 804
792 805 if reviewer_objects:
793 806 schema = ReviewerListSchema()
794 807 try:
795 808 reviewer_objects = schema.deserialize(reviewer_objects)
796 809 except Invalid as err:
797 810 raise JSONRPCValidationError(colander_exc=err)
798 811
799 812 # validate users
800 813 for reviewer_object in reviewer_objects:
801 814 user = get_user_or_error(reviewer_object['username'])
802 815 reviewer_object['user_id'] = user.user_id
803 816
804 817 get_default_reviewers_data, get_validated_reviewers = \
805 818 PullRequestModel().get_reviewer_functions()
806 819
807 820 # re-use stored rules
808 821 reviewer_rules = pull_request.reviewer_data
809 822 try:
810 823 reviewers = get_validated_reviewers(
811 824 reviewer_objects, reviewer_rules)
812 825 except ValueError as e:
813 826 raise JSONRPCError('Reviewers Validation: {}'.format(e))
814 827 else:
815 828 reviewers = []
816 829
817 830 title = Optional.extract(title)
818 831 description = Optional.extract(description)
819 832 description_renderer = Optional.extract(description_renderer)
820 833
821 834 if title or description:
822 835 PullRequestModel().edit(
823 836 pull_request,
824 837 title or pull_request.title,
825 838 description or pull_request.description,
826 839 description_renderer or pull_request.description_renderer,
827 840 apiuser)
828 841 Session().commit()
829 842
830 843 commit_changes = {"added": [], "common": [], "removed": []}
831 844 if str2bool(Optional.extract(update_commits)):
832 if PullRequestModel().has_valid_update_type(pull_request):
833 update_response = PullRequestModel().update_commits(
834 pull_request)
835 commit_changes = update_response.changes or commit_changes
836 Session().commit()
845
846 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
847 raise JSONRPCError(
848 'Operation forbidden because pull request is in state {}, '
849 'only state {} is allowed.'.format(
850 pull_request.pull_request_state, PullRequest.STATE_CREATED))
851
852 with pull_request.set_state(PullRequest.STATE_UPDATING):
853 if PullRequestModel().has_valid_update_type(pull_request):
854 update_response = PullRequestModel().update_commits(pull_request)
855 commit_changes = update_response.changes or commit_changes
856 Session().commit()
837 857
838 858 reviewers_changes = {"added": [], "removed": []}
839 859 if reviewers:
840 860 added_reviewers, removed_reviewers = \
841 861 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
842 862
843 863 reviewers_changes['added'] = sorted(
844 864 [get_user_or_error(n).username for n in added_reviewers])
845 865 reviewers_changes['removed'] = sorted(
846 866 [get_user_or_error(n).username for n in removed_reviewers])
847 867 Session().commit()
848 868
849 869 data = {
850 870 'msg': 'Updated pull request `{}`'.format(
851 871 pull_request.pull_request_id),
852 872 'pull_request': pull_request.get_api_data(),
853 873 'updated_commits': commit_changes,
854 874 'updated_reviewers': reviewers_changes
855 875 }
856 876
857 877 return data
858 878
859 879
860 880 @jsonrpc_method()
861 881 def close_pull_request(
862 882 request, apiuser, pullrequestid, repoid=Optional(None),
863 883 userid=Optional(OAttr('apiuser')), message=Optional('')):
864 884 """
865 885 Close the pull request specified by `pullrequestid`.
866 886
867 887 :param apiuser: This is filled automatically from the |authtoken|.
868 888 :type apiuser: AuthUser
869 889 :param repoid: Repository name or repository ID to which the pull
870 890 request belongs.
871 891 :type repoid: str or int
872 892 :param pullrequestid: ID of the pull request to be closed.
873 893 :type pullrequestid: int
874 894 :param userid: Close the pull request as this user.
875 895 :type userid: Optional(str or int)
876 896 :param message: Optional message to close the Pull Request with. If not
877 897 specified it will be generated automatically.
878 898 :type message: Optional(str)
879 899
880 900 Example output:
881 901
882 902 .. code-block:: bash
883 903
884 904 "id": <id_given_in_input>,
885 905 "result": {
886 906 "pull_request_id": "<int>",
887 907 "close_status": "<str:status_lbl>,
888 908 "closed": "<bool>"
889 909 },
890 910 "error": null
891 911
892 912 """
893 913 _ = request.translate
894 914
895 915 pull_request = get_pull_request_or_error(pullrequestid)
896 916 if Optional.extract(repoid):
897 917 repo = get_repo_or_error(repoid)
898 918 else:
899 919 repo = pull_request.target_repo
900 920
901 921 if not isinstance(userid, Optional):
902 922 if (has_superadmin_permission(apiuser) or
903 923 HasRepoPermissionAnyApi('repository.admin')(
904 924 user=apiuser, repo_name=repo.repo_name)):
905 925 apiuser = get_user_or_error(userid)
906 926 else:
907 927 raise JSONRPCError('userid is not the same as your user')
908 928
909 929 if pull_request.is_closed():
910 930 raise JSONRPCError(
911 931 'pull request `%s` is already closed' % (pullrequestid,))
912 932
913 933 # only owner or admin or person with write permissions
914 934 allowed_to_close = PullRequestModel().check_user_update(
915 935 pull_request, apiuser, api=True)
916 936
917 937 if not allowed_to_close:
918 938 raise JSONRPCError(
919 939 'pull request `%s` close failed, no permission to close.' % (
920 940 pullrequestid,))
921 941
922 942 # message we're using to close the PR, else it's automatically generated
923 943 message = Optional.extract(message)
924 944
925 945 # finally close the PR, with proper message comment
926 946 comment, status = PullRequestModel().close_pull_request_with_comment(
927 947 pull_request, apiuser, repo, message=message, auth_user=apiuser)
928 948 status_lbl = ChangesetStatus.get_status_lbl(status)
929 949
930 950 Session().commit()
931 951
932 952 data = {
933 953 'pull_request_id': pull_request.pull_request_id,
934 954 'close_status': status_lbl,
935 955 'closed': True,
936 956 }
937 957 return data
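
Both merge_pull_request and update_pull_request above wrap their long-running work in "with pull_request.set_state(PullRequest.STATE_UPDATING):". The implementation of set_state is not part of this hunk, but the usage implies a context manager that flips the row into a transient state for the duration of the operation and puts it back afterwards. A hypothetical sketch of such a helper (the restore-on-exit behaviour is an assumption, not the shipped implementation):

from contextlib import contextmanager

@contextmanager
def set_state(pull_request, state):
    # Hypothetical: mark the PR as busy so concurrent merge/update API
    # calls hit the state guard above, then restore the previous state.
    old_state = pull_request.pull_request_state
    pull_request.pull_request_state = state
    try:
        yield
    finally:
        pull_request.pull_request_state = old_state
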
@@ -1,1233 +1,1216 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35
36 36
37 37 def route_path(name, params=None, **kwargs):
38 38 import urllib
39 39
40 40 base_url = {
41 41 'repo_changelog': '/{repo_name}/changelog',
42 42 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 43 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
44 44 'pullrequest_show_all': '/{repo_name}/pull-request',
45 45 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
46 46 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
47 47 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
48 48 'pullrequest_new': '/{repo_name}/pull-request/new',
49 49 'pullrequest_create': '/{repo_name}/pull-request/create',
50 50 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
51 51 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
52 52 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
53 53 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
54 54 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
55 55 }[name].format(**kwargs)
56 56
57 57 if params:
58 58 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
59 59 return base_url
60 60
61 61
62 62 @pytest.mark.usefixtures('app', 'autologin_user')
63 63 @pytest.mark.backends("git", "hg")
64 64 class TestPullrequestsView(object):
65 65
66 66 def test_index(self, backend):
67 67 self.app.get(route_path(
68 68 'pullrequest_new',
69 69 repo_name=backend.repo_name))
70 70
71 71 def test_option_menu_create_pull_request_exists(self, backend):
72 72 repo_name = backend.repo_name
73 73 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
74 74
75 75 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
76 76 'pullrequest_new', repo_name=repo_name)
77 77 response.mustcontain(create_pr_link)
78 78
79 79 def test_create_pr_form_with_raw_commit_id(self, backend):
80 80 repo = backend.repo
81 81
82 82 self.app.get(
83 83 route_path('pullrequest_new', repo_name=repo.repo_name,
84 84 commit=repo.get_commit().raw_id),
85 85 status=200)
86 86
87 87 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
88 88 @pytest.mark.parametrize('range_diff', ["0", "1"])
89 89 def test_show(self, pr_util, pr_merge_enabled, range_diff):
90 90 pull_request = pr_util.create_pull_request(
91 91 mergeable=pr_merge_enabled, enable_notifications=False)
92 92
93 93 response = self.app.get(route_path(
94 94 'pullrequest_show',
95 95 repo_name=pull_request.target_repo.scm_instance().name,
96 96 pull_request_id=pull_request.pull_request_id,
97 97 params={'range-diff': range_diff}))
98 98
99 99 for commit_id in pull_request.revisions:
100 100 response.mustcontain(commit_id)
101 101
102 102 assert pull_request.target_ref_parts.type in response
103 103 assert pull_request.target_ref_parts.name in response
104 104 target_clone_url = pull_request.target_repo.clone_url()
105 105 assert target_clone_url in response
106 106
107 107 assert 'class="pull-request-merge"' in response
108 108 if pr_merge_enabled:
109 109 response.mustcontain('Pull request reviewer approval is pending')
110 110 else:
111 111 response.mustcontain('Server-side pull request merging is disabled.')
112 112
113 113 if range_diff == "1":
114 114 response.mustcontain('Turn off: Show the diff as commit range')
115 115
116 116 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
117 117 # Logout
118 118 response = self.app.post(
119 119 h.route_path('logout'),
120 120 params={'csrf_token': csrf_token})
121 121 # Login as regular user
122 122 response = self.app.post(h.route_path('login'),
123 123 {'username': TEST_USER_REGULAR_LOGIN,
124 124 'password': 'test12'})
125 125
126 126 pull_request = pr_util.create_pull_request(
127 127 author=TEST_USER_REGULAR_LOGIN)
128 128
129 129 response = self.app.get(route_path(
130 130 'pullrequest_show',
131 131 repo_name=pull_request.target_repo.scm_instance().name,
132 132 pull_request_id=pull_request.pull_request_id))
133 133
134 134 response.mustcontain('Server-side pull request merging is disabled.')
135 135
136 136 assert_response = response.assert_response()
137 137 # for regular user without a merge permissions, we don't see it
138 138 assert_response.no_element_exists('#close-pull-request-action')
139 139
140 140 user_util.grant_user_permission_to_repo(
141 141 pull_request.target_repo,
142 142 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
143 143 'repository.write')
144 144 response = self.app.get(route_path(
145 145 'pullrequest_show',
146 146 repo_name=pull_request.target_repo.scm_instance().name,
147 147 pull_request_id=pull_request.pull_request_id))
148 148
149 149 response.mustcontain('Server-side pull request merging is disabled.')
150 150
151 151 assert_response = response.assert_response()
152 152 # now regular user has a merge permissions, we have CLOSE button
153 153 assert_response.one_element_exists('#close-pull-request-action')
154 154
155 155 def test_show_invalid_commit_id(self, pr_util):
156 156 # Simulating invalid revisions which will cause a lookup error
157 157 pull_request = pr_util.create_pull_request()
158 158 pull_request.revisions = ['invalid']
159 159 Session().add(pull_request)
160 160 Session().commit()
161 161
162 162 response = self.app.get(route_path(
163 163 'pullrequest_show',
164 164 repo_name=pull_request.target_repo.scm_instance().name,
165 165 pull_request_id=pull_request.pull_request_id))
166 166
167 167 for commit_id in pull_request.revisions:
168 168 response.mustcontain(commit_id)
169 169
170 170 def test_show_invalid_source_reference(self, pr_util):
171 171 pull_request = pr_util.create_pull_request()
172 172 pull_request.source_ref = 'branch:b:invalid'
173 173 Session().add(pull_request)
174 174 Session().commit()
175 175
176 176 self.app.get(route_path(
177 177 'pullrequest_show',
178 178 repo_name=pull_request.target_repo.scm_instance().name,
179 179 pull_request_id=pull_request.pull_request_id))
180 180
181 181 def test_edit_title_description(self, pr_util, csrf_token):
182 182 pull_request = pr_util.create_pull_request()
183 183 pull_request_id = pull_request.pull_request_id
184 184
185 185 response = self.app.post(
186 186 route_path('pullrequest_update',
187 187 repo_name=pull_request.target_repo.repo_name,
188 188 pull_request_id=pull_request_id),
189 189 params={
190 190 'edit_pull_request': 'true',
191 191 'title': 'New title',
192 192 'description': 'New description',
193 193 'csrf_token': csrf_token})
194 194
195 195 assert_session_flash(
196 196 response, u'Pull request title & description updated.',
197 197 category='success')
198 198
199 199 pull_request = PullRequest.get(pull_request_id)
200 200 assert pull_request.title == 'New title'
201 201 assert pull_request.description == 'New description'
202 202
203 203 def test_edit_title_description_closed(self, pr_util, csrf_token):
204 204 pull_request = pr_util.create_pull_request()
205 205 pull_request_id = pull_request.pull_request_id
206 206 repo_name = pull_request.target_repo.repo_name
207 207 pr_util.close()
208 208
209 209 response = self.app.post(
210 210 route_path('pullrequest_update',
211 211 repo_name=repo_name, pull_request_id=pull_request_id),
212 212 params={
213 213 'edit_pull_request': 'true',
214 214 'title': 'New title',
215 215 'description': 'New description',
216 216 'csrf_token': csrf_token}, status=200)
217 217 assert_session_flash(
218 218 response, u'Cannot update closed pull requests.',
219 219 category='error')
220 220
221 221 def test_update_invalid_source_reference(self, pr_util, csrf_token):
222 222 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
223 223
224 224 pull_request = pr_util.create_pull_request()
225 225 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
226 226 Session().add(pull_request)
227 227 Session().commit()
228 228
229 229 pull_request_id = pull_request.pull_request_id
230 230
231 231 response = self.app.post(
232 232 route_path('pullrequest_update',
233 233 repo_name=pull_request.target_repo.repo_name,
234 234 pull_request_id=pull_request_id),
235 235 params={'update_commits': 'true', 'csrf_token': csrf_token})
236 236
237 237 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
238 238 UpdateFailureReason.MISSING_SOURCE_REF])
239 239 assert_session_flash(response, expected_msg, category='error')
240 240
241 241 def test_missing_target_reference(self, pr_util, csrf_token):
242 242 from rhodecode.lib.vcs.backends.base import MergeFailureReason
243 243 pull_request = pr_util.create_pull_request(
244 244 approved=True, mergeable=True)
245 245 unicode_reference = u'branch:invalid-branch:invalid-commit-id'
246 246 pull_request.target_ref = unicode_reference
247 247 Session().add(pull_request)
248 248 Session().commit()
249 249
250 250 pull_request_id = pull_request.pull_request_id
251 251 pull_request_url = route_path(
252 252 'pullrequest_show',
253 253 repo_name=pull_request.target_repo.repo_name,
254 254 pull_request_id=pull_request_id)
255 255
256 256 response = self.app.get(pull_request_url)
257 257 target_ref_id = 'invalid-branch'
258 258 merge_resp = MergeResponse(
259 259 True, True, '', MergeFailureReason.MISSING_TARGET_REF,
260 260 metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)})
261 261 response.assert_response().element_contains(
262 262 'span[data-role="merge-message"]', merge_resp.merge_status_message)
263 263
264 264 def test_comment_and_close_pull_request_custom_message_approved(
265 265 self, pr_util, csrf_token, xhr_header):
266 266
267 267 pull_request = pr_util.create_pull_request(approved=True)
268 268 pull_request_id = pull_request.pull_request_id
269 269 author = pull_request.user_id
270 270 repo = pull_request.target_repo.repo_id
271 271
272 272 self.app.post(
273 273 route_path('pullrequest_comment_create',
274 274 repo_name=pull_request.target_repo.scm_instance().name,
275 275 pull_request_id=pull_request_id),
276 276 params={
277 277 'close_pull_request': '1',
278 278 'text': 'Closing a PR',
279 279 'csrf_token': csrf_token},
280 280 extra_environ=xhr_header,)
281 281
282 282 journal = UserLog.query()\
283 283 .filter(UserLog.user_id == author)\
284 284 .filter(UserLog.repository_id == repo) \
285 285 .order_by('user_log_id') \
286 286 .all()
287 287 assert journal[-1].action == 'repo.pull_request.close'
288 288
289 289 pull_request = PullRequest.get(pull_request_id)
290 290 assert pull_request.is_closed()
291 291
292 292 status = ChangesetStatusModel().get_status(
293 293 pull_request.source_repo, pull_request=pull_request)
294 294 assert status == ChangesetStatus.STATUS_APPROVED
295 295 comments = ChangesetComment().query() \
296 296 .filter(ChangesetComment.pull_request == pull_request) \
297 297 .order_by(ChangesetComment.comment_id.asc())\
298 298 .all()
299 299 assert comments[-1].text == 'Closing a PR'
300 300
301 301 def test_comment_force_close_pull_request_rejected(
302 302 self, pr_util, csrf_token, xhr_header):
303 303 pull_request = pr_util.create_pull_request()
304 304 pull_request_id = pull_request.pull_request_id
305 305 PullRequestModel().update_reviewers(
306 306 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
307 307 pull_request.author)
308 308 author = pull_request.user_id
309 309 repo = pull_request.target_repo.repo_id
310 310
311 311 self.app.post(
312 312 route_path('pullrequest_comment_create',
313 313 repo_name=pull_request.target_repo.scm_instance().name,
314 314 pull_request_id=pull_request_id),
315 315 params={
316 316 'close_pull_request': '1',
317 317 'csrf_token': csrf_token},
318 318 extra_environ=xhr_header)
319 319
320 320 pull_request = PullRequest.get(pull_request_id)
321 321
322 322 journal = UserLog.query()\
323 323 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
324 324 .order_by('user_log_id') \
325 325 .all()
326 326 assert journal[-1].action == 'repo.pull_request.close'
327 327
328 328 # check only the latest status, not the review status
329 329 status = ChangesetStatusModel().get_status(
330 330 pull_request.source_repo, pull_request=pull_request)
331 331 assert status == ChangesetStatus.STATUS_REJECTED
332 332
333 333 def test_comment_and_close_pull_request(
334 334 self, pr_util, csrf_token, xhr_header):
335 335 pull_request = pr_util.create_pull_request()
336 336 pull_request_id = pull_request.pull_request_id
337 337
338 338 response = self.app.post(
339 339 route_path('pullrequest_comment_create',
340 340 repo_name=pull_request.target_repo.scm_instance().name,
341 341 pull_request_id=pull_request.pull_request_id),
342 342 params={
343 343 'close_pull_request': 'true',
344 344 'csrf_token': csrf_token},
345 345 extra_environ=xhr_header)
346 346
347 347 assert response.json
348 348
349 349 pull_request = PullRequest.get(pull_request_id)
350 350 assert pull_request.is_closed()
351 351
352 352 # check only the latest status, not the review status
353 353 status = ChangesetStatusModel().get_status(
354 354 pull_request.source_repo, pull_request=pull_request)
355 355 assert status == ChangesetStatus.STATUS_REJECTED
356 356
357 357 def test_create_pull_request(self, backend, csrf_token):
358 358 commits = [
359 359 {'message': 'ancestor'},
360 360 {'message': 'change'},
361 361 {'message': 'change2'},
362 362 ]
363 363 commit_ids = backend.create_master_repo(commits)
364 364 target = backend.create_repo(heads=['ancestor'])
365 365 source = backend.create_repo(heads=['change2'])
366 366
367 367 response = self.app.post(
368 368 route_path('pullrequest_create', repo_name=source.repo_name),
369 369 [
370 370 ('source_repo', source.repo_name),
371 371 ('source_ref', 'branch:default:' + commit_ids['change2']),
372 372 ('target_repo', target.repo_name),
373 373 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
374 374 ('common_ancestor', commit_ids['ancestor']),
375 375 ('pullrequest_title', 'Title'),
376 376 ('pullrequest_desc', 'Description'),
377 377 ('description_renderer', 'markdown'),
378 378 ('__start__', 'review_members:sequence'),
379 379 ('__start__', 'reviewer:mapping'),
380 380 ('user_id', '1'),
381 381 ('__start__', 'reasons:sequence'),
382 382 ('reason', 'Some reason'),
383 383 ('__end__', 'reasons:sequence'),
384 384 ('__start__', 'rules:sequence'),
385 385 ('__end__', 'rules:sequence'),
386 386 ('mandatory', 'False'),
387 387 ('__end__', 'reviewer:mapping'),
388 388 ('__end__', 'review_members:sequence'),
389 389 ('__start__', 'revisions:sequence'),
390 390 ('revisions', commit_ids['change']),
391 391 ('revisions', commit_ids['change2']),
392 392 ('__end__', 'revisions:sequence'),
393 393 ('user', ''),
394 394 ('csrf_token', csrf_token),
395 395 ],
396 396 status=302)
397 397
398 398 location = response.headers['Location']
399 399 pull_request_id = location.rsplit('/', 1)[1]
400 400 assert pull_request_id != 'new'
401 401 pull_request = PullRequest.get(int(pull_request_id))
402 402
403 403 # check that we now have both revisions
404 404 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
405 405 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
406 406 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
407 407 assert pull_request.target_ref == expected_target_ref
408 408
409 409 def test_reviewer_notifications(self, backend, csrf_token):
410 410 # We have to use app.post for this test so that the notifications
411 411 # are created properly along with the new PR
412 412 commits = [
413 413 {'message': 'ancestor',
414 414 'added': [FileNode('file_A', content='content_of_ancestor')]},
415 415 {'message': 'change',
416 416 'added': [FileNode('file_a', content='content_of_change')]},
417 417 {'message': 'change-child'},
418 418 {'message': 'ancestor-child', 'parents': ['ancestor'],
419 419 'added': [
420 420 FileNode('file_B', content='content_of_ancestor_child')]},
421 421 {'message': 'ancestor-child-2'},
422 422 ]
423 423 commit_ids = backend.create_master_repo(commits)
424 424 target = backend.create_repo(heads=['ancestor-child'])
425 425 source = backend.create_repo(heads=['change'])
426 426
427 427 response = self.app.post(
428 428 route_path('pullrequest_create', repo_name=source.repo_name),
429 429 [
430 430 ('source_repo', source.repo_name),
431 431 ('source_ref', 'branch:default:' + commit_ids['change']),
432 432 ('target_repo', target.repo_name),
433 433 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
434 434 ('common_ancestor', commit_ids['ancestor']),
435 435 ('pullrequest_title', 'Title'),
436 436 ('pullrequest_desc', 'Description'),
437 437 ('description_renderer', 'markdown'),
438 438 ('__start__', 'review_members:sequence'),
439 439 ('__start__', 'reviewer:mapping'),
440 440 ('user_id', '2'),
441 441 ('__start__', 'reasons:sequence'),
442 442 ('reason', 'Some reason'),
443 443 ('__end__', 'reasons:sequence'),
444 444 ('__start__', 'rules:sequence'),
445 445 ('__end__', 'rules:sequence'),
446 446 ('mandatory', 'False'),
447 447 ('__end__', 'reviewer:mapping'),
448 448 ('__end__', 'review_members:sequence'),
449 449 ('__start__', 'revisions:sequence'),
450 450 ('revisions', commit_ids['change']),
451 451 ('__end__', 'revisions:sequence'),
452 452 ('user', ''),
453 453 ('csrf_token', csrf_token),
454 454 ],
455 455 status=302)
456 456
457 457 location = response.headers['Location']
458 458
459 459 pull_request_id = location.rsplit('/', 1)[1]
460 460 assert pull_request_id != 'new'
461 461 pull_request = PullRequest.get(int(pull_request_id))
462 462
463 463 # Check that a notification was made
464 464 notifications = Notification.query()\
465 465 .filter(Notification.created_by == pull_request.author.user_id,
466 466 Notification.type_ == Notification.TYPE_PULL_REQUEST,
467 467 Notification.subject.contains(
468 468 "wants you to review pull request #%s" % pull_request_id))
469 469 assert len(notifications.all()) == 1
470 470
471 471 # Change reviewers and check that a notification was made
472 472 PullRequestModel().update_reviewers(
473 473 pull_request.pull_request_id, [(1, [], False, [])],
474 474 pull_request.author)
475 475 assert len(notifications.all()) == 2
476 476
477 477 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
478 478 csrf_token):
479 479 commits = [
480 480 {'message': 'ancestor',
481 481 'added': [FileNode('file_A', content='content_of_ancestor')]},
482 482 {'message': 'change',
483 483 'added': [FileNode('file_a', content='content_of_change')]},
484 484 {'message': 'change-child'},
485 485 {'message': 'ancestor-child', 'parents': ['ancestor'],
486 486 'added': [
487 487 FileNode('file_B', content='content_of_ancestor_child')]},
488 488 {'message': 'ancestor-child-2'},
489 489 ]
490 490 commit_ids = backend.create_master_repo(commits)
491 491 target = backend.create_repo(heads=['ancestor-child'])
492 492 source = backend.create_repo(heads=['change'])
493 493
494 494 response = self.app.post(
495 495 route_path('pullrequest_create', repo_name=source.repo_name),
496 496 [
497 497 ('source_repo', source.repo_name),
498 498 ('source_ref', 'branch:default:' + commit_ids['change']),
499 499 ('target_repo', target.repo_name),
500 500 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
501 501 ('common_ancestor', commit_ids['ancestor']),
502 502 ('pullrequest_title', 'Title'),
503 503 ('pullrequest_desc', 'Description'),
504 504 ('description_renderer', 'markdown'),
505 505 ('__start__', 'review_members:sequence'),
506 506 ('__start__', 'reviewer:mapping'),
507 507 ('user_id', '1'),
508 508 ('__start__', 'reasons:sequence'),
509 509 ('reason', 'Some reason'),
510 510 ('__end__', 'reasons:sequence'),
511 511 ('__start__', 'rules:sequence'),
512 512 ('__end__', 'rules:sequence'),
513 513 ('mandatory', 'False'),
514 514 ('__end__', 'reviewer:mapping'),
515 515 ('__end__', 'review_members:sequence'),
516 516 ('__start__', 'revisions:sequence'),
517 517 ('revisions', commit_ids['change']),
518 518 ('__end__', 'revisions:sequence'),
519 519 ('user', ''),
520 520 ('csrf_token', csrf_token),
521 521 ],
522 522 status=302)
523 523
524 524 location = response.headers['Location']
525 525
526 526 pull_request_id = location.rsplit('/', 1)[1]
527 527 assert pull_request_id != 'new'
528 528 pull_request = PullRequest.get(int(pull_request_id))
529 529
530 530 # target_ref has to point to the ancestor's commit_id in order to
531 531 # show the correct diff
532 532 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
533 533 assert pull_request.target_ref == expected_target_ref
534 534
535 535 # Check generated diff contents
536 536 response = response.follow()
537 537 assert 'content_of_ancestor' not in response.body
538 538 assert 'content_of_ancestor-child' not in response.body
539 539 assert 'content_of_change' in response.body
540 540
541 541 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
542 542 # Clear any previous calls to rcextensions
543 543 rhodecode.EXTENSIONS.calls.clear()
544 544
545 545 pull_request = pr_util.create_pull_request(
546 546 approved=True, mergeable=True)
547 547 pull_request_id = pull_request.pull_request_id
548 548 repo_name = pull_request.target_repo.scm_instance().name
549 549
550 550 response = self.app.post(
551 551 route_path('pullrequest_merge',
552 552 repo_name=repo_name,
553 553 pull_request_id=pull_request_id),
554 554 params={'csrf_token': csrf_token}).follow()
555 555
556 556 pull_request = PullRequest.get(pull_request_id)
557 557
558 558 assert response.status_int == 200
559 559 assert pull_request.is_closed()
560 560 assert_pull_request_status(
561 561 pull_request, ChangesetStatus.STATUS_APPROVED)
562 562
563 563 # Check the relevant log entries were added
564 564 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
565 565 actions = [log.action for log in user_logs]
566 566 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
567 567 expected_actions = [
568 568 u'repo.pull_request.close',
569 569 u'repo.pull_request.merge',
570 570 u'repo.pull_request.comment.create'
571 571 ]
572 572 assert actions == expected_actions
573 573
574 574 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
575 575 actions = [log for log in user_logs]
576 576 assert actions[-1].action == 'user.push'
577 577 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
578 578
579 579 # Check post_push rcextension was really executed
580 580 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
581 581 assert len(push_calls) == 1
582 582 unused_last_call_args, last_call_kwargs = push_calls[0]
583 583 assert last_call_kwargs['action'] == 'push'
584 584 assert last_call_kwargs['commit_ids'] == pr_commit_ids
585 585
586 586 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
587 587 pull_request = pr_util.create_pull_request(mergeable=False)
588 588 pull_request_id = pull_request.pull_request_id
589 589 pull_request = PullRequest.get(pull_request_id)
590 590
591 591 response = self.app.post(
592 592 route_path('pullrequest_merge',
593 593 repo_name=pull_request.target_repo.scm_instance().name,
594 594 pull_request_id=pull_request.pull_request_id),
595 595 params={'csrf_token': csrf_token}).follow()
596 596
597 597 assert response.status_int == 200
598 598 response.mustcontain(
599 599 'Merge is not currently possible because of below failed checks.')
600 600 response.mustcontain('Server-side pull request merging is disabled.')
601 601
602 602 @pytest.mark.skip_backends('svn')
603 603 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
604 604 pull_request = pr_util.create_pull_request(mergeable=True)
605 605 pull_request_id = pull_request.pull_request_id
606 606 repo_name = pull_request.target_repo.scm_instance().name
607 607
608 608 response = self.app.post(
609 609 route_path('pullrequest_merge',
610 610 repo_name=repo_name, pull_request_id=pull_request_id),
611 611 params={'csrf_token': csrf_token}).follow()
612 612
613 613 assert response.status_int == 200
614 614
615 615 response.mustcontain(
616 616 'Merge is not currently possible because of below failed checks.')
617 617 response.mustcontain('Pull request reviewer approval is pending.')
618 618
619 619 def test_merge_pull_request_renders_failure_reason(
620 620 self, user_regular, csrf_token, pr_util):
621 621 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
622 622 pull_request_id = pull_request.pull_request_id
623 623 repo_name = pull_request.target_repo.scm_instance().name
624 624
625 625 merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID',
626 626 MergeFailureReason.PUSH_FAILED,
627 627 metadata={'target': 'shadow repo',
628 628 'merge_commit': 'xxx'})
629 629 model_patcher = mock.patch.multiple(
630 630 PullRequestModel,
631 631 merge_repo=mock.Mock(return_value=merge_resp),
632 632 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
633 633
634 634 with model_patcher:
635 635 response = self.app.post(
636 636 route_path('pullrequest_merge',
637 637 repo_name=repo_name,
638 638 pull_request_id=pull_request_id),
639 639 params={'csrf_token': csrf_token}, status=302)
640 640
641 641 merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED,
642 642 metadata={'target': 'shadow repo',
643 643 'merge_commit': 'xxx'})
644 644 assert_session_flash(response, merge_resp.merge_status_message)
645 645
646 646 def test_update_source_revision(self, backend, csrf_token):
647 647 commits = [
648 648 {'message': 'ancestor'},
649 649 {'message': 'change'},
650 650 {'message': 'change-2'},
651 651 ]
652 652 commit_ids = backend.create_master_repo(commits)
653 653 target = backend.create_repo(heads=['ancestor'])
654 654 source = backend.create_repo(heads=['change'])
655 655
656 656 # create a pull request from source into target
657 657 pull_request = PullRequest()
658
658 659 pull_request.source_repo = source
659 # TODO: johbo: Make sure that we write the source ref this way!
660 660 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
661 661 branch=backend.default_branch_name, commit_id=commit_ids['change'])
662
662 663 pull_request.target_repo = target
663
664 664 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
665 branch=backend.default_branch_name,
666 commit_id=commit_ids['ancestor'])
665 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
666
667 667 pull_request.revisions = [commit_ids['change']]
668 668 pull_request.title = u"Test"
669 669 pull_request.description = u"Description"
670 pull_request.author = UserModel().get_by_username(
671 TEST_USER_ADMIN_LOGIN)
670 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
671 pull_request.pull_request_state = PullRequest.STATE_CREATED
672 672 Session().add(pull_request)
673 673 Session().commit()
674 674 pull_request_id = pull_request.pull_request_id
675 675
676 676 # source has ancestor - change - change-2
677 677 backend.pull_heads(source, heads=['change-2'])
678 678
679 679 # update PR
680 680 self.app.post(
681 681 route_path('pullrequest_update',
682 repo_name=target.repo_name,
683 pull_request_id=pull_request_id),
684 params={'update_commits': 'true',
685 'csrf_token': csrf_token})
682 repo_name=target.repo_name, pull_request_id=pull_request_id),
683 params={'update_commits': 'true', 'csrf_token': csrf_token})
684
685 response = self.app.get(
686 route_path('pullrequest_show',
687 repo_name=target.repo_name,
688 pull_request_id=pull_request.pull_request_id))
689
690 assert response.status_int == 200
691 assert 'Pull request updated to' in response.body
692 assert 'with 1 added, 0 removed commits.' in response.body
686 693
687 694 # check that we now have both revisions
688 695 pull_request = PullRequest.get(pull_request_id)
689 assert pull_request.revisions == [
690 commit_ids['change-2'], commit_ids['change']]
691
692 # TODO: johbo: this should be a test on its own
693 response = self.app.get(route_path(
694 'pullrequest_new',
695 repo_name=target.repo_name))
696 assert response.status_int == 200
697 assert 'Pull request updated to' in response.body
698 assert 'with 1 added, 0 removed commits.' in response.body
696 assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']]
699 697
700 698 def test_update_target_revision(self, backend, csrf_token):
701 699 commits = [
702 700 {'message': 'ancestor'},
703 701 {'message': 'change'},
704 702 {'message': 'ancestor-new', 'parents': ['ancestor']},
705 703 {'message': 'change-rebased'},
706 704 ]
707 705 commit_ids = backend.create_master_repo(commits)
708 706 target = backend.create_repo(heads=['ancestor'])
709 707 source = backend.create_repo(heads=['change'])
710 708
711 709 # create a pull request from source into target
712 710 pull_request = PullRequest()
711
713 712 pull_request.source_repo = source
714 # TODO: johbo: Make sure that we write the source ref this way!
715 713 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
716 714 branch=backend.default_branch_name, commit_id=commit_ids['change'])
715
717 716 pull_request.target_repo = target
718 # TODO: johbo: Target ref should be branch based, since tip can jump
719 # from branch to branch
720 717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
721 branch=backend.default_branch_name,
722 commit_id=commit_ids['ancestor'])
718 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
719
723 720 pull_request.revisions = [commit_ids['change']]
724 721 pull_request.title = u"Test"
725 722 pull_request.description = u"Description"
726 pull_request.author = UserModel().get_by_username(
727 TEST_USER_ADMIN_LOGIN)
723 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
724 pull_request.pull_request_state = PullRequest.STATE_CREATED
725
728 726 Session().add(pull_request)
729 727 Session().commit()
730 728 pull_request_id = pull_request.pull_request_id
731 729
732 730 # target has ancestor - ancestor-new
733 731 # source has ancestor - ancestor-new - change-rebased
734 732 backend.pull_heads(target, heads=['ancestor-new'])
735 733 backend.pull_heads(source, heads=['change-rebased'])
736 734
737 735 # update PR
738 736 self.app.post(
739 737 route_path('pullrequest_update',
740 repo_name=target.repo_name,
741 pull_request_id=pull_request_id),
742 params={'update_commits': 'true',
743 'csrf_token': csrf_token},
738 repo_name=target.repo_name,
739 pull_request_id=pull_request_id),
740 params={'update_commits': 'true', 'csrf_token': csrf_token},
744 741 status=200)
745 742
746 743 # check that the revisions were updated
747 744 pull_request = PullRequest.get(pull_request_id)
748 745 assert pull_request.revisions == [commit_ids['change-rebased']]
749 746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
750 branch=backend.default_branch_name,
751 commit_id=commit_ids['ancestor-new'])
747 branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new'])
752 748
753 # TODO: johbo: This should be a test on its own
754 response = self.app.get(route_path(
755 'pullrequest_new',
756 repo_name=target.repo_name))
749 response = self.app.get(
750 route_path('pullrequest_show',
751 repo_name=target.repo_name,
752 pull_request_id=pull_request.pull_request_id))
757 753 assert response.status_int == 200
758 754 assert 'Pull request updated to' in response.body
759 755 assert 'with 1 added, 1 removed commits.' in response.body
760 756
761 757 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
762 758 backend = backend_git
763 759 commits = [
764 760 {'message': 'master-commit-1'},
765 761 {'message': 'master-commit-2-change-1'},
766 762 {'message': 'master-commit-3-change-2'},
767 763
768 764 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
769 765 {'message': 'feat-commit-2'},
770 766 ]
771 767 commit_ids = backend.create_master_repo(commits)
772 768 target = backend.create_repo(heads=['master-commit-3-change-2'])
773 769 source = backend.create_repo(heads=['feat-commit-2'])
774 770
775 771 # create a pull request from source into target
776 772 pull_request = PullRequest()
777 773 pull_request.source_repo = source
778 # TODO: johbo: Make sure that we write the source ref this way!
774
779 775 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
780 776 branch=backend.default_branch_name,
781 777 commit_id=commit_ids['master-commit-3-change-2'])
782 778
783 779 pull_request.target_repo = target
784 # TODO: johbo: Target ref should be branch based, since tip can jump
785 # from branch to branch
786 780 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
787 branch=backend.default_branch_name,
788 commit_id=commit_ids['feat-commit-2'])
781 branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2'])
789 782
790 783 pull_request.revisions = [
791 784 commit_ids['feat-commit-1'],
792 785 commit_ids['feat-commit-2']
793 786 ]
794 787 pull_request.title = u"Test"
795 788 pull_request.description = u"Description"
796 pull_request.author = UserModel().get_by_username(
797 TEST_USER_ADMIN_LOGIN)
789 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
790 pull_request.pull_request_state = PullRequest.STATE_CREATED
798 791 Session().add(pull_request)
799 792 Session().commit()
800 793 pull_request_id = pull_request.pull_request_id
801 794
802 795 # PR is created, now we simulate a force-push into target,
803 796 # that drops the last 2 commits
804 797 vcsrepo = target.scm_instance()
805 798 vcsrepo.config.clear_section('hooks')
806 799 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
807 800
808 801 # update PR
809 802 self.app.post(
810 803 route_path('pullrequest_update',
811 804 repo_name=target.repo_name,
812 805 pull_request_id=pull_request_id),
813 params={'update_commits': 'true',
814 'csrf_token': csrf_token},
806 params={'update_commits': 'true', 'csrf_token': csrf_token},
815 807 status=200)
816 808
817 response = self.app.get(route_path(
818 'pullrequest_new',
819 repo_name=target.repo_name))
809 response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name))
820 810 assert response.status_int == 200
821 811 response.mustcontain('Pull request updated to')
822 812 response.mustcontain('with 0 added, 0 removed commits.')
823 813
824 814 def test_update_of_ancestor_reference(self, backend, csrf_token):
825 815 commits = [
826 816 {'message': 'ancestor'},
827 817 {'message': 'change'},
828 818 {'message': 'change-2'},
829 819 {'message': 'ancestor-new', 'parents': ['ancestor']},
830 820 {'message': 'change-rebased'},
831 821 ]
832 822 commit_ids = backend.create_master_repo(commits)
833 823 target = backend.create_repo(heads=['ancestor'])
834 824 source = backend.create_repo(heads=['change'])
835 825
836 826 # create a pull request from source into target
837 827 pull_request = PullRequest()
838 828 pull_request.source_repo = source
839 # TODO: johbo: Make sure that we write the source ref this way!
829
840 830 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
841 branch=backend.default_branch_name,
842 commit_id=commit_ids['change'])
831 branch=backend.default_branch_name, commit_id=commit_ids['change'])
843 832 pull_request.target_repo = target
844 # TODO: johbo: Target ref should be branch based, since tip can jump
845 # from branch to branch
846 833 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
847 branch=backend.default_branch_name,
848 commit_id=commit_ids['ancestor'])
834 branch=backend.default_branch_name, commit_id=commit_ids['ancestor'])
849 835 pull_request.revisions = [commit_ids['change']]
850 836 pull_request.title = u"Test"
851 837 pull_request.description = u"Description"
852 pull_request.author = UserModel().get_by_username(
853 TEST_USER_ADMIN_LOGIN)
838 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
839 pull_request.pull_request_state = PullRequest.STATE_CREATED
854 840 Session().add(pull_request)
855 841 Session().commit()
856 842 pull_request_id = pull_request.pull_request_id
857 843
858 844 # target has ancestor - ancestor-new
859 845 # source has ancestor - ancestor-new - change-rebased
860 846 backend.pull_heads(target, heads=['ancestor-new'])
861 847 backend.pull_heads(source, heads=['change-rebased'])
862 848
863 849 # update PR
864 850 self.app.post(
865 851 route_path('pullrequest_update',
866 repo_name=target.repo_name,
867 pull_request_id=pull_request_id),
868 params={'update_commits': 'true',
869 'csrf_token': csrf_token},
852 repo_name=target.repo_name, pull_request_id=pull_request_id),
853 params={'update_commits': 'true', 'csrf_token': csrf_token},
870 854 status=200)
871 855
872 856 # Expect the target reference to be updated correctly
873 857 pull_request = PullRequest.get(pull_request_id)
874 858 assert pull_request.revisions == [commit_ids['change-rebased']]
875 859 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
876 860 branch=backend.default_branch_name,
877 861 commit_id=commit_ids['ancestor-new'])
878 862 assert pull_request.target_ref == expected_target_ref
879 863
880 864 def test_remove_pull_request_branch(self, backend_git, csrf_token):
881 865 branch_name = 'development'
882 866 commits = [
883 867 {'message': 'initial-commit'},
884 868 {'message': 'old-feature'},
885 869 {'message': 'new-feature', 'branch': branch_name},
886 870 ]
887 871 repo = backend_git.create_repo(commits)
888 872 commit_ids = backend_git.commit_ids
889 873
890 874 pull_request = PullRequest()
891 875 pull_request.source_repo = repo
892 876 pull_request.target_repo = repo
893 877 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
894 878 branch=branch_name, commit_id=commit_ids['new-feature'])
895 879 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
896 branch=backend_git.default_branch_name,
897 commit_id=commit_ids['old-feature'])
880 branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature'])
898 881 pull_request.revisions = [commit_ids['new-feature']]
899 882 pull_request.title = u"Test"
900 883 pull_request.description = u"Description"
901 pull_request.author = UserModel().get_by_username(
902 TEST_USER_ADMIN_LOGIN)
884 pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
885 pull_request.pull_request_state = PullRequest.STATE_CREATED
903 886 Session().add(pull_request)
904 887 Session().commit()
905 888
906 889 vcs = repo.scm_instance()
907 890 vcs.remove_ref('refs/heads/{}'.format(branch_name))
908 891
909 892 response = self.app.get(route_path(
910 893 'pullrequest_show',
911 894 repo_name=repo.repo_name,
912 895 pull_request_id=pull_request.pull_request_id))
913 896
914 897 assert response.status_int == 200
915 898
916 899 response.assert_response().element_contains(
917 900 '#changeset_compare_view_content .alert strong',
918 901 'Missing commits')
919 902 response.assert_response().element_contains(
920 903 '#changeset_compare_view_content .alert',
921 904 'This pull request cannot be displayed, because one or more'
922 905 ' commits no longer exist in the source repository.')
923 906
924 907 def test_strip_commits_from_pull_request(
925 908 self, backend, pr_util, csrf_token):
926 909 commits = [
927 910 {'message': 'initial-commit'},
928 911 {'message': 'old-feature'},
929 912 {'message': 'new-feature', 'parents': ['initial-commit']},
930 913 ]
931 914 pull_request = pr_util.create_pull_request(
932 915 commits, target_head='initial-commit', source_head='new-feature',
933 916 revisions=['new-feature'])
934 917
935 918 vcs = pr_util.source_repository.scm_instance()
936 919 if backend.alias == 'git':
937 920 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
938 921 else:
939 922 vcs.strip(pr_util.commit_ids['new-feature'])
940 923
941 924 response = self.app.get(route_path(
942 925 'pullrequest_show',
943 926 repo_name=pr_util.target_repository.repo_name,
944 927 pull_request_id=pull_request.pull_request_id))
945 928
946 929 assert response.status_int == 200
947 930
948 931 response.assert_response().element_contains(
949 932 '#changeset_compare_view_content .alert strong',
950 933 'Missing commits')
951 934 response.assert_response().element_contains(
952 935 '#changeset_compare_view_content .alert',
953 936 'This pull request cannot be displayed, because one or more'
954 937 ' commits no longer exist in the source repository.')
955 938 response.assert_response().element_contains(
956 939 '#update_commits',
957 940 'Update commits')
958 941
959 942 def test_strip_commits_and_update(
960 943 self, backend, pr_util, csrf_token):
961 944 commits = [
962 945 {'message': 'initial-commit'},
963 946 {'message': 'old-feature'},
964 947 {'message': 'new-feature', 'parents': ['old-feature']},
965 948 ]
966 949 pull_request = pr_util.create_pull_request(
967 950 commits, target_head='old-feature', source_head='new-feature',
968 951 revisions=['new-feature'], mergeable=True)
969 952
970 953 vcs = pr_util.source_repository.scm_instance()
971 954 if backend.alias == 'git':
972 955 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
973 956 else:
974 957 vcs.strip(pr_util.commit_ids['new-feature'])
975 958
976 959 response = self.app.post(
977 960 route_path('pullrequest_update',
978 961 repo_name=pull_request.target_repo.repo_name,
979 962 pull_request_id=pull_request.pull_request_id),
980 963 params={'update_commits': 'true',
981 964 'csrf_token': csrf_token})
982 965
983 966 assert response.status_int == 200
984 967 assert response.body == 'true'
985 968
986 969 # Make sure that after update, it won't raise 500 errors
987 970 response = self.app.get(route_path(
988 971 'pullrequest_show',
989 972 repo_name=pr_util.target_repository.repo_name,
990 973 pull_request_id=pull_request.pull_request_id))
991 974
992 975 assert response.status_int == 200
993 976 response.assert_response().element_contains(
994 977 '#changeset_compare_view_content .alert strong',
995 978 'Missing commits')
996 979
997 980 def test_branch_is_a_link(self, pr_util):
998 981 pull_request = pr_util.create_pull_request()
999 982 pull_request.source_ref = 'branch:origin:1234567890abcdef'
1000 983 pull_request.target_ref = 'branch:target:abcdef1234567890'
1001 984 Session().add(pull_request)
1002 985 Session().commit()
1003 986
1004 987 response = self.app.get(route_path(
1005 988 'pullrequest_show',
1006 989 repo_name=pull_request.target_repo.scm_instance().name,
1007 990 pull_request_id=pull_request.pull_request_id))
1008 991 assert response.status_int == 200
1009 992
1010 993 origin = response.assert_response().get_element('.pr-origininfo .tag')
1011 994 origin_children = origin.getchildren()
1012 995 assert len(origin_children) == 1
1013 996 target = response.assert_response().get_element('.pr-targetinfo .tag')
1014 997 target_children = target.getchildren()
1015 998 assert len(target_children) == 1
1016 999
1017 1000 expected_origin_link = route_path(
1018 1001 'repo_changelog',
1019 1002 repo_name=pull_request.source_repo.scm_instance().name,
1020 1003 params=dict(branch='origin'))
1021 1004 expected_target_link = route_path(
1022 1005 'repo_changelog',
1023 1006 repo_name=pull_request.target_repo.scm_instance().name,
1024 1007 params=dict(branch='target'))
1025 1008 assert origin_children[0].attrib['href'] == expected_origin_link
1026 1009 assert origin_children[0].text == 'branch: origin'
1027 1010 assert target_children[0].attrib['href'] == expected_target_link
1028 1011 assert target_children[0].text == 'branch: target'
1029 1012
1030 1013 def test_bookmark_is_not_a_link(self, pr_util):
1031 1014 pull_request = pr_util.create_pull_request()
1032 1015 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1033 1016 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1034 1017 Session().add(pull_request)
1035 1018 Session().commit()
1036 1019
1037 1020 response = self.app.get(route_path(
1038 1021 'pullrequest_show',
1039 1022 repo_name=pull_request.target_repo.scm_instance().name,
1040 1023 pull_request_id=pull_request.pull_request_id))
1041 1024 assert response.status_int == 200
1042 1025
1043 1026 origin = response.assert_response().get_element('.pr-origininfo .tag')
1044 1027 assert origin.text.strip() == 'bookmark: origin'
1045 1028 assert origin.getchildren() == []
1046 1029
1047 1030 target = response.assert_response().get_element('.pr-targetinfo .tag')
1048 1031 assert target.text.strip() == 'bookmark: target'
1049 1032 assert target.getchildren() == []
1050 1033
1051 1034 def test_tag_is_not_a_link(self, pr_util):
1052 1035 pull_request = pr_util.create_pull_request()
1053 1036 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1054 1037 pull_request.target_ref = 'tag:target:abcdef1234567890'
1055 1038 Session().add(pull_request)
1056 1039 Session().commit()
1057 1040
1058 1041 response = self.app.get(route_path(
1059 1042 'pullrequest_show',
1060 1043 repo_name=pull_request.target_repo.scm_instance().name,
1061 1044 pull_request_id=pull_request.pull_request_id))
1062 1045 assert response.status_int == 200
1063 1046
1064 1047 origin = response.assert_response().get_element('.pr-origininfo .tag')
1065 1048 assert origin.text.strip() == 'tag: origin'
1066 1049 assert origin.getchildren() == []
1067 1050
1068 1051 target = response.assert_response().get_element('.pr-targetinfo .tag')
1069 1052 assert target.text.strip() == 'tag: target'
1070 1053 assert target.getchildren() == []
1071 1054
1072 1055 @pytest.mark.parametrize('mergeable', [True, False])
1073 1056 def test_shadow_repository_link(
1074 1057 self, mergeable, pr_util, http_host_only_stub):
1075 1058 """
1076 1059 Check that the pull request summary page displays a link to the shadow
1077 1060 repository if the pull request is mergeable. If it is not mergeable
1078 1061 the link should not be displayed.
1079 1062 """
1080 1063 pull_request = pr_util.create_pull_request(
1081 1064 mergeable=mergeable, enable_notifications=False)
1082 1065 target_repo = pull_request.target_repo.scm_instance()
1083 1066 pr_id = pull_request.pull_request_id
1084 1067 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1085 1068 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1086 1069
1087 1070 response = self.app.get(route_path(
1088 1071 'pullrequest_show',
1089 1072 repo_name=target_repo.name,
1090 1073 pull_request_id=pr_id))
1091 1074
1092 1075 if mergeable:
1093 1076 response.assert_response().element_value_contains(
1094 1077 'input.pr-mergeinfo', shadow_url)
1095 1078 response.assert_response().element_value_contains(
1096 1079 'input.pr-mergeinfo ', 'pr-merge')
1097 1080 else:
1098 1081 response.assert_response().no_element_exists('.pr-mergeinfo')
1099 1082
1100 1083
1101 1084 @pytest.mark.usefixtures('app')
1102 1085 @pytest.mark.backends("git", "hg")
1103 1086 class TestPullrequestsControllerDelete(object):
1104 1087 def test_pull_request_delete_button_permissions_admin(
1105 1088 self, autologin_user, user_admin, pr_util):
1106 1089 pull_request = pr_util.create_pull_request(
1107 1090 author=user_admin.username, enable_notifications=False)
1108 1091
1109 1092 response = self.app.get(route_path(
1110 1093 'pullrequest_show',
1111 1094 repo_name=pull_request.target_repo.scm_instance().name,
1112 1095 pull_request_id=pull_request.pull_request_id))
1113 1096
1114 1097 response.mustcontain('id="delete_pullrequest"')
1115 1098 response.mustcontain('Confirm to delete this pull request')
1116 1099
1117 1100 def test_pull_request_delete_button_permissions_owner(
1118 1101 self, autologin_regular_user, user_regular, pr_util):
1119 1102 pull_request = pr_util.create_pull_request(
1120 1103 author=user_regular.username, enable_notifications=False)
1121 1104
1122 1105 response = self.app.get(route_path(
1123 1106 'pullrequest_show',
1124 1107 repo_name=pull_request.target_repo.scm_instance().name,
1125 1108 pull_request_id=pull_request.pull_request_id))
1126 1109
1127 1110 response.mustcontain('id="delete_pullrequest"')
1128 1111 response.mustcontain('Confirm to delete this pull request')
1129 1112
1130 1113 def test_pull_request_delete_button_permissions_forbidden(
1131 1114 self, autologin_regular_user, user_regular, user_admin, pr_util):
1132 1115 pull_request = pr_util.create_pull_request(
1133 1116 author=user_admin.username, enable_notifications=False)
1134 1117
1135 1118 response = self.app.get(route_path(
1136 1119 'pullrequest_show',
1137 1120 repo_name=pull_request.target_repo.scm_instance().name,
1138 1121 pull_request_id=pull_request.pull_request_id))
1139 1122 response.mustcontain(no=['id="delete_pullrequest"'])
1140 1123 response.mustcontain(no=['Confirm to delete this pull request'])
1141 1124
1142 1125 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1143 1126 self, autologin_regular_user, user_regular, user_admin, pr_util,
1144 1127 user_util):
1145 1128
1146 1129 pull_request = pr_util.create_pull_request(
1147 1130 author=user_admin.username, enable_notifications=False)
1148 1131
1149 1132 user_util.grant_user_permission_to_repo(
1150 1133 pull_request.target_repo, user_regular,
1151 1134 'repository.write')
1152 1135
1153 1136 response = self.app.get(route_path(
1154 1137 'pullrequest_show',
1155 1138 repo_name=pull_request.target_repo.scm_instance().name,
1156 1139 pull_request_id=pull_request.pull_request_id))
1157 1140
1158 1141 response.mustcontain('id="open_edit_pullrequest"')
1159 1142 response.mustcontain('id="delete_pullrequest"')
1160 1143 response.mustcontain(no=['Confirm to delete this pull request'])
1161 1144
1162 1145 def test_delete_comment_returns_404_if_comment_does_not_exist(
1163 1146 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1164 1147
1165 1148 pull_request = pr_util.create_pull_request(
1166 1149 author=user_admin.username, enable_notifications=False)
1167 1150
1168 1151 self.app.post(
1169 1152 route_path(
1170 1153 'pullrequest_comment_delete',
1171 1154 repo_name=pull_request.target_repo.scm_instance().name,
1172 1155 pull_request_id=pull_request.pull_request_id,
1173 1156 comment_id=1024404),
1174 1157 extra_environ=xhr_header,
1175 1158 params={'csrf_token': csrf_token},
1176 1159 status=404
1177 1160 )
1178 1161
1179 1162 def test_delete_comment(
1180 1163 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1181 1164
1182 1165 pull_request = pr_util.create_pull_request(
1183 1166 author=user_admin.username, enable_notifications=False)
1184 1167 comment = pr_util.create_comment()
1185 1168 comment_id = comment.comment_id
1186 1169
1187 1170 response = self.app.post(
1188 1171 route_path(
1189 1172 'pullrequest_comment_delete',
1190 1173 repo_name=pull_request.target_repo.scm_instance().name,
1191 1174 pull_request_id=pull_request.pull_request_id,
1192 1175 comment_id=comment_id),
1193 1176 extra_environ=xhr_header,
1194 1177 params={'csrf_token': csrf_token},
1195 1178 status=200
1196 1179 )
1197 1180 assert response.body == 'true'
1198 1181
1199 1182 @pytest.mark.parametrize('url_type', [
1200 1183 'pullrequest_new',
1201 1184 'pullrequest_create',
1202 1185 'pullrequest_update',
1203 1186 'pullrequest_merge',
1204 1187 ])
1205 1188 def test_pull_request_is_forbidden_on_archived_repo(
1206 1189 self, autologin_user, backend, xhr_header, user_util, url_type):
1207 1190
1208 1191 # create a temporary repo
1209 1192 source = user_util.create_repo(repo_type=backend.alias)
1210 1193 repo_name = source.repo_name
1211 1194 repo = Repository.get_by_repo_name(repo_name)
1212 1195 repo.archived = True
1213 1196 Session().commit()
1214 1197
1215 1198 response = self.app.get(
1216 1199 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1217 1200
1218 1201 msg = 'Action not supported for archived repository.'
1219 1202 assert_session_flash(response, msg)
1220 1203
1221 1204
1222 1205 def assert_pull_request_status(pull_request, expected_status):
1223 1206 status = ChangesetStatusModel().calculated_review_status(
1224 1207 pull_request=pull_request)
1225 1208 assert status == expected_status
1226 1209
1227 1210
1228 1211 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1229 1212 @pytest.mark.usefixtures("autologin_user")
1230 1213 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1231 1214 response = app.get(
1232 1215 route_path(route, repo_name=backend_svn.repo_name), status=404)
1233 1216
@@ -1,1413 +1,1456 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode import events
33 33 from rhodecode.apps._base import RepoAppView, DataGridAppView
34 34
35 35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
36 36 from rhodecode.lib.base import vcs_operation_context
37 37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
38 38 from rhodecode.lib.ext_json import json
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 41 NotAnonymous, CSRFRequired)
42 42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
43 43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
44 44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
45 45 RepositoryRequirementError, EmptyRepositoryError)
46 46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 47 from rhodecode.model.comment import CommentsModel
48 48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
49 49 ChangesetComment, ChangesetStatus, Repository)
50 50 from rhodecode.model.forms import PullRequestForm
51 51 from rhodecode.model.meta import Session
52 52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
53 53 from rhodecode.model.scm import ScmModel
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
59 59
60 60 def load_default_context(self):
61 61 c = self._get_local_tmpl_context(include_app_defaults=True)
62 62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
63 63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
64 64 # backward compat., for OLD PRs we use a plain renderer
65 65 c.renderer = 'plain'
66 66 return c
67 67
68 68 def _get_pull_requests_list(
69 69 self, repo_name, source, filter_type, opened_by, statuses):
70 70
71 71 draw, start, limit = self._extract_chunk(self.request)
72 72 search_q, order_by, order_dir = self._extract_ordering(self.request)
73 73 _render = self.request.get_partial_renderer(
74 74 'rhodecode:templates/data_table/_dt_elements.mako')
75 75
76 76 # pagination
77 77
78 78 if filter_type == 'awaiting_review':
79 79 pull_requests = PullRequestModel().get_awaiting_review(
80 80 repo_name, source=source, opened_by=opened_by,
81 81 statuses=statuses, offset=start, length=limit,
82 82 order_by=order_by, order_dir=order_dir)
83 83 pull_requests_total_count = PullRequestModel().count_awaiting_review(
84 84 repo_name, source=source, statuses=statuses,
85 85 opened_by=opened_by)
86 86 elif filter_type == 'awaiting_my_review':
87 87 pull_requests = PullRequestModel().get_awaiting_my_review(
88 88 repo_name, source=source, opened_by=opened_by,
89 89 user_id=self._rhodecode_user.user_id, statuses=statuses,
90 90 offset=start, length=limit, order_by=order_by,
91 91 order_dir=order_dir)
92 92 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
93 93 repo_name, source=source, user_id=self._rhodecode_user.user_id,
94 94 statuses=statuses, opened_by=opened_by)
95 95 else:
96 96 pull_requests = PullRequestModel().get_all(
97 97 repo_name, source=source, opened_by=opened_by,
98 98 statuses=statuses, offset=start, length=limit,
99 99 order_by=order_by, order_dir=order_dir)
100 100 pull_requests_total_count = PullRequestModel().count_all(
101 101 repo_name, source=source, statuses=statuses,
102 102 opened_by=opened_by)
103 103
104 104 data = []
105 105 comments_model = CommentsModel()
106 106 for pr in pull_requests:
107 107 comments = comments_model.get_all_comments(
108 108 self.db_repo.repo_id, pull_request=pr)
109 109
110 110 data.append({
111 111 'name': _render('pullrequest_name',
112 112 pr.pull_request_id, pr.target_repo.repo_name),
113 113 'name_raw': pr.pull_request_id,
114 114 'status': _render('pullrequest_status',
115 115 pr.calculated_review_status()),
116 116 'title': _render(
117 117 'pullrequest_title', pr.title, pr.description),
118 118 'description': h.escape(pr.description),
119 119 'updated_on': _render('pullrequest_updated_on',
120 120 h.datetime_to_time(pr.updated_on)),
121 121 'updated_on_raw': h.datetime_to_time(pr.updated_on),
122 122 'created_on': _render('pullrequest_updated_on',
123 123 h.datetime_to_time(pr.created_on)),
124 124 'created_on_raw': h.datetime_to_time(pr.created_on),
125 125 'author': _render('pullrequest_author',
126 126 pr.author.full_contact, ),
127 127 'author_raw': pr.author.full_name,
128 128 'comments': _render('pullrequest_comments', len(comments)),
129 129 'comments_raw': len(comments),
130 130 'closed': pr.is_closed(),
131 131 })
132 132
133 133 data = ({
134 134 'draw': draw,
135 135 'data': data,
136 136 'recordsTotal': pull_requests_total_count,
137 137 'recordsFiltered': pull_requests_total_count,
138 138 })
139 139 return data
140 140
141 141 def get_recache_flag(self):
142 142 for flag_name in ['force_recache', 'force-recache', 'no-cache']:
143 143 flag_val = self.request.GET.get(flag_name)
144 144 if str2bool(flag_val):
145 145 return True
146 146 return False
147 147
148 148 @LoginRequired()
149 149 @HasRepoPermissionAnyDecorator(
150 150 'repository.read', 'repository.write', 'repository.admin')
151 151 @view_config(
152 152 route_name='pullrequest_show_all', request_method='GET',
153 153 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
154 154 def pull_request_list(self):
155 155 c = self.load_default_context()
156 156
157 157 req_get = self.request.GET
158 158 c.source = str2bool(req_get.get('source'))
159 159 c.closed = str2bool(req_get.get('closed'))
160 160 c.my = str2bool(req_get.get('my'))
161 161 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
162 162 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
163 163
164 164 c.active = 'open'
165 165 if c.my:
166 166 c.active = 'my'
167 167 if c.closed:
168 168 c.active = 'closed'
169 169 if c.awaiting_review and not c.source:
170 170 c.active = 'awaiting'
171 171 if c.source and not c.awaiting_review:
172 172 c.active = 'source'
173 173 if c.awaiting_my_review:
174 174 c.active = 'awaiting_my'
175 175
176 176 return self._get_template_context(c)
177 177
178 178 @LoginRequired()
179 179 @HasRepoPermissionAnyDecorator(
180 180 'repository.read', 'repository.write', 'repository.admin')
181 181 @view_config(
182 182 route_name='pullrequest_show_all_data', request_method='GET',
183 183 renderer='json_ext', xhr=True)
184 184 def pull_request_list_data(self):
185 185 self.load_default_context()
186 186
187 187 # additional filters
188 188 req_get = self.request.GET
189 189 source = str2bool(req_get.get('source'))
190 190 closed = str2bool(req_get.get('closed'))
191 191 my = str2bool(req_get.get('my'))
192 192 awaiting_review = str2bool(req_get.get('awaiting_review'))
193 193 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
194 194
195 195 filter_type = 'awaiting_review' if awaiting_review \
196 196 else 'awaiting_my_review' if awaiting_my_review \
197 197 else None
198 198
199 199 opened_by = None
200 200 if my:
201 201 opened_by = [self._rhodecode_user.user_id]
202 202
203 203 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
204 204 if closed:
205 205 statuses = [PullRequest.STATUS_CLOSED]
206 206
207 207 data = self._get_pull_requests_list(
208 208 repo_name=self.db_repo_name, source=source,
209 209 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
210 210
211 211 return data
212 212
213 213 def _is_diff_cache_enabled(self, target_repo):
214 214 caching_enabled = self._get_general_setting(
215 215 target_repo, 'rhodecode_diff_cache')
216 216 log.debug('Diff caching enabled: %s', caching_enabled)
217 217 return caching_enabled
218 218
219 219 def _get_diffset(self, source_repo_name, source_repo,
220 220 source_ref_id, target_ref_id,
221 221 target_commit, source_commit, diff_limit, file_limit,
222 222 fulldiff, hide_whitespace_changes, diff_context):
223 223
224 224 vcs_diff = PullRequestModel().get_diff(
225 225 source_repo, source_ref_id, target_ref_id,
226 226 hide_whitespace_changes, diff_context)
227 227
228 228 diff_processor = diffs.DiffProcessor(
229 229 vcs_diff, format='newdiff', diff_limit=diff_limit,
230 230 file_limit=file_limit, show_full_diff=fulldiff)
231 231
232 232 _parsed = diff_processor.prepare()
233 233
234 234 diffset = codeblocks.DiffSet(
235 235 repo_name=self.db_repo_name,
236 236 source_repo_name=source_repo_name,
237 237 source_node_getter=codeblocks.diffset_node_getter(target_commit),
238 238 target_node_getter=codeblocks.diffset_node_getter(source_commit),
239 239 )
240 240 diffset = self.path_filter.render_patchset_filtered(
241 241 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
242 242
243 243 return diffset
244 244
245 245 def _get_range_diffset(self, source_scm, source_repo,
246 246 commit1, commit2, diff_limit, file_limit,
247 247 fulldiff, hide_whitespace_changes, diff_context):
248 248 vcs_diff = source_scm.get_diff(
249 249 commit1, commit2,
250 250 ignore_whitespace=hide_whitespace_changes,
251 251 context=diff_context)
252 252
253 253 diff_processor = diffs.DiffProcessor(
254 254 vcs_diff, format='newdiff', diff_limit=diff_limit,
255 255 file_limit=file_limit, show_full_diff=fulldiff)
256 256
257 257 _parsed = diff_processor.prepare()
258 258
259 259 diffset = codeblocks.DiffSet(
260 260 repo_name=source_repo.repo_name,
261 261 source_node_getter=codeblocks.diffset_node_getter(commit1),
262 262 target_node_getter=codeblocks.diffset_node_getter(commit2))
263 263
264 264 diffset = self.path_filter.render_patchset_filtered(
265 265 diffset, _parsed, commit1.raw_id, commit2.raw_id)
266 266
267 267 return diffset
268 268
269 269 @LoginRequired()
270 270 @HasRepoPermissionAnyDecorator(
271 271 'repository.read', 'repository.write', 'repository.admin')
272 272 @view_config(
273 273 route_name='pullrequest_show', request_method='GET',
274 274 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
275 275 def pull_request_show(self):
276 pull_request_id = self.request.matchdict['pull_request_id']
276 _ = self.request.translate
277 c = self.load_default_context()
278
279 pull_request = PullRequest.get_or_404(
280 self.request.matchdict['pull_request_id'])
281 pull_request_id = pull_request.pull_request_id
277 282
278 c = self.load_default_context()
283 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
284 log.debug('show: forbidden because pull request is in state %s',
285 pull_request.pull_request_state)
286 msg = _(u'Cannot show pull requests in state other than `{}`. '
287 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
288 pull_request.pull_request_state)
289 h.flash(msg, category='error')
290 raise HTTPFound(h.route_path('pullrequest_show_all',
291 repo_name=self.db_repo_name))
279 292
280 293 version = self.request.GET.get('version')
281 294 from_version = self.request.GET.get('from_version') or version
282 295 merge_checks = self.request.GET.get('merge_checks')
283 296 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
284 297
285 298 # fetch global flags of ignore ws or context lines
286 299 diff_context = diffs.get_diff_context(self.request)
287 300 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
288 301
289 302 force_refresh = str2bool(self.request.GET.get('force_refresh'))
290 303
291 304 (pull_request_latest,
292 305 pull_request_at_ver,
293 306 pull_request_display_obj,
294 307 at_version) = PullRequestModel().get_pr_version(
295 308 pull_request_id, version=version)
296 309 pr_closed = pull_request_latest.is_closed()
297 310
298 311 if pr_closed and (version or from_version):
299 312             # do not allow browsing versions
300 313 raise HTTPFound(h.route_path(
301 314 'pullrequest_show', repo_name=self.db_repo_name,
302 315 pull_request_id=pull_request_id))
303 316
304 317 versions = pull_request_display_obj.versions()
305 318 # used to store per-commit range diffs
306 319 c.changes = collections.OrderedDict()
307 320 c.range_diff_on = self.request.GET.get('range-diff') == "1"
308 321
309 322 c.at_version = at_version
310 323 c.at_version_num = (at_version
311 324 if at_version and at_version != 'latest'
312 325 else None)
313 326 c.at_version_pos = ChangesetComment.get_index_from_version(
314 327 c.at_version_num, versions)
315 328
316 329 (prev_pull_request_latest,
317 330 prev_pull_request_at_ver,
318 331 prev_pull_request_display_obj,
319 332 prev_at_version) = PullRequestModel().get_pr_version(
320 333 pull_request_id, version=from_version)
321 334
322 335 c.from_version = prev_at_version
323 336 c.from_version_num = (prev_at_version
324 337 if prev_at_version and prev_at_version != 'latest'
325 338 else None)
326 339 c.from_version_pos = ChangesetComment.get_index_from_version(
327 340 c.from_version_num, versions)
328 341
329 342 # define if we're in COMPARE mode or VIEW at version mode
330 343 compare = at_version != prev_at_version
331 344
332 345         # the repo_name of the pull request we opened it against,
333 346         # ie. the target_repo must match
334 347 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
335 348 raise HTTPNotFound()
336 349
337 350 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
338 351 pull_request_at_ver)
339 352
340 353 c.pull_request = pull_request_display_obj
341 354 c.renderer = pull_request_at_ver.description_renderer or c.renderer
342 355 c.pull_request_latest = pull_request_latest
343 356
344 357 if compare or (at_version and not at_version == 'latest'):
345 358 c.allowed_to_change_status = False
346 359 c.allowed_to_update = False
347 360 c.allowed_to_merge = False
348 361 c.allowed_to_delete = False
349 362 c.allowed_to_comment = False
350 363 c.allowed_to_close = False
351 364 else:
352 365 can_change_status = PullRequestModel().check_user_change_status(
353 366 pull_request_at_ver, self._rhodecode_user)
354 367 c.allowed_to_change_status = can_change_status and not pr_closed
355 368
356 369 c.allowed_to_update = PullRequestModel().check_user_update(
357 370 pull_request_latest, self._rhodecode_user) and not pr_closed
358 371 c.allowed_to_merge = PullRequestModel().check_user_merge(
359 372 pull_request_latest, self._rhodecode_user) and not pr_closed
360 373 c.allowed_to_delete = PullRequestModel().check_user_delete(
361 374 pull_request_latest, self._rhodecode_user) and not pr_closed
362 375 c.allowed_to_comment = not pr_closed
363 376 c.allowed_to_close = c.allowed_to_merge and not pr_closed
364 377
365 378 c.forbid_adding_reviewers = False
366 379 c.forbid_author_to_review = False
367 380 c.forbid_commit_author_to_review = False
368 381
369 382 if pull_request_latest.reviewer_data and \
370 383 'rules' in pull_request_latest.reviewer_data:
371 384 rules = pull_request_latest.reviewer_data['rules'] or {}
372 385 try:
373 386 c.forbid_adding_reviewers = rules.get(
374 387 'forbid_adding_reviewers')
375 388 c.forbid_author_to_review = rules.get(
376 389 'forbid_author_to_review')
377 390 c.forbid_commit_author_to_review = rules.get(
378 391 'forbid_commit_author_to_review')
379 392 except Exception:
380 393 pass
381 394
382 395 # check merge capabilities
383 396 _merge_check = MergeCheck.validate(
384 397 pull_request_latest, auth_user=self._rhodecode_user,
385 398 translator=self.request.translate,
386 399 force_shadow_repo_refresh=force_refresh)
387 400 c.pr_merge_errors = _merge_check.error_details
388 401 c.pr_merge_possible = not _merge_check.failed
389 402 c.pr_merge_message = _merge_check.merge_msg
390 403
391 404 c.pr_merge_info = MergeCheck.get_merge_conditions(
392 405 pull_request_latest, translator=self.request.translate)
393 406
394 407 c.pull_request_review_status = _merge_check.review_status
395 408 if merge_checks:
396 409 self.request.override_renderer = \
397 410 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
398 411 return self._get_template_context(c)
399 412
400 413 comments_model = CommentsModel()
401 414
402 415 # reviewers and statuses
403 416 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
404 417 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
405 418
406 419 # GENERAL COMMENTS with versions #
407 420 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
408 421 q = q.order_by(ChangesetComment.comment_id.asc())
409 422 general_comments = q
410 423
411 424 # pick comments we want to render at current version
412 425 c.comment_versions = comments_model.aggregate_comments(
413 426 general_comments, versions, c.at_version_num)
414 427 c.comments = c.comment_versions[c.at_version_num]['until']
415 428
416 429 # INLINE COMMENTS with versions #
417 430 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
418 431 q = q.order_by(ChangesetComment.comment_id.asc())
419 432 inline_comments = q
420 433
421 434 c.inline_versions = comments_model.aggregate_comments(
422 435 inline_comments, versions, c.at_version_num, inline=True)
423 436
424 437 # inject latest version
425 438 latest_ver = PullRequest.get_pr_display_object(
426 439 pull_request_latest, pull_request_latest)
427 440
428 441 c.versions = versions + [latest_ver]
429 442
430 443 # if we use version, then do not show later comments
431 444 # than current version
432 445 display_inline_comments = collections.defaultdict(
433 446 lambda: collections.defaultdict(list))
434 447 for co in inline_comments:
435 448 if c.at_version_num:
436 449                 # pick comments up to (and including) the given version, so we
437 450                 # don't render comments made at a higher version
438 451 should_render = co.pull_request_version_id and \
439 452 co.pull_request_version_id <= c.at_version_num
440 453 else:
441 454 # showing all, for 'latest'
442 455 should_render = True
443 456
444 457 if should_render:
445 458 display_inline_comments[co.f_path][co.line_no].append(co)
446 459
447 460 # load diff data into template context, if we use compare mode then
448 461 # diff is calculated based on changes between versions of PR
449 462
450 463 source_repo = pull_request_at_ver.source_repo
451 464 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
452 465
453 466 target_repo = pull_request_at_ver.target_repo
454 467 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
455 468
456 469 if compare:
457 470 # in compare switch the diff base to latest commit from prev version
458 471 target_ref_id = prev_pull_request_display_obj.revisions[0]
459 472
460 473 # despite opening commits for bookmarks/branches/tags, we always
461 474 # convert this to rev to prevent changes after bookmark or branch change
462 475 c.source_ref_type = 'rev'
463 476 c.source_ref = source_ref_id
464 477
465 478 c.target_ref_type = 'rev'
466 479 c.target_ref = target_ref_id
467 480
468 481 c.source_repo = source_repo
469 482 c.target_repo = target_repo
470 483
471 484 c.commit_ranges = []
472 485 source_commit = EmptyCommit()
473 486 target_commit = EmptyCommit()
474 487 c.missing_requirements = False
475 488
476 489 source_scm = source_repo.scm_instance()
477 490 target_scm = target_repo.scm_instance()
478 491
479 492 shadow_scm = None
480 493 try:
481 494 shadow_scm = pull_request_latest.get_shadow_repo()
482 495 except Exception:
483 496 log.debug('Failed to get shadow repo', exc_info=True)
484 497 # try first the existing source_repo, and then shadow
485 498 # repo if we can obtain one
486 499 commits_source_repo = source_scm or shadow_scm
487 500
488 501 c.commits_source_repo = commits_source_repo
489 502 c.ancestor = None # set it to None, to hide it from PR view
490 503
491 504 # empty version means latest, so we keep this to prevent
492 505 # double caching
493 506 version_normalized = version or 'latest'
494 507 from_version_normalized = from_version or 'latest'
495 508
496 509 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
497 510 cache_file_path = diff_cache_exist(
498 511 cache_path, 'pull_request', pull_request_id, version_normalized,
499 512 from_version_normalized, source_ref_id, target_ref_id,
500 513 hide_whitespace_changes, diff_context, c.fulldiff)
501 514
502 515 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
503 516 force_recache = self.get_recache_flag()
504 517
505 518 cached_diff = None
506 519 if caching_enabled:
507 520 cached_diff = load_cached_diff(cache_file_path)
508 521
509 522 has_proper_commit_cache = (
510 523 cached_diff and cached_diff.get('commits')
511 524 and len(cached_diff.get('commits', [])) == 5
512 525 and cached_diff.get('commits')[0]
513 526 and cached_diff.get('commits')[3])
514 527
515 528 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
516 529 diff_commit_cache = \
517 530 (ancestor_commit, commit_cache, missing_requirements,
518 531 source_commit, target_commit) = cached_diff['commits']
519 532 else:
520 533 diff_commit_cache = \
521 534 (ancestor_commit, commit_cache, missing_requirements,
522 535 source_commit, target_commit) = self.get_commits(
523 536 commits_source_repo,
524 537 pull_request_at_ver,
525 538 source_commit,
526 539 source_ref_id,
527 540 source_scm,
528 541 target_commit,
529 542 target_ref_id,
530 543 target_scm)
531 544
532 545 # register our commit range
533 546 for comm in commit_cache.values():
534 547 c.commit_ranges.append(comm)
535 548
536 549 c.missing_requirements = missing_requirements
537 550 c.ancestor_commit = ancestor_commit
538 551 c.statuses = source_repo.statuses(
539 552 [x.raw_id for x in c.commit_ranges])
540 553
541 554 # auto collapse if we have more than limit
542 555 collapse_limit = diffs.DiffProcessor._collapse_commits_over
543 556 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
544 557 c.compare_mode = compare
545 558
546 559         # diff_limit is the old behavior: it will cut off the whole diff
547 560         # if the limit is applied, otherwise it will just hide the
548 561         # big files from the front-end
549 562 diff_limit = c.visual.cut_off_limit_diff
550 563 file_limit = c.visual.cut_off_limit_file
551 564
552 565 c.missing_commits = False
553 566 if (c.missing_requirements
554 567 or isinstance(source_commit, EmptyCommit)
555 568 or source_commit == target_commit):
556 569
557 570 c.missing_commits = True
558 571 else:
559 572 c.inline_comments = display_inline_comments
560 573
561 574 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
562 575 if not force_recache and has_proper_diff_cache:
563 576 c.diffset = cached_diff['diff']
564 577 (ancestor_commit, commit_cache, missing_requirements,
565 578 source_commit, target_commit) = cached_diff['commits']
566 579 else:
567 580 c.diffset = self._get_diffset(
568 581 c.source_repo.repo_name, commits_source_repo,
569 582 source_ref_id, target_ref_id,
570 583 target_commit, source_commit,
571 584 diff_limit, file_limit, c.fulldiff,
572 585 hide_whitespace_changes, diff_context)
573 586
574 587 # save cached diff
575 588 if caching_enabled:
576 589 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
577 590
578 591 c.limited_diff = c.diffset.limited_diff
579 592
580 593 # calculate removed files that are bound to comments
581 594 comment_deleted_files = [
582 595 fname for fname in display_inline_comments
583 596 if fname not in c.diffset.file_stats]
584 597
585 598 c.deleted_files_comments = collections.defaultdict(dict)
586 599 for fname, per_line_comments in display_inline_comments.items():
587 600 if fname in comment_deleted_files:
588 601 c.deleted_files_comments[fname]['stats'] = 0
589 602 c.deleted_files_comments[fname]['comments'] = list()
590 603 for lno, comments in per_line_comments.items():
591 604 c.deleted_files_comments[fname]['comments'].extend(comments)
592 605
593 606 # maybe calculate the range diff
594 607 if c.range_diff_on:
595 608 # TODO(marcink): set whitespace/context
596 609 context_lcl = 3
597 610 ign_whitespace_lcl = False
598 611
599 612 for commit in c.commit_ranges:
600 613 commit2 = commit
601 614 commit1 = commit.first_parent
602 615
603 616 range_diff_cache_file_path = diff_cache_exist(
604 617 cache_path, 'diff', commit.raw_id,
605 618 ign_whitespace_lcl, context_lcl, c.fulldiff)
606 619
607 620 cached_diff = None
608 621 if caching_enabled:
609 622 cached_diff = load_cached_diff(range_diff_cache_file_path)
610 623
611 624 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
612 625 if not force_recache and has_proper_diff_cache:
613 626 diffset = cached_diff['diff']
614 627 else:
615 628 diffset = self._get_range_diffset(
616 629 source_scm, source_repo,
617 630 commit1, commit2, diff_limit, file_limit,
618 631 c.fulldiff, ign_whitespace_lcl, context_lcl
619 632 )
620 633
621 634 # save cached diff
622 635 if caching_enabled:
623 636 cache_diff(range_diff_cache_file_path, diffset, None)
624 637
625 638 c.changes[commit.raw_id] = diffset
626 639
627 640         # this is a hack to properly display links; when creating a PR, the
628 641         # compare view and others use a different notation, and
629 642         # compare_commits.mako renders links based on the target_repo.
630 643         # We need to swap that here to generate it properly on the HTML side
631 644 c.target_repo = c.source_repo
632 645
633 646 c.commit_statuses = ChangesetStatus.STATUSES
634 647
635 648 c.show_version_changes = not pr_closed
636 649 if c.show_version_changes:
637 650 cur_obj = pull_request_at_ver
638 651 prev_obj = prev_pull_request_at_ver
639 652
640 653 old_commit_ids = prev_obj.revisions
641 654 new_commit_ids = cur_obj.revisions
642 655 commit_changes = PullRequestModel()._calculate_commit_id_changes(
643 656 old_commit_ids, new_commit_ids)
644 657 c.commit_changes_summary = commit_changes
645 658
646 659 # calculate the diff for commits between versions
647 660 c.commit_changes = []
648 661 mark = lambda cs, fw: list(
649 662 h.itertools.izip_longest([], cs, fillvalue=fw))
650 663 for c_type, raw_id in mark(commit_changes.added, 'a') \
651 664 + mark(commit_changes.removed, 'r') \
652 665 + mark(commit_changes.common, 'c'):
653 666
654 667 if raw_id in commit_cache:
655 668 commit = commit_cache[raw_id]
656 669 else:
657 670 try:
658 671 commit = commits_source_repo.get_commit(raw_id)
659 672 except CommitDoesNotExistError:
660 673                     # in case we fail to extract it, still use a "dummy" commit
661 674                     # for display in the commit diff
662 675 commit = h.AttributeDict(
663 676 {'raw_id': raw_id,
664 677 'message': 'EMPTY or MISSING COMMIT'})
665 678 c.commit_changes.append([c_type, commit])
666 679
667 680 # current user review statuses for each version
668 681 c.review_versions = {}
669 682 if self._rhodecode_user.user_id in allowed_reviewers:
670 683 for co in general_comments:
671 684 if co.author.user_id == self._rhodecode_user.user_id:
672 685 status = co.status_change
673 686 if status:
674 687 _ver_pr = status[0].comment.pull_request_version_id
675 688 c.review_versions[_ver_pr] = status[0]
676 689
677 690 return self._get_template_context(c)
678 691
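
A note on the version handling above: `from_version` falls back to `version`, so the view only enters COMPARE mode when two different versions are requested; otherwise it stays in VIEW-at-version mode. A small sketch of that decision, assuming `get_pr_version` reports 'latest' when no explicit version is requested (the version values are invented):

def view_mode(version=None, from_version=None):
    # mirrors the GET handling in pull_request_show: from_version defaults to version
    from_version = from_version or version
    at_version = version or 'latest'
    prev_at_version = from_version or 'latest'
    return 'COMPARE' if at_version != prev_at_version else 'VIEW'

assert view_mode() == 'VIEW'              # latest
assert view_mode('2') == 'VIEW'           # view the PR at version 2
assert view_mode('2', '1') == 'COMPARE'   # compare version 1 against version 2
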
679 692 def get_commits(
680 693 self, commits_source_repo, pull_request_at_ver, source_commit,
681 694 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
682 695 commit_cache = collections.OrderedDict()
683 696 missing_requirements = False
684 697 try:
685 698 pre_load = ["author", "branch", "date", "message", "parents"]
686 699 show_revs = pull_request_at_ver.revisions
687 700 for rev in show_revs:
688 701 comm = commits_source_repo.get_commit(
689 702 commit_id=rev, pre_load=pre_load)
690 703 commit_cache[comm.raw_id] = comm
691 704
692 705 # Order here matters, we first need to get target, and then
693 706 # the source
694 707 target_commit = commits_source_repo.get_commit(
695 708 commit_id=safe_str(target_ref_id))
696 709
697 710 source_commit = commits_source_repo.get_commit(
698 711 commit_id=safe_str(source_ref_id))
699 712 except CommitDoesNotExistError:
700 713 log.warning(
701 714 'Failed to get commit from `{}` repo'.format(
702 715 commits_source_repo), exc_info=True)
703 716 except RepositoryRequirementError:
704 717 log.warning(
705 718 'Failed to get all required data from repo', exc_info=True)
706 719 missing_requirements = True
707 720 ancestor_commit = None
708 721 try:
709 722 ancestor_id = source_scm.get_common_ancestor(
710 723 source_commit.raw_id, target_commit.raw_id, target_scm)
711 724 ancestor_commit = source_scm.get_commit(ancestor_id)
712 725 except Exception:
713 726 ancestor_commit = None
714 727 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
715 728
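
The cache checks in pull_request_show above imply that the payload written by cache_diff and read by load_cached_diff (neither function is shown in this hunk) is a mapping with a 'diff' entry and a 'commits' entry holding exactly the 5-tuple returned by get_commits. A minimal sketch of that expectation, with placeholder values standing in for real commit objects:

import collections

def has_proper_commit_cache(cached_diff):
    # mirrors the validity check in pull_request_show: 'commits' must be the
    # full 5-tuple, and both the ancestor (index 0) and the source commit
    # (index 3) must be present for the cached entry to be reused
    commits = (cached_diff or {}).get('commits') or []
    return len(commits) == 5 and bool(commits[0]) and bool(commits[3])

# hypothetical payload shape; the real serialization is up to cache_diff/load_cached_diff
example_payload = {
    'diff': '<rendered DiffSet>',
    'commits': ('<ancestor_commit>', collections.OrderedDict(), False,
                '<source_commit>', '<target_commit>'),
}
assert has_proper_commit_cache(example_payload)
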
716 729 def assure_not_empty_repo(self):
717 730 _ = self.request.translate
718 731
719 732 try:
720 733 self.db_repo.scm_instance().get_commit()
721 734 except EmptyRepositoryError:
722 735 h.flash(h.literal(_('There are no commits yet')),
723 736 category='warning')
724 737 raise HTTPFound(
725 738 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
726 739
727 740 @LoginRequired()
728 741 @NotAnonymous()
729 742 @HasRepoPermissionAnyDecorator(
730 743 'repository.read', 'repository.write', 'repository.admin')
731 744 @view_config(
732 745 route_name='pullrequest_new', request_method='GET',
733 746 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
734 747 def pull_request_new(self):
735 748 _ = self.request.translate
736 749 c = self.load_default_context()
737 750
738 751 self.assure_not_empty_repo()
739 752 source_repo = self.db_repo
740 753
741 754 commit_id = self.request.GET.get('commit')
742 755 branch_ref = self.request.GET.get('branch')
743 756 bookmark_ref = self.request.GET.get('bookmark')
744 757
745 758 try:
746 759 source_repo_data = PullRequestModel().generate_repo_data(
747 760 source_repo, commit_id=commit_id,
748 761 branch=branch_ref, bookmark=bookmark_ref,
749 762 translator=self.request.translate)
750 763 except CommitDoesNotExistError as e:
751 764 log.exception(e)
752 765 h.flash(_('Commit does not exist'), 'error')
753 766 raise HTTPFound(
754 767 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
755 768
756 769 default_target_repo = source_repo
757 770
758 771 if source_repo.parent and c.has_origin_repo_read_perm:
759 772 parent_vcs_obj = source_repo.parent.scm_instance()
760 773 if parent_vcs_obj and not parent_vcs_obj.is_empty():
761 774 # change default if we have a parent repo
762 775 default_target_repo = source_repo.parent
763 776
764 777 target_repo_data = PullRequestModel().generate_repo_data(
765 778 default_target_repo, translator=self.request.translate)
766 779
767 780 selected_source_ref = source_repo_data['refs']['selected_ref']
768 781 title_source_ref = ''
769 782 if selected_source_ref:
770 783 title_source_ref = selected_source_ref.split(':', 2)[1]
771 784 c.default_title = PullRequestModel().generate_pullrequest_title(
772 785 source=source_repo.repo_name,
773 786 source_ref=title_source_ref,
774 787 target=default_target_repo.repo_name
775 788 )
776 789
777 790 c.default_repo_data = {
778 791 'source_repo_name': source_repo.repo_name,
779 792 'source_refs_json': json.dumps(source_repo_data),
780 793 'target_repo_name': default_target_repo.repo_name,
781 794 'target_refs_json': json.dumps(target_repo_data),
782 795 }
783 796 c.default_source_ref = selected_source_ref
784 797
785 798 return self._get_template_context(c)
786 799
787 800 @LoginRequired()
788 801 @NotAnonymous()
789 802 @HasRepoPermissionAnyDecorator(
790 803 'repository.read', 'repository.write', 'repository.admin')
791 804 @view_config(
792 805 route_name='pullrequest_repo_refs', request_method='GET',
793 806 renderer='json_ext', xhr=True)
794 807 def pull_request_repo_refs(self):
795 808 self.load_default_context()
796 809 target_repo_name = self.request.matchdict['target_repo_name']
797 810 repo = Repository.get_by_repo_name(target_repo_name)
798 811 if not repo:
799 812 raise HTTPNotFound()
800 813
801 814 target_perm = HasRepoPermissionAny(
802 815 'repository.read', 'repository.write', 'repository.admin')(
803 816 target_repo_name)
804 817 if not target_perm:
805 818 raise HTTPNotFound()
806 819
807 820 return PullRequestModel().generate_repo_data(
808 821 repo, translator=self.request.translate)
809 822
810 823 @LoginRequired()
811 824 @NotAnonymous()
812 825 @HasRepoPermissionAnyDecorator(
813 826 'repository.read', 'repository.write', 'repository.admin')
814 827 @view_config(
815 828 route_name='pullrequest_repo_targets', request_method='GET',
816 829 renderer='json_ext', xhr=True)
817 830 def pullrequest_repo_targets(self):
818 831 _ = self.request.translate
819 832 filter_query = self.request.GET.get('query')
820 833
821 834 # get the parents
822 835 parent_target_repos = []
823 836 if self.db_repo.parent:
824 837 parents_query = Repository.query() \
825 838 .order_by(func.length(Repository.repo_name)) \
826 839 .filter(Repository.fork_id == self.db_repo.parent.repo_id)
827 840
828 841 if filter_query:
829 842 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
830 843 parents_query = parents_query.filter(
831 844 Repository.repo_name.ilike(ilike_expression))
832 845 parents = parents_query.limit(20).all()
833 846
834 847 for parent in parents:
835 848 parent_vcs_obj = parent.scm_instance()
836 849 if parent_vcs_obj and not parent_vcs_obj.is_empty():
837 850 parent_target_repos.append(parent)
838 851
839 852 # get other forks, and repo itself
840 853 query = Repository.query() \
841 854 .order_by(func.length(Repository.repo_name)) \
842 855 .filter(
843 856 or_(Repository.repo_id == self.db_repo.repo_id, # repo itself
844 857 Repository.fork_id == self.db_repo.repo_id) # forks of this repo
845 858 ) \
846 859 .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))
847 860
848 861 if filter_query:
849 862 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
850 863 query = query.filter(Repository.repo_name.ilike(ilike_expression))
851 864
852 865         limit = max(20 - len(parent_target_repos), 5) # not less than 5
853 866 target_repos = query.limit(limit).all()
854 867
855 868 all_target_repos = target_repos + parent_target_repos
856 869
857 870 repos = []
858 871 # This checks permissions to the repositories
859 872 for obj in ScmModel().get_repos(all_target_repos):
860 873 repos.append({
861 874 'id': obj['name'],
862 875 'text': obj['name'],
863 876 'type': 'repo',
864 877 'repo_id': obj['dbrepo']['repo_id'],
865 878 'repo_type': obj['dbrepo']['repo_type'],
866 879 'private': obj['dbrepo']['private'],
867 880
868 881 })
869 882
870 883 data = {
871 884 'more': False,
872 885 'results': [{
873 886 'text': _('Repositories'),
874 887 'children': repos
875 888 }] if repos else []
876 889 }
877 890 return data
878 891
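
For reference, pullrequest_repo_targets above returns parent forks, sibling forks and the repository itself grouped under a single 'Repositories' entry, which presumably feeds the target-repository selector on the new pull request page. An illustrative payload (repository names and ids are invented):

# keys mirror the dict built in the view above; values are made up
example_response = {
    'more': False,
    'results': [{
        'text': 'Repositories',
        'children': [
            {'id': 'acme/widget', 'text': 'acme/widget', 'type': 'repo',
             'repo_id': 7, 'repo_type': 'hg', 'private': False},
            {'id': 'acme/widget-fork', 'text': 'acme/widget-fork', 'type': 'repo',
             'repo_id': 42, 'repo_type': 'hg', 'private': False},
        ],
    }],
}
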
879 892 @LoginRequired()
880 893 @NotAnonymous()
881 894 @HasRepoPermissionAnyDecorator(
882 895 'repository.read', 'repository.write', 'repository.admin')
883 896 @CSRFRequired()
884 897 @view_config(
885 898 route_name='pullrequest_create', request_method='POST',
886 899 renderer=None)
887 900 def pull_request_create(self):
888 901 _ = self.request.translate
889 902 self.assure_not_empty_repo()
890 903 self.load_default_context()
891 904
892 905 controls = peppercorn.parse(self.request.POST.items())
893 906
894 907 try:
895 908 form = PullRequestForm(
896 909 self.request.translate, self.db_repo.repo_id)()
897 910 _form = form.to_python(controls)
898 911 except formencode.Invalid as errors:
899 912 if errors.error_dict.get('revisions'):
900 913 msg = 'Revisions: %s' % errors.error_dict['revisions']
901 914 elif errors.error_dict.get('pullrequest_title'):
902 915 msg = errors.error_dict.get('pullrequest_title')
903 916 else:
904 917 msg = _('Error creating pull request: {}').format(errors)
905 918 log.exception(msg)
906 919 h.flash(msg, 'error')
907 920
908 921 # would rather just go back to form ...
909 922 raise HTTPFound(
910 923 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
911 924
912 925 source_repo = _form['source_repo']
913 926 source_ref = _form['source_ref']
914 927 target_repo = _form['target_repo']
915 928 target_ref = _form['target_ref']
916 929 commit_ids = _form['revisions'][::-1]
917 930
918 931 # find the ancestor for this pr
919 932 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
920 933 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
921 934
935         if not (source_db_repo and target_db_repo):
936             h.flash(_('Source repository or target repository not found.'), category='error')
937 raise HTTPFound(
938 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
939
922 940 # re-check permissions again here
923 941 # source_repo we must have read permissions
924 942
925 943 source_perm = HasRepoPermissionAny(
926 'repository.read',
927 'repository.write', 'repository.admin')(source_db_repo.repo_name)
944 'repository.read', 'repository.write', 'repository.admin')(
945 source_db_repo.repo_name)
928 946 if not source_perm:
929 947 msg = _('Not Enough permissions to source repo `{}`.'.format(
930 948 source_db_repo.repo_name))
931 949 h.flash(msg, category='error')
932 950 # copy the args back to redirect
933 951 org_query = self.request.GET.mixed()
934 952 raise HTTPFound(
935 953 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
936 954 _query=org_query))
937 955
938 956 # target repo we must have read permissions, and also later on
939 957 # we want to check branch permissions here
940 958 target_perm = HasRepoPermissionAny(
941 'repository.read',
942 'repository.write', 'repository.admin')(target_db_repo.repo_name)
959 'repository.read', 'repository.write', 'repository.admin')(
960 target_db_repo.repo_name)
943 961 if not target_perm:
944 962 msg = _('Not Enough permissions to target repo `{}`.'.format(
945 963 target_db_repo.repo_name))
946 964 h.flash(msg, category='error')
947 965 # copy the args back to redirect
948 966 org_query = self.request.GET.mixed()
949 967 raise HTTPFound(
950 968 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
951 969 _query=org_query))
952 970
953 971 source_scm = source_db_repo.scm_instance()
954 972 target_scm = target_db_repo.scm_instance()
955 973
956 974 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
957 975 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
958 976
959 977 ancestor = source_scm.get_common_ancestor(
960 978 source_commit.raw_id, target_commit.raw_id, target_scm)
961 979
962 980 # recalculate target ref based on ancestor
963 981 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
964 982 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
965 983
966 984 get_default_reviewers_data, validate_default_reviewers = \
967 985 PullRequestModel().get_reviewer_functions()
968 986
969 987 # recalculate reviewers logic, to make sure we can validate this
970 988 reviewer_rules = get_default_reviewers_data(
971 989 self._rhodecode_db_user, source_db_repo,
972 990 source_commit, target_db_repo, target_commit)
973 991
974 992 given_reviewers = _form['review_members']
975 993 reviewers = validate_default_reviewers(
976 994 given_reviewers, reviewer_rules)
977 995
978 996 pullrequest_title = _form['pullrequest_title']
979 997 title_source_ref = source_ref.split(':', 2)[1]
980 998 if not pullrequest_title:
981 999 pullrequest_title = PullRequestModel().generate_pullrequest_title(
982 1000 source=source_repo,
983 1001 source_ref=title_source_ref,
984 1002 target=target_repo
985 1003 )
986 1004
987 1005 description = _form['pullrequest_desc']
988 1006 description_renderer = _form['description_renderer']
989 1007
990 1008 try:
991 1009 pull_request = PullRequestModel().create(
992 1010 created_by=self._rhodecode_user.user_id,
993 1011 source_repo=source_repo,
994 1012 source_ref=source_ref,
995 1013 target_repo=target_repo,
996 1014 target_ref=target_ref,
997 1015 revisions=commit_ids,
998 1016 reviewers=reviewers,
999 1017 title=pullrequest_title,
1000 1018 description=description,
1001 1019 description_renderer=description_renderer,
1002 1020 reviewer_data=reviewer_rules,
1003 1021 auth_user=self._rhodecode_user
1004 1022 )
1005 1023 Session().commit()
1006 1024
1007 1025 h.flash(_('Successfully opened new pull request'),
1008 1026 category='success')
1009 1027 except Exception:
1010 1028 msg = _('Error occurred during creation of this pull request.')
1011 1029 log.exception(msg)
1012 1030 h.flash(msg, category='error')
1013 1031
1014 1032 # copy the args back to redirect
1015 1033 org_query = self.request.GET.mixed()
1016 1034 raise HTTPFound(
1017 1035 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1018 1036 _query=org_query))
1019 1037
1020 1038 raise HTTPFound(
1021 1039 h.route_path('pullrequest_show', repo_name=target_repo,
1022 1040 pull_request_id=pull_request.pull_request_id))
1023 1041
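
One detail of pull_request_create worth spelling out: before the pull request is created, the target reference is re-pinned to the common ancestor of the two heads, so that later movement of the target branch does not silently shift the merge base. With the 'type:name:commit_id' notation used for refs above, that step looks roughly like this (the ids are invented):

# hypothetical values; the real ancestor comes from get_common_ancestor()
target_ref = 'branch:default:ffffffffffff'
ancestor = 'aaaaaaaaaaaa'

target_ref_type, target_ref_name, _old_commit = target_ref.split(':')
pinned_target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
assert pinned_target_ref == 'branch:default:aaaaaaaaaaaa'
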
1024 1042 @LoginRequired()
1025 1043 @NotAnonymous()
1026 1044 @HasRepoPermissionAnyDecorator(
1027 1045 'repository.read', 'repository.write', 'repository.admin')
1028 1046 @CSRFRequired()
1029 1047 @view_config(
1030 1048 route_name='pullrequest_update', request_method='POST',
1031 1049 renderer='json_ext')
1032 1050 def pull_request_update(self):
1033 1051 pull_request = PullRequest.get_or_404(
1034 1052 self.request.matchdict['pull_request_id'])
1035 1053 _ = self.request.translate
1036 1054
1037 1055 self.load_default_context()
1038 1056
1039 1057 if pull_request.is_closed():
1040 1058 log.debug('update: forbidden because pull request is closed')
1041 1059 msg = _(u'Cannot update closed pull requests.')
1042 1060 h.flash(msg, category='error')
1043 1061 return True
1044 1062
1063 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
1064 log.debug('update: forbidden because pull request is in state %s',
1065 pull_request.pull_request_state)
1066 msg = _(u'Cannot update pull requests in state other than `{}`. '
1067 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1068 pull_request.pull_request_state)
1069 h.flash(msg, category='error')
1070 return True
1071
1045 1072 # only owner or admin can update it
1046 1073 allowed_to_update = PullRequestModel().check_user_update(
1047 1074 pull_request, self._rhodecode_user)
1048 1075 if allowed_to_update:
1049 1076 controls = peppercorn.parse(self.request.POST.items())
1050 1077
1051 1078 if 'review_members' in controls:
1052 1079 self._update_reviewers(
1053 1080 pull_request, controls['review_members'],
1054 1081 pull_request.reviewer_data)
1055 1082 elif str2bool(self.request.POST.get('update_commits', 'false')):
1056 1083 self._update_commits(pull_request)
1057 1084 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
1058 1085 self._edit_pull_request(pull_request)
1059 1086 else:
1060 1087 raise HTTPBadRequest()
1061 1088 return True
1062 1089 raise HTTPForbidden()
1063 1090
1064 1091 def _edit_pull_request(self, pull_request):
1065 1092 _ = self.request.translate
1066 1093
1067 1094 try:
1068 1095 PullRequestModel().edit(
1069 1096 pull_request,
1070 1097 self.request.POST.get('title'),
1071 1098 self.request.POST.get('description'),
1072 1099 self.request.POST.get('description_renderer'),
1073 1100 self._rhodecode_user)
1074 1101 except ValueError:
1075 1102 msg = _(u'Cannot update closed pull requests.')
1076 1103 h.flash(msg, category='error')
1077 1104 return
1078 1105 else:
1079 1106 Session().commit()
1080 1107
1081 1108 msg = _(u'Pull request title & description updated.')
1082 1109 h.flash(msg, category='success')
1083 1110 return
1084 1111
1085 1112 def _update_commits(self, pull_request):
1086 1113 _ = self.request.translate
1087 resp = PullRequestModel().update_commits(pull_request)
1114
1115 with pull_request.set_state(PullRequest.STATE_UPDATING):
1116 resp = PullRequestModel().update_commits(pull_request)
1088 1117
1089 1118 if resp.executed:
1090 1119
1091 1120 if resp.target_changed and resp.source_changed:
1092 1121 changed = 'target and source repositories'
1093 1122 elif resp.target_changed and not resp.source_changed:
1094 1123 changed = 'target repository'
1095 1124 elif not resp.target_changed and resp.source_changed:
1096 1125 changed = 'source repository'
1097 1126 else:
1098 1127 changed = 'nothing'
1099 1128
1100 msg = _(
1101 u'Pull request updated to "{source_commit_id}" with '
1102 u'{count_added} added, {count_removed} removed commits. '
1103 u'Source of changes: {change_source}')
1129 msg = _(u'Pull request updated to "{source_commit_id}" with '
1130 u'{count_added} added, {count_removed} removed commits. '
1131 u'Source of changes: {change_source}')
1104 1132 msg = msg.format(
1105 1133 source_commit_id=pull_request.source_ref_parts.commit_id,
1106 1134 count_added=len(resp.changes.added),
1107 1135 count_removed=len(resp.changes.removed),
1108 1136 change_source=changed)
1109 1137 h.flash(msg, category='success')
1110 1138
1111 1139 channel = '/repo${}$/pr/{}'.format(
1112 pull_request.target_repo.repo_name,
1113 pull_request.pull_request_id)
1140 pull_request.target_repo.repo_name, pull_request.pull_request_id)
1114 1141 message = msg + (
1115 1142 ' - <a onclick="window.location.reload()">'
1116 1143 '<strong>{}</strong></a>'.format(_('Reload page')))
1117 1144 channelstream.post_message(
1118 1145 channel, message, self._rhodecode_user.username,
1119 1146 registry=self.request.registry)
1120 1147 else:
1121 1148 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1122 1149 warning_reasons = [
1123 1150 UpdateFailureReason.NO_CHANGE,
1124 1151 UpdateFailureReason.WRONG_REF_TYPE,
1125 1152 ]
1126 1153 category = 'warning' if resp.reason in warning_reasons else 'error'
1127 1154 h.flash(msg, category=category)
1128 1155
1129 1156 @LoginRequired()
1130 1157 @NotAnonymous()
1131 1158 @HasRepoPermissionAnyDecorator(
1132 1159 'repository.read', 'repository.write', 'repository.admin')
1133 1160 @CSRFRequired()
1134 1161 @view_config(
1135 1162 route_name='pullrequest_merge', request_method='POST',
1136 1163 renderer='json_ext')
1137 1164 def pull_request_merge(self):
1138 1165 """
1139 1166 Merge will perform a server-side merge of the specified
1140 1167 pull request, if the pull request is approved and mergeable.
1141 1168 After successful merging, the pull request is automatically
1142 1169 closed, with a relevant comment.
1143 1170 """
1144 1171 pull_request = PullRequest.get_or_404(
1145 1172 self.request.matchdict['pull_request_id'])
1173 _ = self.request.translate
1174
1175 if pull_request.pull_request_state != PullRequest.STATE_CREATED:
1176             log.debug('merge: forbidden because pull request is in state %s',
1177 pull_request.pull_request_state)
1178 msg = _(u'Cannot merge pull requests in state other than `{}`. '
1179 u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
1180 pull_request.pull_request_state)
1181 h.flash(msg, category='error')
1182 raise HTTPFound(
1183 h.route_path('pullrequest_show',
1184 repo_name=pull_request.target_repo.repo_name,
1185 pull_request_id=pull_request.pull_request_id))
1146 1186
1147 1187 self.load_default_context()
1148 check = MergeCheck.validate(
1149 pull_request, auth_user=self._rhodecode_user,
1150 translator=self.request.translate)
1188
1189 with pull_request.set_state(PullRequest.STATE_UPDATING):
1190 check = MergeCheck.validate(
1191 pull_request, auth_user=self._rhodecode_user,
1192 translator=self.request.translate)
1151 1193 merge_possible = not check.failed
1152 1194
1153 1195 for err_type, error_msg in check.errors:
1154 1196 h.flash(error_msg, category=err_type)
1155 1197
1156 1198 if merge_possible:
1157 1199 log.debug("Pre-conditions checked, trying to merge.")
1158 1200 extras = vcs_operation_context(
1159 1201 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1160 1202 username=self._rhodecode_db_user.username, action='push',
1161 1203 scm=pull_request.target_repo.repo_type)
1162 self._merge_pull_request(
1163 pull_request, self._rhodecode_db_user, extras)
1204 with pull_request.set_state(PullRequest.STATE_UPDATING):
1205 self._merge_pull_request(
1206 pull_request, self._rhodecode_db_user, extras)
1164 1207 else:
1165 1208 log.debug("Pre-conditions failed, NOT merging.")
1166 1209
1167 1210 raise HTTPFound(
1168 1211 h.route_path('pullrequest_show',
1169 1212 repo_name=pull_request.target_repo.repo_name,
1170 1213 pull_request_id=pull_request.pull_request_id))
1171 1214
1172 1215 def _merge_pull_request(self, pull_request, user, extras):
1173 1216 _ = self.request.translate
1174 1217 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1175 1218
1176 1219 if merge_resp.executed:
1177 1220 log.debug("The merge was successful, closing the pull request.")
1178 1221 PullRequestModel().close_pull_request(
1179 1222 pull_request.pull_request_id, user)
1180 1223 Session().commit()
1181 1224 msg = _('Pull request was successfully merged and closed.')
1182 1225 h.flash(msg, category='success')
1183 1226 else:
1184 1227 log.debug(
1185 1228 "The merge was not successful. Merge response: %s", merge_resp)
1186 1229 msg = merge_resp.merge_status_message
1187 1230 h.flash(msg, category='error')
1188 1231
1189 1232 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1190 1233 _ = self.request.translate
1191 1234 get_default_reviewers_data, validate_default_reviewers = \
1192 1235 PullRequestModel().get_reviewer_functions()
1193 1236
1194 1237 try:
1195 1238 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1196 1239 except ValueError as e:
1197 1240 log.error('Reviewers Validation: {}'.format(e))
1198 1241 h.flash(e, category='error')
1199 1242 return
1200 1243
1201 1244 PullRequestModel().update_reviewers(
1202 1245 pull_request, reviewers, self._rhodecode_user)
1203 1246 h.flash(_('Pull request reviewers updated.'), category='success')
1204 1247 Session().commit()
1205 1248
1206 1249 @LoginRequired()
1207 1250 @NotAnonymous()
1208 1251 @HasRepoPermissionAnyDecorator(
1209 1252 'repository.read', 'repository.write', 'repository.admin')
1210 1253 @CSRFRequired()
1211 1254 @view_config(
1212 1255 route_name='pullrequest_delete', request_method='POST',
1213 1256 renderer='json_ext')
1214 1257 def pull_request_delete(self):
1215 1258 _ = self.request.translate
1216 1259
1217 1260 pull_request = PullRequest.get_or_404(
1218 1261 self.request.matchdict['pull_request_id'])
1219 1262 self.load_default_context()
1220 1263
1221 1264 pr_closed = pull_request.is_closed()
1222 1265 allowed_to_delete = PullRequestModel().check_user_delete(
1223 1266 pull_request, self._rhodecode_user) and not pr_closed
1224 1267
1225 1268 # only owner can delete it !
1226 1269 if allowed_to_delete:
1227 1270 PullRequestModel().delete(pull_request, self._rhodecode_user)
1228 1271 Session().commit()
1229 1272 h.flash(_('Successfully deleted pull request'),
1230 1273 category='success')
1231 1274 raise HTTPFound(h.route_path('pullrequest_show_all',
1232 1275 repo_name=self.db_repo_name))
1233 1276
1234 1277 log.warning('user %s tried to delete pull request without access',
1235 1278 self._rhodecode_user)
1236 1279 raise HTTPNotFound()
1237 1280
1238 1281 @LoginRequired()
1239 1282 @NotAnonymous()
1240 1283 @HasRepoPermissionAnyDecorator(
1241 1284 'repository.read', 'repository.write', 'repository.admin')
1242 1285 @CSRFRequired()
1243 1286 @view_config(
1244 1287 route_name='pullrequest_comment_create', request_method='POST',
1245 1288 renderer='json_ext')
1246 1289 def pull_request_comment_create(self):
1247 1290 _ = self.request.translate
1248 1291
1249 1292 pull_request = PullRequest.get_or_404(
1250 1293 self.request.matchdict['pull_request_id'])
1251 1294 pull_request_id = pull_request.pull_request_id
1252 1295
1253 1296 if pull_request.is_closed():
1254 1297 log.debug('comment: forbidden because pull request is closed')
1255 1298 raise HTTPForbidden()
1256 1299
1257 1300 allowed_to_comment = PullRequestModel().check_user_comment(
1258 1301 pull_request, self._rhodecode_user)
1259 1302 if not allowed_to_comment:
1260 1303 log.debug(
1261 1304 'comment: forbidden because pull request is from forbidden repo')
1262 1305 raise HTTPForbidden()
1263 1306
1264 1307 c = self.load_default_context()
1265 1308
1266 1309 status = self.request.POST.get('changeset_status', None)
1267 1310 text = self.request.POST.get('text')
1268 1311 comment_type = self.request.POST.get('comment_type')
1269 1312 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1270 1313 close_pull_request = self.request.POST.get('close_pull_request')
1271 1314
1272 1315         # the logic here works as follows: if we submit a close-PR
1273 1316         # comment, use the `close_pull_request_with_comment` function,
1274 1317         # else handle the regular comment logic
1275 1318
1276 1319 if close_pull_request:
1277 1320 # only owner or admin or person with write permissions
1278 1321 allowed_to_close = PullRequestModel().check_user_update(
1279 1322 pull_request, self._rhodecode_user)
1280 1323 if not allowed_to_close:
1281 1324 log.debug('comment: forbidden because not allowed to close '
1282 1325 'pull request %s', pull_request_id)
1283 1326 raise HTTPForbidden()
1284 1327 comment, status = PullRequestModel().close_pull_request_with_comment(
1285 1328 pull_request, self._rhodecode_user, self.db_repo, message=text,
1286 1329 auth_user=self._rhodecode_user)
1287 1330 Session().flush()
1288 1331 events.trigger(
1289 1332 events.PullRequestCommentEvent(pull_request, comment))
1290 1333
1291 1334 else:
1292 1335 # regular comment case, could be inline, or one with status.
1293 1336 # for that one we check also permissions
1294 1337
1295 1338 allowed_to_change_status = PullRequestModel().check_user_change_status(
1296 1339 pull_request, self._rhodecode_user)
1297 1340
1298 1341 if status and allowed_to_change_status:
1299 1342 message = (_('Status change %(transition_icon)s %(status)s')
1300 1343 % {'transition_icon': '>',
1301 1344 'status': ChangesetStatus.get_status_lbl(status)})
1302 1345 text = text or message
1303 1346
1304 1347 comment = CommentsModel().create(
1305 1348 text=text,
1306 1349 repo=self.db_repo.repo_id,
1307 1350 user=self._rhodecode_user.user_id,
1308 1351 pull_request=pull_request,
1309 1352 f_path=self.request.POST.get('f_path'),
1310 1353 line_no=self.request.POST.get('line'),
1311 1354 status_change=(ChangesetStatus.get_status_lbl(status)
1312 1355 if status and allowed_to_change_status else None),
1313 1356 status_change_type=(status
1314 1357 if status and allowed_to_change_status else None),
1315 1358 comment_type=comment_type,
1316 1359 resolves_comment_id=resolves_comment_id,
1317 1360 auth_user=self._rhodecode_user
1318 1361 )
1319 1362
1320 1363 if allowed_to_change_status:
1321 1364 # calculate old status before we change it
1322 1365 old_calculated_status = pull_request.calculated_review_status()
1323 1366
1324 1367 # get status if set !
1325 1368 if status:
1326 1369 ChangesetStatusModel().set_status(
1327 1370 self.db_repo.repo_id,
1328 1371 status,
1329 1372 self._rhodecode_user.user_id,
1330 1373 comment,
1331 1374 pull_request=pull_request
1332 1375 )
1333 1376
1334 1377 Session().flush()
1335 1378 # this is somehow required to get access to some relationship
1336 1379 # loaded on comment
1337 1380 Session().refresh(comment)
1338 1381
1339 1382 events.trigger(
1340 1383 events.PullRequestCommentEvent(pull_request, comment))
1341 1384
1342 1385 # we now calculate the status of pull request, and based on that
1343 1386 # calculation we set the commits status
1344 1387 calculated_status = pull_request.calculated_review_status()
1345 1388 if old_calculated_status != calculated_status:
1346 1389 PullRequestModel()._trigger_pull_request_hook(
1347 1390 pull_request, self._rhodecode_user, 'review_status_change')
1348 1391
1349 1392 Session().commit()
1350 1393
1351 1394 data = {
1352 1395 'target_id': h.safeid(h.safe_unicode(
1353 1396 self.request.POST.get('f_path'))),
1354 1397 }
1355 1398 if comment:
1356 1399 c.co = comment
1357 1400 rendered_comment = render(
1358 1401 'rhodecode:templates/changeset/changeset_comment_block.mako',
1359 1402 self._get_template_context(c), self.request)
1360 1403
1361 1404 data.update(comment.get_dict())
1362 1405 data.update({'rendered_text': rendered_comment})
1363 1406
1364 1407 return data
1365 1408
1366 1409 @LoginRequired()
1367 1410 @NotAnonymous()
1368 1411 @HasRepoPermissionAnyDecorator(
1369 1412 'repository.read', 'repository.write', 'repository.admin')
1370 1413 @CSRFRequired()
1371 1414 @view_config(
1372 1415 route_name='pullrequest_comment_delete', request_method='POST',
1373 1416 renderer='json_ext')
1374 1417 def pull_request_comment_delete(self):
1375 1418 pull_request = PullRequest.get_or_404(
1376 1419 self.request.matchdict['pull_request_id'])
1377 1420
1378 1421 comment = ChangesetComment.get_or_404(
1379 1422 self.request.matchdict['comment_id'])
1380 1423 comment_id = comment.comment_id
1381 1424
1382 1425 if pull_request.is_closed():
1383 1426 log.debug('comment: forbidden because pull request is closed')
1384 1427 raise HTTPForbidden()
1385 1428
1386 1429 if not comment:
1387 1430 log.debug('Comment with id:%s not found, skipping', comment_id)
1388 1431 # comment already deleted in another call probably
1389 1432 return True
1390 1433
1391 1434 if comment.pull_request.is_closed():
1392 1435 # don't allow deleting comments on closed pull request
1393 1436 raise HTTPForbidden()
1394 1437
1395 1438 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1396 1439 super_admin = h.HasPermissionAny('hg.admin')()
1397 1440 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1398 1441 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1399 1442 comment_repo_admin = is_repo_admin and is_repo_comment
1400 1443
1401 1444 if super_admin or comment_owner or comment_repo_admin:
1402 1445 old_calculated_status = comment.pull_request.calculated_review_status()
1403 1446 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1404 1447 Session().commit()
1405 1448 calculated_status = comment.pull_request.calculated_review_status()
1406 1449 if old_calculated_status != calculated_status:
1407 1450 PullRequestModel()._trigger_pull_request_hook(
1408 1451 comment.pull_request, self._rhodecode_user, 'review_status_change')
1409 1452 return True
1410 1453 else:
1411 1454 log.warning('No permissions for user %s to delete comment_id: %s',
1412 1455 self._rhodecode_db_user, comment_id)
1413 1456 raise HTTPNotFound()
@@ -1,4751 +1,4800 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from zope.cachedescriptors.property import Lazy as LazyProperty
51 51
52 52 from pyramid.threadlocal import get_current_request
53 53
54 54 from rhodecode.translation import _
55 55 from rhodecode.lib.vcs import get_vcs_instance
56 56 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 57 from rhodecode.lib.utils2 import (
58 58 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
59 59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 60 glob2re, StrictAttributeDict, cleaned_uri)
61 61 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 62 JsonRaw
63 63 from rhodecode.lib.ext_json import json
64 64 from rhodecode.lib.caching_query import FromCache
65 65 from rhodecode.lib.encrypt import AESCipher
66 66
67 67 from rhodecode.model.meta import Base, Session
68 68
69 69 URL_SEP = '/'
70 70 log = logging.getLogger(__name__)
71 71
72 72 # =============================================================================
73 73 # BASE CLASSES
74 74 # =============================================================================
75 75
76 76 # this is propagated from .ini file rhodecode.encrypted_values.secret or
77 77 # beaker.session.secret if first is not set.
78 78 # and initialized at environment.py
79 79 ENCRYPTION_KEY = None
80 80
81 81 # used to sort permissions by types, '#' used here is not allowed to be in
82 82 # usernames, and it's very early in sorted string.printable table.
83 83 PERMISSION_TYPE_SORT = {
84 84 'admin': '####',
85 85 'write': '###',
86 86 'read': '##',
87 87 'none': '#',
88 88 }
89 89
90 90
91 91 def display_user_sort(obj):
92 92 """
93 93 Sort function used to sort permissions in .permissions() function of
94 94     Repository, RepoGroup, UserGroup. It also puts the default user in front
95 95     of all other resources
96 96 """
97 97
98 98 if obj.username == User.DEFAULT_USER:
99 99 return '#####'
100 100 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
101 101 return prefix + obj.username
102 102
103 103
104 104 def display_user_group_sort(obj):
105 105 """
106 106 Sort function used to sort permissions in .permissions() function of
107 107     Repository, RepoGroup, UserGroup. It also puts the default user in front
108 108     of all other resources
109 109 """
110 110
111 111 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
112 112 return prefix + obj.users_group_name
113 113
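
The '#' prefixes returned by these two sort helpers rely on plain lexicographic ordering: '#' sorts before any alphanumeric character, so a longer run of '#' pushes an entry earlier. A small self-contained illustration (the names are invented):

# '####' (admin) sorts before '###' (write), '##' (read) and '#' (none),
# and the default user's '#####' key comes before everything else
entries = ['###bob', '####alice', '#guest', '#####default', '##carol']
assert sorted(entries) == [
    '#####default', '####alice', '###bob', '##carol', '#guest']
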
114 114
115 115 def _hash_key(k):
116 116 return sha1_safe(k)
117 117
118 118
119 119 def in_filter_generator(qry, items, limit=500):
120 120 """
121 121     Splits a large IN() clause into multiple chunked IN() clauses combined with OR
122 122 e.g.::
123 123 cnt = Repository.query().filter(
124 124 or_(
125 125 *in_filter_generator(Repository.repo_id, range(100000))
126 126 )).count()
127 127 """
128 128 if not items:
129 129         # an empty list would cause an empty IN() query, which might cause
130 130         # security issues and lead to hidden, unpleasant results
131 131 items = [-1]
132 132
133 133 parts = []
134 134 for chunk in xrange(0, len(items), limit):
135 135 parts.append(
136 136 qry.in_(items[chunk: chunk + limit])
137 137 )
138 138
139 139 return parts
140 140
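
With the default limit of 500, a list of 1200 ids therefore becomes three IN() clauses of 500, 500 and 200 items that the caller combines with or_(), exactly as the docstring example shows. A short sketch of the chunking arithmetic:

# chunk sizes produced for 1200 ids with the default limit of 500
ids = list(range(1200))
chunk_sizes = [len(ids[start:start + 500]) for start in range(0, len(ids), 500)]
assert chunk_sizes == [500, 500, 200]

# the caller then OR-s the generated parts together, as in the docstring:
# Repository.query().filter(or_(*in_filter_generator(Repository.repo_id, ids)))
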
141 141
142 142 base_table_args = {
143 143 'extend_existing': True,
144 144 'mysql_engine': 'InnoDB',
145 145 'mysql_charset': 'utf8',
146 146 'sqlite_autoincrement': True
147 147 }
148 148
149 149
150 150 class EncryptedTextValue(TypeDecorator):
151 151 """
152 152 Special column for encrypted long text data, use like::
153 153
154 154 value = Column("encrypted_value", EncryptedValue(), nullable=False)
155 155
156 156     This column is intelligent: if the value is in unencrypted form it returns
157 157     the unencrypted form, but on save it always encrypts
158 158 """
159 159 impl = Text
160 160
161 161 def process_bind_param(self, value, dialect):
162 162 if not value:
163 163 return value
164 164 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
165 165             # protect against double encryption if someone manually starts
166 166             # doing it
167 167 raise ValueError('value needs to be in unencrypted format, ie. '
168 168 'not starting with enc$aes')
169 169 return 'enc$aes_hmac$%s' % AESCipher(
170 170 ENCRYPTION_KEY, hmac=True).encrypt(value)
171 171
172 172 def process_result_value(self, value, dialect):
173 173 import rhodecode
174 174
175 175 if not value:
176 176 return value
177 177
178 178 parts = value.split('$', 3)
179 179 if not len(parts) == 3:
180 180 # probably not encrypted values
181 181 return value
182 182 else:
183 183 if parts[0] != 'enc':
184 184 # parts ok but without our header ?
185 185 return value
186 186 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
187 187 'rhodecode.encrypted_values.strict') or True)
188 188 # at that stage we know it's our encryption
189 189 if parts[1] == 'aes':
190 190 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
191 191 elif parts[1] == 'aes_hmac':
192 192 decrypted_data = AESCipher(
193 193 ENCRYPTION_KEY, hmac=True,
194 194 strict_verification=enc_strict_mode).decrypt(parts[2])
195 195 else:
196 196 raise ValueError(
197 197 'Encryption type part is wrong, must be `aes` '
198 198 'or `aes_hmac`, got `%s` instead' % (parts[1]))
199 199 return decrypted_data
200 200
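
In other words, any non-empty value written through this column is stored with an 'enc$aes_hmac$' prefix and transparently decrypted on read. A rough round-trip sketch, assuming ENCRYPTION_KEY has already been initialized from the .ini as noted at the top of the module (the token value is invented, and the dialect argument is unused by these hooks):

column_type = EncryptedTextValue()

stored = column_type.process_bind_param(u'my-secret-token', None)
# stored now looks like u'enc$aes_hmac$<ciphertext>'
assert stored.startswith('enc$aes_hmac$')

plain = column_type.process_result_value(stored, None)
assert plain == u'my-secret-token'
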
201 201
202 202 class BaseModel(object):
203 203 """
204 204 Base Model for all classes
205 205 """
206 206
207 207 @classmethod
208 208 def _get_keys(cls):
209 209 """return column names for this model """
210 210 return class_mapper(cls).c.keys()
211 211
212 212 def get_dict(self):
213 213 """
214 214 return dict with keys and values corresponding
215 215 to this model data """
216 216
217 217 d = {}
218 218 for k in self._get_keys():
219 219 d[k] = getattr(self, k)
220 220
221 221 # also use __json__() if present to get additional fields
222 222 _json_attr = getattr(self, '__json__', None)
223 223 if _json_attr:
224 224 # update with attributes from __json__
225 225 if callable(_json_attr):
226 226 _json_attr = _json_attr()
227 227 for k, val in _json_attr.iteritems():
228 228 d[k] = val
229 229 return d
230 230
231 231 def get_appstruct(self):
232 232 """return list with keys and values tuples corresponding
233 233 to this model data """
234 234
235 235 lst = []
236 236 for k in self._get_keys():
237 237 lst.append((k, getattr(self, k),))
238 238 return lst
239 239
240 240 def populate_obj(self, populate_dict):
241 241 """populate model with data from given populate_dict"""
242 242
243 243 for k in self._get_keys():
244 244 if k in populate_dict:
245 245 setattr(self, k, populate_dict[k])
246 246
247 247 @classmethod
248 248 def query(cls):
249 249 return Session().query(cls)
250 250
251 251 @classmethod
252 252 def get(cls, id_):
253 253 if id_:
254 254 return cls.query().get(id_)
255 255
256 256 @classmethod
257 257 def get_or_404(cls, id_):
258 258 from pyramid.httpexceptions import HTTPNotFound
259 259
260 260 try:
261 261 id_ = int(id_)
262 262 except (TypeError, ValueError):
263 263 raise HTTPNotFound()
264 264
265 265 res = cls.query().get(id_)
266 266 if not res:
267 267 raise HTTPNotFound()
268 268 return res
269 269
270 270 @classmethod
271 271 def getAll(cls):
272 272 # deprecated and left for backward compatibility
273 273 return cls.get_all()
274 274
275 275 @classmethod
276 276 def get_all(cls):
277 277 return cls.query().all()
278 278
279 279 @classmethod
280 280 def delete(cls, id_):
281 281 obj = cls.query().get(id_)
282 282 Session().delete(obj)
283 283
284 284 @classmethod
285 285 def identity_cache(cls, session, attr_name, value):
286 286 exist_in_session = []
287 287 for (item_cls, pkey), instance in session.identity_map.items():
288 288 if cls == item_cls and getattr(instance, attr_name) == value:
289 289 exist_in_session.append(instance)
290 290 if exist_in_session:
291 291 if len(exist_in_session) == 1:
292 292 return exist_in_session[0]
293 293 log.exception(
294 294 'multiple objects with attr %s and '
295 295 'value %s found in the session: %r',
296 296 attr_name, value, exist_in_session)
297 297
298 298 def __repr__(self):
299 299 if hasattr(self, '__unicode__'):
300 300 # python repr needs to return str
301 301 try:
302 302 return safe_str(self.__unicode__())
303 303 except UnicodeDecodeError:
304 304 pass
305 305 return '<DB:%s>' % (self.__class__.__name__)
306 306
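# Illustrative sketch (not part of this diff): get_dict() above returns the mapped
# column values and then merges in anything produced by an optional __json__() hook.
# The same merge pattern without SQLAlchemy, using a hypothetical example class:

class _ExampleModel(object):
    def __init__(self):
        self.user_id = 1
        self.username = 'demo'

    def __json__(self):
        # extra, computed fields exposed alongside the raw columns
        return {'full_name': 'Demo User'}

    def get_dict(self):
        d = {'user_id': self.user_id, 'username': self.username}
        _json_attr = getattr(self, '__json__', None)
        if callable(_json_attr):
            d.update(_json_attr())
        return d

# _ExampleModel().get_dict()
# -> {'user_id': 1, 'username': 'demo', 'full_name': 'Demo User'}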
307 307
308 308 class RhodeCodeSetting(Base, BaseModel):
309 309 __tablename__ = 'rhodecode_settings'
310 310 __table_args__ = (
311 311 UniqueConstraint('app_settings_name'),
312 312 base_table_args
313 313 )
314 314
315 315 SETTINGS_TYPES = {
316 316 'str': safe_str,
317 317 'int': safe_int,
318 318 'unicode': safe_unicode,
319 319 'bool': str2bool,
320 320 'list': functools.partial(aslist, sep=',')
321 321 }
322 322 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
323 323 GLOBAL_CONF_KEY = 'app_settings'
324 324
325 325 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
326 326 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
327 327 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
328 328 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
329 329
330 330 def __init__(self, key='', val='', type='unicode'):
331 331 self.app_settings_name = key
332 332 self.app_settings_type = type
333 333 self.app_settings_value = val
334 334
335 335 @validates('_app_settings_value')
336 336 def validate_settings_value(self, key, val):
337 337 assert type(val) == unicode
338 338 return val
339 339
340 340 @hybrid_property
341 341 def app_settings_value(self):
342 342 v = self._app_settings_value
343 343 _type = self.app_settings_type
344 344 if _type:
345 345 _type = self.app_settings_type.split('.')[0]
346 346 # decode the encrypted value
347 347 if 'encrypted' in self.app_settings_type:
348 348 cipher = EncryptedTextValue()
349 349 v = safe_unicode(cipher.process_result_value(v, None))
350 350
351 351 converter = self.SETTINGS_TYPES.get(_type) or \
352 352 self.SETTINGS_TYPES['unicode']
353 353 return converter(v)
354 354
355 355 @app_settings_value.setter
356 356 def app_settings_value(self, val):
357 357 """
358 358 Setter that will always make sure we use unicode in app_settings_value
359 359
360 360 :param val:
361 361 """
362 362 val = safe_unicode(val)
363 363 # encode the encrypted value
364 364 if 'encrypted' in self.app_settings_type:
365 365 cipher = EncryptedTextValue()
366 366 val = safe_unicode(cipher.process_bind_param(val, None))
367 367 self._app_settings_value = val
368 368
369 369 @hybrid_property
370 370 def app_settings_type(self):
371 371 return self._app_settings_type
372 372
373 373 @app_settings_type.setter
374 374 def app_settings_type(self, val):
375 375 if val.split('.')[0] not in self.SETTINGS_TYPES:
376 376 raise Exception('type must be one of %s got %s'
377 377 % (self.SETTINGS_TYPES.keys(), val))
378 378 self._app_settings_type = val
379 379
380 380 @classmethod
381 381 def get_by_prefix(cls, prefix):
382 382 return RhodeCodeSetting.query()\
383 383 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
384 384 .all()
385 385
386 386 def __unicode__(self):
387 387 return u"<%s('%s:%s[%s]')>" % (
388 388 self.__class__.__name__,
389 389 self.app_settings_name, self.app_settings_value,
390 390 self.app_settings_type
391 391 )
392 392
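# Illustrative sketch (not part of this diff): app_settings_type stores the name of a
# converter from SETTINGS_TYPES, optionally suffixed with '.encrypted' (for example
# 'unicode.encrypted'). The getter above strips the suffix to pick the converter and
# decrypts first when needed. The coercion step alone, with simplified stand-ins for
# safe_str/safe_unicode/str2bool (assumptions, not the real helpers):

def _str2bool(v):
    return str(v).lower() in ('true', 'yes', 'on', '1')

_SETTINGS_TYPES = {
    'str': str,
    'unicode': lambda v: v,                      # stand-in for safe_unicode
    'int': int,
    'bool': _str2bool,
    'list': lambda v: [x.strip() for x in v.split(',')],
}

def coerce_setting(raw_value, app_settings_type):
    base_type = app_settings_type.split('.')[0]  # 'unicode.encrypted' -> 'unicode'
    converter = _SETTINGS_TYPES.get(base_type) or _SETTINGS_TYPES['unicode']
    return converter(raw_value)

# coerce_setting('1,2,3', 'list') -> ['1', '2', '3']
# coerce_setting('10', 'int')     -> 10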
393 393
394 394 class RhodeCodeUi(Base, BaseModel):
395 395 __tablename__ = 'rhodecode_ui'
396 396 __table_args__ = (
397 397 UniqueConstraint('ui_key'),
398 398 base_table_args
399 399 )
400 400
401 401 HOOK_REPO_SIZE = 'changegroup.repo_size'
402 402 # HG
403 403 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
404 404 HOOK_PULL = 'outgoing.pull_logger'
405 405 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
406 406 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
407 407 HOOK_PUSH = 'changegroup.push_logger'
408 408 HOOK_PUSH_KEY = 'pushkey.key_push'
409 409
410 410 # TODO: johbo: Unify way how hooks are configured for git and hg,
411 411 # git part is currently hardcoded.
412 412
413 413 # SVN PATTERNS
414 414 SVN_BRANCH_ID = 'vcs_svn_branch'
415 415 SVN_TAG_ID = 'vcs_svn_tag'
416 416
417 417 ui_id = Column(
418 418 "ui_id", Integer(), nullable=False, unique=True, default=None,
419 419 primary_key=True)
420 420 ui_section = Column(
421 421 "ui_section", String(255), nullable=True, unique=None, default=None)
422 422 ui_key = Column(
423 423 "ui_key", String(255), nullable=True, unique=None, default=None)
424 424 ui_value = Column(
425 425 "ui_value", String(255), nullable=True, unique=None, default=None)
426 426 ui_active = Column(
427 427 "ui_active", Boolean(), nullable=True, unique=None, default=True)
428 428
429 429 def __repr__(self):
430 430 return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.ui_section,
431 431 self.ui_key, self.ui_value)
432 432
433 433
434 434 class RepoRhodeCodeSetting(Base, BaseModel):
435 435 __tablename__ = 'repo_rhodecode_settings'
436 436 __table_args__ = (
437 437 UniqueConstraint(
438 438 'app_settings_name', 'repository_id',
439 439 name='uq_repo_rhodecode_setting_name_repo_id'),
440 440 base_table_args
441 441 )
442 442
443 443 repository_id = Column(
444 444 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
445 445 nullable=False)
446 446 app_settings_id = Column(
447 447 "app_settings_id", Integer(), nullable=False, unique=True,
448 448 default=None, primary_key=True)
449 449 app_settings_name = Column(
450 450 "app_settings_name", String(255), nullable=True, unique=None,
451 451 default=None)
452 452 _app_settings_value = Column(
453 453 "app_settings_value", String(4096), nullable=True, unique=None,
454 454 default=None)
455 455 _app_settings_type = Column(
456 456 "app_settings_type", String(255), nullable=True, unique=None,
457 457 default=None)
458 458
459 459 repository = relationship('Repository')
460 460
461 461 def __init__(self, repository_id, key='', val='', type='unicode'):
462 462 self.repository_id = repository_id
463 463 self.app_settings_name = key
464 464 self.app_settings_type = type
465 465 self.app_settings_value = val
466 466
467 467 @validates('_app_settings_value')
468 468 def validate_settings_value(self, key, val):
469 469 assert type(val) == unicode
470 470 return val
471 471
472 472 @hybrid_property
473 473 def app_settings_value(self):
474 474 v = self._app_settings_value
475 475 type_ = self.app_settings_type
476 476 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
477 477 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
478 478 return converter(v)
479 479
480 480 @app_settings_value.setter
481 481 def app_settings_value(self, val):
482 482 """
483 483 Setter that will always make sure we use unicode in app_settings_value
484 484
485 485 :param val:
486 486 """
487 487 self._app_settings_value = safe_unicode(val)
488 488
489 489 @hybrid_property
490 490 def app_settings_type(self):
491 491 return self._app_settings_type
492 492
493 493 @app_settings_type.setter
494 494 def app_settings_type(self, val):
495 495 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
496 496 if val not in SETTINGS_TYPES:
497 497 raise Exception('type must be one of %s got %s'
498 498 % (SETTINGS_TYPES.keys(), val))
499 499 self._app_settings_type = val
500 500
501 501 def __unicode__(self):
502 502 return u"<%s('%s:%s:%s[%s]')>" % (
503 503 self.__class__.__name__, self.repository.repo_name,
504 504 self.app_settings_name, self.app_settings_value,
505 505 self.app_settings_type
506 506 )
507 507
508 508
509 509 class RepoRhodeCodeUi(Base, BaseModel):
510 510 __tablename__ = 'repo_rhodecode_ui'
511 511 __table_args__ = (
512 512 UniqueConstraint(
513 513 'repository_id', 'ui_section', 'ui_key',
514 514 name='uq_repo_rhodecode_ui_repository_id_section_key'),
515 515 base_table_args
516 516 )
517 517
518 518 repository_id = Column(
519 519 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
520 520 nullable=False)
521 521 ui_id = Column(
522 522 "ui_id", Integer(), nullable=False, unique=True, default=None,
523 523 primary_key=True)
524 524 ui_section = Column(
525 525 "ui_section", String(255), nullable=True, unique=None, default=None)
526 526 ui_key = Column(
527 527 "ui_key", String(255), nullable=True, unique=None, default=None)
528 528 ui_value = Column(
529 529 "ui_value", String(255), nullable=True, unique=None, default=None)
530 530 ui_active = Column(
531 531 "ui_active", Boolean(), nullable=True, unique=None, default=True)
532 532
533 533 repository = relationship('Repository')
534 534
535 535 def __repr__(self):
536 536 return '<%s[%s:%s]%s=>%s]>' % (
537 537 self.__class__.__name__, self.repository.repo_name,
538 538 self.ui_section, self.ui_key, self.ui_value)
539 539
540 540
541 541 class User(Base, BaseModel):
542 542 __tablename__ = 'users'
543 543 __table_args__ = (
544 544 UniqueConstraint('username'), UniqueConstraint('email'),
545 545 Index('u_username_idx', 'username'),
546 546 Index('u_email_idx', 'email'),
547 547 base_table_args
548 548 )
549 549
550 550 DEFAULT_USER = 'default'
551 551 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
552 552 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
553 553
554 554 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
555 555 username = Column("username", String(255), nullable=True, unique=None, default=None)
556 556 password = Column("password", String(255), nullable=True, unique=None, default=None)
557 557 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
558 558 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
559 559 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
560 560 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
561 561 _email = Column("email", String(255), nullable=True, unique=None, default=None)
562 562 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
563 563 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
564 564
565 565 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
566 566 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
567 567 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
568 568 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
569 569 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
570 570 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
571 571
572 572 user_log = relationship('UserLog')
573 573 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
574 574
575 575 repositories = relationship('Repository')
576 576 repository_groups = relationship('RepoGroup')
577 577 user_groups = relationship('UserGroup')
578 578
579 579 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
580 580 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
581 581
582 582 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
583 583 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
584 584 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
585 585
586 586 group_member = relationship('UserGroupMember', cascade='all')
587 587
588 588 notifications = relationship('UserNotification', cascade='all')
589 589 # notifications assigned to this user
590 590 user_created_notifications = relationship('Notification', cascade='all')
591 591 # comments created by this user
592 592 user_comments = relationship('ChangesetComment', cascade='all')
593 593 # user profile extra info
594 594 user_emails = relationship('UserEmailMap', cascade='all')
595 595 user_ip_map = relationship('UserIpMap', cascade='all')
596 596 user_auth_tokens = relationship('UserApiKeys', cascade='all')
597 597 user_ssh_keys = relationship('UserSshKeys', cascade='all')
598 598
599 599 # gists
600 600 user_gists = relationship('Gist', cascade='all')
601 601 # user pull requests
602 602 user_pull_requests = relationship('PullRequest', cascade='all')
603 603 # external identities
604 604 extenal_identities = relationship(
605 605 'ExternalIdentity',
606 606 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
607 607 cascade='all')
608 608 # review rules
609 609 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
610 610
611 611 def __unicode__(self):
612 612 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
613 613 self.user_id, self.username)
614 614
615 615 @hybrid_property
616 616 def email(self):
617 617 return self._email
618 618
619 619 @email.setter
620 620 def email(self, val):
621 621 self._email = val.lower() if val else None
622 622
623 623 @hybrid_property
624 624 def first_name(self):
625 625 from rhodecode.lib import helpers as h
626 626 if self.name:
627 627 return h.escape(self.name)
628 628 return self.name
629 629
630 630 @hybrid_property
631 631 def last_name(self):
632 632 from rhodecode.lib import helpers as h
633 633 if self.lastname:
634 634 return h.escape(self.lastname)
635 635 return self.lastname
636 636
637 637 @hybrid_property
638 638 def api_key(self):
639 639 """
640 640 Fetch an auth-token with role ALL connected to this user, if one exists
641 641 """
642 642 user_auth_token = UserApiKeys.query()\
643 643 .filter(UserApiKeys.user_id == self.user_id)\
644 644 .filter(or_(UserApiKeys.expires == -1,
645 645 UserApiKeys.expires >= time.time()))\
646 646 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
647 647 if user_auth_token:
648 648 user_auth_token = user_auth_token.api_key
649 649
650 650 return user_auth_token
651 651
652 652 @api_key.setter
653 653 def api_key(self, val):
654 654 # don't allow setting the API key; this is deprecated for now
655 655 self._api_key = None
656 656
657 657 @property
658 658 def reviewer_pull_requests(self):
659 659 return PullRequestReviewers.query() \
660 660 .options(joinedload(PullRequestReviewers.pull_request)) \
661 661 .filter(PullRequestReviewers.user_id == self.user_id) \
662 662 .all()
663 663
664 664 @property
665 665 def firstname(self):
666 666 # alias for future
667 667 return self.name
668 668
669 669 @property
670 670 def emails(self):
671 671 other = UserEmailMap.query()\
672 672 .filter(UserEmailMap.user == self) \
673 673 .order_by(UserEmailMap.email_id.asc()) \
674 674 .all()
675 675 return [self.email] + [x.email for x in other]
676 676
677 677 @property
678 678 def auth_tokens(self):
679 679 auth_tokens = self.get_auth_tokens()
680 680 return [x.api_key for x in auth_tokens]
681 681
682 682 def get_auth_tokens(self):
683 683 return UserApiKeys.query()\
684 684 .filter(UserApiKeys.user == self)\
685 685 .order_by(UserApiKeys.user_api_key_id.asc())\
686 686 .all()
687 687
688 688 @LazyProperty
689 689 def feed_token(self):
690 690 return self.get_feed_token()
691 691
692 692 def get_feed_token(self, cache=True):
693 693 feed_tokens = UserApiKeys.query()\
694 694 .filter(UserApiKeys.user == self)\
695 695 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
696 696 if cache:
697 697 feed_tokens = feed_tokens.options(
698 698 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
699 699
700 700 feed_tokens = feed_tokens.all()
701 701 if feed_tokens:
702 702 return feed_tokens[0].api_key
703 703 return 'NO_FEED_TOKEN_AVAILABLE'
704 704
705 705 @classmethod
706 706 def get(cls, user_id, cache=False):
707 707 if not user_id:
708 708 return
709 709
710 710 user = cls.query()
711 711 if cache:
712 712 user = user.options(
713 713 FromCache("sql_cache_short", "get_users_%s" % user_id))
714 714 return user.get(user_id)
715 715
716 716 @classmethod
717 717 def extra_valid_auth_tokens(cls, user, role=None):
718 718 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
719 719 .filter(or_(UserApiKeys.expires == -1,
720 720 UserApiKeys.expires >= time.time()))
721 721 if role:
722 722 tokens = tokens.filter(or_(UserApiKeys.role == role,
723 723 UserApiKeys.role == UserApiKeys.ROLE_ALL))
724 724 return tokens.all()
725 725
726 726 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
727 727 from rhodecode.lib import auth
728 728
729 729 log.debug('Trying to authenticate user: %s via auth-token, '
730 730 'and roles: %s', self, roles)
731 731
732 732 if not auth_token:
733 733 return False
734 734
735 735 crypto_backend = auth.crypto_backend()
736 736
737 737 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
738 738 tokens_q = UserApiKeys.query()\
739 739 .filter(UserApiKeys.user_id == self.user_id)\
740 740 .filter(or_(UserApiKeys.expires == -1,
741 741 UserApiKeys.expires >= time.time()))
742 742
743 743 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
744 744
745 745 plain_tokens = []
746 746 hash_tokens = []
747 747
748 748 user_tokens = tokens_q.all()
749 749 log.debug('Found %s user tokens to check for authentication', len(user_tokens))
750 750 for token in user_tokens:
751 751 log.debug('AUTH_TOKEN: checking if user token with id `%s` matches',
752 752 token.user_api_key_id)
753 753 # verify scope first, since it's way faster than hash calculation of
754 754 # encrypted tokens
755 755 if token.repo_id:
756 756 # token has a scope, we need to verify it
757 757 if scope_repo_id != token.repo_id:
758 758 log.debug(
759 759 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
760 760 'and calling scope is:%s, skipping further checks',
761 761 token.repo, scope_repo_id)
762 762 # token has a scope, and it doesn't match, skip token
763 763 continue
764 764
765 765 if token.api_key.startswith(crypto_backend.ENC_PREF):
766 766 hash_tokens.append(token.api_key)
767 767 else:
768 768 plain_tokens.append(token.api_key)
769 769
770 770 is_plain_match = auth_token in plain_tokens
771 771 if is_plain_match:
772 772 return True
773 773
774 774 for hashed in hash_tokens:
775 775 # NOTE(marcink): this is expensive to calculate, but most secure
776 776 match = crypto_backend.hash_check(auth_token, hashed)
777 777 if match:
778 778 return True
779 779
780 780 return False
781 781
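# Illustrative sketch (not part of this diff): authenticate_by_token above first
# filters stored tokens by expiry/role/scope, then splits the remaining ones into
# plain tokens (cheap string compare) and hashed tokens (expensive hash check). The
# split alone, with a hypothetical 'h$' prefix standing in for crypto_backend.ENC_PREF
# and a stubbed hash checker (both assumptions, not the real auth backend):

def check_token(presented, stored_tokens, enc_prefix='h$', hash_check=None):
    hash_check = hash_check or (lambda plain, hashed: False)   # stub checker
    plain_tokens = [t for t in stored_tokens if not t.startswith(enc_prefix)]
    hash_tokens = [t for t in stored_tokens if t.startswith(enc_prefix)]

    if presented in plain_tokens:      # cheap comparison first
        return True
    for hashed in hash_tokens:         # only then the expensive checks
        if hash_check(presented, hashed):
            return True
    return False

# check_token('abc', ['abc', 'h$1a2b3c']) -> True (plain match, no hash check needed)
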
782 782 @property
783 783 def ip_addresses(self):
784 784 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
785 785 return [x.ip_addr for x in ret]
786 786
787 787 @property
788 788 def username_and_name(self):
789 789 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
790 790
791 791 @property
792 792 def username_or_name_or_email(self):
793 793 full_name = self.full_name if self.full_name != ' ' else None
794 794 return self.username or full_name or self.email
795 795
796 796 @property
797 797 def full_name(self):
798 798 return '%s %s' % (self.first_name, self.last_name)
799 799
800 800 @property
801 801 def full_name_or_username(self):
802 802 return ('%s %s' % (self.first_name, self.last_name)
803 803 if (self.first_name and self.last_name) else self.username)
804 804
805 805 @property
806 806 def full_contact(self):
807 807 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
808 808
809 809 @property
810 810 def short_contact(self):
811 811 return '%s %s' % (self.first_name, self.last_name)
812 812
813 813 @property
814 814 def is_admin(self):
815 815 return self.admin
816 816
817 817 def AuthUser(self, **kwargs):
818 818 """
819 819 Returns instance of AuthUser for this user
820 820 """
821 821 from rhodecode.lib.auth import AuthUser
822 822 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
823 823
824 824 @hybrid_property
825 825 def user_data(self):
826 826 if not self._user_data:
827 827 return {}
828 828
829 829 try:
830 830 return json.loads(self._user_data)
831 831 except TypeError:
832 832 return {}
833 833
834 834 @user_data.setter
835 835 def user_data(self, val):
836 836 if not isinstance(val, dict):
837 837 raise Exception('user_data must be dict, got %s' % type(val))
838 838 try:
839 839 self._user_data = json.dumps(val)
840 840 except Exception:
841 841 log.error(traceback.format_exc())
842 842
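# Illustrative sketch (not part of this diff): user_data is a LargeBinary column that
# holds a JSON document; the hybrid property above dumps on write and loads on read,
# treating an empty column as {}. A standalone mirror of that round trip:

import json

def load_user_data(raw):
    if not raw:
        return {}
    try:
        return json.loads(raw)
    except TypeError:
        # mirrors the model: only TypeError is swallowed here
        return {}

def dump_user_data(val):
    if not isinstance(val, dict):
        raise Exception('user_data must be dict, got %s' % type(val))
    return json.dumps(val)

# load_user_data(dump_user_data({'language': 'en'})) -> {'language': 'en'}
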
843 843 @classmethod
844 844 def get_by_username(cls, username, case_insensitive=False,
845 845 cache=False, identity_cache=False):
846 846 session = Session()
847 847
848 848 if case_insensitive:
849 849 q = cls.query().filter(
850 850 func.lower(cls.username) == func.lower(username))
851 851 else:
852 852 q = cls.query().filter(cls.username == username)
853 853
854 854 if cache:
855 855 if identity_cache:
856 856 val = cls.identity_cache(session, 'username', username)
857 857 if val:
858 858 return val
859 859 else:
860 860 cache_key = "get_user_by_name_%s" % _hash_key(username)
861 861 q = q.options(
862 862 FromCache("sql_cache_short", cache_key))
863 863
864 864 return q.scalar()
865 865
866 866 @classmethod
867 867 def get_by_auth_token(cls, auth_token, cache=False):
868 868 q = UserApiKeys.query()\
869 869 .filter(UserApiKeys.api_key == auth_token)\
870 870 .filter(or_(UserApiKeys.expires == -1,
871 871 UserApiKeys.expires >= time.time()))
872 872 if cache:
873 873 q = q.options(
874 874 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
875 875
876 876 match = q.first()
877 877 if match:
878 878 return match.user
879 879
880 880 @classmethod
881 881 def get_by_email(cls, email, case_insensitive=False, cache=False):
882 882
883 883 if case_insensitive:
884 884 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
885 885
886 886 else:
887 887 q = cls.query().filter(cls.email == email)
888 888
889 889 email_key = _hash_key(email)
890 890 if cache:
891 891 q = q.options(
892 892 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
893 893
894 894 ret = q.scalar()
895 895 if ret is None:
896 896 q = UserEmailMap.query()
897 897 # try fetching in alternate email map
898 898 if case_insensitive:
899 899 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
900 900 else:
901 901 q = q.filter(UserEmailMap.email == email)
902 902 q = q.options(joinedload(UserEmailMap.user))
903 903 if cache:
904 904 q = q.options(
905 905 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
906 906 ret = getattr(q.scalar(), 'user', None)
907 907
908 908 return ret
909 909
910 910 @classmethod
911 911 def get_from_cs_author(cls, author):
912 912 """
913 913 Tries to get User objects out of commit author string
914 914
915 915 :param author:
916 916 """
917 917 from rhodecode.lib.helpers import email, author_name
918 918 # Valid email in the passed attribute, see if it's in the system
919 919 _email = email(author)
920 920 if _email:
921 921 user = cls.get_by_email(_email, case_insensitive=True)
922 922 if user:
923 923 return user
924 924 # Maybe we can match by username?
925 925 _author = author_name(author)
926 926 user = cls.get_by_username(_author, case_insensitive=True)
927 927 if user:
928 928 return user
929 929
930 930 def update_userdata(self, **kwargs):
931 931 usr = self
932 932 old = usr.user_data
933 933 old.update(**kwargs)
934 934 usr.user_data = old
935 935 Session().add(usr)
936 936 log.debug('updated userdata with %s', kwargs)
937 937
938 938 def update_lastlogin(self):
939 939 """Update user lastlogin"""
940 940 self.last_login = datetime.datetime.now()
941 941 Session().add(self)
942 942 log.debug('updated user %s lastlogin', self.username)
943 943
944 944 def update_password(self, new_password):
945 945 from rhodecode.lib.auth import get_crypt_password
946 946
947 947 self.password = get_crypt_password(new_password)
948 948 Session().add(self)
949 949
950 950 @classmethod
951 951 def get_first_super_admin(cls):
952 952 user = User.query()\
953 953 .filter(User.admin == true()) \
954 954 .order_by(User.user_id.asc()) \
955 955 .first()
956 956
957 957 if user is None:
958 958 raise Exception('FATAL: Missing administrative account!')
959 959 return user
960 960
961 961 @classmethod
962 962 def get_all_super_admins(cls):
963 963 """
964 964 Returns all admin accounts sorted by username
965 965 """
966 966 return User.query().filter(User.admin == true())\
967 967 .order_by(User.username.asc()).all()
968 968
969 969 @classmethod
970 970 def get_default_user(cls, cache=False, refresh=False):
971 971 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
972 972 if user is None:
973 973 raise Exception('FATAL: Missing default account!')
974 974 if refresh:
975 975 # The default user might be based on outdated state which
976 976 # has been loaded from the cache.
977 977 # A call to refresh() ensures that the
978 978 # latest state from the database is used.
979 979 Session().refresh(user)
980 980 return user
981 981
982 982 def _get_default_perms(self, user, suffix=''):
983 983 from rhodecode.model.permission import PermissionModel
984 984 return PermissionModel().get_default_perms(user.user_perms, suffix)
985 985
986 986 def get_default_perms(self, suffix=''):
987 987 return self._get_default_perms(self, suffix)
988 988
989 989 def get_api_data(self, include_secrets=False, details='full'):
990 990 """
991 991 Common function for generating user related data for API
992 992
993 993 :param include_secrets: By default secrets in the API data will be replaced
994 994 by a placeholder value to prevent exposing this data by accident. In case
995 995 this data shall be exposed, set this flag to ``True``.
996 996
997 997 :param details: can be 'basic' or 'full'; 'basic' gives only a subset of
998 998 the available user information, including user_id, name and emails.
999 999 """
1000 1000 user = self
1001 1001 user_data = self.user_data
1002 1002 data = {
1003 1003 'user_id': user.user_id,
1004 1004 'username': user.username,
1005 1005 'firstname': user.name,
1006 1006 'lastname': user.lastname,
1007 1007 'email': user.email,
1008 1008 'emails': user.emails,
1009 1009 }
1010 1010 if details == 'basic':
1011 1011 return data
1012 1012
1013 1013 auth_token_length = 40
1014 1014 auth_token_replacement = '*' * auth_token_length
1015 1015
1016 1016 extras = {
1017 1017 'auth_tokens': [auth_token_replacement],
1018 1018 'active': user.active,
1019 1019 'admin': user.admin,
1020 1020 'extern_type': user.extern_type,
1021 1021 'extern_name': user.extern_name,
1022 1022 'last_login': user.last_login,
1023 1023 'last_activity': user.last_activity,
1024 1024 'ip_addresses': user.ip_addresses,
1025 1025 'language': user_data.get('language')
1026 1026 }
1027 1027 data.update(extras)
1028 1028
1029 1029 if include_secrets:
1030 1030 data['auth_tokens'] = user.auth_tokens
1031 1031 return data
1032 1032
1033 1033 def __json__(self):
1034 1034 data = {
1035 1035 'full_name': self.full_name,
1036 1036 'full_name_or_username': self.full_name_or_username,
1037 1037 'short_contact': self.short_contact,
1038 1038 'full_contact': self.full_contact,
1039 1039 }
1040 1040 data.update(self.get_api_data())
1041 1041 return data
1042 1042
1043 1043
1044 1044 class UserApiKeys(Base, BaseModel):
1045 1045 __tablename__ = 'user_api_keys'
1046 1046 __table_args__ = (
1047 1047 Index('uak_api_key_idx', 'api_key', unique=True),
1048 1048 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1049 1049 base_table_args
1050 1050 )
1051 1051 __mapper_args__ = {}
1052 1052
1053 1053 # ApiKey role
1054 1054 ROLE_ALL = 'token_role_all'
1055 1055 ROLE_HTTP = 'token_role_http'
1056 1056 ROLE_VCS = 'token_role_vcs'
1057 1057 ROLE_API = 'token_role_api'
1058 1058 ROLE_FEED = 'token_role_feed'
1059 1059 ROLE_PASSWORD_RESET = 'token_password_reset'
1060 1060
1061 1061 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1062 1062
1063 1063 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1064 1064 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1065 1065 api_key = Column("api_key", String(255), nullable=False, unique=True)
1066 1066 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1067 1067 expires = Column('expires', Float(53), nullable=False)
1068 1068 role = Column('role', String(255), nullable=True)
1069 1069 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1070 1070
1071 1071 # scope columns
1072 1072 repo_id = Column(
1073 1073 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1074 1074 nullable=True, unique=None, default=None)
1075 1075 repo = relationship('Repository', lazy='joined')
1076 1076
1077 1077 repo_group_id = Column(
1078 1078 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1079 1079 nullable=True, unique=None, default=None)
1080 1080 repo_group = relationship('RepoGroup', lazy='joined')
1081 1081
1082 1082 user = relationship('User', lazy='joined')
1083 1083
1084 1084 def __unicode__(self):
1085 1085 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1086 1086
1087 1087 def __json__(self):
1088 1088 data = {
1089 1089 'auth_token': self.api_key,
1090 1090 'role': self.role,
1091 1091 'scope': self.scope_humanized,
1092 1092 'expired': self.expired
1093 1093 }
1094 1094 return data
1095 1095
1096 1096 def get_api_data(self, include_secrets=False):
1097 1097 data = self.__json__()
1098 1098 if include_secrets:
1099 1099 return data
1100 1100 else:
1101 1101 data['auth_token'] = self.token_obfuscated
1102 1102 return data
1103 1103
1104 1104 @hybrid_property
1105 1105 def description_safe(self):
1106 1106 from rhodecode.lib import helpers as h
1107 1107 return h.escape(self.description)
1108 1108
1109 1109 @property
1110 1110 def expired(self):
1111 1111 if self.expires == -1:
1112 1112 return False
1113 1113 return time.time() > self.expires
1114 1114
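# Illustrative sketch (not part of this diff): 'expires' is a float of unix-epoch
# seconds, with -1 meaning "never expires"; the expired property above is just this
# comparison:

import time

def token_expired(expires):
    if expires == -1:
        return False
    return time.time() > expires

# token_expired(-1)                 -> False (never expires)
# token_expired(time.time() - 3600) -> True  (expired an hour ago)
# token_expired(time.time() + 3600) -> False (still valid)
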
1115 1115 @classmethod
1116 1116 def _get_role_name(cls, role):
1117 1117 return {
1118 1118 cls.ROLE_ALL: _('all'),
1119 1119 cls.ROLE_HTTP: _('http/web interface'),
1120 1120 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1121 1121 cls.ROLE_API: _('api calls'),
1122 1122 cls.ROLE_FEED: _('feed access'),
1123 1123 }.get(role, role)
1124 1124
1125 1125 @property
1126 1126 def role_humanized(self):
1127 1127 return self._get_role_name(self.role)
1128 1128
1129 1129 def _get_scope(self):
1130 1130 if self.repo:
1131 1131 return repr(self.repo)
1132 1132 if self.repo_group:
1133 1133 return repr(self.repo_group) + ' (recursive)'
1134 1134 return 'global'
1135 1135
1136 1136 @property
1137 1137 def scope_humanized(self):
1138 1138 return self._get_scope()
1139 1139
1140 1140 @property
1141 1141 def token_obfuscated(self):
1142 1142 if self.api_key:
1143 1143 return self.api_key[:4] + "****"
1144 1144
1145 1145
1146 1146 class UserEmailMap(Base, BaseModel):
1147 1147 __tablename__ = 'user_email_map'
1148 1148 __table_args__ = (
1149 1149 Index('uem_email_idx', 'email'),
1150 1150 UniqueConstraint('email'),
1151 1151 base_table_args
1152 1152 )
1153 1153 __mapper_args__ = {}
1154 1154
1155 1155 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1156 1156 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1157 1157 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1158 1158 user = relationship('User', lazy='joined')
1159 1159
1160 1160 @validates('_email')
1161 1161 def validate_email(self, key, email):
1162 1162 # check if this email is not the main one
1163 1163 main_email = Session().query(User).filter(User.email == email).scalar()
1164 1164 if main_email is not None:
1165 1165 raise AttributeError('email %s is already present in the user table' % email)
1166 1166 return email
1167 1167
1168 1168 @hybrid_property
1169 1169 def email(self):
1170 1170 return self._email
1171 1171
1172 1172 @email.setter
1173 1173 def email(self, val):
1174 1174 self._email = val.lower() if val else None
1175 1175
1176 1176
1177 1177 class UserIpMap(Base, BaseModel):
1178 1178 __tablename__ = 'user_ip_map'
1179 1179 __table_args__ = (
1180 1180 UniqueConstraint('user_id', 'ip_addr'),
1181 1181 base_table_args
1182 1182 )
1183 1183 __mapper_args__ = {}
1184 1184
1185 1185 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1186 1186 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1187 1187 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1188 1188 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1189 1189 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1190 1190 user = relationship('User', lazy='joined')
1191 1191
1192 1192 @hybrid_property
1193 1193 def description_safe(self):
1194 1194 from rhodecode.lib import helpers as h
1195 1195 return h.escape(self.description)
1196 1196
1197 1197 @classmethod
1198 1198 def _get_ip_range(cls, ip_addr):
1199 1199 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1200 1200 return [str(net.network_address), str(net.broadcast_address)]
1201 1201
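# Illustrative sketch (not part of this diff): _get_ip_range above expands a stored
# entry (a single address or a CIDR network) into its first and last address using
# the stdlib/backport ipaddress module:

import ipaddress

def ip_range(ip_addr):
    net = ipaddress.ip_network(u'%s' % ip_addr, strict=False)
    return [str(net.network_address), str(net.broadcast_address)]

# ip_range('192.168.1.0/24') -> ['192.168.1.0', '192.168.1.255']
# ip_range('10.0.0.5')       -> ['10.0.0.5', '10.0.0.5']
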
1202 1202 def __json__(self):
1203 1203 return {
1204 1204 'ip_addr': self.ip_addr,
1205 1205 'ip_range': self._get_ip_range(self.ip_addr),
1206 1206 }
1207 1207
1208 1208 def __unicode__(self):
1209 1209 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1210 1210 self.user_id, self.ip_addr)
1211 1211
1212 1212
1213 1213 class UserSshKeys(Base, BaseModel):
1214 1214 __tablename__ = 'user_ssh_keys'
1215 1215 __table_args__ = (
1216 1216 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1217 1217
1218 1218 UniqueConstraint('ssh_key_fingerprint'),
1219 1219
1220 1220 base_table_args
1221 1221 )
1222 1222 __mapper_args__ = {}
1223 1223
1224 1224 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1225 1225 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1226 1226 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1227 1227
1228 1228 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1229 1229
1230 1230 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1231 1231 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1232 1232 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1233 1233
1234 1234 user = relationship('User', lazy='joined')
1235 1235
1236 1236 def __json__(self):
1237 1237 data = {
1238 1238 'ssh_fingerprint': self.ssh_key_fingerprint,
1239 1239 'description': self.description,
1240 1240 'created_on': self.created_on
1241 1241 }
1242 1242 return data
1243 1243
1244 1244 def get_api_data(self):
1245 1245 data = self.__json__()
1246 1246 return data
1247 1247
1248 1248
1249 1249 class UserLog(Base, BaseModel):
1250 1250 __tablename__ = 'user_logs'
1251 1251 __table_args__ = (
1252 1252 base_table_args,
1253 1253 )
1254 1254
1255 1255 VERSION_1 = 'v1'
1256 1256 VERSION_2 = 'v2'
1257 1257 VERSIONS = [VERSION_1, VERSION_2]
1258 1258
1259 1259 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1260 1260 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1261 1261 username = Column("username", String(255), nullable=True, unique=None, default=None)
1262 1262 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1263 1263 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1264 1264 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1265 1265 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1266 1266 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1267 1267
1268 1268 version = Column("version", String(255), nullable=True, default=VERSION_1)
1269 1269 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1270 1270 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1271 1271
1272 1272 def __unicode__(self):
1273 1273 return u"<%s('id:%s:%s')>" % (
1274 1274 self.__class__.__name__, self.repository_name, self.action)
1275 1275
1276 1276 def __json__(self):
1277 1277 return {
1278 1278 'user_id': self.user_id,
1279 1279 'username': self.username,
1280 1280 'repository_id': self.repository_id,
1281 1281 'repository_name': self.repository_name,
1282 1282 'user_ip': self.user_ip,
1283 1283 'action_date': self.action_date,
1284 1284 'action': self.action,
1285 1285 }
1286 1286
1287 1287 @hybrid_property
1288 1288 def entry_id(self):
1289 1289 return self.user_log_id
1290 1290
1291 1291 @property
1292 1292 def action_as_day(self):
1293 1293 return datetime.date(*self.action_date.timetuple()[:3])
1294 1294
1295 1295 user = relationship('User')
1296 1296 repository = relationship('Repository', cascade='')
1297 1297
1298 1298
1299 1299 class UserGroup(Base, BaseModel):
1300 1300 __tablename__ = 'users_groups'
1301 1301 __table_args__ = (
1302 1302 base_table_args,
1303 1303 )
1304 1304
1305 1305 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1306 1306 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1307 1307 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1308 1308 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1309 1309 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1310 1310 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1311 1311 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1312 1312 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1313 1313
1314 1314 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1315 1315 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1316 1316 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1317 1317 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1318 1318 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1319 1319 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1320 1320
1321 1321 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1322 1322 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1323 1323
1324 1324 @classmethod
1325 1325 def _load_group_data(cls, column):
1326 1326 if not column:
1327 1327 return {}
1328 1328
1329 1329 try:
1330 1330 return json.loads(column) or {}
1331 1331 except TypeError:
1332 1332 return {}
1333 1333
1334 1334 @hybrid_property
1335 1335 def description_safe(self):
1336 1336 from rhodecode.lib import helpers as h
1337 1337 return h.escape(self.user_group_description)
1338 1338
1339 1339 @hybrid_property
1340 1340 def group_data(self):
1341 1341 return self._load_group_data(self._group_data)
1342 1342
1343 1343 @group_data.expression
1344 1344 def group_data(self, **kwargs):
1345 1345 return self._group_data
1346 1346
1347 1347 @group_data.setter
1348 1348 def group_data(self, val):
1349 1349 try:
1350 1350 self._group_data = json.dumps(val)
1351 1351 except Exception:
1352 1352 log.error(traceback.format_exc())
1353 1353
1354 1354 @classmethod
1355 1355 def _load_sync(cls, group_data):
1356 1356 if group_data:
1357 1357 return group_data.get('extern_type')
1358 1358
1359 1359 @property
1360 1360 def sync(self):
1361 1361 return self._load_sync(self.group_data)
1362 1362
1363 1363 def __unicode__(self):
1364 1364 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1365 1365 self.users_group_id,
1366 1366 self.users_group_name)
1367 1367
1368 1368 @classmethod
1369 1369 def get_by_group_name(cls, group_name, cache=False,
1370 1370 case_insensitive=False):
1371 1371 if case_insensitive:
1372 1372 q = cls.query().filter(func.lower(cls.users_group_name) ==
1373 1373 func.lower(group_name))
1374 1374
1375 1375 else:
1376 1376 q = cls.query().filter(cls.users_group_name == group_name)
1377 1377 if cache:
1378 1378 q = q.options(
1379 1379 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1380 1380 return q.scalar()
1381 1381
1382 1382 @classmethod
1383 1383 def get(cls, user_group_id, cache=False):
1384 1384 if not user_group_id:
1385 1385 return
1386 1386
1387 1387 user_group = cls.query()
1388 1388 if cache:
1389 1389 user_group = user_group.options(
1390 1390 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1391 1391 return user_group.get(user_group_id)
1392 1392
1393 1393 def permissions(self, with_admins=True, with_owner=True):
1394 1394 """
1395 1395 Permissions for user groups
1396 1396 """
1397 1397 _admin_perm = 'usergroup.admin'
1398 1398
1399 1399 owner_row = []
1400 1400 if with_owner:
1401 1401 usr = AttributeDict(self.user.get_dict())
1402 1402 usr.owner_row = True
1403 1403 usr.permission = _admin_perm
1404 1404 owner_row.append(usr)
1405 1405
1406 1406 super_admin_ids = []
1407 1407 super_admin_rows = []
1408 1408 if with_admins:
1409 1409 for usr in User.get_all_super_admins():
1410 1410 super_admin_ids.append(usr.user_id)
1411 1411 # if this admin is also owner, don't double the record
1412 1412 if usr.user_id == owner_row[0].user_id:
1413 1413 owner_row[0].admin_row = True
1414 1414 else:
1415 1415 usr = AttributeDict(usr.get_dict())
1416 1416 usr.admin_row = True
1417 1417 usr.permission = _admin_perm
1418 1418 super_admin_rows.append(usr)
1419 1419
1420 1420 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1421 1421 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1422 1422 joinedload(UserUserGroupToPerm.user),
1423 1423 joinedload(UserUserGroupToPerm.permission),)
1424 1424
1425 1425 # get owners, admins and permissions. We do a trick of re-writing
1426 1426 # sqlalchemy objects into AttributeDict copies, because the sqlalchemy
1427 1427 # session keeps a global reference and changing one object would
1428 1428 # propagate to all others. This means that if an admin is also the
1429 1429 # owner, an admin_row change would otherwise affect both objects
1430 1430 perm_rows = []
1431 1431 for _usr in q.all():
1432 1432 usr = AttributeDict(_usr.user.get_dict())
1433 1433 # if this user is also owner/admin, mark as duplicate record
1434 1434 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1435 1435 usr.duplicate_perm = True
1436 1436 usr.permission = _usr.permission.permission_name
1437 1437 perm_rows.append(usr)
1438 1438
1439 1439 # filter the perm rows by 'default' first and then sort them by
1440 1440 # admin,write,read,none permissions sorted again alphabetically in
1441 1441 # each group
1442 1442 perm_rows = sorted(perm_rows, key=display_user_sort)
1443 1443
1444 1444 return super_admin_rows + owner_row + perm_rows
1445 1445
1446 1446 def permission_user_groups(self):
1447 1447 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1448 1448 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1449 1449 joinedload(UserGroupUserGroupToPerm.target_user_group),
1450 1450 joinedload(UserGroupUserGroupToPerm.permission),)
1451 1451
1452 1452 perm_rows = []
1453 1453 for _user_group in q.all():
1454 1454 usr = AttributeDict(_user_group.user_group.get_dict())
1455 1455 usr.permission = _user_group.permission.permission_name
1456 1456 perm_rows.append(usr)
1457 1457
1458 1458 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1459 1459 return perm_rows
1460 1460
1461 1461 def _get_default_perms(self, user_group, suffix=''):
1462 1462 from rhodecode.model.permission import PermissionModel
1463 1463 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1464 1464
1465 1465 def get_default_perms(self, suffix=''):
1466 1466 return self._get_default_perms(self, suffix)
1467 1467
1468 1468 def get_api_data(self, with_group_members=True, include_secrets=False):
1469 1469 """
1470 1470 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1471 1471 basically forwarded.
1472 1472
1473 1473 """
1474 1474 user_group = self
1475 1475 data = {
1476 1476 'users_group_id': user_group.users_group_id,
1477 1477 'group_name': user_group.users_group_name,
1478 1478 'group_description': user_group.user_group_description,
1479 1479 'active': user_group.users_group_active,
1480 1480 'owner': user_group.user.username,
1481 1481 'sync': user_group.sync,
1482 1482 'owner_email': user_group.user.email,
1483 1483 }
1484 1484
1485 1485 if with_group_members:
1486 1486 users = []
1487 1487 for user in user_group.members:
1488 1488 user = user.user
1489 1489 users.append(user.get_api_data(include_secrets=include_secrets))
1490 1490 data['users'] = users
1491 1491
1492 1492 return data
1493 1493
1494 1494
1495 1495 class UserGroupMember(Base, BaseModel):
1496 1496 __tablename__ = 'users_groups_members'
1497 1497 __table_args__ = (
1498 1498 base_table_args,
1499 1499 )
1500 1500
1501 1501 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1502 1502 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1503 1503 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1504 1504
1505 1505 user = relationship('User', lazy='joined')
1506 1506 users_group = relationship('UserGroup')
1507 1507
1508 1508 def __init__(self, gr_id='', u_id=''):
1509 1509 self.users_group_id = gr_id
1510 1510 self.user_id = u_id
1511 1511
1512 1512
1513 1513 class RepositoryField(Base, BaseModel):
1514 1514 __tablename__ = 'repositories_fields'
1515 1515 __table_args__ = (
1516 1516 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1517 1517 base_table_args,
1518 1518 )
1519 1519
1520 1520 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1521 1521
1522 1522 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1523 1523 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1524 1524 field_key = Column("field_key", String(250))
1525 1525 field_label = Column("field_label", String(1024), nullable=False)
1526 1526 field_value = Column("field_value", String(10000), nullable=False)
1527 1527 field_desc = Column("field_desc", String(1024), nullable=False)
1528 1528 field_type = Column("field_type", String(255), nullable=False, unique=None)
1529 1529 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1530 1530
1531 1531 repository = relationship('Repository')
1532 1532
1533 1533 @property
1534 1534 def field_key_prefixed(self):
1535 1535 return 'ex_%s' % self.field_key
1536 1536
1537 1537 @classmethod
1538 1538 def un_prefix_key(cls, key):
1539 1539 if key.startswith(cls.PREFIX):
1540 1540 return key[len(cls.PREFIX):]
1541 1541 return key
1542 1542
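# Illustrative sketch (not part of this diff): extra repository fields are exposed in
# forms with the PREFIX ('ex_') so they cannot clash with built-in form fields;
# field_key_prefixed adds the prefix and un_prefix_key strips it again:

PREFIX = 'ex_'

def prefix_key(field_key):
    return PREFIX + field_key

def un_prefix_key(key):
    if key.startswith(PREFIX):
        return key[len(PREFIX):]
    return key

# prefix_key('ticket_url')       -> 'ex_ticket_url'
# un_prefix_key('ex_ticket_url') -> 'ticket_url'
# un_prefix_key('repo_name')     -> 'repo_name' (unprefixed keys pass through)
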
1543 1543 @classmethod
1544 1544 def get_by_key_name(cls, key, repo):
1545 1545 row = cls.query()\
1546 1546 .filter(cls.repository == repo)\
1547 1547 .filter(cls.field_key == key).scalar()
1548 1548 return row
1549 1549
1550 1550
1551 1551 class Repository(Base, BaseModel):
1552 1552 __tablename__ = 'repositories'
1553 1553 __table_args__ = (
1554 1554 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1555 1555 base_table_args,
1556 1556 )
1557 1557 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1558 1558 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1559 1559 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1560 1560
1561 1561 STATE_CREATED = 'repo_state_created'
1562 1562 STATE_PENDING = 'repo_state_pending'
1563 1563 STATE_ERROR = 'repo_state_error'
1564 1564
1565 1565 LOCK_AUTOMATIC = 'lock_auto'
1566 1566 LOCK_API = 'lock_api'
1567 1567 LOCK_WEB = 'lock_web'
1568 1568 LOCK_PULL = 'lock_pull'
1569 1569
1570 1570 NAME_SEP = URL_SEP
1571 1571
1572 1572 repo_id = Column(
1573 1573 "repo_id", Integer(), nullable=False, unique=True, default=None,
1574 1574 primary_key=True)
1575 1575 _repo_name = Column(
1576 1576 "repo_name", Text(), nullable=False, default=None)
1577 1577 _repo_name_hash = Column(
1578 1578 "repo_name_hash", String(255), nullable=False, unique=True)
1579 1579 repo_state = Column("repo_state", String(255), nullable=True)
1580 1580
1581 1581 clone_uri = Column(
1582 1582 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1583 1583 default=None)
1584 1584 push_uri = Column(
1585 1585 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1586 1586 default=None)
1587 1587 repo_type = Column(
1588 1588 "repo_type", String(255), nullable=False, unique=False, default=None)
1589 1589 user_id = Column(
1590 1590 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1591 1591 unique=False, default=None)
1592 1592 private = Column(
1593 1593 "private", Boolean(), nullable=True, unique=None, default=None)
1594 1594 archived = Column(
1595 1595 "archived", Boolean(), nullable=True, unique=None, default=None)
1596 1596 enable_statistics = Column(
1597 1597 "statistics", Boolean(), nullable=True, unique=None, default=True)
1598 1598 enable_downloads = Column(
1599 1599 "downloads", Boolean(), nullable=True, unique=None, default=True)
1600 1600 description = Column(
1601 1601 "description", String(10000), nullable=True, unique=None, default=None)
1602 1602 created_on = Column(
1603 1603 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1604 1604 default=datetime.datetime.now)
1605 1605 updated_on = Column(
1606 1606 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1607 1607 default=datetime.datetime.now)
1608 1608 _landing_revision = Column(
1609 1609 "landing_revision", String(255), nullable=False, unique=False,
1610 1610 default=None)
1611 1611 enable_locking = Column(
1612 1612 "enable_locking", Boolean(), nullable=False, unique=None,
1613 1613 default=False)
1614 1614 _locked = Column(
1615 1615 "locked", String(255), nullable=True, unique=False, default=None)
1616 1616 _changeset_cache = Column(
1617 1617 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1618 1618
1619 1619 fork_id = Column(
1620 1620 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1621 1621 nullable=True, unique=False, default=None)
1622 1622 group_id = Column(
1623 1623 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1624 1624 unique=False, default=None)
1625 1625
1626 1626 user = relationship('User', lazy='joined')
1627 1627 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1628 1628 group = relationship('RepoGroup', lazy='joined')
1629 1629 repo_to_perm = relationship(
1630 1630 'UserRepoToPerm', cascade='all',
1631 1631 order_by='UserRepoToPerm.repo_to_perm_id')
1632 1632 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1633 1633 stats = relationship('Statistics', cascade='all', uselist=False)
1634 1634
1635 1635 followers = relationship(
1636 1636 'UserFollowing',
1637 1637 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1638 1638 cascade='all')
1639 1639 extra_fields = relationship(
1640 1640 'RepositoryField', cascade="all, delete, delete-orphan")
1641 1641 logs = relationship('UserLog')
1642 1642 comments = relationship(
1643 1643 'ChangesetComment', cascade="all, delete, delete-orphan")
1644 1644 pull_requests_source = relationship(
1645 1645 'PullRequest',
1646 1646 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1647 1647 cascade="all, delete, delete-orphan")
1648 1648 pull_requests_target = relationship(
1649 1649 'PullRequest',
1650 1650 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1651 1651 cascade="all, delete, delete-orphan")
1652 1652 ui = relationship('RepoRhodeCodeUi', cascade="all")
1653 1653 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1654 1654 integrations = relationship('Integration',
1655 1655 cascade="all, delete, delete-orphan")
1656 1656
1657 1657 scoped_tokens = relationship('UserApiKeys', cascade="all")
1658 1658
1659 1659 def __unicode__(self):
1660 1660 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1661 1661 safe_unicode(self.repo_name))
1662 1662
1663 1663 @hybrid_property
1664 1664 def description_safe(self):
1665 1665 from rhodecode.lib import helpers as h
1666 1666 return h.escape(self.description)
1667 1667
1668 1668 @hybrid_property
1669 1669 def landing_rev(self):
1670 1670 # always should return [rev_type, rev]
1671 1671 if self._landing_revision:
1672 1672 _rev_info = self._landing_revision.split(':')
1673 1673 if len(_rev_info) < 2:
1674 1674 _rev_info.insert(0, 'rev')
1675 1675 return [_rev_info[0], _rev_info[1]]
1676 1676 return [None, None]
1677 1677
1678 1678 @landing_rev.setter
1679 1679 def landing_rev(self, val):
1680 1680 if ':' not in val:
1681 1681 raise ValueError('value must be delimited with `:` and consist '
1682 1682 'of <rev_type>:<rev>, got %s instead' % val)
1683 1683 self._landing_revision = val
1684 1684
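# Illustrative sketch (not part of this diff): the landing revision is stored as one
# '<rev_type>:<rev>' string (for example 'branch:default' or 'rev:tip'); the getter
# above splits it and assumes a 'rev' type for legacy single-part values:

def parse_landing_rev(stored):
    if not stored:
        return [None, None]
    rev_info = stored.split(':')
    if len(rev_info) < 2:
        rev_info.insert(0, 'rev')
    return [rev_info[0], rev_info[1]]

# parse_landing_rev('branch:default') -> ['branch', 'default']
# parse_landing_rev('tip')            -> ['rev', 'tip']
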
1685 1685 @hybrid_property
1686 1686 def locked(self):
1687 1687 if self._locked:
1688 1688 user_id, timelocked, reason = self._locked.split(':')
1689 1689 lock_values = int(user_id), timelocked, reason
1690 1690 else:
1691 1691 lock_values = [None, None, None]
1692 1692 return lock_values
1693 1693
1694 1694 @locked.setter
1695 1695 def locked(self, val):
1696 1696 if val and isinstance(val, (list, tuple)):
1697 1697 self._locked = ':'.join(map(str, val))
1698 1698 else:
1699 1699 self._locked = None
1700 1700
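# Illustrative sketch (not part of this diff): the lock state is a single
# 'user_id:time_locked:reason' string (reason is one of the LOCK_* constants above);
# the getter splits it back apart and the setter joins a (user_id, time, reason)
# tuple:

def parse_lock(stored):
    if stored:
        user_id, time_locked, reason = stored.split(':')
        return int(user_id), time_locked, reason
    return [None, None, None]

def serialize_lock(values):
    if values and isinstance(values, (list, tuple)):
        return ':'.join(map(str, values))
    return None

# serialize_lock((2, 1546300800, 'lock_web')) -> '2:1546300800:lock_web'
# parse_lock('2:1546300800:lock_web')         -> (2, '1546300800', 'lock_web')
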
1701 1701 @hybrid_property
1702 1702 def changeset_cache(self):
1703 1703 from rhodecode.lib.vcs.backends.base import EmptyCommit
1704 1704 dummy = EmptyCommit().__json__()
1705 1705 if not self._changeset_cache:
1706 1706 return dummy
1707 1707 try:
1708 1708 return json.loads(self._changeset_cache)
1709 1709 except TypeError:
1710 1710 return dummy
1711 1711 except Exception:
1712 1712 log.error(traceback.format_exc())
1713 1713 return dummy
1714 1714
1715 1715 @changeset_cache.setter
1716 1716 def changeset_cache(self, val):
1717 1717 try:
1718 1718 self._changeset_cache = json.dumps(val)
1719 1719 except Exception:
1720 1720 log.error(traceback.format_exc())
1721 1721
1722 1722 @hybrid_property
1723 1723 def repo_name(self):
1724 1724 return self._repo_name
1725 1725
1726 1726 @repo_name.setter
1727 1727 def repo_name(self, value):
1728 1728 self._repo_name = value
1729 1729 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1730 1730
1731 1731 @classmethod
1732 1732 def normalize_repo_name(cls, repo_name):
1733 1733 """
1734 1734 Normalizes an OS-specific repo_name to the format stored internally in
1735 1735 the database, using URL_SEP
1736 1736
1737 1737 :param cls:
1738 1738 :param repo_name:
1739 1739 """
1740 1740 return cls.NAME_SEP.join(repo_name.split(os.sep))
1741 1741
1742 1742 @classmethod
1743 1743 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1744 1744 session = Session()
1745 1745 q = session.query(cls).filter(cls.repo_name == repo_name)
1746 1746
1747 1747 if cache:
1748 1748 if identity_cache:
1749 1749 val = cls.identity_cache(session, 'repo_name', repo_name)
1750 1750 if val:
1751 1751 return val
1752 1752 else:
1753 1753 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1754 1754 q = q.options(
1755 1755 FromCache("sql_cache_short", cache_key))
1756 1756
1757 1757 return q.scalar()
1758 1758
1759 1759 @classmethod
1760 1760 def get_by_id_or_repo_name(cls, repoid):
1761 1761 if isinstance(repoid, (int, long)):
1762 1762 try:
1763 1763 repo = cls.get(repoid)
1764 1764 except ValueError:
1765 1765 repo = None
1766 1766 else:
1767 1767 repo = cls.get_by_repo_name(repoid)
1768 1768 return repo
1769 1769
1770 1770 @classmethod
1771 1771 def get_by_full_path(cls, repo_full_path):
1772 1772 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1773 1773 repo_name = cls.normalize_repo_name(repo_name)
1774 1774 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1775 1775
1776 1776 @classmethod
1777 1777 def get_repo_forks(cls, repo_id):
1778 1778 return cls.query().filter(Repository.fork_id == repo_id)
1779 1779
1780 1780 @classmethod
1781 1781 def base_path(cls):
1782 1782 """
1783 1783 Returns the base path where all repos are stored
1784 1784
1785 1785 :param cls:
1786 1786 """
1787 1787 q = Session().query(RhodeCodeUi)\
1788 1788 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1789 1789 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1790 1790 return q.one().ui_value
1791 1791
1792 1792 @classmethod
1793 1793 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1794 1794 case_insensitive=True, archived=False):
1795 1795 q = Repository.query()
1796 1796
1797 1797 if not archived:
1798 1798 q = q.filter(Repository.archived.isnot(true()))
1799 1799
1800 1800 if not isinstance(user_id, Optional):
1801 1801 q = q.filter(Repository.user_id == user_id)
1802 1802
1803 1803 if not isinstance(group_id, Optional):
1804 1804 q = q.filter(Repository.group_id == group_id)
1805 1805
1806 1806 if case_insensitive:
1807 1807 q = q.order_by(func.lower(Repository.repo_name))
1808 1808 else:
1809 1809 q = q.order_by(Repository.repo_name)
1810 1810
1811 1811 return q.all()
1812 1812
1813 1813 @property
1814 1814 def forks(self):
1815 1815 """
1816 1816 Return forks of this repo
1817 1817 """
1818 1818 return Repository.get_repo_forks(self.repo_id)
1819 1819
1820 1820 @property
1821 1821 def parent(self):
1822 1822 """
1823 1823 Returns fork parent
1824 1824 """
1825 1825 return self.fork
1826 1826
1827 1827 @property
1828 1828 def just_name(self):
1829 1829 return self.repo_name.split(self.NAME_SEP)[-1]
1830 1830
1831 1831 @property
1832 1832 def groups_with_parents(self):
1833 1833 groups = []
1834 1834 if self.group is None:
1835 1835 return groups
1836 1836
1837 1837 cur_gr = self.group
1838 1838 groups.insert(0, cur_gr)
1839 1839 while 1:
1840 1840 gr = getattr(cur_gr, 'parent_group', None)
1841 1841 cur_gr = cur_gr.parent_group
1842 1842 if gr is None:
1843 1843 break
1844 1844 groups.insert(0, gr)
1845 1845
1846 1846 return groups
1847 1847
1848 1848 @property
1849 1849 def groups_and_repo(self):
1850 1850 return self.groups_with_parents, self
1851 1851
1852 1852 @LazyProperty
1853 1853 def repo_path(self):
1854 1854 """
1855 1855 Returns the full base path for this repository, i.e. where it actually
1856 1856 exists on the filesystem
1857 1857 """
1858 1858 q = Session().query(RhodeCodeUi).filter(
1859 1859 RhodeCodeUi.ui_key == self.NAME_SEP)
1860 1860 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1861 1861 return q.one().ui_value
1862 1862
1863 1863 @property
1864 1864 def repo_full_path(self):
1865 1865 p = [self.repo_path]
1866 1866 # we need to split the name by / since this is how we store the
1867 1867 # names in the database, but that eventually needs to be converted
1868 1868 # into a valid system path
1869 1869 p += self.repo_name.split(self.NAME_SEP)
1870 1870 return os.path.join(*map(safe_unicode, p))
1871 1871
1872 1872 @property
1873 1873 def cache_keys(self):
1874 1874 """
1875 1875 Returns associated cache keys for that repo
1876 1876 """
1877 1877 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1878 1878 repo_id=self.repo_id)
1879 1879 return CacheKey.query()\
1880 1880 .filter(CacheKey.cache_args == invalidation_namespace)\
1881 1881 .order_by(CacheKey.cache_key)\
1882 1882 .all()
1883 1883
1884 1884 @property
1885 1885 def cached_diffs_relative_dir(self):
1886 1886 """
1887 1887 Return the cached diffs directory as a path relative to the repository
1888 1888 store, used for safe display to users, who shouldn't know the absolute
1889 1889 store path
1890 1890 """
1891 1891 return os.path.join(
1892 1892 os.path.dirname(self.repo_name),
1893 1893 self.cached_diffs_dir.split(os.path.sep)[-1])
1894 1894
1895 1895 @property
1896 1896 def cached_diffs_dir(self):
1897 1897 path = self.repo_full_path
1898 1898 return os.path.join(
1899 1899 os.path.dirname(path),
1900 1900 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1901 1901
1902 1902 def cached_diffs(self):
1903 1903 diff_cache_dir = self.cached_diffs_dir
1904 1904 if os.path.isdir(diff_cache_dir):
1905 1905 return os.listdir(diff_cache_dir)
1906 1906 return []
1907 1907
1908 1908 def shadow_repos(self):
1909 1909 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1910 1910 return [
1911 1911 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1912 1912 if x.startswith(shadow_repos_pattern)]
1913 1913
1914 1914 def get_new_name(self, repo_name):
1915 1915 """
1916 1916 returns new full repository name based on assigned group and new name
1917 1917 
1918 1918 :param repo_name:
1919 1919 """
1920 1920 path_prefix = self.group.full_path_splitted if self.group else []
1921 1921 return self.NAME_SEP.join(path_prefix + [repo_name])
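    # Illustrative example (not part of the original diff): for a repository
    # assigned to a (hypothetical) group whose full path is 'docs/web',
    # repo.get_new_name('site') returns 'docs/web/site'.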
1922 1922
1923 1923 @property
1924 1924 def _config(self):
1925 1925 """
1926 1926 Returns db based config object.
1927 1927 """
1928 1928 from rhodecode.lib.utils import make_db_config
1929 1929 return make_db_config(clear_session=False, repo=self)
1930 1930
1931 1931 def permissions(self, with_admins=True, with_owner=True):
1932 1932 """
1933 1933 Permissions for repositories
1934 1934 """
1935 1935 _admin_perm = 'repository.admin'
1936 1936
1937 1937 owner_row = []
1938 1938 if with_owner:
1939 1939 usr = AttributeDict(self.user.get_dict())
1940 1940 usr.owner_row = True
1941 1941 usr.permission = _admin_perm
1942 1942 usr.permission_id = None
1943 1943 owner_row.append(usr)
1944 1944
1945 1945 super_admin_ids = []
1946 1946 super_admin_rows = []
1947 1947 if with_admins:
1948 1948 for usr in User.get_all_super_admins():
1949 1949 super_admin_ids.append(usr.user_id)
1950 1950 # if this admin is also owner, don't double the record
1951 1951 if usr.user_id == owner_row[0].user_id:
1952 1952 owner_row[0].admin_row = True
1953 1953 else:
1954 1954 usr = AttributeDict(usr.get_dict())
1955 1955 usr.admin_row = True
1956 1956 usr.permission = _admin_perm
1957 1957 usr.permission_id = None
1958 1958 super_admin_rows.append(usr)
1959 1959
1960 1960 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1961 1961 q = q.options(joinedload(UserRepoToPerm.repository),
1962 1962 joinedload(UserRepoToPerm.user),
1963 1963 joinedload(UserRepoToPerm.permission),)
1964 1964
1965 1965 # get owners, admins and their permissions. We re-write the sqlalchemy
1966 1966 # objects into detached AttributeDicts because the sqlalchemy session
1967 1967 # keeps a global reference, so changing one object propagates to all
1968 1968 # others. Otherwise, if an admin is also the owner, setting admin_row
1969 1969 # would change both objects
1970 1970 perm_rows = []
1971 1971 for _usr in q.all():
1972 1972 usr = AttributeDict(_usr.user.get_dict())
1973 1973 # if this user is also owner/admin, mark as duplicate record
1974 1974 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1975 1975 usr.duplicate_perm = True
1976 1976 # also check if this permission is maybe used by branch_permissions
1977 1977 if _usr.branch_perm_entry:
1978 1978 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
1979 1979
1980 1980 usr.permission = _usr.permission.permission_name
1981 1981 usr.permission_id = _usr.repo_to_perm_id
1982 1982 perm_rows.append(usr)
1983 1983
1984 1984 # sort the perm rows: the 'default' user first, then by
1985 1985 # admin/write/read/none permission, sorted alphabetically within
1986 1986 # each group
1987 1987 perm_rows = sorted(perm_rows, key=display_user_sort)
1988 1988
1989 1989 return super_admin_rows + owner_row + perm_rows
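    # Illustrative sketch of the returned rows (not part of the original diff):
    #
    #     rows = repo.permissions()
    #     # -> [<super-admin rows>, <owner row>, <explicit permission rows>],
    #     #    each an AttributeDict of user fields plus `permission`,
    #     #    `permission_id` and flags like owner_row/admin_row/duplicate_perm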
1990 1990
1991 1991 def permission_user_groups(self):
1992 1992 q = UserGroupRepoToPerm.query().filter(
1993 1993 UserGroupRepoToPerm.repository == self)
1994 1994 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1995 1995 joinedload(UserGroupRepoToPerm.users_group),
1996 1996 joinedload(UserGroupRepoToPerm.permission),)
1997 1997
1998 1998 perm_rows = []
1999 1999 for _user_group in q.all():
2000 2000 usr = AttributeDict(_user_group.users_group.get_dict())
2001 2001 usr.permission = _user_group.permission.permission_name
2002 2002 perm_rows.append(usr)
2003 2003
2004 2004 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2005 2005 return perm_rows
2006 2006
2007 2007 def get_api_data(self, include_secrets=False):
2008 2008 """
2009 2009 Common function for generating repo api data
2010 2010
2011 2011 :param include_secrets: See :meth:`User.get_api_data`.
2012 2012
2013 2013 """
2014 2014 # TODO: mikhail: there is an anti-pattern here, we probably need to
2015 2015 # move these methods to the model level.
2016 2016 from rhodecode.model.settings import SettingsModel
2017 2017 from rhodecode.model.repo import RepoModel
2018 2018
2019 2019 repo = self
2020 2020 _user_id, _time, _reason = self.locked
2021 2021
2022 2022 data = {
2023 2023 'repo_id': repo.repo_id,
2024 2024 'repo_name': repo.repo_name,
2025 2025 'repo_type': repo.repo_type,
2026 2026 'clone_uri': repo.clone_uri or '',
2027 2027 'push_uri': repo.push_uri or '',
2028 2028 'url': RepoModel().get_url(self),
2029 2029 'private': repo.private,
2030 2030 'created_on': repo.created_on,
2031 2031 'description': repo.description_safe,
2032 2032 'landing_rev': repo.landing_rev,
2033 2033 'owner': repo.user.username,
2034 2034 'fork_of': repo.fork.repo_name if repo.fork else None,
2035 2035 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2036 2036 'enable_statistics': repo.enable_statistics,
2037 2037 'enable_locking': repo.enable_locking,
2038 2038 'enable_downloads': repo.enable_downloads,
2039 2039 'last_changeset': repo.changeset_cache,
2040 2040 'locked_by': User.get(_user_id).get_api_data(
2041 2041 include_secrets=include_secrets) if _user_id else None,
2042 2042 'locked_date': time_to_datetime(_time) if _time else None,
2043 2043 'lock_reason': _reason if _reason else None,
2044 2044 }
2045 2045
2046 2046 # TODO: mikhail: should be per-repo settings here
2047 2047 rc_config = SettingsModel().get_all_settings()
2048 2048 repository_fields = str2bool(
2049 2049 rc_config.get('rhodecode_repository_fields'))
2050 2050 if repository_fields:
2051 2051 for f in self.extra_fields:
2052 2052 data[f.field_key_prefixed] = f.field_value
2053 2053
2054 2054 return data
2055 2055
2056 2056 @classmethod
2057 2057 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2058 2058 if not lock_time:
2059 2059 lock_time = time.time()
2060 2060 if not lock_reason:
2061 2061 lock_reason = cls.LOCK_AUTOMATIC
2062 2062 repo.locked = [user_id, lock_time, lock_reason]
2063 2063 Session().add(repo)
2064 2064 Session().commit()
2065 2065
2066 2066 @classmethod
2067 2067 def unlock(cls, repo):
2068 2068 repo.locked = None
2069 2069 Session().add(repo)
2070 2070 Session().commit()
2071 2071
2072 2072 @classmethod
2073 2073 def getlock(cls, repo):
2074 2074 return repo.locked
2075 2075
2076 2076 def is_user_lock(self, user_id):
2077 2077 if self.lock[0]:
2078 2078 lock_user_id = safe_int(self.lock[0])
2079 2079 user_id = safe_int(user_id)
2080 2080 # both are ints, and they are equal
2081 2081 return all([lock_user_id, user_id]) and lock_user_id == user_id
2082 2082
2083 2083 return False
2084 2084
2085 2085 def get_locking_state(self, action, user_id, only_when_enabled=True):
2086 2086 """
2087 2087 Checks locking on this repository. If locking is enabled and a lock is
2088 2088 present, returns a tuple of make_lock, locked, locked_by.
2089 2089 make_lock can have 3 states: None (do nothing), True (make a lock),
2090 2090 False (release the lock). This value is later propagated to the hooks,
2091 2091 which do the locking. Think of it as a signal telling the hooks what to do.
2092 2092
2093 2093 """
2094 2094 # TODO: johbo: This is part of the business logic and should be moved
2095 2095 # into the RepositoryModel.
2096 2096
2097 2097 if action not in ('push', 'pull'):
2098 2098 raise ValueError("Invalid action value: %s" % repr(action))
2099 2099
2100 2100 # defines if locked error should be thrown to user
2101 2101 currently_locked = False
2102 2102 # defines if new lock should be made, tri-state
2103 2103 make_lock = None
2104 2104 repo = self
2105 2105 user = User.get(user_id)
2106 2106
2107 2107 lock_info = repo.locked
2108 2108
2109 2109 if repo and (repo.enable_locking or not only_when_enabled):
2110 2110 if action == 'push':
2111 2111 # check if it's already locked; if it is, compare users
2112 2112 locked_by_user_id = lock_info[0]
2113 2113 if user.user_id == locked_by_user_id:
2114 2114 log.debug(
2115 2115 'Got `push` action from user %s, now unlocking', user)
2116 2116 # unlock if we have push from user who locked
2117 2117 make_lock = False
2118 2118 else:
2119 2119 # we're not the user who locked the repo, so reject with the
2120 2120 # code defined in settings (default is HTTP 423 Locked)
2121 2121 log.debug('Repo %s is currently locked by %s', repo, user)
2122 2122 currently_locked = True
2123 2123 elif action == 'pull':
2124 2124 # [0] user [1] date
2125 2125 if lock_info[0] and lock_info[1]:
2126 2126 log.debug('Repo %s is currently locked by %s', repo, user)
2127 2127 currently_locked = True
2128 2128 else:
2129 2129 log.debug('Setting lock on repo %s by %s', repo, user)
2130 2130 make_lock = True
2131 2131
2132 2132 else:
2133 2133 log.debug('Repository %s does not have locking enabled', repo)
2134 2134
2135 2135 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2136 2136 make_lock, currently_locked, lock_info)
2137 2137
2138 2138 from rhodecode.lib.auth import HasRepoPermissionAny
2139 2139 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2140 2140 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2141 2141 # if we don't have at least write permission we cannot make a lock
2142 2142 log.debug('lock state reset back to FALSE due to lack '
2143 2143 'of at least write permission')
2144 2144 make_lock = False
2145 2145
2146 2146 return make_lock, currently_locked, lock_info
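    # Minimal usage sketch (not part of the original diff), e.g. from hook-side
    # code that has already resolved `repo` and `user_id`:
    #
    #     make_lock, locked, lock_info = repo.get_locking_state('push', user_id)
    #     if locked:
    #         pass  # reject the operation (default is HTTP 423 Locked)
    #     elif make_lock:
    #         Repository.lock(repo, user_id)
    #     elif make_lock is False:
    #         Repository.unlock(repo)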
2147 2147
2148 2148 @property
2149 2149 def last_db_change(self):
2150 2150 return self.updated_on
2151 2151
2152 2152 @property
2153 2153 def clone_uri_hidden(self):
2154 2154 clone_uri = self.clone_uri
2155 2155 if clone_uri:
2156 2156 import urlobject
2157 2157 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2158 2158 if url_obj.password:
2159 2159 clone_uri = url_obj.with_password('*****')
2160 2160 return clone_uri
2161 2161
2162 2162 @property
2163 2163 def push_uri_hidden(self):
2164 2164 push_uri = self.push_uri
2165 2165 if push_uri:
2166 2166 import urlobject
2167 2167 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2168 2168 if url_obj.password:
2169 2169 push_uri = url_obj.with_password('*****')
2170 2170 return push_uri
2171 2171
2172 2172 def clone_url(self, **override):
2173 2173 from rhodecode.model.settings import SettingsModel
2174 2174
2175 2175 uri_tmpl = None
2176 2176 if 'with_id' in override:
2177 2177 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2178 2178 del override['with_id']
2179 2179
2180 2180 if 'uri_tmpl' in override:
2181 2181 uri_tmpl = override['uri_tmpl']
2182 2182 del override['uri_tmpl']
2183 2183
2184 2184 ssh = False
2185 2185 if 'ssh' in override:
2186 2186 ssh = True
2187 2187 del override['ssh']
2188 2188
2189 2189 # we didn't override our tmpl via **override
2190 2190 if not uri_tmpl:
2191 2191 rc_config = SettingsModel().get_all_settings(cache=True)
2192 2192 if ssh:
2193 2193 uri_tmpl = rc_config.get(
2194 2194 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2195 2195 else:
2196 2196 uri_tmpl = rc_config.get(
2197 2197 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2198 2198
2199 2199 request = get_current_request()
2200 2200 return get_clone_url(request=request,
2201 2201 uri_tmpl=uri_tmpl,
2202 2202 repo_name=self.repo_name,
2203 2203 repo_id=self.repo_id, **override)
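    # Illustrative calls (not part of the original diff):
    #
    #     repo.clone_url()              # uses the configured HTTP(S) clone template
    #     repo.clone_url(with_id=True)  # uses DEFAULT_CLONE_URI_ID (URL by repo id)
    #     repo.clone_url(ssh=True)      # uses the configured SSH clone template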
2204 2204
2205 2205 def set_state(self, state):
2206 2206 self.repo_state = state
2207 2207 Session().add(self)
2208 2208 #==========================================================================
2209 2209 # SCM PROPERTIES
2210 2210 #==========================================================================
2211 2211
2212 2212 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2213 2213 return get_commit_safe(
2214 2214 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2215 2215
2216 2216 def get_changeset(self, rev=None, pre_load=None):
2217 2217 warnings.warn("Use get_commit", DeprecationWarning)
2218 2218 commit_id = None
2219 2219 commit_idx = None
2220 2220 if isinstance(rev, basestring):
2221 2221 commit_id = rev
2222 2222 else:
2223 2223 commit_idx = rev
2224 2224 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2225 2225 pre_load=pre_load)
2226 2226
2227 2227 def get_landing_commit(self):
2228 2228 """
2229 2229 Returns landing commit, or if that doesn't exist returns the tip
2230 2230 """
2231 2231 _rev_type, _rev = self.landing_rev
2232 2232 commit = self.get_commit(_rev)
2233 2233 if isinstance(commit, EmptyCommit):
2234 2234 return self.get_commit()
2235 2235 return commit
2236 2236
2237 2237 def update_commit_cache(self, cs_cache=None, config=None):
2238 2238 """
2239 2239 Update cache of last changeset for repository, keys should be::
2240 2240
2241 2241 short_id
2242 2242 raw_id
2243 2243 revision
2244 2244 parents
2245 2245 message
2246 2246 date
2247 2247 author
2248 2248
2249 2249 :param cs_cache:
2250 2250 """
2251 2251 from rhodecode.lib.vcs.backends.base import BaseChangeset
2252 2252 if cs_cache is None:
2253 2253 # use no-cache version here
2254 2254 scm_repo = self.scm_instance(cache=False, config=config)
2255 2255
2256 2256 empty = scm_repo.is_empty()
2257 2257 if not empty:
2258 2258 cs_cache = scm_repo.get_commit(
2259 2259 pre_load=["author", "date", "message", "parents"])
2260 2260 else:
2261 2261 cs_cache = EmptyCommit()
2262 2262
2263 2263 if isinstance(cs_cache, BaseChangeset):
2264 2264 cs_cache = cs_cache.__json__()
2265 2265
2266 2266 def is_outdated(new_cs_cache):
2267 2267 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2268 2268 new_cs_cache['revision'] != self.changeset_cache['revision']):
2269 2269 return True
2270 2270 return False
2271 2271
2272 2272 # check if we maybe already have the latest cached revision
2273 2273 if is_outdated(cs_cache) or not self.changeset_cache:
2274 2274 _default = datetime.datetime.utcnow()
2275 2275 last_change = cs_cache.get('date') or _default
2276 2276 if self.updated_on and self.updated_on > last_change:
2277 2277 # we check if the stored last update is newer than the new value;
2278 2278 # if yes, we use the current timestamp instead. Imagine an old commit,
2279 2279 # made 1y ago, being pushed now: we'd otherwise set the last update to 1y ago.
2280 2280 last_change = _default
2281 2281 log.debug('updated repo %s with new cs cache %s',
2282 2282 self.repo_name, cs_cache)
2283 2283 self.updated_on = last_change
2284 2284 self.changeset_cache = cs_cache
2285 2285 Session().add(self)
2286 2286 Session().commit()
2287 2287 else:
2288 2288 log.debug('Skipping update_commit_cache for repo:`%s` '
2289 2289 'cache already holds the latest commit', self.repo_name)
2290 2290
2291 2291 @property
2292 2292 def tip(self):
2293 2293 return self.get_commit('tip')
2294 2294
2295 2295 @property
2296 2296 def author(self):
2297 2297 return self.tip.author
2298 2298
2299 2299 @property
2300 2300 def last_change(self):
2301 2301 return self.scm_instance().last_change
2302 2302
2303 2303 def get_comments(self, revisions=None):
2304 2304 """
2305 2305 Returns comments for this repository grouped by revisions
2306 2306
2307 2307 :param revisions: filter query by revisions only
2308 2308 """
2309 2309 cmts = ChangesetComment.query()\
2310 2310 .filter(ChangesetComment.repo == self)
2311 2311 if revisions:
2312 2312 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2313 2313 grouped = collections.defaultdict(list)
2314 2314 for cmt in cmts.all():
2315 2315 grouped[cmt.revision].append(cmt)
2316 2316 return grouped
2317 2317
2318 2318 def statuses(self, revisions=None):
2319 2319 """
2320 2320 Returns statuses for this repository
2321 2321
2322 2322 :param revisions: list of revisions to get statuses for
2323 2323 """
2324 2324 statuses = ChangesetStatus.query()\
2325 2325 .filter(ChangesetStatus.repo == self)\
2326 2326 .filter(ChangesetStatus.version == 0)
2327 2327
2328 2328 if revisions:
2329 2329 # Try doing the filtering in chunks to avoid hitting limits
2330 2330 size = 500
2331 2331 status_results = []
2332 2332 for chunk in xrange(0, len(revisions), size):
2333 2333 status_results += statuses.filter(
2334 2334 ChangesetStatus.revision.in_(
2335 2335 revisions[chunk: chunk+size])
2336 2336 ).all()
2337 2337 else:
2338 2338 status_results = statuses.all()
2339 2339
2340 2340 grouped = {}
2341 2341
2342 2342 # maybe we have an open pull request without a status yet?
2343 2343 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2344 2344 status_lbl = ChangesetStatus.get_status_lbl(stat)
2345 2345 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2346 2346 for rev in pr.revisions:
2347 2347 pr_id = pr.pull_request_id
2348 2348 pr_repo = pr.target_repo.repo_name
2349 2349 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2350 2350
2351 2351 for stat in status_results:
2352 2352 pr_id = pr_repo = None
2353 2353 if stat.pull_request:
2354 2354 pr_id = stat.pull_request.pull_request_id
2355 2355 pr_repo = stat.pull_request.target_repo.repo_name
2356 2356 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2357 2357 pr_id, pr_repo]
2358 2358 return grouped
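    # Illustrative shape of the returned mapping (not part of the original
    # diff); revisions, ids and labels below are hypothetical:
    #
    #     {'a91c1ea...': ['approved', 'Approved', 12, 'group/target-repo'],
    #      'deadbee...': ['under_review', 'Under Review', 15, 'group/target-repo']}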
2359 2359
2360 2360 # ==========================================================================
2361 2361 # SCM CACHE INSTANCE
2362 2362 # ==========================================================================
2363 2363
2364 2364 def scm_instance(self, **kwargs):
2365 2365 import rhodecode
2366 2366
2367 2367 # Passing a config will not hit the cache; currently this is only
2368 2368 # used by repo2dbmapper
2369 2369 config = kwargs.pop('config', None)
2370 2370 cache = kwargs.pop('cache', None)
2371 2371 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2372 2372 # if cache is NOT defined, use the global default; otherwise we have
2373 2373 # full control over the cache behaviour
2374 2374 if cache is None and full_cache and not config:
2375 2375 return self._get_instance_cached()
2376 2376 return self._get_instance(cache=bool(cache), config=config)
2377 2377
2378 2378 def _get_instance_cached(self):
2379 2379 from rhodecode.lib import rc_cache
2380 2380
2381 2381 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2382 2382 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2383 2383 repo_id=self.repo_id)
2384 2384 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2385 2385
2386 2386 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2387 2387 def get_instance_cached(repo_id, context_id):
2388 2388 return self._get_instance()
2389 2389
2390 2390 # we must use a thread-scoped cache here,
2391 2391 # because each gevent thread needs its own, non-shared connection and cache.
2392 2392 # We also alter `args` so the cache key is individual for every green thread.
2393 2393 inv_context_manager = rc_cache.InvalidationContext(
2394 2394 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2395 2395 thread_scoped=True)
2396 2396 with inv_context_manager as invalidation_context:
2397 2397 args = (self.repo_id, inv_context_manager.cache_key)
2398 2398 # re-compute and store the cache if we get an invalidation signal
2399 2399 if invalidation_context.should_invalidate():
2400 2400 instance = get_instance_cached.refresh(*args)
2401 2401 else:
2402 2402 instance = get_instance_cached(*args)
2403 2403
2404 2404 log.debug(
2405 2405 'Repo instance fetched in %.3fs', inv_context_manager.compute_time)
2406 2406 return instance
2407 2407
2408 2408 def _get_instance(self, cache=True, config=None):
2409 2409 config = config or self._config
2410 2410 custom_wire = {
2411 2411 'cache': cache # controls the vcs.remote cache
2412 2412 }
2413 2413 repo = get_vcs_instance(
2414 2414 repo_path=safe_str(self.repo_full_path),
2415 2415 config=config,
2416 2416 with_wire=custom_wire,
2417 2417 create=False,
2418 2418 _vcs_alias=self.repo_type)
2419 2419
2420 2420 return repo
2421 2421
2422 2422 def __json__(self):
2423 2423 return {'landing_rev': self.landing_rev}
2424 2424
2425 2425 def get_dict(self):
2426 2426
2427 2427 # Since we transformed `repo_name` to a hybrid property, we need to
2428 2428 # keep compatibility with the code which uses `repo_name` field.
2429 2429
2430 2430 result = super(Repository, self).get_dict()
2431 2431 result['repo_name'] = result.pop('_repo_name', None)
2432 2432 return result
2433 2433
2434 2434
2435 2435 class RepoGroup(Base, BaseModel):
2436 2436 __tablename__ = 'groups'
2437 2437 __table_args__ = (
2438 2438 UniqueConstraint('group_name', 'group_parent_id'),
2439 2439 CheckConstraint('group_id != group_parent_id'),
2440 2440 base_table_args,
2441 2441 )
2442 2442 __mapper_args__ = {'order_by': 'group_name'}
2443 2443
2444 2444 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2445 2445
2446 2446 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2447 2447 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2448 2448 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2449 2449 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2450 2450 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2451 2451 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2452 2452 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2453 2453 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2454 2454 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2455 2455
2456 2456 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2457 2457 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2458 2458 parent_group = relationship('RepoGroup', remote_side=group_id)
2459 2459 user = relationship('User')
2460 2460 integrations = relationship('Integration',
2461 2461 cascade="all, delete, delete-orphan")
2462 2462
2463 2463 def __init__(self, group_name='', parent_group=None):
2464 2464 self.group_name = group_name
2465 2465 self.parent_group = parent_group
2466 2466
2467 2467 def __unicode__(self):
2468 2468 return u"<%s('id:%s:%s')>" % (
2469 2469 self.__class__.__name__, self.group_id, self.group_name)
2470 2470
2471 2471 @hybrid_property
2472 2472 def description_safe(self):
2473 2473 from rhodecode.lib import helpers as h
2474 2474 return h.escape(self.group_description)
2475 2475
2476 2476 @classmethod
2477 2477 def _generate_choice(cls, repo_group):
2478 2478 from webhelpers.html import literal as _literal
2479 2479 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2480 2480 return repo_group.group_id, _name(repo_group.full_path_splitted)
2481 2481
2482 2482 @classmethod
2483 2483 def groups_choices(cls, groups=None, show_empty_group=True):
2484 2484 if not groups:
2485 2485 groups = cls.query().all()
2486 2486
2487 2487 repo_groups = []
2488 2488 if show_empty_group:
2489 2489 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2490 2490
2491 2491 repo_groups.extend([cls._generate_choice(x) for x in groups])
2492 2492
2493 2493 repo_groups = sorted(
2494 2494 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2495 2495 return repo_groups
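    # Illustrative result (not part of the original diff); ids and names are
    # hypothetical:
    #
    #     RepoGroup.groups_choices()
    #     # -> [(-1, u'-- No parent --'), (3, u'docs'), (7, u'docs/web'), ...]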
2496 2496
2497 2497 @classmethod
2498 2498 def url_sep(cls):
2499 2499 return URL_SEP
2500 2500
2501 2501 @classmethod
2502 2502 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2503 2503 if case_insensitive:
2504 2504 gr = cls.query().filter(func.lower(cls.group_name)
2505 2505 == func.lower(group_name))
2506 2506 else:
2507 2507 gr = cls.query().filter(cls.group_name == group_name)
2508 2508 if cache:
2509 2509 name_key = _hash_key(group_name)
2510 2510 gr = gr.options(
2511 2511 FromCache("sql_cache_short", "get_group_%s" % name_key))
2512 2512 return gr.scalar()
2513 2513
2514 2514 @classmethod
2515 2515 def get_user_personal_repo_group(cls, user_id):
2516 2516 user = User.get(user_id)
2517 2517 if user.username == User.DEFAULT_USER:
2518 2518 return None
2519 2519
2520 2520 return cls.query()\
2521 2521 .filter(cls.personal == true()) \
2522 2522 .filter(cls.user == user) \
2523 2523 .order_by(cls.group_id.asc()) \
2524 2524 .first()
2525 2525
2526 2526 @classmethod
2527 2527 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2528 2528 case_insensitive=True):
2529 2529 q = RepoGroup.query()
2530 2530
2531 2531 if not isinstance(user_id, Optional):
2532 2532 q = q.filter(RepoGroup.user_id == user_id)
2533 2533
2534 2534 if not isinstance(group_id, Optional):
2535 2535 q = q.filter(RepoGroup.group_parent_id == group_id)
2536 2536
2537 2537 if case_insensitive:
2538 2538 q = q.order_by(func.lower(RepoGroup.group_name))
2539 2539 else:
2540 2540 q = q.order_by(RepoGroup.group_name)
2541 2541 return q.all()
2542 2542
2543 2543 @property
2544 2544 def parents(self):
2545 2545 parents_recursion_limit = 10
2546 2546 groups = []
2547 2547 if self.parent_group is None:
2548 2548 return groups
2549 2549 cur_gr = self.parent_group
2550 2550 groups.insert(0, cur_gr)
2551 2551 cnt = 0
2552 2552 while 1:
2553 2553 cnt += 1
2554 2554 gr = getattr(cur_gr, 'parent_group', None)
2555 2555 cur_gr = cur_gr.parent_group
2556 2556 if gr is None:
2557 2557 break
2558 2558 if cnt == parents_recursion_limit:
2559 2559 # this will prevent accidental infinite loops
2560 2560 log.error('more than %s parents found for group %s, stopping '
2561 2561 'recursive parent fetching', parents_recursion_limit, self)
2562 2562 break
2563 2563
2564 2564 groups.insert(0, gr)
2565 2565 return groups
2566 2566
2567 2567 @property
2568 2568 def last_db_change(self):
2569 2569 return self.updated_on
2570 2570
2571 2571 @property
2572 2572 def children(self):
2573 2573 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2574 2574
2575 2575 @property
2576 2576 def name(self):
2577 2577 return self.group_name.split(RepoGroup.url_sep())[-1]
2578 2578
2579 2579 @property
2580 2580 def full_path(self):
2581 2581 return self.group_name
2582 2582
2583 2583 @property
2584 2584 def full_path_splitted(self):
2585 2585 return self.group_name.split(RepoGroup.url_sep())
2586 2586
2587 2587 @property
2588 2588 def repositories(self):
2589 2589 return Repository.query()\
2590 2590 .filter(Repository.group == self)\
2591 2591 .order_by(Repository.repo_name)
2592 2592
2593 2593 @property
2594 2594 def repositories_recursive_count(self):
2595 2595 cnt = self.repositories.count()
2596 2596
2597 2597 def children_count(group):
2598 2598 cnt = 0
2599 2599 for child in group.children:
2600 2600 cnt += child.repositories.count()
2601 2601 cnt += children_count(child)
2602 2602 return cnt
2603 2603
2604 2604 return cnt + children_count(self)
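    # Illustrative example (not part of the original diff): a group with 2
    # repositories whose only child group holds 3 repositories returns 5.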
2605 2605
2606 2606 def _recursive_objects(self, include_repos=True):
2607 2607 all_ = []
2608 2608
2609 2609 def _get_members(root_gr):
2610 2610 if include_repos:
2611 2611 for r in root_gr.repositories:
2612 2612 all_.append(r)
2613 2613 childs = root_gr.children.all()
2614 2614 if childs:
2615 2615 for gr in childs:
2616 2616 all_.append(gr)
2617 2617 _get_members(gr)
2618 2618
2619 2619 _get_members(self)
2620 2620 return [self] + all_
2621 2621
2622 2622 def recursive_groups_and_repos(self):
2623 2623 """
2624 2624 Recursively return all groups, with the repositories in those groups
2625 2625 """
2626 2626 return self._recursive_objects()
2627 2627
2628 2628 def recursive_groups(self):
2629 2629 """
2630 2630 Returns all child groups of this group, including children of children
2631 2631 """
2632 2632 return self._recursive_objects(include_repos=False)
2633 2633
2634 2634 def get_new_name(self, group_name):
2635 2635 """
2636 2636 returns new full group name based on parent and new name
2637 2637
2638 2638 :param group_name:
2639 2639 """
2640 2640 path_prefix = (self.parent_group.full_path_splitted if
2641 2641 self.parent_group else [])
2642 2642 return RepoGroup.url_sep().join(path_prefix + [group_name])
2643 2643
2644 2644 def permissions(self, with_admins=True, with_owner=True):
2645 2645 """
2646 2646 Permissions for repository groups
2647 2647 """
2648 2648 _admin_perm = 'group.admin'
2649 2649
2650 2650 owner_row = []
2651 2651 if with_owner:
2652 2652 usr = AttributeDict(self.user.get_dict())
2653 2653 usr.owner_row = True
2654 2654 usr.permission = _admin_perm
2655 2655 owner_row.append(usr)
2656 2656
2657 2657 super_admin_ids = []
2658 2658 super_admin_rows = []
2659 2659 if with_admins:
2660 2660 for usr in User.get_all_super_admins():
2661 2661 super_admin_ids.append(usr.user_id)
2662 2662 # if this admin is also owner, don't double the record
2663 2663 if usr.user_id == owner_row[0].user_id:
2664 2664 owner_row[0].admin_row = True
2665 2665 else:
2666 2666 usr = AttributeDict(usr.get_dict())
2667 2667 usr.admin_row = True
2668 2668 usr.permission = _admin_perm
2669 2669 super_admin_rows.append(usr)
2670 2670
2671 2671 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2672 2672 q = q.options(joinedload(UserRepoGroupToPerm.group),
2673 2673 joinedload(UserRepoGroupToPerm.user),
2674 2674 joinedload(UserRepoGroupToPerm.permission),)
2675 2675
2676 2676 # get owners, admins and their permissions. We re-write the sqlalchemy
2677 2677 # objects into detached AttributeDicts because the sqlalchemy session
2678 2678 # keeps a global reference, so changing one object propagates to all
2679 2679 # others. Otherwise, if an admin is also the owner, setting admin_row
2680 2680 # would change both objects
2681 2681 perm_rows = []
2682 2682 for _usr in q.all():
2683 2683 usr = AttributeDict(_usr.user.get_dict())
2684 2684 # if this user is also owner/admin, mark as duplicate record
2685 2685 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2686 2686 usr.duplicate_perm = True
2687 2687 usr.permission = _usr.permission.permission_name
2688 2688 perm_rows.append(usr)
2689 2689
2690 2690 # sort the perm rows: the 'default' user first, then by
2691 2691 # admin/write/read/none permission, sorted alphabetically within
2692 2692 # each group
2693 2693 perm_rows = sorted(perm_rows, key=display_user_sort)
2694 2694
2695 2695 return super_admin_rows + owner_row + perm_rows
2696 2696
2697 2697 def permission_user_groups(self):
2698 2698 q = UserGroupRepoGroupToPerm.query().filter(
2699 2699 UserGroupRepoGroupToPerm.group == self)
2700 2700 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2701 2701 joinedload(UserGroupRepoGroupToPerm.users_group),
2702 2702 joinedload(UserGroupRepoGroupToPerm.permission),)
2703 2703
2704 2704 perm_rows = []
2705 2705 for _user_group in q.all():
2706 2706 usr = AttributeDict(_user_group.users_group.get_dict())
2707 2707 usr.permission = _user_group.permission.permission_name
2708 2708 perm_rows.append(usr)
2709 2709
2710 2710 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2711 2711 return perm_rows
2712 2712
2713 2713 def get_api_data(self):
2714 2714 """
2715 2715 Common function for generating api data
2716 2716
2717 2717 """
2718 2718 group = self
2719 2719 data = {
2720 2720 'group_id': group.group_id,
2721 2721 'group_name': group.group_name,
2722 2722 'group_description': group.description_safe,
2723 2723 'parent_group': group.parent_group.group_name if group.parent_group else None,
2724 2724 'repositories': [x.repo_name for x in group.repositories],
2725 2725 'owner': group.user.username,
2726 2726 }
2727 2727 return data
2728 2728
2729 2729
2730 2730 class Permission(Base, BaseModel):
2731 2731 __tablename__ = 'permissions'
2732 2732 __table_args__ = (
2733 2733 Index('p_perm_name_idx', 'permission_name'),
2734 2734 base_table_args,
2735 2735 )
2736 2736
2737 2737 PERMS = [
2738 2738 ('hg.admin', _('RhodeCode Super Administrator')),
2739 2739
2740 2740 ('repository.none', _('Repository no access')),
2741 2741 ('repository.read', _('Repository read access')),
2742 2742 ('repository.write', _('Repository write access')),
2743 2743 ('repository.admin', _('Repository admin access')),
2744 2744
2745 2745 ('group.none', _('Repository group no access')),
2746 2746 ('group.read', _('Repository group read access')),
2747 2747 ('group.write', _('Repository group write access')),
2748 2748 ('group.admin', _('Repository group admin access')),
2749 2749
2750 2750 ('usergroup.none', _('User group no access')),
2751 2751 ('usergroup.read', _('User group read access')),
2752 2752 ('usergroup.write', _('User group write access')),
2753 2753 ('usergroup.admin', _('User group admin access')),
2754 2754
2755 2755 ('branch.none', _('Branch no permissions')),
2756 2756 ('branch.merge', _('Branch access by web merge')),
2757 2757 ('branch.push', _('Branch access by push')),
2758 2758 ('branch.push_force', _('Branch access by push with force')),
2759 2759
2760 2760 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2761 2761 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2762 2762
2763 2763 ('hg.usergroup.create.false', _('User Group creation disabled')),
2764 2764 ('hg.usergroup.create.true', _('User Group creation enabled')),
2765 2765
2766 2766 ('hg.create.none', _('Repository creation disabled')),
2767 2767 ('hg.create.repository', _('Repository creation enabled')),
2768 2768 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2769 2769 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2770 2770
2771 2771 ('hg.fork.none', _('Repository forking disabled')),
2772 2772 ('hg.fork.repository', _('Repository forking enabled')),
2773 2773
2774 2774 ('hg.register.none', _('Registration disabled')),
2775 2775 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2776 2776 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2777 2777
2778 2778 ('hg.password_reset.enabled', _('Password reset enabled')),
2779 2779 ('hg.password_reset.hidden', _('Password reset hidden')),
2780 2780 ('hg.password_reset.disabled', _('Password reset disabled')),
2781 2781
2782 2782 ('hg.extern_activate.manual', _('Manual activation of external account')),
2783 2783 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2784 2784
2785 2785 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2786 2786 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2787 2787 ]
2788 2788
2789 2789 # definition of system default permissions for DEFAULT user, created on
2790 2790 # system setup
2791 2791 DEFAULT_USER_PERMISSIONS = [
2792 2792 # object perms
2793 2793 'repository.read',
2794 2794 'group.read',
2795 2795 'usergroup.read',
2796 2796 # branch; for backward compat we need the same behaviour as before, so force-push is allowed
2797 2797 'branch.push_force',
2798 2798 # global
2799 2799 'hg.create.repository',
2800 2800 'hg.repogroup.create.false',
2801 2801 'hg.usergroup.create.false',
2802 2802 'hg.create.write_on_repogroup.true',
2803 2803 'hg.fork.repository',
2804 2804 'hg.register.manual_activate',
2805 2805 'hg.password_reset.enabled',
2806 2806 'hg.extern_activate.auto',
2807 2807 'hg.inherit_default_perms.true',
2808 2808 ]
2809 2809
2810 2810 # Weight defines which permissions are more important;
2811 2811 # the higher the number, the more important the permission.
2813 2813 PERM_WEIGHTS = {
2814 2814 'repository.none': 0,
2815 2815 'repository.read': 1,
2816 2816 'repository.write': 3,
2817 2817 'repository.admin': 4,
2818 2818
2819 2819 'group.none': 0,
2820 2820 'group.read': 1,
2821 2821 'group.write': 3,
2822 2822 'group.admin': 4,
2823 2823
2824 2824 'usergroup.none': 0,
2825 2825 'usergroup.read': 1,
2826 2826 'usergroup.write': 3,
2827 2827 'usergroup.admin': 4,
2828 2828
2829 2829 'branch.none': 0,
2830 2830 'branch.merge': 1,
2831 2831 'branch.push': 3,
2832 2832 'branch.push_force': 4,
2833 2833
2834 2834 'hg.repogroup.create.false': 0,
2835 2835 'hg.repogroup.create.true': 1,
2836 2836
2837 2837 'hg.usergroup.create.false': 0,
2838 2838 'hg.usergroup.create.true': 1,
2839 2839
2840 2840 'hg.fork.none': 0,
2841 2841 'hg.fork.repository': 1,
2842 2842 'hg.create.none': 0,
2843 2843 'hg.create.repository': 1
2844 2844 }
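    # Illustrative use of the weights (not part of the original diff): when
    # several permissions apply, the strongest one can be picked as
    #
    #     max(['repository.read', 'repository.write'],
    #         key=Permission.PERM_WEIGHTS.get)   # -> 'repository.write'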
2845 2845
2846 2846 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2847 2847 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2848 2848 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2849 2849
2850 2850 def __unicode__(self):
2851 2851 return u"<%s('%s:%s')>" % (
2852 2852 self.__class__.__name__, self.permission_id, self.permission_name
2853 2853 )
2854 2854
2855 2855 @classmethod
2856 2856 def get_by_key(cls, key):
2857 2857 return cls.query().filter(cls.permission_name == key).scalar()
2858 2858
2859 2859 @classmethod
2860 2860 def get_default_repo_perms(cls, user_id, repo_id=None):
2861 2861 q = Session().query(UserRepoToPerm, Repository, Permission)\
2862 2862 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2863 2863 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2864 2864 .filter(UserRepoToPerm.user_id == user_id)
2865 2865 if repo_id:
2866 2866 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2867 2867 return q.all()
2868 2868
2869 2869 @classmethod
2870 2870 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
2871 2871 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
2872 2872 .join(
2873 2873 Permission,
2874 2874 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
2875 2875 .join(
2876 2876 UserRepoToPerm,
2877 2877 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
2878 2878 .filter(UserRepoToPerm.user_id == user_id)
2879 2879
2880 2880 if repo_id:
2881 2881 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
2882 2882 return q.order_by(UserToRepoBranchPermission.rule_order).all()
2883 2883
2884 2884 @classmethod
2885 2885 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2886 2886 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2887 2887 .join(
2888 2888 Permission,
2889 2889 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2890 2890 .join(
2891 2891 Repository,
2892 2892 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2893 2893 .join(
2894 2894 UserGroup,
2895 2895 UserGroupRepoToPerm.users_group_id ==
2896 2896 UserGroup.users_group_id)\
2897 2897 .join(
2898 2898 UserGroupMember,
2899 2899 UserGroupRepoToPerm.users_group_id ==
2900 2900 UserGroupMember.users_group_id)\
2901 2901 .filter(
2902 2902 UserGroupMember.user_id == user_id,
2903 2903 UserGroup.users_group_active == true())
2904 2904 if repo_id:
2905 2905 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2906 2906 return q.all()
2907 2907
2908 2908 @classmethod
2909 2909 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
2910 2910 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
2911 2911 .join(
2912 2912 Permission,
2913 2913 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
2914 2914 .join(
2915 2915 UserGroupRepoToPerm,
2916 2916 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
2917 2917 .join(
2918 2918 UserGroup,
2919 2919 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
2920 2920 .join(
2921 2921 UserGroupMember,
2922 2922 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
2923 2923 .filter(
2924 2924 UserGroupMember.user_id == user_id,
2925 2925 UserGroup.users_group_active == true())
2926 2926
2927 2927 if repo_id:
2928 2928 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
2929 2929 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
2930 2930
2931 2931 @classmethod
2932 2932 def get_default_group_perms(cls, user_id, repo_group_id=None):
2933 2933 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2934 2934 .join(
2935 2935 Permission,
2936 2936 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
2937 2937 .join(
2938 2938 RepoGroup,
2939 2939 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
2940 2940 .filter(UserRepoGroupToPerm.user_id == user_id)
2941 2941 if repo_group_id:
2942 2942 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2943 2943 return q.all()
2944 2944
2945 2945 @classmethod
2946 2946 def get_default_group_perms_from_user_group(
2947 2947 cls, user_id, repo_group_id=None):
2948 2948 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2949 2949 .join(
2950 2950 Permission,
2951 2951 UserGroupRepoGroupToPerm.permission_id ==
2952 2952 Permission.permission_id)\
2953 2953 .join(
2954 2954 RepoGroup,
2955 2955 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2956 2956 .join(
2957 2957 UserGroup,
2958 2958 UserGroupRepoGroupToPerm.users_group_id ==
2959 2959 UserGroup.users_group_id)\
2960 2960 .join(
2961 2961 UserGroupMember,
2962 2962 UserGroupRepoGroupToPerm.users_group_id ==
2963 2963 UserGroupMember.users_group_id)\
2964 2964 .filter(
2965 2965 UserGroupMember.user_id == user_id,
2966 2966 UserGroup.users_group_active == true())
2967 2967 if repo_group_id:
2968 2968 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2969 2969 return q.all()
2970 2970
2971 2971 @classmethod
2972 2972 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2973 2973 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2974 2974 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2975 2975 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2976 2976 .filter(UserUserGroupToPerm.user_id == user_id)
2977 2977 if user_group_id:
2978 2978 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2979 2979 return q.all()
2980 2980
2981 2981 @classmethod
2982 2982 def get_default_user_group_perms_from_user_group(
2983 2983 cls, user_id, user_group_id=None):
2984 2984 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2985 2985 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2986 2986 .join(
2987 2987 Permission,
2988 2988 UserGroupUserGroupToPerm.permission_id ==
2989 2989 Permission.permission_id)\
2990 2990 .join(
2991 2991 TargetUserGroup,
2992 2992 UserGroupUserGroupToPerm.target_user_group_id ==
2993 2993 TargetUserGroup.users_group_id)\
2994 2994 .join(
2995 2995 UserGroup,
2996 2996 UserGroupUserGroupToPerm.user_group_id ==
2997 2997 UserGroup.users_group_id)\
2998 2998 .join(
2999 2999 UserGroupMember,
3000 3000 UserGroupUserGroupToPerm.user_group_id ==
3001 3001 UserGroupMember.users_group_id)\
3002 3002 .filter(
3003 3003 UserGroupMember.user_id == user_id,
3004 3004 UserGroup.users_group_active == true())
3005 3005 if user_group_id:
3006 3006 q = q.filter(
3007 3007 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3008 3008
3009 3009 return q.all()
3010 3010
3011 3011
3012 3012 class UserRepoToPerm(Base, BaseModel):
3013 3013 __tablename__ = 'repo_to_perm'
3014 3014 __table_args__ = (
3015 3015 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3016 3016 base_table_args
3017 3017 )
3018 3018
3019 3019 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3020 3020 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3021 3021 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3022 3022 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3023 3023
3024 3024 user = relationship('User')
3025 3025 repository = relationship('Repository')
3026 3026 permission = relationship('Permission')
3027 3027
3028 3028 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete, delete-orphan", lazy='joined')
3029 3029
3030 3030 @classmethod
3031 3031 def create(cls, user, repository, permission):
3032 3032 n = cls()
3033 3033 n.user = user
3034 3034 n.repository = repository
3035 3035 n.permission = permission
3036 3036 Session().add(n)
3037 3037 return n
3038 3038
3039 3039 def __unicode__(self):
3040 3040 return u'<%s => %s >' % (self.user, self.repository)
3041 3041
3042 3042
3043 3043 class UserUserGroupToPerm(Base, BaseModel):
3044 3044 __tablename__ = 'user_user_group_to_perm'
3045 3045 __table_args__ = (
3046 3046 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3047 3047 base_table_args
3048 3048 )
3049 3049
3050 3050 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3051 3051 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3052 3052 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3053 3053 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3054 3054
3055 3055 user = relationship('User')
3056 3056 user_group = relationship('UserGroup')
3057 3057 permission = relationship('Permission')
3058 3058
3059 3059 @classmethod
3060 3060 def create(cls, user, user_group, permission):
3061 3061 n = cls()
3062 3062 n.user = user
3063 3063 n.user_group = user_group
3064 3064 n.permission = permission
3065 3065 Session().add(n)
3066 3066 return n
3067 3067
3068 3068 def __unicode__(self):
3069 3069 return u'<%s => %s >' % (self.user, self.user_group)
3070 3070
3071 3071
3072 3072 class UserToPerm(Base, BaseModel):
3073 3073 __tablename__ = 'user_to_perm'
3074 3074 __table_args__ = (
3075 3075 UniqueConstraint('user_id', 'permission_id'),
3076 3076 base_table_args
3077 3077 )
3078 3078
3079 3079 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3080 3080 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3081 3081 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3082 3082
3083 3083 user = relationship('User')
3084 3084 permission = relationship('Permission', lazy='joined')
3085 3085
3086 3086 def __unicode__(self):
3087 3087 return u'<%s => %s >' % (self.user, self.permission)
3088 3088
3089 3089
3090 3090 class UserGroupRepoToPerm(Base, BaseModel):
3091 3091 __tablename__ = 'users_group_repo_to_perm'
3092 3092 __table_args__ = (
3093 3093 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3094 3094 base_table_args
3095 3095 )
3096 3096
3097 3097 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3098 3098 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3099 3099 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3100 3100 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3101 3101
3102 3102 users_group = relationship('UserGroup')
3103 3103 permission = relationship('Permission')
3104 3104 repository = relationship('Repository')
3105 3105 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3106 3106
3107 3107 @classmethod
3108 3108 def create(cls, users_group, repository, permission):
3109 3109 n = cls()
3110 3110 n.users_group = users_group
3111 3111 n.repository = repository
3112 3112 n.permission = permission
3113 3113 Session().add(n)
3114 3114 return n
3115 3115
3116 3116 def __unicode__(self):
3117 3117 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3118 3118
3119 3119
3120 3120 class UserGroupUserGroupToPerm(Base, BaseModel):
3121 3121 __tablename__ = 'user_group_user_group_to_perm'
3122 3122 __table_args__ = (
3123 3123 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3124 3124 CheckConstraint('target_user_group_id != user_group_id'),
3125 3125 base_table_args
3126 3126 )
3127 3127
3128 3128 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3129 3129 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3130 3130 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3131 3131 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3132 3132
3133 3133 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3134 3134 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3135 3135 permission = relationship('Permission')
3136 3136
3137 3137 @classmethod
3138 3138 def create(cls, target_user_group, user_group, permission):
3139 3139 n = cls()
3140 3140 n.target_user_group = target_user_group
3141 3141 n.user_group = user_group
3142 3142 n.permission = permission
3143 3143 Session().add(n)
3144 3144 return n
3145 3145
3146 3146 def __unicode__(self):
3147 3147 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3148 3148
3149 3149
3150 3150 class UserGroupToPerm(Base, BaseModel):
3151 3151 __tablename__ = 'users_group_to_perm'
3152 3152 __table_args__ = (
3153 3153 UniqueConstraint('users_group_id', 'permission_id',),
3154 3154 base_table_args
3155 3155 )
3156 3156
3157 3157 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3158 3158 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3159 3159 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3160 3160
3161 3161 users_group = relationship('UserGroup')
3162 3162 permission = relationship('Permission')
3163 3163
3164 3164
3165 3165 class UserRepoGroupToPerm(Base, BaseModel):
3166 3166 __tablename__ = 'user_repo_group_to_perm'
3167 3167 __table_args__ = (
3168 3168 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3169 3169 base_table_args
3170 3170 )
3171 3171
3172 3172 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3173 3173 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3174 3174 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3175 3175 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3176 3176
3177 3177 user = relationship('User')
3178 3178 group = relationship('RepoGroup')
3179 3179 permission = relationship('Permission')
3180 3180
3181 3181 @classmethod
3182 3182 def create(cls, user, repository_group, permission):
3183 3183 n = cls()
3184 3184 n.user = user
3185 3185 n.group = repository_group
3186 3186 n.permission = permission
3187 3187 Session().add(n)
3188 3188 return n
3189 3189
3190 3190
3191 3191 class UserGroupRepoGroupToPerm(Base, BaseModel):
3192 3192 __tablename__ = 'users_group_repo_group_to_perm'
3193 3193 __table_args__ = (
3194 3194 UniqueConstraint('users_group_id', 'group_id'),
3195 3195 base_table_args
3196 3196 )
3197 3197
3198 3198 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3199 3199 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3200 3200 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3201 3201 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3202 3202
3203 3203 users_group = relationship('UserGroup')
3204 3204 permission = relationship('Permission')
3205 3205 group = relationship('RepoGroup')
3206 3206
3207 3207 @classmethod
3208 3208 def create(cls, user_group, repository_group, permission):
3209 3209 n = cls()
3210 3210 n.users_group = user_group
3211 3211 n.group = repository_group
3212 3212 n.permission = permission
3213 3213 Session().add(n)
3214 3214 return n
3215 3215
3216 3216 def __unicode__(self):
3217 3217 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3218 3218
3219 3219
3220 3220 class Statistics(Base, BaseModel):
3221 3221 __tablename__ = 'statistics'
3222 3222 __table_args__ = (
3223 3223 base_table_args
3224 3224 )
3225 3225
3226 3226 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3227 3227 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3228 3228 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3229 3229 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3230 3230 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3231 3231 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3232 3232
3233 3233 repository = relationship('Repository', single_parent=True)
3234 3234
3235 3235
3236 3236 class UserFollowing(Base, BaseModel):
3237 3237 __tablename__ = 'user_followings'
3238 3238 __table_args__ = (
3239 3239 UniqueConstraint('user_id', 'follows_repository_id'),
3240 3240 UniqueConstraint('user_id', 'follows_user_id'),
3241 3241 base_table_args
3242 3242 )
3243 3243
3244 3244 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3245 3245 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3246 3246 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3247 3247 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3248 3248 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3249 3249
3250 3250 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3251 3251
3252 3252 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3253 3253 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3254 3254
3255 3255 @classmethod
3256 3256 def get_repo_followers(cls, repo_id):
3257 3257 return cls.query().filter(cls.follows_repo_id == repo_id)
3258 3258
3259 3259
3260 3260 class CacheKey(Base, BaseModel):
3261 3261 __tablename__ = 'cache_invalidation'
3262 3262 __table_args__ = (
3263 3263 UniqueConstraint('cache_key'),
3264 3264 Index('key_idx', 'cache_key'),
3265 3265 base_table_args,
3266 3266 )
3267 3267
3268 3268 CACHE_TYPE_FEED = 'FEED'
3269 3269 CACHE_TYPE_README = 'README'
3270 3270 # namespaces used to register process/thread aware caches
3271 3271 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3272 3272 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3273 3273
3274 3274 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3275 3275 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3276 3276 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3277 3277 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3278 3278
3279 3279 def __init__(self, cache_key, cache_args=''):
3280 3280 self.cache_key = cache_key
3281 3281 self.cache_args = cache_args
3282 3282 self.cache_active = False
3283 3283
3284 3284 def __unicode__(self):
3285 3285 return u"<%s('%s:%s[%s]')>" % (
3286 3286 self.__class__.__name__,
3287 3287 self.cache_id, self.cache_key, self.cache_active)
3288 3288
3289 3289 def _cache_key_partition(self):
3290 3290 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3291 3291 return prefix, repo_name, suffix
3292 3292
3293 3293 def get_prefix(self):
3294 3294 """
3295 3295 Try to extract the prefix from an existing cache key. The key may
3296 3296 consist of prefix, repo_name and suffix.
3297 3297 """
3298 3298 # this returns prefix, repo_name, suffix
3299 3299 return self._cache_key_partition()[0]
3300 3300
3301 3301 def get_suffix(self):
3302 3302 """
3303 3303 Get the suffix that might have been used in _get_cache_key to
3304 3304 generate self.cache_key. Only used for informational purposes
3305 3305 in repo_edit.mako.
3306 3306 """
3307 3307 # prefix, repo_name, suffix
3308 3308 return self._cache_key_partition()[2]
3309 3309
3310 3310 @classmethod
3311 3311 def delete_all_cache(cls):
3312 3312 """
3313 3313 Delete all cache keys from database.
3314 3314 Should only be run when all instances are down and all entries
3315 3315 thus stale.
3316 3316 """
3317 3317 cls.query().delete()
3318 3318 Session().commit()
3319 3319
3320 3320 @classmethod
3321 3321 def set_invalidate(cls, cache_uid, delete=False):
3322 3322 """
3323 3323 Mark all cache entries matching ``cache_uid`` as invalid in the database, or delete them when ``delete`` is set.
3324 3324 """
3325 3325
3326 3326 try:
3327 3327 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3328 3328 if delete:
3329 3329 qry.delete()
3330 3330 log.debug('cache objects deleted for cache args %s',
3331 3331 safe_str(cache_uid))
3332 3332 else:
3333 3333 qry.update({"cache_active": False})
3334 3334 log.debug('cache objects marked as invalid for cache args %s',
3335 3335 safe_str(cache_uid))
3336 3336
3337 3337 Session().commit()
3338 3338 except Exception:
3339 3339 log.exception(
3340 3340 'Cache key invalidation failed for cache args %s',
3341 3341 safe_str(cache_uid))
3342 3342 Session().rollback()
3343 3343
3344 3344 @classmethod
3345 3345 def get_active_cache(cls, cache_key):
3346 3346 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3347 3347 if inv_obj:
3348 3348 return inv_obj
3349 3349 return None
3350 3350
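# Illustrative sketch (assumed usage, not part of the original model code): how a
# caller could invalidate all cached entries of a repository using the namespace
# format defined on CacheKey above. The helper name and calling convention are
# assumptions for this example only.
def _example_invalidate_repo_caches(repo_id, delete=False):
    # build the per-repo namespace, e.g. 'repo_cache:42'
    cache_namespace_uid = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
    # mark matching entries inactive, or remove them entirely when delete=True
    CacheKey.set_invalidate(cache_namespace_uid, delete=delete)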
3351 3351
3352 3352 class ChangesetComment(Base, BaseModel):
3353 3353 __tablename__ = 'changeset_comments'
3354 3354 __table_args__ = (
3355 3355 Index('cc_revision_idx', 'revision'),
3356 3356 base_table_args,
3357 3357 )
3358 3358
3359 3359 COMMENT_OUTDATED = u'comment_outdated'
3360 3360 COMMENT_TYPE_NOTE = u'note'
3361 3361 COMMENT_TYPE_TODO = u'todo'
3362 3362 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3363 3363
3364 3364 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3365 3365 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3366 3366 revision = Column('revision', String(40), nullable=True)
3367 3367 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3368 3368 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3369 3369 line_no = Column('line_no', Unicode(10), nullable=True)
3370 3370 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3371 3371 f_path = Column('f_path', Unicode(1000), nullable=True)
3372 3372 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3373 3373 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3374 3374 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3375 3375 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3376 3376 renderer = Column('renderer', Unicode(64), nullable=True)
3377 3377 display_state = Column('display_state', Unicode(128), nullable=True)
3378 3378
3379 3379 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3380 3380 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3381 3381
3382 3382 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3383 3383 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3384 3384
3385 3385 author = relationship('User', lazy='joined')
3386 3386 repo = relationship('Repository')
3387 3387 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3388 3388 pull_request = relationship('PullRequest', lazy='joined')
3389 3389 pull_request_version = relationship('PullRequestVersion')
3390 3390
3391 3391 @classmethod
3392 3392 def get_users(cls, revision=None, pull_request_id=None):
3393 3393 """
3394 3394 Returns the users associated with this ChangesetComment, i.e. those
3395 3395 who actually commented.
3396 3396
3397 3397 :param revision:
3398 3398 :param pull_request_id:
3399 3399 """
3400 3400 q = Session().query(User)\
3401 3401 .join(ChangesetComment.author)
3402 3402 if revision:
3403 3403 q = q.filter(cls.revision == revision)
3404 3404 elif pull_request_id:
3405 3405 q = q.filter(cls.pull_request_id == pull_request_id)
3406 3406 return q.all()
3407 3407
3408 3408 @classmethod
3409 3409 def get_index_from_version(cls, pr_version, versions):
3410 3410 num_versions = [x.pull_request_version_id for x in versions]
3411 3411 try:
3412 3412 return num_versions.index(pr_version) + 1
3413 3413 except (IndexError, ValueError):
3414 3414 return
3415 3415
3416 3416 @property
3417 3417 def outdated(self):
3418 3418 return self.display_state == self.COMMENT_OUTDATED
3419 3419
3420 3420 def outdated_at_version(self, version):
3421 3421 """
3422 3422 Checks if comment is outdated for given pull request version
3423 3423 """
3424 3424 return self.outdated and self.pull_request_version_id != version
3425 3425
3426 3426 def older_than_version(self, version):
3427 3427 """
3428 3428 Checks if the comment was made in an older version than the given one
3429 3429 """
3430 3430 if version is None:
3431 3431 return self.pull_request_version_id is not None
3432 3432
3433 3433 return self.pull_request_version_id < version
3434 3434
3435 3435 @property
3436 3436 def resolved(self):
3437 3437 return self.resolved_by[0] if self.resolved_by else None
3438 3438
3439 3439 @property
3440 3440 def is_todo(self):
3441 3441 return self.comment_type == self.COMMENT_TYPE_TODO
3442 3442
3443 3443 @property
3444 3444 def is_inline(self):
3445 3445 return self.line_no and self.f_path
3446 3446
3447 3447 def get_index_version(self, versions):
3448 3448 return self.get_index_from_version(
3449 3449 self.pull_request_version_id, versions)
3450 3450
3451 3451 def __repr__(self):
3452 3452 if self.comment_id:
3453 3453 return '<DB:Comment #%s>' % self.comment_id
3454 3454 else:
3455 3455 return '<DB:Comment at %#x>' % id(self)
3456 3456
3457 3457 def get_api_data(self):
3458 3458 comment = self
3459 3459 data = {
3460 3460 'comment_id': comment.comment_id,
3461 3461 'comment_type': comment.comment_type,
3462 3462 'comment_text': comment.text,
3463 3463 'comment_status': comment.status_change,
3464 3464 'comment_f_path': comment.f_path,
3465 3465 'comment_lineno': comment.line_no,
3466 3466 'comment_author': comment.author,
3467 3467 'comment_created_on': comment.created_on
3468 3468 }
3469 3469 return data
3470 3470
3471 3471 def __json__(self):
3472 3472 data = dict()
3473 3473 data.update(self.get_api_data())
3474 3474 return data
3475 3475
3476 3476
3477 3477 class ChangesetStatus(Base, BaseModel):
3478 3478 __tablename__ = 'changeset_statuses'
3479 3479 __table_args__ = (
3480 3480 Index('cs_revision_idx', 'revision'),
3481 3481 Index('cs_version_idx', 'version'),
3482 3482 UniqueConstraint('repo_id', 'revision', 'version'),
3483 3483 base_table_args
3484 3484 )
3485 3485
3486 3486 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3487 3487 STATUS_APPROVED = 'approved'
3488 3488 STATUS_REJECTED = 'rejected'
3489 3489 STATUS_UNDER_REVIEW = 'under_review'
3490 3490
3491 3491 STATUSES = [
3492 3492 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3493 3493 (STATUS_APPROVED, _("Approved")),
3494 3494 (STATUS_REJECTED, _("Rejected")),
3495 3495 (STATUS_UNDER_REVIEW, _("Under Review")),
3496 3496 ]
3497 3497
3498 3498 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3499 3499 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3500 3500 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3501 3501 revision = Column('revision', String(40), nullable=False)
3502 3502 status = Column('status', String(128), nullable=False, default=DEFAULT)
3503 3503 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3504 3504 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3505 3505 version = Column('version', Integer(), nullable=False, default=0)
3506 3506 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3507 3507
3508 3508 author = relationship('User', lazy='joined')
3509 3509 repo = relationship('Repository')
3510 3510 comment = relationship('ChangesetComment', lazy='joined')
3511 3511 pull_request = relationship('PullRequest', lazy='joined')
3512 3512
3513 3513 def __unicode__(self):
3514 3514 return u"<%s('%s[v%s]:%s')>" % (
3515 3515 self.__class__.__name__,
3516 3516 self.status, self.version, self.author
3517 3517 )
3518 3518
3519 3519 @classmethod
3520 3520 def get_status_lbl(cls, value):
3521 3521 return dict(cls.STATUSES).get(value)
3522 3522
3523 3523 @property
3524 3524 def status_lbl(self):
3525 3525 return ChangesetStatus.get_status_lbl(self.status)
3526 3526
3527 3527 def get_api_data(self):
3528 3528 status = self
3529 3529 data = {
3530 3530 'status_id': status.changeset_status_id,
3531 3531 'status': status.status,
3532 3532 }
3533 3533 return data
3534 3534
3535 3535 def __json__(self):
3536 3536 data = dict()
3537 3537 data.update(self.get_api_data())
3538 3538 return data
3539 3539
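# Illustrative sketch (assumed helper, not part of the original module): the
# STATUSES list above doubles as a value -> translated-label mapping, which is
# what get_status_lbl()/status_lbl rely on.
def _example_status_labels():
    # e.g. {'not_reviewed': u'Not Reviewed', 'approved': u'Approved', ...}
    return {value: ChangesetStatus.get_status_lbl(value)
            for value, _label in ChangesetStatus.STATUSES}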
3540 3540
3541 class _SetState(object):
3542 """
3543 Context manager that temporarily switches a pull request into the given
3544 state for sensitive operations such as update or merge.
3545 """
3546
3547 def __init__(self, pull_request, pr_state, back_state=None):
3548 self._pr = pull_request
3549 self._org_state = back_state or pull_request.pull_request_state
3550 self._pr_state = pr_state
3551
3552 def __enter__(self):
3553 log.debug('StateLock: entering set state context, setting state to: `%s`',
3554 self._pr_state)
3555 self._pr.pull_request_state = self._pr_state
3556 Session().add(self._pr)
3557 Session().commit()
3558
3559 def __exit__(self, exc_type, exc_val, exc_tb):
3560 log.debug('StateLock: exiting set state context, setting state to: `%s`',
3561 self._org_state)
3562 self._pr.pull_request_state = self._org_state
3563 Session().add(self._pr)
3564 Session().commit()
3565
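# Illustrative sketch (assumed calling code, not part of the original module):
# wrapping a sensitive operation so that concurrent requests see the pull
# request as busy; the previous state is restored when the block exits.
# `operation` is a hypothetical callable used only for this example; in
# practice callers go through PullRequest.set_state(), defined further below.
def _example_with_pr_state(pull_request, operation):
    with _SetState(pull_request, PullRequest.STATE_UPDATING):
        return operation(pull_request)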
3566
3541 3567 class _PullRequestBase(BaseModel):
3542 3568 """
3543 3569 Common attributes of pull request and version entries.
3544 3570 """
3545 3571
3546 3572 # .status values
3547 3573 STATUS_NEW = u'new'
3548 3574 STATUS_OPEN = u'open'
3549 3575 STATUS_CLOSED = u'closed'
3550 3576
3577 # available states
3578 STATE_CREATING = u'creating'
3579 STATE_UPDATING = u'updating'
3580 STATE_MERGING = u'merging'
3581 STATE_CREATED = u'created'
3582
3551 3583 title = Column('title', Unicode(255), nullable=True)
3552 3584 description = Column(
3553 3585 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3554 3586 nullable=True)
3555 3587 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3556 3588
3557 3589 # new/open/closed status of pull request (not approve/reject/etc)
3558 3590 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3559 3591 created_on = Column(
3560 3592 'created_on', DateTime(timezone=False), nullable=False,
3561 3593 default=datetime.datetime.now)
3562 3594 updated_on = Column(
3563 3595 'updated_on', DateTime(timezone=False), nullable=False,
3564 3596 default=datetime.datetime.now)
3565 3597
3598 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3599
3566 3600 @declared_attr
3567 3601 def user_id(cls):
3568 3602 return Column(
3569 3603 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3570 3604 unique=None)
3571 3605
3572 3606 # 500 revisions max
3573 3607 _revisions = Column(
3574 3608 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3575 3609
3576 3610 @declared_attr
3577 3611 def source_repo_id(cls):
3578 3612 # TODO: dan: rename column to source_repo_id
3579 3613 return Column(
3580 3614 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3581 3615 nullable=False)
3582 3616
3583 3617 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3584 3618
3585 3619 @hybrid_property
3586 3620 def source_ref(self):
3587 3621 return self._source_ref
3588 3622
3589 3623 @source_ref.setter
3590 3624 def source_ref(self, val):
3591 3625 parts = (val or '').split(':')
3592 3626 if len(parts) != 3:
3593 3627 raise ValueError(
3594 3628 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3595 3629 self._source_ref = safe_unicode(val)
3596 3630
3597 3631 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3598 3632
3599 3633 @hybrid_property
3600 3634 def target_ref(self):
3601 3635 return self._target_ref
3602 3636
3603 3637 @target_ref.setter
3604 3638 def target_ref(self, val):
3605 3639 parts = (val or '').split(':')
3606 3640 if len(parts) != 3:
3607 3641 raise ValueError(
3608 3642 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3609 3643 self._target_ref = safe_unicode(val)
3610 3644
3611 3645 @declared_attr
3612 3646 def target_repo_id(cls):
3613 3647 # TODO: dan: rename column to target_repo_id
3614 3648 return Column(
3615 3649 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3616 3650 nullable=False)
3617 3651
3618 3652 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3619 3653
3620 3654 # TODO: dan: rename column to last_merge_source_rev
3621 3655 _last_merge_source_rev = Column(
3622 3656 'last_merge_org_rev', String(40), nullable=True)
3623 3657 # TODO: dan: rename column to last_merge_target_rev
3624 3658 _last_merge_target_rev = Column(
3625 3659 'last_merge_other_rev', String(40), nullable=True)
3626 3660 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3627 3661 merge_rev = Column('merge_rev', String(40), nullable=True)
3628 3662
3629 3663 reviewer_data = Column(
3630 3664 'reviewer_data_json', MutationObj.as_mutable(
3631 3665 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3632 3666
3633 3667 @property
3634 3668 def reviewer_data_json(self):
3635 3669 return json.dumps(self.reviewer_data)
3636 3670
3637 3671 @hybrid_property
3638 3672 def description_safe(self):
3639 3673 from rhodecode.lib import helpers as h
3640 3674 return h.escape(self.description)
3641 3675
3642 3676 @hybrid_property
3643 3677 def revisions(self):
3644 3678 return self._revisions.split(':') if self._revisions else []
3645 3679
3646 3680 @revisions.setter
3647 3681 def revisions(self, val):
3648 3682 self._revisions = ':'.join(val)
3649 3683
3650 3684 @hybrid_property
3651 3685 def last_merge_status(self):
3652 3686 return safe_int(self._last_merge_status)
3653 3687
3654 3688 @last_merge_status.setter
3655 3689 def last_merge_status(self, val):
3656 3690 self._last_merge_status = val
3657 3691
3658 3692 @declared_attr
3659 3693 def author(cls):
3660 3694 return relationship('User', lazy='joined')
3661 3695
3662 3696 @declared_attr
3663 3697 def source_repo(cls):
3664 3698 return relationship(
3665 3699 'Repository',
3666 3700 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3667 3701
3668 3702 @property
3669 3703 def source_ref_parts(self):
3670 3704 return self.unicode_to_reference(self.source_ref)
3671 3705
3672 3706 @declared_attr
3673 3707 def target_repo(cls):
3674 3708 return relationship(
3675 3709 'Repository',
3676 3710 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3677 3711
3678 3712 @property
3679 3713 def target_ref_parts(self):
3680 3714 return self.unicode_to_reference(self.target_ref)
3681 3715
3682 3716 @property
3683 3717 def shadow_merge_ref(self):
3684 3718 return self.unicode_to_reference(self._shadow_merge_ref)
3685 3719
3686 3720 @shadow_merge_ref.setter
3687 3721 def shadow_merge_ref(self, ref):
3688 3722 self._shadow_merge_ref = self.reference_to_unicode(ref)
3689 3723
3690 3724 @staticmethod
3691 3725 def unicode_to_reference(raw):
3692 3726 """
3693 3727 Convert a unicode (or string) to a reference object.
3694 3728 If unicode evaluates to False it returns None.
3695 3729 """
3696 3730 if raw:
3697 3731 refs = raw.split(':')
3698 3732 return Reference(*refs)
3699 3733 else:
3700 3734 return None
3701 3735
3702 3736 @staticmethod
3703 3737 def reference_to_unicode(ref):
3704 3738 """
3705 3739 Convert a reference object to unicode.
3706 3740 If reference is None it returns None.
3707 3741 """
3708 3742 if ref:
3709 3743 return u':'.join(ref)
3710 3744 else:
3711 3745 return None
3712 3746
3713 3747 def get_api_data(self, with_merge_state=True):
3714 3748 from rhodecode.model.pull_request import PullRequestModel
3715 3749
3716 3750 pull_request = self
3717 3751 if with_merge_state:
3718 3752 merge_status = PullRequestModel().merge_status(pull_request)
3719 3753 merge_state = {
3720 3754 'status': merge_status[0],
3721 3755 'message': safe_unicode(merge_status[1]),
3722 3756 }
3723 3757 else:
3724 3758 merge_state = {'status': 'not_available',
3725 3759 'message': 'not_available'}
3726 3760
3727 3761 merge_data = {
3728 3762 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3729 3763 'reference': (
3730 3764 pull_request.shadow_merge_ref._asdict()
3731 3765 if pull_request.shadow_merge_ref else None),
3732 3766 }
3733 3767
3734 3768 data = {
3735 3769 'pull_request_id': pull_request.pull_request_id,
3736 3770 'url': PullRequestModel().get_url(pull_request),
3737 3771 'title': pull_request.title,
3738 3772 'description': pull_request.description,
3739 3773 'status': pull_request.status,
3774 'state': pull_request.pull_request_state,
3740 3775 'created_on': pull_request.created_on,
3741 3776 'updated_on': pull_request.updated_on,
3742 3777 'commit_ids': pull_request.revisions,
3743 3778 'review_status': pull_request.calculated_review_status(),
3744 3779 'mergeable': merge_state,
3745 3780 'source': {
3746 3781 'clone_url': pull_request.source_repo.clone_url(),
3747 3782 'repository': pull_request.source_repo.repo_name,
3748 3783 'reference': {
3749 3784 'name': pull_request.source_ref_parts.name,
3750 3785 'type': pull_request.source_ref_parts.type,
3751 3786 'commit_id': pull_request.source_ref_parts.commit_id,
3752 3787 },
3753 3788 },
3754 3789 'target': {
3755 3790 'clone_url': pull_request.target_repo.clone_url(),
3756 3791 'repository': pull_request.target_repo.repo_name,
3757 3792 'reference': {
3758 3793 'name': pull_request.target_ref_parts.name,
3759 3794 'type': pull_request.target_ref_parts.type,
3760 3795 'commit_id': pull_request.target_ref_parts.commit_id,
3761 3796 },
3762 3797 },
3763 3798 'merge': merge_data,
3764 3799 'author': pull_request.author.get_api_data(include_secrets=False,
3765 3800 details='basic'),
3766 3801 'reviewers': [
3767 3802 {
3768 3803 'user': reviewer.get_api_data(include_secrets=False,
3769 3804 details='basic'),
3770 3805 'reasons': reasons,
3771 3806 'review_status': st[0][1].status if st else 'not_reviewed',
3772 3807 }
3773 3808 for obj, reviewer, reasons, mandatory, st in
3774 3809 pull_request.reviewers_statuses()
3775 3810 ]
3776 3811 }
3777 3812
3778 3813 return data
3779 3814
3815 def set_state(self, pull_request_state, final_state=None):
3816 """
3817 Returns a context manager that switches the pull request into
3818 ``pull_request_state`` and restores the initial state on exit::
3819
3820 with pull_request.set_state(PullRequest.STATE_UPDATING):
3821 pull_request.merge()
3822
3823 :param pull_request_state: temporary state set while the context is active
3824 :param final_state: optional state to set on exit instead of the initial one
3825 """
3826
3827 return _SetState(self, pull_request_state, back_state=final_state)
3828
3780 3829
3781 3830 class PullRequest(Base, _PullRequestBase):
3782 3831 __tablename__ = 'pull_requests'
3783 3832 __table_args__ = (
3784 3833 base_table_args,
3785 3834 )
3786 3835
3787 3836 pull_request_id = Column(
3788 3837 'pull_request_id', Integer(), nullable=False, primary_key=True)
3789 3838
3790 3839 def __repr__(self):
3791 3840 if self.pull_request_id:
3792 3841 return '<DB:PullRequest #%s>' % self.pull_request_id
3793 3842 else:
3794 3843 return '<DB:PullRequest at %#x>' % id(self)
3795 3844
3796 3845 reviewers = relationship('PullRequestReviewers',
3797 3846 cascade="all, delete, delete-orphan")
3798 3847 statuses = relationship('ChangesetStatus',
3799 3848 cascade="all, delete, delete-orphan")
3800 3849 comments = relationship('ChangesetComment',
3801 3850 cascade="all, delete, delete-orphan")
3802 3851 versions = relationship('PullRequestVersion',
3803 3852 cascade="all, delete, delete-orphan",
3804 3853 lazy='dynamic')
3805 3854
3806 3855 @classmethod
3807 3856 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3808 3857 internal_methods=None):
3809 3858
3810 3859 class PullRequestDisplay(object):
3811 3860 """
3812 3861 Special object wrapper for showing PullRequest data via Versions
3813 3862 It mimics PR object as close as possible. This is read only object
3814 3863 just for display
3815 3864 """
3816 3865
3817 3866 def __init__(self, attrs, internal=None):
3818 3867 self.attrs = attrs
3819 3868 # internal have priority over the given ones via attrs
3820 3869 self.internal = internal or ['versions']
3821 3870
3822 3871 def __getattr__(self, item):
3823 3872 if item in self.internal:
3824 3873 return getattr(self, item)
3825 3874 try:
3826 3875 return self.attrs[item]
3827 3876 except KeyError:
3828 3877 raise AttributeError(
3829 3878 '%s object has no attribute %s' % (self, item))
3830 3879
3831 3880 def __repr__(self):
3832 3881 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3833 3882
3834 3883 def versions(self):
3835 3884 return pull_request_obj.versions.order_by(
3836 3885 PullRequestVersion.pull_request_version_id).all()
3837 3886
3838 3887 def is_closed(self):
3839 3888 return pull_request_obj.is_closed()
3840 3889
3841 3890 @property
3842 3891 def pull_request_version_id(self):
3843 3892 return getattr(pull_request_obj, 'pull_request_version_id', None)
3844 3893
3845 3894 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3846 3895
3847 3896 attrs.author = StrictAttributeDict(
3848 3897 pull_request_obj.author.get_api_data())
3849 3898 if pull_request_obj.target_repo:
3850 3899 attrs.target_repo = StrictAttributeDict(
3851 3900 pull_request_obj.target_repo.get_api_data())
3852 3901 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3853 3902
3854 3903 if pull_request_obj.source_repo:
3855 3904 attrs.source_repo = StrictAttributeDict(
3856 3905 pull_request_obj.source_repo.get_api_data())
3857 3906 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3858 3907
3859 3908 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3860 3909 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3861 3910 attrs.revisions = pull_request_obj.revisions
3862 3911
3863 3912 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3864 3913 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3865 3914 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3866 3915
3867 3916 return PullRequestDisplay(attrs, internal=internal_methods)
3868 3917
3869 3918 def is_closed(self):
3870 3919 return self.status == self.STATUS_CLOSED
3871 3920
3872 3921 def __json__(self):
3873 3922 return {
3874 3923 'revisions': self.revisions,
3875 3924 }
3876 3925
3877 3926 def calculated_review_status(self):
3878 3927 from rhodecode.model.changeset_status import ChangesetStatusModel
3879 3928 return ChangesetStatusModel().calculated_review_status(self)
3880 3929
3881 3930 def reviewers_statuses(self):
3882 3931 from rhodecode.model.changeset_status import ChangesetStatusModel
3883 3932 return ChangesetStatusModel().reviewers_statuses(self)
3884 3933
3885 3934 @property
3886 3935 def workspace_id(self):
3887 3936 from rhodecode.model.pull_request import PullRequestModel
3888 3937 return PullRequestModel()._workspace_id(self)
3889 3938
3890 3939 def get_shadow_repo(self):
3891 3940 workspace_id = self.workspace_id
3892 3941 vcs_obj = self.target_repo.scm_instance()
3893 3942 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3894 3943 self.target_repo.repo_id, workspace_id)
3895 3944 if os.path.isdir(shadow_repository_path):
3896 3945 return vcs_obj._get_shadow_instance(shadow_repository_path)
3897 3946
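# Illustrative sketch (assumed usage): the new pull_request_state column is
# also exposed as 'state' in get_api_data(). The pull request id is an
# assumption for this example; query() comes from the shared BaseModel helpers.
def _example_read_pr_state(pull_request_id):
    pr = PullRequest.query().filter(
        PullRequest.pull_request_id == pull_request_id).scalar()
    data = pr.get_api_data(with_merge_state=False)
    return pr.pull_request_state, data['state']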
3898 3947
3899 3948 class PullRequestVersion(Base, _PullRequestBase):
3900 3949 __tablename__ = 'pull_request_versions'
3901 3950 __table_args__ = (
3902 3951 base_table_args,
3903 3952 )
3904 3953
3905 3954 pull_request_version_id = Column(
3906 3955 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3907 3956 pull_request_id = Column(
3908 3957 'pull_request_id', Integer(),
3909 3958 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3910 3959 pull_request = relationship('PullRequest')
3911 3960
3912 3961 def __repr__(self):
3913 3962 if self.pull_request_version_id:
3914 3963 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3915 3964 else:
3916 3965 return '<DB:PullRequestVersion at %#x>' % id(self)
3917 3966
3918 3967 @property
3919 3968 def reviewers(self):
3920 3969 return self.pull_request.reviewers
3921 3970
3922 3971 @property
3923 3972 def versions(self):
3924 3973 return self.pull_request.versions
3925 3974
3926 3975 def is_closed(self):
3927 3976 # calculate from original
3928 3977 return self.pull_request.status == self.STATUS_CLOSED
3929 3978
3930 3979 def calculated_review_status(self):
3931 3980 return self.pull_request.calculated_review_status()
3932 3981
3933 3982 def reviewers_statuses(self):
3934 3983 return self.pull_request.reviewers_statuses()
3935 3984
3936 3985
3937 3986 class PullRequestReviewers(Base, BaseModel):
3938 3987 __tablename__ = 'pull_request_reviewers'
3939 3988 __table_args__ = (
3940 3989 base_table_args,
3941 3990 )
3942 3991
3943 3992 @hybrid_property
3944 3993 def reasons(self):
3945 3994 if not self._reasons:
3946 3995 return []
3947 3996 return self._reasons
3948 3997
3949 3998 @reasons.setter
3950 3999 def reasons(self, val):
3951 4000 val = val or []
3952 4001 if any(not isinstance(x, basestring) for x in val):
3953 4002 raise Exception('invalid reasons type, must be list of strings')
3954 4003 self._reasons = val
3955 4004
3956 4005 pull_requests_reviewers_id = Column(
3957 4006 'pull_requests_reviewers_id', Integer(), nullable=False,
3958 4007 primary_key=True)
3959 4008 pull_request_id = Column(
3960 4009 "pull_request_id", Integer(),
3961 4010 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3962 4011 user_id = Column(
3963 4012 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3964 4013 _reasons = Column(
3965 4014 'reason', MutationList.as_mutable(
3966 4015 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3967 4016
3968 4017 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3969 4018 user = relationship('User')
3970 4019 pull_request = relationship('PullRequest')
3971 4020
3972 4021 rule_data = Column(
3973 4022 'rule_data_json',
3974 4023 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3975 4024
3976 4025 def rule_user_group_data(self):
3977 4026 """
3978 4027 Returns the voting user group rule data for this reviewer
3979 4028 """
3980 4029
3981 4030 if self.rule_data and 'vote_rule' in self.rule_data:
3982 4031 user_group_data = {}
3983 4032 if 'rule_user_group_entry_id' in self.rule_data:
3984 4033 # means a group with voting rules !
3985 4034 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3986 4035 user_group_data['name'] = self.rule_data['rule_name']
3987 4036 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3988 4037
3989 4038 return user_group_data
3990 4039
3991 4040 def __unicode__(self):
3992 4041 return u"<%s('id:%s')>" % (self.__class__.__name__,
3993 4042 self.pull_requests_reviewers_id)
3994 4043
3995 4044
3996 4045 class Notification(Base, BaseModel):
3997 4046 __tablename__ = 'notifications'
3998 4047 __table_args__ = (
3999 4048 Index('notification_type_idx', 'type'),
4000 4049 base_table_args,
4001 4050 )
4002 4051
4003 4052 TYPE_CHANGESET_COMMENT = u'cs_comment'
4004 4053 TYPE_MESSAGE = u'message'
4005 4054 TYPE_MENTION = u'mention'
4006 4055 TYPE_REGISTRATION = u'registration'
4007 4056 TYPE_PULL_REQUEST = u'pull_request'
4008 4057 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4009 4058
4010 4059 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4011 4060 subject = Column('subject', Unicode(512), nullable=True)
4012 4061 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4013 4062 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4014 4063 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4015 4064 type_ = Column('type', Unicode(255))
4016 4065
4017 4066 created_by_user = relationship('User')
4018 4067 notifications_to_users = relationship('UserNotification', lazy='joined',
4019 4068 cascade="all, delete, delete-orphan")
4020 4069
4021 4070 @property
4022 4071 def recipients(self):
4023 4072 return [x.user for x in UserNotification.query()\
4024 4073 .filter(UserNotification.notification == self)\
4025 4074 .order_by(UserNotification.user_id.asc()).all()]
4026 4075
4027 4076 @classmethod
4028 4077 def create(cls, created_by, subject, body, recipients, type_=None):
4029 4078 if type_ is None:
4030 4079 type_ = Notification.TYPE_MESSAGE
4031 4080
4032 4081 notification = cls()
4033 4082 notification.created_by_user = created_by
4034 4083 notification.subject = subject
4035 4084 notification.body = body
4036 4085 notification.type_ = type_
4037 4086 notification.created_on = datetime.datetime.now()
4038 4087
4039 4088 # For each recipient link the created notification to his account
4040 4089 for u in recipients:
4041 4090 assoc = UserNotification()
4042 4091 assoc.user_id = u.user_id
4043 4092 assoc.notification = notification
4044 4093
4045 4094 # if created_by is among the recipients, mark their notification
4046 4095 # as read
4047 4096 if u.user_id == created_by.user_id:
4048 4097 assoc.read = True
4049 4098 Session().add(assoc)
4050 4099
4051 4100 Session().add(notification)
4052 4101
4053 4102 return notification
4054 4103
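# Illustrative sketch (assumed calling code): emitting a plain message
# notification to a list of User objects; committing the session is left to
# the caller, mirroring how Notification.create() above only adds to it.
def _example_notify(sender_user, recipient_users):
    return Notification.create(
        created_by=sender_user, subject=u'demo subject', body=u'demo body',
        recipients=recipient_users, type_=Notification.TYPE_MESSAGE)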
4055 4104
4056 4105 class UserNotification(Base, BaseModel):
4057 4106 __tablename__ = 'user_to_notification'
4058 4107 __table_args__ = (
4059 4108 UniqueConstraint('user_id', 'notification_id'),
4060 4109 base_table_args
4061 4110 )
4062 4111
4063 4112 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4064 4113 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4065 4114 read = Column('read', Boolean, default=False)
4066 4115 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4067 4116
4068 4117 user = relationship('User', lazy="joined")
4069 4118 notification = relationship('Notification', lazy="joined",
4070 4119 order_by=lambda: Notification.created_on.desc(),)
4071 4120
4072 4121 def mark_as_read(self):
4073 4122 self.read = True
4074 4123 Session().add(self)
4075 4124
4076 4125
4077 4126 class Gist(Base, BaseModel):
4078 4127 __tablename__ = 'gists'
4079 4128 __table_args__ = (
4080 4129 Index('g_gist_access_id_idx', 'gist_access_id'),
4081 4130 Index('g_created_on_idx', 'created_on'),
4082 4131 base_table_args
4083 4132 )
4084 4133
4085 4134 GIST_PUBLIC = u'public'
4086 4135 GIST_PRIVATE = u'private'
4087 4136 DEFAULT_FILENAME = u'gistfile1.txt'
4088 4137
4089 4138 ACL_LEVEL_PUBLIC = u'acl_public'
4090 4139 ACL_LEVEL_PRIVATE = u'acl_private'
4091 4140
4092 4141 gist_id = Column('gist_id', Integer(), primary_key=True)
4093 4142 gist_access_id = Column('gist_access_id', Unicode(250))
4094 4143 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4095 4144 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4096 4145 gist_expires = Column('gist_expires', Float(53), nullable=False)
4097 4146 gist_type = Column('gist_type', Unicode(128), nullable=False)
4098 4147 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4099 4148 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4100 4149 acl_level = Column('acl_level', Unicode(128), nullable=True)
4101 4150
4102 4151 owner = relationship('User')
4103 4152
4104 4153 def __repr__(self):
4105 4154 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4106 4155
4107 4156 @hybrid_property
4108 4157 def description_safe(self):
4109 4158 from rhodecode.lib import helpers as h
4110 4159 return h.escape(self.gist_description)
4111 4160
4112 4161 @classmethod
4113 4162 def get_or_404(cls, id_):
4114 4163 from pyramid.httpexceptions import HTTPNotFound
4115 4164
4116 4165 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4117 4166 if not res:
4118 4167 raise HTTPNotFound()
4119 4168 return res
4120 4169
4121 4170 @classmethod
4122 4171 def get_by_access_id(cls, gist_access_id):
4123 4172 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4124 4173
4125 4174 def gist_url(self):
4126 4175 from rhodecode.model.gist import GistModel
4127 4176 return GistModel().get_url(self)
4128 4177
4129 4178 @classmethod
4130 4179 def base_path(cls):
4131 4180 """
4132 4181 Returns the base path where all gists are stored
4133 4182
4134 4183 :param cls:
4135 4184 """
4136 4185 from rhodecode.model.gist import GIST_STORE_LOC
4137 4186 q = Session().query(RhodeCodeUi)\
4138 4187 .filter(RhodeCodeUi.ui_key == URL_SEP)
4139 4188 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4140 4189 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4141 4190
4142 4191 def get_api_data(self):
4143 4192 """
4144 4193 Common function for generating gist related data for API
4145 4194 """
4146 4195 gist = self
4147 4196 data = {
4148 4197 'gist_id': gist.gist_id,
4149 4198 'type': gist.gist_type,
4150 4199 'access_id': gist.gist_access_id,
4151 4200 'description': gist.gist_description,
4152 4201 'url': gist.gist_url(),
4153 4202 'expires': gist.gist_expires,
4154 4203 'created_on': gist.created_on,
4155 4204 'modified_at': gist.modified_at,
4156 4205 'content': None,
4157 4206 'acl_level': gist.acl_level,
4158 4207 }
4159 4208 return data
4160 4209
4161 4210 def __json__(self):
4162 4211 data = dict(
4163 4212 )
4164 4213 data.update(self.get_api_data())
4165 4214 return data
4166 4215 # SCM functions
4167 4216
4168 4217 def scm_instance(self, **kwargs):
4169 4218 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4170 4219 return get_vcs_instance(
4171 4220 repo_path=safe_str(full_repo_path), create=False)
4172 4221
4173 4222
4174 4223 class ExternalIdentity(Base, BaseModel):
4175 4224 __tablename__ = 'external_identities'
4176 4225 __table_args__ = (
4177 4226 Index('local_user_id_idx', 'local_user_id'),
4178 4227 Index('external_id_idx', 'external_id'),
4179 4228 base_table_args
4180 4229 )
4181 4230
4182 4231 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4183 4232 external_username = Column('external_username', Unicode(1024), default=u'')
4184 4233 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4185 4234 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4186 4235 access_token = Column('access_token', String(1024), default=u'')
4187 4236 alt_token = Column('alt_token', String(1024), default=u'')
4188 4237 token_secret = Column('token_secret', String(1024), default=u'')
4189 4238
4190 4239 @classmethod
4191 4240 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4192 4241 """
4193 4242 Returns ExternalIdentity instance based on search params
4194 4243
4195 4244 :param external_id:
4196 4245 :param provider_name:
4197 4246 :return: ExternalIdentity
4198 4247 """
4199 4248 query = cls.query()
4200 4249 query = query.filter(cls.external_id == external_id)
4201 4250 query = query.filter(cls.provider_name == provider_name)
4202 4251 if local_user_id:
4203 4252 query = query.filter(cls.local_user_id == local_user_id)
4204 4253 return query.first()
4205 4254
4206 4255 @classmethod
4207 4256 def user_by_external_id_and_provider(cls, external_id, provider_name):
4208 4257 """
4209 4258 Returns User instance based on search params
4210 4259
4211 4260 :param external_id:
4212 4261 :param provider_name:
4213 4262 :return: User
4214 4263 """
4215 4264 query = User.query()
4216 4265 query = query.filter(cls.external_id == external_id)
4217 4266 query = query.filter(cls.provider_name == provider_name)
4218 4267 query = query.filter(User.user_id == cls.local_user_id)
4219 4268 return query.first()
4220 4269
4221 4270 @classmethod
4222 4271 def by_local_user_id(cls, local_user_id):
4223 4272 """
4224 4273 Returns all tokens for user
4225 4274
4226 4275 :param local_user_id:
4227 4276 :return: ExternalIdentity
4228 4277 """
4229 4278 query = cls.query()
4230 4279 query = query.filter(cls.local_user_id == local_user_id)
4231 4280 return query
4232 4281
4233 4282 @classmethod
4234 4283 def load_provider_plugin(cls, plugin_id):
4235 4284 from rhodecode.authentication.base import loadplugin
4236 4285 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4237 4286 auth_plugin = loadplugin(_plugin_id)
4238 4287 return auth_plugin
4239 4288
4240 4289
4241 4290 class Integration(Base, BaseModel):
4242 4291 __tablename__ = 'integrations'
4243 4292 __table_args__ = (
4244 4293 base_table_args
4245 4294 )
4246 4295
4247 4296 integration_id = Column('integration_id', Integer(), primary_key=True)
4248 4297 integration_type = Column('integration_type', String(255))
4249 4298 enabled = Column('enabled', Boolean(), nullable=False)
4250 4299 name = Column('name', String(255), nullable=False)
4251 4300 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4252 4301 default=False)
4253 4302
4254 4303 settings = Column(
4255 4304 'settings_json', MutationObj.as_mutable(
4256 4305 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4257 4306 repo_id = Column(
4258 4307 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4259 4308 nullable=True, unique=None, default=None)
4260 4309 repo = relationship('Repository', lazy='joined')
4261 4310
4262 4311 repo_group_id = Column(
4263 4312 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4264 4313 nullable=True, unique=None, default=None)
4265 4314 repo_group = relationship('RepoGroup', lazy='joined')
4266 4315
4267 4316 @property
4268 4317 def scope(self):
4269 4318 if self.repo:
4270 4319 return repr(self.repo)
4271 4320 if self.repo_group:
4272 4321 if self.child_repos_only:
4273 4322 return repr(self.repo_group) + ' (child repos only)'
4274 4323 else:
4275 4324 return repr(self.repo_group) + ' (recursive)'
4276 4325 if self.child_repos_only:
4277 4326 return 'root_repos'
4278 4327 return 'global'
4279 4328
4280 4329 def __repr__(self):
4281 4330 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4282 4331
4283 4332
4284 4333 class RepoReviewRuleUser(Base, BaseModel):
4285 4334 __tablename__ = 'repo_review_rules_users'
4286 4335 __table_args__ = (
4287 4336 base_table_args
4288 4337 )
4289 4338
4290 4339 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4291 4340 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4292 4341 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4293 4342 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4294 4343 user = relationship('User')
4295 4344
4296 4345 def rule_data(self):
4297 4346 return {
4298 4347 'mandatory': self.mandatory
4299 4348 }
4300 4349
4301 4350
4302 4351 class RepoReviewRuleUserGroup(Base, BaseModel):
4303 4352 __tablename__ = 'repo_review_rules_users_groups'
4304 4353 __table_args__ = (
4305 4354 base_table_args
4306 4355 )
4307 4356
4308 4357 VOTE_RULE_ALL = -1
4309 4358
4310 4359 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4311 4360 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4312 4361 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4313 4362 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4314 4363 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4315 4364 users_group = relationship('UserGroup')
4316 4365
4317 4366 def rule_data(self):
4318 4367 return {
4319 4368 'mandatory': self.mandatory,
4320 4369 'vote_rule': self.vote_rule
4321 4370 }
4322 4371
4323 4372 @property
4324 4373 def vote_rule_label(self):
4325 4374 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4326 4375 return 'all must vote'
4327 4376 else:
4328 4377 return 'min. vote {}'.format(self.vote_rule)
4329 4378
4330 4379
4331 4380 class RepoReviewRule(Base, BaseModel):
4332 4381 __tablename__ = 'repo_review_rules'
4333 4382 __table_args__ = (
4334 4383 base_table_args
4335 4384 )
4336 4385
4337 4386 repo_review_rule_id = Column(
4338 4387 'repo_review_rule_id', Integer(), primary_key=True)
4339 4388 repo_id = Column(
4340 4389 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4341 4390 repo = relationship('Repository', backref='review_rules')
4342 4391
4343 4392 review_rule_name = Column('review_rule_name', String(255))
4344 4393 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4345 4394 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4346 4395 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4347 4396
4348 4397 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4349 4398 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4350 4399 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4351 4400 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4352 4401
4353 4402 rule_users = relationship('RepoReviewRuleUser')
4354 4403 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4355 4404
4356 4405 def _validate_pattern(self, value):
4357 4406 re.compile('^' + glob2re(value) + '$')
4358 4407
4359 4408 @hybrid_property
4360 4409 def source_branch_pattern(self):
4361 4410 return self._branch_pattern or '*'
4362 4411
4363 4412 @source_branch_pattern.setter
4364 4413 def source_branch_pattern(self, value):
4365 4414 self._validate_pattern(value)
4366 4415 self._branch_pattern = value or '*'
4367 4416
4368 4417 @hybrid_property
4369 4418 def target_branch_pattern(self):
4370 4419 return self._target_branch_pattern or '*'
4371 4420
4372 4421 @target_branch_pattern.setter
4373 4422 def target_branch_pattern(self, value):
4374 4423 self._validate_pattern(value)
4375 4424 self._target_branch_pattern = value or '*'
4376 4425
4377 4426 @hybrid_property
4378 4427 def file_pattern(self):
4379 4428 return self._file_pattern or '*'
4380 4429
4381 4430 @file_pattern.setter
4382 4431 def file_pattern(self, value):
4383 4432 self._validate_pattern(value)
4384 4433 self._file_pattern = value or '*'
4385 4434
4386 4435 def matches(self, source_branch, target_branch, files_changed):
4387 4436 """
4388 4437 Check if this review rule matches a branch/files in a pull request
4389 4438
4390 4439 :param source_branch: source branch name for the commit
4391 4440 :param target_branch: target branch name for the commit
4392 4441 :param files_changed: list of file paths changed in the pull request
4393 4442 """
4394 4443
4395 4444 source_branch = source_branch or ''
4396 4445 target_branch = target_branch or ''
4397 4446 files_changed = files_changed or []
4398 4447
4399 4448 branch_matches = True
4400 4449 if source_branch or target_branch:
4401 4450 if self.source_branch_pattern == '*':
4402 4451 source_branch_match = True
4403 4452 else:
4404 4453 if self.source_branch_pattern.startswith('re:'):
4405 4454 source_pattern = self.source_branch_pattern[3:]
4406 4455 else:
4407 4456 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4408 4457 source_branch_regex = re.compile(source_pattern)
4409 4458 source_branch_match = bool(source_branch_regex.search(source_branch))
4410 4459 if self.target_branch_pattern == '*':
4411 4460 target_branch_match = True
4412 4461 else:
4413 4462 if self.target_branch_pattern.startswith('re:'):
4414 4463 target_pattern = self.target_branch_pattern[3:]
4415 4464 else:
4416 4465 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4417 4466 target_branch_regex = re.compile(target_pattern)
4418 4467 target_branch_match = bool(target_branch_regex.search(target_branch))
4419 4468
4420 4469 branch_matches = source_branch_match and target_branch_match
4421 4470
4422 4471 files_matches = True
4423 4472 if self.file_pattern != '*':
4424 4473 files_matches = False
4425 4474 if self.file_pattern.startswith('re:'):
4426 4475 file_pattern = self.file_pattern[3:]
4427 4476 else:
4428 4477 file_pattern = glob2re(self.file_pattern)
4429 4478 file_regex = re.compile(file_pattern)
4430 4479 for filename in files_changed:
4431 4480 if file_regex.search(filename):
4432 4481 files_matches = True
4433 4482 break
4434 4483
4435 4484 return branch_matches and files_matches
4436 4485
4437 4486 @property
4438 4487 def review_users(self):
4439 4488 """ Returns the users which this rule applies to """
4440 4489
4441 4490 users = collections.OrderedDict()
4442 4491
4443 4492 for rule_user in self.rule_users:
4444 4493 if rule_user.user.active:
4445 4494 if rule_user.user not in users:
4446 4495 users[rule_user.user.username] = {
4447 4496 'user': rule_user.user,
4448 4497 'source': 'user',
4449 4498 'source_data': {},
4450 4499 'data': rule_user.rule_data()
4451 4500 }
4452 4501
4453 4502 for rule_user_group in self.rule_user_groups:
4454 4503 source_data = {
4455 4504 'user_group_id': rule_user_group.users_group.users_group_id,
4456 4505 'name': rule_user_group.users_group.users_group_name,
4457 4506 'members': len(rule_user_group.users_group.members)
4458 4507 }
4459 4508 for member in rule_user_group.users_group.members:
4460 4509 if member.user.active:
4461 4510 key = member.user.username
4462 4511 if key in users:
4463 4512 # skip this member, as we already have them;
4464 4513 # this prevents duplicates coming from multiple groups
4465 4514 # from overriding the "first" matched user
4466 4515 continue
4467 4516
4468 4517 users[key] = {
4469 4518 'user': member.user,
4470 4519 'source': 'user_group',
4471 4520 'source_data': source_data,
4472 4521 'data': rule_user_group.rule_data()
4473 4522 }
4474 4523
4475 4524 return users
4476 4525
4477 4526 def user_group_vote_rule(self, user_id):
4478 4527
4479 4528 rules = []
4480 4529 if not self.rule_user_groups:
4481 4530 return rules
4482 4531
4483 4532 for user_group in self.rule_user_groups:
4484 4533 user_group_members = [x.user_id for x in user_group.users_group.members]
4485 4534 if user_id in user_group_members:
4486 4535 rules.append(user_group)
4487 4536 return rules
4488 4537
4489 4538 def __repr__(self):
4490 4539 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4491 4540 self.repo_review_rule_id, self.repo)
4492 4541
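# Illustrative sketch (assumed values, not part of the original module): how
# RepoReviewRule.matches() combines glob patterns (translated via glob2re) with
# explicit 're:'-prefixed regexes; the rule instance is assumed to exist.
def _example_review_rule_match(rule):
    rule.source_branch_pattern = 'feature/*'               # glob pattern
    rule.target_branch_pattern = 're:^(default|stable)$'   # explicit regex
    rule.file_pattern = '*.py'
    return rule.matches(
        source_branch='feature/login',
        target_branch='stable',
        files_changed=['rhodecode/model/db.py'])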
4493 4542
4494 4543 class ScheduleEntry(Base, BaseModel):
4495 4544 __tablename__ = 'schedule_entries'
4496 4545 __table_args__ = (
4497 4546 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4498 4547 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4499 4548 base_table_args,
4500 4549 )
4501 4550
4502 4551 schedule_types = ['crontab', 'timedelta', 'integer']
4503 4552 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4504 4553
4505 4554 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4506 4555 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4507 4556 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4508 4557
4509 4558 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4510 4559 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4511 4560
4512 4561 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4513 4562 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4514 4563
4515 4564 # task
4516 4565 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4517 4566 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4518 4567 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4519 4568 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4520 4569
4521 4570 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4522 4571 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4523 4572
4524 4573 @hybrid_property
4525 4574 def schedule_type(self):
4526 4575 return self._schedule_type
4527 4576
4528 4577 @schedule_type.setter
4529 4578 def schedule_type(self, val):
4530 4579 if val not in self.schedule_types:
4531 4580 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4532 4581 self.schedule_types, val))
4533 4582
4534 4583 self._schedule_type = val
4535 4584
4536 4585 @classmethod
4537 4586 def get_uid(cls, obj):
4538 4587 args = obj.task_args
4539 4588 kwargs = obj.task_kwargs
4540 4589 if isinstance(args, JsonRaw):
4541 4590 try:
4542 4591 args = json.loads(args)
4543 4592 except ValueError:
4544 4593 args = tuple()
4545 4594
4546 4595 if isinstance(kwargs, JsonRaw):
4547 4596 try:
4548 4597 kwargs = json.loads(kwargs)
4549 4598 except ValueError:
4550 4599 kwargs = dict()
4551 4600
4552 4601 dot_notation = obj.task_dot_notation
4553 4602 val = '.'.join(map(safe_str, [
4554 4603 sorted(dot_notation), args, sorted(kwargs.items())]))
4555 4604 return hashlib.sha1(val).hexdigest()
4556 4605
4557 4606 @classmethod
4558 4607 def get_by_schedule_name(cls, schedule_name):
4559 4608 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4560 4609
4561 4610 @classmethod
4562 4611 def get_by_schedule_id(cls, schedule_id):
4563 4612 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4564 4613
4565 4614 @property
4566 4615 def task(self):
4567 4616 return self.task_dot_notation
4568 4617
4569 4618 @property
4570 4619 def schedule(self):
4571 4620 from rhodecode.lib.celerylib.utils import raw_2_schedule
4572 4621 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4573 4622 return schedule
4574 4623
4575 4624 @property
4576 4625 def args(self):
4577 4626 try:
4578 4627 return list(self.task_args or [])
4579 4628 except ValueError:
4580 4629 return list()
4581 4630
4582 4631 @property
4583 4632 def kwargs(self):
4584 4633 try:
4585 4634 return dict(self.task_kwargs or {})
4586 4635 except ValueError:
4587 4636 return dict()
4588 4637
4589 4638 def _as_raw(self, val):
4590 4639 if hasattr(val, 'de_coerce'):
4591 4640 val = val.de_coerce()
4592 4641 if val:
4593 4642 val = json.dumps(val)
4594 4643
4595 4644 return val
4596 4645
4597 4646 @property
4598 4647 def schedule_definition_raw(self):
4599 4648 return self._as_raw(self.schedule_definition)
4600 4649
4601 4650 @property
4602 4651 def args_raw(self):
4603 4652 return self._as_raw(self.task_args)
4604 4653
4605 4654 @property
4606 4655 def kwargs_raw(self):
4607 4656 return self._as_raw(self.task_kwargs)
4608 4657
4609 4658 def __repr__(self):
4610 4659 return '<DB:ScheduleEntry({}:{})>'.format(
4611 4660 self.schedule_entry_id, self.schedule_name)
4612 4661
4613 4662
4614 4663 @event.listens_for(ScheduleEntry, 'before_update')
4615 4664 def update_task_uid(mapper, connection, target):
4616 4665 target.task_uid = ScheduleEntry.get_uid(target)
4617 4666
4618 4667
4619 4668 @event.listens_for(ScheduleEntry, 'before_insert')
4620 4669 def set_task_uid(mapper, connection, target):
4621 4670 target.task_uid = ScheduleEntry.get_uid(target)
4622 4671
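# Hedged usage sketch (all field values below are illustrative, and
# 'myapp.tasks.cleanup' is a hypothetical task path, not part of this
# changeset): the schedule_type setter validates the type against
# schedule_types, and the before_insert listener above fills task_uid
# from the task definition when the entry is flushed.
entry = ScheduleEntry()
entry.schedule_name = 'nightly-maintenance'
entry.schedule_type = 'crontab'                 # must be one of schedule_types
entry.schedule_definition = {'minute': '0', 'hour': '3'}
entry.task_dot_notation = 'myapp.tasks.cleanup'
entry.task_args = []
entry.task_kwargs = {}
Session().add(entry)
Session().commit()                              # set_task_uid() computes task_uid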
4623 4672
4624 4673 class _BaseBranchPerms(BaseModel):
4625 4674 @classmethod
4626 4675 def compute_hash(cls, value):
4627 4676 return sha1_safe(value)
4628 4677
4629 4678 @hybrid_property
4630 4679 def branch_pattern(self):
4631 4680 return self._branch_pattern or '*'
4632 4681
4633 4682 @hybrid_property
4634 4683 def branch_hash(self):
4635 4684 return self._branch_hash
4636 4685
4637 4686 def _validate_glob(self, value):
4638 4687 re.compile('^' + glob2re(value) + '$')
4639 4688
4640 4689 @branch_pattern.setter
4641 4690 def branch_pattern(self, value):
4642 4691 self._validate_glob(value)
4643 4692 self._branch_pattern = value or '*'
4644 4693 # set the Hash when setting the branch pattern
4645 4694 self._branch_hash = self.compute_hash(self._branch_pattern)
4646 4695
4647 4696 def matches(self, branch):
4648 4697 """
4649 4698 Check if the given branch matches this entry
4650 4699
4651 4700 :param branch: branch name for the commit
4652 4701 """
4653 4702
4654 4703 branch = branch or ''
4655 4704
4656 4705 branch_matches = True
4657 4706 if branch:
4658 4707 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
4659 4708 branch_matches = bool(branch_regex.search(branch))
4660 4709
4661 4710 return branch_matches
4662 4711
4663 4712
4664 4713 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
4665 4714 __tablename__ = 'user_to_repo_branch_permissions'
4666 4715 __table_args__ = (
4667 4716 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4668 4717 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4669 4718 )
4670 4719
4671 4720 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
4672 4721
4673 4722 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
4674 4723 repo = relationship('Repository', backref='user_branch_perms')
4675 4724
4676 4725 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
4677 4726 permission = relationship('Permission')
4678 4727
4679 4728 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
4680 4729 user_repo_to_perm = relationship('UserRepoToPerm')
4681 4730
4682 4731 rule_order = Column('rule_order', Integer(), nullable=False)
4683 4732 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
4684 4733 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
4685 4734
4686 4735 def __unicode__(self):
4687 4736 return u'<UserBranchPermission(%s => %r)>' % (
4688 4737 self.user_repo_to_perm, self.branch_pattern)
4689 4738
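# Minimal usage sketch (pattern and branch names are illustrative): how the
# glob-based matches() check inherited from _BaseBranchPerms behaves, assuming
# glob2re() keeps its shell-style '*' semantics.
rule = UserToRepoBranchPermission()
rule.branch_pattern = 'release-*'   # setter validates the glob and stores its hash
assert rule.matches('release-1.2')  # pattern matches the branch name
assert not rule.matches('default')  # a different branch does not match
assert rule.matches('')             # empty branch name skips the check entirely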
4690 4739
4691 4740 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
4692 4741 __tablename__ = 'user_group_to_repo_branch_permissions'
4693 4742 __table_args__ = (
4694 4743 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4695 4744 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4696 4745 )
4697 4746
4698 4747 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
4699 4748
4700 4749 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
4701 4750 repo = relationship('Repository', backref='user_group_branch_perms')
4702 4751
4703 4752 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
4704 4753 permission = relationship('Permission')
4705 4754
4706 4755 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
4707 4756 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
4708 4757
4709 4758 rule_order = Column('rule_order', Integer(), nullable=False)
4710 4759 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
4711 4760 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
4712 4761
4713 4762 def __unicode__(self):
4714 4763 return u'<UserBranchPermission(%s => %r)>' % (
4715 4764 self.user_group_repo_to_perm, self.branch_pattern)
4716 4765
4717 4766
4718 4767 class DbMigrateVersion(Base, BaseModel):
4719 4768 __tablename__ = 'db_migrate_version'
4720 4769 __table_args__ = (
4721 4770 base_table_args,
4722 4771 )
4723 4772
4724 4773 repository_id = Column('repository_id', String(250), primary_key=True)
4725 4774 repository_path = Column('repository_path', Text)
4726 4775 version = Column('version', Integer)
4727 4776
4728 4777 @classmethod
4729 4778 def set_version(cls, version):
4730 4779 """
4731 4780 Helper for forcing a different version, usually for debugging purposes via ishell.
4732 4781 """
4733 4782 ver = DbMigrateVersion.query().first()
4734 4783 ver.version = version
4735 4784 Session().commit()
4736 4785
4737 4786
4738 4787 class DbSession(Base, BaseModel):
4739 4788 __tablename__ = 'db_session'
4740 4789 __table_args__ = (
4741 4790 base_table_args,
4742 4791 )
4743 4792
4744 4793 def __repr__(self):
4745 4794 return '<DB:DbSession({})>'.format(self.id)
4746 4795
4747 4796 id = Column('id', Integer())
4748 4797 namespace = Column('namespace', String(255), primary_key=True)
4749 4798 accessed = Column('accessed', DateTime, nullable=False)
4750 4799 created = Column('created', DateTime, nullable=False)
4751 4800 data = Column('data', PickleType, nullable=False)
@@ -1,1696 +1,1706 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid.threadlocal import get_current_request
34 34
35 35 from rhodecode import events
36 36 from rhodecode.translation import lazy_ugettext
37 37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 38 from rhodecode.lib import audit_logger
39 39 from rhodecode.lib.compat import OrderedDict
40 40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 41 from rhodecode.lib.markup_renderer import (
42 42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 44 from rhodecode.lib.vcs.backends.base import (
45 45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 47 from rhodecode.lib.vcs.exceptions import (
48 48 CommitDoesNotExistError, EmptyRepositoryError)
49 49 from rhodecode.model import BaseModel
50 50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 51 from rhodecode.model.comment import CommentsModel
52 52 from rhodecode.model.db import (
53 53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 55 from rhodecode.model.meta import Session
56 56 from rhodecode.model.notification import NotificationModel, \
57 57 EmailNotificationModel
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64
65 65 # Data structure to hold the response data when updating commits during a pull
66 66 # request update.
67 67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 68 'executed', 'reason', 'new', 'old', 'changes',
69 69 'source_changed', 'target_changed'])
70 70
71 71
72 72 class PullRequestModel(BaseModel):
73 73
74 74 cls = PullRequest
75 75
76 76 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
77 77
78 78 UPDATE_STATUS_MESSAGES = {
79 79 UpdateFailureReason.NONE: lazy_ugettext(
80 80 'Pull request update successful.'),
81 81 UpdateFailureReason.UNKNOWN: lazy_ugettext(
82 82 'Pull request update failed because of an unknown error.'),
83 83 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
84 84 'No update needed because the source and target have not changed.'),
85 85 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
86 86 'Pull request cannot be updated because the reference type is '
87 87 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
88 88 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
89 89 'This pull request cannot be updated because the target '
90 90 'reference is missing.'),
91 91 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
92 92 'This pull request cannot be updated because the source '
93 93 'reference is missing.'),
94 94 }
95 95 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
96 96 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
97 97
98 98 def __get_pull_request(self, pull_request):
99 99 return self._get_instance((
100 100 PullRequest, PullRequestVersion), pull_request)
101 101
102 102 def _check_perms(self, perms, pull_request, user, api=False):
103 103 if not api:
104 104 return h.HasRepoPermissionAny(*perms)(
105 105 user=user, repo_name=pull_request.target_repo.repo_name)
106 106 else:
107 107 return h.HasRepoPermissionAnyApi(*perms)(
108 108 user=user, repo_name=pull_request.target_repo.repo_name)
109 109
110 110 def check_user_read(self, pull_request, user, api=False):
111 111 _perms = ('repository.admin', 'repository.write', 'repository.read',)
112 112 return self._check_perms(_perms, pull_request, user, api)
113 113
114 114 def check_user_merge(self, pull_request, user, api=False):
115 115 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
116 116 return self._check_perms(_perms, pull_request, user, api)
117 117
118 118 def check_user_update(self, pull_request, user, api=False):
119 119 owner = user.user_id == pull_request.user_id
120 120 return self.check_user_merge(pull_request, user, api) or owner
121 121
122 122 def check_user_delete(self, pull_request, user):
123 123 owner = user.user_id == pull_request.user_id
124 124 _perms = ('repository.admin',)
125 125 return self._check_perms(_perms, pull_request, user) or owner
126 126
127 127 def check_user_change_status(self, pull_request, user, api=False):
128 128 reviewer = user.user_id in [x.user_id for x in
129 129 pull_request.reviewers]
130 130 return self.check_user_update(pull_request, user, api) or reviewer
131 131
132 132 def check_user_comment(self, pull_request, user):
133 133 owner = user.user_id == pull_request.user_id
134 134 return self.check_user_read(pull_request, user) or owner
135 135
136 136 def get(self, pull_request):
137 137 return self.__get_pull_request(pull_request)
138 138
139 139 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
140 140 opened_by=None, order_by=None,
141 order_dir='desc'):
141 order_dir='desc', only_created=True):
142 142 repo = None
143 143 if repo_name:
144 144 repo = self._get_repo(repo_name)
145 145
146 146 q = PullRequest.query()
147 147
148 148 # source or target
149 149 if repo and source:
150 150 q = q.filter(PullRequest.source_repo == repo)
151 151 elif repo:
152 152 q = q.filter(PullRequest.target_repo == repo)
153 153
154 154 # closed,opened
155 155 if statuses:
156 156 q = q.filter(PullRequest.status.in_(statuses))
157 157
158 158 # opened by filter
159 159 if opened_by:
160 160 q = q.filter(PullRequest.user_id.in_(opened_by))
161 161
162 # only get those that are in "created" state
163 if only_created:
164 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
165
162 166 if order_by:
163 167 order_map = {
164 168 'name_raw': PullRequest.pull_request_id,
165 169 'title': PullRequest.title,
166 170 'updated_on_raw': PullRequest.updated_on,
167 171 'target_repo': PullRequest.target_repo_id
168 172 }
169 173 if order_dir == 'asc':
170 174 q = q.order_by(order_map[order_by].asc())
171 175 else:
172 176 q = q.order_by(order_map[order_by].desc())
173 177
174 178 return q
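# Usage sketch for the new `only_created` filter (repo name is illustrative):
# the list/count helpers built on this query now hide pull requests that are
# still in the 'creating' phase, i.e. whose initial merge simulation has not
# finished yet.
visible_prs = PullRequestModel().get_all('illustrative/repo')
all_prs = PullRequestModel()._prepare_get_all_query(
    'illustrative/repo', only_created=False).all()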
175 179
176 180 def count_all(self, repo_name, source=False, statuses=None,
177 181 opened_by=None):
178 182 """
179 183 Count the number of pull requests for a specific repository.
180 184
181 185 :param repo_name: target or source repo
182 186 :param source: boolean flag to specify if repo_name refers to source
183 187 :param statuses: list of pull request statuses
184 188 :param opened_by: author user of the pull request
185 189 :returns: int number of pull requests
186 190 """
187 191 q = self._prepare_get_all_query(
188 192 repo_name, source=source, statuses=statuses, opened_by=opened_by)
189 193
190 194 return q.count()
191 195
192 196 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
193 197 offset=0, length=None, order_by=None, order_dir='desc'):
194 198 """
195 199 Get all pull requests for a specific repository.
196 200
197 201 :param repo_name: target or source repo
198 202 :param source: boolean flag to specify if repo_name refers to source
199 203 :param statuses: list of pull request statuses
200 204 :param opened_by: author user of the pull request
201 205 :param offset: pagination offset
202 206 :param length: length of returned list
203 207 :param order_by: order of the returned list
204 208 :param order_dir: 'asc' or 'desc' ordering direction
205 209 :returns: list of pull requests
206 210 """
207 211 q = self._prepare_get_all_query(
208 212 repo_name, source=source, statuses=statuses, opened_by=opened_by,
209 213 order_by=order_by, order_dir=order_dir)
210 214
211 215 if length:
212 216 pull_requests = q.limit(length).offset(offset).all()
213 217 else:
214 218 pull_requests = q.all()
215 219
216 220 return pull_requests
217 221
218 222 def count_awaiting_review(self, repo_name, source=False, statuses=None,
219 223 opened_by=None):
220 224 """
221 225 Count the number of pull requests for a specific repository that are
222 226 awaiting review.
223 227
224 228 :param repo_name: target or source repo
225 229 :param source: boolean flag to specify if repo_name refers to source
226 230 :param statuses: list of pull request statuses
227 231 :param opened_by: author user of the pull request
228 232 :returns: int number of pull requests
229 233 """
230 234 pull_requests = self.get_awaiting_review(
231 235 repo_name, source=source, statuses=statuses, opened_by=opened_by)
232 236
233 237 return len(pull_requests)
234 238
235 239 def get_awaiting_review(self, repo_name, source=False, statuses=None,
236 240 opened_by=None, offset=0, length=None,
237 241 order_by=None, order_dir='desc'):
238 242 """
239 243 Get all pull requests for a specific repository that are awaiting
240 244 review.
241 245
242 246 :param repo_name: target or source repo
243 247 :param source: boolean flag to specify if repo_name refers to source
244 248 :param statuses: list of pull request statuses
245 249 :param opened_by: author user of the pull request
246 250 :param offset: pagination offset
247 251 :param length: length of returned list
248 252 :param order_by: order of the returned list
249 253 :param order_dir: 'asc' or 'desc' ordering direction
250 254 :returns: list of pull requests
251 255 """
252 256 pull_requests = self.get_all(
253 257 repo_name, source=source, statuses=statuses, opened_by=opened_by,
254 258 order_by=order_by, order_dir=order_dir)
255 259
256 260 _filtered_pull_requests = []
257 261 for pr in pull_requests:
258 262 status = pr.calculated_review_status()
259 263 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
260 264 ChangesetStatus.STATUS_UNDER_REVIEW]:
261 265 _filtered_pull_requests.append(pr)
262 266 if length:
263 267 return _filtered_pull_requests[offset:offset+length]
264 268 else:
265 269 return _filtered_pull_requests
266 270
267 271 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
268 272 opened_by=None, user_id=None):
269 273 """
270 274 Count the number of pull requests for a specific repository that are
271 275 awaiting review from a specific user.
272 276
273 277 :param repo_name: target or source repo
274 278 :param source: boolean flag to specify if repo_name refers to source
275 279 :param statuses: list of pull request statuses
276 280 :param opened_by: author user of the pull request
277 281 :param user_id: reviewer user of the pull request
278 282 :returns: int number of pull requests
279 283 """
280 284 pull_requests = self.get_awaiting_my_review(
281 285 repo_name, source=source, statuses=statuses, opened_by=opened_by,
282 286 user_id=user_id)
283 287
284 288 return len(pull_requests)
285 289
286 290 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
287 291 opened_by=None, user_id=None, offset=0,
288 292 length=None, order_by=None, order_dir='desc'):
289 293 """
290 294 Get all pull requests for a specific repository that are awaiting
291 295 review from a specific user.
292 296
293 297 :param repo_name: target or source repo
294 298 :param source: boolean flag to specify if repo_name refers to source
295 299 :param statuses: list of pull request statuses
296 300 :param opened_by: author user of the pull request
297 301 :param user_id: reviewer user of the pull request
298 302 :param offset: pagination offset
299 303 :param length: length of returned list
300 304 :param order_by: order of the returned list
301 305 :param order_dir: 'asc' or 'desc' ordering direction
302 306 :returns: list of pull requests
303 307 """
304 308 pull_requests = self.get_all(
305 309 repo_name, source=source, statuses=statuses, opened_by=opened_by,
306 310 order_by=order_by, order_dir=order_dir)
307 311
308 312 _my = PullRequestModel().get_not_reviewed(user_id)
309 313 my_participation = []
310 314 for pr in pull_requests:
311 315 if pr in _my:
312 316 my_participation.append(pr)
313 317 _filtered_pull_requests = my_participation
314 318 if length:
315 319 return _filtered_pull_requests[offset:offset+length]
316 320 else:
317 321 return _filtered_pull_requests
318 322
319 323 def get_not_reviewed(self, user_id):
320 324 return [
321 325 x.pull_request for x in PullRequestReviewers.query().filter(
322 326 PullRequestReviewers.user_id == user_id).all()
323 327 ]
324 328
325 329 def _prepare_participating_query(self, user_id=None, statuses=None,
326 330 order_by=None, order_dir='desc'):
327 331 q = PullRequest.query()
328 332 if user_id:
329 333 reviewers_subquery = Session().query(
330 334 PullRequestReviewers.pull_request_id).filter(
331 335 PullRequestReviewers.user_id == user_id).subquery()
332 336 user_filter = or_(
333 337 PullRequest.user_id == user_id,
334 338 PullRequest.pull_request_id.in_(reviewers_subquery)
335 339 )
336 340 q = PullRequest.query().filter(user_filter)
337 341
338 342 # closed,opened
339 343 if statuses:
340 344 q = q.filter(PullRequest.status.in_(statuses))
341 345
342 346 if order_by:
343 347 order_map = {
344 348 'name_raw': PullRequest.pull_request_id,
345 349 'title': PullRequest.title,
346 350 'updated_on_raw': PullRequest.updated_on,
347 351 'target_repo': PullRequest.target_repo_id
348 352 }
349 353 if order_dir == 'asc':
350 354 q = q.order_by(order_map[order_by].asc())
351 355 else:
352 356 q = q.order_by(order_map[order_by].desc())
353 357
354 358 return q
355 359
356 360 def count_im_participating_in(self, user_id=None, statuses=None):
357 361 q = self._prepare_participating_query(user_id, statuses=statuses)
358 362 return q.count()
359 363
360 364 def get_im_participating_in(
361 365 self, user_id=None, statuses=None, offset=0,
362 366 length=None, order_by=None, order_dir='desc'):
363 367 """
364 368 Get all pull requests that I'm participating in, or that I have opened
365 369 """
366 370
367 371 q = self._prepare_participating_query(
368 372 user_id, statuses=statuses, order_by=order_by,
369 373 order_dir=order_dir)
370 374
371 375 if length:
372 376 pull_requests = q.limit(length).offset(offset).all()
373 377 else:
374 378 pull_requests = q.all()
375 379
376 380 return pull_requests
377 381
378 382 def get_versions(self, pull_request):
379 383 """
380 384 returns versions of the pull request sorted by ID in ascending order
381 385 """
382 386 return PullRequestVersion.query()\
383 387 .filter(PullRequestVersion.pull_request == pull_request)\
384 388 .order_by(PullRequestVersion.pull_request_version_id.asc())\
385 389 .all()
386 390
387 391 def get_pr_version(self, pull_request_id, version=None):
388 392 at_version = None
389 393
390 394 if version and version == 'latest':
391 395 pull_request_ver = PullRequest.get(pull_request_id)
392 396 pull_request_obj = pull_request_ver
393 397 _org_pull_request_obj = pull_request_obj
394 398 at_version = 'latest'
395 399 elif version:
396 400 pull_request_ver = PullRequestVersion.get_or_404(version)
397 401 pull_request_obj = pull_request_ver
398 402 _org_pull_request_obj = pull_request_ver.pull_request
399 403 at_version = pull_request_ver.pull_request_version_id
400 404 else:
401 405 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
402 406 pull_request_id)
403 407
404 408 pull_request_display_obj = PullRequest.get_pr_display_object(
405 409 pull_request_obj, _org_pull_request_obj)
406 410
407 411 return _org_pull_request_obj, pull_request_obj, \
408 412 pull_request_display_obj, at_version
409 413
410 414 def create(self, created_by, source_repo, source_ref, target_repo,
411 415 target_ref, revisions, reviewers, title, description=None,
412 416 description_renderer=None,
413 417 reviewer_data=None, translator=None, auth_user=None):
414 418 translator = translator or get_current_request().translate
415 419
416 420 created_by_user = self._get_user(created_by)
417 421 auth_user = auth_user or created_by_user.AuthUser()
418 422 source_repo = self._get_repo(source_repo)
419 423 target_repo = self._get_repo(target_repo)
420 424
421 425 pull_request = PullRequest()
422 426 pull_request.source_repo = source_repo
423 427 pull_request.source_ref = source_ref
424 428 pull_request.target_repo = target_repo
425 429 pull_request.target_ref = target_ref
426 430 pull_request.revisions = revisions
427 431 pull_request.title = title
428 432 pull_request.description = description
429 433 pull_request.description_renderer = description_renderer
430 434 pull_request.author = created_by_user
431 435 pull_request.reviewer_data = reviewer_data
432
436 pull_request.pull_request_state = pull_request.STATE_CREATING
433 437 Session().add(pull_request)
434 438 Session().flush()
435 439
436 440 reviewer_ids = set()
437 441 # members / reviewers
438 442 for reviewer_object in reviewers:
439 443 user_id, reasons, mandatory, rules = reviewer_object
440 444 user = self._get_user(user_id)
441 445
442 446 # skip duplicates
443 447 if user.user_id in reviewer_ids:
444 448 continue
445 449
446 450 reviewer_ids.add(user.user_id)
447 451
448 452 reviewer = PullRequestReviewers()
449 453 reviewer.user = user
450 454 reviewer.pull_request = pull_request
451 455 reviewer.reasons = reasons
452 456 reviewer.mandatory = mandatory
453 457
454 458 # NOTE(marcink): pick only first rule for now
455 459 rule_id = list(rules)[0] if rules else None
456 460 rule = RepoReviewRule.get(rule_id) if rule_id else None
457 461 if rule:
458 462 review_group = rule.user_group_vote_rule(user_id)
459 463 # we check if this particular reviewer is a member of a voting group
460 464 if review_group:
461 465 # NOTE(marcink):
462 466 # the user can be a member of more than one group, but we pick the first,
463 467 # same as the default reviewers algorithm
464 468 review_group = review_group[0]
465 469
466 470 rule_data = {
467 471 'rule_name':
468 472 rule.review_rule_name,
469 473 'rule_user_group_entry_id':
470 474 review_group.repo_review_rule_users_group_id,
471 475 'rule_user_group_name':
472 476 review_group.users_group.users_group_name,
473 477 'rule_user_group_members':
474 478 [x.user.username for x in review_group.users_group.members],
475 479 'rule_user_group_members_id':
476 480 [x.user.user_id for x in review_group.users_group.members],
477 481 }
478 482 # e.g {'vote_rule': -1, 'mandatory': True}
479 483 rule_data.update(review_group.rule_data())
480 484
481 485 reviewer.rule_data = rule_data
482 486
483 487 Session().add(reviewer)
484 488 Session().flush()
485 489
486 490 # Set approval status to "Under Review" for all commits which are
487 491 # part of this pull request.
488 492 ChangesetStatusModel().set_status(
489 493 repo=target_repo,
490 494 status=ChangesetStatus.STATUS_UNDER_REVIEW,
491 495 user=created_by_user,
492 496 pull_request=pull_request
493 497 )
494 498 # we commit early at this point. The queries above take row locks,
495 499 # so we need to commit and finish the transaction before the validate
496 500 # call below, which for large repos can take long and would otherwise
497 501 # keep holding those row locks
498 502 Session().commit()
499 503
500 # prepare workspace, and run initial merge simulation
501 MergeCheck.validate(
502 pull_request, auth_user=auth_user, translator=translator)
504 # prepare workspace, and run initial merge simulation. Set state during that
505 # operation
506 pull_request = PullRequest.get(pull_request.pull_request_id)
507
508 # set the state to 'merging' for the simulation and, once it finishes,
509 # to 'created' to mark that the simulation completed successfully
510 with pull_request.set_state(PullRequest.STATE_MERGING,
511 final_state=PullRequest.STATE_CREATED):
512 MergeCheck.validate(
513 pull_request, auth_user=auth_user, translator=translator)
503 514
504 515 self.notify_reviewers(pull_request, reviewer_ids)
505 516 self._trigger_pull_request_hook(
506 517 pull_request, created_by_user, 'create')
507 518
508 519 creation_data = pull_request.get_api_data(with_merge_state=False)
509 520 self._log_audit_action(
510 521 'repo.pull_request.create', {'data': creation_data},
511 522 auth_user, pull_request)
512 523
513 524 return pull_request
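# Hedged sketch of the state guard used above. The real PullRequest.set_state()
# context manager is defined on the model as part of this changeset and may
# differ in detail; `_set_state_sketch` is a hypothetical helper that only
# illustrates flipping into an operational state and settling on a final one.
from contextlib import contextmanager

@contextmanager
def _set_state_sketch(pull_request, state, final_state=None):
    old_state = pull_request.pull_request_state
    pull_request.pull_request_state = state
    Session().commit()
    try:
        yield
    finally:
        # settle on the explicit final state, or fall back to the previous one
        pull_request.pull_request_state = final_state or old_state
        Session().commit()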
514 525
515 526 def _trigger_pull_request_hook(self, pull_request, user, action):
516 527 pull_request = self.__get_pull_request(pull_request)
517 528 target_scm = pull_request.target_repo.scm_instance()
518 529 if action == 'create':
519 530 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
520 531 elif action == 'merge':
521 532 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
522 533 elif action == 'close':
523 534 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
524 535 elif action == 'review_status_change':
525 536 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
526 537 elif action == 'update':
527 538 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
528 539 else:
529 540 return
530 541
531 542 trigger_hook(
532 543 username=user.username,
533 544 repo_name=pull_request.target_repo.repo_name,
534 545 repo_alias=target_scm.alias,
535 546 pull_request=pull_request)
536 547
537 548 def _get_commit_ids(self, pull_request):
538 549 """
539 550 Return the commit ids of the merged pull request.
540 551
541 552 This method does not yet deal correctly with the lack of autoupdates,
542 553 nor with implicit target updates.
543 554 For example: if a commit in the source repo is already in the target, it
544 555 will be reported anyway.
545 556 """
546 557 merge_rev = pull_request.merge_rev
547 558 if merge_rev is None:
548 559 raise ValueError('This pull request was not merged yet')
549 560
550 561 commit_ids = list(pull_request.revisions)
551 562 if merge_rev not in commit_ids:
552 563 commit_ids.append(merge_rev)
553 564
554 565 return commit_ids
555 566
556 567 def merge_repo(self, pull_request, user, extras):
557 568 log.debug("Merging pull request %s", pull_request.pull_request_id)
558 569 extras['user_agent'] = 'internal-merge'
559 570 merge_state = self._merge_pull_request(pull_request, user, extras)
560 571 if merge_state.executed:
561 572 log.debug("Merge was successful, updating the pull request comments.")
562 573 self._comment_and_close_pr(pull_request, user, merge_state)
563 574
564 575 self._log_audit_action(
565 576 'repo.pull_request.merge',
566 577 {'merge_state': merge_state.__dict__},
567 578 user, pull_request)
568 579
569 580 else:
570 581 log.warn("Merge failed, not updating the pull request.")
571 582 return merge_state
572 583
573 584 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
574 585 target_vcs = pull_request.target_repo.scm_instance()
575 586 source_vcs = pull_request.source_repo.scm_instance()
576 587
577 588 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
578 589 pr_id=pull_request.pull_request_id,
579 590 pr_title=pull_request.title,
580 591 source_repo=source_vcs.name,
581 592 source_ref_name=pull_request.source_ref_parts.name,
582 593 target_repo=target_vcs.name,
583 594 target_ref_name=pull_request.target_ref_parts.name,
584 595 )
585 596
586 597 workspace_id = self._workspace_id(pull_request)
587 598 repo_id = pull_request.target_repo.repo_id
588 599 use_rebase = self._use_rebase_for_merging(pull_request)
589 600 close_branch = self._close_branch_before_merging(pull_request)
590 601
591 602 target_ref = self._refresh_reference(
592 603 pull_request.target_ref_parts, target_vcs)
593 604
594 605 callback_daemon, extras = prepare_callback_daemon(
595 606 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
596 607 host=vcs_settings.HOOKS_HOST,
597 608 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
598 609
599 610 with callback_daemon:
600 611 # TODO: johbo: Implement a clean way to run a config_override
601 612 # for a single call.
602 613 target_vcs.config.set(
603 614 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
604 615
605 616 user_name = user.short_contact
606 617 merge_state = target_vcs.merge(
607 618 repo_id, workspace_id, target_ref, source_vcs,
608 619 pull_request.source_ref_parts,
609 620 user_name=user_name, user_email=user.email,
610 621 message=message, use_rebase=use_rebase,
611 622 close_branch=close_branch)
612 623 return merge_state
613 624
614 625 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
615 626 pull_request.merge_rev = merge_state.merge_ref.commit_id
616 627 pull_request.updated_on = datetime.datetime.now()
617 628 close_msg = close_msg or 'Pull request merged and closed'
618 629
619 630 CommentsModel().create(
620 631 text=safe_unicode(close_msg),
621 632 repo=pull_request.target_repo.repo_id,
622 633 user=user.user_id,
623 634 pull_request=pull_request.pull_request_id,
624 635 f_path=None,
625 636 line_no=None,
626 637 closing_pr=True
627 638 )
628 639
629 640 Session().add(pull_request)
630 641 Session().flush()
631 642 # TODO: paris: replace invalidation with less radical solution
632 643 ScmModel().mark_for_invalidation(
633 644 pull_request.target_repo.repo_name)
634 645 self._trigger_pull_request_hook(pull_request, user, 'merge')
635 646
636 647 def has_valid_update_type(self, pull_request):
637 648 source_ref_type = pull_request.source_ref_parts.type
638 649 return source_ref_type in self.REF_TYPES
639 650
640 651 def update_commits(self, pull_request):
641 652 """
642 653 Get the updated list of commits for the pull request
643 654 and return the new pull request version and the list
644 655 of commits processed by this update action
645 656 """
646 657 pull_request = self.__get_pull_request(pull_request)
647 658 source_ref_type = pull_request.source_ref_parts.type
648 659 source_ref_name = pull_request.source_ref_parts.name
649 660 source_ref_id = pull_request.source_ref_parts.commit_id
650 661
651 662 target_ref_type = pull_request.target_ref_parts.type
652 663 target_ref_name = pull_request.target_ref_parts.name
653 664 target_ref_id = pull_request.target_ref_parts.commit_id
654 665
655 666 if not self.has_valid_update_type(pull_request):
656 log.debug(
657 "Skipping update of pull request %s due to ref type: %s",
658 pull_request, source_ref_type)
667 log.debug("Skipping update of pull request %s due to ref type: %s",
668 pull_request, source_ref_type)
659 669 return UpdateResponse(
660 670 executed=False,
661 671 reason=UpdateFailureReason.WRONG_REF_TYPE,
662 672 old=pull_request, new=None, changes=None,
663 673 source_changed=False, target_changed=False)
664 674
665 675 # source repo
666 676 source_repo = pull_request.source_repo.scm_instance()
667 677 try:
668 678 source_commit = source_repo.get_commit(commit_id=source_ref_name)
669 679 except CommitDoesNotExistError:
670 680 return UpdateResponse(
671 681 executed=False,
672 682 reason=UpdateFailureReason.MISSING_SOURCE_REF,
673 683 old=pull_request, new=None, changes=None,
674 684 source_changed=False, target_changed=False)
675 685
676 686 source_changed = source_ref_id != source_commit.raw_id
677 687
678 688 # target repo
679 689 target_repo = pull_request.target_repo.scm_instance()
680 690 try:
681 691 target_commit = target_repo.get_commit(commit_id=target_ref_name)
682 692 except CommitDoesNotExistError:
683 693 return UpdateResponse(
684 694 executed=False,
685 695 reason=UpdateFailureReason.MISSING_TARGET_REF,
686 696 old=pull_request, new=None, changes=None,
687 697 source_changed=False, target_changed=False)
688 698 target_changed = target_ref_id != target_commit.raw_id
689 699
690 700 if not (source_changed or target_changed):
691 701 log.debug("Nothing changed in pull request %s", pull_request)
692 702 return UpdateResponse(
693 703 executed=False,
694 704 reason=UpdateFailureReason.NO_CHANGE,
695 705 old=pull_request, new=None, changes=None,
696 706 source_changed=target_changed, target_changed=source_changed)
697 707
698 708 change_in_found = 'target repo' if target_changed else 'source repo'
699 709 log.debug('Updating pull request because of change in %s detected',
700 710 change_in_found)
701 711
702 712 # Finally there is a need for an update, in case of source change
703 713 # we create a new version, else just an update
704 714 if source_changed:
705 715 pull_request_version = self._create_version_from_snapshot(pull_request)
706 716 self._link_comments_to_version(pull_request_version)
707 717 else:
708 718 try:
709 719 ver = pull_request.versions[-1]
710 720 except IndexError:
711 721 ver = None
712 722
713 723 pull_request.pull_request_version_id = \
714 724 ver.pull_request_version_id if ver else None
715 725 pull_request_version = pull_request
716 726
717 727 try:
718 728 if target_ref_type in self.REF_TYPES:
719 729 target_commit = target_repo.get_commit(target_ref_name)
720 730 else:
721 731 target_commit = target_repo.get_commit(target_ref_id)
722 732 except CommitDoesNotExistError:
723 733 return UpdateResponse(
724 734 executed=False,
725 735 reason=UpdateFailureReason.MISSING_TARGET_REF,
726 736 old=pull_request, new=None, changes=None,
727 737 source_changed=source_changed, target_changed=target_changed)
728 738
729 739 # re-compute commit ids
730 740 old_commit_ids = pull_request.revisions
731 741 pre_load = ["author", "branch", "date", "message"]
732 742 commit_ranges = target_repo.compare(
733 743 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
734 744 pre_load=pre_load)
735 745
736 746 ancestor = target_repo.get_common_ancestor(
737 747 target_commit.raw_id, source_commit.raw_id, source_repo)
738 748
739 749 pull_request.source_ref = '%s:%s:%s' % (
740 750 source_ref_type, source_ref_name, source_commit.raw_id)
741 751 pull_request.target_ref = '%s:%s:%s' % (
742 752 target_ref_type, target_ref_name, ancestor)
743 753
744 754 pull_request.revisions = [
745 755 commit.raw_id for commit in reversed(commit_ranges)]
746 756 pull_request.updated_on = datetime.datetime.now()
747 757 Session().add(pull_request)
748 758 new_commit_ids = pull_request.revisions
749 759
750 760 old_diff_data, new_diff_data = self._generate_update_diffs(
751 761 pull_request, pull_request_version)
752 762
753 763 # calculate commit and file changes
754 764 changes = self._calculate_commit_id_changes(
755 765 old_commit_ids, new_commit_ids)
756 766 file_changes = self._calculate_file_changes(
757 767 old_diff_data, new_diff_data)
758 768
759 769 # set comments as outdated if DIFFS changed
760 770 CommentsModel().outdate_comments(
761 771 pull_request, old_diff_data=old_diff_data,
762 772 new_diff_data=new_diff_data)
763 773
764 774 commit_changes = (changes.added or changes.removed)
765 775 file_node_changes = (
766 776 file_changes.added or file_changes.modified or file_changes.removed)
767 777 pr_has_changes = commit_changes or file_node_changes
768 778
769 779 # Add an automatic comment to the pull request, in case
770 780 # anything has changed
771 781 if pr_has_changes:
772 782 update_comment = CommentsModel().create(
773 783 text=self._render_update_message(changes, file_changes),
774 784 repo=pull_request.target_repo,
775 785 user=pull_request.author,
776 786 pull_request=pull_request,
777 787 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
778 788
779 789 # Update status to "Under Review" for added commits
780 790 for commit_id in changes.added:
781 791 ChangesetStatusModel().set_status(
782 792 repo=pull_request.source_repo,
783 793 status=ChangesetStatus.STATUS_UNDER_REVIEW,
784 794 comment=update_comment,
785 795 user=pull_request.author,
786 796 pull_request=pull_request,
787 797 revision=commit_id)
788 798
789 799 log.debug(
790 800 'Updated pull request %s, added_ids: %s, common_ids: %s, '
791 801 'removed_ids: %s', pull_request.pull_request_id,
792 802 changes.added, changes.common, changes.removed)
793 803 log.debug(
794 804 'Updated pull request with the following file changes: %s',
795 805 file_changes)
796 806
797 807 log.info(
798 808 "Updated pull request %s from commit %s to commit %s, "
799 809 "stored new version %s of this pull request.",
800 810 pull_request.pull_request_id, source_ref_id,
801 811 pull_request.source_ref_parts.commit_id,
802 812 pull_request_version.pull_request_version_id)
803 813 Session().commit()
804 self._trigger_pull_request_hook(
805 pull_request, pull_request.author, 'update')
814 self._trigger_pull_request_hook(pull_request, pull_request.author, 'update')
806 815
807 816 return UpdateResponse(
808 817 executed=True, reason=UpdateFailureReason.NONE,
809 818 old=pull_request, new=pull_request_version, changes=changes,
810 819 source_changed=source_changed, target_changed=target_changed)
811 820
812 821 def _create_version_from_snapshot(self, pull_request):
813 822 version = PullRequestVersion()
814 823 version.title = pull_request.title
815 824 version.description = pull_request.description
816 825 version.status = pull_request.status
826 version.pull_request_state = pull_request.pull_request_state
817 827 version.created_on = datetime.datetime.now()
818 828 version.updated_on = pull_request.updated_on
819 829 version.user_id = pull_request.user_id
820 830 version.source_repo = pull_request.source_repo
821 831 version.source_ref = pull_request.source_ref
822 832 version.target_repo = pull_request.target_repo
823 833 version.target_ref = pull_request.target_ref
824 834
825 835 version._last_merge_source_rev = pull_request._last_merge_source_rev
826 836 version._last_merge_target_rev = pull_request._last_merge_target_rev
827 837 version.last_merge_status = pull_request.last_merge_status
828 838 version.shadow_merge_ref = pull_request.shadow_merge_ref
829 839 version.merge_rev = pull_request.merge_rev
830 840 version.reviewer_data = pull_request.reviewer_data
831 841
832 842 version.revisions = pull_request.revisions
833 843 version.pull_request = pull_request
834 844 Session().add(version)
835 845 Session().flush()
836 846
837 847 return version
838 848
839 849 def _generate_update_diffs(self, pull_request, pull_request_version):
840 850
841 851 diff_context = (
842 852 self.DIFF_CONTEXT +
843 853 CommentsModel.needed_extra_diff_context())
844 854 hide_whitespace_changes = False
845 855 source_repo = pull_request_version.source_repo
846 856 source_ref_id = pull_request_version.source_ref_parts.commit_id
847 857 target_ref_id = pull_request_version.target_ref_parts.commit_id
848 858 old_diff = self._get_diff_from_pr_or_version(
849 859 source_repo, source_ref_id, target_ref_id,
850 860 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
851 861
852 862 source_repo = pull_request.source_repo
853 863 source_ref_id = pull_request.source_ref_parts.commit_id
854 864 target_ref_id = pull_request.target_ref_parts.commit_id
855 865
856 866 new_diff = self._get_diff_from_pr_or_version(
857 867 source_repo, source_ref_id, target_ref_id,
858 868 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
859 869
860 870 old_diff_data = diffs.DiffProcessor(old_diff)
861 871 old_diff_data.prepare()
862 872 new_diff_data = diffs.DiffProcessor(new_diff)
863 873 new_diff_data.prepare()
864 874
865 875 return old_diff_data, new_diff_data
866 876
867 877 def _link_comments_to_version(self, pull_request_version):
868 878 """
869 879 Link all unlinked comments of this pull request to the given version.
870 880
871 881 :param pull_request_version: The `PullRequestVersion` to which
872 882 the comments shall be linked.
873 883
874 884 """
875 885 pull_request = pull_request_version.pull_request
876 886 comments = ChangesetComment.query()\
877 887 .filter(
878 888 # TODO: johbo: Should we query for the repo at all here?
879 889 # Pending decision on how comments of PRs are to be related
880 890 # to either the source repo, the target repo or no repo at all.
881 891 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
882 892 ChangesetComment.pull_request == pull_request,
883 893 ChangesetComment.pull_request_version == None)\
884 894 .order_by(ChangesetComment.comment_id.asc())
885 895
886 896 # TODO: johbo: Find out why this breaks if it is done in a bulk
887 897 # operation.
888 898 for comment in comments:
889 899 comment.pull_request_version_id = (
890 900 pull_request_version.pull_request_version_id)
891 901 Session().add(comment)
892 902
893 903 def _calculate_commit_id_changes(self, old_ids, new_ids):
894 904 added = [x for x in new_ids if x not in old_ids]
895 905 common = [x for x in new_ids if x in old_ids]
896 906 removed = [x for x in old_ids if x not in new_ids]
897 907 total = new_ids
898 908 return ChangeTuple(added, common, removed, total)
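# e.g. old_ids=['a', 'b', 'c'] and new_ids=['b', 'c', 'd'] gives
# added=['d'], common=['b', 'c'], removed=['a'], total=['b', 'c', 'd']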
899 909
900 910 def _calculate_file_changes(self, old_diff_data, new_diff_data):
901 911
902 912 old_files = OrderedDict()
903 913 for diff_data in old_diff_data.parsed_diff:
904 914 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
905 915
906 916 added_files = []
907 917 modified_files = []
908 918 removed_files = []
909 919 for diff_data in new_diff_data.parsed_diff:
910 920 new_filename = diff_data['filename']
911 921 new_hash = md5_safe(diff_data['raw_diff'])
912 922
913 923 old_hash = old_files.get(new_filename)
914 924 if not old_hash:
915 925 # file is not present in old diff, means it's added
916 926 added_files.append(new_filename)
917 927 else:
918 928 if new_hash != old_hash:
919 929 modified_files.append(new_filename)
920 930 # now remove a file from old, since we have seen it already
921 931 del old_files[new_filename]
922 932
923 933 # removed files are those present in the old diff but not in the NEW one;
924 934 # since we delete old entries that also appear in the new diff, any
925 935 # left-overs are the removed files
926 936 removed_files.extend(old_files.keys())
927 937
928 938 return FileChangeTuple(added_files, modified_files, removed_files)
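# Worked example of the classification above (file names and hashes made up):
#   old diff hashes: {'setup.py': 'aaa', 'README.rst': 'bbb'}
#   new diff hashes: {'setup.py': 'aaa', 'README.rst': 'ccc', 'docs/index.rst': 'ddd'}
#   -> added    = ['docs/index.rst']   (no old hash)
#   -> modified = ['README.rst']       (hash changed)
#   -> removed  = []                   (nothing left over in old_files)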
929 939
930 940 def _render_update_message(self, changes, file_changes):
931 941 """
932 942 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
933 943 so it always looks the same regardless of which default
934 944 renderer the system is using.
935 945
936 946 :param changes: changes named tuple
937 947 :param file_changes: file changes named tuple
938 948
939 949 """
940 950 new_status = ChangesetStatus.get_status_lbl(
941 951 ChangesetStatus.STATUS_UNDER_REVIEW)
942 952
943 953 changed_files = (
944 954 file_changes.added + file_changes.modified + file_changes.removed)
945 955
946 956 params = {
947 957 'under_review_label': new_status,
948 958 'added_commits': changes.added,
949 959 'removed_commits': changes.removed,
950 960 'changed_files': changed_files,
951 961 'added_files': file_changes.added,
952 962 'modified_files': file_changes.modified,
953 963 'removed_files': file_changes.removed,
954 964 }
955 965 renderer = RstTemplateRenderer()
956 966 return renderer.render('pull_request_update.mako', **params)
957 967
958 968 def edit(self, pull_request, title, description, description_renderer, user):
959 969 pull_request = self.__get_pull_request(pull_request)
960 970 old_data = pull_request.get_api_data(with_merge_state=False)
961 971 if pull_request.is_closed():
962 972 raise ValueError('This pull request is closed')
963 973 if title:
964 974 pull_request.title = title
965 975 pull_request.description = description
966 976 pull_request.updated_on = datetime.datetime.now()
967 977 pull_request.description_renderer = description_renderer
968 978 Session().add(pull_request)
969 979 self._log_audit_action(
970 980 'repo.pull_request.edit', {'old_data': old_data},
971 981 user, pull_request)
972 982
973 983 def update_reviewers(self, pull_request, reviewer_data, user):
974 984 """
975 985 Update the reviewers in the pull request
976 986
977 987 :param pull_request: the pr to update
978 988 :param reviewer_data: list of tuples
979 989 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
980 990 """
981 991 pull_request = self.__get_pull_request(pull_request)
982 992 if pull_request.is_closed():
983 993 raise ValueError('This pull request is closed')
984 994
985 995 reviewers = {}
986 996 for user_id, reasons, mandatory, rules in reviewer_data:
987 997 if isinstance(user_id, (int, basestring)):
988 998 user_id = self._get_user(user_id).user_id
989 999 reviewers[user_id] = {
990 1000 'reasons': reasons, 'mandatory': mandatory}
991 1001
992 1002 reviewers_ids = set(reviewers.keys())
993 1003 current_reviewers = PullRequestReviewers.query()\
994 1004 .filter(PullRequestReviewers.pull_request ==
995 1005 pull_request).all()
996 1006 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
997 1007
998 1008 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
999 1009 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1000 1010
1001 1011 log.debug("Adding %s reviewers", ids_to_add)
1002 1012 log.debug("Removing %s reviewers", ids_to_remove)
1003 1013 changed = False
1004 1014 for uid in ids_to_add:
1005 1015 changed = True
1006 1016 _usr = self._get_user(uid)
1007 1017 reviewer = PullRequestReviewers()
1008 1018 reviewer.user = _usr
1009 1019 reviewer.pull_request = pull_request
1010 1020 reviewer.reasons = reviewers[uid]['reasons']
1011 1021 # NOTE(marcink): mandatory shouldn't be changed now
1012 1022 # reviewer.mandatory = reviewers[uid]['reasons']
1013 1023 Session().add(reviewer)
1014 1024 self._log_audit_action(
1015 1025 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1016 1026 user, pull_request)
1017 1027
1018 1028 for uid in ids_to_remove:
1019 1029 changed = True
1020 1030 reviewers = PullRequestReviewers.query()\
1021 1031 .filter(PullRequestReviewers.user_id == uid,
1022 1032 PullRequestReviewers.pull_request == pull_request)\
1023 1033 .all()
1024 1034 # use .all() in case we accidentally added the same person twice
1025 1035 # this CAN happen due to the lack of DB checks
1026 1036 for obj in reviewers:
1027 1037 old_data = obj.get_dict()
1028 1038 Session().delete(obj)
1029 1039 self._log_audit_action(
1030 1040 'repo.pull_request.reviewer.delete',
1031 1041 {'old_data': old_data}, user, pull_request)
1032 1042
1033 1043 if changed:
1034 1044 pull_request.updated_on = datetime.datetime.now()
1035 1045 Session().add(pull_request)
1036 1046
1037 1047 self.notify_reviewers(pull_request, ids_to_add)
1038 1048 return ids_to_add, ids_to_remove
1039 1049
1040 1050 def get_url(self, pull_request, request=None, permalink=False):
1041 1051 if not request:
1042 1052 request = get_current_request()
1043 1053
1044 1054 if permalink:
1045 1055 return request.route_url(
1046 1056 'pull_requests_global',
1047 1057 pull_request_id=pull_request.pull_request_id,)
1048 1058 else:
1049 1059 return request.route_url('pullrequest_show',
1050 1060 repo_name=safe_str(pull_request.target_repo.repo_name),
1051 1061 pull_request_id=pull_request.pull_request_id,)
1052 1062
1053 1063 def get_shadow_clone_url(self, pull_request, request=None):
1054 1064 """
1055 1065 Returns qualified url pointing to the shadow repository. If this pull
1056 1066 request is closed there is no shadow repository and ``None`` will be
1057 1067 returned.
1058 1068 """
1059 1069 if pull_request.is_closed():
1060 1070 return None
1061 1071 else:
1062 1072 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1063 1073 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1064 1074
1065 1075 def notify_reviewers(self, pull_request, reviewers_ids):
1066 1076 # notification to reviewers
1067 1077 if not reviewers_ids:
1068 1078 return
1069 1079
1070 1080 pull_request_obj = pull_request
1071 1081 # get the current participants of this pull request
1072 1082 recipients = reviewers_ids
1073 1083 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1074 1084
1075 1085 pr_source_repo = pull_request_obj.source_repo
1076 1086 pr_target_repo = pull_request_obj.target_repo
1077 1087
1078 1088 pr_url = h.route_url('pullrequest_show',
1079 1089 repo_name=pr_target_repo.repo_name,
1080 1090 pull_request_id=pull_request_obj.pull_request_id,)
1081 1091
1082 1092 # set some variables for email notification
1083 1093 pr_target_repo_url = h.route_url(
1084 1094 'repo_summary', repo_name=pr_target_repo.repo_name)
1085 1095
1086 1096 pr_source_repo_url = h.route_url(
1087 1097 'repo_summary', repo_name=pr_source_repo.repo_name)
1088 1098
1089 1099 # pull request specifics
1090 1100 pull_request_commits = [
1091 1101 (x.raw_id, x.message)
1092 1102 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1093 1103
1094 1104 kwargs = {
1095 1105 'user': pull_request.author,
1096 1106 'pull_request': pull_request_obj,
1097 1107 'pull_request_commits': pull_request_commits,
1098 1108
1099 1109 'pull_request_target_repo': pr_target_repo,
1100 1110 'pull_request_target_repo_url': pr_target_repo_url,
1101 1111
1102 1112 'pull_request_source_repo': pr_source_repo,
1103 1113 'pull_request_source_repo_url': pr_source_repo_url,
1104 1114
1105 1115 'pull_request_url': pr_url,
1106 1116 }
1107 1117
1108 1118 # pre-generate the subject for notification itself
1109 1119 (subject,
1110 1120 _h, _e, # we don't care about those
1111 1121 body_plaintext) = EmailNotificationModel().render_email(
1112 1122 notification_type, **kwargs)
1113 1123
1114 1124 # create notification objects, and emails
1115 1125 NotificationModel().create(
1116 1126 created_by=pull_request.author,
1117 1127 notification_subject=subject,
1118 1128 notification_body=body_plaintext,
1119 1129 notification_type=notification_type,
1120 1130 recipients=recipients,
1121 1131 email_kwargs=kwargs,
1122 1132 )
1123 1133
1124 1134 def delete(self, pull_request, user):
1125 1135 pull_request = self.__get_pull_request(pull_request)
1126 1136 old_data = pull_request.get_api_data(with_merge_state=False)
1127 1137 self._cleanup_merge_workspace(pull_request)
1128 1138 self._log_audit_action(
1129 1139 'repo.pull_request.delete', {'old_data': old_data},
1130 1140 user, pull_request)
1131 1141 Session().delete(pull_request)
1132 1142
1133 1143 def close_pull_request(self, pull_request, user):
1134 1144 pull_request = self.__get_pull_request(pull_request)
1135 1145 self._cleanup_merge_workspace(pull_request)
1136 1146 pull_request.status = PullRequest.STATUS_CLOSED
1137 1147 pull_request.updated_on = datetime.datetime.now()
1138 1148 Session().add(pull_request)
1139 1149 self._trigger_pull_request_hook(
1140 1150 pull_request, pull_request.author, 'close')
1141 1151
1142 1152 pr_data = pull_request.get_api_data(with_merge_state=False)
1143 1153 self._log_audit_action(
1144 1154 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1145 1155
1146 1156 def close_pull_request_with_comment(
1147 1157 self, pull_request, user, repo, message=None, auth_user=None):
1148 1158
1149 1159 pull_request_review_status = pull_request.calculated_review_status()
1150 1160
1151 1161 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1152 1162 # approved only if we have voting consent
1153 1163 status = ChangesetStatus.STATUS_APPROVED
1154 1164 else:
1155 1165 status = ChangesetStatus.STATUS_REJECTED
1156 1166 status_lbl = ChangesetStatus.get_status_lbl(status)
1157 1167
1158 1168 default_message = (
1159 1169 'Closing with status change {transition_icon} {status}.'
1160 1170 ).format(transition_icon='>', status=status_lbl)
1161 1171 text = message or default_message
1162 1172
1163 1173 # create a comment, and link it to new status
1164 1174 comment = CommentsModel().create(
1165 1175 text=text,
1166 1176 repo=repo.repo_id,
1167 1177 user=user.user_id,
1168 1178 pull_request=pull_request.pull_request_id,
1169 1179 status_change=status_lbl,
1170 1180 status_change_type=status,
1171 1181 closing_pr=True,
1172 1182 auth_user=auth_user,
1173 1183 )
1174 1184
1175 1185 # calculate old status before we change it
1176 1186 old_calculated_status = pull_request.calculated_review_status()
1177 1187 ChangesetStatusModel().set_status(
1178 1188 repo.repo_id,
1179 1189 status,
1180 1190 user.user_id,
1181 1191 comment=comment,
1182 1192 pull_request=pull_request.pull_request_id
1183 1193 )
1184 1194
1185 1195 Session().flush()
1186 1196 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1187 1197 # we now calculate the status of the pull request again, and based on
1188 1198 # that calculation trigger a status change. This can happen when a
1189 1199 # non-reviewer admin closes a PR: their vote doesn't change the status,
1190 1200 # while a reviewer's vote might.
1191 1201 calculated_status = pull_request.calculated_review_status()
1192 1202 if old_calculated_status != calculated_status:
1193 1203 self._trigger_pull_request_hook(
1194 1204 pull_request, user, 'review_status_change')
1195 1205
1196 1206 # finally close the PR
1197 1207 PullRequestModel().close_pull_request(
1198 1208 pull_request.pull_request_id, user)
1199 1209
1200 1210 return comment, status
1201 1211
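A brief, hedged sketch of how a view might drive the close-with-comment flow above; `pull_request`, `user` and `repo` are assumptions standing in for objects resolved by the surrounding view code, and the message text is invented.

# hypothetical caller-side usage; not a verbatim call site from the codebase
comment, status = PullRequestModel().close_pull_request_with_comment(
    pull_request, user, repo, message='Closing, superseded by a newer PR.')
Session().commit()
log.debug('pull request closed with status %s', status)
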
1202 1212 def merge_status(self, pull_request, translator=None,
1203 1213 force_shadow_repo_refresh=False):
1204 1214 _ = translator or get_current_request().translate
1205 1215
1206 1216 if not self._is_merge_enabled(pull_request):
1207 1217 return False, _('Server-side pull request merging is disabled.')
1208 1218 if pull_request.is_closed():
1209 1219 return False, _('This pull request is closed.')
1210 1220 merge_possible, msg = self._check_repo_requirements(
1211 1221 target=pull_request.target_repo, source=pull_request.source_repo,
1212 1222 translator=_)
1213 1223 if not merge_possible:
1214 1224 return merge_possible, msg
1215 1225
1216 1226 try:
1217 1227 resp = self._try_merge(
1218 1228 pull_request,
1219 1229 force_shadow_repo_refresh=force_shadow_repo_refresh)
1220 1230 log.debug("Merge response: %s", resp)
1221 1231 status = resp.possible, resp.merge_status_message
1222 1232 except NotImplementedError:
1223 1233 status = False, _('Pull request merging is not supported.')
1224 1234
1225 1235 return status
1226 1236
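For orientation, a small hedged sketch of consuming the `(status, message)` pair returned by `merge_status()`; `pull_request` is assumed to be a `PullRequest` instance available to the caller.

# illustrative only; relies solely on the signature and return value shown above
can_merge, merge_msg = PullRequestModel().merge_status(
    pull_request, force_shadow_repo_refresh=False)
if not can_merge:
    log.debug('merge not currently possible: %s', merge_msg)
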
1227 1237 def _check_repo_requirements(self, target, source, translator):
1228 1238 """
1229 1239 Check if `target` and `source` have compatible requirements.
1230 1240
1231 1241 Currently this is just checking for largefiles.
1232 1242 """
1233 1243 _ = translator
1234 1244 target_has_largefiles = self._has_largefiles(target)
1235 1245 source_has_largefiles = self._has_largefiles(source)
1236 1246 merge_possible = True
1237 1247 message = u''
1238 1248
1239 1249 if target_has_largefiles != source_has_largefiles:
1240 1250 merge_possible = False
1241 1251 if source_has_largefiles:
1242 1252 message = _(
1243 1253 'Target repository large files support is disabled.')
1244 1254 else:
1245 1255 message = _(
1246 1256 'Source repository large files support is disabled.')
1247 1257
1248 1258 return merge_possible, message
1249 1259
1250 1260 def _has_largefiles(self, repo):
1251 1261 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1252 1262 'extensions', 'largefiles')
1253 1263 return largefiles_ui and largefiles_ui[0].active
1254 1264
1255 1265 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1256 1266 """
1257 1267 Try to merge the pull request and return the merge status.
1258 1268 """
1259 1269 log.debug(
1260 1270 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1261 1271 pull_request.pull_request_id, force_shadow_repo_refresh)
1262 1272 target_vcs = pull_request.target_repo.scm_instance()
1263 1273 # Refresh the target reference.
1264 1274 try:
1265 1275 target_ref = self._refresh_reference(
1266 1276 pull_request.target_ref_parts, target_vcs)
1267 1277 except CommitDoesNotExistError:
1268 1278 merge_state = MergeResponse(
1269 1279 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1270 1280 metadata={'target_ref': pull_request.target_ref_parts})
1271 1281 return merge_state
1272 1282
1273 1283 target_locked = pull_request.target_repo.locked
1274 1284 if target_locked and target_locked[0]:
1275 1285 locked_by = 'user:{}'.format(target_locked[0])
1276 1286 log.debug("The target repository is locked by %s.", locked_by)
1277 1287 merge_state = MergeResponse(
1278 1288 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1279 1289 metadata={'locked_by': locked_by})
1280 1290 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1281 1291 pull_request, target_ref):
1282 1292 log.debug("Refreshing the merge status of the repository.")
1283 1293 merge_state = self._refresh_merge_state(
1284 1294 pull_request, target_vcs, target_ref)
1285 1295 else:
1286 1296 possible = pull_request.\
1287 1297 last_merge_status == MergeFailureReason.NONE
1288 1298 merge_state = MergeResponse(
1289 1299 possible, False, None, pull_request.last_merge_status)
1290 1300
1291 1301 return merge_state
1292 1302
1293 1303 def _refresh_reference(self, reference, vcs_repository):
1294 1304 if reference.type in self.UPDATABLE_REF_TYPES:
1295 1305 name_or_id = reference.name
1296 1306 else:
1297 1307 name_or_id = reference.commit_id
1298 1308 refreshed_commit = vcs_repository.get_commit(name_or_id)
1299 1309 refreshed_reference = Reference(
1300 1310 reference.type, reference.name, refreshed_commit.raw_id)
1301 1311 return refreshed_reference
1302 1312
1303 1313 def _needs_merge_state_refresh(self, pull_request, target_reference):
1304 1314 return not(
1305 1315 pull_request.revisions and
1306 1316 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1307 1317 target_reference.commit_id == pull_request._last_merge_target_rev)
1308 1318
1309 1319 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1310 1320 workspace_id = self._workspace_id(pull_request)
1311 1321 source_vcs = pull_request.source_repo.scm_instance()
1312 1322 repo_id = pull_request.target_repo.repo_id
1313 1323 use_rebase = self._use_rebase_for_merging(pull_request)
1314 1324 close_branch = self._close_branch_before_merging(pull_request)
1315 1325 merge_state = target_vcs.merge(
1316 1326 repo_id, workspace_id,
1317 1327 target_reference, source_vcs, pull_request.source_ref_parts,
1318 1328 dry_run=True, use_rebase=use_rebase,
1319 1329 close_branch=close_branch)
1320 1330
1321 1331 # Do not store the response if there was an unknown error.
1322 1332 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1323 1333 pull_request._last_merge_source_rev = \
1324 1334 pull_request.source_ref_parts.commit_id
1325 1335 pull_request._last_merge_target_rev = target_reference.commit_id
1326 1336 pull_request.last_merge_status = merge_state.failure_reason
1327 1337 pull_request.shadow_merge_ref = merge_state.merge_ref
1328 1338 Session().add(pull_request)
1329 1339 Session().commit()
1330 1340
1331 1341 return merge_state
1332 1342
1333 1343 def _workspace_id(self, pull_request):
1334 1344 workspace_id = 'pr-%s' % pull_request.pull_request_id
1335 1345 return workspace_id
1336 1346
1337 1347 def generate_repo_data(self, repo, commit_id=None, branch=None,
1338 1348 bookmark=None, translator=None):
1339 1349 from rhodecode.model.repo import RepoModel
1340 1350
1341 1351 all_refs, selected_ref = \
1342 1352 self._get_repo_pullrequest_sources(
1343 1353 repo.scm_instance(), commit_id=commit_id,
1344 1354 branch=branch, bookmark=bookmark, translator=translator)
1345 1355
1346 1356 refs_select2 = []
1347 1357 for element in all_refs:
1348 1358 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1349 1359 refs_select2.append({'text': element[1], 'children': children})
1350 1360
1351 1361 return {
1352 1362 'user': {
1353 1363 'user_id': repo.user.user_id,
1354 1364 'username': repo.user.username,
1355 1365 'firstname': repo.user.first_name,
1356 1366 'lastname': repo.user.last_name,
1357 1367 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1358 1368 },
1359 1369 'name': repo.repo_name,
1360 1370 'link': RepoModel().get_url(repo),
1361 1371 'description': h.chop_at_smart(repo.description_safe, '\n'),
1362 1372 'refs': {
1363 1373 'all_refs': all_refs,
1364 1374 'selected_ref': selected_ref,
1365 1375 'select2_refs': refs_select2
1366 1376 }
1367 1377 }
1368 1378
1369 1379 def generate_pullrequest_title(self, source, source_ref, target):
1370 1380 return u'{source}#{at_ref} to {target}'.format(
1371 1381 source=source,
1372 1382 at_ref=source_ref,
1373 1383 target=target,
1374 1384 )
1375 1385
1376 1386 def _cleanup_merge_workspace(self, pull_request):
1377 1387 # Merging related cleanup
1378 1388 repo_id = pull_request.target_repo.repo_id
1379 1389 target_scm = pull_request.target_repo.scm_instance()
1380 1390 workspace_id = self._workspace_id(pull_request)
1381 1391
1382 1392 try:
1383 1393 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1384 1394 except NotImplementedError:
1385 1395 pass
1386 1396
1387 1397 def _get_repo_pullrequest_sources(
1388 1398 self, repo, commit_id=None, branch=None, bookmark=None,
1389 1399 translator=None):
1390 1400 """
1391 1401 Return a structure with the repo's interesting commits, suitable for
1392 1402 the selectors in the pull request controller
1393 1403
1394 1404 :param commit_id: a commit that must be in the list somehow
1395 1405 and selected by default
1396 1406 :param branch: a branch that must be in the list and selected
1397 1407 by default - even if closed
1398 1408 :param bookmark: a bookmark that must be in the list and selected
1399 1409 """
1400 1410 _ = translator or get_current_request().translate
1401 1411
1402 1412 commit_id = safe_str(commit_id) if commit_id else None
1403 1413 branch = safe_str(branch) if branch else None
1404 1414 bookmark = safe_str(bookmark) if bookmark else None
1405 1415
1406 1416 selected = None
1407 1417
1408 1418 # order matters: first source that has commit_id in it will be selected
1409 1419 sources = []
1410 1420 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1411 1421 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1412 1422
1413 1423 if commit_id:
1414 1424 ref_commit = (h.short_id(commit_id), commit_id)
1415 1425 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1416 1426
1417 1427 sources.append(
1418 1428 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1419 1429 )
1420 1430
1421 1431 groups = []
1422 1432 for group_key, ref_list, group_name, match in sources:
1423 1433 group_refs = []
1424 1434 for ref_name, ref_id in ref_list:
1425 1435 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1426 1436 group_refs.append((ref_key, ref_name))
1427 1437
1428 1438 if not selected:
1429 1439 if set([commit_id, match]) & set([ref_id, ref_name]):
1430 1440 selected = ref_key
1431 1441
1432 1442 if group_refs:
1433 1443 groups.append((group_refs, group_name))
1434 1444
1435 1445 if not selected:
1436 1446 ref = commit_id or branch or bookmark
1437 1447 if ref:
1438 1448 raise CommitDoesNotExistError(
1439 1449 'No commit refs could be found matching: %s' % ref)
1440 1450 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1441 1451 selected = 'branch:%s:%s' % (
1442 1452 repo.DEFAULT_BRANCH_NAME,
1443 1453 repo.branches[repo.DEFAULT_BRANCH_NAME]
1444 1454 )
1445 1455 elif repo.commit_ids:
1446 1456 # make the user select in this case
1447 1457 selected = None
1448 1458 else:
1449 1459 raise EmptyRepositoryError()
1450 1460 return groups, selected
1451 1461
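To make the returned structure easier to picture, here is a hedged example of the `(groups, selected)` shape built above; the branch name and commit hash are invented.

# illustrative shape only -- ref keys follow the '<type>:<name>:<commit_id>'
# pattern assembled in the loop above; the hash below is made up
example_groups = [
    ([('branch:default:a1b2c3d4e5f6', 'default')], u'Branches'),
]
example_selected = 'branch:default:a1b2c3d4e5f6'
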
1452 1462 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1453 1463 hide_whitespace_changes, diff_context):
1454 1464
1455 1465 return self._get_diff_from_pr_or_version(
1456 1466 source_repo, source_ref_id, target_ref_id,
1457 1467 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1458 1468
1459 1469 def _get_diff_from_pr_or_version(
1460 1470 self, source_repo, source_ref_id, target_ref_id,
1461 1471 hide_whitespace_changes, diff_context):
1462 1472
1463 1473 target_commit = source_repo.get_commit(
1464 1474 commit_id=safe_str(target_ref_id))
1465 1475 source_commit = source_repo.get_commit(
1466 1476 commit_id=safe_str(source_ref_id))
1467 1477 if isinstance(source_repo, Repository):
1468 1478 vcs_repo = source_repo.scm_instance()
1469 1479 else:
1470 1480 vcs_repo = source_repo
1471 1481
1472 1482 # TODO: johbo: In the context of an update, we cannot reach
1473 1483 # the old commit anymore with our normal mechanisms. It needs
1474 1484 # some sort of special support in the vcs layer to avoid this
1475 1485 # workaround.
1476 1486 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1477 1487 vcs_repo.alias == 'git'):
1478 1488 source_commit.raw_id = safe_str(source_ref_id)
1479 1489
1480 1490 log.debug('calculating diff between '
1481 1491 'source_ref:%s and target_ref:%s for repo `%s`',
1482 1492 target_ref_id, source_ref_id,
1483 1493 safe_unicode(vcs_repo.path))
1484 1494
1485 1495 vcs_diff = vcs_repo.get_diff(
1486 1496 commit1=target_commit, commit2=source_commit,
1487 1497 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1488 1498 return vcs_diff
1489 1499
1490 1500 def _is_merge_enabled(self, pull_request):
1491 1501 return self._get_general_setting(
1492 1502 pull_request, 'rhodecode_pr_merge_enabled')
1493 1503
1494 1504 def _use_rebase_for_merging(self, pull_request):
1495 1505 repo_type = pull_request.target_repo.repo_type
1496 1506 if repo_type == 'hg':
1497 1507 return self._get_general_setting(
1498 1508 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1499 1509 elif repo_type == 'git':
1500 1510 return self._get_general_setting(
1501 1511 pull_request, 'rhodecode_git_use_rebase_for_merging')
1502 1512
1503 1513 return False
1504 1514
1505 1515 def _close_branch_before_merging(self, pull_request):
1506 1516 repo_type = pull_request.target_repo.repo_type
1507 1517 if repo_type == 'hg':
1508 1518 return self._get_general_setting(
1509 1519 pull_request, 'rhodecode_hg_close_branch_before_merging')
1510 1520 elif repo_type == 'git':
1511 1521 return self._get_general_setting(
1512 1522 pull_request, 'rhodecode_git_close_branch_before_merging')
1513 1523
1514 1524 return False
1515 1525
1516 1526 def _get_general_setting(self, pull_request, settings_key, default=False):
1517 1527 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1518 1528 settings = settings_model.get_general_settings()
1519 1529 return settings.get(settings_key, default)
1520 1530
1521 1531 def _log_audit_action(self, action, action_data, user, pull_request):
1522 1532 audit_logger.store(
1523 1533 action=action,
1524 1534 action_data=action_data,
1525 1535 user=user,
1526 1536 repo=pull_request.target_repo)
1527 1537
1528 1538 def get_reviewer_functions(self):
1529 1539 """
1530 1540 Fetch the functions used for validating and fetching default reviewers.
1531 1541 If available we use the EE package, otherwise we fall back to the CE
1532 1542 package functions.
1533 1543 """
1534 1544 try:
1535 1545 from rc_reviewers.utils import get_default_reviewers_data
1536 1546 from rc_reviewers.utils import validate_default_reviewers
1537 1547 except ImportError:
1538 1548 from rhodecode.apps.repository.utils import get_default_reviewers_data
1539 1549 from rhodecode.apps.repository.utils import validate_default_reviewers
1540 1550
1541 1551 return get_default_reviewers_data, validate_default_reviewers
1542 1552
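A minimal, hedged sketch of consuming the CE/EE fallback above; only the unpacking is shown, since the concrete signatures of the two helpers live in the respective packages.

# the same two callables come back regardless of which edition provided them
get_default_reviewers_data, validate_default_reviewers = \
    PullRequestModel().get_reviewer_functions()
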
1543 1553
1544 1554 class MergeCheck(object):
1545 1555 """
1546 1556 Perform merge checks and return a check object which stores information
1547 1557 about merge errors and merge conditions.
1548 1558 """
1549 1559 TODO_CHECK = 'todo'
1550 1560 PERM_CHECK = 'perm'
1551 1561 REVIEW_CHECK = 'review'
1552 1562 MERGE_CHECK = 'merge'
1553 1563
1554 1564 def __init__(self):
1555 1565 self.review_status = None
1556 1566 self.merge_possible = None
1557 1567 self.merge_msg = ''
1558 1568 self.failed = None
1559 1569 self.errors = []
1560 1570 self.error_details = OrderedDict()
1561 1571
1562 1572 def push_error(self, error_type, message, error_key, details):
1563 1573 self.failed = True
1564 1574 self.errors.append([error_type, message])
1565 1575 self.error_details[error_key] = dict(
1566 1576 details=details,
1567 1577 error_type=error_type,
1568 1578 message=message
1569 1579 )
1570 1580
1571 1581 @classmethod
1572 1582 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1573 1583 force_shadow_repo_refresh=False):
1574 1584 _ = translator
1575 1585 merge_check = cls()
1576 1586
1577 1587 # permissions to merge
1578 1588 user_allowed_to_merge = PullRequestModel().check_user_merge(
1579 1589 pull_request, auth_user)
1580 1590 if not user_allowed_to_merge:
1581 1591 log.debug("MergeCheck: cannot merge, approval is pending.")
1582 1592
1583 1593 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1584 1594 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1585 1595 if fail_early:
1586 1596 return merge_check
1587 1597
1588 1598 # permission to merge into the target branch
1589 1599 target_commit_id = pull_request.target_ref_parts.commit_id
1590 1600 if pull_request.target_ref_parts.type == 'branch':
1591 1601 branch_name = pull_request.target_ref_parts.name
1592 1602 else:
1593 1603 # for mercurial we can always figure out the branch from the commit
1594 1604 # in case of a bookmark
1595 1605 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1596 1606 branch_name = target_commit.branch
1597 1607
1598 1608 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1599 1609 pull_request.target_repo.repo_name, branch_name)
1600 1610 if branch_perm and branch_perm == 'branch.none':
1601 1611 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1602 1612 branch_name, rule)
1603 1613 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1604 1614 if fail_early:
1605 1615 return merge_check
1606 1616
1607 1617 # review status, must always be present
1608 1618 review_status = pull_request.calculated_review_status()
1609 1619 merge_check.review_status = review_status
1610 1620
1611 1621 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1612 1622 if not status_approved:
1613 1623 log.debug("MergeCheck: cannot merge, approval is pending.")
1614 1624
1615 1625 msg = _('Pull request reviewer approval is pending.')
1616 1626
1617 merge_check.push_error(
1618 'warning', msg, cls.REVIEW_CHECK, review_status)
1627 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1619 1628
1620 1629 if fail_early:
1621 1630 return merge_check
1622 1631
1623 1632 # leftover TODOs
1624 1633 todos = CommentsModel().get_unresolved_todos(pull_request)
1625 1634 if todos:
1626 1635 log.debug("MergeCheck: cannot merge, {} "
1627 "unresolved todos left.".format(len(todos)))
1636 "unresolved TODOs left.".format(len(todos)))
1628 1637
1629 1638 if len(todos) == 1:
1630 1639 msg = _('Cannot merge, {} TODO still not resolved.').format(
1631 1640 len(todos))
1632 1641 else:
1633 1642 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1634 1643 len(todos))
1635 1644
1636 1645 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1637 1646
1638 1647 if fail_early:
1639 1648 return merge_check
1640 1649
1641 1650 # merge possibility check, done via filesystem simulation in the shadow repo
1642 1651 merge_status, msg = PullRequestModel().merge_status(
1643 1652 pull_request, translator=translator,
1644 1653 force_shadow_repo_refresh=force_shadow_repo_refresh)
1645 1654 merge_check.merge_possible = merge_status
1646 1655 merge_check.merge_msg = msg
1647 1656 if not merge_status:
1648 log.debug(
1649 "MergeCheck: cannot merge, pull request merge not possible.")
1657 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1650 1658 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1651 1659
1652 1660 if fail_early:
1653 1661 return merge_check
1654 1662
1655 1663 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1656 1664 return merge_check
1657 1665
1658 1666 @classmethod
1659 1667 def get_merge_conditions(cls, pull_request, translator):
1660 1668 _ = translator
1661 1669 merge_details = {}
1662 1670
1663 1671 model = PullRequestModel()
1664 1672 use_rebase = model._use_rebase_for_merging(pull_request)
1665 1673
1666 1674 if use_rebase:
1667 1675 merge_details['merge_strategy'] = dict(
1668 1676 details={},
1669 1677 message=_('Merge strategy: rebase')
1670 1678 )
1671 1679 else:
1672 1680 merge_details['merge_strategy'] = dict(
1673 1681 details={},
1674 1682 message=_('Merge strategy: explicit merge commit')
1675 1683 )
1676 1684
1677 1685 close_branch = model._close_branch_before_merging(pull_request)
1678 1686 if close_branch:
1679 1687 repo_type = pull_request.target_repo.repo_type
1688 close_msg = ''
1680 1689 if repo_type == 'hg':
1681 1690 close_msg = _('Source branch will be closed after merge.')
1682 1691 elif repo_type == 'git':
1683 1692 close_msg = _('Source branch will be deleted after merge.')
1684 1693
1685 1694 merge_details['close_branch'] = dict(
1686 1695 details={},
1687 1696 message=close_msg
1688 1697 )
1689 1698
1690 1699 return merge_details
1691 1700
1701
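A hedged caller-side sketch of the class above, built only from the `validate()` signature and the attributes assigned in `__init__`/`push_error`; `pull_request`, `auth_user` and a Pyramid `request` are assumed to be in scope, as they would be inside a view.

# illustrative only
check = MergeCheck.validate(
    pull_request, auth_user=auth_user,
    translator=request.translate, fail_early=False)
if check.failed:
    for error_type, message in check.errors:
        log.debug('merge blocked (%s): %s', error_type, message)
elif check.merge_possible:
    log.debug('merge possible: %s', check.merge_msg)
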
1692 1702 ChangeTuple = collections.namedtuple(
1693 1703 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1694 1704
1695 1705 FileChangeTuple = collections.namedtuple(
1696 1706 'FileChangeTuple', ['added', 'modified', 'removed'])
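Finally, a hedged illustration of how these two named tuples are shaped; the commit ids and file names are invented and serve only to make the field layout concrete.

# invented values, for illustration only
changes = ChangeTuple(
    added=['abc1'], common=['def2'], removed=[], total=['abc1', 'def2'])
file_changes = FileChangeTuple(
    added=['file_c'], modified=['file_b'], removed=[])
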
@@ -1,910 +1,949 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiple patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 merge_resp = MergeResponse(
54 54 False, False, None, MergeFailureReason.UNKNOWN,
55 55 metadata={'exception': 'MockError'})
56 56 self.merge_patcher = mock.patch.object(
57 57 BackendClass, 'merge', return_value=merge_resp)
58 58 self.workspace_remove_patcher = mock.patch.object(
59 59 BackendClass, 'cleanup_merge_workspace')
60 60
61 61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 62 self.merge_mock = self.merge_patcher.start()
63 63 self.comment_patcher = mock.patch(
64 64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 65 self.comment_patcher.start()
66 66 self.notification_patcher = mock.patch(
67 67 'rhodecode.model.notification.NotificationModel.create')
68 68 self.notification_patcher.start()
69 69 self.helper_patcher = mock.patch(
70 70 'rhodecode.lib.helpers.route_path')
71 71 self.helper_patcher.start()
72 72
73 73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 74 '_trigger_pull_request_hook')
75 75 self.hook_mock = self.hook_patcher.start()
76 76
77 77 self.invalidation_patcher = mock.patch(
78 78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 79 self.invalidation_mock = self.invalidation_patcher.start()
80 80
81 81 self.pull_request = pr_util.create_pull_request(
82 82 mergeable=True, name_suffix=u'Δ…Δ‡')
83 83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 86 self.repo_id = self.pull_request.target_repo.repo_id
87 87
88 88 @request.addfinalizer
89 89 def cleanup_pull_request():
90 90 calls = [mock.call(
91 91 self.pull_request, self.pull_request.author, 'create')]
92 92 self.hook_mock.assert_has_calls(calls)
93 93
94 94 self.workspace_remove_patcher.stop()
95 95 self.merge_patcher.stop()
96 96 self.comment_patcher.stop()
97 97 self.notification_patcher.stop()
98 98 self.helper_patcher.stop()
99 99 self.hook_patcher.stop()
100 100 self.invalidation_patcher.stop()
101 101
102 102 return self.pull_request
103 103
104 104 def test_get_all(self, pull_request):
105 105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 106 assert isinstance(prs, list)
107 107 assert len(prs) == 1
108 108
109 109 def test_count_all(self, pull_request):
110 110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 111 assert pr_count == 1
112 112
113 113 def test_get_awaiting_review(self, pull_request):
114 114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 115 assert isinstance(prs, list)
116 116 assert len(prs) == 1
117 117
118 118 def test_count_awaiting_review(self, pull_request):
119 119 pr_count = PullRequestModel().count_awaiting_review(
120 120 pull_request.target_repo)
121 121 assert pr_count == 1
122 122
123 123 def test_get_awaiting_my_review(self, pull_request):
124 124 PullRequestModel().update_reviewers(
125 125 pull_request, [(pull_request.author, ['author'], False, [])],
126 126 pull_request.author)
127 127 prs = PullRequestModel().get_awaiting_my_review(
128 128 pull_request.target_repo, user_id=pull_request.author.user_id)
129 129 assert isinstance(prs, list)
130 130 assert len(prs) == 1
131 131
132 132 def test_count_awaiting_my_review(self, pull_request):
133 133 PullRequestModel().update_reviewers(
134 134 pull_request, [(pull_request.author, ['author'], False, [])],
135 135 pull_request.author)
136 136 pr_count = PullRequestModel().count_awaiting_my_review(
137 137 pull_request.target_repo, user_id=pull_request.author.user_id)
138 138 assert pr_count == 1
139 139
140 140 def test_delete_calls_cleanup_merge(self, pull_request):
141 141 repo_id = pull_request.target_repo.repo_id
142 142 PullRequestModel().delete(pull_request, pull_request.author)
143 143
144 144 self.workspace_remove_mock.assert_called_once_with(
145 145 repo_id, self.workspace_id)
146 146
147 147 def test_close_calls_cleanup_and_hook(self, pull_request):
148 148 PullRequestModel().close_pull_request(
149 149 pull_request, pull_request.author)
150 150 repo_id = pull_request.target_repo.repo_id
151 151
152 152 self.workspace_remove_mock.assert_called_once_with(
153 153 repo_id, self.workspace_id)
154 154 self.hook_mock.assert_called_with(
155 155 self.pull_request, self.pull_request.author, 'close')
156 156
157 157 def test_merge_status(self, pull_request):
158 158 self.merge_mock.return_value = MergeResponse(
159 159 True, False, None, MergeFailureReason.NONE)
160 160
161 161 assert pull_request._last_merge_source_rev is None
162 162 assert pull_request._last_merge_target_rev is None
163 163 assert pull_request.last_merge_status is None
164 164
165 165 status, msg = PullRequestModel().merge_status(pull_request)
166 166 assert status is True
167 167 assert msg == 'This pull request can be automatically merged.'
168 168 self.merge_mock.assert_called_with(
169 169 self.repo_id, self.workspace_id,
170 170 pull_request.target_ref_parts,
171 171 pull_request.source_repo.scm_instance(),
172 172 pull_request.source_ref_parts, dry_run=True,
173 173 use_rebase=False, close_branch=False)
174 174
175 175 assert pull_request._last_merge_source_rev == self.source_commit
176 176 assert pull_request._last_merge_target_rev == self.target_commit
177 177 assert pull_request.last_merge_status is MergeFailureReason.NONE
178 178
179 179 self.merge_mock.reset_mock()
180 180 status, msg = PullRequestModel().merge_status(pull_request)
181 181 assert status is True
182 182 assert msg == 'This pull request can be automatically merged.'
183 183 assert self.merge_mock.called is False
184 184
185 185 def test_merge_status_known_failure(self, pull_request):
186 186 self.merge_mock.return_value = MergeResponse(
187 187 False, False, None, MergeFailureReason.MERGE_FAILED)
188 188
189 189 assert pull_request._last_merge_source_rev is None
190 190 assert pull_request._last_merge_target_rev is None
191 191 assert pull_request.last_merge_status is None
192 192
193 193 status, msg = PullRequestModel().merge_status(pull_request)
194 194 assert status is False
195 195 assert msg == 'This pull request cannot be merged because of merge conflicts.'
196 196 self.merge_mock.assert_called_with(
197 197 self.repo_id, self.workspace_id,
198 198 pull_request.target_ref_parts,
199 199 pull_request.source_repo.scm_instance(),
200 200 pull_request.source_ref_parts, dry_run=True,
201 201 use_rebase=False, close_branch=False)
202 202
203 203 assert pull_request._last_merge_source_rev == self.source_commit
204 204 assert pull_request._last_merge_target_rev == self.target_commit
205 205 assert (
206 206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
207 207
208 208 self.merge_mock.reset_mock()
209 209 status, msg = PullRequestModel().merge_status(pull_request)
210 210 assert status is False
211 211 assert msg == 'This pull request cannot be merged because of merge conflicts.'
212 212 assert self.merge_mock.called is False
213 213
214 214 def test_merge_status_unknown_failure(self, pull_request):
215 215 self.merge_mock.return_value = MergeResponse(
216 216 False, False, None, MergeFailureReason.UNKNOWN,
217 217 metadata={'exception': 'MockError'})
218 218
219 219 assert pull_request._last_merge_source_rev is None
220 220 assert pull_request._last_merge_target_rev is None
221 221 assert pull_request.last_merge_status is None
222 222
223 223 status, msg = PullRequestModel().merge_status(pull_request)
224 224 assert status is False
225 225 assert msg == (
226 226 'This pull request cannot be merged because of an unhandled exception. '
227 227 'MockError')
228 228 self.merge_mock.assert_called_with(
229 229 self.repo_id, self.workspace_id,
230 230 pull_request.target_ref_parts,
231 231 pull_request.source_repo.scm_instance(),
232 232 pull_request.source_ref_parts, dry_run=True,
233 233 use_rebase=False, close_branch=False)
234 234
235 235 assert pull_request._last_merge_source_rev is None
236 236 assert pull_request._last_merge_target_rev is None
237 237 assert pull_request.last_merge_status is None
238 238
239 239 self.merge_mock.reset_mock()
240 240 status, msg = PullRequestModel().merge_status(pull_request)
241 241 assert status is False
242 242 assert msg == (
243 243 'This pull request cannot be merged because of an unhandled exception. '
244 244 'MockError')
245 245 assert self.merge_mock.called is True
246 246
247 247 def test_merge_status_when_target_is_locked(self, pull_request):
248 248 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
249 249 status, msg = PullRequestModel().merge_status(pull_request)
250 250 assert status is False
251 251 assert msg == (
252 252 'This pull request cannot be merged because the target repository '
253 253 'is locked by user:1.')
254 254
255 255 def test_merge_status_requirements_check_target(self, pull_request):
256 256
257 257 def has_largefiles(self, repo):
258 258 return repo == pull_request.source_repo
259 259
260 patcher = mock.patch.object(
261 PullRequestModel, '_has_largefiles', has_largefiles)
260 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
262 261 with patcher:
263 262 status, msg = PullRequestModel().merge_status(pull_request)
264 263
265 264 assert status is False
266 265 assert msg == 'Target repository large files support is disabled.'
267 266
268 267 def test_merge_status_requirements_check_source(self, pull_request):
269 268
270 269 def has_largefiles(self, repo):
271 270 return repo == pull_request.target_repo
272 271
273 patcher = mock.patch.object(
274 PullRequestModel, '_has_largefiles', has_largefiles)
272 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
275 273 with patcher:
276 274 status, msg = PullRequestModel().merge_status(pull_request)
277 275
278 276 assert status is False
279 277 assert msg == 'Source repository large files support is disabled.'
280 278
281 279 def test_merge(self, pull_request, merge_extras):
282 280 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
283 281 merge_ref = Reference(
284 282 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
285 283 self.merge_mock.return_value = MergeResponse(
286 284 True, True, merge_ref, MergeFailureReason.NONE)
287 285
288 286 merge_extras['repository'] = pull_request.target_repo.repo_name
289 287 PullRequestModel().merge_repo(
290 288 pull_request, pull_request.author, extras=merge_extras)
291 289
292 290 message = (
293 291 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
294 292 u'\n\n {pr_title}'.format(
295 293 pr_id=pull_request.pull_request_id,
296 294 source_repo=safe_unicode(
297 295 pull_request.source_repo.scm_instance().name),
298 296 source_ref_name=pull_request.source_ref_parts.name,
299 297 pr_title=safe_unicode(pull_request.title)
300 298 )
301 299 )
302 300 self.merge_mock.assert_called_with(
303 301 self.repo_id, self.workspace_id,
304 302 pull_request.target_ref_parts,
305 303 pull_request.source_repo.scm_instance(),
306 304 pull_request.source_ref_parts,
307 305 user_name=user.short_contact, user_email=user.email, message=message,
308 306 use_rebase=False, close_branch=False
309 307 )
310 308 self.invalidation_mock.assert_called_once_with(
311 309 pull_request.target_repo.repo_name)
312 310
313 311 self.hook_mock.assert_called_with(
314 312 self.pull_request, self.pull_request.author, 'merge')
315 313
316 314 pull_request = PullRequest.get(pull_request.pull_request_id)
317 assert (
318 pull_request.merge_rev ==
319 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
315 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
316
317 def test_merge_with_status_lock(self, pull_request, merge_extras):
318 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
319 merge_ref = Reference(
320 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
321 self.merge_mock.return_value = MergeResponse(
322 True, True, merge_ref, MergeFailureReason.NONE)
323
324 merge_extras['repository'] = pull_request.target_repo.repo_name
325
326 with pull_request.set_state(PullRequest.STATE_UPDATING):
327 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
328 PullRequestModel().merge_repo(
329 pull_request, pull_request.author, extras=merge_extras)
330
331 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
332
333 message = (
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
335 u'\n\n {pr_title}'.format(
336 pr_id=pull_request.pull_request_id,
337 source_repo=safe_unicode(
338 pull_request.source_repo.scm_instance().name),
339 source_ref_name=pull_request.source_ref_parts.name,
340 pr_title=safe_unicode(pull_request.title)
341 )
342 )
343 self.merge_mock.assert_called_with(
344 self.repo_id, self.workspace_id,
345 pull_request.target_ref_parts,
346 pull_request.source_repo.scm_instance(),
347 pull_request.source_ref_parts,
348 user_name=user.short_contact, user_email=user.email, message=message,
349 use_rebase=False, close_branch=False
350 )
351 self.invalidation_mock.assert_called_once_with(
352 pull_request.target_repo.repo_name)
353
354 self.hook_mock.assert_called_with(
355 self.pull_request, self.pull_request.author, 'merge')
356
357 pull_request = PullRequest.get(pull_request.pull_request_id)
358 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
320 359
321 360 def test_merge_failed(self, pull_request, merge_extras):
322 361 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
323 362 merge_ref = Reference(
324 363 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
325 364 self.merge_mock.return_value = MergeResponse(
326 365 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
327 366
328 367 merge_extras['repository'] = pull_request.target_repo.repo_name
329 368 PullRequestModel().merge_repo(
330 369 pull_request, pull_request.author, extras=merge_extras)
331 370
332 371 message = (
333 372 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
334 373 u'\n\n {pr_title}'.format(
335 374 pr_id=pull_request.pull_request_id,
336 375 source_repo=safe_unicode(
337 376 pull_request.source_repo.scm_instance().name),
338 377 source_ref_name=pull_request.source_ref_parts.name,
339 378 pr_title=safe_unicode(pull_request.title)
340 379 )
341 380 )
342 381 self.merge_mock.assert_called_with(
343 382 self.repo_id, self.workspace_id,
344 383 pull_request.target_ref_parts,
345 384 pull_request.source_repo.scm_instance(),
346 385 pull_request.source_ref_parts,
347 386 user_name=user.short_contact, user_email=user.email, message=message,
348 387 use_rebase=False, close_branch=False
349 388 )
350 389
351 390 pull_request = PullRequest.get(pull_request.pull_request_id)
352 391 assert self.invalidation_mock.called is False
353 392 assert pull_request.merge_rev is None
354 393
355 394 def test_get_commit_ids(self, pull_request):
356 395 # The PR has not been merged yet, so expect an exception
357 396 with pytest.raises(ValueError):
358 397 PullRequestModel()._get_commit_ids(pull_request)
359 398
360 399 # Merge revision is in the revisions list
361 400 pull_request.merge_rev = pull_request.revisions[0]
362 401 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
363 402 assert commit_ids == pull_request.revisions
364 403
365 404 # Merge revision is not in the revisions list
366 405 pull_request.merge_rev = 'f000' * 10
367 406 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
368 407 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
369 408
370 409 def test_get_diff_from_pr_version(self, pull_request):
371 410 source_repo = pull_request.source_repo
372 411 source_ref_id = pull_request.source_ref_parts.commit_id
373 412 target_ref_id = pull_request.target_ref_parts.commit_id
374 413 diff = PullRequestModel()._get_diff_from_pr_or_version(
375 414 source_repo, source_ref_id, target_ref_id,
376 415 hide_whitespace_changes=False, diff_context=6)
377 416 assert 'file_1' in diff.raw
378 417
379 418 def test_generate_title_returns_unicode(self):
380 419 title = PullRequestModel().generate_pullrequest_title(
381 420 source='source-dummy',
382 421 source_ref='source-ref-dummy',
383 422 target='target-dummy',
384 423 )
385 424 assert type(title) == unicode
386 425
387 426
388 427 @pytest.mark.usefixtures('config_stub')
389 428 class TestIntegrationMerge(object):
390 429 @pytest.mark.parametrize('extra_config', (
391 430 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
392 431 ))
393 432 def test_merge_triggers_push_hooks(
394 433 self, pr_util, user_admin, capture_rcextensions, merge_extras,
395 434 extra_config):
396 435
397 436 pull_request = pr_util.create_pull_request(
398 437 approved=True, mergeable=True)
399 438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
400 439 merge_extras['repository'] = pull_request.target_repo.repo_name
401 440 Session().commit()
402 441
403 442 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
404 443 merge_state = PullRequestModel().merge_repo(
405 444 pull_request, user_admin, extras=merge_extras)
406 445
407 446 assert merge_state.executed
408 447 assert '_pre_push_hook' in capture_rcextensions
409 448 assert '_push_hook' in capture_rcextensions
410 449
411 450 def test_merge_can_be_rejected_by_pre_push_hook(
412 451 self, pr_util, user_admin, capture_rcextensions, merge_extras):
413 452 pull_request = pr_util.create_pull_request(
414 453 approved=True, mergeable=True)
415 454 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
416 455 merge_extras['repository'] = pull_request.target_repo.repo_name
417 456 Session().commit()
418 457
419 458 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
420 459 pre_pull.side_effect = RepositoryError("Disallow push!")
421 460 merge_status = PullRequestModel().merge_repo(
422 461 pull_request, user_admin, extras=merge_extras)
423 462
424 463 assert not merge_status.executed
425 464 assert 'pre_push' not in capture_rcextensions
426 465 assert 'post_push' not in capture_rcextensions
427 466
428 467 def test_merge_fails_if_target_is_locked(
429 468 self, pr_util, user_regular, merge_extras):
430 469 pull_request = pr_util.create_pull_request(
431 470 approved=True, mergeable=True)
432 471 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
433 472 pull_request.target_repo.locked = locked_by
434 473 # TODO: johbo: Check if this can work based on the database, currently
435 474 # all data is pre-computed, that's why just updating the DB is not
436 475 # enough.
437 476 merge_extras['locked_by'] = locked_by
438 477 merge_extras['repository'] = pull_request.target_repo.repo_name
439 478 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
440 479 Session().commit()
441 480 merge_status = PullRequestModel().merge_repo(
442 481 pull_request, user_regular, extras=merge_extras)
443 482 assert not merge_status.executed
444 483
445 484
446 485 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
447 486 (False, 1, 0),
448 487 (True, 0, 1),
449 488 ])
450 489 def test_outdated_comments(
451 490 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
452 491 pull_request = pr_util.create_pull_request()
453 492 pr_util.create_inline_comment(file_path='not_in_updated_diff')
454 493
455 494 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
456 495 pr_util.add_one_commit()
457 496 assert_inline_comments(
458 497 pull_request, visible=inlines_count, outdated=outdated_count)
459 498 outdated_comment_mock.assert_called_with(pull_request)
460 499
461 500
462 501 @pytest.mark.parametrize('mr_type, expected_msg', [
463 502 (MergeFailureReason.NONE,
464 503 'This pull request can be automatically merged.'),
465 504 (MergeFailureReason.UNKNOWN,
466 505 'This pull request cannot be merged because of an unhandled exception. CRASH'),
467 506 (MergeFailureReason.MERGE_FAILED,
468 507 'This pull request cannot be merged because of merge conflicts.'),
469 508 (MergeFailureReason.PUSH_FAILED,
470 509 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
471 510 (MergeFailureReason.TARGET_IS_NOT_HEAD,
472 511 'This pull request cannot be merged because the target `ref_name` is not a head.'),
473 512 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
474 513 'This pull request cannot be merged because the source contains more branches than the target.'),
475 514 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
476 515 'This pull request cannot be merged because the target has multiple heads: `a,b,c`.'),
477 516 (MergeFailureReason.TARGET_IS_LOCKED,
478 517 'This pull request cannot be merged because the target repository is locked by user:123.'),
479 518 (MergeFailureReason.MISSING_TARGET_REF,
480 519 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
481 520 (MergeFailureReason.MISSING_SOURCE_REF,
482 521 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
483 522 (MergeFailureReason.SUBREPO_MERGE_FAILED,
484 523 'This pull request cannot be merged because of conflicts related to sub repositories.'),
485 524
486 525 ])
487 526 def test_merge_response_message(mr_type, expected_msg):
488 527 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
489 528 metadata = {
490 529 'exception': "CRASH",
491 530 'target': 'some-repo',
492 531 'merge_commit': 'merge_commit',
493 532 'target_ref': merge_ref,
494 533 'source_ref': merge_ref,
495 534 'heads': ','.join(['a', 'b', 'c']),
496 535 'locked_by': 'user:123'}
497 536
498 537 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
499 538 assert merge_response.merge_status_message == expected_msg
500 539
501 540
502 541 @pytest.fixture
503 542 def merge_extras(user_regular):
504 543 """
505 544 Context for the vcs operation when running a merge.
506 545 """
507 546 extras = {
508 547 'ip': '127.0.0.1',
509 548 'username': user_regular.username,
510 549 'user_id': user_regular.user_id,
511 550 'action': 'push',
512 551 'repository': 'fake_target_repo_name',
513 552 'scm': 'git',
514 553 'config': 'fake_config_ini_path',
515 554 'repo_store': '',
516 555 'make_lock': None,
517 556 'locked_by': [None, None, None],
518 557 'server_url': 'http://test.example.com:5000',
519 558 'hooks': ['push', 'pull'],
520 559 'is_shadow_repo': False,
521 560 }
522 561 return extras
523 562
524 563
525 564 @pytest.mark.usefixtures('config_stub')
526 565 class TestUpdateCommentHandling(object):
527 566
528 567 @pytest.fixture(autouse=True, scope='class')
529 568 def enable_outdated_comments(self, request, baseapp):
530 569 config_patch = mock.patch.dict(
531 570 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
532 571 config_patch.start()
533 572
534 573 @request.addfinalizer
535 574 def cleanup():
536 575 config_patch.stop()
537 576
538 577 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
539 578 commits = [
540 579 {'message': 'a'},
541 580 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
542 581 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
543 582 ]
544 583 pull_request = pr_util.create_pull_request(
545 584 commits=commits, target_head='a', source_head='b', revisions=['b'])
546 585 pr_util.create_inline_comment(file_path='file_b')
547 586 pr_util.add_one_commit(head='c')
548 587
549 588 assert_inline_comments(pull_request, visible=1, outdated=0)
550 589
551 590 def test_comment_stays_unflagged_on_change_above(self, pr_util):
552 591 original_content = ''.join(
553 592 ['line {}\n'.format(x) for x in range(1, 11)])
554 593 updated_content = 'new_line_at_top\n' + original_content
555 594 commits = [
556 595 {'message': 'a'},
557 596 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
558 597 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
559 598 ]
560 599 pull_request = pr_util.create_pull_request(
561 600 commits=commits, target_head='a', source_head='b', revisions=['b'])
562 601
563 602 with outdated_comments_patcher():
564 603 comment = pr_util.create_inline_comment(
565 604 line_no=u'n8', file_path='file_b')
566 605 pr_util.add_one_commit(head='c')
567 606
568 607 assert_inline_comments(pull_request, visible=1, outdated=0)
569 608 assert comment.line_no == u'n9'
570 609
571 610 def test_comment_stays_unflagged_on_change_below(self, pr_util):
572 611 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
573 612 updated_content = original_content + 'new_line_at_end\n'
574 613 commits = [
575 614 {'message': 'a'},
576 615 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
577 616 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
578 617 ]
579 618 pull_request = pr_util.create_pull_request(
580 619 commits=commits, target_head='a', source_head='b', revisions=['b'])
581 620 pr_util.create_inline_comment(file_path='file_b')
582 621 pr_util.add_one_commit(head='c')
583 622
584 623 assert_inline_comments(pull_request, visible=1, outdated=0)
585 624
586 625 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
587 626 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
588 627 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
589 628 change_lines = list(base_lines)
590 629 change_lines.insert(6, 'line 6a added\n')
591 630
592 631 # Changes on the last line of sight
593 632 update_lines = list(change_lines)
594 633 update_lines[0] = 'line 1 changed\n'
595 634 update_lines[-1] = 'line 12 changed\n'
596 635
597 636 def file_b(lines):
598 637 return FileNode('file_b', ''.join(lines))
599 638
600 639 commits = [
601 640 {'message': 'a', 'added': [file_b(base_lines)]},
602 641 {'message': 'b', 'changed': [file_b(change_lines)]},
603 642 {'message': 'c', 'changed': [file_b(update_lines)]},
604 643 ]
605 644
606 645 pull_request = pr_util.create_pull_request(
607 646 commits=commits, target_head='a', source_head='b', revisions=['b'])
608 647 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
609 648
610 649 with outdated_comments_patcher():
611 650 pr_util.add_one_commit(head='c')
612 651 assert_inline_comments(pull_request, visible=0, outdated=1)
613 652
614 653 @pytest.mark.parametrize("change, content", [
615 654 ('changed', 'changed\n'),
616 655 ('removed', ''),
617 656 ], ids=['changed', 'removed'])
618 657 def test_comment_flagged_on_change(self, pr_util, change, content):
619 658 commits = [
620 659 {'message': 'a'},
621 660 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
622 661 {'message': 'c', change: [FileNode('file_b', content)]},
623 662 ]
624 663 pull_request = pr_util.create_pull_request(
625 664 commits=commits, target_head='a', source_head='b', revisions=['b'])
626 665 pr_util.create_inline_comment(file_path='file_b')
627 666
628 667 with outdated_comments_patcher():
629 668 pr_util.add_one_commit(head='c')
630 669 assert_inline_comments(pull_request, visible=0, outdated=1)
631 670
632 671
633 672 @pytest.mark.usefixtures('config_stub')
634 673 class TestUpdateChangedFiles(object):
635 674
636 675 def test_no_changes_on_unchanged_diff(self, pr_util):
637 676 commits = [
638 677 {'message': 'a'},
639 678 {'message': 'b',
640 679 'added': [FileNode('file_b', 'test_content b\n')]},
641 680 {'message': 'c',
642 681 'added': [FileNode('file_c', 'test_content c\n')]},
643 682 ]
644 683 # open a PR from a to b, adding file_b
645 684 pull_request = pr_util.create_pull_request(
646 685 commits=commits, target_head='a', source_head='b', revisions=['b'],
647 686 name_suffix='per-file-review')
648 687
649 688 # modify PR adding new file file_c
650 689 pr_util.add_one_commit(head='c')
651 690
652 691 assert_pr_file_changes(
653 692 pull_request,
654 693 added=['file_c'],
655 694 modified=[],
656 695 removed=[])
657 696
658 697 def test_modify_and_undo_modification_diff(self, pr_util):
659 698 commits = [
660 699 {'message': 'a'},
661 700 {'message': 'b',
662 701 'added': [FileNode('file_b', 'test_content b\n')]},
663 702 {'message': 'c',
664 703 'changed': [FileNode('file_b', 'test_content b modified\n')]},
665 704 {'message': 'd',
666 705 'changed': [FileNode('file_b', 'test_content b\n')]},
667 706 ]
668 707 # open a PR from a to b, adding file_b
669 708 pull_request = pr_util.create_pull_request(
670 709 commits=commits, target_head='a', source_head='b', revisions=['b'],
671 710 name_suffix='per-file-review')
672 711
673 712 # modify PR modifying file file_b
674 713 pr_util.add_one_commit(head='c')
675 714
676 715 assert_pr_file_changes(
677 716 pull_request,
678 717 added=[],
679 718 modified=['file_b'],
680 719 removed=[])
681 720
682 721 # move the head again to d, which rolls back the change,
683 722 # meaning we should indicate no changes
684 723 pr_util.add_one_commit(head='d')
685 724
686 725 assert_pr_file_changes(
687 726 pull_request,
688 727 added=[],
689 728 modified=[],
690 729 removed=[])
691 730
692 731 def test_updated_all_files_in_pr(self, pr_util):
693 732 commits = [
694 733 {'message': 'a'},
695 734 {'message': 'b', 'added': [
696 735 FileNode('file_a', 'test_content a\n'),
697 736 FileNode('file_b', 'test_content b\n'),
698 737 FileNode('file_c', 'test_content c\n')]},
699 738 {'message': 'c', 'changed': [
700 739 FileNode('file_a', 'test_content a changed\n'),
701 740 FileNode('file_b', 'test_content b changed\n'),
702 741 FileNode('file_c', 'test_content c changed\n')]},
703 742 ]
704 743 # open a PR from a to b, changing 3 files
705 744 pull_request = pr_util.create_pull_request(
706 745 commits=commits, target_head='a', source_head='b', revisions=['b'],
707 746 name_suffix='per-file-review')
708 747
709 748 pr_util.add_one_commit(head='c')
710 749
711 750 assert_pr_file_changes(
712 751 pull_request,
713 752 added=[],
714 753 modified=['file_a', 'file_b', 'file_c'],
715 754 removed=[])
716 755
717 756 def test_updated_and_removed_all_files_in_pr(self, pr_util):
718 757 commits = [
719 758 {'message': 'a'},
720 759 {'message': 'b', 'added': [
721 760 FileNode('file_a', 'test_content a\n'),
722 761 FileNode('file_b', 'test_content b\n'),
723 762 FileNode('file_c', 'test_content c\n')]},
724 763 {'message': 'c', 'removed': [
725 764 FileNode('file_a', 'test_content a changed\n'),
726 765 FileNode('file_b', 'test_content b changed\n'),
727 766 FileNode('file_c', 'test_content c changed\n')]},
728 767 ]
729 768 # open a PR from a to b, removing 3 files
730 769 pull_request = pr_util.create_pull_request(
731 770 commits=commits, target_head='a', source_head='b', revisions=['b'],
732 771 name_suffix='per-file-review')
733 772
734 773 pr_util.add_one_commit(head='c')
735 774
736 775 assert_pr_file_changes(
737 776 pull_request,
738 777 added=[],
739 778 modified=[],
740 779 removed=['file_a', 'file_b', 'file_c'])
741 780
742 781
743 782 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
744 783 model = PullRequestModel()
745 784 pull_request = pr_util.create_pull_request()
746 785 pr_util.update_source_repository()
747 786
748 787 model.update_commits(pull_request)
749 788
750 789 # Expect that it has a version entry now
751 790 assert len(model.get_versions(pull_request)) == 1
752 791
753 792
754 793 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
755 794 pull_request = pr_util.create_pull_request()
756 795 model = PullRequestModel()
757 796 model.update_commits(pull_request)
758 797
759 798 # Expect that it still has no versions
760 799 assert len(model.get_versions(pull_request)) == 0
761 800
762 801
763 802 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
764 803 model = PullRequestModel()
765 804 pull_request = pr_util.create_pull_request()
766 805 comment = pr_util.create_comment()
767 806 pr_util.update_source_repository()
768 807
769 808 model.update_commits(pull_request)
770 809
771 810 # Expect that the comment is linked to the pr version now
772 811 assert comment.pull_request_version == model.get_versions(pull_request)[0]
773 812
774 813
775 814 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
776 815 model = PullRequestModel()
777 816 pull_request = pr_util.create_pull_request()
778 817 pr_util.update_source_repository()
779 818 pr_util.update_source_repository()
780 819
781 820 model.update_commits(pull_request)
782 821
783 822 # Expect to find a new comment about the change
784 823 expected_message = textwrap.dedent(
785 824 """\
786 825 Pull request updated. Auto status change to |under_review|
787 826
788 827 .. role:: added
789 828 .. role:: removed
790 829 .. parsed-literal::
791 830
792 831 Changed commits:
793 832 * :added:`1 added`
794 833 * :removed:`0 removed`
795 834
796 835 Changed files:
797 836 * `A file_2 <#a_c--92ed3b5f07b4>`_
798 837
799 838 .. |under_review| replace:: *"Under Review"*"""
800 839 )
801 840 pull_request_comments = sorted(
802 841 pull_request.comments, key=lambda c: c.modified_at)
803 842 update_comment = pull_request_comments[-1]
804 843 assert update_comment.text == expected_message
805 844
806 845
807 846 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
808 847 pull_request = pr_util.create_pull_request()
809 848
810 849 # Avoiding default values
811 850 pull_request.status = PullRequest.STATUS_CLOSED
812 851 pull_request._last_merge_source_rev = "0" * 40
813 852 pull_request._last_merge_target_rev = "1" * 40
814 853 pull_request.last_merge_status = 1
815 854 pull_request.merge_rev = "2" * 40
816 855
817 856 # Remember automatic values
818 857 created_on = pull_request.created_on
819 858 updated_on = pull_request.updated_on
820 859
821 860 # Create a new version of the pull request
822 861 version = PullRequestModel()._create_version_from_snapshot(pull_request)
823 862
824 863 # Check attributes
825 864 assert version.title == pr_util.create_parameters['title']
826 865 assert version.description == pr_util.create_parameters['description']
827 866 assert version.status == PullRequest.STATUS_CLOSED
828 867
829 868 # version snapshots get a fresh created_on timestamp
830 869 assert version.created_on != created_on
831 870
832 871 assert version.updated_on == updated_on
833 872 assert version.user_id == pull_request.user_id
834 873 assert version.revisions == pr_util.create_parameters['revisions']
835 874 assert version.source_repo == pr_util.source_repository
836 875 assert version.source_ref == pr_util.create_parameters['source_ref']
837 876 assert version.target_repo == pr_util.target_repository
838 877 assert version.target_ref == pr_util.create_parameters['target_ref']
839 878 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
840 879 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
841 880 assert version.last_merge_status == pull_request.last_merge_status
842 881 assert version.merge_rev == pull_request.merge_rev
843 882 assert version.pull_request == pull_request
844 883
845 884
846 885 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
847 886 version1 = pr_util.create_version_of_pull_request()
848 887 comment_linked = pr_util.create_comment(linked_to=version1)
849 888 comment_unlinked = pr_util.create_comment()
850 889 version2 = pr_util.create_version_of_pull_request()
851 890
852 891 PullRequestModel()._link_comments_to_version(version2)
853 892
854 893 # Expect that only the new comment is linked to version2
855 894 assert (
856 895 comment_unlinked.pull_request_version_id ==
857 896 version2.pull_request_version_id)
858 897 assert (
859 898 comment_linked.pull_request_version_id ==
860 899 version1.pull_request_version_id)
861 900 assert (
862 901 comment_unlinked.pull_request_version_id !=
863 902 comment_linked.pull_request_version_id)
864 903
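# A minimal sketch of the rule the test above relies on, assuming comments
# expose a nullable pull_request_version_id attribute; this is not the actual
# PullRequestModel._link_comments_to_version implementation, only an
# illustration of "only unlinked comments get attached to the new version".
def sketch_link_comments_to_version(comments, version):
    for comment in comments:
        # comments already attached to an older version keep their link
        if comment.pull_request_version_id is None:
            comment.pull_request_version_id = version.pull_request_version_id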
865 904
866 905 def test_calculate_commits():
867 906 old_ids = [1, 2, 3]
868 907 new_ids = [1, 3, 4, 5]
869 908 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
870 909 assert change.added == [4, 5]
871 910 assert change.common == [1, 3]
872 911 assert change.removed == [2]
873 912 assert change.total == [1, 3, 4, 5]
874 913
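# Below is a minimal sketch, using plain list comprehensions, of the kind of
# commit-id bookkeeping that test_calculate_commits above exercises; it is not
# the actual PullRequestModel._calculate_commit_id_changes implementation, and
# the ChangeTuple name is an illustrative assumption.
import collections

ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

def sketch_calculate_commit_id_changes(old_ids, new_ids):
    # commits present only in the new set were added by the update
    added = [cid for cid in new_ids if cid not in old_ids]
    # commits present in both sets are carried over unchanged
    common = [cid for cid in new_ids if cid in old_ids]
    # commits that disappeared from the new set were removed
    removed = [cid for cid in old_ids if cid not in new_ids]
    # total keeps the ordering of the new commit list
    return ChangeTuple(added, common, removed, list(new_ids))

# For the ids in test_calculate_commits this yields added=[4, 5],
# common=[1, 3], removed=[2] and total=[1, 3, 4, 5].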
875 914
876 915 def assert_inline_comments(pull_request, visible=None, outdated=None):
877 916 if visible is not None:
878 917 inline_comments = CommentsModel().get_inline_comments(
879 918 pull_request.target_repo.repo_id, pull_request=pull_request)
880 919 inline_cnt = CommentsModel().get_inline_comments_count(
881 920 inline_comments)
882 921 assert inline_cnt == visible
883 922 if outdated is not None:
884 923 outdated_comments = CommentsModel().get_outdated_comments(
885 924 pull_request.target_repo.repo_id, pull_request)
886 925 assert len(outdated_comments) == outdated
887 926
888 927
889 928 def assert_pr_file_changes(
890 929 pull_request, added=None, modified=None, removed=None):
891 930 pr_versions = PullRequestModel().get_versions(pull_request)
892 931 # always use the first version, i.e. the original PR, to calculate changes
893 932 pull_request_version = pr_versions[0]
894 933 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
895 934 pull_request, pull_request_version)
896 935 file_changes = PullRequestModel()._calculate_file_changes(
897 936 old_diff_data, new_diff_data)
898 937
899 938 assert added == file_changes.added, \
900 939 'expected added:%s vs value:%s' % (added, file_changes.added)
901 940 assert modified == file_changes.modified, \
902 941 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
903 942 assert removed == file_changes.removed, \
904 943 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
905 944
906 945
907 946 def outdated_comments_patcher(use_outdated=True):
908 947 return mock.patch.object(
909 948 CommentsModel, 'use_outdated_comments',
910 949 return_value=use_outdated)
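# A hypothetical usage sketch (not a test that exists in this module), assuming
# the pr_util fixture used elsewhere in this file: because mock.patch.object
# returns a patcher object, the helper above can be used as a context manager
# (or a decorator) to pin CommentsModel.use_outdated_comments to a fixed value
# for the duration of a test.
def sketch_test_with_outdated_comments_disabled(pr_util):
    pull_request = pr_util.create_pull_request()
    with outdated_comments_patcher(use_outdated=False):
        # inside the block the patched method returns the forced value
        assert not CommentsModel.use_outdated_comments(pull_request)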