pull-requests: handle exceptions in state change and improve logging.
marcink
r3828:fde0bece stable
@@ -1,5161 +1,5175 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import warnings
33 33 import ipaddress
34 34 import functools
35 35 import traceback
36 36 import collections
37 37
38 38 from sqlalchemy import (
39 39 or_, and_, not_, func, TypeDecorator, event,
40 40 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
41 41 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
42 42 Text, Float, PickleType)
43 43 from sqlalchemy.sql.expression import true, false, case
44 44 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
45 45 from sqlalchemy.orm import (
46 46 relationship, joinedload, class_mapper, validates, aliased)
47 47 from sqlalchemy.ext.declarative import declared_attr
48 48 from sqlalchemy.ext.hybrid import hybrid_property
49 49 from sqlalchemy.exc import IntegrityError # pragma: no cover
50 50 from sqlalchemy.dialects.mysql import LONGTEXT
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52 52 from pyramid import compat
53 53 from pyramid.threadlocal import get_current_request
54 54 from webhelpers.text import collapse, remove_formatting
55 55
56 56 from rhodecode.translation import _
57 57 from rhodecode.lib.vcs import get_vcs_instance
58 58 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
59 59 from rhodecode.lib.utils2 import (
60 60 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
61 61 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
62 62 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
63 63 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
64 64 JsonRaw
65 65 from rhodecode.lib.ext_json import json
66 66 from rhodecode.lib.caching_query import FromCache
67 67 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
68 68 from rhodecode.lib.encrypt2 import Encryptor
69 69 from rhodecode.model.meta import Base, Session
70 70
71 71 URL_SEP = '/'
72 72 log = logging.getLogger(__name__)
73 73
74 74 # =============================================================================
75 75 # BASE CLASSES
76 76 # =============================================================================
77 77
78 78 # this is propagated from the .ini file setting rhodecode.encrypted_values.secret,
79 79 # or beaker.session.secret if the former is not set,
80 80 # and initialized in environment.py
81 81 ENCRYPTION_KEY = None
82 82
83 83 # used to sort permissions by type; '#' is used here because it is not allowed
84 84 # in usernames and sorts very early in string.printable.
85 85 PERMISSION_TYPE_SORT = {
86 86 'admin': '####',
87 87 'write': '###',
88 88 'read': '##',
89 89 'none': '#',
90 90 }
91 91
92 92
93 93 def display_user_sort(obj):
94 94 """
95 95 Sort function used to sort permissions in .permissions() function of
96 96 Repository, RepoGroup, UserGroup. Also it puts the default user in front
97 97 of all other resources.
98 98 """
99 99
100 100 if obj.username == User.DEFAULT_USER:
101 101 return '#####'
102 102 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
103 103 return prefix + obj.username
104 104
105 105
106 106 def display_user_group_sort(obj):
107 107 """
108 108 Sort function used to sort permissions in .permissions() function of
109 109 Repository, RepoGroup, UserGroup. Also it puts the default user in front
110 110 of all other resources.
111 111 """
112 112
113 113 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
114 114 return prefix + obj.users_group_name
115 115
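# Usage sketch (illustrative, not part of the original file): permission rows
# built by the .permissions() methods further down are ordered with these
# helpers; `perm_rows` here is a hypothetical list of AttributeDict rows
# carrying .username and .permission.
def _example_sort_permission_rows(perm_rows):
    # default user first, then admin/write/read/none holders, each group
    # sorted alphabetically by username
    return sorted(perm_rows, key=display_user_sort)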
116 116
117 117 def _hash_key(k):
118 118 return sha1_safe(k)
119 119
120 120
121 121 def in_filter_generator(qry, items, limit=500):
122 122 """
123 123 Splits a large IN() clause into multiple chunked IN() clauses combined with OR,
124 124 e.g.::
125 125 cnt = Repository.query().filter(
126 126 or_(
127 127 *in_filter_generator(Repository.repo_id, range(100000))
128 128 )).count()
129 129 """
130 130 if not items:
131 131 # an empty list would produce an empty IN() query, which might cause
132 132 # security issues and lead to hidden, unpleasant results
133 133 items = [-1]
134 134
135 135 parts = []
136 136 for chunk in xrange(0, len(items), limit):
137 137 parts.append(
138 138 qry.in_(items[chunk: chunk + limit])
139 139 )
140 140
141 141 return parts
142 142
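# Usage sketch (illustrative, not part of the original file): the chunked IN()
# parts are meant to be combined with or_(), as in the docstring above;
# `repo_ids` is a hypothetical list of ids.
def _example_chunked_in_filter(repo_ids):
    return Repository.query().filter(
        or_(*in_filter_generator(Repository.repo_id, repo_ids))).count()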
143 143
144 144 base_table_args = {
145 145 'extend_existing': True,
146 146 'mysql_engine': 'InnoDB',
147 147 'mysql_charset': 'utf8',
148 148 'sqlite_autoincrement': True
149 149 }
150 150
151 151
152 152 class EncryptedTextValue(TypeDecorator):
153 153 """
154 154 Special column for encrypted long text data, use like::
155 155
156 156 value = Column("encrypted_value", EncryptedTextValue(), nullable=False)
157 157
158 158 This column is intelligent, so if the value is in unencrypted form it returns
159 159 the unencrypted form, but on save it always encrypts
160 160 """
161 161 impl = Text
162 162
163 163 def process_bind_param(self, value, dialect):
164 164 """
165 165 Setter for storing value
166 166 """
167 167 import rhodecode
168 168 if not value:
169 169 return value
170 170
171 171 # protect against double encrypting if value is already encrypted
172 172 if value.startswith('enc$aes$') \
173 173 or value.startswith('enc$aes_hmac$') \
174 174 or value.startswith('enc2$'):
175 175 raise ValueError('value needs to be in unencrypted format, '
176 176 'i.e. not starting with enc$ or enc2$')
177 177
178 178 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
179 179 if algo == 'aes':
180 180 return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
181 181 elif algo == 'fernet':
182 182 return Encryptor(ENCRYPTION_KEY).encrypt(value)
183 183 else:
184 184 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
185 185
186 186 def process_result_value(self, value, dialect):
187 187 """
188 188 Getter for retrieving value
189 189 """
190 190
191 191 import rhodecode
192 192 if not value:
193 193 return value
194 194
195 195 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
196 196 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
197 197 if algo == 'aes':
198 198 decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
199 199 elif algo == 'fernet':
200 200 return Encryptor(ENCRYPTION_KEY).decrypt(value)
201 201 else:
202 202 raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
203 203 return decrypted_data
204 204
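# Usage sketch (illustrative, not part of the original file): as a
# TypeDecorator the encryption is applied transparently on bind/result, the
# same way RhodeCodeSetting drives it explicitly below. Assumes ENCRYPTION_KEY
# was initialized from the .ini configuration.
def _example_encrypted_roundtrip(plain_value):
    cipher = EncryptedTextValue()
    stored = cipher.process_bind_param(plain_value, None)  # e.g. 'enc$aes_hmac$...'
    return cipher.process_result_value(stored, None)  # back to the plain value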
205 205
206 206 class BaseModel(object):
207 207 """
208 208 Base Model for all classes
209 209 """
210 210
211 211 @classmethod
212 212 def _get_keys(cls):
213 213 """return column names for this model """
214 214 return class_mapper(cls).c.keys()
215 215
216 216 def get_dict(self):
217 217 """
218 218 return dict with keys and values corresponding
219 219 to this model data """
220 220
221 221 d = {}
222 222 for k in self._get_keys():
223 223 d[k] = getattr(self, k)
224 224
225 225 # also use __json__() if present to get additional fields
226 226 _json_attr = getattr(self, '__json__', None)
227 227 if _json_attr:
228 228 # update with attributes from __json__
229 229 if callable(_json_attr):
230 230 _json_attr = _json_attr()
231 231 for k, val in _json_attr.iteritems():
232 232 d[k] = val
233 233 return d
234 234
235 235 def get_appstruct(self):
236 236 """return list with keys and values tuples corresponding
237 237 to this model data """
238 238
239 239 lst = []
240 240 for k in self._get_keys():
241 241 lst.append((k, getattr(self, k),))
242 242 return lst
243 243
244 244 def populate_obj(self, populate_dict):
245 245 """populate model with data from given populate_dict"""
246 246
247 247 for k in self._get_keys():
248 248 if k in populate_dict:
249 249 setattr(self, k, populate_dict[k])
250 250
251 251 @classmethod
252 252 def query(cls):
253 253 return Session().query(cls)
254 254
255 255 @classmethod
256 256 def get(cls, id_):
257 257 if id_:
258 258 return cls.query().get(id_)
259 259
260 260 @classmethod
261 261 def get_or_404(cls, id_):
262 262 from pyramid.httpexceptions import HTTPNotFound
263 263
264 264 try:
265 265 id_ = int(id_)
266 266 except (TypeError, ValueError):
267 267 raise HTTPNotFound()
268 268
269 269 res = cls.query().get(id_)
270 270 if not res:
271 271 raise HTTPNotFound()
272 272 return res
273 273
274 274 @classmethod
275 275 def getAll(cls):
276 276 # deprecated and left for backward compatibility
277 277 return cls.get_all()
278 278
279 279 @classmethod
280 280 def get_all(cls):
281 281 return cls.query().all()
282 282
283 283 @classmethod
284 284 def delete(cls, id_):
285 285 obj = cls.query().get(id_)
286 286 Session().delete(obj)
287 287
288 288 @classmethod
289 289 def identity_cache(cls, session, attr_name, value):
290 290 exist_in_session = []
291 291 for (item_cls, pkey), instance in session.identity_map.items():
292 292 if cls == item_cls and getattr(instance, attr_name) == value:
293 293 exist_in_session.append(instance)
294 294 if exist_in_session:
295 295 if len(exist_in_session) == 1:
296 296 return exist_in_session[0]
297 297 log.exception(
298 298 'multiple objects with attr %s and '
299 299 'value %s found with same name: %r',
300 300 attr_name, value, exist_in_session)
301 301
302 302 def __repr__(self):
303 303 if hasattr(self, '__unicode__'):
304 304 # python repr needs to return str
305 305 try:
306 306 return safe_str(self.__unicode__())
307 307 except UnicodeDecodeError:
308 308 pass
309 309 return '<DB:%s>' % (self.__class__.__name__)
310 310
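# Usage sketch (illustrative, not part of the original file): every model
# inherits these helpers, e.g. primary-key lookups that 404 on a missing or
# malformed id. Assumes an active database Session.
def _example_get_or_404(user_id):
    return User.get_or_404(user_id)  # raises HTTPNotFound if not found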
311 311
312 312 class RhodeCodeSetting(Base, BaseModel):
313 313 __tablename__ = 'rhodecode_settings'
314 314 __table_args__ = (
315 315 UniqueConstraint('app_settings_name'),
316 316 base_table_args
317 317 )
318 318
319 319 SETTINGS_TYPES = {
320 320 'str': safe_str,
321 321 'int': safe_int,
322 322 'unicode': safe_unicode,
323 323 'bool': str2bool,
324 324 'list': functools.partial(aslist, sep=',')
325 325 }
326 326 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
327 327 GLOBAL_CONF_KEY = 'app_settings'
328 328
329 329 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
330 330 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
331 331 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
332 332 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
333 333
334 334 def __init__(self, key='', val='', type='unicode'):
335 335 self.app_settings_name = key
336 336 self.app_settings_type = type
337 337 self.app_settings_value = val
338 338
339 339 @validates('_app_settings_value')
340 340 def validate_settings_value(self, key, val):
341 341 assert type(val) == unicode
342 342 return val
343 343
344 344 @hybrid_property
345 345 def app_settings_value(self):
346 346 v = self._app_settings_value
347 347 _type = self.app_settings_type
348 348 if _type:
349 349 _type = self.app_settings_type.split('.')[0]
350 350 # decode the encrypted value
351 351 if 'encrypted' in self.app_settings_type:
352 352 cipher = EncryptedTextValue()
353 353 v = safe_unicode(cipher.process_result_value(v, None))
354 354
355 355 converter = self.SETTINGS_TYPES.get(_type) or \
356 356 self.SETTINGS_TYPES['unicode']
357 357 return converter(v)
358 358
359 359 @app_settings_value.setter
360 360 def app_settings_value(self, val):
361 361 """
362 362 Setter that will always make sure we use unicode in app_settings_value
363 363
364 364 :param val:
365 365 """
366 366 val = safe_unicode(val)
367 367 # encode the encrypted value
368 368 if 'encrypted' in self.app_settings_type:
369 369 cipher = EncryptedTextValue()
370 370 val = safe_unicode(cipher.process_bind_param(val, None))
371 371 self._app_settings_value = val
372 372
373 373 @hybrid_property
374 374 def app_settings_type(self):
375 375 return self._app_settings_type
376 376
377 377 @app_settings_type.setter
378 378 def app_settings_type(self, val):
379 379 if val.split('.')[0] not in self.SETTINGS_TYPES:
380 380 raise Exception('type must be one of %s got %s'
381 381 % (self.SETTINGS_TYPES.keys(), val))
382 382 self._app_settings_type = val
383 383
384 384 @classmethod
385 385 def get_by_prefix(cls, prefix):
386 386 return RhodeCodeSetting.query()\
387 387 .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
388 388 .all()
389 389
390 390 def __unicode__(self):
391 391 return u"<%s('%s:%s[%s]')>" % (
392 392 self.__class__.__name__,
393 393 self.app_settings_name, self.app_settings_value,
394 394 self.app_settings_type
395 395 )
396 396
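# Usage sketch (illustrative, not part of the original file): the type prefix
# selects a converter from SETTINGS_TYPES, so stored strings are coerced on
# read. The setting name used here is hypothetical.
def _example_typed_setting():
    setting = RhodeCodeSetting('example_page_size', '25', 'int')
    return setting.app_settings_value  # safe_int turns the stored u'25' into 25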
397 397
398 398 class RhodeCodeUi(Base, BaseModel):
399 399 __tablename__ = 'rhodecode_ui'
400 400 __table_args__ = (
401 401 UniqueConstraint('ui_key'),
402 402 base_table_args
403 403 )
404 404
405 405 HOOK_REPO_SIZE = 'changegroup.repo_size'
406 406 # HG
407 407 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
408 408 HOOK_PULL = 'outgoing.pull_logger'
409 409 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
410 410 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
411 411 HOOK_PUSH = 'changegroup.push_logger'
412 412 HOOK_PUSH_KEY = 'pushkey.key_push'
413 413
414 414 HOOKS_BUILTIN = [
415 415 HOOK_PRE_PULL,
416 416 HOOK_PULL,
417 417 HOOK_PRE_PUSH,
418 418 HOOK_PRETX_PUSH,
419 419 HOOK_PUSH,
420 420 HOOK_PUSH_KEY,
421 421 ]
422 422
423 423 # TODO: johbo: Unify the way hooks are configured for git and hg;
424 424 # the git part is currently hardcoded.
425 425
426 426 # SVN PATTERNS
427 427 SVN_BRANCH_ID = 'vcs_svn_branch'
428 428 SVN_TAG_ID = 'vcs_svn_tag'
429 429
430 430 ui_id = Column(
431 431 "ui_id", Integer(), nullable=False, unique=True, default=None,
432 432 primary_key=True)
433 433 ui_section = Column(
434 434 "ui_section", String(255), nullable=True, unique=None, default=None)
435 435 ui_key = Column(
436 436 "ui_key", String(255), nullable=True, unique=None, default=None)
437 437 ui_value = Column(
438 438 "ui_value", String(255), nullable=True, unique=None, default=None)
439 439 ui_active = Column(
440 440 "ui_active", Boolean(), nullable=True, unique=None, default=True)
441 441
442 442 def __repr__(self):
443 443 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
444 444 self.ui_key, self.ui_value)
445 445
446 446
447 447 class RepoRhodeCodeSetting(Base, BaseModel):
448 448 __tablename__ = 'repo_rhodecode_settings'
449 449 __table_args__ = (
450 450 UniqueConstraint(
451 451 'app_settings_name', 'repository_id',
452 452 name='uq_repo_rhodecode_setting_name_repo_id'),
453 453 base_table_args
454 454 )
455 455
456 456 repository_id = Column(
457 457 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
458 458 nullable=False)
459 459 app_settings_id = Column(
460 460 "app_settings_id", Integer(), nullable=False, unique=True,
461 461 default=None, primary_key=True)
462 462 app_settings_name = Column(
463 463 "app_settings_name", String(255), nullable=True, unique=None,
464 464 default=None)
465 465 _app_settings_value = Column(
466 466 "app_settings_value", String(4096), nullable=True, unique=None,
467 467 default=None)
468 468 _app_settings_type = Column(
469 469 "app_settings_type", String(255), nullable=True, unique=None,
470 470 default=None)
471 471
472 472 repository = relationship('Repository')
473 473
474 474 def __init__(self, repository_id, key='', val='', type='unicode'):
475 475 self.repository_id = repository_id
476 476 self.app_settings_name = key
477 477 self.app_settings_type = type
478 478 self.app_settings_value = val
479 479
480 480 @validates('_app_settings_value')
481 481 def validate_settings_value(self, key, val):
482 482 assert type(val) == unicode
483 483 return val
484 484
485 485 @hybrid_property
486 486 def app_settings_value(self):
487 487 v = self._app_settings_value
488 488 type_ = self.app_settings_type
489 489 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
490 490 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
491 491 return converter(v)
492 492
493 493 @app_settings_value.setter
494 494 def app_settings_value(self, val):
495 495 """
496 496 Setter that will always make sure we use unicode in app_settings_value
497 497
498 498 :param val:
499 499 """
500 500 self._app_settings_value = safe_unicode(val)
501 501
502 502 @hybrid_property
503 503 def app_settings_type(self):
504 504 return self._app_settings_type
505 505
506 506 @app_settings_type.setter
507 507 def app_settings_type(self, val):
508 508 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
509 509 if val not in SETTINGS_TYPES:
510 510 raise Exception('type must be one of %s got %s'
511 511 % (SETTINGS_TYPES.keys(), val))
512 512 self._app_settings_type = val
513 513
514 514 def __unicode__(self):
515 515 return u"<%s('%s:%s:%s[%s]')>" % (
516 516 self.__class__.__name__, self.repository.repo_name,
517 517 self.app_settings_name, self.app_settings_value,
518 518 self.app_settings_type
519 519 )
520 520
521 521
522 522 class RepoRhodeCodeUi(Base, BaseModel):
523 523 __tablename__ = 'repo_rhodecode_ui'
524 524 __table_args__ = (
525 525 UniqueConstraint(
526 526 'repository_id', 'ui_section', 'ui_key',
527 527 name='uq_repo_rhodecode_ui_repository_id_section_key'),
528 528 base_table_args
529 529 )
530 530
531 531 repository_id = Column(
532 532 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
533 533 nullable=False)
534 534 ui_id = Column(
535 535 "ui_id", Integer(), nullable=False, unique=True, default=None,
536 536 primary_key=True)
537 537 ui_section = Column(
538 538 "ui_section", String(255), nullable=True, unique=None, default=None)
539 539 ui_key = Column(
540 540 "ui_key", String(255), nullable=True, unique=None, default=None)
541 541 ui_value = Column(
542 542 "ui_value", String(255), nullable=True, unique=None, default=None)
543 543 ui_active = Column(
544 544 "ui_active", Boolean(), nullable=True, unique=None, default=True)
545 545
546 546 repository = relationship('Repository')
547 547
548 548 def __repr__(self):
549 549 return '<%s[%s:%s]%s=>%s]>' % (
550 550 self.__class__.__name__, self.repository.repo_name,
551 551 self.ui_section, self.ui_key, self.ui_value)
552 552
553 553
554 554 class User(Base, BaseModel):
555 555 __tablename__ = 'users'
556 556 __table_args__ = (
557 557 UniqueConstraint('username'), UniqueConstraint('email'),
558 558 Index('u_username_idx', 'username'),
559 559 Index('u_email_idx', 'email'),
560 560 base_table_args
561 561 )
562 562
563 563 DEFAULT_USER = 'default'
564 564 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
565 565 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
566 566
567 567 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
568 568 username = Column("username", String(255), nullable=True, unique=None, default=None)
569 569 password = Column("password", String(255), nullable=True, unique=None, default=None)
570 570 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
571 571 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
572 572 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
573 573 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
574 574 _email = Column("email", String(255), nullable=True, unique=None, default=None)
575 575 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
576 576 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
577 577
578 578 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
579 579 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
580 580 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
581 581 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
582 582 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
583 583 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
584 584
585 585 user_log = relationship('UserLog')
586 586 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
587 587
588 588 repositories = relationship('Repository')
589 589 repository_groups = relationship('RepoGroup')
590 590 user_groups = relationship('UserGroup')
591 591
592 592 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
593 593 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
594 594
595 595 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
596 596 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
597 597 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
598 598
599 599 group_member = relationship('UserGroupMember', cascade='all')
600 600
601 601 notifications = relationship('UserNotification', cascade='all')
602 602 # notifications assigned to this user
603 603 user_created_notifications = relationship('Notification', cascade='all')
604 604 # comments created by this user
605 605 user_comments = relationship('ChangesetComment', cascade='all')
606 606 # user profile extra info
607 607 user_emails = relationship('UserEmailMap', cascade='all')
608 608 user_ip_map = relationship('UserIpMap', cascade='all')
609 609 user_auth_tokens = relationship('UserApiKeys', cascade='all')
610 610 user_ssh_keys = relationship('UserSshKeys', cascade='all')
611 611
612 612 # gists
613 613 user_gists = relationship('Gist', cascade='all')
614 614 # user pull requests
615 615 user_pull_requests = relationship('PullRequest', cascade='all')
616 616 # external identities
617 617 extenal_identities = relationship(
618 618 'ExternalIdentity',
619 619 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
620 620 cascade='all')
621 621 # review rules
622 622 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
623 623
624 624 def __unicode__(self):
625 625 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
626 626 self.user_id, self.username)
627 627
628 628 @hybrid_property
629 629 def email(self):
630 630 return self._email
631 631
632 632 @email.setter
633 633 def email(self, val):
634 634 self._email = val.lower() if val else None
635 635
636 636 @hybrid_property
637 637 def first_name(self):
638 638 from rhodecode.lib import helpers as h
639 639 if self.name:
640 640 return h.escape(self.name)
641 641 return self.name
642 642
643 643 @hybrid_property
644 644 def last_name(self):
645 645 from rhodecode.lib import helpers as h
646 646 if self.lastname:
647 647 return h.escape(self.lastname)
648 648 return self.lastname
649 649
650 650 @hybrid_property
651 651 def api_key(self):
652 652 """
653 653 Fetch the auth-token with role ALL connected to this user, if one exists
654 654 """
655 655 user_auth_token = UserApiKeys.query()\
656 656 .filter(UserApiKeys.user_id == self.user_id)\
657 657 .filter(or_(UserApiKeys.expires == -1,
658 658 UserApiKeys.expires >= time.time()))\
659 659 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
660 660 if user_auth_token:
661 661 user_auth_token = user_auth_token.api_key
662 662
663 663 return user_auth_token
664 664
665 665 @api_key.setter
666 666 def api_key(self, val):
667 667 # don't allow setting the API key; this is deprecated for now
668 668 self._api_key = None
669 669
670 670 @property
671 671 def reviewer_pull_requests(self):
672 672 return PullRequestReviewers.query() \
673 673 .options(joinedload(PullRequestReviewers.pull_request)) \
674 674 .filter(PullRequestReviewers.user_id == self.user_id) \
675 675 .all()
676 676
677 677 @property
678 678 def firstname(self):
679 679 # alias for future
680 680 return self.name
681 681
682 682 @property
683 683 def emails(self):
684 684 other = UserEmailMap.query()\
685 685 .filter(UserEmailMap.user == self) \
686 686 .order_by(UserEmailMap.email_id.asc()) \
687 687 .all()
688 688 return [self.email] + [x.email for x in other]
689 689
690 690 @property
691 691 def auth_tokens(self):
692 692 auth_tokens = self.get_auth_tokens()
693 693 return [x.api_key for x in auth_tokens]
694 694
695 695 def get_auth_tokens(self):
696 696 return UserApiKeys.query()\
697 697 .filter(UserApiKeys.user == self)\
698 698 .order_by(UserApiKeys.user_api_key_id.asc())\
699 699 .all()
700 700
701 701 @LazyProperty
702 702 def feed_token(self):
703 703 return self.get_feed_token()
704 704
705 705 def get_feed_token(self, cache=True):
706 706 feed_tokens = UserApiKeys.query()\
707 707 .filter(UserApiKeys.user == self)\
708 708 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
709 709 if cache:
710 710 feed_tokens = feed_tokens.options(
711 711 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
712 712
713 713 feed_tokens = feed_tokens.all()
714 714 if feed_tokens:
715 715 return feed_tokens[0].api_key
716 716 return 'NO_FEED_TOKEN_AVAILABLE'
717 717
718 718 @classmethod
719 719 def get(cls, user_id, cache=False):
720 720 if not user_id:
721 721 return
722 722
723 723 user = cls.query()
724 724 if cache:
725 725 user = user.options(
726 726 FromCache("sql_cache_short", "get_users_%s" % user_id))
727 727 return user.get(user_id)
728 728
729 729 @classmethod
730 730 def extra_valid_auth_tokens(cls, user, role=None):
731 731 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
732 732 .filter(or_(UserApiKeys.expires == -1,
733 733 UserApiKeys.expires >= time.time()))
734 734 if role:
735 735 tokens = tokens.filter(or_(UserApiKeys.role == role,
736 736 UserApiKeys.role == UserApiKeys.ROLE_ALL))
737 737 return tokens.all()
738 738
739 739 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
740 740 from rhodecode.lib import auth
741 741
742 742 log.debug('Trying to authenticate user: %s via auth-token, '
743 743 'and roles: %s', self, roles)
744 744
745 745 if not auth_token:
746 746 return False
747 747
748 748 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
749 749 tokens_q = UserApiKeys.query()\
750 750 .filter(UserApiKeys.user_id == self.user_id)\
751 751 .filter(or_(UserApiKeys.expires == -1,
752 752 UserApiKeys.expires >= time.time()))
753 753
754 754 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
755 755
756 756 crypto_backend = auth.crypto_backend()
757 757 enc_token_map = {}
758 758 plain_token_map = {}
759 759 for token in tokens_q:
760 760 if token.api_key.startswith(crypto_backend.ENC_PREF):
761 761 enc_token_map[token.api_key] = token
762 762 else:
763 763 plain_token_map[token.api_key] = token
764 764 log.debug(
765 765 'Found %s plain and %s encrypted user tokens to check for authentication',
766 766 len(plain_token_map), len(enc_token_map))
767 767
768 768 # plain token match comes first
769 769 match = plain_token_map.get(auth_token)
770 770
771 771 # check encrypted tokens now
772 772 if not match:
773 773 for token_hash, token in enc_token_map.items():
774 774 # NOTE(marcink): this is expensive to calculate, but most secure
775 775 if crypto_backend.hash_check(auth_token, token_hash):
776 776 match = token
777 777 break
778 778
779 779 if match:
780 780 log.debug('Found matching token %s', match)
781 781 if match.repo_id:
782 782 log.debug('Found scope, checking for scope match of token %s', match)
783 783 if match.repo_id == scope_repo_id:
784 784 return True
785 785 else:
786 786 log.debug(
787 787 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
788 788 'and calling scope is:%s, skipping further checks',
789 789 match.repo, scope_repo_id)
790 790 return False
791 791 else:
792 792 return True
793 793
794 794 return False
795 795
796 796 @property
797 797 def ip_addresses(self):
798 798 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
799 799 return [x.ip_addr for x in ret]
800 800
801 801 @property
802 802 def username_and_name(self):
803 803 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
804 804
805 805 @property
806 806 def username_or_name_or_email(self):
807 807 full_name = self.full_name if self.full_name != ' ' else None
808 808 return self.username or full_name or self.email
809 809
810 810 @property
811 811 def full_name(self):
812 812 return '%s %s' % (self.first_name, self.last_name)
813 813
814 814 @property
815 815 def full_name_or_username(self):
816 816 return ('%s %s' % (self.first_name, self.last_name)
817 817 if (self.first_name and self.last_name) else self.username)
818 818
819 819 @property
820 820 def full_contact(self):
821 821 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
822 822
823 823 @property
824 824 def short_contact(self):
825 825 return '%s %s' % (self.first_name, self.last_name)
826 826
827 827 @property
828 828 def is_admin(self):
829 829 return self.admin
830 830
831 831 def AuthUser(self, **kwargs):
832 832 """
833 833 Returns instance of AuthUser for this user
834 834 """
835 835 from rhodecode.lib.auth import AuthUser
836 836 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
837 837
838 838 @hybrid_property
839 839 def user_data(self):
840 840 if not self._user_data:
841 841 return {}
842 842
843 843 try:
844 844 return json.loads(self._user_data)
845 845 except TypeError:
846 846 return {}
847 847
848 848 @user_data.setter
849 849 def user_data(self, val):
850 850 if not isinstance(val, dict):
851 851 raise Exception('user_data must be dict, got %s' % type(val))
852 852 try:
853 853 self._user_data = json.dumps(val)
854 854 except Exception:
855 855 log.error(traceback.format_exc())
856 856
857 857 @classmethod
858 858 def get_by_username(cls, username, case_insensitive=False,
859 859 cache=False, identity_cache=False):
860 860 session = Session()
861 861
862 862 if case_insensitive:
863 863 q = cls.query().filter(
864 864 func.lower(cls.username) == func.lower(username))
865 865 else:
866 866 q = cls.query().filter(cls.username == username)
867 867
868 868 if cache:
869 869 if identity_cache:
870 870 val = cls.identity_cache(session, 'username', username)
871 871 if val:
872 872 return val
873 873 else:
874 874 cache_key = "get_user_by_name_%s" % _hash_key(username)
875 875 q = q.options(
876 876 FromCache("sql_cache_short", cache_key))
877 877
878 878 return q.scalar()
879 879
880 880 @classmethod
881 881 def get_by_auth_token(cls, auth_token, cache=False):
882 882 q = UserApiKeys.query()\
883 883 .filter(UserApiKeys.api_key == auth_token)\
884 884 .filter(or_(UserApiKeys.expires == -1,
885 885 UserApiKeys.expires >= time.time()))
886 886 if cache:
887 887 q = q.options(
888 888 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
889 889
890 890 match = q.first()
891 891 if match:
892 892 return match.user
893 893
894 894 @classmethod
895 895 def get_by_email(cls, email, case_insensitive=False, cache=False):
896 896
897 897 if case_insensitive:
898 898 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
899 899
900 900 else:
901 901 q = cls.query().filter(cls.email == email)
902 902
903 903 email_key = _hash_key(email)
904 904 if cache:
905 905 q = q.options(
906 906 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
907 907
908 908 ret = q.scalar()
909 909 if ret is None:
910 910 q = UserEmailMap.query()
911 911 # try fetching in alternate email map
912 912 if case_insensitive:
913 913 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
914 914 else:
915 915 q = q.filter(UserEmailMap.email == email)
916 916 q = q.options(joinedload(UserEmailMap.user))
917 917 if cache:
918 918 q = q.options(
919 919 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
920 920 ret = getattr(q.scalar(), 'user', None)
921 921
922 922 return ret
923 923
924 924 @classmethod
925 925 def get_from_cs_author(cls, author):
926 926 """
927 927 Tries to get a User object out of a commit author string
928 928
929 929 :param author:
930 930 """
931 931 from rhodecode.lib.helpers import email, author_name
932 932 # if there is a valid email in the passed attribute, see if it's in the system
933 933 _email = email(author)
934 934 if _email:
935 935 user = cls.get_by_email(_email, case_insensitive=True)
936 936 if user:
937 937 return user
938 938 # Maybe we can match by username?
939 939 _author = author_name(author)
940 940 user = cls.get_by_username(_author, case_insensitive=True)
941 941 if user:
942 942 return user
943 943
944 944 def update_userdata(self, **kwargs):
945 945 usr = self
946 946 old = usr.user_data
947 947 old.update(**kwargs)
948 948 usr.user_data = old
949 949 Session().add(usr)
950 950 log.debug('updated userdata with %s', kwargs)
951 951
952 952 def update_lastlogin(self):
953 953 """Update user lastlogin"""
954 954 self.last_login = datetime.datetime.now()
955 955 Session().add(self)
956 956 log.debug('updated user %s lastlogin', self.username)
957 957
958 958 def update_password(self, new_password):
959 959 from rhodecode.lib.auth import get_crypt_password
960 960
961 961 self.password = get_crypt_password(new_password)
962 962 Session().add(self)
963 963
964 964 @classmethod
965 965 def get_first_super_admin(cls):
966 966 user = User.query()\
967 967 .filter(User.admin == true()) \
968 968 .order_by(User.user_id.asc()) \
969 969 .first()
970 970
971 971 if user is None:
972 972 raise Exception('FATAL: Missing administrative account!')
973 973 return user
974 974
975 975 @classmethod
976 976 def get_all_super_admins(cls, only_active=False):
977 977 """
978 978 Returns all admin accounts sorted by username
979 979 """
980 980 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
981 981 if only_active:
982 982 qry = qry.filter(User.active == true())
983 983 return qry.all()
984 984
985 985 @classmethod
986 986 def get_default_user(cls, cache=False, refresh=False):
987 987 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
988 988 if user is None:
989 989 raise Exception('FATAL: Missing default account!')
990 990 if refresh:
991 991 # The default user might be based on outdated state which
992 992 # has been loaded from the cache.
993 993 # A call to refresh() ensures that the
994 994 # latest state from the database is used.
995 995 Session().refresh(user)
996 996 return user
997 997
998 998 def _get_default_perms(self, user, suffix=''):
999 999 from rhodecode.model.permission import PermissionModel
1000 1000 return PermissionModel().get_default_perms(user.user_perms, suffix)
1001 1001
1002 1002 def get_default_perms(self, suffix=''):
1003 1003 return self._get_default_perms(self, suffix)
1004 1004
1005 1005 def get_api_data(self, include_secrets=False, details='full'):
1006 1006 """
1007 1007 Common function for generating user related data for API
1008 1008
1009 1009 :param include_secrets: By default secrets in the API data will be replaced
1010 1010 by a placeholder value to prevent exposing this data by accident. In case
1011 1011 this data shall be exposed, set this flag to ``True``.
1012 1012
1013 1013 :param details: details can be 'basic|full'; 'basic' gives only a subset of
1014 1014 the available user information that includes user_id, name and emails.
1015 1015 """
1016 1016 user = self
1017 1017 user_data = self.user_data
1018 1018 data = {
1019 1019 'user_id': user.user_id,
1020 1020 'username': user.username,
1021 1021 'firstname': user.name,
1022 1022 'lastname': user.lastname,
1023 1023 'email': user.email,
1024 1024 'emails': user.emails,
1025 1025 }
1026 1026 if details == 'basic':
1027 1027 return data
1028 1028
1029 1029 auth_token_length = 40
1030 1030 auth_token_replacement = '*' * auth_token_length
1031 1031
1032 1032 extras = {
1033 1033 'auth_tokens': [auth_token_replacement],
1034 1034 'active': user.active,
1035 1035 'admin': user.admin,
1036 1036 'extern_type': user.extern_type,
1037 1037 'extern_name': user.extern_name,
1038 1038 'last_login': user.last_login,
1039 1039 'last_activity': user.last_activity,
1040 1040 'ip_addresses': user.ip_addresses,
1041 1041 'language': user_data.get('language')
1042 1042 }
1043 1043 data.update(extras)
1044 1044
1045 1045 if include_secrets:
1046 1046 data['auth_tokens'] = user.auth_tokens
1047 1047 return data
1048 1048
1049 1049 def __json__(self):
1050 1050 data = {
1051 1051 'full_name': self.full_name,
1052 1052 'full_name_or_username': self.full_name_or_username,
1053 1053 'short_contact': self.short_contact,
1054 1054 'full_contact': self.full_contact,
1055 1055 }
1056 1056 data.update(self.get_api_data())
1057 1057 return data
1058 1058
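# Usage sketch (illustrative, not part of the original file): resolving a VCS
# commit author string to a User via get_from_cs_author(), which tries the
# e-mail first and falls back to the author name. The author string is
# hypothetical; assumes an active database Session.
def _example_resolve_commit_author():
    return User.get_from_cs_author('Jane Doe <jane@example.com>')  # User or None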
1059 1059
1060 1060 class UserApiKeys(Base, BaseModel):
1061 1061 __tablename__ = 'user_api_keys'
1062 1062 __table_args__ = (
1063 1063 Index('uak_api_key_idx', 'api_key', unique=True),
1064 1064 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1065 1065 base_table_args
1066 1066 )
1067 1067 __mapper_args__ = {}
1068 1068
1069 1069 # ApiKey role
1070 1070 ROLE_ALL = 'token_role_all'
1071 1071 ROLE_HTTP = 'token_role_http'
1072 1072 ROLE_VCS = 'token_role_vcs'
1073 1073 ROLE_API = 'token_role_api'
1074 1074 ROLE_FEED = 'token_role_feed'
1075 1075 ROLE_PASSWORD_RESET = 'token_password_reset'
1076 1076
1077 1077 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1078 1078
1079 1079 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1080 1080 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1081 1081 api_key = Column("api_key", String(255), nullable=False, unique=True)
1082 1082 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1083 1083 expires = Column('expires', Float(53), nullable=False)
1084 1084 role = Column('role', String(255), nullable=True)
1085 1085 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1086 1086
1087 1087 # scope columns
1088 1088 repo_id = Column(
1089 1089 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1090 1090 nullable=True, unique=None, default=None)
1091 1091 repo = relationship('Repository', lazy='joined')
1092 1092
1093 1093 repo_group_id = Column(
1094 1094 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1095 1095 nullable=True, unique=None, default=None)
1096 1096 repo_group = relationship('RepoGroup', lazy='joined')
1097 1097
1098 1098 user = relationship('User', lazy='joined')
1099 1099
1100 1100 def __unicode__(self):
1101 1101 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1102 1102
1103 1103 def __json__(self):
1104 1104 data = {
1105 1105 'auth_token': self.api_key,
1106 1106 'role': self.role,
1107 1107 'scope': self.scope_humanized,
1108 1108 'expired': self.expired
1109 1109 }
1110 1110 return data
1111 1111
1112 1112 def get_api_data(self, include_secrets=False):
1113 1113 data = self.__json__()
1114 1114 if include_secrets:
1115 1115 return data
1116 1116 else:
1117 1117 data['auth_token'] = self.token_obfuscated
1118 1118 return data
1119 1119
1120 1120 @hybrid_property
1121 1121 def description_safe(self):
1122 1122 from rhodecode.lib import helpers as h
1123 1123 return h.escape(self.description)
1124 1124
1125 1125 @property
1126 1126 def expired(self):
1127 1127 if self.expires == -1:
1128 1128 return False
1129 1129 return time.time() > self.expires
1130 1130
1131 1131 @classmethod
1132 1132 def _get_role_name(cls, role):
1133 1133 return {
1134 1134 cls.ROLE_ALL: _('all'),
1135 1135 cls.ROLE_HTTP: _('http/web interface'),
1136 1136 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1137 1137 cls.ROLE_API: _('api calls'),
1138 1138 cls.ROLE_FEED: _('feed access'),
1139 1139 }.get(role, role)
1140 1140
1141 1141 @property
1142 1142 def role_humanized(self):
1143 1143 return self._get_role_name(self.role)
1144 1144
1145 1145 def _get_scope(self):
1146 1146 if self.repo:
1147 1147 return 'Repository: {}'.format(self.repo.repo_name)
1148 1148 if self.repo_group:
1149 1149 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1150 1150 return 'Global'
1151 1151
1152 1152 @property
1153 1153 def scope_humanized(self):
1154 1154 return self._get_scope()
1155 1155
1156 1156 @property
1157 1157 def token_obfuscated(self):
1158 1158 if self.api_key:
1159 1159 return self.api_key[:4] + "****"
1160 1160
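# Usage sketch (illustrative, not part of the original file): tokens with
# expires == -1 never expire, otherwise `expired` compares against the current
# time; this mirrors the expiry filter used in User.extra_valid_auth_tokens().
def _example_active_tokens_for(user):
    return UserApiKeys.query()\
        .filter(UserApiKeys.user == user)\
        .filter(or_(UserApiKeys.expires == -1,
                    UserApiKeys.expires >= time.time()))\
        .all()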
1161 1161
1162 1162 class UserEmailMap(Base, BaseModel):
1163 1163 __tablename__ = 'user_email_map'
1164 1164 __table_args__ = (
1165 1165 Index('uem_email_idx', 'email'),
1166 1166 UniqueConstraint('email'),
1167 1167 base_table_args
1168 1168 )
1169 1169 __mapper_args__ = {}
1170 1170
1171 1171 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1172 1172 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1173 1173 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1174 1174 user = relationship('User', lazy='joined')
1175 1175
1176 1176 @validates('_email')
1177 1177 def validate_email(self, key, email):
1178 1178 # check that this email is not the main one
1179 1179 main_email = Session().query(User).filter(User.email == email).scalar()
1180 1180 if main_email is not None:
1181 1181 raise AttributeError('email %s is present in user table' % email)
1182 1182 return email
1183 1183
1184 1184 @hybrid_property
1185 1185 def email(self):
1186 1186 return self._email
1187 1187
1188 1188 @email.setter
1189 1189 def email(self, val):
1190 1190 self._email = val.lower() if val else None
1191 1191
1192 1192
1193 1193 class UserIpMap(Base, BaseModel):
1194 1194 __tablename__ = 'user_ip_map'
1195 1195 __table_args__ = (
1196 1196 UniqueConstraint('user_id', 'ip_addr'),
1197 1197 base_table_args
1198 1198 )
1199 1199 __mapper_args__ = {}
1200 1200
1201 1201 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1202 1202 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1203 1203 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1204 1204 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1205 1205 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1206 1206 user = relationship('User', lazy='joined')
1207 1207
1208 1208 @hybrid_property
1209 1209 def description_safe(self):
1210 1210 from rhodecode.lib import helpers as h
1211 1211 return h.escape(self.description)
1212 1212
1213 1213 @classmethod
1214 1214 def _get_ip_range(cls, ip_addr):
1215 1215 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1216 1216 return [str(net.network_address), str(net.broadcast_address)]
1217 1217
1218 1218 def __json__(self):
1219 1219 return {
1220 1220 'ip_addr': self.ip_addr,
1221 1221 'ip_range': self._get_ip_range(self.ip_addr),
1222 1222 }
1223 1223
1224 1224 def __unicode__(self):
1225 1225 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1226 1226 self.user_id, self.ip_addr)
1227 1227
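# Usage sketch (illustrative, not part of the original file): _get_ip_range()
# expands a CIDR entry into its network/broadcast bounds.
def _example_ip_range():
    # returns ['192.168.1.0', '192.168.1.255'] for this /24 network
    return UserIpMap._get_ip_range('192.168.1.0/24')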
1228 1228
1229 1229 class UserSshKeys(Base, BaseModel):
1230 1230 __tablename__ = 'user_ssh_keys'
1231 1231 __table_args__ = (
1232 1232 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1233 1233
1234 1234 UniqueConstraint('ssh_key_fingerprint'),
1235 1235
1236 1236 base_table_args
1237 1237 )
1238 1238 __mapper_args__ = {}
1239 1239
1240 1240 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1241 1241 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1242 1242 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1243 1243
1244 1244 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1245 1245
1246 1246 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1247 1247 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1248 1248 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1249 1249
1250 1250 user = relationship('User', lazy='joined')
1251 1251
1252 1252 def __json__(self):
1253 1253 data = {
1254 1254 'ssh_fingerprint': self.ssh_key_fingerprint,
1255 1255 'description': self.description,
1256 1256 'created_on': self.created_on
1257 1257 }
1258 1258 return data
1259 1259
1260 1260 def get_api_data(self):
1261 1261 data = self.__json__()
1262 1262 return data
1263 1263
1264 1264
1265 1265 class UserLog(Base, BaseModel):
1266 1266 __tablename__ = 'user_logs'
1267 1267 __table_args__ = (
1268 1268 base_table_args,
1269 1269 )
1270 1270
1271 1271 VERSION_1 = 'v1'
1272 1272 VERSION_2 = 'v2'
1273 1273 VERSIONS = [VERSION_1, VERSION_2]
1274 1274
1275 1275 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1276 1276 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1277 1277 username = Column("username", String(255), nullable=True, unique=None, default=None)
1278 1278 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1279 1279 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1280 1280 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1281 1281 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1282 1282 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1283 1283
1284 1284 version = Column("version", String(255), nullable=True, default=VERSION_1)
1285 1285 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1286 1286 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1287 1287
1288 1288 def __unicode__(self):
1289 1289 return u"<%s('id:%s:%s')>" % (
1290 1290 self.__class__.__name__, self.repository_name, self.action)
1291 1291
1292 1292 def __json__(self):
1293 1293 return {
1294 1294 'user_id': self.user_id,
1295 1295 'username': self.username,
1296 1296 'repository_id': self.repository_id,
1297 1297 'repository_name': self.repository_name,
1298 1298 'user_ip': self.user_ip,
1299 1299 'action_date': self.action_date,
1300 1300 'action': self.action,
1301 1301 }
1302 1302
1303 1303 @hybrid_property
1304 1304 def entry_id(self):
1305 1305 return self.user_log_id
1306 1306
1307 1307 @property
1308 1308 def action_as_day(self):
1309 1309 return datetime.date(*self.action_date.timetuple()[:3])
1310 1310
1311 1311 user = relationship('User')
1312 1312 repository = relationship('Repository', cascade='')
1313 1313
1314 1314
1315 1315 class UserGroup(Base, BaseModel):
1316 1316 __tablename__ = 'users_groups'
1317 1317 __table_args__ = (
1318 1318 base_table_args,
1319 1319 )
1320 1320
1321 1321 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1322 1322 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1323 1323 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1324 1324 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1325 1325 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1326 1326 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1327 1327 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1328 1328 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1329 1329
1330 1330 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1331 1331 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1332 1332 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1333 1333 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1334 1334 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1335 1335 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1336 1336
1337 1337 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1338 1338 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1339 1339
1340 1340 @classmethod
1341 1341 def _load_group_data(cls, column):
1342 1342 if not column:
1343 1343 return {}
1344 1344
1345 1345 try:
1346 1346 return json.loads(column) or {}
1347 1347 except TypeError:
1348 1348 return {}
1349 1349
1350 1350 @hybrid_property
1351 1351 def description_safe(self):
1352 1352 from rhodecode.lib import helpers as h
1353 1353 return h.escape(self.user_group_description)
1354 1354
1355 1355 @hybrid_property
1356 1356 def group_data(self):
1357 1357 return self._load_group_data(self._group_data)
1358 1358
1359 1359 @group_data.expression
1360 1360 def group_data(self, **kwargs):
1361 1361 return self._group_data
1362 1362
1363 1363 @group_data.setter
1364 1364 def group_data(self, val):
1365 1365 try:
1366 1366 self._group_data = json.dumps(val)
1367 1367 except Exception:
1368 1368 log.error(traceback.format_exc())
1369 1369
1370 1370 @classmethod
1371 1371 def _load_sync(cls, group_data):
1372 1372 if group_data:
1373 1373 return group_data.get('extern_type')
1374 1374
1375 1375 @property
1376 1376 def sync(self):
1377 1377 return self._load_sync(self.group_data)
1378 1378
1379 1379 def __unicode__(self):
1380 1380 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1381 1381 self.users_group_id,
1382 1382 self.users_group_name)
1383 1383
1384 1384 @classmethod
1385 1385 def get_by_group_name(cls, group_name, cache=False,
1386 1386 case_insensitive=False):
1387 1387 if case_insensitive:
1388 1388 q = cls.query().filter(func.lower(cls.users_group_name) ==
1389 1389 func.lower(group_name))
1390 1390
1391 1391 else:
1392 1392 q = cls.query().filter(cls.users_group_name == group_name)
1393 1393 if cache:
1394 1394 q = q.options(
1395 1395 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1396 1396 return q.scalar()
1397 1397
1398 1398 @classmethod
1399 1399 def get(cls, user_group_id, cache=False):
1400 1400 if not user_group_id:
1401 1401 return
1402 1402
1403 1403 user_group = cls.query()
1404 1404 if cache:
1405 1405 user_group = user_group.options(
1406 1406 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1407 1407 return user_group.get(user_group_id)
1408 1408
1409 1409 def permissions(self, with_admins=True, with_owner=True,
1410 1410 expand_from_user_groups=False):
1411 1411 """
1412 1412 Permissions for user groups
1413 1413 """
1414 1414 _admin_perm = 'usergroup.admin'
1415 1415
1416 1416 owner_row = []
1417 1417 if with_owner:
1418 1418 usr = AttributeDict(self.user.get_dict())
1419 1419 usr.owner_row = True
1420 1420 usr.permission = _admin_perm
1421 1421 owner_row.append(usr)
1422 1422
1423 1423 super_admin_ids = []
1424 1424 super_admin_rows = []
1425 1425 if with_admins:
1426 1426 for usr in User.get_all_super_admins():
1427 1427 super_admin_ids.append(usr.user_id)
1428 1428 # if this admin is also owner, don't double the record
1429 1429 if usr.user_id == owner_row[0].user_id:
1430 1430 owner_row[0].admin_row = True
1431 1431 else:
1432 1432 usr = AttributeDict(usr.get_dict())
1433 1433 usr.admin_row = True
1434 1434 usr.permission = _admin_perm
1435 1435 super_admin_rows.append(usr)
1436 1436
1437 1437 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1438 1438 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1439 1439 joinedload(UserUserGroupToPerm.user),
1440 1440 joinedload(UserUserGroupToPerm.permission),)
1441 1441
1442 1442 # get owners, admins and permissions. We do a trick of re-writing
1443 1443 # sqlalchemy objects into plain AttributeDicts, because the sqlalchemy
1444 1444 # session holds a global reference and changing one object propagates to
1445 1445 # all others. This means that if an admin is also an owner, setting
1446 1446 # admin_row would otherwise propagate to both objects
1447 1447 perm_rows = []
1448 1448 for _usr in q.all():
1449 1449 usr = AttributeDict(_usr.user.get_dict())
1450 1450 # if this user is also owner/admin, mark as duplicate record
1451 1451 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1452 1452 usr.duplicate_perm = True
1453 1453 usr.permission = _usr.permission.permission_name
1454 1454 perm_rows.append(usr)
1455 1455
1456 1456 # sort the perm rows: the 'default' user comes first, then rows are
1457 1457 # ordered by admin, write, read, none permission and sorted again
1458 1458 # alphabetically within each group
1459 1459 perm_rows = sorted(perm_rows, key=display_user_sort)
1460 1460
1461 1461 user_groups_rows = []
1462 1462 if expand_from_user_groups:
1463 1463 for ug in self.permission_user_groups(with_members=True):
1464 1464 for user_data in ug.members:
1465 1465 user_groups_rows.append(user_data)
1466 1466
1467 1467 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1468 1468
1469 1469 def permission_user_groups(self, with_members=False):
1470 1470 q = UserGroupUserGroupToPerm.query()\
1471 1471 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1472 1472 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1473 1473 joinedload(UserGroupUserGroupToPerm.target_user_group),
1474 1474 joinedload(UserGroupUserGroupToPerm.permission),)
1475 1475
1476 1476 perm_rows = []
1477 1477 for _user_group in q.all():
1478 1478 entry = AttributeDict(_user_group.user_group.get_dict())
1479 1479 entry.permission = _user_group.permission.permission_name
1480 1480 if with_members:
1481 1481 entry.members = [x.user.get_dict()
1482 1482 for x in _user_group.user_group.members]
1483 1483 perm_rows.append(entry)
1484 1484
1485 1485 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1486 1486 return perm_rows
1487 1487
1488 1488 def _get_default_perms(self, user_group, suffix=''):
1489 1489 from rhodecode.model.permission import PermissionModel
1490 1490 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1491 1491
1492 1492 def get_default_perms(self, suffix=''):
1493 1493 return self._get_default_perms(self, suffix)
1494 1494
1495 1495 def get_api_data(self, with_group_members=True, include_secrets=False):
1496 1496 """
1497 1497 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1498 1498 basically forwarded.
1499 1499
1500 1500 """
1501 1501 user_group = self
1502 1502 data = {
1503 1503 'users_group_id': user_group.users_group_id,
1504 1504 'group_name': user_group.users_group_name,
1505 1505 'group_description': user_group.user_group_description,
1506 1506 'active': user_group.users_group_active,
1507 1507 'owner': user_group.user.username,
1508 1508 'sync': user_group.sync,
1509 1509 'owner_email': user_group.user.email,
1510 1510 }
1511 1511
1512 1512 if with_group_members:
1513 1513 users = []
1514 1514 for user in user_group.members:
1515 1515 user = user.user
1516 1516 users.append(user.get_api_data(include_secrets=include_secrets))
1517 1517 data['users'] = users
1518 1518
1519 1519 return data
1520 1520
1521 1521
1522 1522 class UserGroupMember(Base, BaseModel):
1523 1523 __tablename__ = 'users_groups_members'
1524 1524 __table_args__ = (
1525 1525 base_table_args,
1526 1526 )
1527 1527
1528 1528 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1529 1529 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1530 1530 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1531 1531
1532 1532 user = relationship('User', lazy='joined')
1533 1533 users_group = relationship('UserGroup')
1534 1534
1535 1535 def __init__(self, gr_id='', u_id=''):
1536 1536 self.users_group_id = gr_id
1537 1537 self.user_id = u_id
1538 1538
1539 1539
1540 1540 class RepositoryField(Base, BaseModel):
1541 1541 __tablename__ = 'repositories_fields'
1542 1542 __table_args__ = (
1543 1543 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1544 1544 base_table_args,
1545 1545 )
1546 1546
1547 1547 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1548 1548
1549 1549 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1550 1550 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1551 1551 field_key = Column("field_key", String(250))
1552 1552 field_label = Column("field_label", String(1024), nullable=False)
1553 1553 field_value = Column("field_value", String(10000), nullable=False)
1554 1554 field_desc = Column("field_desc", String(1024), nullable=False)
1555 1555 field_type = Column("field_type", String(255), nullable=False, unique=None)
1556 1556 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1557 1557
1558 1558 repository = relationship('Repository')
1559 1559
1560 1560 @property
1561 1561 def field_key_prefixed(self):
1562 1562 return 'ex_%s' % self.field_key
1563 1563
1564 1564 @classmethod
1565 1565 def un_prefix_key(cls, key):
1566 1566 if key.startswith(cls.PREFIX):
1567 1567 return key[len(cls.PREFIX):]
1568 1568 return key
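# A minimal usage sketch of the prefix helpers above (illustrative key names,
# not taken from a real repository):
#
#   RepositoryField.un_prefix_key('ex_ticket_system')  # -> 'ticket_system'
#   RepositoryField.un_prefix_key('ticket_system')     # -> 'ticket_system' (unchanged)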
1569 1569
1570 1570 @classmethod
1571 1571 def get_by_key_name(cls, key, repo):
1572 1572 row = cls.query()\
1573 1573 .filter(cls.repository == repo)\
1574 1574 .filter(cls.field_key == key).scalar()
1575 1575 return row
1576 1576
1577 1577
1578 1578 class Repository(Base, BaseModel):
1579 1579 __tablename__ = 'repositories'
1580 1580 __table_args__ = (
1581 1581 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1582 1582 base_table_args,
1583 1583 )
1584 1584 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1585 1585 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1586 1586 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1587 1587
1588 1588 STATE_CREATED = 'repo_state_created'
1589 1589 STATE_PENDING = 'repo_state_pending'
1590 1590 STATE_ERROR = 'repo_state_error'
1591 1591
1592 1592 LOCK_AUTOMATIC = 'lock_auto'
1593 1593 LOCK_API = 'lock_api'
1594 1594 LOCK_WEB = 'lock_web'
1595 1595 LOCK_PULL = 'lock_pull'
1596 1596
1597 1597 NAME_SEP = URL_SEP
1598 1598
1599 1599 repo_id = Column(
1600 1600 "repo_id", Integer(), nullable=False, unique=True, default=None,
1601 1601 primary_key=True)
1602 1602 _repo_name = Column(
1603 1603 "repo_name", Text(), nullable=False, default=None)
1604 1604 _repo_name_hash = Column(
1605 1605 "repo_name_hash", String(255), nullable=False, unique=True)
1606 1606 repo_state = Column("repo_state", String(255), nullable=True)
1607 1607
1608 1608 clone_uri = Column(
1609 1609 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1610 1610 default=None)
1611 1611 push_uri = Column(
1612 1612 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1613 1613 default=None)
1614 1614 repo_type = Column(
1615 1615 "repo_type", String(255), nullable=False, unique=False, default=None)
1616 1616 user_id = Column(
1617 1617 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1618 1618 unique=False, default=None)
1619 1619 private = Column(
1620 1620 "private", Boolean(), nullable=True, unique=None, default=None)
1621 1621 archived = Column(
1622 1622 "archived", Boolean(), nullable=True, unique=None, default=None)
1623 1623 enable_statistics = Column(
1624 1624 "statistics", Boolean(), nullable=True, unique=None, default=True)
1625 1625 enable_downloads = Column(
1626 1626 "downloads", Boolean(), nullable=True, unique=None, default=True)
1627 1627 description = Column(
1628 1628 "description", String(10000), nullable=True, unique=None, default=None)
1629 1629 created_on = Column(
1630 1630 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1631 1631 default=datetime.datetime.now)
1632 1632 updated_on = Column(
1633 1633 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1634 1634 default=datetime.datetime.now)
1635 1635 _landing_revision = Column(
1636 1636 "landing_revision", String(255), nullable=False, unique=False,
1637 1637 default=None)
1638 1638 enable_locking = Column(
1639 1639 "enable_locking", Boolean(), nullable=False, unique=None,
1640 1640 default=False)
1641 1641 _locked = Column(
1642 1642 "locked", String(255), nullable=True, unique=False, default=None)
1643 1643 _changeset_cache = Column(
1644 1644 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1645 1645
1646 1646 fork_id = Column(
1647 1647 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1648 1648 nullable=True, unique=False, default=None)
1649 1649 group_id = Column(
1650 1650 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1651 1651 unique=False, default=None)
1652 1652
1653 1653 user = relationship('User', lazy='joined')
1654 1654 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1655 1655 group = relationship('RepoGroup', lazy='joined')
1656 1656 repo_to_perm = relationship(
1657 1657 'UserRepoToPerm', cascade='all',
1658 1658 order_by='UserRepoToPerm.repo_to_perm_id')
1659 1659 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1660 1660 stats = relationship('Statistics', cascade='all', uselist=False)
1661 1661
1662 1662 followers = relationship(
1663 1663 'UserFollowing',
1664 1664 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1665 1665 cascade='all')
1666 1666 extra_fields = relationship(
1667 1667 'RepositoryField', cascade="all, delete, delete-orphan")
1668 1668 logs = relationship('UserLog')
1669 1669 comments = relationship(
1670 1670 'ChangesetComment', cascade="all, delete, delete-orphan")
1671 1671 pull_requests_source = relationship(
1672 1672 'PullRequest',
1673 1673 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1674 1674 cascade="all, delete, delete-orphan")
1675 1675 pull_requests_target = relationship(
1676 1676 'PullRequest',
1677 1677 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1678 1678 cascade="all, delete, delete-orphan")
1679 1679 ui = relationship('RepoRhodeCodeUi', cascade="all")
1680 1680 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1681 1681 integrations = relationship('Integration', cascade="all, delete, delete-orphan")
1682 1682
1683 1683 scoped_tokens = relationship('UserApiKeys', cascade="all")
1684 1684
1685 1685 artifacts = relationship('FileStore', cascade="all")
1686 1686
1687 1687 def __unicode__(self):
1688 1688 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1689 1689 safe_unicode(self.repo_name))
1690 1690
1691 1691 @hybrid_property
1692 1692 def description_safe(self):
1693 1693 from rhodecode.lib import helpers as h
1694 1694 return h.escape(self.description)
1695 1695
1696 1696 @hybrid_property
1697 1697 def landing_rev(self):
1698 1698 # always should return [rev_type, rev]
1699 1699 if self._landing_revision:
1700 1700 _rev_info = self._landing_revision.split(':')
1701 1701 if len(_rev_info) < 2:
1702 1702 _rev_info.insert(0, 'rev')
1703 1703 return [_rev_info[0], _rev_info[1]]
1704 1704 return [None, None]
1705 1705
1706 1706 @landing_rev.setter
1707 1707 def landing_rev(self, val):
1708 1708 if ':' not in val:
1709 1709 raise ValueError('value must be delimited with `:` and consist '
1710 1710 'of <rev_type>:<rev>, got %s instead' % val)
1711 1711 self._landing_revision = val
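# A short usage sketch for the landing_rev property (hypothetical values):
#
#   repo.landing_rev = 'branch:default'   # stored as "branch:default"
#   repo.landing_rev                      # -> ['branch', 'default']
#   repo.landing_rev = 'default'          # raises ValueError, missing ':'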
1712 1712
1713 1713 @hybrid_property
1714 1714 def locked(self):
1715 1715 if self._locked:
1716 1716 user_id, timelocked, reason = self._locked.split(':')
1717 1717 lock_values = int(user_id), timelocked, reason
1718 1718 else:
1719 1719 lock_values = [None, None, None]
1720 1720 return lock_values
1721 1721
1722 1722 @locked.setter
1723 1723 def locked(self, val):
1724 1724 if val and isinstance(val, (list, tuple)):
1725 1725 self._locked = ':'.join(map(str, val))
1726 1726 else:
1727 1727 self._locked = None
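# Sketch of how the lock value round-trips (hypothetical user id and time):
#
#   repo.locked = [2, time.time(), Repository.LOCK_API]
#   # stored in `_locked` as e.g. "2:1560000000.0:lock_api"
#   user_id, locked_since, reason = repo.locked   # -> (2, '1560000000.0', 'lock_api')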
1728 1728
1729 1729 @hybrid_property
1730 1730 def changeset_cache(self):
1731 1731 from rhodecode.lib.vcs.backends.base import EmptyCommit
1732 1732 dummy = EmptyCommit().__json__()
1733 1733 if not self._changeset_cache:
1734 1734 dummy['source_repo_id'] = self.repo_id
1735 1735 return json.loads(json.dumps(dummy))
1736 1736
1737 1737 try:
1738 1738 return json.loads(self._changeset_cache)
1739 1739 except TypeError:
1740 1740 return dummy
1741 1741 except Exception:
1742 1742 log.error(traceback.format_exc())
1743 1743 return dummy
1744 1744
1745 1745 @changeset_cache.setter
1746 1746 def changeset_cache(self, val):
1747 1747 try:
1748 1748 self._changeset_cache = json.dumps(val)
1749 1749 except Exception:
1750 1750 log.error(traceback.format_exc())
1751 1751
1752 1752 @hybrid_property
1753 1753 def repo_name(self):
1754 1754 return self._repo_name
1755 1755
1756 1756 @repo_name.setter
1757 1757 def repo_name(self, value):
1758 1758 self._repo_name = value
1759 1759 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1760 1760
1761 1761 @classmethod
1762 1762 def normalize_repo_name(cls, repo_name):
1763 1763 """
1764 1764 Normalizes an OS-specific repo_name to the format stored internally in
1765 1765 the database, using URL_SEP
1766 1766
1767 1767 :param cls:
1768 1768 :param repo_name:
1769 1769 """
1770 1770 return cls.NAME_SEP.join(repo_name.split(os.sep))
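# Example (assuming a Windows-style path, where os.sep is '\\'):
#
#   Repository.normalize_repo_name('group\\subgroup\\repo')
#   # -> 'group/subgroup/repo'
# On POSIX systems the name is already '/'-separated and passes through unchanged.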
1771 1771
1772 1772 @classmethod
1773 1773 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1774 1774 session = Session()
1775 1775 q = session.query(cls).filter(cls.repo_name == repo_name)
1776 1776
1777 1777 if cache:
1778 1778 if identity_cache:
1779 1779 val = cls.identity_cache(session, 'repo_name', repo_name)
1780 1780 if val:
1781 1781 return val
1782 1782 else:
1783 1783 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1784 1784 q = q.options(
1785 1785 FromCache("sql_cache_short", cache_key))
1786 1786
1787 1787 return q.scalar()
1788 1788
1789 1789 @classmethod
1790 1790 def get_by_id_or_repo_name(cls, repoid):
1791 1791 if isinstance(repoid, (int, long)):
1792 1792 try:
1793 1793 repo = cls.get(repoid)
1794 1794 except ValueError:
1795 1795 repo = None
1796 1796 else:
1797 1797 repo = cls.get_by_repo_name(repoid)
1798 1798 return repo
1799 1799
1800 1800 @classmethod
1801 1801 def get_by_full_path(cls, repo_full_path):
1802 1802 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1803 1803 repo_name = cls.normalize_repo_name(repo_name)
1804 1804 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1805 1805
1806 1806 @classmethod
1807 1807 def get_repo_forks(cls, repo_id):
1808 1808 return cls.query().filter(Repository.fork_id == repo_id)
1809 1809
1810 1810 @classmethod
1811 1811 def base_path(cls):
1812 1812 """
1813 1813 Returns the base path where all repos are stored
1814 1814
1815 1815 :param cls:
1816 1816 """
1817 1817 q = Session().query(RhodeCodeUi)\
1818 1818 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1819 1819 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1820 1820 return q.one().ui_value
1821 1821
1822 1822 @classmethod
1823 1823 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1824 1824 case_insensitive=True, archived=False):
1825 1825 q = Repository.query()
1826 1826
1827 1827 if not archived:
1828 1828 q = q.filter(Repository.archived.isnot(true()))
1829 1829
1830 1830 if not isinstance(user_id, Optional):
1831 1831 q = q.filter(Repository.user_id == user_id)
1832 1832
1833 1833 if not isinstance(group_id, Optional):
1834 1834 q = q.filter(Repository.group_id == group_id)
1835 1835
1836 1836 if case_insensitive:
1837 1837 q = q.order_by(func.lower(Repository.repo_name))
1838 1838 else:
1839 1839 q = q.order_by(Repository.repo_name)
1840 1840
1841 1841 return q.all()
1842 1842
1843 1843 @property
1844 1844 def repo_uid(self):
1845 1845 return '_{}'.format(self.repo_id)
1846 1846
1847 1847 @property
1848 1848 def forks(self):
1849 1849 """
1850 1850 Return forks of this repo
1851 1851 """
1852 1852 return Repository.get_repo_forks(self.repo_id)
1853 1853
1854 1854 @property
1855 1855 def parent(self):
1856 1856 """
1857 1857 Returns fork parent
1858 1858 """
1859 1859 return self.fork
1860 1860
1861 1861 @property
1862 1862 def just_name(self):
1863 1863 return self.repo_name.split(self.NAME_SEP)[-1]
1864 1864
1865 1865 @property
1866 1866 def groups_with_parents(self):
1867 1867 groups = []
1868 1868 if self.group is None:
1869 1869 return groups
1870 1870
1871 1871 cur_gr = self.group
1872 1872 groups.insert(0, cur_gr)
1873 1873 while 1:
1874 1874 gr = getattr(cur_gr, 'parent_group', None)
1875 1875 cur_gr = cur_gr.parent_group
1876 1876 if gr is None:
1877 1877 break
1878 1878 groups.insert(0, gr)
1879 1879
1880 1880 return groups
1881 1881
1882 1882 @property
1883 1883 def groups_and_repo(self):
1884 1884 return self.groups_with_parents, self
1885 1885
1886 1886 @LazyProperty
1887 1887 def repo_path(self):
1888 1888 """
1889 1889 Returns the full base path for this repository, i.e. where it
1890 1890 actually exists on the filesystem
1891 1891 """
1892 1892 q = Session().query(RhodeCodeUi).filter(
1893 1893 RhodeCodeUi.ui_key == self.NAME_SEP)
1894 1894 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1895 1895 return q.one().ui_value
1896 1896
1897 1897 @property
1898 1898 def repo_full_path(self):
1899 1899 p = [self.repo_path]
1900 1900 # we need to split the name by / since this is how we store the
1901 1901 # names in the database, but that eventually needs to be converted
1902 1902 # into a valid system path
1903 1903 p += self.repo_name.split(self.NAME_SEP)
1904 1904 return os.path.join(*map(safe_unicode, p))
1905 1905
1906 1906 @property
1907 1907 def cache_keys(self):
1908 1908 """
1909 1909 Returns associated cache keys for that repo
1910 1910 """
1911 1911 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1912 1912 repo_id=self.repo_id)
1913 1913 return CacheKey.query()\
1914 1914 .filter(CacheKey.cache_args == invalidation_namespace)\
1915 1915 .order_by(CacheKey.cache_key)\
1916 1916 .all()
1917 1917
1918 1918 @property
1919 1919 def cached_diffs_relative_dir(self):
1920 1920 """
1921 1921 Return the cached diffs directory as a path relative to the repository
1922 1922 store, safe to display to users who shouldn't learn the absolute store
1923 1923 path
1924 1924 """
1925 1925 return os.path.join(
1926 1926 os.path.dirname(self.repo_name),
1927 1927 self.cached_diffs_dir.split(os.path.sep)[-1])
1928 1928
1929 1929 @property
1930 1930 def cached_diffs_dir(self):
1931 1931 path = self.repo_full_path
1932 1932 return os.path.join(
1933 1933 os.path.dirname(path),
1934 1934 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1935 1935
1936 1936 def cached_diffs(self):
1937 1937 diff_cache_dir = self.cached_diffs_dir
1938 1938 if os.path.isdir(diff_cache_dir):
1939 1939 return os.listdir(diff_cache_dir)
1940 1940 return []
1941 1941
1942 1942 def shadow_repos(self):
1943 1943 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1944 1944 return [
1945 1945 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1946 1946 if x.startswith(shadow_repos_pattern)]
1947 1947
1948 1948 def get_new_name(self, repo_name):
1949 1949 """
1950 1950 returns new full repository name based on assigned group and new name
1951 1951
1952 1952 :param repo_name:
1953 1953 """
1954 1954 path_prefix = self.group.full_path_splitted if self.group else []
1955 1955 return self.NAME_SEP.join(path_prefix + [repo_name])
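# Illustrative example (hypothetical repository inside group "web/projects"):
#
#   repo.get_new_name('renamed-repo')   # -> 'web/projects/renamed-repo'
#   # for a repository without a group the new name is returned as-is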
1956 1956
1957 1957 @property
1958 1958 def _config(self):
1959 1959 """
1960 1960 Returns db based config object.
1961 1961 """
1962 1962 from rhodecode.lib.utils import make_db_config
1963 1963 return make_db_config(clear_session=False, repo=self)
1964 1964
1965 1965 def permissions(self, with_admins=True, with_owner=True,
1966 1966 expand_from_user_groups=False):
1967 1967 """
1968 1968 Permissions for repositories
1969 1969 """
1970 1970 _admin_perm = 'repository.admin'
1971 1971
1972 1972 owner_row = []
1973 1973 if with_owner:
1974 1974 usr = AttributeDict(self.user.get_dict())
1975 1975 usr.owner_row = True
1976 1976 usr.permission = _admin_perm
1977 1977 usr.permission_id = None
1978 1978 owner_row.append(usr)
1979 1979
1980 1980 super_admin_ids = []
1981 1981 super_admin_rows = []
1982 1982 if with_admins:
1983 1983 for usr in User.get_all_super_admins():
1984 1984 super_admin_ids.append(usr.user_id)
1985 1985 # if this admin is also owner, don't double the record
1986 1986 if usr.user_id == owner_row[0].user_id:
1987 1987 owner_row[0].admin_row = True
1988 1988 else:
1989 1989 usr = AttributeDict(usr.get_dict())
1990 1990 usr.admin_row = True
1991 1991 usr.permission = _admin_perm
1992 1992 usr.permission_id = None
1993 1993 super_admin_rows.append(usr)
1994 1994
1995 1995 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1996 1996 q = q.options(joinedload(UserRepoToPerm.repository),
1997 1997 joinedload(UserRepoToPerm.user),
1998 1998 joinedload(UserRepoToPerm.permission),)
1999 1999
2000 2000 # get owners and admins and their permissions. We do a trick of re-writing
2001 2001 # sqlalchemy objects into plain AttributeDict copies, because the sqlalchemy
2002 2002 # session holds a global reference and changing one object propagates to all
2003 2003 # others. This means if an admin is also the owner, an admin_row change
2004 2004 # would otherwise propagate to both objects
2005 2005 perm_rows = []
2006 2006 for _usr in q.all():
2007 2007 usr = AttributeDict(_usr.user.get_dict())
2008 2008 # if this user is also owner/admin, mark as duplicate record
2009 2009 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2010 2010 usr.duplicate_perm = True
2011 2011 # also check if this permission is maybe used by branch_permissions
2012 2012 if _usr.branch_perm_entry:
2013 2013 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2014 2014
2015 2015 usr.permission = _usr.permission.permission_name
2016 2016 usr.permission_id = _usr.repo_to_perm_id
2017 2017 perm_rows.append(usr)
2018 2018
2019 2019 # sort the perm rows: the 'default' user first, then by
2020 2020 # admin, write, read, none permission, and alphabetically
2021 2021 # within each permission group
2022 2022 perm_rows = sorted(perm_rows, key=display_user_sort)
2023 2023
2024 2024 user_groups_rows = []
2025 2025 if expand_from_user_groups:
2026 2026 for ug in self.permission_user_groups(with_members=True):
2027 2027 for user_data in ug.members:
2028 2028 user_groups_rows.append(user_data)
2029 2029
2030 2030 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2031 2031
2032 2032 def permission_user_groups(self, with_members=True):
2033 2033 q = UserGroupRepoToPerm.query()\
2034 2034 .filter(UserGroupRepoToPerm.repository == self)
2035 2035 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2036 2036 joinedload(UserGroupRepoToPerm.users_group),
2037 2037 joinedload(UserGroupRepoToPerm.permission),)
2038 2038
2039 2039 perm_rows = []
2040 2040 for _user_group in q.all():
2041 2041 entry = AttributeDict(_user_group.users_group.get_dict())
2042 2042 entry.permission = _user_group.permission.permission_name
2043 2043 if with_members:
2044 2044 entry.members = [x.user.get_dict()
2045 2045 for x in _user_group.users_group.members]
2046 2046 perm_rows.append(entry)
2047 2047
2048 2048 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2049 2049 return perm_rows
2050 2050
2051 2051 def get_api_data(self, include_secrets=False):
2052 2052 """
2053 2053 Common function for generating repo api data
2054 2054
2055 2055 :param include_secrets: See :meth:`User.get_api_data`.
2056 2056
2057 2057 """
2058 2058 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2059 2059 # move these methods to the models level.
2060 2060 from rhodecode.model.settings import SettingsModel
2061 2061 from rhodecode.model.repo import RepoModel
2062 2062
2063 2063 repo = self
2064 2064 _user_id, _time, _reason = self.locked
2065 2065
2066 2066 data = {
2067 2067 'repo_id': repo.repo_id,
2068 2068 'repo_name': repo.repo_name,
2069 2069 'repo_type': repo.repo_type,
2070 2070 'clone_uri': repo.clone_uri or '',
2071 2071 'push_uri': repo.push_uri or '',
2072 2072 'url': RepoModel().get_url(self),
2073 2073 'private': repo.private,
2074 2074 'created_on': repo.created_on,
2075 2075 'description': repo.description_safe,
2076 2076 'landing_rev': repo.landing_rev,
2077 2077 'owner': repo.user.username,
2078 2078 'fork_of': repo.fork.repo_name if repo.fork else None,
2079 2079 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2080 2080 'enable_statistics': repo.enable_statistics,
2081 2081 'enable_locking': repo.enable_locking,
2082 2082 'enable_downloads': repo.enable_downloads,
2083 2083 'last_changeset': repo.changeset_cache,
2084 2084 'locked_by': User.get(_user_id).get_api_data(
2085 2085 include_secrets=include_secrets) if _user_id else None,
2086 2086 'locked_date': time_to_datetime(_time) if _time else None,
2087 2087 'lock_reason': _reason if _reason else None,
2088 2088 }
2089 2089
2090 2090 # TODO: mikhail: should be per-repo settings here
2091 2091 rc_config = SettingsModel().get_all_settings()
2092 2092 repository_fields = str2bool(
2093 2093 rc_config.get('rhodecode_repository_fields'))
2094 2094 if repository_fields:
2095 2095 for f in self.extra_fields:
2096 2096 data[f.field_key_prefixed] = f.field_value
2097 2097
2098 2098 return data
2099 2099
2100 2100 @classmethod
2101 2101 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2102 2102 if not lock_time:
2103 2103 lock_time = time.time()
2104 2104 if not lock_reason:
2105 2105 lock_reason = cls.LOCK_AUTOMATIC
2106 2106 repo.locked = [user_id, lock_time, lock_reason]
2107 2107 Session().add(repo)
2108 2108 Session().commit()
2109 2109
2110 2110 @classmethod
2111 2111 def unlock(cls, repo):
2112 2112 repo.locked = None
2113 2113 Session().add(repo)
2114 2114 Session().commit()
2115 2115
2116 2116 @classmethod
2117 2117 def getlock(cls, repo):
2118 2118 return repo.locked
2119 2119
2120 2120 def is_user_lock(self, user_id):
2121 2121 if self.locked[0]:
2122 2122 lock_user_id = safe_int(self.locked[0])
2123 2123 user_id = safe_int(user_id)
2124 2124 # both are ints, and they are equal
2125 2125 return all([lock_user_id, user_id]) and lock_user_id == user_id
2126 2126
2127 2127 return False
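# Rough usage sketch of the locking helpers (hypothetical user ids):
#
#   Repository.lock(repo, user_id=2, lock_reason=Repository.LOCK_API)
#   repo.is_user_lock(2)     # -> True, user 2 holds the lock
#   repo.is_user_lock(3)     # -> False
#   Repository.unlock(repo)  # clears the lock again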
2128 2128
2129 2129 def get_locking_state(self, action, user_id, only_when_enabled=True):
2130 2130 """
2131 2131 Checks locking on this repository. If locking is enabled and a lock is
2132 2132 present, returns a tuple of (make_lock, locked, locked_by).
2133 2133 make_lock can have 3 states: None (do nothing), True (make a lock),
2134 2134 False (release the lock). This value is later propagated to hooks, which
2135 2135 do the locking. Think of it as a signal telling the hooks what to do.
2136 2136
2137 2137 """
2138 2138 # TODO: johbo: This is part of the business logic and should be moved
2139 2139 # into the RepositoryModel.
2140 2140
2141 2141 if action not in ('push', 'pull'):
2142 2142 raise ValueError("Invalid action value: %s" % repr(action))
2143 2143
2144 2144 # defines if locked error should be thrown to user
2145 2145 currently_locked = False
2146 2146 # defines if new lock should be made, tri-state
2147 2147 make_lock = None
2148 2148 repo = self
2149 2149 user = User.get(user_id)
2150 2150
2151 2151 lock_info = repo.locked
2152 2152
2153 2153 if repo and (repo.enable_locking or not only_when_enabled):
2154 2154 if action == 'push':
2155 2155 # check if it's already locked; if it is, compare users
2156 2156 locked_by_user_id = lock_info[0]
2157 2157 if user.user_id == locked_by_user_id:
2158 2158 log.debug(
2159 2159 'Got `push` action from user %s, now unlocking', user)
2160 2160 # unlock if we have push from user who locked
2161 2161 make_lock = False
2162 2162 else:
2163 2163 # we're not the same user who locked the repo; deny with the
2164 2164 # code defined in settings (default is HTTP 423 Locked)
2165 2165 log.debug('Repo %s is currently locked by user id %s', repo, locked_by_user_id)
2166 2166 currently_locked = True
2167 2167 elif action == 'pull':
2168 2168 # [0] user [1] date
2169 2169 if lock_info[0] and lock_info[1]:
2170 2170 log.debug('Repo %s is currently locked by user id %s', repo, lock_info[0])
2171 2171 currently_locked = True
2172 2172 else:
2173 2173 log.debug('Setting lock on repo %s by %s', repo, user)
2174 2174 make_lock = True
2175 2175
2176 2176 else:
2177 2177 log.debug('Repository %s does not have locking enabled', repo)
2178 2178
2179 2179 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2180 2180 make_lock, currently_locked, lock_info)
2181 2181
2182 2182 from rhodecode.lib.auth import HasRepoPermissionAny
2183 2183 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2184 2184 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2185 2185 # if we don't have at least write permission we cannot make a lock
2186 2186 log.debug('lock state reset back to FALSE due to lack '
2187 2187 'of at least write permission')
2188 2188 make_lock = False
2189 2189
2190 2190 return make_lock, currently_locked, lock_info
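# A sketch of how callers are expected to interpret the returned tri-state
# (hypothetical call; mirrors the docstring above):
#
#   make_lock, locked, locked_by = repo.get_locking_state('push', user_id=2)
#   if make_lock is True:      # hooks should create a new lock for this user
#       ...
#   elif make_lock is False:   # hooks should release the existing lock
#       ...
#   else:                      # None -> leave the locking state untouched
#       ...
#   if locked:                 # deny the operation (HTTP 423 by default)
#       ...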
2191 2191
2192 2192 @property
2193 2193 def last_commit_cache_update_diff(self):
2194 2194 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2195 2195
2196 2196 @property
2197 2197 def last_commit_change(self):
2198 2198 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2199 2199 empty_date = datetime.datetime.fromtimestamp(0)
2200 2200 date_latest = self.changeset_cache.get('date', empty_date)
2201 2201 try:
2202 2202 return parse_datetime(date_latest)
2203 2203 except Exception:
2204 2204 return empty_date
2205 2205
2206 2206 @property
2207 2207 def last_db_change(self):
2208 2208 return self.updated_on
2209 2209
2210 2210 @property
2211 2211 def clone_uri_hidden(self):
2212 2212 clone_uri = self.clone_uri
2213 2213 if clone_uri:
2214 2214 import urlobject
2215 2215 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2216 2216 if url_obj.password:
2217 2217 clone_uri = url_obj.with_password('*****')
2218 2218 return clone_uri
2219 2219
2220 2220 @property
2221 2221 def push_uri_hidden(self):
2222 2222 push_uri = self.push_uri
2223 2223 if push_uri:
2224 2224 import urlobject
2225 2225 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2226 2226 if url_obj.password:
2227 2227 push_uri = url_obj.with_password('*****')
2228 2228 return push_uri
2229 2229
2230 2230 def clone_url(self, **override):
2231 2231 from rhodecode.model.settings import SettingsModel
2232 2232
2233 2233 uri_tmpl = None
2234 2234 if 'with_id' in override:
2235 2235 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2236 2236 del override['with_id']
2237 2237
2238 2238 if 'uri_tmpl' in override:
2239 2239 uri_tmpl = override['uri_tmpl']
2240 2240 del override['uri_tmpl']
2241 2241
2242 2242 ssh = False
2243 2243 if 'ssh' in override:
2244 2244 ssh = True
2245 2245 del override['ssh']
2246 2246
2247 2247 # we didn't override our tmpl from **overrides
2248 2248 if not uri_tmpl:
2249 2249 rc_config = SettingsModel().get_all_settings(cache=True)
2250 2250 if ssh:
2251 2251 uri_tmpl = rc_config.get(
2252 2252 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2253 2253 else:
2254 2254 uri_tmpl = rc_config.get(
2255 2255 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2256 2256
2257 2257 request = get_current_request()
2258 2258 return get_clone_url(request=request,
2259 2259 uri_tmpl=uri_tmpl,
2260 2260 repo_name=self.repo_name,
2261 2261 repo_id=self.repo_id, **override)
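# Hedged usage sketch (the templates are the defaults defined on this class;
# the resulting URLs depend on settings and the current request/host):
#
#   repo.clone_url()               # e.g. 'http://user@host/group/repo'
#   repo.clone_url(with_id=True)   # uses DEFAULT_CLONE_URI_ID -> '.../_<repoid>'
#   repo.clone_url(ssh=True)       # uses the SSH clone template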
2262 2262
2263 2263 def set_state(self, state):
2264 2264 self.repo_state = state
2265 2265 Session().add(self)
2266 2266 #==========================================================================
2267 2267 # SCM PROPERTIES
2268 2268 #==========================================================================
2269 2269
2270 2270 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2271 2271 return get_commit_safe(
2272 2272 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2273 2273
2274 2274 def get_changeset(self, rev=None, pre_load=None):
2275 2275 warnings.warn("Use get_commit", DeprecationWarning)
2276 2276 commit_id = None
2277 2277 commit_idx = None
2278 2278 if isinstance(rev, compat.string_types):
2279 2279 commit_id = rev
2280 2280 else:
2281 2281 commit_idx = rev
2282 2282 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2283 2283 pre_load=pre_load)
2284 2284
2285 2285 def get_landing_commit(self):
2286 2286 """
2287 2287 Returns landing commit, or if that doesn't exist returns the tip
2288 2288 """
2289 2289 _rev_type, _rev = self.landing_rev
2290 2290 commit = self.get_commit(_rev)
2291 2291 if isinstance(commit, EmptyCommit):
2292 2292 return self.get_commit()
2293 2293 return commit
2294 2294
2295 2295 def update_commit_cache(self, cs_cache=None, config=None):
2296 2296 """
2297 2297 Update cache of last changeset for repository, keys should be::
2298 2298
2299 2299 source_repo_id
2300 2300 short_id
2301 2301 raw_id
2302 2302 revision
2303 2303 parents
2304 2304 message
2305 2305 date
2306 2306 author
2307 2307 updated_on
2308 2308
2309 2309 """
2310 2310 from rhodecode.lib.vcs.backends.base import BaseChangeset
2311 2311 if cs_cache is None:
2312 2312 # use no-cache version here
2313 2313 scm_repo = self.scm_instance(cache=False, config=config)
2314 2314
2315 2315 empty = scm_repo is None or scm_repo.is_empty()
2316 2316 if not empty:
2317 2317 cs_cache = scm_repo.get_commit(
2318 2318 pre_load=["author", "date", "message", "parents"])
2319 2319 else:
2320 2320 cs_cache = EmptyCommit()
2321 2321
2322 2322 if isinstance(cs_cache, BaseChangeset):
2323 2323 cs_cache = cs_cache.__json__()
2324 2324
2325 2325 def is_outdated(new_cs_cache):
2326 2326 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2327 2327 new_cs_cache['revision'] != self.changeset_cache['revision']):
2328 2328 return True
2329 2329 return False
2330 2330
2331 2331 # check if maybe we already have the latest cached revision
2332 2332 if is_outdated(cs_cache) or not self.changeset_cache:
2333 2333 _default = datetime.datetime.utcnow()
2334 2334 last_change = cs_cache.get('date') or _default
2335 2335 # we check if last update is newer than the new value
2336 2336 # if yes, we use the current timestamp instead. Imagine you get
2337 2337 # old commit pushed 1y ago, we'd set last update to 1y ago.
2338 2338 last_change_timestamp = datetime_to_time(last_change)
2339 2339 current_timestamp = datetime_to_time(last_change)
2340 2340 if last_change_timestamp > current_timestamp:
2341 2341 cs_cache['date'] = _default
2342 2342
2343 2343 cs_cache['updated_on'] = time.time()
2344 2344 self.changeset_cache = cs_cache
2345 2345 Session().add(self)
2346 2346 Session().commit()
2347 2347
2348 2348 log.debug('updated repo %s with new commit cache %s',
2349 2349 self.repo_name, cs_cache)
2350 2350 else:
2351 2351 cs_cache = self.changeset_cache
2352 2352 cs_cache['updated_on'] = time.time()
2353 2353 self.changeset_cache = cs_cache
2354 2354 Session().add(self)
2355 2355 Session().commit()
2356 2356
2357 2357 log.debug('Skipping update_commit_cache for repo:`%s` '
2358 2358 'commit cache already holds the latest changes', self.repo_name)
2359 2359
2360 2360 @property
2361 2361 def tip(self):
2362 2362 return self.get_commit('tip')
2363 2363
2364 2364 @property
2365 2365 def author(self):
2366 2366 return self.tip.author
2367 2367
2368 2368 @property
2369 2369 def last_change(self):
2370 2370 return self.scm_instance().last_change
2371 2371
2372 2372 def get_comments(self, revisions=None):
2373 2373 """
2374 2374 Returns comments for this repository grouped by revisions
2375 2375
2376 2376 :param revisions: filter query by revisions only
2377 2377 """
2378 2378 cmts = ChangesetComment.query()\
2379 2379 .filter(ChangesetComment.repo == self)
2380 2380 if revisions:
2381 2381 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2382 2382 grouped = collections.defaultdict(list)
2383 2383 for cmt in cmts.all():
2384 2384 grouped[cmt.revision].append(cmt)
2385 2385 return grouped
2386 2386
2387 2387 def statuses(self, revisions=None):
2388 2388 """
2389 2389 Returns statuses for this repository
2390 2390
2391 2391 :param revisions: list of revisions to get statuses for
2392 2392 """
2393 2393 statuses = ChangesetStatus.query()\
2394 2394 .filter(ChangesetStatus.repo == self)\
2395 2395 .filter(ChangesetStatus.version == 0)
2396 2396
2397 2397 if revisions:
2398 2398 # Try doing the filtering in chunks to avoid hitting limits
2399 2399 size = 500
2400 2400 status_results = []
2401 2401 for chunk in xrange(0, len(revisions), size):
2402 2402 status_results += statuses.filter(
2403 2403 ChangesetStatus.revision.in_(
2404 2404 revisions[chunk: chunk+size])
2405 2405 ).all()
2406 2406 else:
2407 2407 status_results = statuses.all()
2408 2408
2409 2409 grouped = {}
2410 2410
2411 2411 # maybe we have an open new pull request without a status?
2412 2412 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2413 2413 status_lbl = ChangesetStatus.get_status_lbl(stat)
2414 2414 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2415 2415 for rev in pr.revisions:
2416 2416 pr_id = pr.pull_request_id
2417 2417 pr_repo = pr.target_repo.repo_name
2418 2418 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2419 2419
2420 2420 for stat in status_results:
2421 2421 pr_id = pr_repo = None
2422 2422 if stat.pull_request:
2423 2423 pr_id = stat.pull_request.pull_request_id
2424 2424 pr_repo = stat.pull_request.target_repo.repo_name
2425 2425 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2426 2426 pr_id, pr_repo]
2427 2427 return grouped
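# Shape of the returned mapping (illustrative values, not real data):
#
#   {
#       '<commit_id>': ['<status>', '<status label>',
#                       <pull_request_id or None>, '<target repo name>' or None],
#       ...
#   }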
2428 2428
2429 2429 # ==========================================================================
2430 2430 # SCM CACHE INSTANCE
2431 2431 # ==========================================================================
2432 2432
2433 2433 def scm_instance(self, **kwargs):
2434 2434 import rhodecode
2435 2435
2436 2436 # Passing a config will not hit the cache; currently this is only used
2437 2437 # for repo2dbmapper
2438 2438 config = kwargs.pop('config', None)
2439 2439 cache = kwargs.pop('cache', None)
2440 2440 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2441 2441 # if cache is NOT defined use the default global setting, else we have
2442 2442 # full control over cache behaviour
2443 2443 if cache is None and full_cache and not config:
2444 2444 return self._get_instance_cached()
2445 2445 # cache here is sent to the "vcs server"
2446 2446 return self._get_instance(cache=bool(cache), config=config)
2447 2447
2448 2448 def _get_instance_cached(self):
2449 2449 from rhodecode.lib import rc_cache
2450 2450
2451 2451 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2452 2452 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2453 2453 repo_id=self.repo_id)
2454 2454 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2455 2455
2456 2456 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2457 2457 def get_instance_cached(repo_id, context_id):
2458 2458 return self._get_instance()
2459 2459
2460 2460 # we must use a thread-scoped cache here,
2461 2461 # because each gevent thread needs its own, non-shared connection and cache.
2462 2462 # we also alter `args` so the cache key is individual for every green thread.
2463 2463 inv_context_manager = rc_cache.InvalidationContext(
2464 2464 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2465 2465 thread_scoped=True)
2466 2466 with inv_context_manager as invalidation_context:
2467 2467 args = (self.repo_id, inv_context_manager.cache_key)
2468 2468 # re-compute and store cache if we get invalidate signal
2469 2469 if invalidation_context.should_invalidate():
2470 2470 instance = get_instance_cached.refresh(*args)
2471 2471 else:
2472 2472 instance = get_instance_cached(*args)
2473 2473
2474 2474 log.debug('Repo instance fetched in %.3fs', inv_context_manager.compute_time)
2475 2475 return instance
2476 2476
2477 2477 def _get_instance(self, cache=True, config=None):
2478 2478 config = config or self._config
2479 2479 custom_wire = {
2480 2480 'cache': cache # controls the vcs.remote cache
2481 2481 }
2482 2482 repo = get_vcs_instance(
2483 2483 repo_path=safe_str(self.repo_full_path),
2484 2484 config=config,
2485 2485 with_wire=custom_wire,
2486 2486 create=False,
2487 2487 _vcs_alias=self.repo_type)
2488 2488 if repo is not None:
2489 2489 repo.count() # cache rebuild
2490 2490 return repo
2491 2491
2492 2492 def __json__(self):
2493 2493 return {'landing_rev': self.landing_rev}
2494 2494
2495 2495 def get_dict(self):
2496 2496
2497 2497 # Since we transformed `repo_name` to a hybrid property, we need to
2498 2498 # keep compatibility with the code which uses `repo_name` field.
2499 2499
2500 2500 result = super(Repository, self).get_dict()
2501 2501 result['repo_name'] = result.pop('_repo_name', None)
2502 2502 return result
2503 2503
2504 2504
2505 2505 class RepoGroup(Base, BaseModel):
2506 2506 __tablename__ = 'groups'
2507 2507 __table_args__ = (
2508 2508 UniqueConstraint('group_name', 'group_parent_id'),
2509 2509 base_table_args,
2510 2510 )
2511 2511 __mapper_args__ = {'order_by': 'group_name'}
2512 2512
2513 2513 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2514 2514
2515 2515 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2516 2516 _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2517 2517 group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
2518 2518 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2519 2519 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2520 2520 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2521 2521 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2522 2522 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2523 2523 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2524 2524 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2525 2525 _changeset_cache = Column(
2526 2526 "changeset_cache", LargeBinary(), nullable=True) # JSON data
2527 2527
2528 2528 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2529 2529 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2530 2530 parent_group = relationship('RepoGroup', remote_side=group_id)
2531 2531 user = relationship('User')
2532 2532 integrations = relationship('Integration', cascade="all, delete, delete-orphan")
2533 2533
2534 2534 def __init__(self, group_name='', parent_group=None):
2535 2535 self.group_name = group_name
2536 2536 self.parent_group = parent_group
2537 2537
2538 2538 def __unicode__(self):
2539 2539 return u"<%s('id:%s:%s')>" % (
2540 2540 self.__class__.__name__, self.group_id, self.group_name)
2541 2541
2542 2542 @hybrid_property
2543 2543 def group_name(self):
2544 2544 return self._group_name
2545 2545
2546 2546 @group_name.setter
2547 2547 def group_name(self, value):
2548 2548 self._group_name = value
2549 2549 self.group_name_hash = self.hash_repo_group_name(value)
2550 2550
2551 2551 @hybrid_property
2552 2552 def changeset_cache(self):
2553 2553 from rhodecode.lib.vcs.backends.base import EmptyCommit
2554 2554 dummy = EmptyCommit().__json__()
2555 2555 if not self._changeset_cache:
2556 2556 dummy['source_repo_id'] = ''
2557 2557 return json.loads(json.dumps(dummy))
2558 2558
2559 2559 try:
2560 2560 return json.loads(self._changeset_cache)
2561 2561 except TypeError:
2562 2562 return dummy
2563 2563 except Exception:
2564 2564 log.error(traceback.format_exc())
2565 2565 return dummy
2566 2566
2567 2567 @changeset_cache.setter
2568 2568 def changeset_cache(self, val):
2569 2569 try:
2570 2570 self._changeset_cache = json.dumps(val)
2571 2571 except Exception:
2572 2572 log.error(traceback.format_exc())
2573 2573
2574 2574 @validates('group_parent_id')
2575 2575 def validate_group_parent_id(self, key, val):
2576 2576 """
2577 2577 Check for cyclic references: a group cannot be its own parent
2578 2578 """
2579 2579 if self.group_id and val:
2580 2580 assert val != self.group_id
2581 2581
2582 2582 return val
2583 2583
2584 2584 @hybrid_property
2585 2585 def description_safe(self):
2586 2586 from rhodecode.lib import helpers as h
2587 2587 return h.escape(self.group_description)
2588 2588
2589 2589 @classmethod
2590 2590 def hash_repo_group_name(cls, repo_group_name):
2591 2591 val = remove_formatting(repo_group_name)
2592 2592 val = safe_str(val).lower()
2593 2593 chars = []
2594 2594 for c in val:
2595 2595 if c not in string.ascii_letters:
2596 2596 c = str(ord(c))
2597 2597 chars.append(c)
2598 2598
2599 2599 return ''.join(chars)
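# Worked example (assuming remove_formatting leaves a plain ascii string
# unchanged): every character outside a-z/A-Z is replaced by its ordinal.
#
#   RepoGroup.hash_repo_group_name('My Group/1')
#   # 'my group/1' -> 'my' + '32' + 'group' + '47' + '49' == 'my32group4749'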
2600 2600
2601 2601 @classmethod
2602 2602 def _generate_choice(cls, repo_group):
2603 2603 from webhelpers.html import literal as _literal
2604 2604 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2605 2605 return repo_group.group_id, _name(repo_group.full_path_splitted)
2606 2606
2607 2607 @classmethod
2608 2608 def groups_choices(cls, groups=None, show_empty_group=True):
2609 2609 if not groups:
2610 2610 groups = cls.query().all()
2611 2611
2612 2612 repo_groups = []
2613 2613 if show_empty_group:
2614 2614 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2615 2615
2616 2616 repo_groups.extend([cls._generate_choice(x) for x in groups])
2617 2617
2618 2618 repo_groups = sorted(
2619 2619 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2620 2620 return repo_groups
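# Illustrative output of the select2 choice helpers above (hypothetical groups):
#
#   RepoGroup.groups_choices()
#   # -> [(-1, u'-- No parent --'),
#   #     (3, u'docs'),
#   #     (7, u'docs/api')]
# i.e. (group_id, full path joined with CHOICES_SEPARATOR), sorted by the
# top-level group name, with an optional empty choice first.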
2621 2621
2622 2622 @classmethod
2623 2623 def url_sep(cls):
2624 2624 return URL_SEP
2625 2625
2626 2626 @classmethod
2627 2627 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2628 2628 if case_insensitive:
2629 2629 gr = cls.query().filter(func.lower(cls.group_name)
2630 2630 == func.lower(group_name))
2631 2631 else:
2632 2632 gr = cls.query().filter(cls.group_name == group_name)
2633 2633 if cache:
2634 2634 name_key = _hash_key(group_name)
2635 2635 gr = gr.options(
2636 2636 FromCache("sql_cache_short", "get_group_%s" % name_key))
2637 2637 return gr.scalar()
2638 2638
2639 2639 @classmethod
2640 2640 def get_user_personal_repo_group(cls, user_id):
2641 2641 user = User.get(user_id)
2642 2642 if user.username == User.DEFAULT_USER:
2643 2643 return None
2644 2644
2645 2645 return cls.query()\
2646 2646 .filter(cls.personal == true()) \
2647 2647 .filter(cls.user == user) \
2648 2648 .order_by(cls.group_id.asc()) \
2649 2649 .first()
2650 2650
2651 2651 @classmethod
2652 2652 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2653 2653 case_insensitive=True):
2654 2654 q = RepoGroup.query()
2655 2655
2656 2656 if not isinstance(user_id, Optional):
2657 2657 q = q.filter(RepoGroup.user_id == user_id)
2658 2658
2659 2659 if not isinstance(group_id, Optional):
2660 2660 q = q.filter(RepoGroup.group_parent_id == group_id)
2661 2661
2662 2662 if case_insensitive:
2663 2663 q = q.order_by(func.lower(RepoGroup.group_name))
2664 2664 else:
2665 2665 q = q.order_by(RepoGroup.group_name)
2666 2666 return q.all()
2667 2667
2668 2668 @property
2669 2669 def parents(self, parents_recursion_limit=10):
2670 2670 groups = []
2671 2671 if self.parent_group is None:
2672 2672 return groups
2673 2673 cur_gr = self.parent_group
2674 2674 groups.insert(0, cur_gr)
2675 2675 cnt = 0
2676 2676 while 1:
2677 2677 cnt += 1
2678 2678 gr = getattr(cur_gr, 'parent_group', None)
2679 2679 cur_gr = cur_gr.parent_group
2680 2680 if gr is None:
2681 2681 break
2682 2682 if cnt == parents_recursion_limit:
2683 2683 # this will prevent accidental infinite loops
2684 2684 log.error('more than %s parents found for group %s, stopping '
2685 2685 'recursive parent fetching', parents_recursion_limit, self)
2686 2686 break
2687 2687
2688 2688 groups.insert(0, gr)
2689 2689 return groups
2690 2690
2691 2691 @property
2692 2692 def last_commit_cache_update_diff(self):
2693 2693 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2694 2694
2695 2695 @property
2696 2696 def last_commit_change(self):
2697 2697 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2698 2698 empty_date = datetime.datetime.fromtimestamp(0)
2699 2699 date_latest = self.changeset_cache.get('date', empty_date)
2700 2700 try:
2701 2701 return parse_datetime(date_latest)
2702 2702 except Exception:
2703 2703 return empty_date
2704 2704
2705 2705 @property
2706 2706 def last_db_change(self):
2707 2707 return self.updated_on
2708 2708
2709 2709 @property
2710 2710 def children(self):
2711 2711 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2712 2712
2713 2713 @property
2714 2714 def name(self):
2715 2715 return self.group_name.split(RepoGroup.url_sep())[-1]
2716 2716
2717 2717 @property
2718 2718 def full_path(self):
2719 2719 return self.group_name
2720 2720
2721 2721 @property
2722 2722 def full_path_splitted(self):
2723 2723 return self.group_name.split(RepoGroup.url_sep())
2724 2724
2725 2725 @property
2726 2726 def repositories(self):
2727 2727 return Repository.query()\
2728 2728 .filter(Repository.group == self)\
2729 2729 .order_by(Repository.repo_name)
2730 2730
2731 2731 @property
2732 2732 def repositories_recursive_count(self):
2733 2733 cnt = self.repositories.count()
2734 2734
2735 2735 def children_count(group):
2736 2736 cnt = 0
2737 2737 for child in group.children:
2738 2738 cnt += child.repositories.count()
2739 2739 cnt += children_count(child)
2740 2740 return cnt
2741 2741
2742 2742 return cnt + children_count(self)
2743 2743
2744 2744 def _recursive_objects(self, include_repos=True, include_groups=True):
2745 2745 all_ = []
2746 2746
2747 2747 def _get_members(root_gr):
2748 2748 if include_repos:
2749 2749 for r in root_gr.repositories:
2750 2750 all_.append(r)
2751 2751 childs = root_gr.children.all()
2752 2752 if childs:
2753 2753 for gr in childs:
2754 2754 if include_groups:
2755 2755 all_.append(gr)
2756 2756 _get_members(gr)
2757 2757
2758 2758 root_group = []
2759 2759 if include_groups:
2760 2760 root_group = [self]
2761 2761
2762 2762 _get_members(self)
2763 2763 return root_group + all_
2764 2764
2765 2765 def recursive_groups_and_repos(self):
2766 2766 """
2767 2767 Recursively return all groups, with the repositories in those groups
2768 2768 """
2769 2769 return self._recursive_objects()
2770 2770
2771 2771 def recursive_groups(self):
2772 2772 """
2773 2773 Returns all children groups for this group including children of children
2774 2774 """
2775 2775 return self._recursive_objects(include_repos=False)
2776 2776
2777 2777 def recursive_repos(self):
2778 2778 """
2779 2779 Returns all children repositories for this group
2780 2780 """
2781 2781 return self._recursive_objects(include_groups=False)
2782 2782
2783 2783 def get_new_name(self, group_name):
2784 2784 """
2785 2785 returns new full group name based on parent and new name
2786 2786
2787 2787 :param group_name:
2788 2788 """
2789 2789 path_prefix = (self.parent_group.full_path_splitted if
2790 2790 self.parent_group else [])
2791 2791 return RepoGroup.url_sep().join(path_prefix + [group_name])
2792 2792
2793 2793 def update_commit_cache(self, config=None):
2794 2794 """
2795 2795 Update cache of last changeset for newest repository inside this group, keys should be::
2796 2796
2797 2797 source_repo_id
2798 2798 short_id
2799 2799 raw_id
2800 2800 revision
2801 2801 parents
2802 2802 message
2803 2803 date
2804 2804 author
2805 2805
2806 2806 """
2807 2807 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2808 2808
2809 2809 def repo_groups_and_repos():
2810 2810 all_entries = OrderedDefaultDict(list)
2811 2811
2812 2812 def _get_members(root_gr, pos=0):
2813 2813
2814 2814 for repo in root_gr.repositories:
2815 2815 all_entries[root_gr].append(repo)
2816 2816
2817 2817 # fill in all parent positions
2818 2818 for parent_group in root_gr.parents:
2819 2819 all_entries[parent_group].extend(all_entries[root_gr])
2820 2820
2821 2821 children_groups = root_gr.children.all()
2822 2822 if children_groups:
2823 2823 for cnt, gr in enumerate(children_groups, 1):
2824 2824 _get_members(gr, pos=pos+cnt)
2825 2825
2826 2826 _get_members(root_gr=self)
2827 2827 return all_entries
2828 2828
2829 2829 empty_date = datetime.datetime.fromtimestamp(0)
2830 2830 for repo_group, repos in repo_groups_and_repos().items():
2831 2831
2832 2832 latest_repo_cs_cache = {}
2833 2833 for repo in repos:
2834 2834 repo_cs_cache = repo.changeset_cache
2835 2835 date_latest = latest_repo_cs_cache.get('date', empty_date)
2836 2836 date_current = repo_cs_cache.get('date', empty_date)
2837 2837 current_timestamp = datetime_to_time(parse_datetime(date_latest))
2838 2838 if current_timestamp < datetime_to_time(parse_datetime(date_current)):
2839 2839 latest_repo_cs_cache = repo_cs_cache
2840 2840 latest_repo_cs_cache['source_repo_id'] = repo.repo_id
2841 2841
2842 2842 latest_repo_cs_cache['updated_on'] = time.time()
2843 2843 repo_group.changeset_cache = latest_repo_cs_cache
2844 2844 Session().add(repo_group)
2845 2845 Session().commit()
2846 2846
2847 2847 log.debug('updated repo group %s with new commit cache %s',
2848 2848 repo_group.group_name, latest_repo_cs_cache)
2849 2849
2850 2850 def permissions(self, with_admins=True, with_owner=True,
2851 2851 expand_from_user_groups=False):
2852 2852 """
2853 2853 Permissions for repository groups
2854 2854 """
2855 2855 _admin_perm = 'group.admin'
2856 2856
2857 2857 owner_row = []
2858 2858 if with_owner:
2859 2859 usr = AttributeDict(self.user.get_dict())
2860 2860 usr.owner_row = True
2861 2861 usr.permission = _admin_perm
2862 2862 owner_row.append(usr)
2863 2863
2864 2864 super_admin_ids = []
2865 2865 super_admin_rows = []
2866 2866 if with_admins:
2867 2867 for usr in User.get_all_super_admins():
2868 2868 super_admin_ids.append(usr.user_id)
2869 2869 # if this admin is also owner, don't double the record
2870 2870 if usr.user_id == owner_row[0].user_id:
2871 2871 owner_row[0].admin_row = True
2872 2872 else:
2873 2873 usr = AttributeDict(usr.get_dict())
2874 2874 usr.admin_row = True
2875 2875 usr.permission = _admin_perm
2876 2876 super_admin_rows.append(usr)
2877 2877
2878 2878 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2879 2879 q = q.options(joinedload(UserRepoGroupToPerm.group),
2880 2880 joinedload(UserRepoGroupToPerm.user),
2881 2881 joinedload(UserRepoGroupToPerm.permission),)
2882 2882
2883 2883 # get owners and admins and their permissions. We do a trick of re-writing
2884 2884 # sqlalchemy objects into plain AttributeDict copies, because the sqlalchemy
2885 2885 # session holds a global reference and changing one object propagates to all
2886 2886 # others. This means if an admin is also the owner, an admin_row change
2887 2887 # would otherwise propagate to both objects
2888 2888 perm_rows = []
2889 2889 for _usr in q.all():
2890 2890 usr = AttributeDict(_usr.user.get_dict())
2891 2891 # if this user is also owner/admin, mark as duplicate record
2892 2892 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2893 2893 usr.duplicate_perm = True
2894 2894 usr.permission = _usr.permission.permission_name
2895 2895 perm_rows.append(usr)
2896 2896
2897 2897 # sort the perm rows: the 'default' user first, then by
2898 2898 # admin, write, read, none permission, and alphabetically
2899 2899 # within each permission group
2900 2900 perm_rows = sorted(perm_rows, key=display_user_sort)
2901 2901
2902 2902 user_groups_rows = []
2903 2903 if expand_from_user_groups:
2904 2904 for ug in self.permission_user_groups(with_members=True):
2905 2905 for user_data in ug.members:
2906 2906 user_groups_rows.append(user_data)
2907 2907
2908 2908 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2909 2909
2910 2910 def permission_user_groups(self, with_members=False):
2911 2911 q = UserGroupRepoGroupToPerm.query()\
2912 2912 .filter(UserGroupRepoGroupToPerm.group == self)
2913 2913 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2914 2914 joinedload(UserGroupRepoGroupToPerm.users_group),
2915 2915 joinedload(UserGroupRepoGroupToPerm.permission),)
2916 2916
2917 2917 perm_rows = []
2918 2918 for _user_group in q.all():
2919 2919 entry = AttributeDict(_user_group.users_group.get_dict())
2920 2920 entry.permission = _user_group.permission.permission_name
2921 2921 if with_members:
2922 2922 entry.members = [x.user.get_dict()
2923 2923 for x in _user_group.users_group.members]
2924 2924 perm_rows.append(entry)
2925 2925
2926 2926 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2927 2927 return perm_rows
2928 2928
2929 2929 def get_api_data(self):
2930 2930 """
2931 2931 Common function for generating api data
2932 2932
2933 2933 """
2934 2934 group = self
2935 2935 data = {
2936 2936 'group_id': group.group_id,
2937 2937 'group_name': group.group_name,
2938 2938 'group_description': group.description_safe,
2939 2939 'parent_group': group.parent_group.group_name if group.parent_group else None,
2940 2940 'repositories': [x.repo_name for x in group.repositories],
2941 2941 'owner': group.user.username,
2942 2942 }
2943 2943 return data
2944 2944
2945 2945 def get_dict(self):
2946 2946 # Since we transformed `group_name` to a hybrid property, we need to
2947 2947 # keep compatibility with the code which uses `group_name` field.
2948 2948 result = super(RepoGroup, self).get_dict()
2949 2949 result['group_name'] = result.pop('_group_name', None)
2950 2950 return result
2951 2951
2952 2952
2953 2953 class Permission(Base, BaseModel):
2954 2954 __tablename__ = 'permissions'
2955 2955 __table_args__ = (
2956 2956 Index('p_perm_name_idx', 'permission_name'),
2957 2957 base_table_args,
2958 2958 )
2959 2959
2960 2960 PERMS = [
2961 2961 ('hg.admin', _('RhodeCode Super Administrator')),
2962 2962
2963 2963 ('repository.none', _('Repository no access')),
2964 2964 ('repository.read', _('Repository read access')),
2965 2965 ('repository.write', _('Repository write access')),
2966 2966 ('repository.admin', _('Repository admin access')),
2967 2967
2968 2968 ('group.none', _('Repository group no access')),
2969 2969 ('group.read', _('Repository group read access')),
2970 2970 ('group.write', _('Repository group write access')),
2971 2971 ('group.admin', _('Repository group admin access')),
2972 2972
2973 2973 ('usergroup.none', _('User group no access')),
2974 2974 ('usergroup.read', _('User group read access')),
2975 2975 ('usergroup.write', _('User group write access')),
2976 2976 ('usergroup.admin', _('User group admin access')),
2977 2977
2978 2978 ('branch.none', _('Branch no permissions')),
2979 2979 ('branch.merge', _('Branch access by web merge')),
2980 2980 ('branch.push', _('Branch access by push')),
2981 2981 ('branch.push_force', _('Branch access by push with force')),
2982 2982
2983 2983 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2984 2984 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2985 2985
2986 2986 ('hg.usergroup.create.false', _('User Group creation disabled')),
2987 2987 ('hg.usergroup.create.true', _('User Group creation enabled')),
2988 2988
2989 2989 ('hg.create.none', _('Repository creation disabled')),
2990 2990 ('hg.create.repository', _('Repository creation enabled')),
2991 2991 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2992 2992 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2993 2993
2994 2994 ('hg.fork.none', _('Repository forking disabled')),
2995 2995 ('hg.fork.repository', _('Repository forking enabled')),
2996 2996
2997 2997 ('hg.register.none', _('Registration disabled')),
2998 2998 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2999 2999 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
3000 3000
3001 3001 ('hg.password_reset.enabled', _('Password reset enabled')),
3002 3002 ('hg.password_reset.hidden', _('Password reset hidden')),
3003 3003 ('hg.password_reset.disabled', _('Password reset disabled')),
3004 3004
3005 3005 ('hg.extern_activate.manual', _('Manual activation of external account')),
3006 3006 ('hg.extern_activate.auto', _('Automatic activation of external account')),
3007 3007
3008 3008 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
3009 3009 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
3010 3010 ]
3011 3011
3012 3012 # definition of system default permissions for DEFAULT user, created on
3013 3013 # system setup
3014 3014 DEFAULT_USER_PERMISSIONS = [
3015 3015 # object perms
3016 3016 'repository.read',
3017 3017 'group.read',
3018 3018 'usergroup.read',
3019 3019         # branch, for backward compat we need the same value as before, so force-push stays allowed
3020 3020 'branch.push_force',
3021 3021 # global
3022 3022 'hg.create.repository',
3023 3023 'hg.repogroup.create.false',
3024 3024 'hg.usergroup.create.false',
3025 3025 'hg.create.write_on_repogroup.true',
3026 3026 'hg.fork.repository',
3027 3027 'hg.register.manual_activate',
3028 3028 'hg.password_reset.enabled',
3029 3029 'hg.extern_activate.auto',
3030 3030 'hg.inherit_default_perms.true',
3031 3031 ]
3032 3032
3033 3033         # Weight defines which permissions are more important:
3034 3034         # the higher the number, the more important the permission.
3035 3035         # The permission with the higher weight takes precedence.
3036 3036 PERM_WEIGHTS = {
3037 3037 'repository.none': 0,
3038 3038 'repository.read': 1,
3039 3039 'repository.write': 3,
3040 3040 'repository.admin': 4,
3041 3041
3042 3042 'group.none': 0,
3043 3043 'group.read': 1,
3044 3044 'group.write': 3,
3045 3045 'group.admin': 4,
3046 3046
3047 3047 'usergroup.none': 0,
3048 3048 'usergroup.read': 1,
3049 3049 'usergroup.write': 3,
3050 3050 'usergroup.admin': 4,
3051 3051
3052 3052 'branch.none': 0,
3053 3053 'branch.merge': 1,
3054 3054 'branch.push': 3,
3055 3055 'branch.push_force': 4,
3056 3056
3057 3057 'hg.repogroup.create.false': 0,
3058 3058 'hg.repogroup.create.true': 1,
3059 3059
3060 3060 'hg.usergroup.create.false': 0,
3061 3061 'hg.usergroup.create.true': 1,
3062 3062
3063 3063 'hg.fork.none': 0,
3064 3064 'hg.fork.repository': 1,
3065 3065 'hg.create.none': 0,
3066 3066 'hg.create.repository': 1
3067 3067 }
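    # illustrative lookup (not part of the model): of two permissions on the same
    # object, the one with the higher weight wins, e.g.
    #   max(['repository.read', 'repository.write'], key=PERM_WEIGHTS.get)
    #   -> 'repository.write'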
3068 3068
3069 3069 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3070 3070 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
3071 3071 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3072 3072
3073 3073 def __unicode__(self):
3074 3074 return u"<%s('%s:%s')>" % (
3075 3075 self.__class__.__name__, self.permission_id, self.permission_name
3076 3076 )
3077 3077
3078 3078 @classmethod
3079 3079 def get_by_key(cls, key):
3080 3080 return cls.query().filter(cls.permission_name == key).scalar()
3081 3081
3082 3082 @classmethod
3083 3083 def get_default_repo_perms(cls, user_id, repo_id=None):
3084 3084 q = Session().query(UserRepoToPerm, Repository, Permission)\
3085 3085 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3086 3086 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3087 3087 .filter(UserRepoToPerm.user_id == user_id)
3088 3088 if repo_id:
3089 3089 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3090 3090 return q.all()
3091 3091
3092 3092 @classmethod
3093 3093 def get_default_repo_branch_perms(cls, user_id, repo_id=None):
3094 3094 q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
3095 3095 .join(
3096 3096 Permission,
3097 3097 UserToRepoBranchPermission.permission_id == Permission.permission_id) \
3098 3098 .join(
3099 3099 UserRepoToPerm,
3100 3100 UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
3101 3101 .filter(UserRepoToPerm.user_id == user_id)
3102 3102
3103 3103 if repo_id:
3104 3104 q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
3105 3105 return q.order_by(UserToRepoBranchPermission.rule_order).all()
3106 3106
3107 3107 @classmethod
3108 3108 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
3109 3109 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
3110 3110 .join(
3111 3111 Permission,
3112 3112 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
3113 3113 .join(
3114 3114 Repository,
3115 3115 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
3116 3116 .join(
3117 3117 UserGroup,
3118 3118 UserGroupRepoToPerm.users_group_id ==
3119 3119 UserGroup.users_group_id)\
3120 3120 .join(
3121 3121 UserGroupMember,
3122 3122 UserGroupRepoToPerm.users_group_id ==
3123 3123 UserGroupMember.users_group_id)\
3124 3124 .filter(
3125 3125 UserGroupMember.user_id == user_id,
3126 3126 UserGroup.users_group_active == true())
3127 3127 if repo_id:
3128 3128 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
3129 3129 return q.all()
3130 3130
3131 3131 @classmethod
3132 3132 def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
3133 3133 q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
3134 3134 .join(
3135 3135 Permission,
3136 3136 UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
3137 3137 .join(
3138 3138 UserGroupRepoToPerm,
3139 3139 UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
3140 3140 .join(
3141 3141 UserGroup,
3142 3142 UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
3143 3143 .join(
3144 3144 UserGroupMember,
3145 3145 UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
3146 3146 .filter(
3147 3147 UserGroupMember.user_id == user_id,
3148 3148 UserGroup.users_group_active == true())
3149 3149
3150 3150 if repo_id:
3151 3151 q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
3152 3152 return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3153 3153
3154 3154 @classmethod
3155 3155 def get_default_group_perms(cls, user_id, repo_group_id=None):
3156 3156 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
3157 3157 .join(
3158 3158 Permission,
3159 3159 UserRepoGroupToPerm.permission_id == Permission.permission_id)\
3160 3160 .join(
3161 3161 RepoGroup,
3162 3162 UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
3163 3163 .filter(UserRepoGroupToPerm.user_id == user_id)
3164 3164 if repo_group_id:
3165 3165 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
3166 3166 return q.all()
3167 3167
3168 3168 @classmethod
3169 3169 def get_default_group_perms_from_user_group(
3170 3170 cls, user_id, repo_group_id=None):
3171 3171 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
3172 3172 .join(
3173 3173 Permission,
3174 3174 UserGroupRepoGroupToPerm.permission_id ==
3175 3175 Permission.permission_id)\
3176 3176 .join(
3177 3177 RepoGroup,
3178 3178 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
3179 3179 .join(
3180 3180 UserGroup,
3181 3181 UserGroupRepoGroupToPerm.users_group_id ==
3182 3182 UserGroup.users_group_id)\
3183 3183 .join(
3184 3184 UserGroupMember,
3185 3185 UserGroupRepoGroupToPerm.users_group_id ==
3186 3186 UserGroupMember.users_group_id)\
3187 3187 .filter(
3188 3188 UserGroupMember.user_id == user_id,
3189 3189 UserGroup.users_group_active == true())
3190 3190 if repo_group_id:
3191 3191 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
3192 3192 return q.all()
3193 3193
3194 3194 @classmethod
3195 3195 def get_default_user_group_perms(cls, user_id, user_group_id=None):
3196 3196 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
3197 3197 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
3198 3198 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
3199 3199 .filter(UserUserGroupToPerm.user_id == user_id)
3200 3200 if user_group_id:
3201 3201 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
3202 3202 return q.all()
3203 3203
3204 3204 @classmethod
3205 3205 def get_default_user_group_perms_from_user_group(
3206 3206 cls, user_id, user_group_id=None):
3207 3207 TargetUserGroup = aliased(UserGroup, name='target_user_group')
3208 3208 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
3209 3209 .join(
3210 3210 Permission,
3211 3211 UserGroupUserGroupToPerm.permission_id ==
3212 3212 Permission.permission_id)\
3213 3213 .join(
3214 3214 TargetUserGroup,
3215 3215 UserGroupUserGroupToPerm.target_user_group_id ==
3216 3216 TargetUserGroup.users_group_id)\
3217 3217 .join(
3218 3218 UserGroup,
3219 3219 UserGroupUserGroupToPerm.user_group_id ==
3220 3220 UserGroup.users_group_id)\
3221 3221 .join(
3222 3222 UserGroupMember,
3223 3223 UserGroupUserGroupToPerm.user_group_id ==
3224 3224 UserGroupMember.users_group_id)\
3225 3225 .filter(
3226 3226 UserGroupMember.user_id == user_id,
3227 3227 UserGroup.users_group_active == true())
3228 3228 if user_group_id:
3229 3229 q = q.filter(
3230 3230 UserGroupUserGroupToPerm.user_group_id == user_group_id)
3231 3231
3232 3232 return q.all()
3233 3233
3234 3234
3235 3235 class UserRepoToPerm(Base, BaseModel):
3236 3236 __tablename__ = 'repo_to_perm'
3237 3237 __table_args__ = (
3238 3238 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
3239 3239 base_table_args
3240 3240 )
3241 3241
3242 3242 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3243 3243 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3244 3244 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3245 3245 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3246 3246
3247 3247 user = relationship('User')
3248 3248 repository = relationship('Repository')
3249 3249 permission = relationship('Permission')
3250 3250
3251 3251 branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete, delete-orphan", lazy='joined')
3252 3252
3253 3253 @classmethod
3254 3254 def create(cls, user, repository, permission):
3255 3255 n = cls()
3256 3256 n.user = user
3257 3257 n.repository = repository
3258 3258 n.permission = permission
3259 3259 Session().add(n)
3260 3260 return n
3261 3261
3262 3262 def __unicode__(self):
3263 3263 return u'<%s => %s >' % (self.user, self.repository)
3264 3264
3265 3265
3266 3266 class UserUserGroupToPerm(Base, BaseModel):
3267 3267 __tablename__ = 'user_user_group_to_perm'
3268 3268 __table_args__ = (
3269 3269 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
3270 3270 base_table_args
3271 3271 )
3272 3272
3273 3273 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3274 3274 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3275 3275 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3276 3276 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3277 3277
3278 3278 user = relationship('User')
3279 3279 user_group = relationship('UserGroup')
3280 3280 permission = relationship('Permission')
3281 3281
3282 3282 @classmethod
3283 3283 def create(cls, user, user_group, permission):
3284 3284 n = cls()
3285 3285 n.user = user
3286 3286 n.user_group = user_group
3287 3287 n.permission = permission
3288 3288 Session().add(n)
3289 3289 return n
3290 3290
3291 3291 def __unicode__(self):
3292 3292 return u'<%s => %s >' % (self.user, self.user_group)
3293 3293
3294 3294
3295 3295 class UserToPerm(Base, BaseModel):
3296 3296 __tablename__ = 'user_to_perm'
3297 3297 __table_args__ = (
3298 3298 UniqueConstraint('user_id', 'permission_id'),
3299 3299 base_table_args
3300 3300 )
3301 3301
3302 3302 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3303 3303 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3304 3304 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3305 3305
3306 3306 user = relationship('User')
3307 3307 permission = relationship('Permission', lazy='joined')
3308 3308
3309 3309 def __unicode__(self):
3310 3310 return u'<%s => %s >' % (self.user, self.permission)
3311 3311
3312 3312
3313 3313 class UserGroupRepoToPerm(Base, BaseModel):
3314 3314 __tablename__ = 'users_group_repo_to_perm'
3315 3315 __table_args__ = (
3316 3316 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
3317 3317 base_table_args
3318 3318 )
3319 3319
3320 3320 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3321 3321 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3322 3322 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3323 3323 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
3324 3324
3325 3325 users_group = relationship('UserGroup')
3326 3326 permission = relationship('Permission')
3327 3327 repository = relationship('Repository')
3328 3328 user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')
3329 3329
3330 3330 @classmethod
3331 3331 def create(cls, users_group, repository, permission):
3332 3332 n = cls()
3333 3333 n.users_group = users_group
3334 3334 n.repository = repository
3335 3335 n.permission = permission
3336 3336 Session().add(n)
3337 3337 return n
3338 3338
3339 3339 def __unicode__(self):
3340 3340 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3341 3341
3342 3342
3343 3343 class UserGroupUserGroupToPerm(Base, BaseModel):
3344 3344 __tablename__ = 'user_group_user_group_to_perm'
3345 3345 __table_args__ = (
3346 3346 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3347 3347 CheckConstraint('target_user_group_id != user_group_id'),
3348 3348 base_table_args
3349 3349 )
3350 3350
3351 3351 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3352 3352 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3353 3353 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3354 3354 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3355 3355
3356 3356 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3357 3357 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3358 3358 permission = relationship('Permission')
3359 3359
3360 3360 @classmethod
3361 3361 def create(cls, target_user_group, user_group, permission):
3362 3362 n = cls()
3363 3363 n.target_user_group = target_user_group
3364 3364 n.user_group = user_group
3365 3365 n.permission = permission
3366 3366 Session().add(n)
3367 3367 return n
3368 3368
3369 3369 def __unicode__(self):
3370 3370 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3371 3371
3372 3372
3373 3373 class UserGroupToPerm(Base, BaseModel):
3374 3374 __tablename__ = 'users_group_to_perm'
3375 3375 __table_args__ = (
3376 3376 UniqueConstraint('users_group_id', 'permission_id',),
3377 3377 base_table_args
3378 3378 )
3379 3379
3380 3380 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3381 3381 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3382 3382 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3383 3383
3384 3384 users_group = relationship('UserGroup')
3385 3385 permission = relationship('Permission')
3386 3386
3387 3387
3388 3388 class UserRepoGroupToPerm(Base, BaseModel):
3389 3389 __tablename__ = 'user_repo_group_to_perm'
3390 3390 __table_args__ = (
3391 3391 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3392 3392 base_table_args
3393 3393 )
3394 3394
3395 3395 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3396 3396 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3397 3397 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3398 3398 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3399 3399
3400 3400 user = relationship('User')
3401 3401 group = relationship('RepoGroup')
3402 3402 permission = relationship('Permission')
3403 3403
3404 3404 @classmethod
3405 3405 def create(cls, user, repository_group, permission):
3406 3406 n = cls()
3407 3407 n.user = user
3408 3408 n.group = repository_group
3409 3409 n.permission = permission
3410 3410 Session().add(n)
3411 3411 return n
3412 3412
3413 3413
3414 3414 class UserGroupRepoGroupToPerm(Base, BaseModel):
3415 3415 __tablename__ = 'users_group_repo_group_to_perm'
3416 3416 __table_args__ = (
3417 3417 UniqueConstraint('users_group_id', 'group_id'),
3418 3418 base_table_args
3419 3419 )
3420 3420
3421 3421 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3422 3422 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3423 3423 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3424 3424 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3425 3425
3426 3426 users_group = relationship('UserGroup')
3427 3427 permission = relationship('Permission')
3428 3428 group = relationship('RepoGroup')
3429 3429
3430 3430 @classmethod
3431 3431 def create(cls, user_group, repository_group, permission):
3432 3432 n = cls()
3433 3433 n.users_group = user_group
3434 3434 n.group = repository_group
3435 3435 n.permission = permission
3436 3436 Session().add(n)
3437 3437 return n
3438 3438
3439 3439 def __unicode__(self):
3440 3440 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3441 3441
3442 3442
3443 3443 class Statistics(Base, BaseModel):
3444 3444 __tablename__ = 'statistics'
3445 3445 __table_args__ = (
3446 3446 base_table_args
3447 3447 )
3448 3448
3449 3449 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3450 3450 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3451 3451 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3452 3452 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
3453 3453 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
3454 3454 languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data
3455 3455
3456 3456 repository = relationship('Repository', single_parent=True)
3457 3457
3458 3458
3459 3459 class UserFollowing(Base, BaseModel):
3460 3460 __tablename__ = 'user_followings'
3461 3461 __table_args__ = (
3462 3462 UniqueConstraint('user_id', 'follows_repository_id'),
3463 3463 UniqueConstraint('user_id', 'follows_user_id'),
3464 3464 base_table_args
3465 3465 )
3466 3466
3467 3467 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3468 3468 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3469 3469 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3470 3470 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3471 3471 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3472 3472
3473 3473 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3474 3474
3475 3475 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3476 3476 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3477 3477
3478 3478 @classmethod
3479 3479 def get_repo_followers(cls, repo_id):
3480 3480 return cls.query().filter(cls.follows_repo_id == repo_id)
3481 3481
3482 3482
3483 3483 class CacheKey(Base, BaseModel):
3484 3484 __tablename__ = 'cache_invalidation'
3485 3485 __table_args__ = (
3486 3486 UniqueConstraint('cache_key'),
3487 3487 Index('key_idx', 'cache_key'),
3488 3488 base_table_args,
3489 3489 )
3490 3490
3491 3491 CACHE_TYPE_FEED = 'FEED'
3492 3492 CACHE_TYPE_README = 'README'
3493 3493 # namespaces used to register process/thread aware caches
3494 3494 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3495 3495 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3496 3496
3497 3497 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3498 3498 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3499 3499 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3500 3500 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3501 3501
3502 3502 def __init__(self, cache_key, cache_args=''):
3503 3503 self.cache_key = cache_key
3504 3504 self.cache_args = cache_args
3505 3505 self.cache_active = False
3506 3506
3507 3507 def __unicode__(self):
3508 3508 return u"<%s('%s:%s[%s]')>" % (
3509 3509 self.__class__.__name__,
3510 3510 self.cache_id, self.cache_key, self.cache_active)
3511 3511
3512 3512 def _cache_key_partition(self):
3513 3513 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3514 3514 return prefix, repo_name, suffix
3515 3515
3516 3516 def get_prefix(self):
3517 3517 """
3518 3518 Try to extract prefix from existing cache key. The key could consist
3519 3519 of prefix, repo_name, suffix
3520 3520 """
3521 3521 # this returns prefix, repo_name, suffix
3522 3522 return self._cache_key_partition()[0]
3523 3523
3524 3524 def get_suffix(self):
3525 3525 """
3526 3526 get suffix that might have been used in _get_cache_key to
3527 3527 generate self.cache_key. Only used for informational purposes
3528 3528 in repo_edit.mako.
3529 3529 """
3530 3530 # prefix, repo_name, suffix
3531 3531 return self._cache_key_partition()[2]
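    # illustrative partition (made-up key): with cache_key u'v1_some-repo_README'
    # and cache_args u'some-repo', the prefix is u'v1_' and the suffix u'_README'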
3532 3532
3533 3533 @classmethod
3534 3534 def delete_all_cache(cls):
3535 3535 """
3536 3536 Delete all cache keys from database.
3537 3537 Should only be run when all instances are down and all entries
3538 3538 thus stale.
3539 3539 """
3540 3540 cls.query().delete()
3541 3541 Session().commit()
3542 3542
3543 3543 @classmethod
3544 3544 def set_invalidate(cls, cache_uid, delete=False):
3545 3545 """
3546 3546 Mark all caches of a repo as invalid in the database.
3547 3547 """
3548 3548
3549 3549 try:
3550 3550 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3551 3551 if delete:
3552 3552 qry.delete()
3553 3553 log.debug('cache objects deleted for cache args %s',
3554 3554 safe_str(cache_uid))
3555 3555 else:
3556 3556 qry.update({"cache_active": False})
3557 3557 log.debug('cache objects marked as invalid for cache args %s',
3558 3558 safe_str(cache_uid))
3559 3559
3560 3560 Session().commit()
3561 3561 except Exception:
3562 3562 log.exception(
3563 3563 'Cache key invalidation failed for cache args %s',
3564 3564 safe_str(cache_uid))
3565 3565 Session().rollback()
3566 3566
3567 3567 @classmethod
3568 3568 def get_active_cache(cls, cache_key):
3569 3569 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3570 3570 if inv_obj:
3571 3571 return inv_obj
3572 3572 return None
3573 3573
3574 3574
3575 3575 class ChangesetComment(Base, BaseModel):
3576 3576 __tablename__ = 'changeset_comments'
3577 3577 __table_args__ = (
3578 3578 Index('cc_revision_idx', 'revision'),
3579 3579 base_table_args,
3580 3580 )
3581 3581
3582 3582 COMMENT_OUTDATED = u'comment_outdated'
3583 3583 COMMENT_TYPE_NOTE = u'note'
3584 3584 COMMENT_TYPE_TODO = u'todo'
3585 3585 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3586 3586
3587 3587 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3588 3588 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3589 3589 revision = Column('revision', String(40), nullable=True)
3590 3590 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3591 3591 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3592 3592 line_no = Column('line_no', Unicode(10), nullable=True)
3593 3593 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3594 3594 f_path = Column('f_path', Unicode(1000), nullable=True)
3595 3595 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3596 3596 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3597 3597 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3598 3598 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3599 3599 renderer = Column('renderer', Unicode(64), nullable=True)
3600 3600 display_state = Column('display_state', Unicode(128), nullable=True)
3601 3601
3602 3602 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3603 3603 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3604 3604
3605 3605 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
3606 3606 resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')
3607 3607
3608 3608 author = relationship('User', lazy='joined')
3609 3609 repo = relationship('Repository')
3610 3610 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3611 3611 pull_request = relationship('PullRequest', lazy='joined')
3612 3612 pull_request_version = relationship('PullRequestVersion')
3613 3613
3614 3614 @classmethod
3615 3615 def get_users(cls, revision=None, pull_request_id=None):
3616 3616 """
3617 3617         Returns users associated with this ChangesetComment, i.e. those
3618 3618         who actually commented
3619 3619
3620 3620 :param cls:
3621 3621 :param revision:
3622 3622 """
3623 3623 q = Session().query(User)\
3624 3624 .join(ChangesetComment.author)
3625 3625 if revision:
3626 3626 q = q.filter(cls.revision == revision)
3627 3627 elif pull_request_id:
3628 3628 q = q.filter(cls.pull_request_id == pull_request_id)
3629 3629 return q.all()
3630 3630
3631 3631 @classmethod
3632 3632 def get_index_from_version(cls, pr_version, versions):
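        # returns the 1-based position of pr_version among the given versions;
        # e.g. (illustrative ids) version ids [10, 11, 12] and pr_version=11 -> 2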
3633 3633 num_versions = [x.pull_request_version_id for x in versions]
3634 3634 try:
3635 3635             return num_versions.index(pr_version) + 1
3636 3636 except (IndexError, ValueError):
3637 3637 return
3638 3638
3639 3639 @property
3640 3640 def outdated(self):
3641 3641 return self.display_state == self.COMMENT_OUTDATED
3642 3642
3643 3643 def outdated_at_version(self, version):
3644 3644 """
3645 3645 Checks if comment is outdated for given pull request version
3646 3646 """
3647 3647 return self.outdated and self.pull_request_version_id != version
3648 3648
3649 3649 def older_than_version(self, version):
3650 3650 """
3651 3651         Checks if the comment was made in an earlier version than the given one
3652 3652 """
3653 3653 if version is None:
3654 3654 return self.pull_request_version_id is not None
3655 3655
3656 3656 return self.pull_request_version_id < version
3657 3657
3658 3658 @property
3659 3659 def resolved(self):
3660 3660 return self.resolved_by[0] if self.resolved_by else None
3661 3661
3662 3662 @property
3663 3663 def is_todo(self):
3664 3664 return self.comment_type == self.COMMENT_TYPE_TODO
3665 3665
3666 3666 @property
3667 3667 def is_inline(self):
3668 3668 return self.line_no and self.f_path
3669 3669
3670 3670 def get_index_version(self, versions):
3671 3671 return self.get_index_from_version(
3672 3672 self.pull_request_version_id, versions)
3673 3673
3674 3674 def __repr__(self):
3675 3675 if self.comment_id:
3676 3676 return '<DB:Comment #%s>' % self.comment_id
3677 3677 else:
3678 3678 return '<DB:Comment at %#x>' % id(self)
3679 3679
3680 3680 def get_api_data(self):
3681 3681 comment = self
3682 3682 data = {
3683 3683 'comment_id': comment.comment_id,
3684 3684 'comment_type': comment.comment_type,
3685 3685 'comment_text': comment.text,
3686 3686 'comment_status': comment.status_change,
3687 3687 'comment_f_path': comment.f_path,
3688 3688 'comment_lineno': comment.line_no,
3689 3689 'comment_author': comment.author,
3690 3690 'comment_created_on': comment.created_on,
3691 3691 'comment_resolved_by': self.resolved
3692 3692 }
3693 3693 return data
3694 3694
3695 3695 def __json__(self):
3696 3696 data = dict()
3697 3697 data.update(self.get_api_data())
3698 3698 return data
3699 3699
3700 3700
3701 3701 class ChangesetStatus(Base, BaseModel):
3702 3702 __tablename__ = 'changeset_statuses'
3703 3703 __table_args__ = (
3704 3704 Index('cs_revision_idx', 'revision'),
3705 3705 Index('cs_version_idx', 'version'),
3706 3706 UniqueConstraint('repo_id', 'revision', 'version'),
3707 3707 base_table_args
3708 3708 )
3709 3709
3710 3710 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3711 3711 STATUS_APPROVED = 'approved'
3712 3712 STATUS_REJECTED = 'rejected'
3713 3713 STATUS_UNDER_REVIEW = 'under_review'
3714 3714
3715 3715 STATUSES = [
3716 3716 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3717 3717 (STATUS_APPROVED, _("Approved")),
3718 3718 (STATUS_REJECTED, _("Rejected")),
3719 3719 (STATUS_UNDER_REVIEW, _("Under Review")),
3720 3720 ]
3721 3721
3722 3722 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3723 3723 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3724 3724 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3725 3725 revision = Column('revision', String(40), nullable=False)
3726 3726 status = Column('status', String(128), nullable=False, default=DEFAULT)
3727 3727 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3728 3728 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3729 3729 version = Column('version', Integer(), nullable=False, default=0)
3730 3730 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3731 3731
3732 3732 author = relationship('User', lazy='joined')
3733 3733 repo = relationship('Repository')
3734 3734 comment = relationship('ChangesetComment', lazy='joined')
3735 3735 pull_request = relationship('PullRequest', lazy='joined')
3736 3736
3737 3737 def __unicode__(self):
3738 3738 return u"<%s('%s[v%s]:%s')>" % (
3739 3739 self.__class__.__name__,
3740 3740 self.status, self.version, self.author
3741 3741 )
3742 3742
3743 3743 @classmethod
3744 3744 def get_status_lbl(cls, value):
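        # e.g. get_status_lbl('approved') -> u'Approved' (the translated label)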
3745 3745 return dict(cls.STATUSES).get(value)
3746 3746
3747 3747 @property
3748 3748 def status_lbl(self):
3749 3749 return ChangesetStatus.get_status_lbl(self.status)
3750 3750
3751 3751 def get_api_data(self):
3752 3752 status = self
3753 3753 data = {
3754 3754 'status_id': status.changeset_status_id,
3755 3755 'status': status.status,
3756 3756 }
3757 3757 return data
3758 3758
3759 3759 def __json__(self):
3760 3760 data = dict()
3761 3761 data.update(self.get_api_data())
3762 3762 return data
3763 3763
3764 3764
3765 3765 class _SetState(object):
3766 3766 """
3767 3767     Context manager allowing state changes for sensitive operations such as
3768 3768     pull request update or merge
3769 3769 """
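    # illustrative usage (assumed caller code; normally reached via
    # PullRequest.set_state(), defined below):
    #
    #   with _SetState(pull_request, PullRequest.STATE_UPDATING):
    #       update_commits(pull_request)  # hypothetical work done while 'updating'
    #
    #   # on a clean exit __exit__ restores the original state; if an exception
    #   # escapes the block it is only logged and allowed to propagate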
3770 3770
3771 3771 def __init__(self, pull_request, pr_state, back_state=None):
3772 3772 self._pr = pull_request
3773 3773 self._org_state = back_state or pull_request.pull_request_state
3774 3774 self._pr_state = pr_state
3775 self._current_state = None
3775 3776
3776 3777 def __enter__(self):
3777 3778 log.debug('StateLock: entering set state context, setting state to: `%s`',
3778 3779 self._pr_state)
3779 self._pr.pull_request_state = self._pr_state
3780 Session().add(self._pr)
3781 Session().commit()
3780 self.set_pr_state(self._pr_state)
3781 return self
3782 3782
3783 3783 def __exit__(self, exc_type, exc_val, exc_tb):
3784 if exc_val is not None:
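            # an exception escaped the block: log it and fall through (returning
            # None/falsy) so the exception propagates to the caller unchanged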
3785             log.error(''.join(traceback.format_exception(exc_type, exc_val, exc_tb)))
3786 return None
3787
3788 self.set_pr_state(self._org_state)
3784 3789 log.debug('StateLock: exiting set state context, setting state to: `%s`',
3785 3790 self._org_state)
3786 self._pr.pull_request_state = self._org_state
3791 @property
3792 def state(self):
3793 return self._current_state
3794
3795 def set_pr_state(self, pr_state):
3796 try:
3797 self._pr.pull_request_state = pr_state
3787 3798 Session().add(self._pr)
3788 3799 Session().commit()
3789
3800 self._current_state = pr_state
3801 except Exception:
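            # the DB commit failed: _current_state stays untouched; log the full
            # traceback and re-raise so callers can react to the failure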
3802 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
3803 raise
3790 3804

3791 3805 class _PullRequestBase(BaseModel):
3792 3806 """
3793 3807 Common attributes of pull request and version entries.
3794 3808 """
3795 3809
3796 3810 # .status values
3797 3811 STATUS_NEW = u'new'
3798 3812 STATUS_OPEN = u'open'
3799 3813 STATUS_CLOSED = u'closed'
3800 3814
3801 3815 # available states
3802 3816 STATE_CREATING = u'creating'
3803 3817 STATE_UPDATING = u'updating'
3804 3818 STATE_MERGING = u'merging'
3805 3819 STATE_CREATED = u'created'
3806 3820
3807 3821 title = Column('title', Unicode(255), nullable=True)
3808 3822 description = Column(
3809 3823 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3810 3824 nullable=True)
3811 3825 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3812 3826
3813 3827 # new/open/closed status of pull request (not approve/reject/etc)
3814 3828 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3815 3829 created_on = Column(
3816 3830 'created_on', DateTime(timezone=False), nullable=False,
3817 3831 default=datetime.datetime.now)
3818 3832 updated_on = Column(
3819 3833 'updated_on', DateTime(timezone=False), nullable=False,
3820 3834 default=datetime.datetime.now)
3821 3835
3822 3836 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3823 3837
3824 3838 @declared_attr
3825 3839 def user_id(cls):
3826 3840 return Column(
3827 3841 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3828 3842 unique=None)
3829 3843
3830 3844 # 500 revisions max
3831 3845 _revisions = Column(
3832 3846 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3833 3847
3834 3848 @declared_attr
3835 3849 def source_repo_id(cls):
3836 3850 # TODO: dan: rename column to source_repo_id
3837 3851 return Column(
3838 3852 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3839 3853 nullable=False)
3840 3854
3841 3855 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3842 3856
3843 3857 @hybrid_property
3844 3858 def source_ref(self):
3845 3859 return self._source_ref
3846 3860
3847 3861 @source_ref.setter
3848 3862 def source_ref(self, val):
3849 3863 parts = (val or '').split(':')
3850 3864 if len(parts) != 3:
3851 3865 raise ValueError(
3852 3866 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3853 3867 self._source_ref = safe_unicode(val)
3854 3868
3855 3869 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3856 3870
3857 3871 @hybrid_property
3858 3872 def target_ref(self):
3859 3873 return self._target_ref
3860 3874
3861 3875 @target_ref.setter
3862 3876 def target_ref(self, val):
3863 3877 parts = (val or '').split(':')
3864 3878 if len(parts) != 3:
3865 3879 raise ValueError(
3866 3880 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3867 3881 self._target_ref = safe_unicode(val)
3868 3882
3869 3883 @declared_attr
3870 3884 def target_repo_id(cls):
3871 3885 # TODO: dan: rename column to target_repo_id
3872 3886 return Column(
3873 3887 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3874 3888 nullable=False)
3875 3889
3876 3890 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3877 3891
3878 3892 # TODO: dan: rename column to last_merge_source_rev
3879 3893 _last_merge_source_rev = Column(
3880 3894 'last_merge_org_rev', String(40), nullable=True)
3881 3895 # TODO: dan: rename column to last_merge_target_rev
3882 3896 _last_merge_target_rev = Column(
3883 3897 'last_merge_other_rev', String(40), nullable=True)
3884 3898 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3885 3899 merge_rev = Column('merge_rev', String(40), nullable=True)
3886 3900
3887 3901 reviewer_data = Column(
3888 3902 'reviewer_data_json', MutationObj.as_mutable(
3889 3903 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3890 3904
3891 3905 @property
3892 3906 def reviewer_data_json(self):
3893 3907 return json.dumps(self.reviewer_data)
3894 3908
3895 3909 @hybrid_property
3896 3910 def description_safe(self):
3897 3911 from rhodecode.lib import helpers as h
3898 3912 return h.escape(self.description)
3899 3913
3900 3914 @hybrid_property
3901 3915 def revisions(self):
3902 3916 return self._revisions.split(':') if self._revisions else []
3903 3917
3904 3918 @revisions.setter
3905 3919 def revisions(self, val):
3906 3920 self._revisions = ':'.join(val)
3907 3921
3908 3922 @hybrid_property
3909 3923 def last_merge_status(self):
3910 3924 return safe_int(self._last_merge_status)
3911 3925
3912 3926 @last_merge_status.setter
3913 3927 def last_merge_status(self, val):
3914 3928 self._last_merge_status = val
3915 3929
3916 3930 @declared_attr
3917 3931 def author(cls):
3918 3932 return relationship('User', lazy='joined')
3919 3933
3920 3934 @declared_attr
3921 3935 def source_repo(cls):
3922 3936 return relationship(
3923 3937 'Repository',
3924 3938 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3925 3939
3926 3940 @property
3927 3941 def source_ref_parts(self):
3928 3942 return self.unicode_to_reference(self.source_ref)
3929 3943
3930 3944 @declared_attr
3931 3945 def target_repo(cls):
3932 3946 return relationship(
3933 3947 'Repository',
3934 3948 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3935 3949
3936 3950 @property
3937 3951 def target_ref_parts(self):
3938 3952 return self.unicode_to_reference(self.target_ref)
3939 3953
3940 3954 @property
3941 3955 def shadow_merge_ref(self):
3942 3956 return self.unicode_to_reference(self._shadow_merge_ref)
3943 3957
3944 3958 @shadow_merge_ref.setter
3945 3959 def shadow_merge_ref(self, ref):
3946 3960 self._shadow_merge_ref = self.reference_to_unicode(ref)
3947 3961
3948 3962 @staticmethod
3949 3963 def unicode_to_reference(raw):
3950 3964 """
3951 3965 Convert a unicode (or string) to a reference object.
3952 3966 If unicode evaluates to False it returns None.
3953 3967 """
3954 3968 if raw:
3955 3969 refs = raw.split(':')
3956 3970 return Reference(*refs)
3957 3971 else:
3958 3972 return None
3959 3973
3960 3974 @staticmethod
3961 3975 def reference_to_unicode(ref):
3962 3976 """
3963 3977 Convert a reference object to unicode.
3964 3978 If reference is None it returns None.
3965 3979 """
3966 3980 if ref:
3967 3981 return u':'.join(ref)
3968 3982 else:
3969 3983 return None
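    # illustrative round-trip (made-up values; refs are 'type:name:commit_id' strings):
    #   unicode_to_reference(u'branch:default:1a2b3c') -> Reference('branch', 'default', '1a2b3c')
    #   reference_to_unicode(Reference('branch', 'default', '1a2b3c')) -> u'branch:default:1a2b3c'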
3970 3984
3971 3985 def get_api_data(self, with_merge_state=True):
3972 3986 from rhodecode.model.pull_request import PullRequestModel
3973 3987
3974 3988 pull_request = self
3975 3989 if with_merge_state:
3976 3990 merge_status = PullRequestModel().merge_status(pull_request)
3977 3991 merge_state = {
3978 3992 'status': merge_status[0],
3979 3993 'message': safe_unicode(merge_status[1]),
3980 3994 }
3981 3995 else:
3982 3996 merge_state = {'status': 'not_available',
3983 3997 'message': 'not_available'}
3984 3998
3985 3999 merge_data = {
3986 4000 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3987 4001 'reference': (
3988 4002 pull_request.shadow_merge_ref._asdict()
3989 4003 if pull_request.shadow_merge_ref else None),
3990 4004 }
3991 4005
3992 4006 data = {
3993 4007 'pull_request_id': pull_request.pull_request_id,
3994 4008 'url': PullRequestModel().get_url(pull_request),
3995 4009 'title': pull_request.title,
3996 4010 'description': pull_request.description,
3997 4011 'status': pull_request.status,
3998 4012 'state': pull_request.pull_request_state,
3999 4013 'created_on': pull_request.created_on,
4000 4014 'updated_on': pull_request.updated_on,
4001 4015 'commit_ids': pull_request.revisions,
4002 4016 'review_status': pull_request.calculated_review_status(),
4003 4017 'mergeable': merge_state,
4004 4018 'source': {
4005 4019 'clone_url': pull_request.source_repo.clone_url(),
4006 4020 'repository': pull_request.source_repo.repo_name,
4007 4021 'reference': {
4008 4022 'name': pull_request.source_ref_parts.name,
4009 4023 'type': pull_request.source_ref_parts.type,
4010 4024 'commit_id': pull_request.source_ref_parts.commit_id,
4011 4025 },
4012 4026 },
4013 4027 'target': {
4014 4028 'clone_url': pull_request.target_repo.clone_url(),
4015 4029 'repository': pull_request.target_repo.repo_name,
4016 4030 'reference': {
4017 4031 'name': pull_request.target_ref_parts.name,
4018 4032 'type': pull_request.target_ref_parts.type,
4019 4033 'commit_id': pull_request.target_ref_parts.commit_id,
4020 4034 },
4021 4035 },
4022 4036 'merge': merge_data,
4023 4037 'author': pull_request.author.get_api_data(include_secrets=False,
4024 4038 details='basic'),
4025 4039 'reviewers': [
4026 4040 {
4027 4041 'user': reviewer.get_api_data(include_secrets=False,
4028 4042 details='basic'),
4029 4043 'reasons': reasons,
4030 4044 'review_status': st[0][1].status if st else 'not_reviewed',
4031 4045 }
4032 4046 for obj, reviewer, reasons, mandatory, st in
4033 4047 pull_request.reviewers_statuses()
4034 4048 ]
4035 4049 }
4036 4050
4037 4051 return data
4038 4052
4039 4053 def set_state(self, pull_request_state, final_state=None):
4040 4054 """
4041 4055         # goes from the initial state to `updating` and back to the initial state.
4042 4056         # the state restored on exit can be changed by specifying final_state=
4043 4057 with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
4044 4058 pull_request.merge()
4045 4059
4046 4060         :param pull_request_state: temporary state to set while inside the context
4047 4061         :param final_state: optional state to restore on exit (defaults to the initial state)
4048 4062
4049 4063 """
4050 4064
4051 4065 return _SetState(self, pull_request_state, back_state=final_state)
4052 4066
4053 4067
4054 4068 class PullRequest(Base, _PullRequestBase):
4055 4069 __tablename__ = 'pull_requests'
4056 4070 __table_args__ = (
4057 4071 base_table_args,
4058 4072 )
4059 4073
4060 4074 pull_request_id = Column(
4061 4075 'pull_request_id', Integer(), nullable=False, primary_key=True)
4062 4076
4063 4077 def __repr__(self):
4064 4078 if self.pull_request_id:
4065 4079 return '<DB:PullRequest #%s>' % self.pull_request_id
4066 4080 else:
4067 4081 return '<DB:PullRequest at %#x>' % id(self)
4068 4082
4069 4083 reviewers = relationship('PullRequestReviewers',
4070 4084 cascade="all, delete, delete-orphan")
4071 4085 statuses = relationship('ChangesetStatus',
4072 4086 cascade="all, delete, delete-orphan")
4073 4087 comments = relationship('ChangesetComment',
4074 4088 cascade="all, delete, delete-orphan")
4075 4089 versions = relationship('PullRequestVersion',
4076 4090 cascade="all, delete, delete-orphan",
4077 4091 lazy='dynamic')
4078 4092
4079 4093 @classmethod
4080 4094 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4081 4095 internal_methods=None):
4082 4096
4083 4097 class PullRequestDisplay(object):
4084 4098 """
4085 4099             Special object wrapper for showing PullRequest data via Versions.
4086 4100             It mimics the PR object as closely as possible. This is a read-only
4087 4101             object, just for display.
4088 4102 """
4089 4103
4090 4104 def __init__(self, attrs, internal=None):
4091 4105 self.attrs = attrs
4092 4106                 # internal entries have priority over the ones given via attrs
4093 4107 self.internal = internal or ['versions']
4094 4108
4095 4109 def __getattr__(self, item):
4096 4110 if item in self.internal:
4097 4111 return getattr(self, item)
4098 4112 try:
4099 4113 return self.attrs[item]
4100 4114 except KeyError:
4101 4115 raise AttributeError(
4102 4116 '%s object has no attribute %s' % (self, item))
4103 4117
4104 4118 def __repr__(self):
4105 4119 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
4106 4120
4107 4121 def versions(self):
4108 4122 return pull_request_obj.versions.order_by(
4109 4123 PullRequestVersion.pull_request_version_id).all()
4110 4124
4111 4125 def is_closed(self):
4112 4126 return pull_request_obj.is_closed()
4113 4127
4114 4128 @property
4115 4129 def pull_request_version_id(self):
4116 4130 return getattr(pull_request_obj, 'pull_request_version_id', None)
4117 4131
4118 4132 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4119 4133
4120 4134 attrs.author = StrictAttributeDict(
4121 4135 pull_request_obj.author.get_api_data())
4122 4136 if pull_request_obj.target_repo:
4123 4137 attrs.target_repo = StrictAttributeDict(
4124 4138 pull_request_obj.target_repo.get_api_data())
4125 4139 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4126 4140
4127 4141 if pull_request_obj.source_repo:
4128 4142 attrs.source_repo = StrictAttributeDict(
4129 4143 pull_request_obj.source_repo.get_api_data())
4130 4144 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4131 4145
4132 4146 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4133 4147 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4134 4148 attrs.revisions = pull_request_obj.revisions
4135 4149
4136 4150 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4137 4151 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4138 4152 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4139 4153
4140 4154 return PullRequestDisplay(attrs, internal=internal_methods)
4141 4155
4142 4156 def is_closed(self):
4143 4157 return self.status == self.STATUS_CLOSED
4144 4158
4145 4159 def __json__(self):
4146 4160 return {
4147 4161 'revisions': self.revisions,
4148 4162 }
4149 4163
4150 4164 def calculated_review_status(self):
4151 4165 from rhodecode.model.changeset_status import ChangesetStatusModel
4152 4166 return ChangesetStatusModel().calculated_review_status(self)
4153 4167
4154 4168 def reviewers_statuses(self):
4155 4169 from rhodecode.model.changeset_status import ChangesetStatusModel
4156 4170 return ChangesetStatusModel().reviewers_statuses(self)
4157 4171
4158 4172 @property
4159 4173 def workspace_id(self):
4160 4174 from rhodecode.model.pull_request import PullRequestModel
4161 4175 return PullRequestModel()._workspace_id(self)
4162 4176
4163 4177 def get_shadow_repo(self):
4164 4178 workspace_id = self.workspace_id
4165 4179 vcs_obj = self.target_repo.scm_instance()
4166 4180 shadow_repository_path = vcs_obj._get_shadow_repository_path(
4167 4181 self.target_repo.repo_id, workspace_id)
4168 4182 if os.path.isdir(shadow_repository_path):
4169 4183 return vcs_obj._get_shadow_instance(shadow_repository_path)
4170 4184
4171 4185
4172 4186 class PullRequestVersion(Base, _PullRequestBase):
4173 4187 __tablename__ = 'pull_request_versions'
4174 4188 __table_args__ = (
4175 4189 base_table_args,
4176 4190 )
4177 4191
4178 4192 pull_request_version_id = Column(
4179 4193 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
4180 4194 pull_request_id = Column(
4181 4195 'pull_request_id', Integer(),
4182 4196 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4183 4197 pull_request = relationship('PullRequest')
4184 4198
4185 4199 def __repr__(self):
4186 4200 if self.pull_request_version_id:
4187 4201 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
4188 4202 else:
4189 4203 return '<DB:PullRequestVersion at %#x>' % id(self)
4190 4204
4191 4205 @property
4192 4206 def reviewers(self):
4193 4207 return self.pull_request.reviewers
4194 4208
4195 4209 @property
4196 4210 def versions(self):
4197 4211 return self.pull_request.versions
4198 4212
4199 4213 def is_closed(self):
4200 4214 # calculate from original
4201 4215 return self.pull_request.status == self.STATUS_CLOSED
4202 4216
4203 4217 def calculated_review_status(self):
4204 4218 return self.pull_request.calculated_review_status()
4205 4219
4206 4220 def reviewers_statuses(self):
4207 4221 return self.pull_request.reviewers_statuses()
4208 4222
4209 4223
4210 4224 class PullRequestReviewers(Base, BaseModel):
4211 4225 __tablename__ = 'pull_request_reviewers'
4212 4226 __table_args__ = (
4213 4227 base_table_args,
4214 4228 )
4215 4229
4216 4230 @hybrid_property
4217 4231 def reasons(self):
4218 4232 if not self._reasons:
4219 4233 return []
4220 4234 return self._reasons
4221 4235
4222 4236 @reasons.setter
4223 4237 def reasons(self, val):
4224 4238 val = val or []
4225 4239 if any(not isinstance(x, compat.string_types) for x in val):
4226 4240 raise Exception('invalid reasons type, must be list of strings')
4227 4241 self._reasons = val
4228 4242
4229 4243 pull_requests_reviewers_id = Column(
4230 4244 'pull_requests_reviewers_id', Integer(), nullable=False,
4231 4245 primary_key=True)
4232 4246 pull_request_id = Column(
4233 4247 "pull_request_id", Integer(),
4234 4248 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4235 4249 user_id = Column(
4236 4250 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4237 4251 _reasons = Column(
4238 4252 'reason', MutationList.as_mutable(
4239 4253 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4240 4254
4241 4255 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4242 4256 user = relationship('User')
4243 4257 pull_request = relationship('PullRequest')
4244 4258
4245 4259 rule_data = Column(
4246 4260 'rule_data_json',
4247 4261 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4248 4262
4249 4263 def rule_user_group_data(self):
4250 4264 """
4251 4265 Returns the voting user group rule data for this reviewer
4252 4266 """
4253 4267
4254 4268 if self.rule_data and 'vote_rule' in self.rule_data:
4255 4269 user_group_data = {}
4256 4270 if 'rule_user_group_entry_id' in self.rule_data:
4257 4271 # means a group with voting rules !
4258 4272 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4259 4273 user_group_data['name'] = self.rule_data['rule_name']
4260 4274 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4261 4275
4262 4276 return user_group_data
4263 4277
4264 4278 def __unicode__(self):
4265 4279 return u"<%s('id:%s')>" % (self.__class__.__name__,
4266 4280 self.pull_requests_reviewers_id)
4267 4281
4268 4282
4269 4283 class Notification(Base, BaseModel):
4270 4284 __tablename__ = 'notifications'
4271 4285 __table_args__ = (
4272 4286 Index('notification_type_idx', 'type'),
4273 4287 base_table_args,
4274 4288 )
4275 4289
4276 4290 TYPE_CHANGESET_COMMENT = u'cs_comment'
4277 4291 TYPE_MESSAGE = u'message'
4278 4292 TYPE_MENTION = u'mention'
4279 4293 TYPE_REGISTRATION = u'registration'
4280 4294 TYPE_PULL_REQUEST = u'pull_request'
4281 4295 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4282 4296
4283 4297 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4284 4298 subject = Column('subject', Unicode(512), nullable=True)
4285 4299 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4286 4300 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4287 4301 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4288 4302 type_ = Column('type', Unicode(255))
4289 4303
4290 4304 created_by_user = relationship('User')
4291 4305 notifications_to_users = relationship('UserNotification', lazy='joined',
4292 4306 cascade="all, delete, delete-orphan")
4293 4307
4294 4308 @property
4295 4309 def recipients(self):
4296 4310 return [x.user for x in UserNotification.query()\
4297 4311 .filter(UserNotification.notification == self)\
4298 4312 .order_by(UserNotification.user_id.asc()).all()]
4299 4313
4300 4314 @classmethod
4301 4315 def create(cls, created_by, subject, body, recipients, type_=None):
4302 4316 if type_ is None:
4303 4317 type_ = Notification.TYPE_MESSAGE
4304 4318
4305 4319 notification = cls()
4306 4320 notification.created_by_user = created_by
4307 4321 notification.subject = subject
4308 4322 notification.body = body
4309 4323 notification.type_ = type_
4310 4324 notification.created_on = datetime.datetime.now()
4311 4325
4312 4326 # For each recipient link the created notification to his account
4313 4327 for u in recipients:
4314 4328 assoc = UserNotification()
4315 4329 assoc.user_id = u.user_id
4316 4330 assoc.notification = notification
4317 4331
4318 4332 # if created_by is inside recipients mark his notification
4319 4333 # as read
4320 4334 if u.user_id == created_by.user_id:
4321 4335 assoc.read = True
4322 4336 Session().add(assoc)
4323 4337
4324 4338 Session().add(notification)
4325 4339
4326 4340 return notification
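# Illustrative usage (a sketch, not part of the original module): creating a
# message-type notification for two assumed User instances, `admin` and `dev`.
#
#     notification = Notification.create(
#         created_by=admin, subject=u'build finished',
#         body=u'the nightly build finished successfully',
#         recipients=[admin, dev], type_=Notification.TYPE_MESSAGE)
#     Session().commit()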
4327 4341
4328 4342
4329 4343 class UserNotification(Base, BaseModel):
4330 4344 __tablename__ = 'user_to_notification'
4331 4345 __table_args__ = (
4332 4346 UniqueConstraint('user_id', 'notification_id'),
4333 4347 base_table_args
4334 4348 )
4335 4349
4336 4350 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4337 4351 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4338 4352 read = Column('read', Boolean, default=False)
4339 4353 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4340 4354
4341 4355 user = relationship('User', lazy="joined")
4342 4356 notification = relationship('Notification', lazy="joined",
4343 4357 order_by=lambda: Notification.created_on.desc(),)
4344 4358
4345 4359 def mark_as_read(self):
4346 4360 self.read = True
4347 4361 Session().add(self)
4348 4362
4349 4363
4350 4364 class Gist(Base, BaseModel):
4351 4365 __tablename__ = 'gists'
4352 4366 __table_args__ = (
4353 4367 Index('g_gist_access_id_idx', 'gist_access_id'),
4354 4368 Index('g_created_on_idx', 'created_on'),
4355 4369 base_table_args
4356 4370 )
4357 4371
4358 4372 GIST_PUBLIC = u'public'
4359 4373 GIST_PRIVATE = u'private'
4360 4374 DEFAULT_FILENAME = u'gistfile1.txt'
4361 4375
4362 4376 ACL_LEVEL_PUBLIC = u'acl_public'
4363 4377 ACL_LEVEL_PRIVATE = u'acl_private'
4364 4378
4365 4379 gist_id = Column('gist_id', Integer(), primary_key=True)
4366 4380 gist_access_id = Column('gist_access_id', Unicode(250))
4367 4381 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4368 4382 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4369 4383 gist_expires = Column('gist_expires', Float(53), nullable=False)
4370 4384 gist_type = Column('gist_type', Unicode(128), nullable=False)
4371 4385 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4372 4386 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4373 4387 acl_level = Column('acl_level', Unicode(128), nullable=True)
4374 4388
4375 4389 owner = relationship('User')
4376 4390
4377 4391 def __repr__(self):
4378 4392 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4379 4393
4380 4394 @hybrid_property
4381 4395 def description_safe(self):
4382 4396 from rhodecode.lib import helpers as h
4383 4397 return h.escape(self.gist_description)
4384 4398
4385 4399 @classmethod
4386 4400 def get_or_404(cls, id_):
4387 4401 from pyramid.httpexceptions import HTTPNotFound
4388 4402
4389 4403 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4390 4404 if not res:
4391 4405 raise HTTPNotFound()
4392 4406 return res
4393 4407
4394 4408 @classmethod
4395 4409 def get_by_access_id(cls, gist_access_id):
4396 4410 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4397 4411
4398 4412 def gist_url(self):
4399 4413 from rhodecode.model.gist import GistModel
4400 4414 return GistModel().get_url(self)
4401 4415
4402 4416 @classmethod
4403 4417 def base_path(cls):
4404 4418 """
4405 4419 Returns the base path where all gists are stored
4406 4420
4407 4421 :param cls:
4408 4422 """
4409 4423 from rhodecode.model.gist import GIST_STORE_LOC
4410 4424 q = Session().query(RhodeCodeUi)\
4411 4425 .filter(RhodeCodeUi.ui_key == URL_SEP)
4412 4426 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4413 4427 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4414 4428
4415 4429 def get_api_data(self):
4416 4430 """
4417 4431 Common function for generating gist related data for API
4418 4432 """
4419 4433 gist = self
4420 4434 data = {
4421 4435 'gist_id': gist.gist_id,
4422 4436 'type': gist.gist_type,
4423 4437 'access_id': gist.gist_access_id,
4424 4438 'description': gist.gist_description,
4425 4439 'url': gist.gist_url(),
4426 4440 'expires': gist.gist_expires,
4427 4441 'created_on': gist.created_on,
4428 4442 'modified_at': gist.modified_at,
4429 4443 'content': None,
4430 4444 'acl_level': gist.acl_level,
4431 4445 }
4432 4446 return data
4433 4447
4434 4448 def __json__(self):
4435 4449 data = dict(
4436 4450 )
4437 4451 data.update(self.get_api_data())
4438 4452 return data
4439 4453 # SCM functions
4440 4454
4441 4455 def scm_instance(self, **kwargs):
4442 4456 """
4443 4457 Get an instance of VCS Repository
4444 4458
4445 4459 :param kwargs:
4446 4460 """
4447 4461 from rhodecode.model.gist import GistModel
4448 4462 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4449 4463 return get_vcs_instance(
4450 4464 repo_path=safe_str(full_repo_path), create=False,
4451 4465 _vcs_alias=GistModel.vcs_backend)
4452 4466
4453 4467
4454 4468 class ExternalIdentity(Base, BaseModel):
4455 4469 __tablename__ = 'external_identities'
4456 4470 __table_args__ = (
4457 4471 Index('local_user_id_idx', 'local_user_id'),
4458 4472 Index('external_id_idx', 'external_id'),
4459 4473 base_table_args
4460 4474 )
4461 4475
4462 4476 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4463 4477 external_username = Column('external_username', Unicode(1024), default=u'')
4464 4478 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4465 4479 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4466 4480 access_token = Column('access_token', String(1024), default=u'')
4467 4481 alt_token = Column('alt_token', String(1024), default=u'')
4468 4482 token_secret = Column('token_secret', String(1024), default=u'')
4469 4483
4470 4484 @classmethod
4471 4485 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4472 4486 """
4473 4487 Returns ExternalIdentity instance based on search params
4474 4488
4475 4489 :param external_id:
4476 4490 :param provider_name:
4477 4491 :return: ExternalIdentity
4478 4492 """
4479 4493 query = cls.query()
4480 4494 query = query.filter(cls.external_id == external_id)
4481 4495 query = query.filter(cls.provider_name == provider_name)
4482 4496 if local_user_id:
4483 4497 query = query.filter(cls.local_user_id == local_user_id)
4484 4498 return query.first()
4485 4499
4486 4500 @classmethod
4487 4501 def user_by_external_id_and_provider(cls, external_id, provider_name):
4488 4502 """
4489 4503 Returns User instance based on search params
4490 4504
4491 4505 :param external_id:
4492 4506 :param provider_name:
4493 4507 :return: User
4494 4508 """
4495 4509 query = User.query()
4496 4510 query = query.filter(cls.external_id == external_id)
4497 4511 query = query.filter(cls.provider_name == provider_name)
4498 4512 query = query.filter(User.user_id == cls.local_user_id)
4499 4513 return query.first()
4500 4514
4501 4515 @classmethod
4502 4516 def by_local_user_id(cls, local_user_id):
4503 4517 """
4504 4518 Returns all tokens for user
4505 4519
4506 4520 :param local_user_id:
4507 4521 :return: ExternalIdentity
4508 4522 """
4509 4523 query = cls.query()
4510 4524 query = query.filter(cls.local_user_id == local_user_id)
4511 4525 return query
4512 4526
4513 4527 @classmethod
4514 4528 def load_provider_plugin(cls, plugin_id):
4515 4529 from rhodecode.authentication.base import loadplugin
4516 4530 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4517 4531 auth_plugin = loadplugin(_plugin_id)
4518 4532 return auth_plugin
4519 4533
4520 4534
4521 4535 class Integration(Base, BaseModel):
4522 4536 __tablename__ = 'integrations'
4523 4537 __table_args__ = (
4524 4538 base_table_args
4525 4539 )
4526 4540
4527 4541 integration_id = Column('integration_id', Integer(), primary_key=True)
4528 4542 integration_type = Column('integration_type', String(255))
4529 4543 enabled = Column('enabled', Boolean(), nullable=False)
4530 4544 name = Column('name', String(255), nullable=False)
4531 4545 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4532 4546 default=False)
4533 4547
4534 4548 settings = Column(
4535 4549 'settings_json', MutationObj.as_mutable(
4536 4550 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4537 4551 repo_id = Column(
4538 4552 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4539 4553 nullable=True, unique=None, default=None)
4540 4554 repo = relationship('Repository', lazy='joined')
4541 4555
4542 4556 repo_group_id = Column(
4543 4557 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4544 4558 nullable=True, unique=None, default=None)
4545 4559 repo_group = relationship('RepoGroup', lazy='joined')
4546 4560
4547 4561 @property
4548 4562 def scope(self):
4549 4563 if self.repo:
4550 4564 return repr(self.repo)
4551 4565 if self.repo_group:
4552 4566 if self.child_repos_only:
4553 4567 return repr(self.repo_group) + ' (child repos only)'
4554 4568 else:
4555 4569 return repr(self.repo_group) + ' (recursive)'
4556 4570 if self.child_repos_only:
4557 4571 return 'root_repos'
4558 4572 return 'global'
4559 4573
4560 4574 def __repr__(self):
4561 4575 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4562 4576
4563 4577
4564 4578 class RepoReviewRuleUser(Base, BaseModel):
4565 4579 __tablename__ = 'repo_review_rules_users'
4566 4580 __table_args__ = (
4567 4581 base_table_args
4568 4582 )
4569 4583
4570 4584 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4571 4585 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4572 4586 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4573 4587 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4574 4588 user = relationship('User')
4575 4589
4576 4590 def rule_data(self):
4577 4591 return {
4578 4592 'mandatory': self.mandatory
4579 4593 }
4580 4594
4581 4595
4582 4596 class RepoReviewRuleUserGroup(Base, BaseModel):
4583 4597 __tablename__ = 'repo_review_rules_users_groups'
4584 4598 __table_args__ = (
4585 4599 base_table_args
4586 4600 )
4587 4601
4588 4602 VOTE_RULE_ALL = -1
4589 4603
4590 4604 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4591 4605 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4592 4606 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4593 4607 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4594 4608 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4595 4609 users_group = relationship('UserGroup')
4596 4610
4597 4611 def rule_data(self):
4598 4612 return {
4599 4613 'mandatory': self.mandatory,
4600 4614 'vote_rule': self.vote_rule
4601 4615 }
4602 4616
4603 4617 @property
4604 4618 def vote_rule_label(self):
4605 4619 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4606 4620 return 'all must vote'
4607 4621 else:
4608 4622 return 'min. vote {}'.format(self.vote_rule)
4609 4623
4610 4624
4611 4625 class RepoReviewRule(Base, BaseModel):
4612 4626 __tablename__ = 'repo_review_rules'
4613 4627 __table_args__ = (
4614 4628 base_table_args
4615 4629 )
4616 4630
4617 4631 repo_review_rule_id = Column(
4618 4632 'repo_review_rule_id', Integer(), primary_key=True)
4619 4633 repo_id = Column(
4620 4634 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4621 4635 repo = relationship('Repository', backref='review_rules')
4622 4636
4623 4637 review_rule_name = Column('review_rule_name', String(255))
4624 4638 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4625 4639 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4626 4640 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4627 4641
4628 4642 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4629 4643 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4630 4644 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4631 4645 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4632 4646
4633 4647 rule_users = relationship('RepoReviewRuleUser')
4634 4648 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4635 4649
4636 4650 def _validate_pattern(self, value):
4637 4651 re.compile('^' + glob2re(value) + '$')
4638 4652
4639 4653 @hybrid_property
4640 4654 def source_branch_pattern(self):
4641 4655 return self._branch_pattern or '*'
4642 4656
4643 4657 @source_branch_pattern.setter
4644 4658 def source_branch_pattern(self, value):
4645 4659 self._validate_pattern(value)
4646 4660 self._branch_pattern = value or '*'
4647 4661
4648 4662 @hybrid_property
4649 4663 def target_branch_pattern(self):
4650 4664 return self._target_branch_pattern or '*'
4651 4665
4652 4666 @target_branch_pattern.setter
4653 4667 def target_branch_pattern(self, value):
4654 4668 self._validate_pattern(value)
4655 4669 self._target_branch_pattern = value or '*'
4656 4670
4657 4671 @hybrid_property
4658 4672 def file_pattern(self):
4659 4673 return self._file_pattern or '*'
4660 4674
4661 4675 @file_pattern.setter
4662 4676 def file_pattern(self, value):
4663 4677 self._validate_pattern(value)
4664 4678 self._file_pattern = value or '*'
4665 4679
4666 4680 def matches(self, source_branch, target_branch, files_changed):
4667 4681 """
4668 4682 Check if this review rule matches a branch/files in a pull request
4669 4683
4670 4684 :param source_branch: source branch name for the commit
4671 4685 :param target_branch: target branch name for the commit
4672 4686 :param files_changed: list of file paths changed in the pull request
4673 4687 """
4674 4688
4675 4689 source_branch = source_branch or ''
4676 4690 target_branch = target_branch or ''
4677 4691 files_changed = files_changed or []
4678 4692
4679 4693 branch_matches = True
4680 4694 if source_branch or target_branch:
4681 4695 if self.source_branch_pattern == '*':
4682 4696 source_branch_match = True
4683 4697 else:
4684 4698 if self.source_branch_pattern.startswith('re:'):
4685 4699 source_pattern = self.source_branch_pattern[3:]
4686 4700 else:
4687 4701 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4688 4702 source_branch_regex = re.compile(source_pattern)
4689 4703 source_branch_match = bool(source_branch_regex.search(source_branch))
4690 4704 if self.target_branch_pattern == '*':
4691 4705 target_branch_match = True
4692 4706 else:
4693 4707 if self.target_branch_pattern.startswith('re:'):
4694 4708 target_pattern = self.target_branch_pattern[3:]
4695 4709 else:
4696 4710 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4697 4711 target_branch_regex = re.compile(target_pattern)
4698 4712 target_branch_match = bool(target_branch_regex.search(target_branch))
4699 4713
4700 4714 branch_matches = source_branch_match and target_branch_match
4701 4715
4702 4716 files_matches = True
4703 4717 if self.file_pattern != '*':
4704 4718 files_matches = False
4705 4719 if self.file_pattern.startswith('re:'):
4706 4720 file_pattern = self.file_pattern[3:]
4707 4721 else:
4708 4722 file_pattern = glob2re(self.file_pattern)
4709 4723 file_regex = re.compile(file_pattern)
4710 4724 for filename in files_changed:
4711 4725 if file_regex.search(filename):
4712 4726 files_matches = True
4713 4727 break
4714 4728
4715 4729 return branch_matches and files_matches
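# Pattern semantics, illustrated (a sketch with assumed values): patterns that
# start with 're:' are used as raw regular expressions, anything else is a glob
# translated via glob2re(). For a rule with source_branch_pattern='feature/*',
# target_branch_pattern='*' and file_pattern='re:.*\.py$':
#
#     rule.matches('feature/login', 'master', ['app/views.py'])    # True
#     rule.matches('hotfix/crash', 'master', ['app/views.py'])     # False (branch)
#     rule.matches('feature/login', 'master', ['docs/index.rst'])  # False (files)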
4716 4730
4717 4731 @property
4718 4732 def review_users(self):
4719 4733 """ Returns the users which this rule applies to """
4720 4734
4721 4735 users = collections.OrderedDict()
4722 4736
4723 4737 for rule_user in self.rule_users:
4724 4738 if rule_user.user.active:
4725 4739 if rule_user.user not in users:
4726 4740 users[rule_user.user.username] = {
4727 4741 'user': rule_user.user,
4728 4742 'source': 'user',
4729 4743 'source_data': {},
4730 4744 'data': rule_user.rule_data()
4731 4745 }
4732 4746
4733 4747 for rule_user_group in self.rule_user_groups:
4734 4748 source_data = {
4735 4749 'user_group_id': rule_user_group.users_group.users_group_id,
4736 4750 'name': rule_user_group.users_group.users_group_name,
4737 4751 'members': len(rule_user_group.users_group.members)
4738 4752 }
4739 4753 for member in rule_user_group.users_group.members:
4740 4754 if member.user.active:
4741 4755 key = member.user.username
4742 4756 if key in users:
4743 4757 # skip this member as we already have them;
4744 4758 # this prevents overriding the "first" matched
4745 4759 # users with duplicates from multiple groups
4746 4760 continue
4747 4761
4748 4762 users[key] = {
4749 4763 'user': member.user,
4750 4764 'source': 'user_group',
4751 4765 'source_data': source_data,
4752 4766 'data': rule_user_group.rule_data()
4753 4767 }
4754 4768
4755 4769 return users
4756 4770
4757 4771 def user_group_vote_rule(self, user_id):
4758 4772
4759 4773 rules = []
4760 4774 if not self.rule_user_groups:
4761 4775 return rules
4762 4776
4763 4777 for user_group in self.rule_user_groups:
4764 4778 user_group_members = [x.user_id for x in user_group.users_group.members]
4765 4779 if user_id in user_group_members:
4766 4780 rules.append(user_group)
4767 4781 return rules
4768 4782
4769 4783 def __repr__(self):
4770 4784 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4771 4785 self.repo_review_rule_id, self.repo)
4772 4786
4773 4787
4774 4788 class ScheduleEntry(Base, BaseModel):
4775 4789 __tablename__ = 'schedule_entries'
4776 4790 __table_args__ = (
4777 4791 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4778 4792 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4779 4793 base_table_args,
4780 4794 )
4781 4795
4782 4796 schedule_types = ['crontab', 'timedelta', 'integer']
4783 4797 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4784 4798
4785 4799 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4786 4800 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4787 4801 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4788 4802
4789 4803 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4790 4804 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4791 4805
4792 4806 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4793 4807 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4794 4808
4795 4809 # task
4796 4810 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4797 4811 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4798 4812 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4799 4813 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4800 4814
4801 4815 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4802 4816 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4803 4817
4804 4818 @hybrid_property
4805 4819 def schedule_type(self):
4806 4820 return self._schedule_type
4807 4821
4808 4822 @schedule_type.setter
4809 4823 def schedule_type(self, val):
4810 4824 if val not in self.schedule_types:
4811 4825 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4812 4826 self.schedule_types, val))
4813 4827
4814 4828 self._schedule_type = val
4815 4829
4816 4830 @classmethod
4817 4831 def get_uid(cls, obj):
4818 4832 args = obj.task_args
4819 4833 kwargs = obj.task_kwargs
4820 4834 if isinstance(args, JsonRaw):
4821 4835 try:
4822 4836 args = json.loads(args)
4823 4837 except ValueError:
4824 4838 args = tuple()
4825 4839
4826 4840 if isinstance(kwargs, JsonRaw):
4827 4841 try:
4828 4842 kwargs = json.loads(kwargs)
4829 4843 except ValueError:
4830 4844 kwargs = dict()
4831 4845
4832 4846 dot_notation = obj.task_dot_notation
4833 4847 val = '.'.join(map(safe_str, [
4834 4848 sorted(dot_notation), args, sorted(kwargs.items())]))
4835 4849 return hashlib.sha1(val).hexdigest()
4836 4850
4837 4851 @classmethod
4838 4852 def get_by_schedule_name(cls, schedule_name):
4839 4853 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4840 4854
4841 4855 @classmethod
4842 4856 def get_by_schedule_id(cls, schedule_id):
4843 4857 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4844 4858
4845 4859 @property
4846 4860 def task(self):
4847 4861 return self.task_dot_notation
4848 4862
4849 4863 @property
4850 4864 def schedule(self):
4851 4865 from rhodecode.lib.celerylib.utils import raw_2_schedule
4852 4866 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4853 4867 return schedule
4854 4868
4855 4869 @property
4856 4870 def args(self):
4857 4871 try:
4858 4872 return list(self.task_args or [])
4859 4873 except ValueError:
4860 4874 return list()
4861 4875
4862 4876 @property
4863 4877 def kwargs(self):
4864 4878 try:
4865 4879 return dict(self.task_kwargs or {})
4866 4880 except ValueError:
4867 4881 return dict()
4868 4882
4869 4883 def _as_raw(self, val):
4870 4884 if hasattr(val, 'de_coerce'):
4871 4885 val = val.de_coerce()
4872 4886 if val:
4873 4887 val = json.dumps(val)
4874 4888
4875 4889 return val
4876 4890
4877 4891 @property
4878 4892 def schedule_definition_raw(self):
4879 4893 return self._as_raw(self.schedule_definition)
4880 4894
4881 4895 @property
4882 4896 def args_raw(self):
4883 4897 return self._as_raw(self.task_args)
4884 4898
4885 4899 @property
4886 4900 def kwargs_raw(self):
4887 4901 return self._as_raw(self.task_kwargs)
4888 4902
4889 4903 def __repr__(self):
4890 4904 return '<DB:ScheduleEntry({}:{})>'.format(
4891 4905 self.schedule_entry_id, self.schedule_name)
4892 4906
4893 4907
4894 4908 @event.listens_for(ScheduleEntry, 'before_update')
4895 4909 def update_task_uid(mapper, connection, target):
4896 4910 target.task_uid = ScheduleEntry.get_uid(target)
4897 4911
4898 4912
4899 4913 @event.listens_for(ScheduleEntry, 'before_insert')
4900 4914 def set_task_uid(mapper, connection, target):
4901 4915 target.task_uid = ScheduleEntry.get_uid(target)
4902 4916
4903 4917
4904 4918 class _BaseBranchPerms(BaseModel):
4905 4919 @classmethod
4906 4920 def compute_hash(cls, value):
4907 4921 return sha1_safe(value)
4908 4922
4909 4923 @hybrid_property
4910 4924 def branch_pattern(self):
4911 4925 return self._branch_pattern or '*'
4912 4926
4913 4927 @hybrid_property
4914 4928 def branch_hash(self):
4915 4929 return self._branch_hash
4916 4930
4917 4931 def _validate_glob(self, value):
4918 4932 re.compile('^' + glob2re(value) + '$')
4919 4933
4920 4934 @branch_pattern.setter
4921 4935 def branch_pattern(self, value):
4922 4936 self._validate_glob(value)
4923 4937 self._branch_pattern = value or '*'
4924 4938 # set the Hash when setting the branch pattern
4925 4939 self._branch_hash = self.compute_hash(self._branch_pattern)
4926 4940
4927 4941 def matches(self, branch):
4928 4942 """
4929 4943 Check if the given branch matches this entry
4930 4944
4931 4945 :param branch: branch name for the commit
4932 4946 """
4933 4947
4934 4948 branch = branch or ''
4935 4949
4936 4950 branch_matches = True
4937 4951 if branch:
4938 4952 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
4939 4953 branch_matches = bool(branch_regex.search(branch))
4940 4954
4941 4955 return branch_matches
4942 4956
4943 4957
4944 4958 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
4945 4959 __tablename__ = 'user_to_repo_branch_permissions'
4946 4960 __table_args__ = (
4947 4961 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4948 4962 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4949 4963 )
4950 4964
4951 4965 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
4952 4966
4953 4967 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
4954 4968 repo = relationship('Repository', backref='user_branch_perms')
4955 4969
4956 4970 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
4957 4971 permission = relationship('Permission')
4958 4972
4959 4973 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
4960 4974 user_repo_to_perm = relationship('UserRepoToPerm')
4961 4975
4962 4976 rule_order = Column('rule_order', Integer(), nullable=False)
4963 4977 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
4964 4978 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
4965 4979
4966 4980 def __unicode__(self):
4967 4981 return u'<UserBranchPermission(%s => %r)>' % (
4968 4982 self.user_repo_to_perm, self.branch_pattern)
4969 4983
4970 4984
4971 4985 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
4972 4986 __tablename__ = 'user_group_to_repo_branch_permissions'
4973 4987 __table_args__ = (
4974 4988 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4975 4989 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4976 4990 )
4977 4991
4978 4992 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
4979 4993
4980 4994 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
4981 4995 repo = relationship('Repository', backref='user_group_branch_perms')
4982 4996
4983 4997 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
4984 4998 permission = relationship('Permission')
4985 4999
4986 5000 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
4987 5001 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
4988 5002
4989 5003 rule_order = Column('rule_order', Integer(), nullable=False)
4990 5004 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
4991 5005 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
4992 5006
4993 5007 def __unicode__(self):
4994 5008 return u'<UserBranchPermission(%s => %r)>' % (
4995 5009 self.user_group_repo_to_perm, self.branch_pattern)
4996 5010
4997 5011
4998 5012 class UserBookmark(Base, BaseModel):
4999 5013 __tablename__ = 'user_bookmarks'
5000 5014 __table_args__ = (
5001 5015 UniqueConstraint('user_id', 'bookmark_repo_id'),
5002 5016 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5003 5017 UniqueConstraint('user_id', 'bookmark_position'),
5004 5018 base_table_args
5005 5019 )
5006 5020
5007 5021 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5008 5022 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5009 5023 position = Column("bookmark_position", Integer(), nullable=False)
5010 5024 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5011 5025 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5012 5026 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5013 5027
5014 5028 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5015 5029 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5016 5030
5017 5031 user = relationship("User")
5018 5032
5019 5033 repository = relationship("Repository")
5020 5034 repository_group = relationship("RepoGroup")
5021 5035
5022 5036 @classmethod
5023 5037 def get_by_position_for_user(cls, position, user_id):
5024 5038 return cls.query() \
5025 5039 .filter(UserBookmark.user_id == user_id) \
5026 5040 .filter(UserBookmark.position == position).scalar()
5027 5041
5028 5042 @classmethod
5029 5043 def get_bookmarks_for_user(cls, user_id):
5030 5044 return cls.query() \
5031 5045 .filter(UserBookmark.user_id == user_id) \
5032 5046 .options(joinedload(UserBookmark.repository)) \
5033 5047 .options(joinedload(UserBookmark.repository_group)) \
5034 5048 .order_by(UserBookmark.position.asc()) \
5035 5049 .all()
5036 5050
5037 5051 def __unicode__(self):
5038 5052 return u'<UserBookmark(%d @ %r)>' % (self.position, self.redirect_url)
5039 5053
5040 5054
5041 5055 class FileStore(Base, BaseModel):
5042 5056 __tablename__ = 'file_store'
5043 5057 __table_args__ = (
5044 5058 base_table_args
5045 5059 )
5046 5060
5047 5061 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5048 5062 file_uid = Column('file_uid', String(1024), nullable=False)
5049 5063 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5050 5064 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5051 5065 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5052 5066
5053 5067 # sha256 hash
5054 5068 file_hash = Column('file_hash', String(512), nullable=False)
5055 5069 file_size = Column('file_size', Integer(), nullable=False)
5056 5070
5057 5071 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5058 5072 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5059 5073 accessed_count = Column('accessed_count', Integer(), default=0)
5060 5074
5061 5075 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5062 5076
5063 5077 # if repo/repo_group reference is set, check for permissions
5064 5078 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5065 5079
5066 5080 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5067 5081 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
5068 5082
5069 5083 # scope limited to a user that the requester has access to
5070 5084 scope_user_id = Column(
5071 5085 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5072 5086 nullable=True, unique=None, default=None)
5073 5087 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
5074 5088
5075 5089 # scope limited to a user group that the requester has access to
5076 5090 scope_user_group_id = Column(
5077 5091 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5078 5092 nullable=True, unique=None, default=None)
5079 5093 user_group = relationship('UserGroup', lazy='joined')
5080 5094
5081 5095 # scope limited to a repo that the requester has access to
5082 5096 scope_repo_id = Column(
5083 5097 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5084 5098 nullable=True, unique=None, default=None)
5085 5099 repo = relationship('Repository', lazy='joined')
5086 5100
5087 5101 # scope limited to a repo group that the requester has access to
5088 5102 scope_repo_group_id = Column(
5089 5103 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5090 5104 nullable=True, unique=None, default=None)
5091 5105 repo_group = relationship('RepoGroup', lazy='joined')
5092 5106
5093 5107 @classmethod
5094 5108 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5095 5109 file_description='', enabled=True, check_acl=True, user_id=None,
5096 5110 scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5097 5111
5098 5112 store_entry = FileStore()
5099 5113 store_entry.file_uid = file_uid
5100 5114 store_entry.file_display_name = file_display_name
5101 5115 store_entry.file_org_name = filename
5102 5116 store_entry.file_size = file_size
5103 5117 store_entry.file_hash = file_hash
5104 5118 store_entry.file_description = file_description
5105 5119
5106 5120 store_entry.check_acl = check_acl
5107 5121 store_entry.enabled = enabled
5108 5122
5109 5123 store_entry.user_id = user_id
5110 5124 store_entry.scope_user_id = scope_user_id
5111 5125 store_entry.scope_repo_id = scope_repo_id
5112 5126 store_entry.scope_repo_group_id = scope_repo_group_id
5113 5127 return store_entry
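# Illustrative usage (a sketch; identifiers and values are assumed): the caller
# is expected to add and commit the returned entry itself.
#
#     entry = FileStore.create(
#         file_uid='some-uid', filename='report.pdf',
#         file_hash='<sha256 of content>', file_size=1024,
#         user_id=uploader.user_id, scope_repo_id=repo.repo_id)
#     Session().add(entry)
#     Session().commit()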
5114 5128
5115 5129 @classmethod
5116 5130 def bump_access_counter(cls, file_uid, commit=True):
5117 5131 FileStore().query()\
5118 5132 .filter(FileStore.file_uid == file_uid)\
5119 5133 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5120 5134 FileStore.accessed_on: datetime.datetime.now()})
5121 5135 if commit:
5122 5136 Session().commit()
5123 5137
5124 5138 def __repr__(self):
5125 5139 return '<FileStore({})>'.format(self.file_store_id)
5126 5140
5127 5141
5128 5142 class DbMigrateVersion(Base, BaseModel):
5129 5143 __tablename__ = 'db_migrate_version'
5130 5144 __table_args__ = (
5131 5145 base_table_args,
5132 5146 )
5133 5147
5134 5148 repository_id = Column('repository_id', String(250), primary_key=True)
5135 5149 repository_path = Column('repository_path', Text)
5136 5150 version = Column('version', Integer)
5137 5151
5138 5152 @classmethod
5139 5153 def set_version(cls, version):
5140 5154 """
5141 5155 Helper for forcing a different version, usually for debugging purposes via ishell.
5142 5156 """
5143 5157 ver = DbMigrateVersion.query().first()
5144 5158 ver.version = version
5145 5159 Session().commit()
5146 5160
5147 5161
5148 5162 class DbSession(Base, BaseModel):
5149 5163 __tablename__ = 'db_session'
5150 5164 __table_args__ = (
5151 5165 base_table_args,
5152 5166 )
5153 5167
5154 5168 def __repr__(self):
5155 5169 return '<DB:DbSession({})>'.format(self.id)
5156 5170
5157 5171 id = Column('id', Integer())
5158 5172 namespace = Column('namespace', String(255), primary_key=True)
5159 5173 accessed = Column('accessed', DateTime, nullable=False)
5160 5174 created = Column('created', DateTime, nullable=False)
5161 5175 data = Column('data', PickleType, nullable=False)
@@ -1,1742 +1,1744 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78 78
79 79 UPDATE_STATUS_MESSAGES = {
80 80 UpdateFailureReason.NONE: lazy_ugettext(
81 81 'Pull request update successful.'),
82 82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 83 'Pull request update failed because of an unknown error.'),
84 84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 85 'No update needed because the source and target have not changed.'),
86 86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 87 'Pull request cannot be updated because the reference type is '
88 88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 90 'This pull request cannot be updated because the target '
91 91 'reference is missing.'),
92 92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 93 'This pull request cannot be updated because the source '
94 94 'reference is missing.'),
95 95 }
96 96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
99 99 def __get_pull_request(self, pull_request):
100 100 return self._get_instance((
101 101 PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
137 137 def get(self, pull_request):
138 138 return self.__get_pull_request(pull_request)
139 139
140 140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
141 141 opened_by=None, order_by=None,
142 142 order_dir='desc', only_created=False):
143 143 repo = None
144 144 if repo_name:
145 145 repo = self._get_repo(repo_name)
146 146
147 147 q = PullRequest.query()
148 148
149 149 # source or target
150 150 if repo and source:
151 151 q = q.filter(PullRequest.source_repo == repo)
152 152 elif repo:
153 153 q = q.filter(PullRequest.target_repo == repo)
154 154
155 155 # closed,opened
156 156 if statuses:
157 157 q = q.filter(PullRequest.status.in_(statuses))
158 158
159 159 # opened by filter
160 160 if opened_by:
161 161 q = q.filter(PullRequest.user_id.in_(opened_by))
162 162
163 163 # only get those that are in "created" state
164 164 if only_created:
165 165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
166 166
167 167 if order_by:
168 168 order_map = {
169 169 'name_raw': PullRequest.pull_request_id,
170 170 'id': PullRequest.pull_request_id,
171 171 'title': PullRequest.title,
172 172 'updated_on_raw': PullRequest.updated_on,
173 173 'target_repo': PullRequest.target_repo_id
174 174 }
175 175 if order_dir == 'asc':
176 176 q = q.order_by(order_map[order_by].asc())
177 177 else:
178 178 q = q.order_by(order_map[order_by].desc())
179 179
180 180 return q
181 181
182 182 def count_all(self, repo_name, source=False, statuses=None,
183 183 opened_by=None):
184 184 """
185 185 Count the number of pull requests for a specific repository.
186 186
187 187 :param repo_name: target or source repo
188 188 :param source: boolean flag to specify if repo_name refers to source
189 189 :param statuses: list of pull request statuses
190 190 :param opened_by: author user of the pull request
191 191 :returns: int number of pull requests
192 192 """
193 193 q = self._prepare_get_all_query(
194 194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195 195
196 196 return q.count()
197 197
198 198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 199 offset=0, length=None, order_by=None, order_dir='desc'):
200 200 """
201 201 Get all pull requests for a specific repository.
202 202
203 203 :param repo_name: target or source repo
204 204 :param source: boolean flag to specify if repo_name refers to source
205 205 :param statuses: list of pull request statuses
206 206 :param opened_by: author user of the pull request
207 207 :param offset: pagination offset
208 208 :param length: length of returned list
209 209 :param order_by: order of the returned list
210 210 :param order_dir: 'asc' or 'desc' ordering direction
211 211 :returns: list of pull requests
212 212 """
213 213 q = self._prepare_get_all_query(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 215 order_by=order_by, order_dir=order_dir)
216 216
217 217 if length:
218 218 pull_requests = q.limit(length).offset(offset).all()
219 219 else:
220 220 pull_requests = q.all()
221 221
222 222 return pull_requests
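# Illustrative call (a sketch; the repo name and status values are assumed):
# first page of open pull requests targeting 'some/repo', newest first.
#
#     prs = PullRequestModel().get_all(
#         'some/repo', statuses=['open'], offset=0, length=20,
#         order_by='updated_on_raw', order_dir='desc')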
223 223
224 224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 225 opened_by=None):
226 226 """
227 227 Count the number of pull requests for a specific repository that are
228 228 awaiting review.
229 229
230 230 :param repo_name: target or source repo
231 231 :param source: boolean flag to specify if repo_name refers to source
232 232 :param statuses: list of pull request statuses
233 233 :param opened_by: author user of the pull request
234 234 :returns: int number of pull requests
235 235 """
236 236 pull_requests = self.get_awaiting_review(
237 237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238 238
239 239 return len(pull_requests)
240 240
241 241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 242 opened_by=None, offset=0, length=None,
243 243 order_by=None, order_dir='desc'):
244 244 """
245 245 Get all pull requests for a specific repository that are awaiting
246 246 review.
247 247
248 248 :param repo_name: target or source repo
249 249 :param source: boolean flag to specify if repo_name refers to source
250 250 :param statuses: list of pull request statuses
251 251 :param opened_by: author user of the pull request
252 252 :param offset: pagination offset
253 253 :param length: length of returned list
254 254 :param order_by: order of the returned list
255 255 :param order_dir: 'asc' or 'desc' ordering direction
256 256 :returns: list of pull requests
257 257 """
258 258 pull_requests = self.get_all(
259 259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 260 order_by=order_by, order_dir=order_dir)
261 261
262 262 _filtered_pull_requests = []
263 263 for pr in pull_requests:
264 264 status = pr.calculated_review_status()
265 265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 267 _filtered_pull_requests.append(pr)
268 268 if length:
269 269 return _filtered_pull_requests[offset:offset+length]
270 270 else:
271 271 return _filtered_pull_requests
272 272
273 273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 274 opened_by=None, user_id=None):
275 275 """
276 276 Count the number of pull requests for a specific repository that are
277 277 awaiting review from a specific user.
278 278
279 279 :param repo_name: target or source repo
280 280 :param source: boolean flag to specify if repo_name refers to source
281 281 :param statuses: list of pull request statuses
282 282 :param opened_by: author user of the pull request
283 283 :param user_id: reviewer user of the pull request
284 284 :returns: int number of pull requests
285 285 """
286 286 pull_requests = self.get_awaiting_my_review(
287 287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 288 user_id=user_id)
289 289
290 290 return len(pull_requests)
291 291
292 292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 293 opened_by=None, user_id=None, offset=0,
294 294 length=None, order_by=None, order_dir='desc'):
295 295 """
296 296 Get all pull requests for a specific repository that are awaiting
297 297 review from a specific user.
298 298
299 299 :param repo_name: target or source repo
300 300 :param source: boolean flag to specify if repo_name refers to source
301 301 :param statuses: list of pull request statuses
302 302 :param opened_by: author user of the pull request
303 303 :param user_id: reviewer user of the pull request
304 304 :param offset: pagination offset
305 305 :param length: length of returned list
306 306 :param order_by: order of the returned list
307 307 :param order_dir: 'asc' or 'desc' ordering direction
308 308 :returns: list of pull requests
309 309 """
310 310 pull_requests = self.get_all(
311 311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 312 order_by=order_by, order_dir=order_dir)
313 313
314 314 _my = PullRequestModel().get_not_reviewed(user_id)
315 315 my_participation = []
316 316 for pr in pull_requests:
317 317 if pr in _my:
318 318 my_participation.append(pr)
319 319 _filtered_pull_requests = my_participation
320 320 if length:
321 321 return _filtered_pull_requests[offset:offset+length]
322 322 else:
323 323 return _filtered_pull_requests
324 324
325 325 def get_not_reviewed(self, user_id):
326 326 return [
327 327 x.pull_request for x in PullRequestReviewers.query().filter(
328 328 PullRequestReviewers.user_id == user_id).all()
329 329 ]
330 330
331 331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 332 order_by=None, order_dir='desc'):
333 333 q = PullRequest.query()
334 334 if user_id:
335 335 reviewers_subquery = Session().query(
336 336 PullRequestReviewers.pull_request_id).filter(
337 337 PullRequestReviewers.user_id == user_id).subquery()
338 338 user_filter = or_(
339 339 PullRequest.user_id == user_id,
340 340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 341 )
342 342 q = PullRequest.query().filter(user_filter)
343 343
344 344 # closed,opened
345 345 if statuses:
346 346 q = q.filter(PullRequest.status.in_(statuses))
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'title': PullRequest.title,
352 352 'updated_on_raw': PullRequest.updated_on,
353 353 'target_repo': PullRequest.target_repo_id
354 354 }
355 355 if order_dir == 'asc':
356 356 q = q.order_by(order_map[order_by].asc())
357 357 else:
358 358 q = q.order_by(order_map[order_by].desc())
359 359
360 360 return q
361 361
362 362 def count_im_participating_in(self, user_id=None, statuses=None):
363 363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 364 return q.count()
365 365
366 366 def get_im_participating_in(
367 367 self, user_id=None, statuses=None, offset=0,
368 368 length=None, order_by=None, order_dir='desc'):
369 369 """
370 370 Get all pull requests that I'm participating in or have opened
371 371 """
372 372
373 373 q = self._prepare_participating_query(
374 374 user_id, statuses=statuses, order_by=order_by,
375 375 order_dir=order_dir)
376 376
377 377 if length:
378 378 pull_requests = q.limit(length).offset(offset).all()
379 379 else:
380 380 pull_requests = q.all()
381 381
382 382 return pull_requests
383 383
384 384 def get_versions(self, pull_request):
385 385 """
386 386 returns versions of the pull request sorted by ID ascending
387 387 """
388 388 return PullRequestVersion.query()\
389 389 .filter(PullRequestVersion.pull_request == pull_request)\
390 390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
391 391 .all()
392 392
393 393 def get_pr_version(self, pull_request_id, version=None):
394 394 at_version = None
395 395
396 396 if version and version == 'latest':
397 397 pull_request_ver = PullRequest.get(pull_request_id)
398 398 pull_request_obj = pull_request_ver
399 399 _org_pull_request_obj = pull_request_obj
400 400 at_version = 'latest'
401 401 elif version:
402 402 pull_request_ver = PullRequestVersion.get_or_404(version)
403 403 pull_request_obj = pull_request_ver
404 404 _org_pull_request_obj = pull_request_ver.pull_request
405 405 at_version = pull_request_ver.pull_request_version_id
406 406 else:
407 407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 408 pull_request_id)
409 409
410 410 pull_request_display_obj = PullRequest.get_pr_display_object(
411 411 pull_request_obj, _org_pull_request_obj)
412 412
413 413 return _org_pull_request_obj, pull_request_obj, \
414 414 pull_request_display_obj, at_version
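# The return value is a 4-tuple: (original pull request, object at the
# requested version, display wrapper, version marker). A sketch of a call,
# with an assumed pull request id:
#
#     org_pr, pr_at_version, pr_display, at_version = \
#         PullRequestModel().get_pr_version(1, version='latest')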
415 415
416 416 def create(self, created_by, source_repo, source_ref, target_repo,
417 417 target_ref, revisions, reviewers, title, description=None,
418 418 description_renderer=None,
419 419 reviewer_data=None, translator=None, auth_user=None):
420 420 translator = translator or get_current_request().translate
421 421
422 422 created_by_user = self._get_user(created_by)
423 423 auth_user = auth_user or created_by_user.AuthUser()
424 424 source_repo = self._get_repo(source_repo)
425 425 target_repo = self._get_repo(target_repo)
426 426
427 427 pull_request = PullRequest()
428 428 pull_request.source_repo = source_repo
429 429 pull_request.source_ref = source_ref
430 430 pull_request.target_repo = target_repo
431 431 pull_request.target_ref = target_ref
432 432 pull_request.revisions = revisions
433 433 pull_request.title = title
434 434 pull_request.description = description
435 435 pull_request.description_renderer = description_renderer
436 436 pull_request.author = created_by_user
437 437 pull_request.reviewer_data = reviewer_data
438 438 pull_request.pull_request_state = pull_request.STATE_CREATING
439 439 Session().add(pull_request)
440 440 Session().flush()
441 441
442 442 reviewer_ids = set()
443 443 # members / reviewers
444 444 for reviewer_object in reviewers:
445 445 user_id, reasons, mandatory, rules = reviewer_object
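# each reviewer_object is expected to be a (user_id, reasons, mandatory,
# rules) tuple, e.g. (42, [u'added manually'], False, set()) -- values
# here are purely illustrative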
446 446 user = self._get_user(user_id)
447 447
448 448 # skip duplicates
449 449 if user.user_id in reviewer_ids:
450 450 continue
451 451
452 452 reviewer_ids.add(user.user_id)
453 453
454 454 reviewer = PullRequestReviewers()
455 455 reviewer.user = user
456 456 reviewer.pull_request = pull_request
457 457 reviewer.reasons = reasons
458 458 reviewer.mandatory = mandatory
459 459
460 460 # NOTE(marcink): pick only first rule for now
461 461 rule_id = list(rules)[0] if rules else None
462 462 rule = RepoReviewRule.get(rule_id) if rule_id else None
463 463 if rule:
464 464 review_group = rule.user_group_vote_rule(user_id)
465 465 # we check if this particular reviewer is member of a voting group
466 466 if review_group:
467 467 # NOTE(marcink):
468 468 # the user can be a member of more groups, but we pick the first
469 469 # one, same as the default reviewers algorithm
470 470 review_group = review_group[0]
471 471
472 472 rule_data = {
473 473 'rule_name':
474 474 rule.review_rule_name,
475 475 'rule_user_group_entry_id':
476 476 review_group.repo_review_rule_users_group_id,
477 477 'rule_user_group_name':
478 478 review_group.users_group.users_group_name,
479 479 'rule_user_group_members':
480 480 [x.user.username for x in review_group.users_group.members],
481 481 'rule_user_group_members_id':
482 482 [x.user.user_id for x in review_group.users_group.members],
483 483 }
484 484 # e.g {'vote_rule': -1, 'mandatory': True}
485 485 rule_data.update(review_group.rule_data())
486 486
487 487 reviewer.rule_data = rule_data
488 488
489 489 Session().add(reviewer)
490 490 Session().flush()
491 491
492 492 # Set approval status to "Under Review" for all commits which are
493 493 # part of this pull request.
494 494 ChangesetStatusModel().set_status(
495 495 repo=target_repo,
496 496 status=ChangesetStatus.STATUS_UNDER_REVIEW,
497 497 user=created_by_user,
498 498 pull_request=pull_request
499 499 )
500 500 # we commit early at this point. This has to do with the fact
501 501 # that the queries above do some row-locking, and because of that
502 502 # we need to commit and finish the transaction before the validate call
503 503 # below, which for large repos could take long, resulting in long row locks
504 504 Session().commit()
505 505
506 506 # prepare workspace, and run initial merge simulation. Set state during that
507 507 # operation
508 508 pull_request = PullRequest.get(pull_request.pull_request_id)
509 509
510 # set as merging, for simulation, and if finished to created so we mark
510 # set as merging, for merge simulation, and if finished to created so we mark
511 511 # simulation is working fine
512 512 with pull_request.set_state(PullRequest.STATE_MERGING,
513 final_state=PullRequest.STATE_CREATED):
513 final_state=PullRequest.STATE_CREATED) as state_obj:
514 514 MergeCheck.validate(
515 515 pull_request, auth_user=auth_user, translator=translator)
516 516
517 517 self.notify_reviewers(pull_request, reviewer_ids)
518 518 self.trigger_pull_request_hook(
519 519 pull_request, created_by_user, 'create')
520 520
521 521 creation_data = pull_request.get_api_data(with_merge_state=False)
522 522 self._log_audit_action(
523 523 'repo.pull_request.create', {'data': creation_data},
524 524 auth_user, pull_request)
525 525
526 526 return pull_request
527 527
528 528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 529 pull_request = self.__get_pull_request(pull_request)
530 530 target_scm = pull_request.target_repo.scm_instance()
531 531 if action == 'create':
532 532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 533 elif action == 'merge':
534 534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 535 elif action == 'close':
536 536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 537 elif action == 'review_status_change':
538 538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 539 elif action == 'update':
540 540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 541 elif action == 'comment':
542 542 # dummy hook ! for comment. We want this function to handle all cases
543 543 def trigger_hook(*args, **kwargs):
544 544 pass
545 545 comment = data['comment']
546 546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 547 else:
548 548 return
549 549
550 550 trigger_hook(
551 551 username=user.username,
552 552 repo_name=pull_request.target_repo.repo_name,
553 553 repo_alias=target_scm.alias,
554 554 pull_request=pull_request,
555 555 data=data)
556 556
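# Illustrative dispatch examples (hypothetical objects, not from this change):
# every action maps to a hooks_utils trigger, except 'comment' which only fires
# a PullRequestCommentEvent, and unknown actions which are silently ignored.
#
#     model = PullRequestModel()
#     model.trigger_pull_request_hook(pull_request, user, 'create')
#     model.trigger_pull_request_hook(pull_request, user, 'comment',
#                                     data={'comment': comment})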
557 557 def _get_commit_ids(self, pull_request):
558 558 """
559 559 Return the commit ids of the merged pull request.
560 560
561 561 This method is not dealing correctly yet with the lack of autoupdates
562 562 nor with the implicit target updates.
563 563 For example: if a commit in the source repo is already in the target it
564 564 will be reported anyway.
565 565 """
566 566 merge_rev = pull_request.merge_rev
567 567 if merge_rev is None:
568 568 raise ValueError('This pull request was not merged yet')
569 569
570 570 commit_ids = list(pull_request.revisions)
571 571 if merge_rev not in commit_ids:
572 572 commit_ids.append(merge_rev)
573 573
574 574 return commit_ids
575 575
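# Worked example (hypothetical ids): for revisions ['c1', 'c2'] and a merge_rev
# of 'm1' that is not part of the revisions, _get_commit_ids returns
# ['c1', 'c2', 'm1']; an unmerged pull request (merge_rev is None) raises
# ValueError instead.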
576 576 def merge_repo(self, pull_request, user, extras):
577 577 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 578 extras['user_agent'] = 'internal-merge'
579 579 merge_state = self._merge_pull_request(pull_request, user, extras)
580 580 if merge_state.executed:
581 581 log.debug("Merge was successful, updating the pull request comments.")
582 582 self._comment_and_close_pr(pull_request, user, merge_state)
583 583
584 584 self._log_audit_action(
585 585 'repo.pull_request.merge',
586 586 {'merge_state': merge_state.__dict__},
587 587 user, pull_request)
588 588
589 589 else:
590 590 log.warn("Merge failed, not updating the pull request.")
591 591 return merge_state
592 592
593 593 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
594 594 target_vcs = pull_request.target_repo.scm_instance()
595 595 source_vcs = pull_request.source_repo.scm_instance()
596 596
597 597 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
598 598 pr_id=pull_request.pull_request_id,
599 599 pr_title=pull_request.title,
600 600 source_repo=source_vcs.name,
601 601 source_ref_name=pull_request.source_ref_parts.name,
602 602 target_repo=target_vcs.name,
603 603 target_ref_name=pull_request.target_ref_parts.name,
604 604 )
605 605
606 606 workspace_id = self._workspace_id(pull_request)
607 607 repo_id = pull_request.target_repo.repo_id
608 608 use_rebase = self._use_rebase_for_merging(pull_request)
609 609 close_branch = self._close_branch_before_merging(pull_request)
610 610
611 611 target_ref = self._refresh_reference(
612 612 pull_request.target_ref_parts, target_vcs)
613 613
614 614 callback_daemon, extras = prepare_callback_daemon(
615 615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
616 616 host=vcs_settings.HOOKS_HOST,
617 617 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
618 618
619 619 with callback_daemon:
620 620 # TODO: johbo: Implement a clean way to run a config_override
621 621 # for a single call.
622 622 target_vcs.config.set(
623 623 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
624 624
625 625 user_name = user.short_contact
626 626 merge_state = target_vcs.merge(
627 627 repo_id, workspace_id, target_ref, source_vcs,
628 628 pull_request.source_ref_parts,
629 629 user_name=user_name, user_email=user.email,
630 630 message=message, use_rebase=use_rebase,
631 631 close_branch=close_branch)
632 632 return merge_state
633 633
634 634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
635 635 pull_request.merge_rev = merge_state.merge_ref.commit_id
636 636 pull_request.updated_on = datetime.datetime.now()
637 637 close_msg = close_msg or 'Pull request merged and closed'
638 638
639 639 CommentsModel().create(
640 640 text=safe_unicode(close_msg),
641 641 repo=pull_request.target_repo.repo_id,
642 642 user=user.user_id,
643 643 pull_request=pull_request.pull_request_id,
644 644 f_path=None,
645 645 line_no=None,
646 646 closing_pr=True
647 647 )
648 648
649 649 Session().add(pull_request)
650 650 Session().flush()
651 651 # TODO: paris: replace invalidation with less radical solution
652 652 ScmModel().mark_for_invalidation(
653 653 pull_request.target_repo.repo_name)
654 654 self.trigger_pull_request_hook(pull_request, user, 'merge')
655 655
656 656 def has_valid_update_type(self, pull_request):
657 657 source_ref_type = pull_request.source_ref_parts.type
658 658 return source_ref_type in self.REF_TYPES
659 659
660 660 def update_commits(self, pull_request):
661 661 """
662 662 Get the updated list of commits for the pull request
663 663 and return the new pull request version and the list
664 664 of commits processed by this update action
665 665 """
666 666 pull_request = self.__get_pull_request(pull_request)
667 667 source_ref_type = pull_request.source_ref_parts.type
668 668 source_ref_name = pull_request.source_ref_parts.name
669 669 source_ref_id = pull_request.source_ref_parts.commit_id
670 670
671 671 target_ref_type = pull_request.target_ref_parts.type
672 672 target_ref_name = pull_request.target_ref_parts.name
673 673 target_ref_id = pull_request.target_ref_parts.commit_id
674 674
675 675 if not self.has_valid_update_type(pull_request):
676 676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 677 pull_request, source_ref_type)
678 678 return UpdateResponse(
679 679 executed=False,
680 680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 681 old=pull_request, new=None, changes=None,
682 682 source_changed=False, target_changed=False)
683 683
684 684 # source repo
685 685 source_repo = pull_request.source_repo.scm_instance()
686 686
687 687 try:
688 688 source_commit = source_repo.get_commit(commit_id=source_ref_name)
689 689 except CommitDoesNotExistError:
690 690 return UpdateResponse(
691 691 executed=False,
692 692 reason=UpdateFailureReason.MISSING_SOURCE_REF,
693 693 old=pull_request, new=None, changes=None,
694 694 source_changed=False, target_changed=False)
695 695
696 696 source_changed = source_ref_id != source_commit.raw_id
697 697
698 698 # target repo
699 699 target_repo = pull_request.target_repo.scm_instance()
700 700
701 701 try:
702 702 target_commit = target_repo.get_commit(commit_id=target_ref_name)
703 703 except CommitDoesNotExistError:
704 704 return UpdateResponse(
705 705 executed=False,
706 706 reason=UpdateFailureReason.MISSING_TARGET_REF,
707 707 old=pull_request, new=None, changes=None,
708 708 source_changed=False, target_changed=False)
709 709 target_changed = target_ref_id != target_commit.raw_id
710 710
711 711 if not (source_changed or target_changed):
712 712 log.debug("Nothing changed in pull request %s", pull_request)
713 713 return UpdateResponse(
714 714 executed=False,
715 715 reason=UpdateFailureReason.NO_CHANGE,
716 716 old=pull_request, new=None, changes=None,
717 717 source_changed=source_changed, target_changed=target_changed)
718 718
719 719 change_in_found = 'target repo' if target_changed else 'source repo'
720 720 log.debug('Updating pull request because of change in %s detected',
721 721 change_in_found)
722 722
723 723 # Finally there is a need for an update, in case of source change
724 724 # we create a new version, else just an update
725 725 if source_changed:
726 726 pull_request_version = self._create_version_from_snapshot(pull_request)
727 727 self._link_comments_to_version(pull_request_version)
728 728 else:
729 729 try:
730 730 ver = pull_request.versions[-1]
731 731 except IndexError:
732 732 ver = None
733 733
734 734 pull_request.pull_request_version_id = \
735 735 ver.pull_request_version_id if ver else None
736 736 pull_request_version = pull_request
737 737
738 738 try:
739 739 if target_ref_type in self.REF_TYPES:
740 740 target_commit = target_repo.get_commit(target_ref_name)
741 741 else:
742 742 target_commit = target_repo.get_commit(target_ref_id)
743 743 except CommitDoesNotExistError:
744 744 return UpdateResponse(
745 745 executed=False,
746 746 reason=UpdateFailureReason.MISSING_TARGET_REF,
747 747 old=pull_request, new=None, changes=None,
748 748 source_changed=source_changed, target_changed=target_changed)
749 749
750 750 # re-compute commit ids
751 751 old_commit_ids = pull_request.revisions
752 752 pre_load = ["author", "branch", "date", "message"]
753 753 commit_ranges = target_repo.compare(
754 754 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
755 755 pre_load=pre_load)
756 756
757 757 ancestor = source_repo.get_common_ancestor(
758 758 source_commit.raw_id, target_commit.raw_id, target_repo)
759 759
760 760 pull_request.source_ref = '%s:%s:%s' % (
761 761 source_ref_type, source_ref_name, source_commit.raw_id)
762 762 pull_request.target_ref = '%s:%s:%s' % (
763 763 target_ref_type, target_ref_name, ancestor)
764 764
765 765 pull_request.revisions = [
766 766 commit.raw_id for commit in reversed(commit_ranges)]
767 767 pull_request.updated_on = datetime.datetime.now()
768 768 Session().add(pull_request)
769 769 new_commit_ids = pull_request.revisions
770 770
771 771 old_diff_data, new_diff_data = self._generate_update_diffs(
772 772 pull_request, pull_request_version)
773 773
774 774 # calculate commit and file changes
775 775 changes = self._calculate_commit_id_changes(
776 776 old_commit_ids, new_commit_ids)
777 777 file_changes = self._calculate_file_changes(
778 778 old_diff_data, new_diff_data)
779 779
780 780 # set comments as outdated if DIFFS changed
781 781 CommentsModel().outdate_comments(
782 782 pull_request, old_diff_data=old_diff_data,
783 783 new_diff_data=new_diff_data)
784 784
785 785 commit_changes = (changes.added or changes.removed)
786 786 file_node_changes = (
787 787 file_changes.added or file_changes.modified or file_changes.removed)
788 788 pr_has_changes = commit_changes or file_node_changes
789 789
790 790 # Add an automatic comment to the pull request, in case
791 791 # anything has changed
792 792 if pr_has_changes:
793 793 update_comment = CommentsModel().create(
794 794 text=self._render_update_message(changes, file_changes),
795 795 repo=pull_request.target_repo,
796 796 user=pull_request.author,
797 797 pull_request=pull_request,
798 798 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
799 799
800 800 # Update status to "Under Review" for added commits
801 801 for commit_id in changes.added:
802 802 ChangesetStatusModel().set_status(
803 803 repo=pull_request.source_repo,
804 804 status=ChangesetStatus.STATUS_UNDER_REVIEW,
805 805 comment=update_comment,
806 806 user=pull_request.author,
807 807 pull_request=pull_request,
808 808 revision=commit_id)
809 809
810 810 log.debug(
811 811 'Updated pull request %s, added_ids: %s, common_ids: %s, '
812 812 'removed_ids: %s', pull_request.pull_request_id,
813 813 changes.added, changes.common, changes.removed)
814 814 log.debug(
815 815 'Updated pull request with the following file changes: %s',
816 816 file_changes)
817 817
818 818 log.info(
819 819 "Updated pull request %s from commit %s to commit %s, "
820 820 "stored new version %s of this pull request.",
821 821 pull_request.pull_request_id, source_ref_id,
822 822 pull_request.source_ref_parts.commit_id,
823 823 pull_request_version.pull_request_version_id)
824 824 Session().commit()
825 825 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
826 826
827 827 return UpdateResponse(
828 828 executed=True, reason=UpdateFailureReason.NONE,
829 829 old=pull_request, new=pull_request_version, changes=changes,
830 830 source_changed=source_changed, target_changed=target_changed)
831 831
832 832 def _create_version_from_snapshot(self, pull_request):
833 833 version = PullRequestVersion()
834 834 version.title = pull_request.title
835 835 version.description = pull_request.description
836 836 version.status = pull_request.status
837 837 version.pull_request_state = pull_request.pull_request_state
838 838 version.created_on = datetime.datetime.now()
839 839 version.updated_on = pull_request.updated_on
840 840 version.user_id = pull_request.user_id
841 841 version.source_repo = pull_request.source_repo
842 842 version.source_ref = pull_request.source_ref
843 843 version.target_repo = pull_request.target_repo
844 844 version.target_ref = pull_request.target_ref
845 845
846 846 version._last_merge_source_rev = pull_request._last_merge_source_rev
847 847 version._last_merge_target_rev = pull_request._last_merge_target_rev
848 848 version.last_merge_status = pull_request.last_merge_status
849 849 version.shadow_merge_ref = pull_request.shadow_merge_ref
850 850 version.merge_rev = pull_request.merge_rev
851 851 version.reviewer_data = pull_request.reviewer_data
852 852
853 853 version.revisions = pull_request.revisions
854 854 version.pull_request = pull_request
855 855 Session().add(version)
856 856 Session().flush()
857 857
858 858 return version
859 859
860 860 def _generate_update_diffs(self, pull_request, pull_request_version):
861 861
862 862 diff_context = (
863 863 self.DIFF_CONTEXT +
864 864 CommentsModel.needed_extra_diff_context())
865 865 hide_whitespace_changes = False
866 866 source_repo = pull_request_version.source_repo
867 867 source_ref_id = pull_request_version.source_ref_parts.commit_id
868 868 target_ref_id = pull_request_version.target_ref_parts.commit_id
869 869 old_diff = self._get_diff_from_pr_or_version(
870 870 source_repo, source_ref_id, target_ref_id,
871 871 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
872 872
873 873 source_repo = pull_request.source_repo
874 874 source_ref_id = pull_request.source_ref_parts.commit_id
875 875 target_ref_id = pull_request.target_ref_parts.commit_id
876 876
877 877 new_diff = self._get_diff_from_pr_or_version(
878 878 source_repo, source_ref_id, target_ref_id,
879 879 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
880 880
881 881 old_diff_data = diffs.DiffProcessor(old_diff)
882 882 old_diff_data.prepare()
883 883 new_diff_data = diffs.DiffProcessor(new_diff)
884 884 new_diff_data.prepare()
885 885
886 886 return old_diff_data, new_diff_data
887 887
888 888 def _link_comments_to_version(self, pull_request_version):
889 889 """
890 890 Link all unlinked comments of this pull request to the given version.
891 891
892 892 :param pull_request_version: The `PullRequestVersion` to which
893 893 the comments shall be linked.
894 894
895 895 """
896 896 pull_request = pull_request_version.pull_request
897 897 comments = ChangesetComment.query()\
898 898 .filter(
899 899 # TODO: johbo: Should we query for the repo at all here?
900 900 # Pending decision on how comments of PRs are to be related
901 901 # to either the source repo, the target repo or no repo at all.
902 902 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
903 903 ChangesetComment.pull_request == pull_request,
904 904 ChangesetComment.pull_request_version == None)\
905 905 .order_by(ChangesetComment.comment_id.asc())
906 906
907 907 # TODO: johbo: Find out why this breaks if it is done in a bulk
908 908 # operation.
909 909 for comment in comments:
910 910 comment.pull_request_version_id = (
911 911 pull_request_version.pull_request_version_id)
912 912 Session().add(comment)
913 913
914 914 def _calculate_commit_id_changes(self, old_ids, new_ids):
915 915 added = [x for x in new_ids if x not in old_ids]
916 916 common = [x for x in new_ids if x in old_ids]
917 917 removed = [x for x in old_ids if x not in new_ids]
918 918 total = new_ids
919 919 return ChangeTuple(added, common, removed, total)
920 920
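# Worked example (hypothetical commit ids):
#   >>> old_ids = ['a1', 'b2', 'c3']
#   >>> new_ids = ['b2', 'c3', 'd4']
#   >>> PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
#   ChangeTuple(added=['d4'], common=['b2', 'c3'], removed=['a1'], total=['b2', 'c3', 'd4'])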
921 921 def _calculate_file_changes(self, old_diff_data, new_diff_data):
922 922
923 923 old_files = OrderedDict()
924 924 for diff_data in old_diff_data.parsed_diff:
925 925 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
926 926
927 927 added_files = []
928 928 modified_files = []
929 929 removed_files = []
930 930 for diff_data in new_diff_data.parsed_diff:
931 931 new_filename = diff_data['filename']
932 932 new_hash = md5_safe(diff_data['raw_diff'])
933 933
934 934 old_hash = old_files.get(new_filename)
935 935 if not old_hash:
936 936 # file is not present in the old diff, which means it was added
937 937 added_files.append(new_filename)
938 938 else:
939 939 if new_hash != old_hash:
940 940 modified_files.append(new_filename)
941 941 # now remove a file from old, since we have seen it already
942 942 del old_files[new_filename]
943 943
944 944 # removed files are those present in the old diff but not in the NEW one;
945 945 # since we remove old files that are present in the new diff, any
946 946 # left-overs are the removed files
947 947 removed_files.extend(old_files.keys())
948 948
949 949 return FileChangeTuple(added_files, modified_files, removed_files)
950 950
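# Worked example (hypothetical file names): if the old diff touched setup.py and
# README.rst, and the new diff touches setup.py (with a different raw diff) and
# docs/index.rst, the md5 comparison above yields
# FileChangeTuple(added=['docs/index.rst'], modified=['setup.py'], removed=['README.rst'])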
951 951 def _render_update_message(self, changes, file_changes):
952 952 """
953 953 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
954 954 so it always looks the same regardless of which default
955 955 renderer the system is using.
956 956
957 957 :param changes: changes named tuple
958 958 :param file_changes: file changes named tuple
959 959
960 960 """
961 961 new_status = ChangesetStatus.get_status_lbl(
962 962 ChangesetStatus.STATUS_UNDER_REVIEW)
963 963
964 964 changed_files = (
965 965 file_changes.added + file_changes.modified + file_changes.removed)
966 966
967 967 params = {
968 968 'under_review_label': new_status,
969 969 'added_commits': changes.added,
970 970 'removed_commits': changes.removed,
971 971 'changed_files': changed_files,
972 972 'added_files': file_changes.added,
973 973 'modified_files': file_changes.modified,
974 974 'removed_files': file_changes.removed,
975 975 }
976 976 renderer = RstTemplateRenderer()
977 977 return renderer.render('pull_request_update.mako', **params)
978 978
979 979 def edit(self, pull_request, title, description, description_renderer, user):
980 980 pull_request = self.__get_pull_request(pull_request)
981 981 old_data = pull_request.get_api_data(with_merge_state=False)
982 982 if pull_request.is_closed():
983 983 raise ValueError('This pull request is closed')
984 984 if title:
985 985 pull_request.title = title
986 986 pull_request.description = description
987 987 pull_request.updated_on = datetime.datetime.now()
988 988 pull_request.description_renderer = description_renderer
989 989 Session().add(pull_request)
990 990 self._log_audit_action(
991 991 'repo.pull_request.edit', {'old_data': old_data},
992 992 user, pull_request)
993 993
994 994 def update_reviewers(self, pull_request, reviewer_data, user):
995 995 """
996 996 Update the reviewers in the pull request
997 997
998 998 :param pull_request: the pr to update
999 999 :param reviewer_data: list of tuples
1000 1000 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1001 1001 """
1002 1002 pull_request = self.__get_pull_request(pull_request)
1003 1003 if pull_request.is_closed():
1004 1004 raise ValueError('This pull request is closed')
1005 1005
1006 1006 reviewers = {}
1007 1007 for user_id, reasons, mandatory, rules in reviewer_data:
1008 1008 if isinstance(user_id, (int, compat.string_types)):
1009 1009 user_id = self._get_user(user_id).user_id
1010 1010 reviewers[user_id] = {
1011 1011 'reasons': reasons, 'mandatory': mandatory}
1012 1012
1013 1013 reviewers_ids = set(reviewers.keys())
1014 1014 current_reviewers = PullRequestReviewers.query()\
1015 1015 .filter(PullRequestReviewers.pull_request ==
1016 1016 pull_request).all()
1017 1017 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1018 1018
1019 1019 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1020 1020 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1021 1021
1022 1022 log.debug("Adding %s reviewers", ids_to_add)
1023 1023 log.debug("Removing %s reviewers", ids_to_remove)
1024 1024 changed = False
1025 1025 added_audit_reviewers = []
1026 1026 removed_audit_reviewers = []
1027 1027
1028 1028 for uid in ids_to_add:
1029 1029 changed = True
1030 1030 _usr = self._get_user(uid)
1031 1031 reviewer = PullRequestReviewers()
1032 1032 reviewer.user = _usr
1033 1033 reviewer.pull_request = pull_request
1034 1034 reviewer.reasons = reviewers[uid]['reasons']
1035 1035 # NOTE(marcink): mandatory shouldn't be changed now
1036 1036 # reviewer.mandatory = reviewers[uid]['reasons']
1037 1037 Session().add(reviewer)
1038 1038 added_audit_reviewers.append(reviewer.get_dict())
1039 1039
1040 1040 for uid in ids_to_remove:
1041 1041 changed = True
1042 1042 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1043 1043 # that prevents and fixes cases where the same reviewer was added twice.
1044 1044 # This CAN happen due to the lack of DB checks
1045 1045 reviewers = PullRequestReviewers.query()\
1046 1046 .filter(PullRequestReviewers.user_id == uid,
1047 1047 PullRequestReviewers.pull_request == pull_request)\
1048 1048 .all()
1049 1049
1050 1050 for obj in reviewers:
1051 1051 removed_audit_reviewers.append(obj.get_dict())
1052 1052 Session().delete(obj)
1053 1053
1054 1054 if changed:
1055 1055 Session().expire_all()
1056 1056 pull_request.updated_on = datetime.datetime.now()
1057 1057 Session().add(pull_request)
1058 1058
1059 1059 # finally store audit logs
1060 1060 for user_data in added_audit_reviewers:
1061 1061 self._log_audit_action(
1062 1062 'repo.pull_request.reviewer.add', {'data': user_data},
1063 1063 user, pull_request)
1064 1064 for user_data in removed_audit_reviewers:
1065 1065 self._log_audit_action(
1066 1066 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1067 1067 user, pull_request)
1068 1068
1069 1069 self.notify_reviewers(pull_request, ids_to_add)
1070 1070 return ids_to_add, ids_to_remove
1071 1071
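# Illustrative input (hypothetical users and reasons): reviewer_data is a list
# of (user, reasons, mandatory, rules) tuples, e.g.
#
#     PullRequestModel().update_reviewers(
#         pull_request,
#         [(2, ['repo owner'], True, []), ('jdoe', ['added manually'], False, [])],
#         user=cur_user)
#
# users not present in the list are removed, new ones are added, and both
# operations are written to the audit log.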
1072 1072 def get_url(self, pull_request, request=None, permalink=False):
1073 1073 if not request:
1074 1074 request = get_current_request()
1075 1075
1076 1076 if permalink:
1077 1077 return request.route_url(
1078 1078 'pull_requests_global',
1079 1079 pull_request_id=pull_request.pull_request_id,)
1080 1080 else:
1081 1081 return request.route_url('pullrequest_show',
1082 1082 repo_name=safe_str(pull_request.target_repo.repo_name),
1083 1083 pull_request_id=pull_request.pull_request_id,)
1084 1084
1085 1085 def get_shadow_clone_url(self, pull_request, request=None):
1086 1086 """
1087 1087 Returns qualified url pointing to the shadow repository. If this pull
1088 1088 request is closed there is no shadow repository and ``None`` will be
1089 1089 returned.
1090 1090 """
1091 1091 if pull_request.is_closed():
1092 1092 return None
1093 1093 else:
1094 1094 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1095 1095 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1096 1096
1097 1097 def notify_reviewers(self, pull_request, reviewers_ids):
1098 1098 # notification to reviewers
1099 1099 if not reviewers_ids:
1100 1100 return
1101 1101
1102 log.debug('Notifying the following reviewers %s about this pull-request', reviewers_ids)
1103
1102 1104 pull_request_obj = pull_request
1103 1105 # get the current participants of this pull request
1104 1106 recipients = reviewers_ids
1105 1107 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1106 1108
1107 1109 pr_source_repo = pull_request_obj.source_repo
1108 1110 pr_target_repo = pull_request_obj.target_repo
1109 1111
1110 1112 pr_url = h.route_url('pullrequest_show',
1111 1113 repo_name=pr_target_repo.repo_name,
1112 1114 pull_request_id=pull_request_obj.pull_request_id,)
1113 1115
1114 1116 # set some variables for email notification
1115 1117 pr_target_repo_url = h.route_url(
1116 1118 'repo_summary', repo_name=pr_target_repo.repo_name)
1117 1119
1118 1120 pr_source_repo_url = h.route_url(
1119 1121 'repo_summary', repo_name=pr_source_repo.repo_name)
1120 1122
1121 1123 # pull request specifics
1122 1124 pull_request_commits = [
1123 1125 (x.raw_id, x.message)
1124 1126 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1125 1127
1126 1128 kwargs = {
1127 1129 'user': pull_request.author,
1128 1130 'pull_request': pull_request_obj,
1129 1131 'pull_request_commits': pull_request_commits,
1130 1132
1131 1133 'pull_request_target_repo': pr_target_repo,
1132 1134 'pull_request_target_repo_url': pr_target_repo_url,
1133 1135
1134 1136 'pull_request_source_repo': pr_source_repo,
1135 1137 'pull_request_source_repo_url': pr_source_repo_url,
1136 1138
1137 1139 'pull_request_url': pr_url,
1138 1140 }
1139 1141
1140 1142 # pre-generate the subject for notification itself
1141 1143 (subject,
1142 1144 _h, _e, # we don't care about those
1143 1145 body_plaintext) = EmailNotificationModel().render_email(
1144 1146 notification_type, **kwargs)
1145 1147
1146 1148 # create notification objects, and emails
1147 1149 NotificationModel().create(
1148 1150 created_by=pull_request.author,
1149 1151 notification_subject=subject,
1150 1152 notification_body=body_plaintext,
1151 1153 notification_type=notification_type,
1152 1154 recipients=recipients,
1153 1155 email_kwargs=kwargs,
1154 1156 )
1155 1157
1156 1158 def delete(self, pull_request, user):
1157 1159 pull_request = self.__get_pull_request(pull_request)
1158 1160 old_data = pull_request.get_api_data(with_merge_state=False)
1159 1161 self._cleanup_merge_workspace(pull_request)
1160 1162 self._log_audit_action(
1161 1163 'repo.pull_request.delete', {'old_data': old_data},
1162 1164 user, pull_request)
1163 1165 Session().delete(pull_request)
1164 1166
1165 1167 def close_pull_request(self, pull_request, user):
1166 1168 pull_request = self.__get_pull_request(pull_request)
1167 1169 self._cleanup_merge_workspace(pull_request)
1168 1170 pull_request.status = PullRequest.STATUS_CLOSED
1169 1171 pull_request.updated_on = datetime.datetime.now()
1170 1172 Session().add(pull_request)
1171 1173 self.trigger_pull_request_hook(
1172 1174 pull_request, pull_request.author, 'close')
1173 1175
1174 1176 pr_data = pull_request.get_api_data(with_merge_state=False)
1175 1177 self._log_audit_action(
1176 1178 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1177 1179
1178 1180 def close_pull_request_with_comment(
1179 1181 self, pull_request, user, repo, message=None, auth_user=None):
1180 1182
1181 1183 pull_request_review_status = pull_request.calculated_review_status()
1182 1184
1183 1185 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1184 1186 # approved only if we have voting consent
1185 1187 status = ChangesetStatus.STATUS_APPROVED
1186 1188 else:
1187 1189 status = ChangesetStatus.STATUS_REJECTED
1188 1190 status_lbl = ChangesetStatus.get_status_lbl(status)
1189 1191
1190 1192 default_message = (
1191 1193 'Closing with status change {transition_icon} {status}.'
1192 1194 ).format(transition_icon='>', status=status_lbl)
1193 1195 text = message or default_message
1194 1196
1195 1197 # create a comment, and link it to new status
1196 1198 comment = CommentsModel().create(
1197 1199 text=text,
1198 1200 repo=repo.repo_id,
1199 1201 user=user.user_id,
1200 1202 pull_request=pull_request.pull_request_id,
1201 1203 status_change=status_lbl,
1202 1204 status_change_type=status,
1203 1205 closing_pr=True,
1204 1206 auth_user=auth_user,
1205 1207 )
1206 1208
1207 1209 # calculate old status before we change it
1208 1210 old_calculated_status = pull_request.calculated_review_status()
1209 1211 ChangesetStatusModel().set_status(
1210 1212 repo.repo_id,
1211 1213 status,
1212 1214 user.user_id,
1213 1215 comment=comment,
1214 1216 pull_request=pull_request.pull_request_id
1215 1217 )
1216 1218
1217 1219 Session().flush()
1218 1220 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1219 1221 # we now calculate the status of the pull request again, and based on that
1220 1222 # calculation trigger a status change. This might happen in cases
1221 1223 # where a non-reviewer admin closes a pr, which means their vote doesn't
1222 1224 # change the status, while if they are a reviewer it might change it.
1223 1225 calculated_status = pull_request.calculated_review_status()
1224 1226 if old_calculated_status != calculated_status:
1225 1227 self.trigger_pull_request_hook(
1226 1228 pull_request, user, 'review_status_change',
1227 1229 data={'status': calculated_status})
1228 1230
1229 1231 # finally close the PR
1230 1232 PullRequestModel().close_pull_request(
1231 1233 pull_request.pull_request_id, user)
1232 1234
1233 1235 return comment, status
1234 1236
1235 1237 def merge_status(self, pull_request, translator=None,
1236 1238 force_shadow_repo_refresh=False):
1237 1239 _ = translator or get_current_request().translate
1238 1240
1239 1241 if not self._is_merge_enabled(pull_request):
1240 1242 return False, _('Server-side pull request merging is disabled.')
1241 1243 if pull_request.is_closed():
1242 1244 return False, _('This pull request is closed.')
1243 1245 merge_possible, msg = self._check_repo_requirements(
1244 1246 target=pull_request.target_repo, source=pull_request.source_repo,
1245 1247 translator=_)
1246 1248 if not merge_possible:
1247 1249 return merge_possible, msg
1248 1250
1249 1251 try:
1250 1252 resp = self._try_merge(
1251 1253 pull_request,
1252 1254 force_shadow_repo_refresh=force_shadow_repo_refresh)
1253 1255 log.debug("Merge response: %s", resp)
1254 1256 status = resp.possible, resp.merge_status_message
1255 1257 except NotImplementedError:
1256 1258 status = False, _('Pull request merging is not supported.')
1257 1259
1258 1260 return status
1259 1261
1260 1262 def _check_repo_requirements(self, target, source, translator):
1261 1263 """
1262 1264 Check if `target` and `source` have compatible requirements.
1263 1265
1264 1266 Currently this is just checking for largefiles.
1265 1267 """
1266 1268 _ = translator
1267 1269 target_has_largefiles = self._has_largefiles(target)
1268 1270 source_has_largefiles = self._has_largefiles(source)
1269 1271 merge_possible = True
1270 1272 message = u''
1271 1273
1272 1274 if target_has_largefiles != source_has_largefiles:
1273 1275 merge_possible = False
1274 1276 if source_has_largefiles:
1275 1277 message = _(
1276 1278 'Target repository large files support is disabled.')
1277 1279 else:
1278 1280 message = _(
1279 1281 'Source repository large files support is disabled.')
1280 1282
1281 1283 return merge_possible, message
1282 1284
1283 1285 def _has_largefiles(self, repo):
1284 1286 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1285 1287 'extensions', 'largefiles')
1286 1288 return largefiles_ui and largefiles_ui[0].active
1287 1289
1288 1290 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1289 1291 """
1290 1292 Try to merge the pull request and return the merge status.
1291 1293 """
1292 1294 log.debug(
1293 1295 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1294 1296 pull_request.pull_request_id, force_shadow_repo_refresh)
1295 1297 target_vcs = pull_request.target_repo.scm_instance()
1296 1298 # Refresh the target reference.
1297 1299 try:
1298 1300 target_ref = self._refresh_reference(
1299 1301 pull_request.target_ref_parts, target_vcs)
1300 1302 except CommitDoesNotExistError:
1301 1303 merge_state = MergeResponse(
1302 1304 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1303 1305 metadata={'target_ref': pull_request.target_ref_parts})
1304 1306 return merge_state
1305 1307
1306 1308 target_locked = pull_request.target_repo.locked
1307 1309 if target_locked and target_locked[0]:
1308 1310 locked_by = 'user:{}'.format(target_locked[0])
1309 1311 log.debug("The target repository is locked by %s.", locked_by)
1310 1312 merge_state = MergeResponse(
1311 1313 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1312 1314 metadata={'locked_by': locked_by})
1313 1315 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1314 1316 pull_request, target_ref):
1315 1317 log.debug("Refreshing the merge status of the repository.")
1316 1318 merge_state = self._refresh_merge_state(
1317 1319 pull_request, target_vcs, target_ref)
1318 1320 else:
1319 1321 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1320 1322 metadata = {
1321 1323 'target_ref': pull_request.target_ref_parts,
1322 1324 'source_ref': pull_request.source_ref_parts,
1323 1325 }
1324 1326 if not possible and target_ref.type == 'branch':
1325 1327 # NOTE(marcink): case for mercurial multiple heads on branch
1326 1328 heads = target_vcs._heads(target_ref.name)
1327 1329 if len(heads) != 1:
1328 1330 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1329 1331 metadata.update({
1330 1332 'heads': heads
1331 1333 })
1332 1334 merge_state = MergeResponse(
1333 1335 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1334 1336
1335 1337 return merge_state
1336 1338
1337 1339 def _refresh_reference(self, reference, vcs_repository):
1338 1340 if reference.type in self.UPDATABLE_REF_TYPES:
1339 1341 name_or_id = reference.name
1340 1342 else:
1341 1343 name_or_id = reference.commit_id
1342 1344
1343 1345 refreshed_commit = vcs_repository.get_commit(name_or_id)
1344 1346 refreshed_reference = Reference(
1345 1347 reference.type, reference.name, refreshed_commit.raw_id)
1346 1348 return refreshed_reference
1347 1349
1348 1350 def _needs_merge_state_refresh(self, pull_request, target_reference):
1349 1351 return not(
1350 1352 pull_request.revisions and
1351 1353 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1352 1354 target_reference.commit_id == pull_request._last_merge_target_rev)
1353 1355
1354 1356 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1355 1357 workspace_id = self._workspace_id(pull_request)
1356 1358 source_vcs = pull_request.source_repo.scm_instance()
1357 1359 repo_id = pull_request.target_repo.repo_id
1358 1360 use_rebase = self._use_rebase_for_merging(pull_request)
1359 1361 close_branch = self._close_branch_before_merging(pull_request)
1360 1362 merge_state = target_vcs.merge(
1361 1363 repo_id, workspace_id,
1362 1364 target_reference, source_vcs, pull_request.source_ref_parts,
1363 1365 dry_run=True, use_rebase=use_rebase,
1364 1366 close_branch=close_branch)
1365 1367
1366 1368 # Do not store the response if there was an unknown error.
1367 1369 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1368 1370 pull_request._last_merge_source_rev = \
1369 1371 pull_request.source_ref_parts.commit_id
1370 1372 pull_request._last_merge_target_rev = target_reference.commit_id
1371 1373 pull_request.last_merge_status = merge_state.failure_reason
1372 1374 pull_request.shadow_merge_ref = merge_state.merge_ref
1373 1375 Session().add(pull_request)
1374 1376 Session().commit()
1375 1377
1376 1378 return merge_state
1377 1379
1378 1380 def _workspace_id(self, pull_request):
1379 1381 workspace_id = 'pr-%s' % pull_request.pull_request_id
1380 1382 return workspace_id
1381 1383
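# Worked example: a pull request with pull_request_id 42 gets the shadow merge
# workspace id 'pr-42'.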
1382 1384 def generate_repo_data(self, repo, commit_id=None, branch=None,
1383 1385 bookmark=None, translator=None):
1384 1386 from rhodecode.model.repo import RepoModel
1385 1387
1386 1388 all_refs, selected_ref = \
1387 1389 self._get_repo_pullrequest_sources(
1388 1390 repo.scm_instance(), commit_id=commit_id,
1389 1391 branch=branch, bookmark=bookmark, translator=translator)
1390 1392
1391 1393 refs_select2 = []
1392 1394 for element in all_refs:
1393 1395 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1394 1396 refs_select2.append({'text': element[1], 'children': children})
1395 1397
1396 1398 return {
1397 1399 'user': {
1398 1400 'user_id': repo.user.user_id,
1399 1401 'username': repo.user.username,
1400 1402 'firstname': repo.user.first_name,
1401 1403 'lastname': repo.user.last_name,
1402 1404 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1403 1405 },
1404 1406 'name': repo.repo_name,
1405 1407 'link': RepoModel().get_url(repo),
1406 1408 'description': h.chop_at_smart(repo.description_safe, '\n'),
1407 1409 'refs': {
1408 1410 'all_refs': all_refs,
1409 1411 'selected_ref': selected_ref,
1410 1412 'select2_refs': refs_select2
1411 1413 }
1412 1414 }
1413 1415
1414 1416 def generate_pullrequest_title(self, source, source_ref, target):
1415 1417 return u'{source}#{at_ref} to {target}'.format(
1416 1418 source=source,
1417 1419 at_ref=source_ref,
1418 1420 target=target,
1419 1421 )
1420 1422
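# Worked example (hypothetical repo names):
#   >>> PullRequestModel().generate_pullrequest_title('repo-fork', 'feature-x', 'repo')
#   u'repo-fork#feature-x to repo'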
1421 1423 def _cleanup_merge_workspace(self, pull_request):
1422 1424 # Merging related cleanup
1423 1425 repo_id = pull_request.target_repo.repo_id
1424 1426 target_scm = pull_request.target_repo.scm_instance()
1425 1427 workspace_id = self._workspace_id(pull_request)
1426 1428
1427 1429 try:
1428 1430 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1429 1431 except NotImplementedError:
1430 1432 pass
1431 1433
1432 1434 def _get_repo_pullrequest_sources(
1433 1435 self, repo, commit_id=None, branch=None, bookmark=None,
1434 1436 translator=None):
1435 1437 """
1436 1438 Return a structure with repo's interesting commits, suitable for
1437 1439 the selectors in pullrequest controller
1438 1440
1439 1441 :param commit_id: a commit that must be in the list somehow
1440 1442 and selected by default
1441 1443 :param branch: a branch that must be in the list and selected
1442 1444 by default - even if closed
1443 1445 :param bookmark: a bookmark that must be in the list and selected
1444 1446 """
1445 1447 _ = translator or get_current_request().translate
1446 1448
1447 1449 commit_id = safe_str(commit_id) if commit_id else None
1448 1450 branch = safe_unicode(branch) if branch else None
1449 1451 bookmark = safe_unicode(bookmark) if bookmark else None
1450 1452
1451 1453 selected = None
1452 1454
1453 1455 # order matters: first source that has commit_id in it will be selected
1454 1456 sources = []
1455 1457 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1456 1458 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1457 1459
1458 1460 if commit_id:
1459 1461 ref_commit = (h.short_id(commit_id), commit_id)
1460 1462 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1461 1463
1462 1464 sources.append(
1463 1465 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1464 1466 )
1465 1467
1466 1468 groups = []
1467 1469
1468 1470 for group_key, ref_list, group_name, match in sources:
1469 1471 group_refs = []
1470 1472 for ref_name, ref_id in ref_list:
1471 1473 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1472 1474 group_refs.append((ref_key, ref_name))
1473 1475
1474 1476 if not selected:
1475 1477 if set([commit_id, match]) & set([ref_id, ref_name]):
1476 1478 selected = ref_key
1477 1479
1478 1480 if group_refs:
1479 1481 groups.append((group_refs, group_name))
1480 1482
1481 1483 if not selected:
1482 1484 ref = commit_id or branch or bookmark
1483 1485 if ref:
1484 1486 raise CommitDoesNotExistError(
1485 1487 u'No commit refs could be found matching: {}'.format(ref))
1486 1488 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1487 1489 selected = u'branch:{}:{}'.format(
1488 1490 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1489 1491 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1490 1492 )
1491 1493 elif repo.commit_ids:
1492 1494 # make the user select in this case
1493 1495 selected = None
1494 1496 else:
1495 1497 raise EmptyRepositoryError()
1496 1498 return groups, selected
1497 1499
1498 1500 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1499 1501 hide_whitespace_changes, diff_context):
1500 1502
1501 1503 return self._get_diff_from_pr_or_version(
1502 1504 source_repo, source_ref_id, target_ref_id,
1503 1505 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1504 1506
1505 1507 def _get_diff_from_pr_or_version(
1506 1508 self, source_repo, source_ref_id, target_ref_id,
1507 1509 hide_whitespace_changes, diff_context):
1508 1510
1509 1511 target_commit = source_repo.get_commit(
1510 1512 commit_id=safe_str(target_ref_id))
1511 1513 source_commit = source_repo.get_commit(
1512 1514 commit_id=safe_str(source_ref_id))
1513 1515 if isinstance(source_repo, Repository):
1514 1516 vcs_repo = source_repo.scm_instance()
1515 1517 else:
1516 1518 vcs_repo = source_repo
1517 1519
1518 1520 # TODO: johbo: In the context of an update, we cannot reach
1519 1521 # the old commit anymore with our normal mechanisms. It needs
1520 1522 # some sort of special support in the vcs layer to avoid this
1521 1523 # workaround.
1522 1524 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1523 1525 vcs_repo.alias == 'git'):
1524 1526 source_commit.raw_id = safe_str(source_ref_id)
1525 1527
1526 1528 log.debug('calculating diff between '
1527 1529 'source_ref:%s and target_ref:%s for repo `%s`',
1528 1530 target_ref_id, source_ref_id,
1529 1531 safe_unicode(vcs_repo.path))
1530 1532
1531 1533 vcs_diff = vcs_repo.get_diff(
1532 1534 commit1=target_commit, commit2=source_commit,
1533 1535 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1534 1536 return vcs_diff
1535 1537
1536 1538 def _is_merge_enabled(self, pull_request):
1537 1539 return self._get_general_setting(
1538 1540 pull_request, 'rhodecode_pr_merge_enabled')
1539 1541
1540 1542 def _use_rebase_for_merging(self, pull_request):
1541 1543 repo_type = pull_request.target_repo.repo_type
1542 1544 if repo_type == 'hg':
1543 1545 return self._get_general_setting(
1544 1546 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1545 1547 elif repo_type == 'git':
1546 1548 return self._get_general_setting(
1547 1549 pull_request, 'rhodecode_git_use_rebase_for_merging')
1548 1550
1549 1551 return False
1550 1552
1551 1553 def _close_branch_before_merging(self, pull_request):
1552 1554 repo_type = pull_request.target_repo.repo_type
1553 1555 if repo_type == 'hg':
1554 1556 return self._get_general_setting(
1555 1557 pull_request, 'rhodecode_hg_close_branch_before_merging')
1556 1558 elif repo_type == 'git':
1557 1559 return self._get_general_setting(
1558 1560 pull_request, 'rhodecode_git_close_branch_before_merging')
1559 1561
1560 1562 return False
1561 1563
1562 1564 def _get_general_setting(self, pull_request, settings_key, default=False):
1563 1565 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1564 1566 settings = settings_model.get_general_settings()
1565 1567 return settings.get(settings_key, default)
1566 1568
1567 1569 def _log_audit_action(self, action, action_data, user, pull_request):
1568 1570 audit_logger.store(
1569 1571 action=action,
1570 1572 action_data=action_data,
1571 1573 user=user,
1572 1574 repo=pull_request.target_repo)
1573 1575
1574 1576 def get_reviewer_functions(self):
1575 1577 """
1576 1578 Fetches functions for validation and fetching default reviewers.
1577 1579 If available we use the EE package, else we fallback to CE
1578 1580 package functions
1579 1581 """
1580 1582 try:
1581 1583 from rc_reviewers.utils import get_default_reviewers_data
1582 1584 from rc_reviewers.utils import validate_default_reviewers
1583 1585 except ImportError:
1584 1586 from rhodecode.apps.repository.utils import get_default_reviewers_data
1585 1587 from rhodecode.apps.repository.utils import validate_default_reviewers
1586 1588
1587 1589 return get_default_reviewers_data, validate_default_reviewers
1588 1590
1589 1591
1590 1592 class MergeCheck(object):
1591 1593 """
1592 1594 Perform merge checks and return a check object which stores information
1593 1595 about merge errors and merge conditions
1594 1596 """
1595 1597 TODO_CHECK = 'todo'
1596 1598 PERM_CHECK = 'perm'
1597 1599 REVIEW_CHECK = 'review'
1598 1600 MERGE_CHECK = 'merge'
1599 1601
1600 1602 def __init__(self):
1601 1603 self.review_status = None
1602 1604 self.merge_possible = None
1603 1605 self.merge_msg = ''
1604 1606 self.failed = None
1605 1607 self.errors = []
1606 1608 self.error_details = OrderedDict()
1607 1609
1608 1610 def push_error(self, error_type, message, error_key, details):
1609 1611 self.failed = True
1610 1612 self.errors.append([error_type, message])
1611 1613 self.error_details[error_key] = dict(
1612 1614 details=details,
1613 1615 error_type=error_type,
1614 1616 message=message
1615 1617 )
1616 1618
1617 1619 @classmethod
1618 1620 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1619 1621 force_shadow_repo_refresh=False):
1620 1622 _ = translator
1621 1623 merge_check = cls()
1622 1624
1623 1625 # permissions to merge
1624 1626 user_allowed_to_merge = PullRequestModel().check_user_merge(
1625 1627 pull_request, auth_user)
1626 1628 if not user_allowed_to_merge:
1627 1629 log.debug("MergeCheck: cannot merge, approval is pending.")
1628 1630
1629 1631 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1630 1632 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1631 1633 if fail_early:
1632 1634 return merge_check
1633 1635
1634 1636 # permission to merge into the target branch
1635 1637 target_commit_id = pull_request.target_ref_parts.commit_id
1636 1638 if pull_request.target_ref_parts.type == 'branch':
1637 1639 branch_name = pull_request.target_ref_parts.name
1638 1640 else:
1639 1641 # for mercurial we can always figure out the branch from the commit
1640 1642 # in case of bookmark
1641 1643 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1642 1644 branch_name = target_commit.branch
1643 1645
1644 1646 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1645 1647 pull_request.target_repo.repo_name, branch_name)
1646 1648 if branch_perm and branch_perm == 'branch.none':
1647 1649 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1648 1650 branch_name, rule)
1649 1651 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1650 1652 if fail_early:
1651 1653 return merge_check
1652 1654
1653 1655 # review status, must be always present
1654 1656 review_status = pull_request.calculated_review_status()
1655 1657 merge_check.review_status = review_status
1656 1658
1657 1659 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1658 1660 if not status_approved:
1659 1661 log.debug("MergeCheck: cannot merge, approval is pending.")
1660 1662
1661 1663 msg = _('Pull request reviewer approval is pending.')
1662 1664
1663 1665 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1664 1666
1665 1667 if fail_early:
1666 1668 return merge_check
1667 1669
1668 1670 # left over TODOs
1669 1671 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1670 1672 if todos:
1671 1673 log.debug("MergeCheck: cannot merge, {} "
1672 1674 "unresolved TODOs left.".format(len(todos)))
1673 1675
1674 1676 if len(todos) == 1:
1675 1677 msg = _('Cannot merge, {} TODO still not resolved.').format(
1676 1678 len(todos))
1677 1679 else:
1678 1680 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1679 1681 len(todos))
1680 1682
1681 1683 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1682 1684
1683 1685 if fail_early:
1684 1686 return merge_check
1685 1687
1686 1688 # merge possible, here is the filesystem simulation + shadow repo
1687 1689 merge_status, msg = PullRequestModel().merge_status(
1688 1690 pull_request, translator=translator,
1689 1691 force_shadow_repo_refresh=force_shadow_repo_refresh)
1690 1692 merge_check.merge_possible = merge_status
1691 1693 merge_check.merge_msg = msg
1692 1694 if not merge_status:
1693 1695 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1694 1696 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1695 1697
1696 1698 if fail_early:
1697 1699 return merge_check
1698 1700
1699 1701 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1700 1702 return merge_check
1701 1703
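# Illustrative usage (hypothetical request object): callers typically run the
# full validation and then inspect the aggregated result.
#
#     check = MergeCheck.validate(
#         pull_request, auth_user=auth_user, translator=request.translate)
#     if check.failed:
#         for error_type, message in check.errors:
#             log.debug('merge check %s: %s', error_type, message)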
1702 1704 @classmethod
1703 1705 def get_merge_conditions(cls, pull_request, translator):
1704 1706 _ = translator
1705 1707 merge_details = {}
1706 1708
1707 1709 model = PullRequestModel()
1708 1710 use_rebase = model._use_rebase_for_merging(pull_request)
1709 1711
1710 1712 if use_rebase:
1711 1713 merge_details['merge_strategy'] = dict(
1712 1714 details={},
1713 1715 message=_('Merge strategy: rebase')
1714 1716 )
1715 1717 else:
1716 1718 merge_details['merge_strategy'] = dict(
1717 1719 details={},
1718 1720 message=_('Merge strategy: explicit merge commit')
1719 1721 )
1720 1722
1721 1723 close_branch = model._close_branch_before_merging(pull_request)
1722 1724 if close_branch:
1723 1725 repo_type = pull_request.target_repo.repo_type
1724 1726 close_msg = ''
1725 1727 if repo_type == 'hg':
1726 1728 close_msg = _('Source branch will be closed after merge.')
1727 1729 elif repo_type == 'git':
1728 1730 close_msg = _('Source branch will be deleted after merge.')
1729 1731
1730 1732 merge_details['close_branch'] = dict(
1731 1733 details={},
1732 1734 message=close_msg
1733 1735 )
1734 1736
1735 1737 return merge_details
1736 1738
1737 1739
1738 1740 ChangeTuple = collections.namedtuple(
1739 1741 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1740 1742
1741 1743 FileChangeTuple = collections.namedtuple(
1742 1744 'FileChangeTuple', ['added', 'modified', 'removed'])