default-reviewers: fixed voting rule calculation on user-group. The previous...
marcink
r2960:545905e2 default
@@ -1,4517 +1,4522 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # noqa
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from zope.cachedescriptors.property import Lazy as LazyProperty
51 51
52 52 from pyramid.threadlocal import get_current_request
53 53
54 54 from rhodecode.translation import _
55 55 from rhodecode.lib.vcs import get_vcs_instance
56 56 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 57 from rhodecode.lib.utils2 import (
58 58 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
59 59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 60 glob2re, StrictAttributeDict, cleaned_uri)
61 61 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 62 JsonRaw
63 63 from rhodecode.lib.ext_json import json
64 64 from rhodecode.lib.caching_query import FromCache
65 65 from rhodecode.lib.encrypt import AESCipher
66 66
67 67 from rhodecode.model.meta import Base, Session
68 68
69 69 URL_SEP = '/'
70 70 log = logging.getLogger(__name__)
71 71
72 72 # =============================================================================
73 73 # BASE CLASSES
74 74 # =============================================================================
75 75
76 76 # This is propagated from the .ini file setting rhodecode.encrypted_values.secret,
77 77 # or beaker.session.secret if the first one is not set,
78 78 # and is initialized in environment.py
79 79 ENCRYPTION_KEY = None
80 80
81 81 # used to sort permissions by type; '#' is not allowed in usernames
82 82 # and sorts very early in the string.printable table.
83 83 PERMISSION_TYPE_SORT = {
84 84 'admin': '####',
85 85 'write': '###',
86 86 'read': '##',
87 87 'none': '#',
88 88 }
89 89
90 90
91 91 def display_user_sort(obj):
92 92 """
93 93     Sort function used to sort permissions in the .permissions() function of
94 94     Repository, RepoGroup and UserGroup. It also puts the default user in front
95 95     of all other entries.
96 96 """
97 97
98 98 if obj.username == User.DEFAULT_USER:
99 99 return '#####'
100 100 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
101 101 return prefix + obj.username
102 102
103 103
104 104 def display_user_group_sort(obj):
105 105 """
106 106     Sort function used to sort permissions in the .permissions() function of
107 107     Repository, RepoGroup and UserGroup. It also puts the default user in front
108 108     of all other entries.
109 109 """
110 110
111 111 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
112 112 return prefix + obj.users_group_name
113 113
114 114
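
# Illustrative sketch: the '#' prefixes above make longer prefixes sort first,
# so rows order as default user, admin, write, read, none, alphabetically
# within each group. The AttributeDict rows mimic what the .permissions()
# methods further down in this module produce.
def _example_permission_sort():
    rows = [
        AttributeDict(username='bob', permission='repository.read'),
        AttributeDict(username='alice', permission='repository.admin'),
        AttributeDict(username='default', permission='repository.read'),
    ]
    ordered = sorted(rows, key=display_user_sort)
    return [row.username for row in ordered]  # ['default', 'alice', 'bob']
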
115 115 def _hash_key(k):
116 116 return sha1_safe(k)
117 117
118 118
119 119 def in_filter_generator(qry, items, limit=500):
120 120 """
121 121 Splits IN() into multiple with OR
122 122 e.g.::
123 123 cnt = Repository.query().filter(
124 124 or_(
125 125 *in_filter_generator(Repository.repo_id, range(100000))
126 126 )).count()
127 127 """
128 128 if not items:
129 129         # an empty list would produce an empty IN() query, which might cause
130 130         # security issues and hidden, unpleasant results
131 131 items = [-1]
132 132
133 133 parts = []
134 134 for chunk in xrange(0, len(items), limit):
135 135 parts.append(
136 136 qry.in_(items[chunk: chunk + limit])
137 137 )
138 138
139 139 return parts
140 140
141 141
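
# Illustrative sketch, assuming an initialized database Session: wrapping the
# generated parts in or_() keeps very large IN() clauses below backend limits
# by splitting them into chunks of `limit` (500 by default), as in the
# docstring example above.
def _example_in_filter_generator():
    repo_ids = range(100000)
    return Repository.query().filter(
        or_(*in_filter_generator(Repository.repo_id, repo_ids))
    ).count()
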
142 142 base_table_args = {
143 143 'extend_existing': True,
144 144 'mysql_engine': 'InnoDB',
145 145 'mysql_charset': 'utf8',
146 146 'sqlite_autoincrement': True
147 147 }
148 148
149 149
150 150 class EncryptedTextValue(TypeDecorator):
151 151 """
152 152 Special column for encrypted long text data, use like::
153 153
154 154         value = Column("encrypted_value", EncryptedTextValue(), nullable=False)
155 155
156 156     This column is intelligent: if the stored value is in unencrypted form it is
157 157     returned as-is, but on save the value is always encrypted
158 158 """
159 159 impl = Text
160 160
161 161 def process_bind_param(self, value, dialect):
162 162 if not value:
163 163 return value
164 164 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
165 165             # protect against double encryption if someone manually tries
166 166             # to store an already-encrypted value
167 167 raise ValueError('value needs to be in unencrypted format, ie. '
168 168 'not starting with enc$aes')
169 169 return 'enc$aes_hmac$%s' % AESCipher(
170 170 ENCRYPTION_KEY, hmac=True).encrypt(value)
171 171
172 172 def process_result_value(self, value, dialect):
173 173 import rhodecode
174 174
175 175 if not value:
176 176 return value
177 177
178 178 parts = value.split('$', 3)
179 179 if not len(parts) == 3:
180 180 # probably not encrypted values
181 181 return value
182 182 else:
183 183 if parts[0] != 'enc':
184 184                 # parts look ok, but the value lacks our 'enc' header
185 185 return value
186 186 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
187 187 'rhodecode.encrypted_values.strict') or True)
188 188 # at that stage we know it's our encryption
189 189 if parts[1] == 'aes':
190 190 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
191 191 elif parts[1] == 'aes_hmac':
192 192 decrypted_data = AESCipher(
193 193 ENCRYPTION_KEY, hmac=True,
194 194 strict_verification=enc_strict_mode).decrypt(parts[2])
195 195 else:
196 196 raise ValueError(
197 197 'Encryption type part is wrong, must be `aes` '
198 198 'or `aes_hmac`, got `%s` instead' % (parts[1]))
199 199 return decrypted_data
200 200
201 201
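
# Illustrative sketch of the round-trip this column type performs, assuming
# ENCRYPTION_KEY has been initialized from the .ini file: process_bind_param()
# prefixes the AES+HMAC ciphertext with 'enc$aes_hmac$' on save, and
# process_result_value() strips the prefix and decrypts on load.
def _example_encrypted_roundtrip(secret_uri):
    column_type = EncryptedTextValue()
    stored = column_type.process_bind_param(secret_uri, None)
    # `stored` now looks like 'enc$aes_hmac$<ciphertext>'
    return column_type.process_result_value(stored, None)
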
202 202 class BaseModel(object):
203 203 """
204 204 Base Model for all classes
205 205 """
206 206
207 207 @classmethod
208 208 def _get_keys(cls):
209 209 """return column names for this model """
210 210 return class_mapper(cls).c.keys()
211 211
212 212 def get_dict(self):
213 213 """
214 214 return dict with keys and values corresponding
215 215 to this model data """
216 216
217 217 d = {}
218 218 for k in self._get_keys():
219 219 d[k] = getattr(self, k)
220 220
221 221 # also use __json__() if present to get additional fields
222 222 _json_attr = getattr(self, '__json__', None)
223 223 if _json_attr:
224 224 # update with attributes from __json__
225 225 if callable(_json_attr):
226 226 _json_attr = _json_attr()
227 227 for k, val in _json_attr.iteritems():
228 228 d[k] = val
229 229 return d
230 230
231 231 def get_appstruct(self):
232 232 """return list with keys and values tuples corresponding
233 233 to this model data """
234 234
235 235 lst = []
236 236 for k in self._get_keys():
237 237 lst.append((k, getattr(self, k),))
238 238 return lst
239 239
240 240 def populate_obj(self, populate_dict):
241 241 """populate model with data from given populate_dict"""
242 242
243 243 for k in self._get_keys():
244 244 if k in populate_dict:
245 245 setattr(self, k, populate_dict[k])
246 246
247 247 @classmethod
248 248 def query(cls):
249 249 return Session().query(cls)
250 250
251 251 @classmethod
252 252 def get(cls, id_):
253 253 if id_:
254 254 return cls.query().get(id_)
255 255
256 256 @classmethod
257 257 def get_or_404(cls, id_):
258 258 from pyramid.httpexceptions import HTTPNotFound
259 259
260 260 try:
261 261 id_ = int(id_)
262 262 except (TypeError, ValueError):
263 263 raise HTTPNotFound()
264 264
265 265 res = cls.query().get(id_)
266 266 if not res:
267 267 raise HTTPNotFound()
268 268 return res
269 269
270 270 @classmethod
271 271 def getAll(cls):
272 272 # deprecated and left for backward compatibility
273 273 return cls.get_all()
274 274
275 275 @classmethod
276 276 def get_all(cls):
277 277 return cls.query().all()
278 278
279 279 @classmethod
280 280 def delete(cls, id_):
281 281 obj = cls.query().get(id_)
282 282 Session().delete(obj)
283 283
284 284 @classmethod
285 285 def identity_cache(cls, session, attr_name, value):
286 286 exist_in_session = []
287 287 for (item_cls, pkey), instance in session.identity_map.items():
288 288 if cls == item_cls and getattr(instance, attr_name) == value:
289 289 exist_in_session.append(instance)
290 290 if exist_in_session:
291 291 if len(exist_in_session) == 1:
292 292 return exist_in_session[0]
293 293 log.exception(
294 294 'multiple objects with attr %s and '
295 295 'value %s found with same name: %r',
296 296 attr_name, value, exist_in_session)
297 297
298 298 def __repr__(self):
299 299 if hasattr(self, '__unicode__'):
300 300 # python repr needs to return str
301 301 try:
302 302 return safe_str(self.__unicode__())
303 303 except UnicodeDecodeError:
304 304 pass
305 305 return '<DB:%s>' % (self.__class__.__name__)
306 306
307 307
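
# Illustrative sketch: every model inherits these helpers, so a lookup that
# must exist in view code can use get_or_404(), and get_dict() serializes the
# mapped columns merged with any __json__() extras.
def _example_base_model_usage(user_id):
    user = User.get_or_404(user_id)   # raises HTTPNotFound when missing
    data = user.get_dict()            # column values + __json__() extras
    return data['username']
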
308 308 class RhodeCodeSetting(Base, BaseModel):
309 309 __tablename__ = 'rhodecode_settings'
310 310 __table_args__ = (
311 311 UniqueConstraint('app_settings_name'),
312 312 base_table_args
313 313 )
314 314
315 315 SETTINGS_TYPES = {
316 316 'str': safe_str,
317 317 'int': safe_int,
318 318 'unicode': safe_unicode,
319 319 'bool': str2bool,
320 320 'list': functools.partial(aslist, sep=',')
321 321 }
322 322 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
323 323 GLOBAL_CONF_KEY = 'app_settings'
324 324
325 325 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
326 326 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
327 327 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
328 328 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
329 329
330 330 def __init__(self, key='', val='', type='unicode'):
331 331 self.app_settings_name = key
332 332 self.app_settings_type = type
333 333 self.app_settings_value = val
334 334
335 335 @validates('_app_settings_value')
336 336 def validate_settings_value(self, key, val):
337 337 assert type(val) == unicode
338 338 return val
339 339
340 340 @hybrid_property
341 341 def app_settings_value(self):
342 342 v = self._app_settings_value
343 343 _type = self.app_settings_type
344 344 if _type:
345 345 _type = self.app_settings_type.split('.')[0]
346 346 # decode the encrypted value
347 347 if 'encrypted' in self.app_settings_type:
348 348 cipher = EncryptedTextValue()
349 349 v = safe_unicode(cipher.process_result_value(v, None))
350 350
351 351 converter = self.SETTINGS_TYPES.get(_type) or \
352 352 self.SETTINGS_TYPES['unicode']
353 353 return converter(v)
354 354
355 355 @app_settings_value.setter
356 356 def app_settings_value(self, val):
357 357 """
358 358 Setter that will always make sure we use unicode in app_settings_value
359 359
360 360 :param val:
361 361 """
362 362 val = safe_unicode(val)
363 363 # encode the encrypted value
364 364 if 'encrypted' in self.app_settings_type:
365 365 cipher = EncryptedTextValue()
366 366 val = safe_unicode(cipher.process_bind_param(val, None))
367 367 self._app_settings_value = val
368 368
369 369 @hybrid_property
370 370 def app_settings_type(self):
371 371 return self._app_settings_type
372 372
373 373 @app_settings_type.setter
374 374 def app_settings_type(self, val):
375 375 if val.split('.')[0] not in self.SETTINGS_TYPES:
376 376 raise Exception('type must be one of %s got %s'
377 377 % (self.SETTINGS_TYPES.keys(), val))
378 378 self._app_settings_type = val
379 379
380 380 def __unicode__(self):
381 381 return u"<%s('%s:%s[%s]')>" % (
382 382 self.__class__.__name__,
383 383 self.app_settings_name, self.app_settings_value,
384 384 self.app_settings_type
385 385 )
386 386
387 387
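
# Illustrative sketch: settings are stored as strings and converted on access.
# The type string picks a converter from SETTINGS_TYPES, and a
# '<type>.encrypted' suffix additionally routes the raw value through
# EncryptedTextValue.
def _example_typed_setting():
    setting = RhodeCodeSetting('show_version', 'True', 'bool')
    return setting.app_settings_value  # str2bool('True') -> True
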
388 388 class RhodeCodeUi(Base, BaseModel):
389 389 __tablename__ = 'rhodecode_ui'
390 390 __table_args__ = (
391 391 UniqueConstraint('ui_key'),
392 392 base_table_args
393 393 )
394 394
395 395 HOOK_REPO_SIZE = 'changegroup.repo_size'
396 396 # HG
397 397 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
398 398 HOOK_PULL = 'outgoing.pull_logger'
399 399 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
400 400 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
401 401 HOOK_PUSH = 'changegroup.push_logger'
402 402 HOOK_PUSH_KEY = 'pushkey.key_push'
403 403
404 404 # TODO: johbo: Unify way how hooks are configured for git and hg,
405 405 # git part is currently hardcoded.
406 406
407 407 # SVN PATTERNS
408 408 SVN_BRANCH_ID = 'vcs_svn_branch'
409 409 SVN_TAG_ID = 'vcs_svn_tag'
410 410
411 411 ui_id = Column(
412 412 "ui_id", Integer(), nullable=False, unique=True, default=None,
413 413 primary_key=True)
414 414 ui_section = Column(
415 415 "ui_section", String(255), nullable=True, unique=None, default=None)
416 416 ui_key = Column(
417 417 "ui_key", String(255), nullable=True, unique=None, default=None)
418 418 ui_value = Column(
419 419 "ui_value", String(255), nullable=True, unique=None, default=None)
420 420 ui_active = Column(
421 421 "ui_active", Boolean(), nullable=True, unique=None, default=True)
422 422
423 423 def __repr__(self):
424 424 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
425 425 self.ui_key, self.ui_value)
426 426
427 427
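
# Illustrative sketch, assuming hook entries live in the 'hooks' ui_section as
# the HOOK_* constants above suggest: rhodecode_ui rows are addressed by
# (ui_section, ui_key) and can be toggled via ui_active.
def _example_active_hooks():
    return RhodeCodeUi.query()\
        .filter(RhodeCodeUi.ui_section == 'hooks')\
        .filter(RhodeCodeUi.ui_active == true())\
        .all()
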
428 428 class RepoRhodeCodeSetting(Base, BaseModel):
429 429 __tablename__ = 'repo_rhodecode_settings'
430 430 __table_args__ = (
431 431 UniqueConstraint(
432 432 'app_settings_name', 'repository_id',
433 433 name='uq_repo_rhodecode_setting_name_repo_id'),
434 434 base_table_args
435 435 )
436 436
437 437 repository_id = Column(
438 438 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
439 439 nullable=False)
440 440 app_settings_id = Column(
441 441 "app_settings_id", Integer(), nullable=False, unique=True,
442 442 default=None, primary_key=True)
443 443 app_settings_name = Column(
444 444 "app_settings_name", String(255), nullable=True, unique=None,
445 445 default=None)
446 446 _app_settings_value = Column(
447 447 "app_settings_value", String(4096), nullable=True, unique=None,
448 448 default=None)
449 449 _app_settings_type = Column(
450 450 "app_settings_type", String(255), nullable=True, unique=None,
451 451 default=None)
452 452
453 453 repository = relationship('Repository')
454 454
455 455 def __init__(self, repository_id, key='', val='', type='unicode'):
456 456 self.repository_id = repository_id
457 457 self.app_settings_name = key
458 458 self.app_settings_type = type
459 459 self.app_settings_value = val
460 460
461 461 @validates('_app_settings_value')
462 462 def validate_settings_value(self, key, val):
463 463 assert type(val) == unicode
464 464 return val
465 465
466 466 @hybrid_property
467 467 def app_settings_value(self):
468 468 v = self._app_settings_value
469 469 type_ = self.app_settings_type
470 470 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
471 471 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
472 472 return converter(v)
473 473
474 474 @app_settings_value.setter
475 475 def app_settings_value(self, val):
476 476 """
477 477 Setter that will always make sure we use unicode in app_settings_value
478 478
479 479 :param val:
480 480 """
481 481 self._app_settings_value = safe_unicode(val)
482 482
483 483 @hybrid_property
484 484 def app_settings_type(self):
485 485 return self._app_settings_type
486 486
487 487 @app_settings_type.setter
488 488 def app_settings_type(self, val):
489 489 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
490 490 if val not in SETTINGS_TYPES:
491 491 raise Exception('type must be one of %s got %s'
492 492 % (SETTINGS_TYPES.keys(), val))
493 493 self._app_settings_type = val
494 494
495 495 def __unicode__(self):
496 496 return u"<%s('%s:%s:%s[%s]')>" % (
497 497 self.__class__.__name__, self.repository.repo_name,
498 498 self.app_settings_name, self.app_settings_value,
499 499 self.app_settings_type
500 500 )
501 501
502 502
503 503 class RepoRhodeCodeUi(Base, BaseModel):
504 504 __tablename__ = 'repo_rhodecode_ui'
505 505 __table_args__ = (
506 506 UniqueConstraint(
507 507 'repository_id', 'ui_section', 'ui_key',
508 508 name='uq_repo_rhodecode_ui_repository_id_section_key'),
509 509 base_table_args
510 510 )
511 511
512 512 repository_id = Column(
513 513 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
514 514 nullable=False)
515 515 ui_id = Column(
516 516 "ui_id", Integer(), nullable=False, unique=True, default=None,
517 517 primary_key=True)
518 518 ui_section = Column(
519 519 "ui_section", String(255), nullable=True, unique=None, default=None)
520 520 ui_key = Column(
521 521 "ui_key", String(255), nullable=True, unique=None, default=None)
522 522 ui_value = Column(
523 523 "ui_value", String(255), nullable=True, unique=None, default=None)
524 524 ui_active = Column(
525 525 "ui_active", Boolean(), nullable=True, unique=None, default=True)
526 526
527 527 repository = relationship('Repository')
528 528
529 529 def __repr__(self):
530 530 return '<%s[%s:%s]%s=>%s]>' % (
531 531 self.__class__.__name__, self.repository.repo_name,
532 532 self.ui_section, self.ui_key, self.ui_value)
533 533
534 534
535 535 class User(Base, BaseModel):
536 536 __tablename__ = 'users'
537 537 __table_args__ = (
538 538 UniqueConstraint('username'), UniqueConstraint('email'),
539 539 Index('u_username_idx', 'username'),
540 540 Index('u_email_idx', 'email'),
541 541 base_table_args
542 542 )
543 543
544 544 DEFAULT_USER = 'default'
545 545 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
546 546 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
547 547
548 548 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
549 549 username = Column("username", String(255), nullable=True, unique=None, default=None)
550 550 password = Column("password", String(255), nullable=True, unique=None, default=None)
551 551 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
552 552 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
553 553 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
554 554 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
555 555 _email = Column("email", String(255), nullable=True, unique=None, default=None)
556 556 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
557 557 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
558 558
559 559 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
560 560 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
561 561 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
562 562 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
563 563 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
564 564 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
565 565
566 566 user_log = relationship('UserLog')
567 567 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
568 568
569 569 repositories = relationship('Repository')
570 570 repository_groups = relationship('RepoGroup')
571 571 user_groups = relationship('UserGroup')
572 572
573 573 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
574 574 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
575 575
576 576 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
577 577 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
578 578 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
579 579
580 580 group_member = relationship('UserGroupMember', cascade='all')
581 581
582 582 notifications = relationship('UserNotification', cascade='all')
583 583 # notifications assigned to this user
584 584 user_created_notifications = relationship('Notification', cascade='all')
585 585 # comments created by this user
586 586 user_comments = relationship('ChangesetComment', cascade='all')
587 587 # user profile extra info
588 588 user_emails = relationship('UserEmailMap', cascade='all')
589 589 user_ip_map = relationship('UserIpMap', cascade='all')
590 590 user_auth_tokens = relationship('UserApiKeys', cascade='all')
591 591 user_ssh_keys = relationship('UserSshKeys', cascade='all')
592 592
593 593 # gists
594 594 user_gists = relationship('Gist', cascade='all')
595 595 # user pull requests
596 596 user_pull_requests = relationship('PullRequest', cascade='all')
597 597 # external identities
598 598 extenal_identities = relationship(
599 599 'ExternalIdentity',
600 600 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
601 601 cascade='all')
602 602 # review rules
603 603 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
604 604
605 605 def __unicode__(self):
606 606 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
607 607 self.user_id, self.username)
608 608
609 609 @hybrid_property
610 610 def email(self):
611 611 return self._email
612 612
613 613 @email.setter
614 614 def email(self, val):
615 615 self._email = val.lower() if val else None
616 616
617 617 @hybrid_property
618 618 def first_name(self):
619 619 from rhodecode.lib import helpers as h
620 620 if self.name:
621 621 return h.escape(self.name)
622 622 return self.name
623 623
624 624 @hybrid_property
625 625 def last_name(self):
626 626 from rhodecode.lib import helpers as h
627 627 if self.lastname:
628 628 return h.escape(self.lastname)
629 629 return self.lastname
630 630
631 631 @hybrid_property
632 632 def api_key(self):
633 633 """
634 634         Fetch an auth-token with role ALL connected to this user, if one exists
635 635 """
636 636 user_auth_token = UserApiKeys.query()\
637 637 .filter(UserApiKeys.user_id == self.user_id)\
638 638 .filter(or_(UserApiKeys.expires == -1,
639 639 UserApiKeys.expires >= time.time()))\
640 640 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
641 641 if user_auth_token:
642 642 user_auth_token = user_auth_token.api_key
643 643
644 644 return user_auth_token
645 645
646 646 @api_key.setter
647 647 def api_key(self, val):
648 648         # don't allow setting the API key; this is deprecated for now
649 649 self._api_key = None
650 650
651 651 @property
652 652 def reviewer_pull_requests(self):
653 653 return PullRequestReviewers.query() \
654 654 .options(joinedload(PullRequestReviewers.pull_request)) \
655 655 .filter(PullRequestReviewers.user_id == self.user_id) \
656 656 .all()
657 657
658 658 @property
659 659 def firstname(self):
660 660 # alias for future
661 661 return self.name
662 662
663 663 @property
664 664 def emails(self):
665 665 other = UserEmailMap.query()\
666 666 .filter(UserEmailMap.user == self) \
667 667 .order_by(UserEmailMap.email_id.asc()) \
668 668 .all()
669 669 return [self.email] + [x.email for x in other]
670 670
671 671 @property
672 672 def auth_tokens(self):
673 673 auth_tokens = self.get_auth_tokens()
674 674 return [x.api_key for x in auth_tokens]
675 675
676 676 def get_auth_tokens(self):
677 677 return UserApiKeys.query()\
678 678 .filter(UserApiKeys.user == self)\
679 679 .order_by(UserApiKeys.user_api_key_id.asc())\
680 680 .all()
681 681
682 682 @LazyProperty
683 683 def feed_token(self):
684 684 return self.get_feed_token()
685 685
686 686 def get_feed_token(self, cache=True):
687 687 feed_tokens = UserApiKeys.query()\
688 688 .filter(UserApiKeys.user == self)\
689 689 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
690 690 if cache:
691 691 feed_tokens = feed_tokens.options(
692 692 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
693 693
694 694 feed_tokens = feed_tokens.all()
695 695 if feed_tokens:
696 696 return feed_tokens[0].api_key
697 697 return 'NO_FEED_TOKEN_AVAILABLE'
698 698
699 699 @classmethod
700 700 def get(cls, user_id, cache=False):
701 701 if not user_id:
702 702 return
703 703
704 704 user = cls.query()
705 705 if cache:
706 706 user = user.options(
707 707 FromCache("sql_cache_short", "get_users_%s" % user_id))
708 708 return user.get(user_id)
709 709
710 710 @classmethod
711 711 def extra_valid_auth_tokens(cls, user, role=None):
712 712 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
713 713 .filter(or_(UserApiKeys.expires == -1,
714 714 UserApiKeys.expires >= time.time()))
715 715 if role:
716 716 tokens = tokens.filter(or_(UserApiKeys.role == role,
717 717 UserApiKeys.role == UserApiKeys.ROLE_ALL))
718 718 return tokens.all()
719 719
720 720 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
721 721 from rhodecode.lib import auth
722 722
723 723 log.debug('Trying to authenticate user: %s via auth-token, '
724 724 'and roles: %s', self, roles)
725 725
726 726 if not auth_token:
727 727 return False
728 728
729 729 crypto_backend = auth.crypto_backend()
730 730
731 731 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
732 732 tokens_q = UserApiKeys.query()\
733 733 .filter(UserApiKeys.user_id == self.user_id)\
734 734 .filter(or_(UserApiKeys.expires == -1,
735 735 UserApiKeys.expires >= time.time()))
736 736
737 737 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
738 738
739 739 plain_tokens = []
740 740 hash_tokens = []
741 741
742 742 for token in tokens_q.all():
743 743 # verify scope first
744 744 if token.repo_id:
745 745 # token has a scope, we need to verify it
746 746 if scope_repo_id != token.repo_id:
747 747 log.debug(
748 748 'Scope mismatch: token has a set repo scope: %s, '
749 749 'and calling scope is:%s, skipping further checks',
750 750 token.repo, scope_repo_id)
751 751 # token has a scope, and it doesn't match, skip token
752 752 continue
753 753
754 754 if token.api_key.startswith(crypto_backend.ENC_PREF):
755 755 hash_tokens.append(token.api_key)
756 756 else:
757 757 plain_tokens.append(token.api_key)
758 758
759 759 is_plain_match = auth_token in plain_tokens
760 760 if is_plain_match:
761 761 return True
762 762
763 763 for hashed in hash_tokens:
764 764 # TODO(marcink): this is expensive to calculate, but most secure
765 765 match = crypto_backend.hash_check(auth_token, hashed)
766 766 if match:
767 767 return True
768 768
769 769 return False
770 770
771 771 @property
772 772 def ip_addresses(self):
773 773 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
774 774 return [x.ip_addr for x in ret]
775 775
776 776 @property
777 777 def username_and_name(self):
778 778 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
779 779
780 780 @property
781 781 def username_or_name_or_email(self):
782 782         full_name = self.full_name if self.full_name != ' ' else None
783 783 return self.username or full_name or self.email
784 784
785 785 @property
786 786 def full_name(self):
787 787 return '%s %s' % (self.first_name, self.last_name)
788 788
789 789 @property
790 790 def full_name_or_username(self):
791 791 return ('%s %s' % (self.first_name, self.last_name)
792 792 if (self.first_name and self.last_name) else self.username)
793 793
794 794 @property
795 795 def full_contact(self):
796 796 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
797 797
798 798 @property
799 799 def short_contact(self):
800 800 return '%s %s' % (self.first_name, self.last_name)
801 801
802 802 @property
803 803 def is_admin(self):
804 804 return self.admin
805 805
806 806 def AuthUser(self, **kwargs):
807 807 """
808 808 Returns instance of AuthUser for this user
809 809 """
810 810 from rhodecode.lib.auth import AuthUser
811 811 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
812 812
813 813 @hybrid_property
814 814 def user_data(self):
815 815 if not self._user_data:
816 816 return {}
817 817
818 818 try:
819 819 return json.loads(self._user_data)
820 820 except TypeError:
821 821 return {}
822 822
823 823 @user_data.setter
824 824 def user_data(self, val):
825 825 if not isinstance(val, dict):
826 826 raise Exception('user_data must be dict, got %s' % type(val))
827 827 try:
828 828 self._user_data = json.dumps(val)
829 829 except Exception:
830 830 log.error(traceback.format_exc())
831 831
832 832 @classmethod
833 833 def get_by_username(cls, username, case_insensitive=False,
834 834 cache=False, identity_cache=False):
835 835 session = Session()
836 836
837 837 if case_insensitive:
838 838 q = cls.query().filter(
839 839 func.lower(cls.username) == func.lower(username))
840 840 else:
841 841 q = cls.query().filter(cls.username == username)
842 842
843 843 if cache:
844 844 if identity_cache:
845 845 val = cls.identity_cache(session, 'username', username)
846 846 if val:
847 847 return val
848 848 else:
849 849 cache_key = "get_user_by_name_%s" % _hash_key(username)
850 850 q = q.options(
851 851 FromCache("sql_cache_short", cache_key))
852 852
853 853 return q.scalar()
854 854
855 855 @classmethod
856 856 def get_by_auth_token(cls, auth_token, cache=False):
857 857 q = UserApiKeys.query()\
858 858 .filter(UserApiKeys.api_key == auth_token)\
859 859 .filter(or_(UserApiKeys.expires == -1,
860 860 UserApiKeys.expires >= time.time()))
861 861 if cache:
862 862 q = q.options(
863 863 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
864 864
865 865 match = q.first()
866 866 if match:
867 867 return match.user
868 868
869 869 @classmethod
870 870 def get_by_email(cls, email, case_insensitive=False, cache=False):
871 871
872 872 if case_insensitive:
873 873 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
874 874
875 875 else:
876 876 q = cls.query().filter(cls.email == email)
877 877
878 878 email_key = _hash_key(email)
879 879 if cache:
880 880 q = q.options(
881 881 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
882 882
883 883 ret = q.scalar()
884 884 if ret is None:
885 885 q = UserEmailMap.query()
886 886 # try fetching in alternate email map
887 887 if case_insensitive:
888 888 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
889 889 else:
890 890 q = q.filter(UserEmailMap.email == email)
891 891 q = q.options(joinedload(UserEmailMap.user))
892 892 if cache:
893 893 q = q.options(
894 894 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
895 895 ret = getattr(q.scalar(), 'user', None)
896 896
897 897 return ret
898 898
899 899 @classmethod
900 900 def get_from_cs_author(cls, author):
901 901 """
902 902 Tries to get User objects out of commit author string
903 903
904 904 :param author:
905 905 """
906 906 from rhodecode.lib.helpers import email, author_name
907 907 # Valid email in the attribute passed, see if they're in the system
908 908 _email = email(author)
909 909 if _email:
910 910 user = cls.get_by_email(_email, case_insensitive=True)
911 911 if user:
912 912 return user
913 913 # Maybe we can match by username?
914 914 _author = author_name(author)
915 915 user = cls.get_by_username(_author, case_insensitive=True)
916 916 if user:
917 917 return user
918 918
919 919 def update_userdata(self, **kwargs):
920 920 usr = self
921 921 old = usr.user_data
922 922 old.update(**kwargs)
923 923 usr.user_data = old
924 924 Session().add(usr)
925 925         log.debug('updated userdata with %s', kwargs)
926 926
927 927 def update_lastlogin(self):
928 928 """Update user lastlogin"""
929 929 self.last_login = datetime.datetime.now()
930 930 Session().add(self)
931 931 log.debug('updated user %s lastlogin', self.username)
932 932
933 933 def update_password(self, new_password):
934 934 from rhodecode.lib.auth import get_crypt_password
935 935
936 936 self.password = get_crypt_password(new_password)
937 937 Session().add(self)
938 938
939 939 @classmethod
940 940 def get_first_super_admin(cls):
941 941 user = User.query().filter(User.admin == true()).first()
942 942 if user is None:
943 943 raise Exception('FATAL: Missing administrative account!')
944 944 return user
945 945
946 946 @classmethod
947 947 def get_all_super_admins(cls):
948 948 """
949 949 Returns all admin accounts sorted by username
950 950 """
951 951 return User.query().filter(User.admin == true())\
952 952 .order_by(User.username.asc()).all()
953 953
954 954 @classmethod
955 955 def get_default_user(cls, cache=False, refresh=False):
956 956 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
957 957 if user is None:
958 958 raise Exception('FATAL: Missing default account!')
959 959 if refresh:
960 960 # The default user might be based on outdated state which
961 961 # has been loaded from the cache.
962 962 # A call to refresh() ensures that the
963 963 # latest state from the database is used.
964 964 Session().refresh(user)
965 965 return user
966 966
967 967 def _get_default_perms(self, user, suffix=''):
968 968 from rhodecode.model.permission import PermissionModel
969 969 return PermissionModel().get_default_perms(user.user_perms, suffix)
970 970
971 971 def get_default_perms(self, suffix=''):
972 972 return self._get_default_perms(self, suffix)
973 973
974 974 def get_api_data(self, include_secrets=False, details='full'):
975 975 """
976 976 Common function for generating user related data for API
977 977
978 978 :param include_secrets: By default secrets in the API data will be replaced
979 979 by a placeholder value to prevent exposing this data by accident. In case
980 980 this data shall be exposed, set this flag to ``True``.
981 981
982 982 :param details: details can be 'basic|full' basic gives only a subset of
983 983 the available user information that includes user_id, name and emails.
984 984 """
985 985 user = self
986 986 user_data = self.user_data
987 987 data = {
988 988 'user_id': user.user_id,
989 989 'username': user.username,
990 990 'firstname': user.name,
991 991 'lastname': user.lastname,
992 992 'email': user.email,
993 993 'emails': user.emails,
994 994 }
995 995 if details == 'basic':
996 996 return data
997 997
998 998 auth_token_length = 40
999 999 auth_token_replacement = '*' * auth_token_length
1000 1000
1001 1001 extras = {
1002 1002 'auth_tokens': [auth_token_replacement],
1003 1003 'active': user.active,
1004 1004 'admin': user.admin,
1005 1005 'extern_type': user.extern_type,
1006 1006 'extern_name': user.extern_name,
1007 1007 'last_login': user.last_login,
1008 1008 'last_activity': user.last_activity,
1009 1009 'ip_addresses': user.ip_addresses,
1010 1010 'language': user_data.get('language')
1011 1011 }
1012 1012 data.update(extras)
1013 1013
1014 1014 if include_secrets:
1015 1015 data['auth_tokens'] = user.auth_tokens
1016 1016 return data
1017 1017
1018 1018 def __json__(self):
1019 1019 data = {
1020 1020 'full_name': self.full_name,
1021 1021 'full_name_or_username': self.full_name_or_username,
1022 1022 'short_contact': self.short_contact,
1023 1023 'full_contact': self.full_contact,
1024 1024 }
1025 1025 data.update(self.get_api_data())
1026 1026 return data
1027 1027
1028 1028
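
# Illustrative sketches for two User helpers defined above, assuming an
# initialized Session: authenticate_by_token() accepts both plain and hashed
# auth tokens and honours repository-scoped tokens, while get_from_cs_author()
# resolves a commit author string first by e-mail (including alternate e-mails
# from UserEmailMap) and then by username.
def _example_token_auth(username, auth_token, repo_id=None):
    user = User.get_by_username(username)
    if user is None:
        return False
    return user.authenticate_by_token(
        auth_token, roles=[UserApiKeys.ROLE_VCS], scope_repo_id=repo_id)


def _example_resolve_commit_author():
    # returns a User or None when neither e-mail nor username matches
    return User.get_from_cs_author('Jane Doe <jane@example.com>')
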
1029 1029 class UserApiKeys(Base, BaseModel):
1030 1030 __tablename__ = 'user_api_keys'
1031 1031 __table_args__ = (
1032 1032 Index('uak_api_key_idx', 'api_key', unique=True),
1033 1033 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1034 1034 base_table_args
1035 1035 )
1036 1036 __mapper_args__ = {}
1037 1037
1038 1038 # ApiKey role
1039 1039 ROLE_ALL = 'token_role_all'
1040 1040 ROLE_HTTP = 'token_role_http'
1041 1041 ROLE_VCS = 'token_role_vcs'
1042 1042 ROLE_API = 'token_role_api'
1043 1043 ROLE_FEED = 'token_role_feed'
1044 1044 ROLE_PASSWORD_RESET = 'token_password_reset'
1045 1045
1046 1046 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1047 1047
1048 1048 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1049 1049 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1050 1050 api_key = Column("api_key", String(255), nullable=False, unique=True)
1051 1051 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1052 1052 expires = Column('expires', Float(53), nullable=False)
1053 1053 role = Column('role', String(255), nullable=True)
1054 1054 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1055 1055
1056 1056 # scope columns
1057 1057 repo_id = Column(
1058 1058 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1059 1059 nullable=True, unique=None, default=None)
1060 1060 repo = relationship('Repository', lazy='joined')
1061 1061
1062 1062 repo_group_id = Column(
1063 1063 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1064 1064 nullable=True, unique=None, default=None)
1065 1065 repo_group = relationship('RepoGroup', lazy='joined')
1066 1066
1067 1067 user = relationship('User', lazy='joined')
1068 1068
1069 1069 def __unicode__(self):
1070 1070 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1071 1071
1072 1072 def __json__(self):
1073 1073 data = {
1074 1074 'auth_token': self.api_key,
1075 1075 'role': self.role,
1076 1076 'scope': self.scope_humanized,
1077 1077 'expired': self.expired
1078 1078 }
1079 1079 return data
1080 1080
1081 1081 def get_api_data(self, include_secrets=False):
1082 1082 data = self.__json__()
1083 1083 if include_secrets:
1084 1084 return data
1085 1085 else:
1086 1086 data['auth_token'] = self.token_obfuscated
1087 1087 return data
1088 1088
1089 1089 @hybrid_property
1090 1090 def description_safe(self):
1091 1091 from rhodecode.lib import helpers as h
1092 1092 return h.escape(self.description)
1093 1093
1094 1094 @property
1095 1095 def expired(self):
1096 1096 if self.expires == -1:
1097 1097 return False
1098 1098 return time.time() > self.expires
1099 1099
1100 1100 @classmethod
1101 1101 def _get_role_name(cls, role):
1102 1102 return {
1103 1103 cls.ROLE_ALL: _('all'),
1104 1104 cls.ROLE_HTTP: _('http/web interface'),
1105 1105 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1106 1106 cls.ROLE_API: _('api calls'),
1107 1107 cls.ROLE_FEED: _('feed access'),
1108 1108 }.get(role, role)
1109 1109
1110 1110 @property
1111 1111 def role_humanized(self):
1112 1112 return self._get_role_name(self.role)
1113 1113
1114 1114 def _get_scope(self):
1115 1115 if self.repo:
1116 1116 return repr(self.repo)
1117 1117 if self.repo_group:
1118 1118 return repr(self.repo_group) + ' (recursive)'
1119 1119 return 'global'
1120 1120
1121 1121 @property
1122 1122 def scope_humanized(self):
1123 1123 return self._get_scope()
1124 1124
1125 1125 @property
1126 1126 def token_obfuscated(self):
1127 1127 if self.api_key:
1128 1128 return self.api_key[:4] + "****"
1129 1129
1130 1130
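
# Illustrative sketch: expires == -1 marks a token that never expires, and
# get_api_data() returns the obfuscated token unless secrets are explicitly
# requested.
def _example_token_exposure(token):
    obfuscated = token.get_api_data()['auth_token']                  # e.g. 'abcd****'
    full = token.get_api_data(include_secrets=True)['auth_token']    # real token
    return token.expired, obfuscated, full
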
1131 1131 class UserEmailMap(Base, BaseModel):
1132 1132 __tablename__ = 'user_email_map'
1133 1133 __table_args__ = (
1134 1134 Index('uem_email_idx', 'email'),
1135 1135 UniqueConstraint('email'),
1136 1136 base_table_args
1137 1137 )
1138 1138 __mapper_args__ = {}
1139 1139
1140 1140 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1141 1141 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1142 1142 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1143 1143 user = relationship('User', lazy='joined')
1144 1144
1145 1145 @validates('_email')
1146 1146 def validate_email(self, key, email):
1147 1147 # check if this email is not main one
1148 1148 main_email = Session().query(User).filter(User.email == email).scalar()
1149 1149 if main_email is not None:
1150 1150             raise AttributeError('email %s is present in user table' % email)
1151 1151 return email
1152 1152
1153 1153 @hybrid_property
1154 1154 def email(self):
1155 1155 return self._email
1156 1156
1157 1157 @email.setter
1158 1158 def email(self, val):
1159 1159 self._email = val.lower() if val else None
1160 1160
1161 1161
1162 1162 class UserIpMap(Base, BaseModel):
1163 1163 __tablename__ = 'user_ip_map'
1164 1164 __table_args__ = (
1165 1165 UniqueConstraint('user_id', 'ip_addr'),
1166 1166 base_table_args
1167 1167 )
1168 1168 __mapper_args__ = {}
1169 1169
1170 1170 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1171 1171 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1172 1172 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1173 1173 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1174 1174 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1175 1175 user = relationship('User', lazy='joined')
1176 1176
1177 1177 @hybrid_property
1178 1178 def description_safe(self):
1179 1179 from rhodecode.lib import helpers as h
1180 1180 return h.escape(self.description)
1181 1181
1182 1182 @classmethod
1183 1183 def _get_ip_range(cls, ip_addr):
1184 1184 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1185 1185 return [str(net.network_address), str(net.broadcast_address)]
1186 1186
1187 1187 def __json__(self):
1188 1188 return {
1189 1189 'ip_addr': self.ip_addr,
1190 1190 'ip_range': self._get_ip_range(self.ip_addr),
1191 1191 }
1192 1192
1193 1193 def __unicode__(self):
1194 1194 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1195 1195 self.user_id, self.ip_addr)
1196 1196
1197 1197
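
# Illustrative sketch: whitelist entries may be given in CIDR notation;
# _get_ip_range() expands them to the first and last address of the network.
def _example_ip_range():
    return UserIpMap._get_ip_range('192.168.1.0/24')
    # -> ['192.168.1.0', '192.168.1.255']
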
1198 1198 class UserSshKeys(Base, BaseModel):
1199 1199 __tablename__ = 'user_ssh_keys'
1200 1200 __table_args__ = (
1201 1201 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1202 1202
1203 1203 UniqueConstraint('ssh_key_fingerprint'),
1204 1204
1205 1205 base_table_args
1206 1206 )
1207 1207 __mapper_args__ = {}
1208 1208
1209 1209 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1210 1210 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1211 1211 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1212 1212
1213 1213 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1214 1214
1215 1215 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1216 1216 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1217 1217 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1218 1218
1219 1219 user = relationship('User', lazy='joined')
1220 1220
1221 1221 def __json__(self):
1222 1222 data = {
1223 1223 'ssh_fingerprint': self.ssh_key_fingerprint,
1224 1224 'description': self.description,
1225 1225 'created_on': self.created_on
1226 1226 }
1227 1227 return data
1228 1228
1229 1229 def get_api_data(self):
1230 1230 data = self.__json__()
1231 1231 return data
1232 1232
1233 1233
1234 1234 class UserLog(Base, BaseModel):
1235 1235 __tablename__ = 'user_logs'
1236 1236 __table_args__ = (
1237 1237 base_table_args,
1238 1238 )
1239 1239
1240 1240 VERSION_1 = 'v1'
1241 1241 VERSION_2 = 'v2'
1242 1242 VERSIONS = [VERSION_1, VERSION_2]
1243 1243
1244 1244 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1245 1245 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1246 1246 username = Column("username", String(255), nullable=True, unique=None, default=None)
1247 1247 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1248 1248 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1249 1249 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1250 1250 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1251 1251 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1252 1252
1253 1253 version = Column("version", String(255), nullable=True, default=VERSION_1)
1254 1254 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1255 1255 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1256 1256
1257 1257 def __unicode__(self):
1258 1258 return u"<%s('id:%s:%s')>" % (
1259 1259 self.__class__.__name__, self.repository_name, self.action)
1260 1260
1261 1261 def __json__(self):
1262 1262 return {
1263 1263 'user_id': self.user_id,
1264 1264 'username': self.username,
1265 1265 'repository_id': self.repository_id,
1266 1266 'repository_name': self.repository_name,
1267 1267 'user_ip': self.user_ip,
1268 1268 'action_date': self.action_date,
1269 1269 'action': self.action,
1270 1270 }
1271 1271
1272 1272 @hybrid_property
1273 1273 def entry_id(self):
1274 1274 return self.user_log_id
1275 1275
1276 1276 @property
1277 1277 def action_as_day(self):
1278 1278 return datetime.date(*self.action_date.timetuple()[:3])
1279 1279
1280 1280 user = relationship('User')
1281 1281 repository = relationship('Repository', cascade='')
1282 1282
1283 1283
1284 1284 class UserGroup(Base, BaseModel):
1285 1285 __tablename__ = 'users_groups'
1286 1286 __table_args__ = (
1287 1287 base_table_args,
1288 1288 )
1289 1289
1290 1290 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1291 1291 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1292 1292 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1293 1293 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1294 1294 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1295 1295 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1296 1296 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1297 1297 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1298 1298
1299 1299 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1300 1300 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1301 1301 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1302 1302 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1303 1303 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1304 1304 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1305 1305
1306 1306 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1307 1307 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1308 1308
1309 1309 @classmethod
1310 1310 def _load_group_data(cls, column):
1311 1311 if not column:
1312 1312 return {}
1313 1313
1314 1314 try:
1315 1315 return json.loads(column) or {}
1316 1316 except TypeError:
1317 1317 return {}
1318 1318
1319 1319 @hybrid_property
1320 1320 def description_safe(self):
1321 1321 from rhodecode.lib import helpers as h
1322 1322 return h.escape(self.user_group_description)
1323 1323
1324 1324 @hybrid_property
1325 1325 def group_data(self):
1326 1326 return self._load_group_data(self._group_data)
1327 1327
1328 1328 @group_data.expression
1329 1329 def group_data(self, **kwargs):
1330 1330 return self._group_data
1331 1331
1332 1332 @group_data.setter
1333 1333 def group_data(self, val):
1334 1334 try:
1335 1335 self._group_data = json.dumps(val)
1336 1336 except Exception:
1337 1337 log.error(traceback.format_exc())
1338 1338
1339 1339 @classmethod
1340 1340 def _load_sync(cls, group_data):
1341 1341 if group_data:
1342 1342 return group_data.get('extern_type')
1343 1343
1344 1344 @property
1345 1345 def sync(self):
1346 1346 return self._load_sync(self.group_data)
1347 1347
1348 1348 def __unicode__(self):
1349 1349 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1350 1350 self.users_group_id,
1351 1351 self.users_group_name)
1352 1352
1353 1353 @classmethod
1354 1354 def get_by_group_name(cls, group_name, cache=False,
1355 1355 case_insensitive=False):
1356 1356 if case_insensitive:
1357 1357 q = cls.query().filter(func.lower(cls.users_group_name) ==
1358 1358 func.lower(group_name))
1359 1359
1360 1360 else:
1361 1361 q = cls.query().filter(cls.users_group_name == group_name)
1362 1362 if cache:
1363 1363 q = q.options(
1364 1364 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1365 1365 return q.scalar()
1366 1366
1367 1367 @classmethod
1368 1368 def get(cls, user_group_id, cache=False):
1369 1369 if not user_group_id:
1370 1370 return
1371 1371
1372 1372 user_group = cls.query()
1373 1373 if cache:
1374 1374 user_group = user_group.options(
1375 1375 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1376 1376 return user_group.get(user_group_id)
1377 1377
1378 1378 def permissions(self, with_admins=True, with_owner=True):
1379 1379 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1380 1380 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1381 1381 joinedload(UserUserGroupToPerm.user),
1382 1382 joinedload(UserUserGroupToPerm.permission),)
1383 1383
1384 1384         # get owners and admins and their permissions. We re-write the
1385 1385         # SQLAlchemy objects into plain AttributeDict copies because the
1386 1386         # SQLAlchemy session keeps a global reference, and changing one object
1387 1387         # would propagate to all others. This means that if an admin is also an
1388 1388         # owner, setting admin_row on one row would otherwise change both objects
1389 1389 perm_rows = []
1390 1390 for _usr in q.all():
1391 1391 usr = AttributeDict(_usr.user.get_dict())
1392 1392 usr.permission = _usr.permission.permission_name
1393 1393 perm_rows.append(usr)
1394 1394
1395 1395         # sort the perm rows so the 'default' user comes first, then by
1396 1396         # admin, write, read, none permission, sorted alphabetically within
1397 1397         # each group
1398 1398 perm_rows = sorted(perm_rows, key=display_user_sort)
1399 1399
1400 1400 _admin_perm = 'usergroup.admin'
1401 1401 owner_row = []
1402 1402 if with_owner:
1403 1403 usr = AttributeDict(self.user.get_dict())
1404 1404 usr.owner_row = True
1405 1405 usr.permission = _admin_perm
1406 1406 owner_row.append(usr)
1407 1407
1408 1408 super_admin_rows = []
1409 1409 if with_admins:
1410 1410 for usr in User.get_all_super_admins():
1411 1411 # if this admin is also owner, don't double the record
1412 1412 if usr.user_id == owner_row[0].user_id:
1413 1413 owner_row[0].admin_row = True
1414 1414 else:
1415 1415 usr = AttributeDict(usr.get_dict())
1416 1416 usr.admin_row = True
1417 1417 usr.permission = _admin_perm
1418 1418 super_admin_rows.append(usr)
1419 1419
1420 1420 return super_admin_rows + owner_row + perm_rows
1421 1421
1422 1422 def permission_user_groups(self):
1423 1423 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1424 1424 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1425 1425 joinedload(UserGroupUserGroupToPerm.target_user_group),
1426 1426 joinedload(UserGroupUserGroupToPerm.permission),)
1427 1427
1428 1428 perm_rows = []
1429 1429 for _user_group in q.all():
1430 1430 usr = AttributeDict(_user_group.user_group.get_dict())
1431 1431 usr.permission = _user_group.permission.permission_name
1432 1432 perm_rows.append(usr)
1433 1433
1434 1434 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1435 1435 return perm_rows
1436 1436
1437 1437 def _get_default_perms(self, user_group, suffix=''):
1438 1438 from rhodecode.model.permission import PermissionModel
1439 1439 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1440 1440
1441 1441 def get_default_perms(self, suffix=''):
1442 1442 return self._get_default_perms(self, suffix)
1443 1443
1444 1444 def get_api_data(self, with_group_members=True, include_secrets=False):
1445 1445 """
1446 1446 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1447 1447 basically forwarded.
1448 1448
1449 1449 """
1450 1450 user_group = self
1451 1451 data = {
1452 1452 'users_group_id': user_group.users_group_id,
1453 1453 'group_name': user_group.users_group_name,
1454 1454 'group_description': user_group.user_group_description,
1455 1455 'active': user_group.users_group_active,
1456 1456 'owner': user_group.user.username,
1457 1457 'sync': user_group.sync,
1458 1458 'owner_email': user_group.user.email,
1459 1459 }
1460 1460
1461 1461 if with_group_members:
1462 1462 users = []
1463 1463 for user in user_group.members:
1464 1464 user = user.user
1465 1465 users.append(user.get_api_data(include_secrets=include_secrets))
1466 1466 data['users'] = users
1467 1467
1468 1468 return data
1469 1469
1470 1470
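
# Illustrative sketch: UserGroup.permissions() above returns plain
# AttributeDict rows combining super-admins, the owner and explicit grants,
# already ordered by display_user_sort(); get_api_data() adds the member list
# when with_group_members is left enabled.
def _example_user_group_overview(group_name):
    group = UserGroup.get_by_group_name(group_name, case_insensitive=True)
    if group is None:
        return None
    rows = group.permissions(with_admins=True, with_owner=True)
    return {
        'permissions': [(row.username, row.permission) for row in rows],
        'api_data': group.get_api_data(with_group_members=True),
    }
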
1471 1471 class UserGroupMember(Base, BaseModel):
1472 1472 __tablename__ = 'users_groups_members'
1473 1473 __table_args__ = (
1474 1474 base_table_args,
1475 1475 )
1476 1476
1477 1477 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1478 1478 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1479 1479 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1480 1480
1481 1481 user = relationship('User', lazy='joined')
1482 1482 users_group = relationship('UserGroup')
1483 1483
1484 1484 def __init__(self, gr_id='', u_id=''):
1485 1485 self.users_group_id = gr_id
1486 1486 self.user_id = u_id
1487 1487
1488 1488
1489 1489 class RepositoryField(Base, BaseModel):
1490 1490 __tablename__ = 'repositories_fields'
1491 1491 __table_args__ = (
1492 1492 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1493 1493 base_table_args,
1494 1494 )
1495 1495
1496 1496     PREFIX = 'ex_'  # prefix used in forms to avoid conflicts with already existing fields
1497 1497
1498 1498 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1499 1499 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1500 1500 field_key = Column("field_key", String(250))
1501 1501 field_label = Column("field_label", String(1024), nullable=False)
1502 1502 field_value = Column("field_value", String(10000), nullable=False)
1503 1503 field_desc = Column("field_desc", String(1024), nullable=False)
1504 1504 field_type = Column("field_type", String(255), nullable=False, unique=None)
1505 1505 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1506 1506
1507 1507 repository = relationship('Repository')
1508 1508
1509 1509 @property
1510 1510 def field_key_prefixed(self):
1511 1511 return 'ex_%s' % self.field_key
1512 1512
1513 1513 @classmethod
1514 1514 def un_prefix_key(cls, key):
1515 1515 if key.startswith(cls.PREFIX):
1516 1516 return key[len(cls.PREFIX):]
1517 1517 return key
1518 1518
1519 1519 @classmethod
1520 1520 def get_by_key_name(cls, key, repo):
1521 1521 row = cls.query()\
1522 1522 .filter(cls.repository == repo)\
1523 1523 .filter(cls.field_key == key).scalar()
1524 1524 return row
1525 1525
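# Illustrative sketch of the prefix handling above; the field key is made up:
#
#   >>> RepositoryField.un_prefix_key('ex_license')
#   'license'
#   >>> RepositoryField.un_prefix_key('license')   # unprefixed keys pass through
#   'license'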
1526 1526
1527 1527 class Repository(Base, BaseModel):
1528 1528 __tablename__ = 'repositories'
1529 1529 __table_args__ = (
1530 1530 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1531 1531 base_table_args,
1532 1532 )
1533 1533 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1534 1534 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1535 1535 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1536 1536
1537 1537 STATE_CREATED = 'repo_state_created'
1538 1538 STATE_PENDING = 'repo_state_pending'
1539 1539 STATE_ERROR = 'repo_state_error'
1540 1540
1541 1541 LOCK_AUTOMATIC = 'lock_auto'
1542 1542 LOCK_API = 'lock_api'
1543 1543 LOCK_WEB = 'lock_web'
1544 1544 LOCK_PULL = 'lock_pull'
1545 1545
1546 1546 NAME_SEP = URL_SEP
1547 1547
1548 1548 repo_id = Column(
1549 1549 "repo_id", Integer(), nullable=False, unique=True, default=None,
1550 1550 primary_key=True)
1551 1551 _repo_name = Column(
1552 1552 "repo_name", Text(), nullable=False, default=None)
1553 1553 _repo_name_hash = Column(
1554 1554 "repo_name_hash", String(255), nullable=False, unique=True)
1555 1555 repo_state = Column("repo_state", String(255), nullable=True)
1556 1556
1557 1557 clone_uri = Column(
1558 1558 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1559 1559 default=None)
1560 1560 push_uri = Column(
1561 1561 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1562 1562 default=None)
1563 1563 repo_type = Column(
1564 1564 "repo_type", String(255), nullable=False, unique=False, default=None)
1565 1565 user_id = Column(
1566 1566 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1567 1567 unique=False, default=None)
1568 1568 private = Column(
1569 1569 "private", Boolean(), nullable=True, unique=None, default=None)
1570 1570 enable_statistics = Column(
1571 1571 "statistics", Boolean(), nullable=True, unique=None, default=True)
1572 1572 enable_downloads = Column(
1573 1573 "downloads", Boolean(), nullable=True, unique=None, default=True)
1574 1574 description = Column(
1575 1575 "description", String(10000), nullable=True, unique=None, default=None)
1576 1576 created_on = Column(
1577 1577 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1578 1578 default=datetime.datetime.now)
1579 1579 updated_on = Column(
1580 1580 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1581 1581 default=datetime.datetime.now)
1582 1582 _landing_revision = Column(
1583 1583 "landing_revision", String(255), nullable=False, unique=False,
1584 1584 default=None)
1585 1585 enable_locking = Column(
1586 1586 "enable_locking", Boolean(), nullable=False, unique=None,
1587 1587 default=False)
1588 1588 _locked = Column(
1589 1589 "locked", String(255), nullable=True, unique=False, default=None)
1590 1590 _changeset_cache = Column(
1591 1591 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1592 1592
1593 1593 fork_id = Column(
1594 1594 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1595 1595 nullable=True, unique=False, default=None)
1596 1596 group_id = Column(
1597 1597 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1598 1598 unique=False, default=None)
1599 1599
1600 1600 user = relationship('User', lazy='joined')
1601 1601 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1602 1602 group = relationship('RepoGroup', lazy='joined')
1603 1603 repo_to_perm = relationship(
1604 1604 'UserRepoToPerm', cascade='all',
1605 1605 order_by='UserRepoToPerm.repo_to_perm_id')
1606 1606 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1607 1607 stats = relationship('Statistics', cascade='all', uselist=False)
1608 1608
1609 1609 followers = relationship(
1610 1610 'UserFollowing',
1611 1611 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1612 1612 cascade='all')
1613 1613 extra_fields = relationship(
1614 1614 'RepositoryField', cascade="all, delete, delete-orphan")
1615 1615 logs = relationship('UserLog')
1616 1616 comments = relationship(
1617 1617 'ChangesetComment', cascade="all, delete, delete-orphan")
1618 1618 pull_requests_source = relationship(
1619 1619 'PullRequest',
1620 1620 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1621 1621 cascade="all, delete, delete-orphan")
1622 1622 pull_requests_target = relationship(
1623 1623 'PullRequest',
1624 1624 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1625 1625 cascade="all, delete, delete-orphan")
1626 1626 ui = relationship('RepoRhodeCodeUi', cascade="all")
1627 1627 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1628 1628 integrations = relationship('Integration',
1629 1629 cascade="all, delete, delete-orphan")
1630 1630
1631 1631 scoped_tokens = relationship('UserApiKeys', cascade="all")
1632 1632
1633 1633 def __unicode__(self):
1634 1634 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1635 1635 safe_unicode(self.repo_name))
1636 1636
1637 1637 @hybrid_property
1638 1638 def description_safe(self):
1639 1639 from rhodecode.lib import helpers as h
1640 1640 return h.escape(self.description)
1641 1641
1642 1642 @hybrid_property
1643 1643 def landing_rev(self):
1644 1644 # always should return [rev_type, rev]
1645 1645 if self._landing_revision:
1646 1646 _rev_info = self._landing_revision.split(':')
1647 1647 if len(_rev_info) < 2:
1648 1648 _rev_info.insert(0, 'rev')
1649 1649 return [_rev_info[0], _rev_info[1]]
1650 1650 return [None, None]
1651 1651
1652 1652 @landing_rev.setter
1653 1653 def landing_rev(self, val):
1654 1654 if ':' not in val:
1655 1655 raise ValueError('value must be delimited with `:` and consist '
1656 1656 'of <rev_type>:<rev>, got %s instead' % val)
1657 1657 self._landing_revision = val
1658 1658
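# Illustrative sketch of the '<rev_type>:<rev>' storage format used by the
# landing_rev property above; the values are made up:
#
#   >>> repo.landing_rev = 'branch:default'
#   >>> repo.landing_rev
#   ['branch', 'default']
#   >>> repo.landing_rev = 'default'   # missing ':' raises ValueError
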
1659 1659 @hybrid_property
1660 1660 def locked(self):
1661 1661 if self._locked:
1662 1662 user_id, timelocked, reason = self._locked.split(':')
1663 1663 lock_values = int(user_id), timelocked, reason
1664 1664 else:
1665 1665 lock_values = [None, None, None]
1666 1666 return lock_values
1667 1667
1668 1668 @locked.setter
1669 1669 def locked(self, val):
1670 1670 if val and isinstance(val, (list, tuple)):
1671 1671 self._locked = ':'.join(map(str, val))
1672 1672 else:
1673 1673 self._locked = None
1674 1674
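# Illustrative sketch: a lock is serialized as 'user_id:timestamp:reason' and
# read back as (user_id, timestamp, reason); the user id below is made up:
#
#   >>> repo.locked = [2, time.time(), Repository.LOCK_API]
#   >>> repo.locked[0], repo.locked[2]
#   (2, 'lock_api')
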
1675 1675 @hybrid_property
1676 1676 def changeset_cache(self):
1677 1677 from rhodecode.lib.vcs.backends.base import EmptyCommit
1678 1678 dummy = EmptyCommit().__json__()
1679 1679 if not self._changeset_cache:
1680 1680 return dummy
1681 1681 try:
1682 1682 return json.loads(self._changeset_cache)
1683 1683 except TypeError:
1684 1684 return dummy
1685 1685 except Exception:
1686 1686 log.error(traceback.format_exc())
1687 1687 return dummy
1688 1688
1689 1689 @changeset_cache.setter
1690 1690 def changeset_cache(self, val):
1691 1691 try:
1692 1692 self._changeset_cache = json.dumps(val)
1693 1693 except Exception:
1694 1694 log.error(traceback.format_exc())
1695 1695
1696 1696 @hybrid_property
1697 1697 def repo_name(self):
1698 1698 return self._repo_name
1699 1699
1700 1700 @repo_name.setter
1701 1701 def repo_name(self, value):
1702 1702 self._repo_name = value
1703 1703 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1704 1704
1705 1705 @classmethod
1706 1706 def normalize_repo_name(cls, repo_name):
1707 1707 """
1708 1708 Normalizes an OS-specific repo_name to the format stored internally in the
1709 1709 database, using URL_SEP
1710 1710
1711 1711 :param cls:
1712 1712 :param repo_name:
1713 1713 """
1714 1714 return cls.NAME_SEP.join(repo_name.split(os.sep))
1715 1715
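# Illustrative sketch: on POSIX systems os.sep is already '/', so this is a
# no-op there; on Windows it rewrites back-slashes (path below is made up):
#
#   >>> Repository.normalize_repo_name('group\\sub\\repo')   # os.sep == '\\'
#   'group/sub/repo'
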
1716 1716 @classmethod
1717 1717 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1718 1718 session = Session()
1719 1719 q = session.query(cls).filter(cls.repo_name == repo_name)
1720 1720
1721 1721 if cache:
1722 1722 if identity_cache:
1723 1723 val = cls.identity_cache(session, 'repo_name', repo_name)
1724 1724 if val:
1725 1725 return val
1726 1726 else:
1727 1727 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1728 1728 q = q.options(
1729 1729 FromCache("sql_cache_short", cache_key))
1730 1730
1731 1731 return q.scalar()
1732 1732
1733 1733 @classmethod
1734 1734 def get_by_id_or_repo_name(cls, repoid):
1735 1735 if isinstance(repoid, (int, long)):
1736 1736 try:
1737 1737 repo = cls.get(repoid)
1738 1738 except ValueError:
1739 1739 repo = None
1740 1740 else:
1741 1741 repo = cls.get_by_repo_name(repoid)
1742 1742 return repo
1743 1743
1744 1744 @classmethod
1745 1745 def get_by_full_path(cls, repo_full_path):
1746 1746 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1747 1747 repo_name = cls.normalize_repo_name(repo_name)
1748 1748 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1749 1749
1750 1750 @classmethod
1751 1751 def get_repo_forks(cls, repo_id):
1752 1752 return cls.query().filter(Repository.fork_id == repo_id)
1753 1753
1754 1754 @classmethod
1755 1755 def base_path(cls):
1756 1756 """
1757 1757 Returns the base path where all repos are stored
1758 1758
1759 1759 :param cls:
1760 1760 """
1761 1761 q = Session().query(RhodeCodeUi)\
1762 1762 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1763 1763 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1764 1764 return q.one().ui_value
1765 1765
1766 1766 @classmethod
1767 1767 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1768 1768 case_insensitive=True):
1769 1769 q = Repository.query()
1770 1770
1771 1771 if not isinstance(user_id, Optional):
1772 1772 q = q.filter(Repository.user_id == user_id)
1773 1773
1774 1774 if not isinstance(group_id, Optional):
1775 1775 q = q.filter(Repository.group_id == group_id)
1776 1776
1777 1777 if case_insensitive:
1778 1778 q = q.order_by(func.lower(Repository.repo_name))
1779 1779 else:
1780 1780 q = q.order_by(Repository.repo_name)
1781 1781 return q.all()
1782 1782
1783 1783 @property
1784 1784 def forks(self):
1785 1785 """
1786 1786 Return forks of this repo
1787 1787 """
1788 1788 return Repository.get_repo_forks(self.repo_id)
1789 1789
1790 1790 @property
1791 1791 def parent(self):
1792 1792 """
1793 1793 Returns fork parent
1794 1794 """
1795 1795 return self.fork
1796 1796
1797 1797 @property
1798 1798 def just_name(self):
1799 1799 return self.repo_name.split(self.NAME_SEP)[-1]
1800 1800
1801 1801 @property
1802 1802 def groups_with_parents(self):
1803 1803 groups = []
1804 1804 if self.group is None:
1805 1805 return groups
1806 1806
1807 1807 cur_gr = self.group
1808 1808 groups.insert(0, cur_gr)
1809 1809 while 1:
1810 1810 gr = getattr(cur_gr, 'parent_group', None)
1811 1811 cur_gr = cur_gr.parent_group
1812 1812 if gr is None:
1813 1813 break
1814 1814 groups.insert(0, gr)
1815 1815
1816 1816 return groups
1817 1817
1818 1818 @property
1819 1819 def groups_and_repo(self):
1820 1820 return self.groups_with_parents, self
1821 1821
1822 1822 @LazyProperty
1823 1823 def repo_path(self):
1824 1824 """
1825 1825 Returns the full base path for this repository, i.e. where it actually
1826 1826 exists on the filesystem
1827 1827 """
1828 1828 q = Session().query(RhodeCodeUi).filter(
1829 1829 RhodeCodeUi.ui_key == self.NAME_SEP)
1830 1830 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1831 1831 return q.one().ui_value
1832 1832
1833 1833 @property
1834 1834 def repo_full_path(self):
1835 1835 p = [self.repo_path]
1836 1836 # we need to split the name by / since this is how we store the
1837 1837 # names in the database, but that eventually needs to be converted
1838 1838 # into a valid system path
1839 1839 p += self.repo_name.split(self.NAME_SEP)
1840 1840 return os.path.join(*map(safe_unicode, p))
1841 1841
1842 1842 @property
1843 1843 def cache_keys(self):
1844 1844 """
1845 1845 Returns associated cache keys for that repo
1846 1846 """
1847 1847 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
1848 1848 repo_id=self.repo_id)
1849 1849 return CacheKey.query()\
1850 1850 .filter(CacheKey.cache_args == invalidation_namespace)\
1851 1851 .order_by(CacheKey.cache_key)\
1852 1852 .all()
1853 1853
1854 1854 @property
1855 1855 def cached_diffs_relative_dir(self):
1856 1856 """
1857 1857 Return the path of cached diffs relative to the repository store,
1858 1858 used for safe display to users, who shouldn't know the absolute store
1859 1859 path
1860 1860 """
1861 1861 return os.path.join(
1862 1862 os.path.dirname(self.repo_name),
1863 1863 self.cached_diffs_dir.split(os.path.sep)[-1])
1864 1864
1865 1865 @property
1866 1866 def cached_diffs_dir(self):
1867 1867 path = self.repo_full_path
1868 1868 return os.path.join(
1869 1869 os.path.dirname(path),
1870 1870 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1871 1871
1872 1872 def cached_diffs(self):
1873 1873 diff_cache_dir = self.cached_diffs_dir
1874 1874 if os.path.isdir(diff_cache_dir):
1875 1875 return os.listdir(diff_cache_dir)
1876 1876 return []
1877 1877
1878 1878 def shadow_repos(self):
1879 1879 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1880 1880 return [
1881 1881 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1882 1882 if x.startswith(shadow_repos_pattern)]
1883 1883
1884 1884 def get_new_name(self, repo_name):
1885 1885 """
1886 1886 returns new full repository name based on the assigned group and the new name
1887 1887
1888 1888 :param repo_name:
1889 1889 """
1890 1890 path_prefix = self.group.full_path_splitted if self.group else []
1891 1891 return self.NAME_SEP.join(path_prefix + [repo_name])
1892 1892
1893 1893 @property
1894 1894 def _config(self):
1895 1895 """
1896 1896 Returns db based config object.
1897 1897 """
1898 1898 from rhodecode.lib.utils import make_db_config
1899 1899 return make_db_config(clear_session=False, repo=self)
1900 1900
1901 1901 def permissions(self, with_admins=True, with_owner=True):
1902 1902 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1903 1903 q = q.options(joinedload(UserRepoToPerm.repository),
1904 1904 joinedload(UserRepoToPerm.user),
1905 1905 joinedload(UserRepoToPerm.permission),)
1906 1906
1907 1907 # get owners, admins and permissions. We do a trick of re-writing
1908 1908 # objects from sqlalchemy to named-tuples, because the sqlalchemy session
1909 1909 # holds a global reference and changing one object propagates to all
1910 1910 # others. This means that if an admin is also the owner, an admin_row change
1911 1911 # would propagate to both objects
1912 1912 perm_rows = []
1913 1913 for _usr in q.all():
1914 1914 usr = AttributeDict(_usr.user.get_dict())
1915 1915 usr.permission = _usr.permission.permission_name
1916 1916 perm_rows.append(usr)
1917 1917
1918 1918 # filter the perm rows by 'default' first and then sort them by
1919 1919 # admin,write,read,none permissions sorted again alphabetically in
1920 1920 # each group
1921 1921 perm_rows = sorted(perm_rows, key=display_user_sort)
1922 1922
1923 1923 _admin_perm = 'repository.admin'
1924 1924 owner_row = []
1925 1925 if with_owner:
1926 1926 usr = AttributeDict(self.user.get_dict())
1927 1927 usr.owner_row = True
1928 1928 usr.permission = _admin_perm
1929 1929 owner_row.append(usr)
1930 1930
1931 1931 super_admin_rows = []
1932 1932 if with_admins:
1933 1933 for usr in User.get_all_super_admins():
1934 1934 # if this admin is also owner, don't double the record
1935 1935 if usr.user_id == owner_row[0].user_id:
1936 1936 owner_row[0].admin_row = True
1937 1937 else:
1938 1938 usr = AttributeDict(usr.get_dict())
1939 1939 usr.admin_row = True
1940 1940 usr.permission = _admin_perm
1941 1941 super_admin_rows.append(usr)
1942 1942
1943 1943 return super_admin_rows + owner_row + perm_rows
1944 1944
1945 1945 def permission_user_groups(self):
1946 1946 q = UserGroupRepoToPerm.query().filter(
1947 1947 UserGroupRepoToPerm.repository == self)
1948 1948 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1949 1949 joinedload(UserGroupRepoToPerm.users_group),
1950 1950 joinedload(UserGroupRepoToPerm.permission),)
1951 1951
1952 1952 perm_rows = []
1953 1953 for _user_group in q.all():
1954 1954 usr = AttributeDict(_user_group.users_group.get_dict())
1955 1955 usr.permission = _user_group.permission.permission_name
1956 1956 perm_rows.append(usr)
1957 1957
1958 1958 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1959 1959 return perm_rows
1960 1960
1961 1961 def get_api_data(self, include_secrets=False):
1962 1962 """
1963 1963 Common function for generating repo api data
1964 1964
1965 1965 :param include_secrets: See :meth:`User.get_api_data`.
1966 1966
1967 1967 """
1968 1968 # TODO: mikhail: there is an anti-pattern here; we probably need to
1969 1969 # move these methods to the model level.
1970 1970 from rhodecode.model.settings import SettingsModel
1971 1971 from rhodecode.model.repo import RepoModel
1972 1972
1973 1973 repo = self
1974 1974 _user_id, _time, _reason = self.locked
1975 1975
1976 1976 data = {
1977 1977 'repo_id': repo.repo_id,
1978 1978 'repo_name': repo.repo_name,
1979 1979 'repo_type': repo.repo_type,
1980 1980 'clone_uri': repo.clone_uri or '',
1981 1981 'push_uri': repo.push_uri or '',
1982 1982 'url': RepoModel().get_url(self),
1983 1983 'private': repo.private,
1984 1984 'created_on': repo.created_on,
1985 1985 'description': repo.description_safe,
1986 1986 'landing_rev': repo.landing_rev,
1987 1987 'owner': repo.user.username,
1988 1988 'fork_of': repo.fork.repo_name if repo.fork else None,
1989 1989 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1990 1990 'enable_statistics': repo.enable_statistics,
1991 1991 'enable_locking': repo.enable_locking,
1992 1992 'enable_downloads': repo.enable_downloads,
1993 1993 'last_changeset': repo.changeset_cache,
1994 1994 'locked_by': User.get(_user_id).get_api_data(
1995 1995 include_secrets=include_secrets) if _user_id else None,
1996 1996 'locked_date': time_to_datetime(_time) if _time else None,
1997 1997 'lock_reason': _reason if _reason else None,
1998 1998 }
1999 1999
2000 2000 # TODO: mikhail: should be per-repo settings here
2001 2001 rc_config = SettingsModel().get_all_settings()
2002 2002 repository_fields = str2bool(
2003 2003 rc_config.get('rhodecode_repository_fields'))
2004 2004 if repository_fields:
2005 2005 for f in self.extra_fields:
2006 2006 data[f.field_key_prefixed] = f.field_value
2007 2007
2008 2008 return data
2009 2009
2010 2010 @classmethod
2011 2011 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2012 2012 if not lock_time:
2013 2013 lock_time = time.time()
2014 2014 if not lock_reason:
2015 2015 lock_reason = cls.LOCK_AUTOMATIC
2016 2016 repo.locked = [user_id, lock_time, lock_reason]
2017 2017 Session().add(repo)
2018 2018 Session().commit()
2019 2019
2020 2020 @classmethod
2021 2021 def unlock(cls, repo):
2022 2022 repo.locked = None
2023 2023 Session().add(repo)
2024 2024 Session().commit()
2025 2025
2026 2026 @classmethod
2027 2027 def getlock(cls, repo):
2028 2028 return repo.locked
2029 2029
2030 2030 def is_user_lock(self, user_id):
2031 2031 if self.locked[0]:
2032 2032 lock_user_id = safe_int(self.locked[0])
2033 2033 user_id = safe_int(user_id)
2034 2034 # both are ints, and they are equal
2035 2035 return all([lock_user_id, user_id]) and lock_user_id == user_id
2036 2036
2037 2037 return False
2038 2038
2039 2039 def get_locking_state(self, action, user_id, only_when_enabled=True):
2040 2040 """
2041 2041 Checks locking on this repository. If locking is enabled and a lock is
2042 2042 present, returns a tuple of (make_lock, locked, locked_by).
2043 2043 make_lock can have 3 states: None (do nothing), True (make a lock),
2044 2044 False (release the lock). This value is later propagated to hooks, which
2045 2045 do the locking. Think of it as a signal passed to hooks telling them what to do.
2046 2046
2047 2047 """
2048 2048 # TODO: johbo: This is part of the business logic and should be moved
2049 2049 # into the RepositoryModel.
2050 2050
2051 2051 if action not in ('push', 'pull'):
2052 2052 raise ValueError("Invalid action value: %s" % repr(action))
2053 2053
2054 2054 # defines if locked error should be thrown to user
2055 2055 currently_locked = False
2056 2056 # defines if new lock should be made, tri-state
2057 2057 make_lock = None
2058 2058 repo = self
2059 2059 user = User.get(user_id)
2060 2060
2061 2061 lock_info = repo.locked
2062 2062
2063 2063 if repo and (repo.enable_locking or not only_when_enabled):
2064 2064 if action == 'push':
2065 2065 # check if it's already locked! if it is, compare users
2066 2066 locked_by_user_id = lock_info[0]
2067 2067 if user.user_id == locked_by_user_id:
2068 2068 log.debug(
2069 2069 'Got `push` action from user %s, now unlocking', user)
2070 2070 # unlock if we have push from user who locked
2071 2071 make_lock = False
2072 2072 else:
2073 2073 # we're not the same user who locked the repo; deny with the
2074 2074 # code defined in settings (default is HTTP 423 Locked)
2075 2075 log.debug('Repo %s is currently locked by %s', repo, user)
2076 2076 currently_locked = True
2077 2077 elif action == 'pull':
2078 2078 # [0] user [1] date
2079 2079 if lock_info[0] and lock_info[1]:
2080 2080 log.debug('Repo %s is currently locked by %s', repo, user)
2081 2081 currently_locked = True
2082 2082 else:
2083 2083 log.debug('Setting lock on repo %s by %s', repo, user)
2084 2084 make_lock = True
2085 2085
2086 2086 else:
2087 2087 log.debug('Repository %s does not have locking enabled', repo)
2088 2088
2089 2089 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2090 2090 make_lock, currently_locked, lock_info)
2091 2091
2092 2092 from rhodecode.lib.auth import HasRepoPermissionAny
2093 2093 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2094 2094 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2095 2095 # if we don't have at least write permission we cannot make a lock
2096 2096 log.debug('lock state reset back to FALSE due to lack '
2097 2097 'of at least write permission')
2098 2098 make_lock = False
2099 2099
2100 2100 return make_lock, currently_locked, lock_info
2101 2101
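# Illustrative usage sketch; the user id is made up and the tuple is the
# tri-state signal described in the docstring above:
#
#   >>> make_lock, locked, lock_info = repo.get_locking_state('pull', user_id=2)
#   >>> # make_lock: True -> set a lock, False -> release it, None -> do nothing
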
2102 2102 @property
2103 2103 def last_db_change(self):
2104 2104 return self.updated_on
2105 2105
2106 2106 @property
2107 2107 def clone_uri_hidden(self):
2108 2108 clone_uri = self.clone_uri
2109 2109 if clone_uri:
2110 2110 import urlobject
2111 2111 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2112 2112 if url_obj.password:
2113 2113 clone_uri = url_obj.with_password('*****')
2114 2114 return clone_uri
2115 2115
2116 2116 @property
2117 2117 def push_uri_hidden(self):
2118 2118 push_uri = self.push_uri
2119 2119 if push_uri:
2120 2120 import urlobject
2121 2121 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2122 2122 if url_obj.password:
2123 2123 push_uri = url_obj.with_password('*****')
2124 2124 return push_uri
2125 2125
2126 2126 def clone_url(self, **override):
2127 2127 from rhodecode.model.settings import SettingsModel
2128 2128
2129 2129 uri_tmpl = None
2130 2130 if 'with_id' in override:
2131 2131 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2132 2132 del override['with_id']
2133 2133
2134 2134 if 'uri_tmpl' in override:
2135 2135 uri_tmpl = override['uri_tmpl']
2136 2136 del override['uri_tmpl']
2137 2137
2138 2138 ssh = False
2139 2139 if 'ssh' in override:
2140 2140 ssh = True
2141 2141 del override['ssh']
2142 2142
2143 2143 # we didn't override our tmpl from **overrides
2144 2144 if not uri_tmpl:
2145 2145 rc_config = SettingsModel().get_all_settings(cache=True)
2146 2146 if ssh:
2147 2147 uri_tmpl = rc_config.get(
2148 2148 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2149 2149 else:
2150 2150 uri_tmpl = rc_config.get(
2151 2151 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2152 2152
2153 2153 request = get_current_request()
2154 2154 return get_clone_url(request=request,
2155 2155 uri_tmpl=uri_tmpl,
2156 2156 repo_name=self.repo_name,
2157 2157 repo_id=self.repo_id, **override)
2158 2158
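# Illustrative sketch of switching the clone URL template per call; the
# resulting URLs depend on the request and settings, so the values below are
# made up:
#
#   >>> repo.clone_url(with_id=True)
#   u'http://user@example.com/_42'
#   >>> repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')
#   u'http://example.com/group/repo'
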
2159 2159 def set_state(self, state):
2160 2160 self.repo_state = state
2161 2161 Session().add(self)
2162 2162 #==========================================================================
2163 2163 # SCM PROPERTIES
2164 2164 #==========================================================================
2165 2165
2166 2166 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2167 2167 return get_commit_safe(
2168 2168 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2169 2169
2170 2170 def get_changeset(self, rev=None, pre_load=None):
2171 2171 warnings.warn("Use get_commit", DeprecationWarning)
2172 2172 commit_id = None
2173 2173 commit_idx = None
2174 2174 if isinstance(rev, basestring):
2175 2175 commit_id = rev
2176 2176 else:
2177 2177 commit_idx = rev
2178 2178 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2179 2179 pre_load=pre_load)
2180 2180
2181 2181 def get_landing_commit(self):
2182 2182 """
2183 2183 Returns the landing commit, or the tip if it doesn't exist
2184 2184 """
2185 2185 _rev_type, _rev = self.landing_rev
2186 2186 commit = self.get_commit(_rev)
2187 2187 if isinstance(commit, EmptyCommit):
2188 2188 return self.get_commit()
2189 2189 return commit
2190 2190
2191 2191 def update_commit_cache(self, cs_cache=None, config=None):
2192 2192 """
2193 2193 Update cache of last changeset for repository, keys should be::
2194 2194
2195 2195 short_id
2196 2196 raw_id
2197 2197 revision
2198 2198 parents
2199 2199 message
2200 2200 date
2201 2201 author
2202 2202
2203 2203 :param cs_cache:
2204 2204 """
2205 2205 from rhodecode.lib.vcs.backends.base import BaseChangeset
2206 2206 if cs_cache is None:
2207 2207 # use no-cache version here
2208 2208 scm_repo = self.scm_instance(cache=False, config=config)
2209 2209
2210 2210 empty = scm_repo.is_empty()
2211 2211 if not empty:
2212 2212 cs_cache = scm_repo.get_commit(
2213 2213 pre_load=["author", "date", "message", "parents"])
2214 2214 else:
2215 2215 cs_cache = EmptyCommit()
2216 2216
2217 2217 if isinstance(cs_cache, BaseChangeset):
2218 2218 cs_cache = cs_cache.__json__()
2219 2219
2220 2220 def is_outdated(new_cs_cache):
2221 2221 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2222 2222 new_cs_cache['revision'] != self.changeset_cache['revision']):
2223 2223 return True
2224 2224 return False
2225 2225
2226 2226 # check if maybe we already have the latest cached revision
2227 2227 if is_outdated(cs_cache) or not self.changeset_cache:
2228 2228 _default = datetime.datetime.utcnow()
2229 2229 last_change = cs_cache.get('date') or _default
2230 2230 if self.updated_on and self.updated_on > last_change:
2231 2231 # we check if last update is newer than the new value
2232 2232 # if so, we use the current timestamp instead. Imagine an old
2233 2233 # commit authored 1y ago gets pushed now; we'd set the last update to 1y ago.
2234 2234 last_change = _default
2235 2235 log.debug('updated repo %s with new cs cache %s',
2236 2236 self.repo_name, cs_cache)
2237 2237 self.updated_on = last_change
2238 2238 self.changeset_cache = cs_cache
2239 2239 Session().add(self)
2240 2240 Session().commit()
2241 2241 else:
2242 2242 log.debug('Skipping update_commit_cache for repo:`%s` '
2243 2243 'commit already with latest changes', self.repo_name)
2244 2244
2245 2245 @property
2246 2246 def tip(self):
2247 2247 return self.get_commit('tip')
2248 2248
2249 2249 @property
2250 2250 def author(self):
2251 2251 return self.tip.author
2252 2252
2253 2253 @property
2254 2254 def last_change(self):
2255 2255 return self.scm_instance().last_change
2256 2256
2257 2257 def get_comments(self, revisions=None):
2258 2258 """
2259 2259 Returns comments for this repository grouped by revisions
2260 2260
2261 2261 :param revisions: filter query by revisions only
2262 2262 """
2263 2263 cmts = ChangesetComment.query()\
2264 2264 .filter(ChangesetComment.repo == self)
2265 2265 if revisions:
2266 2266 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2267 2267 grouped = collections.defaultdict(list)
2268 2268 for cmt in cmts.all():
2269 2269 grouped[cmt.revision].append(cmt)
2270 2270 return grouped
2271 2271
2272 2272 def statuses(self, revisions=None):
2273 2273 """
2274 2274 Returns statuses for this repository
2275 2275
2276 2276 :param revisions: list of revisions to get statuses for
2277 2277 """
2278 2278 statuses = ChangesetStatus.query()\
2279 2279 .filter(ChangesetStatus.repo == self)\
2280 2280 .filter(ChangesetStatus.version == 0)
2281 2281
2282 2282 if revisions:
2283 2283 # Try doing the filtering in chunks to avoid hitting limits
2284 2284 size = 500
2285 2285 status_results = []
2286 2286 for chunk in xrange(0, len(revisions), size):
2287 2287 status_results += statuses.filter(
2288 2288 ChangesetStatus.revision.in_(
2289 2289 revisions[chunk: chunk+size])
2290 2290 ).all()
2291 2291 else:
2292 2292 status_results = statuses.all()
2293 2293
2294 2294 grouped = {}
2295 2295
2296 2296 # maybe we have an open pull request without a status yet?
2297 2297 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2298 2298 status_lbl = ChangesetStatus.get_status_lbl(stat)
2299 2299 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2300 2300 for rev in pr.revisions:
2301 2301 pr_id = pr.pull_request_id
2302 2302 pr_repo = pr.target_repo.repo_name
2303 2303 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2304 2304
2305 2305 for stat in status_results:
2306 2306 pr_id = pr_repo = None
2307 2307 if stat.pull_request:
2308 2308 pr_id = stat.pull_request.pull_request_id
2309 2309 pr_repo = stat.pull_request.target_repo.repo_name
2310 2310 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2311 2311 pr_id, pr_repo]
2312 2312 return grouped
2313 2313
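# Illustrative sketch of the structure returned by statuses(); the revision,
# status and pull request data below are made up:
#
#   >>> repo.statuses(['deadbeef'])
#   {'deadbeef': ['approved', 'Approved', 12, 'group/target-repo']}
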
2314 2314 # ==========================================================================
2315 2315 # SCM CACHE INSTANCE
2316 2316 # ==========================================================================
2317 2317
2318 2318 def scm_instance(self, **kwargs):
2319 2319 import rhodecode
2320 2320
2321 2321 # Passing a config will not hit the cache; currently this is only used
2322 2322 # for repo2dbmapper
2323 2323 config = kwargs.pop('config', None)
2324 2324 cache = kwargs.pop('cache', None)
2325 2325 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2326 2326 # if cache is NOT defined, use the global default; otherwise we have full
2327 2327 # control over cache behaviour
2328 2328 if cache is None and full_cache and not config:
2329 2329 return self._get_instance_cached()
2330 2330 return self._get_instance(cache=bool(cache), config=config)
2331 2331
2332 2332 def _get_instance_cached(self):
2333 2333 from rhodecode.lib import rc_cache
2334 2334
2335 2335 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2336 2336 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2337 2337 repo_id=self.repo_id)
2338 2338 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2339 2339
2340 2340 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2341 2341 def get_instance_cached(repo_id, context_id):
2342 2342 return self._get_instance()
2343 2343
2344 2344 # we must use thread scoped cache here,
2345 2345 # because each gevent thread needs its own, non-shared connection and cache
2346 2346 # we also alter `args` so the cache key is individual for every green thread.
2347 2347 inv_context_manager = rc_cache.InvalidationContext(
2348 2348 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2349 2349 thread_scoped=True)
2350 2350 with inv_context_manager as invalidation_context:
2351 2351 args = (self.repo_id, inv_context_manager.cache_key)
2352 2352 # re-compute and store cache if we get invalidate signal
2353 2353 if invalidation_context.should_invalidate():
2354 2354 instance = get_instance_cached.refresh(*args)
2355 2355 else:
2356 2356 instance = get_instance_cached(*args)
2357 2357
2358 2358 log.debug(
2359 2359 'Repo instance fetched in %.3fs', inv_context_manager.compute_time)
2360 2360 return instance
2361 2361
2362 2362 def _get_instance(self, cache=True, config=None):
2363 2363 config = config or self._config
2364 2364 custom_wire = {
2365 2365 'cache': cache # controls the vcs.remote cache
2366 2366 }
2367 2367 repo = get_vcs_instance(
2368 2368 repo_path=safe_str(self.repo_full_path),
2369 2369 config=config,
2370 2370 with_wire=custom_wire,
2371 2371 create=False,
2372 2372 _vcs_alias=self.repo_type)
2373 2373
2374 2374 return repo
2375 2375
2376 2376 def __json__(self):
2377 2377 return {'landing_rev': self.landing_rev}
2378 2378
2379 2379 def get_dict(self):
2380 2380
2381 2381 # Since we transformed `repo_name` to a hybrid property, we need to
2382 2382 # keep compatibility with the code which uses `repo_name` field.
2383 2383
2384 2384 result = super(Repository, self).get_dict()
2385 2385 result['repo_name'] = result.pop('_repo_name', None)
2386 2386 return result
2387 2387
2388 2388
2389 2389 class RepoGroup(Base, BaseModel):
2390 2390 __tablename__ = 'groups'
2391 2391 __table_args__ = (
2392 2392 UniqueConstraint('group_name', 'group_parent_id'),
2393 2393 CheckConstraint('group_id != group_parent_id'),
2394 2394 base_table_args,
2395 2395 )
2396 2396 __mapper_args__ = {'order_by': 'group_name'}
2397 2397
2398 2398 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2399 2399
2400 2400 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2401 2401 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2402 2402 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2403 2403 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2404 2404 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2405 2405 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2406 2406 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2407 2407 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2408 2408 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2409 2409
2410 2410 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2411 2411 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2412 2412 parent_group = relationship('RepoGroup', remote_side=group_id)
2413 2413 user = relationship('User')
2414 2414 integrations = relationship('Integration',
2415 2415 cascade="all, delete, delete-orphan")
2416 2416
2417 2417 def __init__(self, group_name='', parent_group=None):
2418 2418 self.group_name = group_name
2419 2419 self.parent_group = parent_group
2420 2420
2421 2421 def __unicode__(self):
2422 2422 return u"<%s('id:%s:%s')>" % (
2423 2423 self.__class__.__name__, self.group_id, self.group_name)
2424 2424
2425 2425 @hybrid_property
2426 2426 def description_safe(self):
2427 2427 from rhodecode.lib import helpers as h
2428 2428 return h.escape(self.group_description)
2429 2429
2430 2430 @classmethod
2431 2431 def _generate_choice(cls, repo_group):
2432 2432 from webhelpers.html import literal as _literal
2433 2433 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2434 2434 return repo_group.group_id, _name(repo_group.full_path_splitted)
2435 2435
2436 2436 @classmethod
2437 2437 def groups_choices(cls, groups=None, show_empty_group=True):
2438 2438 if not groups:
2439 2439 groups = cls.query().all()
2440 2440
2441 2441 repo_groups = []
2442 2442 if show_empty_group:
2443 2443 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2444 2444
2445 2445 repo_groups.extend([cls._generate_choice(x) for x in groups])
2446 2446
2447 2447 repo_groups = sorted(
2448 2448 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2449 2449 return repo_groups
2450 2450
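# Illustrative sketch of the select2 choices produced above; the groups and
# ids below are made up:
#
#   >>> RepoGroup.groups_choices()
#   [(-1, u'-- No parent --'), (3, u'docs'), (7, u'projects/backend')]
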
2451 2451 @classmethod
2452 2452 def url_sep(cls):
2453 2453 return URL_SEP
2454 2454
2455 2455 @classmethod
2456 2456 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2457 2457 if case_insensitive:
2458 2458 gr = cls.query().filter(func.lower(cls.group_name)
2459 2459 == func.lower(group_name))
2460 2460 else:
2461 2461 gr = cls.query().filter(cls.group_name == group_name)
2462 2462 if cache:
2463 2463 name_key = _hash_key(group_name)
2464 2464 gr = gr.options(
2465 2465 FromCache("sql_cache_short", "get_group_%s" % name_key))
2466 2466 return gr.scalar()
2467 2467
2468 2468 @classmethod
2469 2469 def get_user_personal_repo_group(cls, user_id):
2470 2470 user = User.get(user_id)
2471 2471 if user.username == User.DEFAULT_USER:
2472 2472 return None
2473 2473
2474 2474 return cls.query()\
2475 2475 .filter(cls.personal == true()) \
2476 2476 .filter(cls.user == user).scalar()
2477 2477
2478 2478 @classmethod
2479 2479 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2480 2480 case_insensitive=True):
2481 2481 q = RepoGroup.query()
2482 2482
2483 2483 if not isinstance(user_id, Optional):
2484 2484 q = q.filter(RepoGroup.user_id == user_id)
2485 2485
2486 2486 if not isinstance(group_id, Optional):
2487 2487 q = q.filter(RepoGroup.group_parent_id == group_id)
2488 2488
2489 2489 if case_insensitive:
2490 2490 q = q.order_by(func.lower(RepoGroup.group_name))
2491 2491 else:
2492 2492 q = q.order_by(RepoGroup.group_name)
2493 2493 return q.all()
2494 2494
2495 2495 @property
2496 2496 def parents(self):
2497 2497 parents_recursion_limit = 10
2498 2498 groups = []
2499 2499 if self.parent_group is None:
2500 2500 return groups
2501 2501 cur_gr = self.parent_group
2502 2502 groups.insert(0, cur_gr)
2503 2503 cnt = 0
2504 2504 while 1:
2505 2505 cnt += 1
2506 2506 gr = getattr(cur_gr, 'parent_group', None)
2507 2507 cur_gr = cur_gr.parent_group
2508 2508 if gr is None:
2509 2509 break
2510 2510 if cnt == parents_recursion_limit:
2511 2511 # this will prevent accidental infinite loops
2512 2512 log.error(('more than %s parents found for group %s, stopping '
2513 2513 'recursive parent fetching' % (parents_recursion_limit, self)))
2514 2514 break
2515 2515
2516 2516 groups.insert(0, gr)
2517 2517 return groups
2518 2518
2519 2519 @property
2520 2520 def last_db_change(self):
2521 2521 return self.updated_on
2522 2522
2523 2523 @property
2524 2524 def children(self):
2525 2525 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2526 2526
2527 2527 @property
2528 2528 def name(self):
2529 2529 return self.group_name.split(RepoGroup.url_sep())[-1]
2530 2530
2531 2531 @property
2532 2532 def full_path(self):
2533 2533 return self.group_name
2534 2534
2535 2535 @property
2536 2536 def full_path_splitted(self):
2537 2537 return self.group_name.split(RepoGroup.url_sep())
2538 2538
2539 2539 @property
2540 2540 def repositories(self):
2541 2541 return Repository.query()\
2542 2542 .filter(Repository.group == self)\
2543 2543 .order_by(Repository.repo_name)
2544 2544
2545 2545 @property
2546 2546 def repositories_recursive_count(self):
2547 2547 cnt = self.repositories.count()
2548 2548
2549 2549 def children_count(group):
2550 2550 cnt = 0
2551 2551 for child in group.children:
2552 2552 cnt += child.repositories.count()
2553 2553 cnt += children_count(child)
2554 2554 return cnt
2555 2555
2556 2556 return cnt + children_count(self)
2557 2557
2558 2558 def _recursive_objects(self, include_repos=True):
2559 2559 all_ = []
2560 2560
2561 2561 def _get_members(root_gr):
2562 2562 if include_repos:
2563 2563 for r in root_gr.repositories:
2564 2564 all_.append(r)
2565 2565 childs = root_gr.children.all()
2566 2566 if childs:
2567 2567 for gr in childs:
2568 2568 all_.append(gr)
2569 2569 _get_members(gr)
2570 2570
2571 2571 _get_members(self)
2572 2572 return [self] + all_
2573 2573
2574 2574 def recursive_groups_and_repos(self):
2575 2575 """
2576 2576 Recursively returns all groups, with the repositories in those groups
2577 2577 """
2578 2578 return self._recursive_objects()
2579 2579
2580 2580 def recursive_groups(self):
2581 2581 """
2582 2582 Returns all children groups for this group including children of children
2583 2583 """
2584 2584 return self._recursive_objects(include_repos=False)
2585 2585
2586 2586 def get_new_name(self, group_name):
2587 2587 """
2588 2588 returns new full group name based on parent and new name
2589 2589
2590 2590 :param group_name:
2591 2591 """
2592 2592 path_prefix = (self.parent_group.full_path_splitted if
2593 2593 self.parent_group else [])
2594 2594 return RepoGroup.url_sep().join(path_prefix + [group_name])
2595 2595
2596 2596 def permissions(self, with_admins=True, with_owner=True):
2597 2597 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2598 2598 q = q.options(joinedload(UserRepoGroupToPerm.group),
2599 2599 joinedload(UserRepoGroupToPerm.user),
2600 2600 joinedload(UserRepoGroupToPerm.permission),)
2601 2601
2602 2602 # get owners, admins and permissions. We do a trick of re-writing
2603 2603 # objects from sqlalchemy to named-tuples, because the sqlalchemy session
2604 2604 # holds a global reference and changing one object propagates to all
2605 2605 # others. This means that if an admin is also the owner, an admin_row change
2606 2606 # would propagate to both objects
2607 2607 perm_rows = []
2608 2608 for _usr in q.all():
2609 2609 usr = AttributeDict(_usr.user.get_dict())
2610 2610 usr.permission = _usr.permission.permission_name
2611 2611 perm_rows.append(usr)
2612 2612
2613 2613 # filter the perm rows by 'default' first and then sort them by
2614 2614 # admin,write,read,none permissions sorted again alphabetically in
2615 2615 # each group
2616 2616 perm_rows = sorted(perm_rows, key=display_user_sort)
2617 2617
2618 2618 _admin_perm = 'group.admin'
2619 2619 owner_row = []
2620 2620 if with_owner:
2621 2621 usr = AttributeDict(self.user.get_dict())
2622 2622 usr.owner_row = True
2623 2623 usr.permission = _admin_perm
2624 2624 owner_row.append(usr)
2625 2625
2626 2626 super_admin_rows = []
2627 2627 if with_admins:
2628 2628 for usr in User.get_all_super_admins():
2629 2629 # if this admin is also owner, don't double the record
2630 2630 if usr.user_id == owner_row[0].user_id:
2631 2631 owner_row[0].admin_row = True
2632 2632 else:
2633 2633 usr = AttributeDict(usr.get_dict())
2634 2634 usr.admin_row = True
2635 2635 usr.permission = _admin_perm
2636 2636 super_admin_rows.append(usr)
2637 2637
2638 2638 return super_admin_rows + owner_row + perm_rows
2639 2639
2640 2640 def permission_user_groups(self):
2641 2641 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2642 2642 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2643 2643 joinedload(UserGroupRepoGroupToPerm.users_group),
2644 2644 joinedload(UserGroupRepoGroupToPerm.permission),)
2645 2645
2646 2646 perm_rows = []
2647 2647 for _user_group in q.all():
2648 2648 usr = AttributeDict(_user_group.users_group.get_dict())
2649 2649 usr.permission = _user_group.permission.permission_name
2650 2650 perm_rows.append(usr)
2651 2651
2652 2652 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2653 2653 return perm_rows
2654 2654
2655 2655 def get_api_data(self):
2656 2656 """
2657 2657 Common function for generating api data
2658 2658
2659 2659 """
2660 2660 group = self
2661 2661 data = {
2662 2662 'group_id': group.group_id,
2663 2663 'group_name': group.group_name,
2664 2664 'group_description': group.description_safe,
2665 2665 'parent_group': group.parent_group.group_name if group.parent_group else None,
2666 2666 'repositories': [x.repo_name for x in group.repositories],
2667 2667 'owner': group.user.username,
2668 2668 }
2669 2669 return data
2670 2670
2671 2671
2672 2672 class Permission(Base, BaseModel):
2673 2673 __tablename__ = 'permissions'
2674 2674 __table_args__ = (
2675 2675 Index('p_perm_name_idx', 'permission_name'),
2676 2676 base_table_args,
2677 2677 )
2678 2678
2679 2679 PERMS = [
2680 2680 ('hg.admin', _('RhodeCode Super Administrator')),
2681 2681
2682 2682 ('repository.none', _('Repository no access')),
2683 2683 ('repository.read', _('Repository read access')),
2684 2684 ('repository.write', _('Repository write access')),
2685 2685 ('repository.admin', _('Repository admin access')),
2686 2686
2687 2687 ('group.none', _('Repository group no access')),
2688 2688 ('group.read', _('Repository group read access')),
2689 2689 ('group.write', _('Repository group write access')),
2690 2690 ('group.admin', _('Repository group admin access')),
2691 2691
2692 2692 ('usergroup.none', _('User group no access')),
2693 2693 ('usergroup.read', _('User group read access')),
2694 2694 ('usergroup.write', _('User group write access')),
2695 2695 ('usergroup.admin', _('User group admin access')),
2696 2696
2697 2697 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2698 2698 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2699 2699
2700 2700 ('hg.usergroup.create.false', _('User Group creation disabled')),
2701 2701 ('hg.usergroup.create.true', _('User Group creation enabled')),
2702 2702
2703 2703 ('hg.create.none', _('Repository creation disabled')),
2704 2704 ('hg.create.repository', _('Repository creation enabled')),
2705 2705 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2706 2706 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2707 2707
2708 2708 ('hg.fork.none', _('Repository forking disabled')),
2709 2709 ('hg.fork.repository', _('Repository forking enabled')),
2710 2710
2711 2711 ('hg.register.none', _('Registration disabled')),
2712 2712 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2713 2713 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2714 2714
2715 2715 ('hg.password_reset.enabled', _('Password reset enabled')),
2716 2716 ('hg.password_reset.hidden', _('Password reset hidden')),
2717 2717 ('hg.password_reset.disabled', _('Password reset disabled')),
2718 2718
2719 2719 ('hg.extern_activate.manual', _('Manual activation of external account')),
2720 2720 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2721 2721
2722 2722 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2723 2723 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2724 2724 ]
2725 2725
2726 2726 # definition of system default permissions for DEFAULT user
2727 2727 DEFAULT_USER_PERMISSIONS = [
2728 2728 'repository.read',
2729 2729 'group.read',
2730 2730 'usergroup.read',
2731 2731 'hg.create.repository',
2732 2732 'hg.repogroup.create.false',
2733 2733 'hg.usergroup.create.false',
2734 2734 'hg.create.write_on_repogroup.true',
2735 2735 'hg.fork.repository',
2736 2736 'hg.register.manual_activate',
2737 2737 'hg.password_reset.enabled',
2738 2738 'hg.extern_activate.auto',
2739 2739 'hg.inherit_default_perms.true',
2740 2740 ]
2741 2741
2742 2742 # Weight defines which permissions are more important.
2743 2743 # The higher the number, the more important the permission.
2745 2745 PERM_WEIGHTS = {
2746 2746 'repository.none': 0,
2747 2747 'repository.read': 1,
2748 2748 'repository.write': 3,
2749 2749 'repository.admin': 4,
2750 2750
2751 2751 'group.none': 0,
2752 2752 'group.read': 1,
2753 2753 'group.write': 3,
2754 2754 'group.admin': 4,
2755 2755
2756 2756 'usergroup.none': 0,
2757 2757 'usergroup.read': 1,
2758 2758 'usergroup.write': 3,
2759 2759 'usergroup.admin': 4,
2760 2760
2761 2761 'hg.repogroup.create.false': 0,
2762 2762 'hg.repogroup.create.true': 1,
2763 2763
2764 2764 'hg.usergroup.create.false': 0,
2765 2765 'hg.usergroup.create.true': 1,
2766 2766
2767 2767 'hg.fork.none': 0,
2768 2768 'hg.fork.repository': 1,
2769 2769 'hg.create.none': 0,
2770 2770 'hg.create.repository': 1
2771 2771 }
2772 2772
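# Illustrative sketch: the weights make it straightforward to pick the
# strongest of several permissions:
#
#   >>> perms = ['repository.read', 'repository.write']
#   >>> max(perms, key=Permission.PERM_WEIGHTS.get)
#   'repository.write'
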
2773 2773 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2774 2774 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2775 2775 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2776 2776
2777 2777 def __unicode__(self):
2778 2778 return u"<%s('%s:%s')>" % (
2779 2779 self.__class__.__name__, self.permission_id, self.permission_name
2780 2780 )
2781 2781
2782 2782 @classmethod
2783 2783 def get_by_key(cls, key):
2784 2784 return cls.query().filter(cls.permission_name == key).scalar()
2785 2785
2786 2786 @classmethod
2787 2787 def get_default_repo_perms(cls, user_id, repo_id=None):
2788 2788 q = Session().query(UserRepoToPerm, Repository, Permission)\
2789 2789 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2790 2790 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2791 2791 .filter(UserRepoToPerm.user_id == user_id)
2792 2792 if repo_id:
2793 2793 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2794 2794 return q.all()
2795 2795
2796 2796 @classmethod
2797 2797 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2798 2798 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2799 2799 .join(
2800 2800 Permission,
2801 2801 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2802 2802 .join(
2803 2803 Repository,
2804 2804 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2805 2805 .join(
2806 2806 UserGroup,
2807 2807 UserGroupRepoToPerm.users_group_id ==
2808 2808 UserGroup.users_group_id)\
2809 2809 .join(
2810 2810 UserGroupMember,
2811 2811 UserGroupRepoToPerm.users_group_id ==
2812 2812 UserGroupMember.users_group_id)\
2813 2813 .filter(
2814 2814 UserGroupMember.user_id == user_id,
2815 2815 UserGroup.users_group_active == true())
2816 2816 if repo_id:
2817 2817 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2818 2818 return q.all()
2819 2819
2820 2820 @classmethod
2821 2821 def get_default_group_perms(cls, user_id, repo_group_id=None):
2822 2822 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2823 2823 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2824 2824 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2825 2825 .filter(UserRepoGroupToPerm.user_id == user_id)
2826 2826 if repo_group_id:
2827 2827 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2828 2828 return q.all()
2829 2829
2830 2830 @classmethod
2831 2831 def get_default_group_perms_from_user_group(
2832 2832 cls, user_id, repo_group_id=None):
2833 2833 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2834 2834 .join(
2835 2835 Permission,
2836 2836 UserGroupRepoGroupToPerm.permission_id ==
2837 2837 Permission.permission_id)\
2838 2838 .join(
2839 2839 RepoGroup,
2840 2840 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2841 2841 .join(
2842 2842 UserGroup,
2843 2843 UserGroupRepoGroupToPerm.users_group_id ==
2844 2844 UserGroup.users_group_id)\
2845 2845 .join(
2846 2846 UserGroupMember,
2847 2847 UserGroupRepoGroupToPerm.users_group_id ==
2848 2848 UserGroupMember.users_group_id)\
2849 2849 .filter(
2850 2850 UserGroupMember.user_id == user_id,
2851 2851 UserGroup.users_group_active == true())
2852 2852 if repo_group_id:
2853 2853 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2854 2854 return q.all()
2855 2855
2856 2856 @classmethod
2857 2857 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2858 2858 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2859 2859 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2860 2860 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2861 2861 .filter(UserUserGroupToPerm.user_id == user_id)
2862 2862 if user_group_id:
2863 2863 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2864 2864 return q.all()
2865 2865
2866 2866 @classmethod
2867 2867 def get_default_user_group_perms_from_user_group(
2868 2868 cls, user_id, user_group_id=None):
2869 2869 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2870 2870 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2871 2871 .join(
2872 2872 Permission,
2873 2873 UserGroupUserGroupToPerm.permission_id ==
2874 2874 Permission.permission_id)\
2875 2875 .join(
2876 2876 TargetUserGroup,
2877 2877 UserGroupUserGroupToPerm.target_user_group_id ==
2878 2878 TargetUserGroup.users_group_id)\
2879 2879 .join(
2880 2880 UserGroup,
2881 2881 UserGroupUserGroupToPerm.user_group_id ==
2882 2882 UserGroup.users_group_id)\
2883 2883 .join(
2884 2884 UserGroupMember,
2885 2885 UserGroupUserGroupToPerm.user_group_id ==
2886 2886 UserGroupMember.users_group_id)\
2887 2887 .filter(
2888 2888 UserGroupMember.user_id == user_id,
2889 2889 UserGroup.users_group_active == true())
2890 2890 if user_group_id:
2891 2891 q = q.filter(
2892 2892 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2893 2893
2894 2894 return q.all()
2895 2895
2896 2896
2897 2897 class UserRepoToPerm(Base, BaseModel):
2898 2898 __tablename__ = 'repo_to_perm'
2899 2899 __table_args__ = (
2900 2900 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2901 2901 base_table_args
2902 2902 )
2903 2903
2904 2904 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2905 2905 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2906 2906 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2907 2907 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2908 2908
2909 2909 user = relationship('User')
2910 2910 repository = relationship('Repository')
2911 2911 permission = relationship('Permission')
2912 2912
2913 2913 @classmethod
2914 2914 def create(cls, user, repository, permission):
2915 2915 n = cls()
2916 2916 n.user = user
2917 2917 n.repository = repository
2918 2918 n.permission = permission
2919 2919 Session().add(n)
2920 2920 return n
2921 2921
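# Illustrative usage sketch; the user and repository objects are assumed to
# exist already, and create() only adds the new row to the session:
#
#   >>> grant = UserRepoToPerm.create(
#   ...     user, repo, Permission.get_by_key('repository.write'))
#   >>> Session().commit()
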
2922 2922 def __unicode__(self):
2923 2923 return u'<%s => %s >' % (self.user, self.repository)
2924 2924
2925 2925
2926 2926 class UserUserGroupToPerm(Base, BaseModel):
2927 2927 __tablename__ = 'user_user_group_to_perm'
2928 2928 __table_args__ = (
2929 2929 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2930 2930 base_table_args
2931 2931 )
2932 2932
2933 2933 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2934 2934 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2935 2935 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2936 2936 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2937 2937
2938 2938 user = relationship('User')
2939 2939 user_group = relationship('UserGroup')
2940 2940 permission = relationship('Permission')
2941 2941
2942 2942 @classmethod
2943 2943 def create(cls, user, user_group, permission):
2944 2944 n = cls()
2945 2945 n.user = user
2946 2946 n.user_group = user_group
2947 2947 n.permission = permission
2948 2948 Session().add(n)
2949 2949 return n
2950 2950
2951 2951 def __unicode__(self):
2952 2952 return u'<%s => %s >' % (self.user, self.user_group)
2953 2953
2954 2954
2955 2955 class UserToPerm(Base, BaseModel):
2956 2956 __tablename__ = 'user_to_perm'
2957 2957 __table_args__ = (
2958 2958 UniqueConstraint('user_id', 'permission_id'),
2959 2959 base_table_args
2960 2960 )
2961 2961
2962 2962 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2963 2963 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2964 2964 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2965 2965
2966 2966 user = relationship('User')
2967 2967 permission = relationship('Permission', lazy='joined')
2968 2968
2969 2969 def __unicode__(self):
2970 2970 return u'<%s => %s >' % (self.user, self.permission)
2971 2971
2972 2972
2973 2973 class UserGroupRepoToPerm(Base, BaseModel):
2974 2974 __tablename__ = 'users_group_repo_to_perm'
2975 2975 __table_args__ = (
2976 2976 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2977 2977 base_table_args
2978 2978 )
2979 2979
2980 2980 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2981 2981 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2982 2982 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2983 2983 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2984 2984
2985 2985 users_group = relationship('UserGroup')
2986 2986 permission = relationship('Permission')
2987 2987 repository = relationship('Repository')
2988 2988
2989 2989 @classmethod
2990 2990 def create(cls, users_group, repository, permission):
2991 2991 n = cls()
2992 2992 n.users_group = users_group
2993 2993 n.repository = repository
2994 2994 n.permission = permission
2995 2995 Session().add(n)
2996 2996 return n
2997 2997
2998 2998 def __unicode__(self):
2999 2999 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3000 3000
3001 3001
3002 3002 class UserGroupUserGroupToPerm(Base, BaseModel):
3003 3003 __tablename__ = 'user_group_user_group_to_perm'
3004 3004 __table_args__ = (
3005 3005 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
3006 3006 CheckConstraint('target_user_group_id != user_group_id'),
3007 3007 base_table_args
3008 3008 )
3009 3009
3010 3010 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3011 3011 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3012 3012 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3013 3013 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3014 3014
3015 3015 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
3016 3016 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
3017 3017 permission = relationship('Permission')
3018 3018
3019 3019 @classmethod
3020 3020 def create(cls, target_user_group, user_group, permission):
3021 3021 n = cls()
3022 3022 n.target_user_group = target_user_group
3023 3023 n.user_group = user_group
3024 3024 n.permission = permission
3025 3025 Session().add(n)
3026 3026 return n
3027 3027
3028 3028 def __unicode__(self):
3029 3029 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3030 3030
3031 3031
3032 3032 class UserGroupToPerm(Base, BaseModel):
3033 3033 __tablename__ = 'users_group_to_perm'
3034 3034 __table_args__ = (
3035 3035 UniqueConstraint('users_group_id', 'permission_id',),
3036 3036 base_table_args
3037 3037 )
3038 3038
3039 3039 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3040 3040 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3041 3041 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3042 3042
3043 3043 users_group = relationship('UserGroup')
3044 3044 permission = relationship('Permission')
3045 3045
3046 3046
3047 3047 class UserRepoGroupToPerm(Base, BaseModel):
3048 3048 __tablename__ = 'user_repo_group_to_perm'
3049 3049 __table_args__ = (
3050 3050 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3051 3051 base_table_args
3052 3052 )
3053 3053
3054 3054 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3055 3055 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3056 3056 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3057 3057 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3058 3058
3059 3059 user = relationship('User')
3060 3060 group = relationship('RepoGroup')
3061 3061 permission = relationship('Permission')
3062 3062
3063 3063 @classmethod
3064 3064 def create(cls, user, repository_group, permission):
3065 3065 n = cls()
3066 3066 n.user = user
3067 3067 n.group = repository_group
3068 3068 n.permission = permission
3069 3069 Session().add(n)
3070 3070 return n
3071 3071
3072 3072
3073 3073 class UserGroupRepoGroupToPerm(Base, BaseModel):
3074 3074 __tablename__ = 'users_group_repo_group_to_perm'
3075 3075 __table_args__ = (
3076 3076 UniqueConstraint('users_group_id', 'group_id'),
3077 3077 base_table_args
3078 3078 )
3079 3079
3080 3080 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3081 3081 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3082 3082 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3083 3083 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3084 3084
3085 3085 users_group = relationship('UserGroup')
3086 3086 permission = relationship('Permission')
3087 3087 group = relationship('RepoGroup')
3088 3088
3089 3089 @classmethod
3090 3090 def create(cls, user_group, repository_group, permission):
3091 3091 n = cls()
3092 3092 n.users_group = user_group
3093 3093 n.group = repository_group
3094 3094 n.permission = permission
3095 3095 Session().add(n)
3096 3096 return n
3097 3097
3098 3098 def __unicode__(self):
3099 3099 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3100 3100
3101 3101
3102 3102 class Statistics(Base, BaseModel):
3103 3103 __tablename__ = 'statistics'
3104 3104 __table_args__ = (
3105 3105 base_table_args
3106 3106 )
3107 3107
3108 3108 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3109 3109 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3110 3110 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3111 3111 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3112 3112 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3113 3113 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3114 3114
3115 3115 repository = relationship('Repository', single_parent=True)
3116 3116
3117 3117
3118 3118 class UserFollowing(Base, BaseModel):
3119 3119 __tablename__ = 'user_followings'
3120 3120 __table_args__ = (
3121 3121 UniqueConstraint('user_id', 'follows_repository_id'),
3122 3122 UniqueConstraint('user_id', 'follows_user_id'),
3123 3123 base_table_args
3124 3124 )
3125 3125
3126 3126 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3127 3127 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3128 3128 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3129 3129 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3130 3130 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3131 3131
3132 3132 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3133 3133
3134 3134 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3135 3135 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3136 3136
3137 3137 @classmethod
3138 3138 def get_repo_followers(cls, repo_id):
3139 3139 return cls.query().filter(cls.follows_repo_id == repo_id)
3140 3140
3141 3141
3142 3142 class CacheKey(Base, BaseModel):
3143 3143 __tablename__ = 'cache_invalidation'
3144 3144 __table_args__ = (
3145 3145 UniqueConstraint('cache_key'),
3146 3146 Index('key_idx', 'cache_key'),
3147 3147 base_table_args,
3148 3148 )
3149 3149
3150 3150 CACHE_TYPE_FEED = 'FEED'
3151 3151 CACHE_TYPE_README = 'README'
3152 3152 # namespaces used to register process/thread aware caches
3153 3153 REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
3154 3154 SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'
3155 3155
3156 3156 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3157 3157 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3158 3158 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3159 3159 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3160 3160
3161 3161 def __init__(self, cache_key, cache_args=''):
3162 3162 self.cache_key = cache_key
3163 3163 self.cache_args = cache_args
3164 3164 self.cache_active = False
3165 3165
3166 3166 def __unicode__(self):
3167 3167 return u"<%s('%s:%s[%s]')>" % (
3168 3168 self.__class__.__name__,
3169 3169 self.cache_id, self.cache_key, self.cache_active)
3170 3170
3171 3171 def _cache_key_partition(self):
3172 3172 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3173 3173 return prefix, repo_name, suffix
3174 3174
3175 3175 def get_prefix(self):
3176 3176 """
3177 3177 Try to extract prefix from existing cache key. The key could consist
3178 3178 of prefix, repo_name, suffix
3179 3179 """
3180 3180 # this returns prefix, repo_name, suffix
3181 3181 return self._cache_key_partition()[0]
3182 3182
3183 3183 def get_suffix(self):
3184 3184 """
3185 3185 get suffix that might have been used in _get_cache_key to
3186 3186 generate self.cache_key. Only used for informational purposes
3187 3187 in repo_edit.mako.
3188 3188 """
3189 3189 # prefix, repo_name, suffix
3190 3190 return self._cache_key_partition()[2]
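    # Illustrative sketch (not part of the original source), assuming a key
    # built as '<prefix><repo_name><suffix>' with cache_args holding the repo
    # name: for cache_key = u'short_term.some/repo_README' and
    # cache_args = u'some/repo', get_prefix() returns u'short_term.' and
    # get_suffix() returns u'_README'.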
3191 3191
3192 3192 @classmethod
3193 3193 def delete_all_cache(cls):
3194 3194 """
3195 3195 Delete all cache keys from database.
3196 3196 Should only be run when all instances are down and all entries
3197 3197 thus stale.
3198 3198 """
3199 3199 cls.query().delete()
3200 3200 Session().commit()
3201 3201
3202 3202 @classmethod
3203 3203 def set_invalidate(cls, cache_uid, delete=False):
3204 3204 """
3205 3205 Mark all caches of a repo as invalid in the database.
3206 3206 """
3207 3207
3208 3208 try:
3209 3209 qry = Session().query(cls).filter(cls.cache_args == cache_uid)
3210 3210 if delete:
3211 3211 qry.delete()
3212 3212 log.debug('cache objects deleted for cache args %s',
3213 3213 safe_str(cache_uid))
3214 3214 else:
3215 3215 qry.update({"cache_active": False})
3216 3216 log.debug('cache objects marked as invalid for cache args %s',
3217 3217 safe_str(cache_uid))
3218 3218
3219 3219 Session().commit()
3220 3220 except Exception:
3221 3221 log.exception(
3222 3222 'Cache key invalidation failed for cache args %s',
3223 3223 safe_str(cache_uid))
3224 3224 Session().rollback()
3225 3225
3226 3226 @classmethod
3227 3227 def get_active_cache(cls, cache_key):
3228 3228 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3229 3229 if inv_obj:
3230 3230 return inv_obj
3231 3231 return None
3232 3232
3233 3233
3234 3234 class ChangesetComment(Base, BaseModel):
3235 3235 __tablename__ = 'changeset_comments'
3236 3236 __table_args__ = (
3237 3237 Index('cc_revision_idx', 'revision'),
3238 3238 base_table_args,
3239 3239 )
3240 3240
3241 3241 COMMENT_OUTDATED = u'comment_outdated'
3242 3242 COMMENT_TYPE_NOTE = u'note'
3243 3243 COMMENT_TYPE_TODO = u'todo'
3244 3244 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3245 3245
3246 3246 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3247 3247 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3248 3248 revision = Column('revision', String(40), nullable=True)
3249 3249 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3250 3250 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3251 3251 line_no = Column('line_no', Unicode(10), nullable=True)
3252 3252 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3253 3253 f_path = Column('f_path', Unicode(1000), nullable=True)
3254 3254 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3255 3255 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3256 3256 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3257 3257 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3258 3258 renderer = Column('renderer', Unicode(64), nullable=True)
3259 3259 display_state = Column('display_state', Unicode(128), nullable=True)
3260 3260
3261 3261 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3262 3262 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3263 3263 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3264 3264 author = relationship('User', lazy='joined')
3265 3265 repo = relationship('Repository')
3266 3266 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3267 3267 pull_request = relationship('PullRequest', lazy='joined')
3268 3268 pull_request_version = relationship('PullRequestVersion')
3269 3269
3270 3270 @classmethod
3271 3271 def get_users(cls, revision=None, pull_request_id=None):
3272 3272 """
3273 3273 Returns users associated with this ChangesetComment, i.e. those
3274 3274 who actually commented
3275 3275
3276 3276 :param cls:
3277 3277 :param revision:
3278 3278 """
3279 3279 q = Session().query(User)\
3280 3280 .join(ChangesetComment.author)
3281 3281 if revision:
3282 3282 q = q.filter(cls.revision == revision)
3283 3283 elif pull_request_id:
3284 3284 q = q.filter(cls.pull_request_id == pull_request_id)
3285 3285 return q.all()
3286 3286
3287 3287 @classmethod
3288 3288 def get_index_from_version(cls, pr_version, versions):
3289 3289 num_versions = [x.pull_request_version_id for x in versions]
3290 3290 try:
3291 3291 return num_versions.index(pr_version) + 1
3292 3292 except (IndexError, ValueError):
3293 3293 return
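    # Hedged usage note (illustrative only): given versions ordered oldest to
    # newest, this returns the 1-based position of pr_version in that list,
    # or None when it is not present; e.g. with version ids [10, 12, 15],
    # pr_version=12 maps to index 2.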
3294 3294
3295 3295 @property
3296 3296 def outdated(self):
3297 3297 return self.display_state == self.COMMENT_OUTDATED
3298 3298
3299 3299 def outdated_at_version(self, version):
3300 3300 """
3301 3301 Checks if comment is outdated for given pull request version
3302 3302 """
3303 3303 return self.outdated and self.pull_request_version_id != version
3304 3304
3305 3305 def older_than_version(self, version):
3306 3306 """
3307 3307 Checks if the comment was made in an earlier version than the given one
3308 3308 """
3309 3309 if version is None:
3310 3310 return self.pull_request_version_id is not None
3311 3311
3312 3312 return self.pull_request_version_id < version
3313 3313
3314 3314 @property
3315 3315 def resolved(self):
3316 3316 return self.resolved_by[0] if self.resolved_by else None
3317 3317
3318 3318 @property
3319 3319 def is_todo(self):
3320 3320 return self.comment_type == self.COMMENT_TYPE_TODO
3321 3321
3322 3322 @property
3323 3323 def is_inline(self):
3324 3324 return self.line_no and self.f_path
3325 3325
3326 3326 def get_index_version(self, versions):
3327 3327 return self.get_index_from_version(
3328 3328 self.pull_request_version_id, versions)
3329 3329
3330 3330 def __repr__(self):
3331 3331 if self.comment_id:
3332 3332 return '<DB:Comment #%s>' % self.comment_id
3333 3333 else:
3334 3334 return '<DB:Comment at %#x>' % id(self)
3335 3335
3336 3336 def get_api_data(self):
3337 3337 comment = self
3338 3338 data = {
3339 3339 'comment_id': comment.comment_id,
3340 3340 'comment_type': comment.comment_type,
3341 3341 'comment_text': comment.text,
3342 3342 'comment_status': comment.status_change,
3343 3343 'comment_f_path': comment.f_path,
3344 3344 'comment_lineno': comment.line_no,
3345 3345 'comment_author': comment.author,
3346 3346 'comment_created_on': comment.created_on
3347 3347 }
3348 3348 return data
3349 3349
3350 3350 def __json__(self):
3351 3351 data = dict()
3352 3352 data.update(self.get_api_data())
3353 3353 return data
3354 3354
3355 3355
3356 3356 class ChangesetStatus(Base, BaseModel):
3357 3357 __tablename__ = 'changeset_statuses'
3358 3358 __table_args__ = (
3359 3359 Index('cs_revision_idx', 'revision'),
3360 3360 Index('cs_version_idx', 'version'),
3361 3361 UniqueConstraint('repo_id', 'revision', 'version'),
3362 3362 base_table_args
3363 3363 )
3364 3364
3365 3365 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3366 3366 STATUS_APPROVED = 'approved'
3367 3367 STATUS_REJECTED = 'rejected'
3368 3368 STATUS_UNDER_REVIEW = 'under_review'
3369 3369
3370 3370 STATUSES = [
3371 3371 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3372 3372 (STATUS_APPROVED, _("Approved")),
3373 3373 (STATUS_REJECTED, _("Rejected")),
3374 3374 (STATUS_UNDER_REVIEW, _("Under Review")),
3375 3375 ]
3376 3376
3377 3377 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3378 3378 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3379 3379 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3380 3380 revision = Column('revision', String(40), nullable=False)
3381 3381 status = Column('status', String(128), nullable=False, default=DEFAULT)
3382 3382 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3383 3383 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3384 3384 version = Column('version', Integer(), nullable=False, default=0)
3385 3385 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3386 3386
3387 3387 author = relationship('User', lazy='joined')
3388 3388 repo = relationship('Repository')
3389 3389 comment = relationship('ChangesetComment', lazy='joined')
3390 3390 pull_request = relationship('PullRequest', lazy='joined')
3391 3391
3392 3392 def __unicode__(self):
3393 3393 return u"<%s('%s[v%s]:%s')>" % (
3394 3394 self.__class__.__name__,
3395 3395 self.status, self.version, self.author
3396 3396 )
3397 3397
3398 3398 @classmethod
3399 3399 def get_status_lbl(cls, value):
3400 3400 return dict(cls.STATUSES).get(value)
3401 3401
3402 3402 @property
3403 3403 def status_lbl(self):
3404 3404 return ChangesetStatus.get_status_lbl(self.status)
3405 3405
3406 3406 def get_api_data(self):
3407 3407 status = self
3408 3408 data = {
3409 3409 'status_id': status.changeset_status_id,
3410 3410 'status': status.status,
3411 3411 }
3412 3412 return data
3413 3413
3414 3414 def __json__(self):
3415 3415 data = dict()
3416 3416 data.update(self.get_api_data())
3417 3417 return data
3418 3418
3419 3419
3420 3420 class _PullRequestBase(BaseModel):
3421 3421 """
3422 3422 Common attributes of pull request and version entries.
3423 3423 """
3424 3424
3425 3425 # .status values
3426 3426 STATUS_NEW = u'new'
3427 3427 STATUS_OPEN = u'open'
3428 3428 STATUS_CLOSED = u'closed'
3429 3429
3430 3430 title = Column('title', Unicode(255), nullable=True)
3431 3431 description = Column(
3432 3432 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3433 3433 nullable=True)
3434 3434 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3435 3435
3436 3436 # new/open/closed status of pull request (not approve/reject/etc)
3437 3437 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3438 3438 created_on = Column(
3439 3439 'created_on', DateTime(timezone=False), nullable=False,
3440 3440 default=datetime.datetime.now)
3441 3441 updated_on = Column(
3442 3442 'updated_on', DateTime(timezone=False), nullable=False,
3443 3443 default=datetime.datetime.now)
3444 3444
3445 3445 @declared_attr
3446 3446 def user_id(cls):
3447 3447 return Column(
3448 3448 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3449 3449 unique=None)
3450 3450
3451 3451 # 500 revisions max
3452 3452 _revisions = Column(
3453 3453 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3454 3454
3455 3455 @declared_attr
3456 3456 def source_repo_id(cls):
3457 3457 # TODO: dan: rename column to source_repo_id
3458 3458 return Column(
3459 3459 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3460 3460 nullable=False)
3461 3461
3462 3462 source_ref = Column('org_ref', Unicode(255), nullable=False)
3463 3463
3464 3464 @declared_attr
3465 3465 def target_repo_id(cls):
3466 3466 # TODO: dan: rename column to target_repo_id
3467 3467 return Column(
3468 3468 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3469 3469 nullable=False)
3470 3470
3471 3471 target_ref = Column('other_ref', Unicode(255), nullable=False)
3472 3472 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3473 3473
3474 3474 # TODO: dan: rename column to last_merge_source_rev
3475 3475 _last_merge_source_rev = Column(
3476 3476 'last_merge_org_rev', String(40), nullable=True)
3477 3477 # TODO: dan: rename column to last_merge_target_rev
3478 3478 _last_merge_target_rev = Column(
3479 3479 'last_merge_other_rev', String(40), nullable=True)
3480 3480 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3481 3481 merge_rev = Column('merge_rev', String(40), nullable=True)
3482 3482
3483 3483 reviewer_data = Column(
3484 3484 'reviewer_data_json', MutationObj.as_mutable(
3485 3485 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3486 3486
3487 3487 @property
3488 3488 def reviewer_data_json(self):
3489 3489 return json.dumps(self.reviewer_data)
3490 3490
3491 3491 @hybrid_property
3492 3492 def description_safe(self):
3493 3493 from rhodecode.lib import helpers as h
3494 3494 return h.escape(self.description)
3495 3495
3496 3496 @hybrid_property
3497 3497 def revisions(self):
3498 3498 return self._revisions.split(':') if self._revisions else []
3499 3499
3500 3500 @revisions.setter
3501 3501 def revisions(self, val):
3502 3502 self._revisions = ':'.join(val)
3503 3503
3504 3504 @hybrid_property
3505 3505 def last_merge_status(self):
3506 3506 return safe_int(self._last_merge_status)
3507 3507
3508 3508 @last_merge_status.setter
3509 3509 def last_merge_status(self, val):
3510 3510 self._last_merge_status = val
3511 3511
3512 3512 @declared_attr
3513 3513 def author(cls):
3514 3514 return relationship('User', lazy='joined')
3515 3515
3516 3516 @declared_attr
3517 3517 def source_repo(cls):
3518 3518 return relationship(
3519 3519 'Repository',
3520 3520 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3521 3521
3522 3522 @property
3523 3523 def source_ref_parts(self):
3524 3524 return self.unicode_to_reference(self.source_ref)
3525 3525
3526 3526 @declared_attr
3527 3527 def target_repo(cls):
3528 3528 return relationship(
3529 3529 'Repository',
3530 3530 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3531 3531
3532 3532 @property
3533 3533 def target_ref_parts(self):
3534 3534 return self.unicode_to_reference(self.target_ref)
3535 3535
3536 3536 @property
3537 3537 def shadow_merge_ref(self):
3538 3538 return self.unicode_to_reference(self._shadow_merge_ref)
3539 3539
3540 3540 @shadow_merge_ref.setter
3541 3541 def shadow_merge_ref(self, ref):
3542 3542 self._shadow_merge_ref = self.reference_to_unicode(ref)
3543 3543
3544 3544 def unicode_to_reference(self, raw):
3545 3545 """
3546 3546 Convert a unicode (or string) to a reference object.
3547 3547 If unicode evaluates to False it returns None.
3548 3548 """
3549 3549 if raw:
3550 3550 refs = raw.split(':')
3551 3551 return Reference(*refs)
3552 3552 else:
3553 3553 return None
3554 3554
3555 3555 def reference_to_unicode(self, ref):
3556 3556 """
3557 3557 Convert a reference object to unicode.
3558 3558 If reference is None it returns None.
3559 3559 """
3560 3560 if ref:
3561 3561 return u':'.join(ref)
3562 3562 else:
3563 3563 return None
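    # Illustrative sketch (not part of the original source): refs round-trip
    # through a 'type:name:commit_id' string, e.g.
    #   unicode_to_reference(u'branch:default:deadbeef')
    #     -> Reference('branch', 'default', 'deadbeef')
    #   reference_to_unicode(Reference('branch', 'default', 'deadbeef'))
    #     -> u'branch:default:deadbeef'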
3564 3564
3565 3565 def get_api_data(self, with_merge_state=True):
3566 3566 from rhodecode.model.pull_request import PullRequestModel
3567 3567
3568 3568 pull_request = self
3569 3569 if with_merge_state:
3570 3570 merge_status = PullRequestModel().merge_status(pull_request)
3571 3571 merge_state = {
3572 3572 'status': merge_status[0],
3573 3573 'message': safe_unicode(merge_status[1]),
3574 3574 }
3575 3575 else:
3576 3576 merge_state = {'status': 'not_available',
3577 3577 'message': 'not_available'}
3578 3578
3579 3579 merge_data = {
3580 3580 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3581 3581 'reference': (
3582 3582 pull_request.shadow_merge_ref._asdict()
3583 3583 if pull_request.shadow_merge_ref else None),
3584 3584 }
3585 3585
3586 3586 data = {
3587 3587 'pull_request_id': pull_request.pull_request_id,
3588 3588 'url': PullRequestModel().get_url(pull_request),
3589 3589 'title': pull_request.title,
3590 3590 'description': pull_request.description,
3591 3591 'status': pull_request.status,
3592 3592 'created_on': pull_request.created_on,
3593 3593 'updated_on': pull_request.updated_on,
3594 3594 'commit_ids': pull_request.revisions,
3595 3595 'review_status': pull_request.calculated_review_status(),
3596 3596 'mergeable': merge_state,
3597 3597 'source': {
3598 3598 'clone_url': pull_request.source_repo.clone_url(),
3599 3599 'repository': pull_request.source_repo.repo_name,
3600 3600 'reference': {
3601 3601 'name': pull_request.source_ref_parts.name,
3602 3602 'type': pull_request.source_ref_parts.type,
3603 3603 'commit_id': pull_request.source_ref_parts.commit_id,
3604 3604 },
3605 3605 },
3606 3606 'target': {
3607 3607 'clone_url': pull_request.target_repo.clone_url(),
3608 3608 'repository': pull_request.target_repo.repo_name,
3609 3609 'reference': {
3610 3610 'name': pull_request.target_ref_parts.name,
3611 3611 'type': pull_request.target_ref_parts.type,
3612 3612 'commit_id': pull_request.target_ref_parts.commit_id,
3613 3613 },
3614 3614 },
3615 3615 'merge': merge_data,
3616 3616 'author': pull_request.author.get_api_data(include_secrets=False,
3617 3617 details='basic'),
3618 3618 'reviewers': [
3619 3619 {
3620 3620 'user': reviewer.get_api_data(include_secrets=False,
3621 3621 details='basic'),
3622 3622 'reasons': reasons,
3623 3623 'review_status': st[0][1].status if st else 'not_reviewed',
3624 3624 }
3625 3625 for obj, reviewer, reasons, mandatory, st in
3626 3626 pull_request.reviewers_statuses()
3627 3627 ]
3628 3628 }
3629 3629
3630 3630 return data
3631 3631
3632 3632
3633 3633 class PullRequest(Base, _PullRequestBase):
3634 3634 __tablename__ = 'pull_requests'
3635 3635 __table_args__ = (
3636 3636 base_table_args,
3637 3637 )
3638 3638
3639 3639 pull_request_id = Column(
3640 3640 'pull_request_id', Integer(), nullable=False, primary_key=True)
3641 3641
3642 3642 def __repr__(self):
3643 3643 if self.pull_request_id:
3644 3644 return '<DB:PullRequest #%s>' % self.pull_request_id
3645 3645 else:
3646 3646 return '<DB:PullRequest at %#x>' % id(self)
3647 3647
3648 3648 reviewers = relationship('PullRequestReviewers',
3649 3649 cascade="all, delete, delete-orphan")
3650 3650 statuses = relationship('ChangesetStatus',
3651 3651 cascade="all, delete, delete-orphan")
3652 3652 comments = relationship('ChangesetComment',
3653 3653 cascade="all, delete, delete-orphan")
3654 3654 versions = relationship('PullRequestVersion',
3655 3655 cascade="all, delete, delete-orphan",
3656 3656 lazy='dynamic')
3657 3657
3658 3658 @classmethod
3659 3659 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3660 3660 internal_methods=None):
3661 3661
3662 3662 class PullRequestDisplay(object):
3663 3663 """
3664 3664 Special object wrapper for showing PullRequest data via Versions
3665 3665 It mimics PR object as close as possible. This is read only object
3666 3666 just for display
3667 3667 """
3668 3668
3669 3669 def __init__(self, attrs, internal=None):
3670 3670 self.attrs = attrs
3671 3671 # internal have priority over the given ones via attrs
3672 3672 self.internal = internal or ['versions']
3673 3673
3674 3674 def __getattr__(self, item):
3675 3675 if item in self.internal:
3676 3676 return getattr(self, item)
3677 3677 try:
3678 3678 return self.attrs[item]
3679 3679 except KeyError:
3680 3680 raise AttributeError(
3681 3681 '%s object has no attribute %s' % (self, item))
3682 3682
3683 3683 def __repr__(self):
3684 3684 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3685 3685
3686 3686 def versions(self):
3687 3687 return pull_request_obj.versions.order_by(
3688 3688 PullRequestVersion.pull_request_version_id).all()
3689 3689
3690 3690 def is_closed(self):
3691 3691 return pull_request_obj.is_closed()
3692 3692
3693 3693 @property
3694 3694 def pull_request_version_id(self):
3695 3695 return getattr(pull_request_obj, 'pull_request_version_id', None)
3696 3696
3697 3697 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3698 3698
3699 3699 attrs.author = StrictAttributeDict(
3700 3700 pull_request_obj.author.get_api_data())
3701 3701 if pull_request_obj.target_repo:
3702 3702 attrs.target_repo = StrictAttributeDict(
3703 3703 pull_request_obj.target_repo.get_api_data())
3704 3704 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3705 3705
3706 3706 if pull_request_obj.source_repo:
3707 3707 attrs.source_repo = StrictAttributeDict(
3708 3708 pull_request_obj.source_repo.get_api_data())
3709 3709 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3710 3710
3711 3711 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3712 3712 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3713 3713 attrs.revisions = pull_request_obj.revisions
3714 3714
3715 3715 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3716 3716 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3717 3717 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3718 3718
3719 3719 return PullRequestDisplay(attrs, internal=internal_methods)
3720 3720
3721 3721 def is_closed(self):
3722 3722 return self.status == self.STATUS_CLOSED
3723 3723
3724 3724 def __json__(self):
3725 3725 return {
3726 3726 'revisions': self.revisions,
3727 3727 }
3728 3728
3729 3729 def calculated_review_status(self):
3730 3730 from rhodecode.model.changeset_status import ChangesetStatusModel
3731 3731 return ChangesetStatusModel().calculated_review_status(self)
3732 3732
3733 3733 def reviewers_statuses(self):
3734 3734 from rhodecode.model.changeset_status import ChangesetStatusModel
3735 3735 return ChangesetStatusModel().reviewers_statuses(self)
3736 3736
3737 3737 @property
3738 3738 def workspace_id(self):
3739 3739 from rhodecode.model.pull_request import PullRequestModel
3740 3740 return PullRequestModel()._workspace_id(self)
3741 3741
3742 3742 def get_shadow_repo(self):
3743 3743 workspace_id = self.workspace_id
3744 3744 vcs_obj = self.target_repo.scm_instance()
3745 3745 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3746 3746 self.target_repo.repo_id, workspace_id)
3747 3747 if os.path.isdir(shadow_repository_path):
3748 3748 return vcs_obj._get_shadow_instance(shadow_repository_path)
3749 3749
3750 3750
3751 3751 class PullRequestVersion(Base, _PullRequestBase):
3752 3752 __tablename__ = 'pull_request_versions'
3753 3753 __table_args__ = (
3754 3754 base_table_args,
3755 3755 )
3756 3756
3757 3757 pull_request_version_id = Column(
3758 3758 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3759 3759 pull_request_id = Column(
3760 3760 'pull_request_id', Integer(),
3761 3761 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3762 3762 pull_request = relationship('PullRequest')
3763 3763
3764 3764 def __repr__(self):
3765 3765 if self.pull_request_version_id:
3766 3766 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3767 3767 else:
3768 3768 return '<DB:PullRequestVersion at %#x>' % id(self)
3769 3769
3770 3770 @property
3771 3771 def reviewers(self):
3772 3772 return self.pull_request.reviewers
3773 3773
3774 3774 @property
3775 3775 def versions(self):
3776 3776 return self.pull_request.versions
3777 3777
3778 3778 def is_closed(self):
3779 3779 # calculate from original
3780 3780 return self.pull_request.status == self.STATUS_CLOSED
3781 3781
3782 3782 def calculated_review_status(self):
3783 3783 return self.pull_request.calculated_review_status()
3784 3784
3785 3785 def reviewers_statuses(self):
3786 3786 return self.pull_request.reviewers_statuses()
3787 3787
3788 3788
3789 3789 class PullRequestReviewers(Base, BaseModel):
3790 3790 __tablename__ = 'pull_request_reviewers'
3791 3791 __table_args__ = (
3792 3792 base_table_args,
3793 3793 )
3794 3794
3795 3795 @hybrid_property
3796 3796 def reasons(self):
3797 3797 if not self._reasons:
3798 3798 return []
3799 3799 return self._reasons
3800 3800
3801 3801 @reasons.setter
3802 3802 def reasons(self, val):
3803 3803 val = val or []
3804 3804 if any(not isinstance(x, basestring) for x in val):
3805 3805 raise Exception('invalid reasons type, must be list of strings')
3806 3806 self._reasons = val
3807 3807
3808 3808 pull_requests_reviewers_id = Column(
3809 3809 'pull_requests_reviewers_id', Integer(), nullable=False,
3810 3810 primary_key=True)
3811 3811 pull_request_id = Column(
3812 3812 "pull_request_id", Integer(),
3813 3813 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3814 3814 user_id = Column(
3815 3815 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3816 3816 _reasons = Column(
3817 3817 'reason', MutationList.as_mutable(
3818 3818 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3819 3819
3820 3820 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3821 3821 user = relationship('User')
3822 3822 pull_request = relationship('PullRequest')
3823 3823
3824 3824 rule_data = Column(
3825 3825 'rule_data_json',
3826 3826 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3827 3827
3828 3828 def rule_user_group_data(self):
3829 3829 """
3830 3830 Returns the voting user group rule data for this reviewer
3831 3831 """
3832 3832
3833 3833 if self.rule_data and 'vote_rule' in self.rule_data:
3834 3834 user_group_data = {}
3835 3835 if 'rule_user_group_entry_id' in self.rule_data:
3836 3836 # means a group with voting rules!
3837 3837 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3838 3838 user_group_data['name'] = self.rule_data['rule_name']
3839 3839 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3840 3840
3841 3841 return user_group_data
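    # Hedged example (illustrative only): for a reviewer added through a user
    # group with a voting rule, rule_data might look roughly like
    #   {'vote_rule': -1, 'rule_name': 'default',
    #    'rule_user_group_entry_id': 2}
    # in which case rule_user_group_data() returns the id/name/vote_rule
    # subset; when 'vote_rule' is absent from rule_data it returns None.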
3842 3842
3843 3843 def __unicode__(self):
3844 3844 return u"<%s('id:%s')>" % (self.__class__.__name__,
3845 3845 self.pull_requests_reviewers_id)
3846 3846
3847 3847
3848 3848 class Notification(Base, BaseModel):
3849 3849 __tablename__ = 'notifications'
3850 3850 __table_args__ = (
3851 3851 Index('notification_type_idx', 'type'),
3852 3852 base_table_args,
3853 3853 )
3854 3854
3855 3855 TYPE_CHANGESET_COMMENT = u'cs_comment'
3856 3856 TYPE_MESSAGE = u'message'
3857 3857 TYPE_MENTION = u'mention'
3858 3858 TYPE_REGISTRATION = u'registration'
3859 3859 TYPE_PULL_REQUEST = u'pull_request'
3860 3860 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3861 3861
3862 3862 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3863 3863 subject = Column('subject', Unicode(512), nullable=True)
3864 3864 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3865 3865 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3866 3866 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3867 3867 type_ = Column('type', Unicode(255))
3868 3868
3869 3869 created_by_user = relationship('User')
3870 3870 notifications_to_users = relationship('UserNotification', lazy='joined',
3871 3871 cascade="all, delete, delete-orphan")
3872 3872
3873 3873 @property
3874 3874 def recipients(self):
3875 3875 return [x.user for x in UserNotification.query()\
3876 3876 .filter(UserNotification.notification == self)\
3877 3877 .order_by(UserNotification.user_id.asc()).all()]
3878 3878
3879 3879 @classmethod
3880 3880 def create(cls, created_by, subject, body, recipients, type_=None):
3881 3881 if type_ is None:
3882 3882 type_ = Notification.TYPE_MESSAGE
3883 3883
3884 3884 notification = cls()
3885 3885 notification.created_by_user = created_by
3886 3886 notification.subject = subject
3887 3887 notification.body = body
3888 3888 notification.type_ = type_
3889 3889 notification.created_on = datetime.datetime.now()
3890 3890
3891 3891 # For each recipient link the created notification to his account
3892 3892 for u in recipients:
3893 3893 assoc = UserNotification()
3894 3894 assoc.user_id = u.user_id
3895 3895 assoc.notification = notification
3896 3896
3897 3897 # if created_by is inside recipients mark his notification
3898 3898 # as read
3899 3899 if u.user_id == created_by.user_id:
3900 3900 assoc.read = True
3901 3901 Session().add(assoc)
3902 3902
3903 3903 Session().add(notification)
3904 3904
3905 3905 return notification
3906 3906
3907 3907
3908 3908 class UserNotification(Base, BaseModel):
3909 3909 __tablename__ = 'user_to_notification'
3910 3910 __table_args__ = (
3911 3911 UniqueConstraint('user_id', 'notification_id'),
3912 3912 base_table_args
3913 3913 )
3914 3914
3915 3915 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3916 3916 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3917 3917 read = Column('read', Boolean, default=False)
3918 3918 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3919 3919
3920 3920 user = relationship('User', lazy="joined")
3921 3921 notification = relationship('Notification', lazy="joined",
3922 3922 order_by=lambda: Notification.created_on.desc(),)
3923 3923
3924 3924 def mark_as_read(self):
3925 3925 self.read = True
3926 3926 Session().add(self)
3927 3927
3928 3928
3929 3929 class Gist(Base, BaseModel):
3930 3930 __tablename__ = 'gists'
3931 3931 __table_args__ = (
3932 3932 Index('g_gist_access_id_idx', 'gist_access_id'),
3933 3933 Index('g_created_on_idx', 'created_on'),
3934 3934 base_table_args
3935 3935 )
3936 3936
3937 3937 GIST_PUBLIC = u'public'
3938 3938 GIST_PRIVATE = u'private'
3939 3939 DEFAULT_FILENAME = u'gistfile1.txt'
3940 3940
3941 3941 ACL_LEVEL_PUBLIC = u'acl_public'
3942 3942 ACL_LEVEL_PRIVATE = u'acl_private'
3943 3943
3944 3944 gist_id = Column('gist_id', Integer(), primary_key=True)
3945 3945 gist_access_id = Column('gist_access_id', Unicode(250))
3946 3946 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3947 3947 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3948 3948 gist_expires = Column('gist_expires', Float(53), nullable=False)
3949 3949 gist_type = Column('gist_type', Unicode(128), nullable=False)
3950 3950 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3951 3951 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3952 3952 acl_level = Column('acl_level', Unicode(128), nullable=True)
3953 3953
3954 3954 owner = relationship('User')
3955 3955
3956 3956 def __repr__(self):
3957 3957 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3958 3958
3959 3959 @hybrid_property
3960 3960 def description_safe(self):
3961 3961 from rhodecode.lib import helpers as h
3962 3962 return h.escape(self.gist_description)
3963 3963
3964 3964 @classmethod
3965 3965 def get_or_404(cls, id_):
3966 3966 from pyramid.httpexceptions import HTTPNotFound
3967 3967
3968 3968 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3969 3969 if not res:
3970 3970 raise HTTPNotFound()
3971 3971 return res
3972 3972
3973 3973 @classmethod
3974 3974 def get_by_access_id(cls, gist_access_id):
3975 3975 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3976 3976
3977 3977 def gist_url(self):
3978 3978 from rhodecode.model.gist import GistModel
3979 3979 return GistModel().get_url(self)
3980 3980
3981 3981 @classmethod
3982 3982 def base_path(cls):
3983 3983 """
3984 3984 Returns base path where all gists are stored
3985 3985
3986 3986 :param cls:
3987 3987 """
3988 3988 from rhodecode.model.gist import GIST_STORE_LOC
3989 3989 q = Session().query(RhodeCodeUi)\
3990 3990 .filter(RhodeCodeUi.ui_key == URL_SEP)
3991 3991 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3992 3992 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3993 3993
3994 3994 def get_api_data(self):
3995 3995 """
3996 3996 Common function for generating gist related data for API
3997 3997 """
3998 3998 gist = self
3999 3999 data = {
4000 4000 'gist_id': gist.gist_id,
4001 4001 'type': gist.gist_type,
4002 4002 'access_id': gist.gist_access_id,
4003 4003 'description': gist.gist_description,
4004 4004 'url': gist.gist_url(),
4005 4005 'expires': gist.gist_expires,
4006 4006 'created_on': gist.created_on,
4007 4007 'modified_at': gist.modified_at,
4008 4008 'content': None,
4009 4009 'acl_level': gist.acl_level,
4010 4010 }
4011 4011 return data
4012 4012
4013 4013 def __json__(self):
4014 4014 data = dict(
4015 4015 )
4016 4016 data.update(self.get_api_data())
4017 4017 return data
4018 4018 # SCM functions
4019 4019
4020 4020 def scm_instance(self, **kwargs):
4021 4021 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4022 4022 return get_vcs_instance(
4023 4023 repo_path=safe_str(full_repo_path), create=False)
4024 4024
4025 4025
4026 4026 class ExternalIdentity(Base, BaseModel):
4027 4027 __tablename__ = 'external_identities'
4028 4028 __table_args__ = (
4029 4029 Index('local_user_id_idx', 'local_user_id'),
4030 4030 Index('external_id_idx', 'external_id'),
4031 4031 base_table_args
4032 4032 )
4033 4033
4034 4034 external_id = Column('external_id', Unicode(255), default=u'',
4035 4035 primary_key=True)
4036 4036 external_username = Column('external_username', Unicode(1024), default=u'')
4037 4037 local_user_id = Column('local_user_id', Integer(),
4038 4038 ForeignKey('users.user_id'), primary_key=True)
4039 4039 provider_name = Column('provider_name', Unicode(255), default=u'',
4040 4040 primary_key=True)
4041 4041 access_token = Column('access_token', String(1024), default=u'')
4042 4042 alt_token = Column('alt_token', String(1024), default=u'')
4043 4043 token_secret = Column('token_secret', String(1024), default=u'')
4044 4044
4045 4045 @classmethod
4046 4046 def by_external_id_and_provider(cls, external_id, provider_name,
4047 4047 local_user_id=None):
4048 4048 """
4049 4049 Returns ExternalIdentity instance based on search params
4050 4050
4051 4051 :param external_id:
4052 4052 :param provider_name:
4053 4053 :return: ExternalIdentity
4054 4054 """
4055 4055 query = cls.query()
4056 4056 query = query.filter(cls.external_id == external_id)
4057 4057 query = query.filter(cls.provider_name == provider_name)
4058 4058 if local_user_id:
4059 4059 query = query.filter(cls.local_user_id == local_user_id)
4060 4060 return query.first()
4061 4061
4062 4062 @classmethod
4063 4063 def user_by_external_id_and_provider(cls, external_id, provider_name):
4064 4064 """
4065 4065 Returns User instance based on search params
4066 4066
4067 4067 :param external_id:
4068 4068 :param provider_name:
4069 4069 :return: User
4070 4070 """
4071 4071 query = User.query()
4072 4072 query = query.filter(cls.external_id == external_id)
4073 4073 query = query.filter(cls.provider_name == provider_name)
4074 4074 query = query.filter(User.user_id == cls.local_user_id)
4075 4075 return query.first()
4076 4076
4077 4077 @classmethod
4078 4078 def by_local_user_id(cls, local_user_id):
4079 4079 """
4080 4080 Returns all tokens for user
4081 4081
4082 4082 :param local_user_id:
4083 4083 :return: ExternalIdentity
4084 4084 """
4085 4085 query = cls.query()
4086 4086 query = query.filter(cls.local_user_id == local_user_id)
4087 4087 return query
4088 4088
4089 4089
4090 4090 class Integration(Base, BaseModel):
4091 4091 __tablename__ = 'integrations'
4092 4092 __table_args__ = (
4093 4093 base_table_args
4094 4094 )
4095 4095
4096 4096 integration_id = Column('integration_id', Integer(), primary_key=True)
4097 4097 integration_type = Column('integration_type', String(255))
4098 4098 enabled = Column('enabled', Boolean(), nullable=False)
4099 4099 name = Column('name', String(255), nullable=False)
4100 4100 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4101 4101 default=False)
4102 4102
4103 4103 settings = Column(
4104 4104 'settings_json', MutationObj.as_mutable(
4105 4105 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4106 4106 repo_id = Column(
4107 4107 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4108 4108 nullable=True, unique=None, default=None)
4109 4109 repo = relationship('Repository', lazy='joined')
4110 4110
4111 4111 repo_group_id = Column(
4112 4112 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4113 4113 nullable=True, unique=None, default=None)
4114 4114 repo_group = relationship('RepoGroup', lazy='joined')
4115 4115
4116 4116 @property
4117 4117 def scope(self):
4118 4118 if self.repo:
4119 4119 return repr(self.repo)
4120 4120 if self.repo_group:
4121 4121 if self.child_repos_only:
4122 4122 return repr(self.repo_group) + ' (child repos only)'
4123 4123 else:
4124 4124 return repr(self.repo_group) + ' (recursive)'
4125 4125 if self.child_repos_only:
4126 4126 return 'root_repos'
4127 4127 return 'global'
4128 4128
4129 4129 def __repr__(self):
4130 4130 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4131 4131
4132 4132
4133 4133 class RepoReviewRuleUser(Base, BaseModel):
4134 4134 __tablename__ = 'repo_review_rules_users'
4135 4135 __table_args__ = (
4136 4136 base_table_args
4137 4137 )
4138 4138
4139 4139 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4140 4140 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4141 4141 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4142 4142 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4143 4143 user = relationship('User')
4144 4144
4145 4145 def rule_data(self):
4146 4146 return {
4147 4147 'mandatory': self.mandatory
4148 4148 }
4149 4149
4150 4150
4151 4151 class RepoReviewRuleUserGroup(Base, BaseModel):
4152 4152 __tablename__ = 'repo_review_rules_users_groups'
4153 4153 __table_args__ = (
4154 4154 base_table_args
4155 4155 )
4156 4156
4157 4157 VOTE_RULE_ALL = -1
4158 4158
4159 4159 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4160 4160 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4161 4161 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4162 4162 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4163 4163 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4164 4164 users_group = relationship('UserGroup')
4165 4165
4166 4166 def rule_data(self):
4167 4167 return {
4168 4168 'mandatory': self.mandatory,
4169 4169 'vote_rule': self.vote_rule
4170 4170 }
4171 4171
4172 4172 @property
4173 4173 def vote_rule_label(self):
4174 4174 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4175 4175 return 'all must vote'
4176 4176 else:
4177 4177 return 'min. vote {}'.format(self.vote_rule)
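    # Hedged example (illustrative only): vote_rule = -1 (VOTE_RULE_ALL)
    # labels as 'all must vote', i.e. every active member of the group is
    # expected to vote, while e.g. vote_rule = 2 labels as 'min. vote 2',
    # i.e. at least two members' votes are required.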
4178 4178
4179 4179
4180 4180 class RepoReviewRule(Base, BaseModel):
4181 4181 __tablename__ = 'repo_review_rules'
4182 4182 __table_args__ = (
4183 4183 base_table_args
4184 4184 )
4185 4185
4186 4186 repo_review_rule_id = Column(
4187 4187 'repo_review_rule_id', Integer(), primary_key=True)
4188 4188 repo_id = Column(
4189 4189 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4190 4190 repo = relationship('Repository', backref='review_rules')
4191 4191
4192 4192 review_rule_name = Column('review_rule_name', String(255))
4193 4193 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4194 4194 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4195 4195 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4196 4196
4197 4197 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4198 4198 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4199 4199 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4200 4200 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4201 4201
4202 4202 rule_users = relationship('RepoReviewRuleUser')
4203 4203 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4204 4204
4205 4205 def _validate_pattern(self, value):
4206 4206 re.compile('^' + glob2re(value) + '$')
4207 4207
4208 4208 @hybrid_property
4209 4209 def source_branch_pattern(self):
4210 4210 return self._branch_pattern or '*'
4211 4211
4212 4212 @source_branch_pattern.setter
4213 4213 def source_branch_pattern(self, value):
4214 4214 self._validate_pattern(value)
4215 4215 self._branch_pattern = value or '*'
4216 4216
4217 4217 @hybrid_property
4218 4218 def target_branch_pattern(self):
4219 4219 return self._target_branch_pattern or '*'
4220 4220
4221 4221 @target_branch_pattern.setter
4222 4222 def target_branch_pattern(self, value):
4223 4223 self._validate_pattern(value)
4224 4224 self._target_branch_pattern = value or '*'
4225 4225
4226 4226 @hybrid_property
4227 4227 def file_pattern(self):
4228 4228 return self._file_pattern or '*'
4229 4229
4230 4230 @file_pattern.setter
4231 4231 def file_pattern(self, value):
4232 4232 self._validate_pattern(value)
4233 4233 self._file_pattern = value or '*'
4234 4234
4235 4235 def matches(self, source_branch, target_branch, files_changed):
4236 4236 """
4237 4237 Check if this review rule matches a branch/files in a pull request
4238 4238
4239 4239 :param source_branch: source branch name for the commit
4240 4240 :param target_branch: target branch name for the commit
4241 4241 :param files_changed: list of file paths changed in the pull request
4242 4242 """
4243 4243
4244 4244 source_branch = source_branch or ''
4245 4245 target_branch = target_branch or ''
4246 4246 files_changed = files_changed or []
4247 4247
4248 4248 branch_matches = True
4249 4249 if source_branch or target_branch:
4250 4250 if self.source_branch_pattern == '*':
4251 4251 source_branch_match = True
4252 4252 else:
4253 4253 if self.source_branch_pattern.startswith('re:'):
4254 4254 source_pattern = self.source_branch_pattern[3:]
4255 4255 else:
4256 4256 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
4257 4257 source_branch_regex = re.compile(source_pattern)
4258 4258 source_branch_match = bool(source_branch_regex.search(source_branch))
4259 4259 if self.target_branch_pattern == '*':
4260 4260 target_branch_match = True
4261 4261 else:
4262 4262 if self.target_branch_pattern.startswith('re:'):
4263 4263 target_pattern = self.target_branch_pattern[3:]
4264 4264 else:
4265 4265 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
4266 4266 target_branch_regex = re.compile(target_pattern)
4267 4267 target_branch_match = bool(target_branch_regex.search(target_branch))
4268 4268
4269 4269 branch_matches = source_branch_match and target_branch_match
4270 4270
4271 4271 files_matches = True
4272 4272 if self.file_pattern != '*':
4273 4273 files_matches = False
4274 4274 if self.file_pattern.startswith('re:'):
4275 4275 file_pattern = self.file_pattern[3:]
4276 4276 else:
4277 4277 file_pattern = glob2re(self.file_pattern)
4278 4278 file_regex = re.compile(file_pattern)
4279 4279 for filename in files_changed:
4280 4280 if file_regex.search(filename):
4281 4281 files_matches = True
4282 4282 break
4283 4283
4284 4284 return branch_matches and files_matches
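    # Illustrative sketch (not part of the original source): patterns are
    # globs by default and regexes when prefixed with 're:'. For a rule
    # with source_branch_pattern='feature/*', target_branch_pattern='*'
    # and file_pattern='re:.*\.py$', a call such as
    #   rule.matches('feature/login', 'default', ['rhodecode/model/db.py'])
    # evaluates to True, while a changed-file list without any '.py'
    # entry makes it False.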
4285 4285
4286 4286 @property
4287 4287 def review_users(self):
4288 4288 """ Returns the users which this rule applies to """
4289 4289
4290 4290 users = collections.OrderedDict()
4291 4291
4292 4292 for rule_user in self.rule_users:
4293 4293 if rule_user.user.active:
4294 4294 if rule_user.user not in users:
4295 4295 users[rule_user.user.username] = {
4296 4296 'user': rule_user.user,
4297 4297 'source': 'user',
4298 4298 'source_data': {},
4299 4299 'data': rule_user.rule_data()
4300 4300 }
4301 4301
4302 4302 for rule_user_group in self.rule_user_groups:
4303 4303 source_data = {
4304 4304 'user_group_id': rule_user_group.users_group.users_group_id,
4305 4305 'name': rule_user_group.users_group.users_group_name,
4306 4306 'members': len(rule_user_group.users_group.members)
4307 4307 }
4308 4308 for member in rule_user_group.users_group.members:
4309 4309 if member.user.active:
4310 4310 key = member.user.username
4311 4311 if key in users:
4312 4312 # skip this member as we already have them;
4313 4313 # this prevents overriding the "first" matched
4314 4314 # users with duplicates from multiple groups
4315 4315 continue
4316 4316
4317 4317 users[key] = {
4318 4318 'user': member.user,
4319 4319 'source': 'user_group',
4320 4320 'source_data': source_data,
4321 4321 'data': rule_user_group.rule_data()
4322 4322 }
4323 4323
4324 4324 return users
4325 4325
4326 def user_group_vote_rule(self):
4326 def user_group_vote_rule(self, user_id):
4327
4327 4328 rules = []
4328 if self.rule_user_groups:
4329 if not self.rule_user_groups:
4330 return rules
4331
4329 4332 for user_group in self.rule_user_groups:
4333 user_group_members = [x.user_id for x in user_group.users_group.members]
4334 if user_id in user_group_members:
4330 4335 rules.append(user_group)
4331 4336 return rules
4332 4337
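The change to user_group_vote_rule() above makes the rule return only those user-group entries whose group actually contains the given reviewer. A rough illustration of that membership filter, using hypothetical plain dictionaries in place of the ORM objects:

rule_user_groups = [
    {'group': 'backend-devs', 'member_ids': [2, 3, 5], 'vote_rule': -1},
    {'group': 'frontend-devs', 'member_ids': [7, 8], 'vote_rule': 1},
]

def user_group_vote_rule(user_id, groups=rule_user_groups):
    # keep only the group entries the given user is actually a member of
    return [entry for entry in groups if user_id in entry['member_ids']]

# user 3 is only in 'backend-devs', so only that entry (and its vote_rule) applies
assert [e['group'] for e in user_group_vote_rule(3)] == ['backend-devs']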
4333 4338 def __repr__(self):
4334 4339 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4335 4340 self.repo_review_rule_id, self.repo)
4336 4341
4337 4342
4338 4343 class ScheduleEntry(Base, BaseModel):
4339 4344 __tablename__ = 'schedule_entries'
4340 4345 __table_args__ = (
4341 4346 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4342 4347 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4343 4348 base_table_args,
4344 4349 )
4345 4350
4346 4351 schedule_types = ['crontab', 'timedelta', 'integer']
4347 4352 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4348 4353
4349 4354 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4350 4355 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4351 4356 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4352 4357
4353 4358 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4354 4359 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4355 4360
4356 4361 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4357 4362 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4358 4363
4359 4364 # task
4360 4365 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4361 4366 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4362 4367 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4363 4368 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4364 4369
4365 4370 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4366 4371 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4367 4372
4368 4373 @hybrid_property
4369 4374 def schedule_type(self):
4370 4375 return self._schedule_type
4371 4376
4372 4377 @schedule_type.setter
4373 4378 def schedule_type(self, val):
4374 4379 if val not in self.schedule_types:
4375 4380 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4376 4381 self.schedule_types, val))
4377 4382
4378 4383 self._schedule_type = val
4379 4384
4380 4385 @classmethod
4381 4386 def get_uid(cls, obj):
4382 4387 args = obj.task_args
4383 4388 kwargs = obj.task_kwargs
4384 4389 if isinstance(args, JsonRaw):
4385 4390 try:
4386 4391 args = json.loads(args)
4387 4392 except ValueError:
4388 4393 args = tuple()
4389 4394
4390 4395 if isinstance(kwargs, JsonRaw):
4391 4396 try:
4392 4397 kwargs = json.loads(kwargs)
4393 4398 except ValueError:
4394 4399 kwargs = dict()
4395 4400
4396 4401 dot_notation = obj.task_dot_notation
4397 4402 val = '.'.join(map(safe_str, [
4398 4403 sorted(dot_notation), args, sorted(kwargs.items())]))
4399 4404 return hashlib.sha1(val).hexdigest()
4400 4405
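get_uid() above fingerprints a schedule entry by hashing its task dot-notation path together with the task args and kwargs. A simplified standalone sketch of the same idea (Python 3 here, so the string is encoded before hashing; note that sorted() over the dot-notation string sorts its characters, mirroring the original):

import hashlib

def task_fingerprint(task_dot_notation, args, kwargs):
    # build a stable string representation of the task and hash it
    val = '.'.join(map(str, [
        sorted(task_dot_notation), args, sorted(kwargs.items())]))
    return hashlib.sha1(val.encode('utf-8')).hexdigest()

# e.g. task_fingerprint('myapp.tasks.send_email', ('subject',), {'retry': True})
# (the task path here is made up, purely for illustration)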
4401 4406 @classmethod
4402 4407 def get_by_schedule_name(cls, schedule_name):
4403 4408 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4404 4409
4405 4410 @classmethod
4406 4411 def get_by_schedule_id(cls, schedule_id):
4407 4412 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4408 4413
4409 4414 @property
4410 4415 def task(self):
4411 4416 return self.task_dot_notation
4412 4417
4413 4418 @property
4414 4419 def schedule(self):
4415 4420 from rhodecode.lib.celerylib.utils import raw_2_schedule
4416 4421 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4417 4422 return schedule
4418 4423
4419 4424 @property
4420 4425 def args(self):
4421 4426 try:
4422 4427 return list(self.task_args or [])
4423 4428 except ValueError:
4424 4429 return list()
4425 4430
4426 4431 @property
4427 4432 def kwargs(self):
4428 4433 try:
4429 4434 return dict(self.task_kwargs or {})
4430 4435 except ValueError:
4431 4436 return dict()
4432 4437
4433 4438 def _as_raw(self, val):
4434 4439 if hasattr(val, 'de_coerce'):
4435 4440 val = val.de_coerce()
4436 4441 if val:
4437 4442 val = json.dumps(val)
4438 4443
4439 4444 return val
4440 4445
4441 4446 @property
4442 4447 def schedule_definition_raw(self):
4443 4448 return self._as_raw(self.schedule_definition)
4444 4449
4445 4450 @property
4446 4451 def args_raw(self):
4447 4452 return self._as_raw(self.task_args)
4448 4453
4449 4454 @property
4450 4455 def kwargs_raw(self):
4451 4456 return self._as_raw(self.task_kwargs)
4452 4457
4453 4458 def __repr__(self):
4454 4459 return '<DB:ScheduleEntry({}:{})>'.format(
4455 4460 self.schedule_entry_id, self.schedule_name)
4456 4461
4457 4462
4458 4463 @event.listens_for(ScheduleEntry, 'before_update')
4459 4464 def update_task_uid(mapper, connection, target):
4460 4465 target.task_uid = ScheduleEntry.get_uid(target)
4461 4466
4462 4467
4463 4468 @event.listens_for(ScheduleEntry, 'before_insert')
4464 4469 def set_task_uid(mapper, connection, target):
4465 4470 target.task_uid = ScheduleEntry.get_uid(target)
4466 4471
4467 4472
4468 4473 class DbMigrateVersion(Base, BaseModel):
4469 4474 __tablename__ = 'db_migrate_version'
4470 4475 __table_args__ = (
4471 4476 base_table_args,
4472 4477 )
4473 4478
4474 4479 repository_id = Column('repository_id', String(250), primary_key=True)
4475 4480 repository_path = Column('repository_path', Text)
4476 4481 version = Column('version', Integer)
4477 4482
4478 4483 @classmethod
4479 4484 def set_version(cls, version):
4480 4485 """
4481 4486 Helper for forcing a different version, usually for debugging purposes via ishell.
4482 4487 """
4483 4488 ver = DbMigrateVersion.query().first()
4484 4489 ver.version = version
4485 4490 Session().commit()
4486 4491
4487 4492
4488 4493 class DbSession(Base, BaseModel):
4489 4494 __tablename__ = 'db_session'
4490 4495 __table_args__ = (
4491 4496 base_table_args,
4492 4497 )
4493 4498
4494 4499 def __repr__(self):
4495 4500 return '<DB:DbSession({})>'.format(self.id)
4496 4501
4497 4502 id = Column('id', Integer())
4498 4503 namespace = Column('namespace', String(255), primary_key=True)
4499 4504 accessed = Column('accessed', DateTime, nullable=False)
4500 4505 created = Column('created', DateTime, nullable=False)
4501 4506 data = Column('data', PickleType, nullable=False)
4502 4507
4503 4508
4504 4509 class BeakerCache(Base, BaseModel):
4505 4510 __tablename__ = 'beaker_cache'
4506 4511 __table_args__ = (
4507 4512 base_table_args,
4508 4513 )
4509 4514
4510 4515 def __repr__(self):
4511 4516 return '<DB:BeakerCache({})>'.format(self.id)
4512 4517
4513 4518 id = Column('id', Integer())
4514 4519 namespace = Column('namespace', String(255), primary_key=True)
4515 4520 accessed = Column('accessed', DateTime, nullable=False)
4516 4521 created = Column('created', DateTime, nullable=False)
4517 4522 data = Column('data', PickleType, nullable=False)
@@ -1,1704 +1,1707 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid.threadlocal import get_current_request
34 34
35 35 from rhodecode import events
36 36 from rhodecode.translation import lazy_ugettext#, _
37 37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 38 from rhodecode.lib import audit_logger
39 39 from rhodecode.lib.compat import OrderedDict
40 40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 41 from rhodecode.lib.markup_renderer import (
42 42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 44 from rhodecode.lib.vcs.backends.base import (
45 45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 47 from rhodecode.lib.vcs.exceptions import (
48 48 CommitDoesNotExistError, EmptyRepositoryError)
49 49 from rhodecode.model import BaseModel
50 50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 51 from rhodecode.model.comment import CommentsModel
52 52 from rhodecode.model.db import (
53 53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 55 from rhodecode.model.meta import Session
56 56 from rhodecode.model.notification import NotificationModel, \
57 57 EmailNotificationModel
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64
65 65 # Data structure to hold the response data when updating commits during a pull
66 66 # request update.
67 67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 68 'executed', 'reason', 'new', 'old', 'changes',
69 69 'source_changed', 'target_changed'])
70 70
71 71
72 72 class PullRequestModel(BaseModel):
73 73
74 74 cls = PullRequest
75 75
76 76 DIFF_CONTEXT = 3
77 77
78 78 MERGE_STATUS_MESSAGES = {
79 79 MergeFailureReason.NONE: lazy_ugettext(
80 80 'This pull request can be automatically merged.'),
81 81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 82 'This pull request cannot be merged because of an unhandled'
83 83 ' exception.'),
84 84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 85 'This pull request cannot be merged because of merge conflicts.'),
86 86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 87 'This pull request could not be merged because push to target'
88 88 ' failed.'),
89 89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 90 'This pull request cannot be merged because the target is not a'
91 91 ' head.'),
92 92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 93 'This pull request cannot be merged because the source contains'
94 94 ' more branches than the target.'),
95 95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 96 'This pull request cannot be merged because the target has'
97 97 ' multiple heads.'),
98 98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 99 'This pull request cannot be merged because the target repository'
100 100 ' is locked.'),
101 101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 102 'This pull request cannot be merged because the target or the '
103 103 'source reference is missing.'),
104 104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 105 'This pull request cannot be merged because the target '
106 106 'reference is missing.'),
107 107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 108 'This pull request cannot be merged because the source '
109 109 'reference is missing.'),
110 110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 111 'This pull request cannot be merged because of conflicts related '
112 112 'to sub repositories.'),
113 113 }
114 114
115 115 UPDATE_STATUS_MESSAGES = {
116 116 UpdateFailureReason.NONE: lazy_ugettext(
117 117 'Pull request update successful.'),
118 118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 119 'Pull request update failed because of an unknown error.'),
120 120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 121 'No update needed because the source and target have not changed.'),
122 122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 123 'Pull request cannot be updated because the reference type is '
124 124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 126 'This pull request cannot be updated because the target '
127 127 'reference is missing.'),
128 128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 129 'This pull request cannot be updated because the source '
130 130 'reference is missing.'),
131 131 }
132 132
133 133 def __get_pull_request(self, pull_request):
134 134 return self._get_instance((
135 135 PullRequest, PullRequestVersion), pull_request)
136 136
137 137 def _check_perms(self, perms, pull_request, user, api=False):
138 138 if not api:
139 139 return h.HasRepoPermissionAny(*perms)(
140 140 user=user, repo_name=pull_request.target_repo.repo_name)
141 141 else:
142 142 return h.HasRepoPermissionAnyApi(*perms)(
143 143 user=user, repo_name=pull_request.target_repo.repo_name)
144 144
145 145 def check_user_read(self, pull_request, user, api=False):
146 146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 147 return self._check_perms(_perms, pull_request, user, api)
148 148
149 149 def check_user_merge(self, pull_request, user, api=False):
150 150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 151 return self._check_perms(_perms, pull_request, user, api)
152 152
153 153 def check_user_update(self, pull_request, user, api=False):
154 154 owner = user.user_id == pull_request.user_id
155 155 return self.check_user_merge(pull_request, user, api) or owner
156 156
157 157 def check_user_delete(self, pull_request, user):
158 158 owner = user.user_id == pull_request.user_id
159 159 _perms = ('repository.admin',)
160 160 return self._check_perms(_perms, pull_request, user) or owner
161 161
162 162 def check_user_change_status(self, pull_request, user, api=False):
163 163 reviewer = user.user_id in [x.user_id for x in
164 164 pull_request.reviewers]
165 165 return self.check_user_update(pull_request, user, api) or reviewer
166 166
167 167 def check_user_comment(self, pull_request, user):
168 168 owner = user.user_id == pull_request.user_id
169 169 return self.check_user_read(pull_request, user) or owner
170 170
171 171 def get(self, pull_request):
172 172 return self.__get_pull_request(pull_request)
173 173
174 174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 175 opened_by=None, order_by=None,
176 176 order_dir='desc'):
177 177 repo = None
178 178 if repo_name:
179 179 repo = self._get_repo(repo_name)
180 180
181 181 q = PullRequest.query()
182 182
183 183 # source or target
184 184 if repo and source:
185 185 q = q.filter(PullRequest.source_repo == repo)
186 186 elif repo:
187 187 q = q.filter(PullRequest.target_repo == repo)
188 188
189 189 # closed,opened
190 190 if statuses:
191 191 q = q.filter(PullRequest.status.in_(statuses))
192 192
193 193 # opened by filter
194 194 if opened_by:
195 195 q = q.filter(PullRequest.user_id.in_(opened_by))
196 196
197 197 if order_by:
198 198 order_map = {
199 199 'name_raw': PullRequest.pull_request_id,
200 200 'title': PullRequest.title,
201 201 'updated_on_raw': PullRequest.updated_on,
202 202 'target_repo': PullRequest.target_repo_id
203 203 }
204 204 if order_dir == 'asc':
205 205 q = q.order_by(order_map[order_by].asc())
206 206 else:
207 207 q = q.order_by(order_map[order_by].desc())
208 208
209 209 return q
210 210
211 211 def count_all(self, repo_name, source=False, statuses=None,
212 212 opened_by=None):
213 213 """
214 214 Count the number of pull requests for a specific repository.
215 215
216 216 :param repo_name: target or source repo
217 217 :param source: boolean flag to specify if repo_name refers to source
218 218 :param statuses: list of pull request statuses
219 219 :param opened_by: author user of the pull request
220 220 :returns: int number of pull requests
221 221 """
222 222 q = self._prepare_get_all_query(
223 223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224 224
225 225 return q.count()
226 226
227 227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 228 offset=0, length=None, order_by=None, order_dir='desc'):
229 229 """
230 230 Get all pull requests for a specific repository.
231 231
232 232 :param repo_name: target or source repo
233 233 :param source: boolean flag to specify if repo_name refers to source
234 234 :param statuses: list of pull request statuses
235 235 :param opened_by: author user of the pull request
236 236 :param offset: pagination offset
237 237 :param length: length of returned list
238 238 :param order_by: order of the returned list
239 239 :param order_dir: 'asc' or 'desc' ordering direction
240 240 :returns: list of pull requests
241 241 """
242 242 q = self._prepare_get_all_query(
243 243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 244 order_by=order_by, order_dir=order_dir)
245 245
246 246 if length:
247 247 pull_requests = q.limit(length).offset(offset).all()
248 248 else:
249 249 pull_requests = q.all()
250 250
251 251 return pull_requests
252 252
253 253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 254 opened_by=None):
255 255 """
256 256 Count the number of pull requests for a specific repository that are
257 257 awaiting review.
258 258
259 259 :param repo_name: target or source repo
260 260 :param source: boolean flag to specify if repo_name refers to source
261 261 :param statuses: list of pull request statuses
262 262 :param opened_by: author user of the pull request
263 263 :returns: int number of pull requests
264 264 """
265 265 pull_requests = self.get_awaiting_review(
266 266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267 267
268 268 return len(pull_requests)
269 269
270 270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 271 opened_by=None, offset=0, length=None,
272 272 order_by=None, order_dir='desc'):
273 273 """
274 274 Get all pull requests for a specific repository that are awaiting
275 275 review.
276 276
277 277 :param repo_name: target or source repo
278 278 :param source: boolean flag to specify if repo_name refers to source
279 279 :param statuses: list of pull request statuses
280 280 :param opened_by: author user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _filtered_pull_requests = []
292 292 for pr in pull_requests:
293 293 status = pr.calculated_review_status()
294 294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 296 _filtered_pull_requests.append(pr)
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
302 302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 303 opened_by=None, user_id=None):
304 304 """
305 305 Count the number of pull requests for a specific repository that are
306 306 awaiting review from a specific user.
307 307
308 308 :param repo_name: target or source repo
309 309 :param source: boolean flag to specify if repo_name refers to source
310 310 :param statuses: list of pull request statuses
311 311 :param opened_by: author user of the pull request
312 312 :param user_id: reviewer user of the pull request
313 313 :returns: int number of pull requests
314 314 """
315 315 pull_requests = self.get_awaiting_my_review(
316 316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 317 user_id=user_id)
318 318
319 319 return len(pull_requests)
320 320
321 321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 322 opened_by=None, user_id=None, offset=0,
323 323 length=None, order_by=None, order_dir='desc'):
324 324 """
325 325 Get all pull requests for a specific repository that are awaiting
326 326 review from a specific user.
327 327
328 328 :param repo_name: target or source repo
329 329 :param source: boolean flag to specify if repo_name refers to source
330 330 :param statuses: list of pull request statuses
331 331 :param opened_by: author user of the pull request
332 332 :param user_id: reviewer user of the pull request
333 333 :param offset: pagination offset
334 334 :param length: length of returned list
335 335 :param order_by: order of the returned list
336 336 :param order_dir: 'asc' or 'desc' ordering direction
337 337 :returns: list of pull requests
338 338 """
339 339 pull_requests = self.get_all(
340 340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 341 order_by=order_by, order_dir=order_dir)
342 342
343 343 _my = PullRequestModel().get_not_reviewed(user_id)
344 344 my_participation = []
345 345 for pr in pull_requests:
346 346 if pr in _my:
347 347 my_participation.append(pr)
348 348 _filtered_pull_requests = my_participation
349 349 if length:
350 350 return _filtered_pull_requests[offset:offset+length]
351 351 else:
352 352 return _filtered_pull_requests
353 353
354 354 def get_not_reviewed(self, user_id):
355 355 return [
356 356 x.pull_request for x in PullRequestReviewers.query().filter(
357 357 PullRequestReviewers.user_id == user_id).all()
358 358 ]
359 359
360 360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 361 order_by=None, order_dir='desc'):
362 362 q = PullRequest.query()
363 363 if user_id:
364 364 reviewers_subquery = Session().query(
365 365 PullRequestReviewers.pull_request_id).filter(
366 366 PullRequestReviewers.user_id == user_id).subquery()
367 367 user_filter = or_(
368 368 PullRequest.user_id == user_id,
369 369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 370 )
371 371 q = PullRequest.query().filter(user_filter)
372 372
373 373 # closed,opened
374 374 if statuses:
375 375 q = q.filter(PullRequest.status.in_(statuses))
376 376
377 377 if order_by:
378 378 order_map = {
379 379 'name_raw': PullRequest.pull_request_id,
380 380 'title': PullRequest.title,
381 381 'updated_on_raw': PullRequest.updated_on,
382 382 'target_repo': PullRequest.target_repo_id
383 383 }
384 384 if order_dir == 'asc':
385 385 q = q.order_by(order_map[order_by].asc())
386 386 else:
387 387 q = q.order_by(order_map[order_by].desc())
388 388
389 389 return q
390 390
391 391 def count_im_participating_in(self, user_id=None, statuses=None):
392 392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 393 return q.count()
394 394
395 395 def get_im_participating_in(
396 396 self, user_id=None, statuses=None, offset=0,
397 397 length=None, order_by=None, order_dir='desc'):
398 398 """
399 399 Get all pull requests that I'm participating in, or have opened
400 400 """
401 401
402 402 q = self._prepare_participating_query(
403 403 user_id, statuses=statuses, order_by=order_by,
404 404 order_dir=order_dir)
405 405
406 406 if length:
407 407 pull_requests = q.limit(length).offset(offset).all()
408 408 else:
409 409 pull_requests = q.all()
410 410
411 411 return pull_requests
412 412
413 413 def get_versions(self, pull_request):
414 414 """
415 415 returns versions of the pull request sorted by ID ascending
416 416 """
417 417 return PullRequestVersion.query()\
418 418 .filter(PullRequestVersion.pull_request == pull_request)\
419 419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 420 .all()
421 421
422 422 def get_pr_version(self, pull_request_id, version=None):
423 423 at_version = None
424 424
425 425 if version and version == 'latest':
426 426 pull_request_ver = PullRequest.get(pull_request_id)
427 427 pull_request_obj = pull_request_ver
428 428 _org_pull_request_obj = pull_request_obj
429 429 at_version = 'latest'
430 430 elif version:
431 431 pull_request_ver = PullRequestVersion.get_or_404(version)
432 432 pull_request_obj = pull_request_ver
433 433 _org_pull_request_obj = pull_request_ver.pull_request
434 434 at_version = pull_request_ver.pull_request_version_id
435 435 else:
436 436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
437 437 pull_request_id)
438 438
439 439 pull_request_display_obj = PullRequest.get_pr_display_object(
440 440 pull_request_obj, _org_pull_request_obj)
441 441
442 442 return _org_pull_request_obj, pull_request_obj, \
443 443 pull_request_display_obj, at_version
444 444
445 445 def create(self, created_by, source_repo, source_ref, target_repo,
446 446 target_ref, revisions, reviewers, title, description=None,
447 447 description_renderer=None,
448 448 reviewer_data=None, translator=None, auth_user=None):
449 449 translator = translator or get_current_request().translate
450 450
451 451 created_by_user = self._get_user(created_by)
452 452 auth_user = auth_user or created_by_user
453 453 source_repo = self._get_repo(source_repo)
454 454 target_repo = self._get_repo(target_repo)
455 455
456 456 pull_request = PullRequest()
457 457 pull_request.source_repo = source_repo
458 458 pull_request.source_ref = source_ref
459 459 pull_request.target_repo = target_repo
460 460 pull_request.target_ref = target_ref
461 461 pull_request.revisions = revisions
462 462 pull_request.title = title
463 463 pull_request.description = description
464 464 pull_request.description_renderer = description_renderer
465 465 pull_request.author = created_by_user
466 466 pull_request.reviewer_data = reviewer_data
467 467
468 468 Session().add(pull_request)
469 469 Session().flush()
470 470
471 471 reviewer_ids = set()
472 472 # members / reviewers
473 473 for reviewer_object in reviewers:
474 474 user_id, reasons, mandatory, rules = reviewer_object
475 475 user = self._get_user(user_id)
476 476
477 477 # skip duplicates
478 478 if user.user_id in reviewer_ids:
479 479 continue
480 480
481 481 reviewer_ids.add(user.user_id)
482 482
483 483 reviewer = PullRequestReviewers()
484 484 reviewer.user = user
485 485 reviewer.pull_request = pull_request
486 486 reviewer.reasons = reasons
487 487 reviewer.mandatory = mandatory
488 488
489 489 # NOTE(marcink): pick only first rule for now
490 490 rule_id = list(rules)[0] if rules else None
491 491 rule = RepoReviewRule.get(rule_id) if rule_id else None
492 492 if rule:
493 review_group = rule.user_group_vote_rule()
493 review_group = rule.user_group_vote_rule(user_id)
494 # we check if this particular reviewer is a member of a voting group
494 495 if review_group:
495 496 # NOTE(marcink):
496 # again, can be that user is member of more,
497 # but we pick the first same, as default reviewers algo
497 # the user can be a member of more groups, but we pick the first one,
498 # same as the default reviewers algo
498 499 review_group = review_group[0]
499 500
500 501 rule_data = {
501 502 'rule_name':
502 503 rule.review_rule_name,
503 504 'rule_user_group_entry_id':
504 505 review_group.repo_review_rule_users_group_id,
505 506 'rule_user_group_name':
506 507 review_group.users_group.users_group_name,
507 508 'rule_user_group_members':
508 509 [x.user.username for x in review_group.users_group.members],
510 'rule_user_group_members_id':
511 [x.user.user_id for x in review_group.users_group.members],
509 512 }
510 513 # e.g {'vote_rule': -1, 'mandatory': True}
511 514 rule_data.update(review_group.rule_data())
512 515
513 516 reviewer.rule_data = rule_data
514 517
515 518 Session().add(reviewer)
516 519 Session().flush()
517 520
518 521 # Set approval status to "Under Review" for all commits which are
519 522 # part of this pull request.
520 523 ChangesetStatusModel().set_status(
521 524 repo=target_repo,
522 525 status=ChangesetStatus.STATUS_UNDER_REVIEW,
523 526 user=created_by_user,
524 527 pull_request=pull_request
525 528 )
526 529 # we commit early at this point. The queries above do some
527 530 # row-locking, so we need to commit and finish the transaction
528 531 # before the validate call below, which for large repos could
529 532 # take long and result in long row locks
530 533 Session().commit()
531 534
532 535 # prepare workspace, and run initial merge simulation
533 536 MergeCheck.validate(
534 537 pull_request, user=created_by_user, translator=translator)
535 538
536 539 self.notify_reviewers(pull_request, reviewer_ids)
537 540 self._trigger_pull_request_hook(
538 541 pull_request, created_by_user, 'create')
539 542
540 543 creation_data = pull_request.get_api_data(with_merge_state=False)
541 544 self._log_audit_action(
542 545 'repo.pull_request.create', {'data': creation_data},
543 546 auth_user, pull_request)
544 547
545 548 return pull_request
546 549
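With the user_group_vote_rule(user_id) change above, the reviewer's stored rule_data only describes a voting group the reviewer actually belongs to, and it now also carries the member ids. A hypothetical example of the resulting structure (all names and ids are made up):

rule_data = {
    'rule_name': 'default reviewers rule',
    'rule_user_group_entry_id': 7,
    'rule_user_group_name': 'backend-devs',
    'rule_user_group_members': ['alice', 'bob'],
    'rule_user_group_members_id': [2, 3],
    # merged in from review_group.rule_data(), e.g.:
    'vote_rule': -1,
    'mandatory': True,
}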
547 550 def _trigger_pull_request_hook(self, pull_request, user, action):
548 551 pull_request = self.__get_pull_request(pull_request)
549 552 target_scm = pull_request.target_repo.scm_instance()
550 553 if action == 'create':
551 554 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
552 555 elif action == 'merge':
553 556 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
554 557 elif action == 'close':
555 558 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
556 559 elif action == 'review_status_change':
557 560 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
558 561 elif action == 'update':
559 562 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
560 563 else:
561 564 return
562 565
563 566 trigger_hook(
564 567 username=user.username,
565 568 repo_name=pull_request.target_repo.repo_name,
566 569 repo_alias=target_scm.alias,
567 570 pull_request=pull_request)
568 571
569 572 def _get_commit_ids(self, pull_request):
570 573 """
571 574 Return the commit ids of the merged pull request.
572 575
573 576 This method does not yet deal correctly with the lack of
574 577 autoupdates nor with implicit target updates.
575 578 For example: if a commit in the source repo is already in the target it
576 579 will be reported anyway.
577 580 """
578 581 merge_rev = pull_request.merge_rev
579 582 if merge_rev is None:
580 583 raise ValueError('This pull request was not merged yet')
581 584
582 585 commit_ids = list(pull_request.revisions)
583 586 if merge_rev not in commit_ids:
584 587 commit_ids.append(merge_rev)
585 588
586 589 return commit_ids
587 590
588 591 def merge_repo(self, pull_request, user, extras):
589 592 log.debug("Merging pull request %s", pull_request.pull_request_id)
590 593 merge_state = self._merge_pull_request(pull_request, user, extras)
591 594 if merge_state.executed:
592 595 log.debug(
593 596 "Merge was successful, updating the pull request comments.")
594 597 self._comment_and_close_pr(pull_request, user, merge_state)
595 598
596 599 self._log_audit_action(
597 600 'repo.pull_request.merge',
598 601 {'merge_state': merge_state.__dict__},
599 602 user, pull_request)
600 603
601 604 else:
602 605 log.warn("Merge failed, not updating the pull request.")
603 606 return merge_state
604 607
605 608 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
606 609 target_vcs = pull_request.target_repo.scm_instance()
607 610 source_vcs = pull_request.source_repo.scm_instance()
608 611 target_ref = self._refresh_reference(
609 612 pull_request.target_ref_parts, target_vcs)
610 613
611 614 message = merge_msg or (
612 615 'Merge pull request #%(pr_id)s from '
613 616 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
614 617 'pr_id': pull_request.pull_request_id,
615 618 'source_repo': source_vcs.name,
616 619 'source_ref_name': pull_request.source_ref_parts.name,
617 620 'pr_title': pull_request.title
618 621 }
619 622
620 623 workspace_id = self._workspace_id(pull_request)
621 624 repo_id = pull_request.target_repo.repo_id
622 625 use_rebase = self._use_rebase_for_merging(pull_request)
623 626 close_branch = self._close_branch_before_merging(pull_request)
624 627
625 628 callback_daemon, extras = prepare_callback_daemon(
626 629 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
627 630 host=vcs_settings.HOOKS_HOST,
628 631 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
629 632
630 633 with callback_daemon:
631 634 # TODO: johbo: Implement a clean way to run a config_override
632 635 # for a single call.
633 636 target_vcs.config.set(
634 637 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
635 638 merge_state = target_vcs.merge(
636 639 repo_id, workspace_id, target_ref, source_vcs,
637 640 pull_request.source_ref_parts,
638 641 user_name=user.username, user_email=user.email,
639 642 message=message, use_rebase=use_rebase,
640 643 close_branch=close_branch)
641 644 return merge_state
642 645
643 646 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
644 647 pull_request.merge_rev = merge_state.merge_ref.commit_id
645 648 pull_request.updated_on = datetime.datetime.now()
646 649 close_msg = close_msg or 'Pull request merged and closed'
647 650
648 651 CommentsModel().create(
649 652 text=safe_unicode(close_msg),
650 653 repo=pull_request.target_repo.repo_id,
651 654 user=user.user_id,
652 655 pull_request=pull_request.pull_request_id,
653 656 f_path=None,
654 657 line_no=None,
655 658 closing_pr=True
656 659 )
657 660
658 661 Session().add(pull_request)
659 662 Session().flush()
660 663 # TODO: paris: replace invalidation with less radical solution
661 664 ScmModel().mark_for_invalidation(
662 665 pull_request.target_repo.repo_name)
663 666 self._trigger_pull_request_hook(pull_request, user, 'merge')
664 667
665 668 def has_valid_update_type(self, pull_request):
666 669 source_ref_type = pull_request.source_ref_parts.type
667 670 return source_ref_type in ['book', 'branch', 'tag']
668 671
669 672 def update_commits(self, pull_request):
670 673 """
671 674 Get the updated list of commits for the pull request
672 675 and return the new pull request version and the list
673 676 of commits processed by this update action
674 677 """
675 678 pull_request = self.__get_pull_request(pull_request)
676 679 source_ref_type = pull_request.source_ref_parts.type
677 680 source_ref_name = pull_request.source_ref_parts.name
678 681 source_ref_id = pull_request.source_ref_parts.commit_id
679 682
680 683 target_ref_type = pull_request.target_ref_parts.type
681 684 target_ref_name = pull_request.target_ref_parts.name
682 685 target_ref_id = pull_request.target_ref_parts.commit_id
683 686
684 687 if not self.has_valid_update_type(pull_request):
685 688 log.debug(
686 689 "Skipping update of pull request %s due to ref type: %s",
687 690 pull_request, source_ref_type)
688 691 return UpdateResponse(
689 692 executed=False,
690 693 reason=UpdateFailureReason.WRONG_REF_TYPE,
691 694 old=pull_request, new=None, changes=None,
692 695 source_changed=False, target_changed=False)
693 696
694 697 # source repo
695 698 source_repo = pull_request.source_repo.scm_instance()
696 699 try:
697 700 source_commit = source_repo.get_commit(commit_id=source_ref_name)
698 701 except CommitDoesNotExistError:
699 702 return UpdateResponse(
700 703 executed=False,
701 704 reason=UpdateFailureReason.MISSING_SOURCE_REF,
702 705 old=pull_request, new=None, changes=None,
703 706 source_changed=False, target_changed=False)
704 707
705 708 source_changed = source_ref_id != source_commit.raw_id
706 709
707 710 # target repo
708 711 target_repo = pull_request.target_repo.scm_instance()
709 712 try:
710 713 target_commit = target_repo.get_commit(commit_id=target_ref_name)
711 714 except CommitDoesNotExistError:
712 715 return UpdateResponse(
713 716 executed=False,
714 717 reason=UpdateFailureReason.MISSING_TARGET_REF,
715 718 old=pull_request, new=None, changes=None,
716 719 source_changed=False, target_changed=False)
717 720 target_changed = target_ref_id != target_commit.raw_id
718 721
719 722 if not (source_changed or target_changed):
720 723 log.debug("Nothing changed in pull request %s", pull_request)
721 724 return UpdateResponse(
722 725 executed=False,
723 726 reason=UpdateFailureReason.NO_CHANGE,
724 727 old=pull_request, new=None, changes=None,
725 728 source_changed=target_changed, target_changed=source_changed)
726 729
727 730 change_in_found = 'target repo' if target_changed else 'source repo'
728 731 log.debug('Updating pull request because of change in %s detected',
729 732 change_in_found)
730 733
731 734 # Finally there is a need for an update, in case of source change
732 735 # we create a new version, else just an update
733 736 if source_changed:
734 737 pull_request_version = self._create_version_from_snapshot(pull_request)
735 738 self._link_comments_to_version(pull_request_version)
736 739 else:
737 740 try:
738 741 ver = pull_request.versions[-1]
739 742 except IndexError:
740 743 ver = None
741 744
742 745 pull_request.pull_request_version_id = \
743 746 ver.pull_request_version_id if ver else None
744 747 pull_request_version = pull_request
745 748
746 749 try:
747 750 if target_ref_type in ('tag', 'branch', 'book'):
748 751 target_commit = target_repo.get_commit(target_ref_name)
749 752 else:
750 753 target_commit = target_repo.get_commit(target_ref_id)
751 754 except CommitDoesNotExistError:
752 755 return UpdateResponse(
753 756 executed=False,
754 757 reason=UpdateFailureReason.MISSING_TARGET_REF,
755 758 old=pull_request, new=None, changes=None,
756 759 source_changed=source_changed, target_changed=target_changed)
757 760
758 761 # re-compute commit ids
759 762 old_commit_ids = pull_request.revisions
760 763 pre_load = ["author", "branch", "date", "message"]
761 764 commit_ranges = target_repo.compare(
762 765 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
763 766 pre_load=pre_load)
764 767
765 768 ancestor = target_repo.get_common_ancestor(
766 769 target_commit.raw_id, source_commit.raw_id, source_repo)
767 770
768 771 pull_request.source_ref = '%s:%s:%s' % (
769 772 source_ref_type, source_ref_name, source_commit.raw_id)
770 773 pull_request.target_ref = '%s:%s:%s' % (
771 774 target_ref_type, target_ref_name, ancestor)
772 775
773 776 pull_request.revisions = [
774 777 commit.raw_id for commit in reversed(commit_ranges)]
775 778 pull_request.updated_on = datetime.datetime.now()
776 779 Session().add(pull_request)
777 780 new_commit_ids = pull_request.revisions
778 781
779 782 old_diff_data, new_diff_data = self._generate_update_diffs(
780 783 pull_request, pull_request_version)
781 784
782 785 # calculate commit and file changes
783 786 changes = self._calculate_commit_id_changes(
784 787 old_commit_ids, new_commit_ids)
785 788 file_changes = self._calculate_file_changes(
786 789 old_diff_data, new_diff_data)
787 790
788 791 # set comments as outdated if DIFFS changed
789 792 CommentsModel().outdate_comments(
790 793 pull_request, old_diff_data=old_diff_data,
791 794 new_diff_data=new_diff_data)
792 795
793 796 commit_changes = (changes.added or changes.removed)
794 797 file_node_changes = (
795 798 file_changes.added or file_changes.modified or file_changes.removed)
796 799 pr_has_changes = commit_changes or file_node_changes
797 800
798 801 # Add an automatic comment to the pull request, in case
799 802 # anything has changed
800 803 if pr_has_changes:
801 804 update_comment = CommentsModel().create(
802 805 text=self._render_update_message(changes, file_changes),
803 806 repo=pull_request.target_repo,
804 807 user=pull_request.author,
805 808 pull_request=pull_request,
806 809 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
807 810
808 811 # Update status to "Under Review" for added commits
809 812 for commit_id in changes.added:
810 813 ChangesetStatusModel().set_status(
811 814 repo=pull_request.source_repo,
812 815 status=ChangesetStatus.STATUS_UNDER_REVIEW,
813 816 comment=update_comment,
814 817 user=pull_request.author,
815 818 pull_request=pull_request,
816 819 revision=commit_id)
817 820
818 821 log.debug(
819 822 'Updated pull request %s, added_ids: %s, common_ids: %s, '
820 823 'removed_ids: %s', pull_request.pull_request_id,
821 824 changes.added, changes.common, changes.removed)
822 825 log.debug(
823 826 'Updated pull request with the following file changes: %s',
824 827 file_changes)
825 828
826 829 log.info(
827 830 "Updated pull request %s from commit %s to commit %s, "
828 831 "stored new version %s of this pull request.",
829 832 pull_request.pull_request_id, source_ref_id,
830 833 pull_request.source_ref_parts.commit_id,
831 834 pull_request_version.pull_request_version_id)
832 835 Session().commit()
833 836 self._trigger_pull_request_hook(
834 837 pull_request, pull_request.author, 'update')
835 838
836 839 return UpdateResponse(
837 840 executed=True, reason=UpdateFailureReason.NONE,
838 841 old=pull_request, new=pull_request_version, changes=changes,
839 842 source_changed=source_changed, target_changed=target_changed)
840 843
841 844 def _create_version_from_snapshot(self, pull_request):
842 845 version = PullRequestVersion()
843 846 version.title = pull_request.title
844 847 version.description = pull_request.description
845 848 version.status = pull_request.status
846 849 version.created_on = datetime.datetime.now()
847 850 version.updated_on = pull_request.updated_on
848 851 version.user_id = pull_request.user_id
849 852 version.source_repo = pull_request.source_repo
850 853 version.source_ref = pull_request.source_ref
851 854 version.target_repo = pull_request.target_repo
852 855 version.target_ref = pull_request.target_ref
853 856
854 857 version._last_merge_source_rev = pull_request._last_merge_source_rev
855 858 version._last_merge_target_rev = pull_request._last_merge_target_rev
856 859 version.last_merge_status = pull_request.last_merge_status
857 860 version.shadow_merge_ref = pull_request.shadow_merge_ref
858 861 version.merge_rev = pull_request.merge_rev
859 862 version.reviewer_data = pull_request.reviewer_data
860 863
861 864 version.revisions = pull_request.revisions
862 865 version.pull_request = pull_request
863 866 Session().add(version)
864 867 Session().flush()
865 868
866 869 return version
867 870
868 871 def _generate_update_diffs(self, pull_request, pull_request_version):
869 872
870 873 diff_context = (
871 874 self.DIFF_CONTEXT +
872 875 CommentsModel.needed_extra_diff_context())
873 876
874 877 source_repo = pull_request_version.source_repo
875 878 source_ref_id = pull_request_version.source_ref_parts.commit_id
876 879 target_ref_id = pull_request_version.target_ref_parts.commit_id
877 880 old_diff = self._get_diff_from_pr_or_version(
878 881 source_repo, source_ref_id, target_ref_id, context=diff_context)
879 882
880 883 source_repo = pull_request.source_repo
881 884 source_ref_id = pull_request.source_ref_parts.commit_id
882 885 target_ref_id = pull_request.target_ref_parts.commit_id
883 886
884 887 new_diff = self._get_diff_from_pr_or_version(
885 888 source_repo, source_ref_id, target_ref_id, context=diff_context)
886 889
887 890 old_diff_data = diffs.DiffProcessor(old_diff)
888 891 old_diff_data.prepare()
889 892 new_diff_data = diffs.DiffProcessor(new_diff)
890 893 new_diff_data.prepare()
891 894
892 895 return old_diff_data, new_diff_data
893 896
894 897 def _link_comments_to_version(self, pull_request_version):
895 898 """
896 899 Link all unlinked comments of this pull request to the given version.
897 900
898 901 :param pull_request_version: The `PullRequestVersion` to which
899 902 the comments shall be linked.
900 903
901 904 """
902 905 pull_request = pull_request_version.pull_request
903 906 comments = ChangesetComment.query()\
904 907 .filter(
905 908 # TODO: johbo: Should we query for the repo at all here?
906 909 # Pending decision on how comments of PRs are to be related
907 910 # to either the source repo, the target repo or no repo at all.
908 911 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
909 912 ChangesetComment.pull_request == pull_request,
910 913 ChangesetComment.pull_request_version == None)\
911 914 .order_by(ChangesetComment.comment_id.asc())
912 915
913 916 # TODO: johbo: Find out why this breaks if it is done in a bulk
914 917 # operation.
915 918 for comment in comments:
916 919 comment.pull_request_version_id = (
917 920 pull_request_version.pull_request_version_id)
918 921 Session().add(comment)
919 922
920 923 def _calculate_commit_id_changes(self, old_ids, new_ids):
921 924 added = [x for x in new_ids if x not in old_ids]
922 925 common = [x for x in new_ids if x in old_ids]
923 926 removed = [x for x in old_ids if x not in new_ids]
924 927 total = new_ids
925 928 return ChangeTuple(added, common, removed, total)
926 929
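_calculate_commit_id_changes() above is a plain membership diff between the old and new revision lists. ChangeTuple is defined elsewhere in this module, so the namedtuple below is only a stand-in matching how it is used here:

import collections

ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

def calculate_commit_id_changes(old_ids, new_ids):
    added = [x for x in new_ids if x not in old_ids]
    common = [x for x in new_ids if x in old_ids]
    removed = [x for x in old_ids if x not in new_ids]
    return ChangeTuple(added, common, removed, new_ids)

# old PR had commits a, b; the update brought b, c
# -> ChangeTuple(added=['c'], common=['b'], removed=['a'], total=['b', 'c'])
print(calculate_commit_id_changes(['a', 'b'], ['b', 'c']))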
927 930 def _calculate_file_changes(self, old_diff_data, new_diff_data):
928 931
929 932 old_files = OrderedDict()
930 933 for diff_data in old_diff_data.parsed_diff:
931 934 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
932 935
933 936 added_files = []
934 937 modified_files = []
935 938 removed_files = []
936 939 for diff_data in new_diff_data.parsed_diff:
937 940 new_filename = diff_data['filename']
938 941 new_hash = md5_safe(diff_data['raw_diff'])
939 942
940 943 old_hash = old_files.get(new_filename)
941 944 if not old_hash:
942 945 # file is not present in old diff, means it's added
943 946 added_files.append(new_filename)
944 947 else:
945 948 if new_hash != old_hash:
946 949 modified_files.append(new_filename)
947 950 # now remove a file from old, since we have seen it already
948 951 del old_files[new_filename]
949 952
950 953 # removed files are those present in old, but not in NEW;
951 954 # since we remove old files that are present in the new diff, any
952 955 # left-overs are the removed files
953 956 removed_files.extend(old_files.keys())
954 957
955 958 return FileChangeTuple(added_files, modified_files, removed_files)
956 959
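_calculate_file_changes() above keys each file on an md5 of its raw diff text to decide whether it was added, modified or removed between the two pull request versions. A compact standalone sketch of that classification, taking plain filename-to-diff mappings instead of DiffProcessor results:

import hashlib

def classify_file_changes(old_diffs, new_diffs):
    # old_diffs / new_diffs: mapping of filename -> raw diff text
    old_hashes = {name: hashlib.md5(diff.encode('utf-8')).hexdigest()
                  for name, diff in old_diffs.items()}
    added, modified = [], []
    for name, diff in new_diffs.items():
        new_hash = hashlib.md5(diff.encode('utf-8')).hexdigest()
        if name not in old_hashes:
            added.append(name)
        elif new_hash != old_hashes[name]:
            modified.append(name)
        old_hashes.pop(name, None)
    removed = list(old_hashes)  # whatever is left appeared only in the old diff
    return added, modified, removed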
957 960 def _render_update_message(self, changes, file_changes):
958 961 """
959 962 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
960 963 so it's always looking the same disregarding on which default
961 964 renderer system is using.
962 965
963 966 :param changes: changes named tuple
964 967 :param file_changes: file changes named tuple
965 968
966 969 """
967 970 new_status = ChangesetStatus.get_status_lbl(
968 971 ChangesetStatus.STATUS_UNDER_REVIEW)
969 972
970 973 changed_files = (
971 974 file_changes.added + file_changes.modified + file_changes.removed)
972 975
973 976 params = {
974 977 'under_review_label': new_status,
975 978 'added_commits': changes.added,
976 979 'removed_commits': changes.removed,
977 980 'changed_files': changed_files,
978 981 'added_files': file_changes.added,
979 982 'modified_files': file_changes.modified,
980 983 'removed_files': file_changes.removed,
981 984 }
982 985 renderer = RstTemplateRenderer()
983 986 return renderer.render('pull_request_update.mako', **params)
984 987
985 988 def edit(self, pull_request, title, description, description_renderer, user):
986 989 pull_request = self.__get_pull_request(pull_request)
987 990 old_data = pull_request.get_api_data(with_merge_state=False)
988 991 if pull_request.is_closed():
989 992 raise ValueError('This pull request is closed')
990 993 if title:
991 994 pull_request.title = title
992 995 pull_request.description = description
993 996 pull_request.updated_on = datetime.datetime.now()
994 997 pull_request.description_renderer = description_renderer
995 998 Session().add(pull_request)
996 999 self._log_audit_action(
997 1000 'repo.pull_request.edit', {'old_data': old_data},
998 1001 user, pull_request)
999 1002
1000 1003 def update_reviewers(self, pull_request, reviewer_data, user):
1001 1004 """
1002 1005 Update the reviewers in the pull request
1003 1006
1004 1007 :param pull_request: the pr to update
1005 1008 :param reviewer_data: list of tuples
1006 1009 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1007 1010 """
1008 1011 pull_request = self.__get_pull_request(pull_request)
1009 1012 if pull_request.is_closed():
1010 1013 raise ValueError('This pull request is closed')
1011 1014
1012 1015 reviewers = {}
1013 1016 for user_id, reasons, mandatory, rules in reviewer_data:
1014 1017 if isinstance(user_id, (int, basestring)):
1015 1018 user_id = self._get_user(user_id).user_id
1016 1019 reviewers[user_id] = {
1017 1020 'reasons': reasons, 'mandatory': mandatory}
1018 1021
1019 1022 reviewers_ids = set(reviewers.keys())
1020 1023 current_reviewers = PullRequestReviewers.query()\
1021 1024 .filter(PullRequestReviewers.pull_request ==
1022 1025 pull_request).all()
1023 1026 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1024 1027
1025 1028 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1026 1029 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1027 1030
1028 1031 log.debug("Adding %s reviewers", ids_to_add)
1029 1032 log.debug("Removing %s reviewers", ids_to_remove)
1030 1033 changed = False
1031 1034 for uid in ids_to_add:
1032 1035 changed = True
1033 1036 _usr = self._get_user(uid)
1034 1037 reviewer = PullRequestReviewers()
1035 1038 reviewer.user = _usr
1036 1039 reviewer.pull_request = pull_request
1037 1040 reviewer.reasons = reviewers[uid]['reasons']
1038 1041 # NOTE(marcink): mandatory shouldn't be changed now
1039 1042 # reviewer.mandatory = reviewers[uid]['reasons']
1040 1043 Session().add(reviewer)
1041 1044 self._log_audit_action(
1042 1045 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1043 1046 user, pull_request)
1044 1047
1045 1048 for uid in ids_to_remove:
1046 1049 changed = True
1047 1050 reviewers = PullRequestReviewers.query()\
1048 1051 .filter(PullRequestReviewers.user_id == uid,
1049 1052 PullRequestReviewers.pull_request == pull_request)\
1050 1053 .all()
1051 1054 # use .all() in case we accidentally added the same person twice
1052 1055 # this CAN happen due to the lack of DB checks
1053 1056 for obj in reviewers:
1054 1057 old_data = obj.get_dict()
1055 1058 Session().delete(obj)
1056 1059 self._log_audit_action(
1057 1060 'repo.pull_request.reviewer.delete',
1058 1061 {'old_data': old_data}, user, pull_request)
1059 1062
1060 1063 if changed:
1061 1064 pull_request.updated_on = datetime.datetime.now()
1062 1065 Session().add(pull_request)
1063 1066
1064 1067 self.notify_reviewers(pull_request, ids_to_add)
1065 1068 return ids_to_add, ids_to_remove
1066 1069
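update_reviewers() above expects reviewer_data as (user, reasons, mandatory, rules) tuples, as its docstring states; a hypothetical value (user ids, names and rule ids are illustrative):

reviewer_data = [
    # user id 2, added by a review rule with id 7, vote is mandatory
    (2, ['member of the backend-devs user group'], True, [7]),
    # a username also works; no rule attached, vote not mandatory
    ('alice', ['repository owner'], False, []),
]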
1067 1070 def get_url(self, pull_request, request=None, permalink=False):
1068 1071 if not request:
1069 1072 request = get_current_request()
1070 1073
1071 1074 if permalink:
1072 1075 return request.route_url(
1073 1076 'pull_requests_global',
1074 1077 pull_request_id=pull_request.pull_request_id,)
1075 1078 else:
1076 1079 return request.route_url('pullrequest_show',
1077 1080 repo_name=safe_str(pull_request.target_repo.repo_name),
1078 1081 pull_request_id=pull_request.pull_request_id,)
1079 1082
1080 1083 def get_shadow_clone_url(self, pull_request, request=None):
1081 1084 """
1082 1085 Returns qualified url pointing to the shadow repository. If this pull
1083 1086 request is closed there is no shadow repository and ``None`` will be
1084 1087 returned.
1085 1088 """
1086 1089 if pull_request.is_closed():
1087 1090 return None
1088 1091 else:
1089 1092 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1090 1093 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1091 1094
1092 1095 def notify_reviewers(self, pull_request, reviewers_ids):
1093 1096 # notification to reviewers
1094 1097 if not reviewers_ids:
1095 1098 return
1096 1099
1097 1100 pull_request_obj = pull_request
1098 1101 # get the current participants of this pull request
1099 1102 recipients = reviewers_ids
1100 1103 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1101 1104
1102 1105 pr_source_repo = pull_request_obj.source_repo
1103 1106 pr_target_repo = pull_request_obj.target_repo
1104 1107
1105 1108 pr_url = h.route_url('pullrequest_show',
1106 1109 repo_name=pr_target_repo.repo_name,
1107 1110 pull_request_id=pull_request_obj.pull_request_id,)
1108 1111
1109 1112 # set some variables for email notification
1110 1113 pr_target_repo_url = h.route_url(
1111 1114 'repo_summary', repo_name=pr_target_repo.repo_name)
1112 1115
1113 1116 pr_source_repo_url = h.route_url(
1114 1117 'repo_summary', repo_name=pr_source_repo.repo_name)
1115 1118
1116 1119 # pull request specifics
1117 1120 pull_request_commits = [
1118 1121 (x.raw_id, x.message)
1119 1122 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1120 1123
1121 1124 kwargs = {
1122 1125 'user': pull_request.author,
1123 1126 'pull_request': pull_request_obj,
1124 1127 'pull_request_commits': pull_request_commits,
1125 1128
1126 1129 'pull_request_target_repo': pr_target_repo,
1127 1130 'pull_request_target_repo_url': pr_target_repo_url,
1128 1131
1129 1132 'pull_request_source_repo': pr_source_repo,
1130 1133 'pull_request_source_repo_url': pr_source_repo_url,
1131 1134
1132 1135 'pull_request_url': pr_url,
1133 1136 }
1134 1137
1135 1138 # pre-generate the subject for notification itself
1136 1139 (subject,
1137 1140 _h, _e, # we don't care about those
1138 1141 body_plaintext) = EmailNotificationModel().render_email(
1139 1142 notification_type, **kwargs)
1140 1143
1141 1144 # create notification objects, and emails
1142 1145 NotificationModel().create(
1143 1146 created_by=pull_request.author,
1144 1147 notification_subject=subject,
1145 1148 notification_body=body_plaintext,
1146 1149 notification_type=notification_type,
1147 1150 recipients=recipients,
1148 1151 email_kwargs=kwargs,
1149 1152 )
1150 1153
1151 1154 def delete(self, pull_request, user):
1152 1155 pull_request = self.__get_pull_request(pull_request)
1153 1156 old_data = pull_request.get_api_data(with_merge_state=False)
1154 1157 self._cleanup_merge_workspace(pull_request)
1155 1158 self._log_audit_action(
1156 1159 'repo.pull_request.delete', {'old_data': old_data},
1157 1160 user, pull_request)
1158 1161 Session().delete(pull_request)
1159 1162
1160 1163 def close_pull_request(self, pull_request, user):
1161 1164 pull_request = self.__get_pull_request(pull_request)
1162 1165 self._cleanup_merge_workspace(pull_request)
1163 1166 pull_request.status = PullRequest.STATUS_CLOSED
1164 1167 pull_request.updated_on = datetime.datetime.now()
1165 1168 Session().add(pull_request)
1166 1169 self._trigger_pull_request_hook(
1167 1170 pull_request, pull_request.author, 'close')
1168 1171
1169 1172 pr_data = pull_request.get_api_data(with_merge_state=False)
1170 1173 self._log_audit_action(
1171 1174 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1172 1175
1173 1176 def close_pull_request_with_comment(
1174 1177 self, pull_request, user, repo, message=None):
1175 1178
1176 1179 pull_request_review_status = pull_request.calculated_review_status()
1177 1180
1178 1181 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1179 1182 # approved only if we have voting consent
1180 1183 status = ChangesetStatus.STATUS_APPROVED
1181 1184 else:
1182 1185 status = ChangesetStatus.STATUS_REJECTED
1183 1186 status_lbl = ChangesetStatus.get_status_lbl(status)
1184 1187
1185 1188 default_message = (
1186 1189 'Closing with status change {transition_icon} {status}.'
1187 1190 ).format(transition_icon='>', status=status_lbl)
1188 1191 text = message or default_message
1189 1192
1190 1193 # create a comment, and link it to new status
1191 1194 comment = CommentsModel().create(
1192 1195 text=text,
1193 1196 repo=repo.repo_id,
1194 1197 user=user.user_id,
1195 1198 pull_request=pull_request.pull_request_id,
1196 1199 status_change=status_lbl,
1197 1200 status_change_type=status,
1198 1201 closing_pr=True
1199 1202 )
1200 1203
1201 1204 # calculate old status before we change it
1202 1205 old_calculated_status = pull_request.calculated_review_status()
1203 1206 ChangesetStatusModel().set_status(
1204 1207 repo.repo_id,
1205 1208 status,
1206 1209 user.user_id,
1207 1210 comment=comment,
1208 1211 pull_request=pull_request.pull_request_id
1209 1212 )
1210 1213
1211 1214 Session().flush()
1212 1215 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1213 1216 # we now calculate the status of the pull request again, and based on
1214 1217 # that calculation trigger a status change. This can happen when a
1215 1218 # non-reviewer admin closes a PR: their vote does not change the status,
1216 1219 # while a reviewer's vote might.
1217 1220 calculated_status = pull_request.calculated_review_status()
1218 1221 if old_calculated_status != calculated_status:
1219 1222 self._trigger_pull_request_hook(
1220 1223 pull_request, user, 'review_status_change')
1221 1224
1222 1225 # finally close the PR
1223 1226 PullRequestModel().close_pull_request(
1224 1227 pull_request.pull_request_id, user)
1225 1228
1226 1229 return comment, status
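# A minimal usage sketch (hypothetical `pr`, `admin_user` and `repo`
# objects, assuming an active database session); not part of the original
# module:
#
#     comment, status = PullRequestModel().close_pull_request_with_comment(
#         pr, admin_user, repo, message='Closing, superseded by a new PR')
#     Session().commit()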
1227 1230
1228 1231 def merge_status(self, pull_request, translator=None,
1229 1232 force_shadow_repo_refresh=False):
1230 1233 _ = translator or get_current_request().translate
1231 1234
1232 1235 if not self._is_merge_enabled(pull_request):
1233 1236 return False, _('Server-side pull request merging is disabled.')
1234 1237 if pull_request.is_closed():
1235 1238 return False, _('This pull request is closed.')
1236 1239 merge_possible, msg = self._check_repo_requirements(
1237 1240 target=pull_request.target_repo, source=pull_request.source_repo,
1238 1241 translator=_)
1239 1242 if not merge_possible:
1240 1243 return merge_possible, msg
1241 1244
1242 1245 try:
1243 1246 resp = self._try_merge(
1244 1247 pull_request,
1245 1248 force_shadow_repo_refresh=force_shadow_repo_refresh)
1246 1249 log.debug("Merge response: %s", resp)
1247 1250 status = resp.possible, self.merge_status_message(
1248 1251 resp.failure_reason)
1249 1252 except NotImplementedError:
1250 1253 status = False, _('Pull request merging is not supported.')
1251 1254
1252 1255 return status
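# Usage sketch (hypothetical `pr` object; without an explicit translator an
# active pyramid request is assumed):
#
#     can_merge, reason = PullRequestModel().merge_status(pr)
#     if not can_merge:
#         log.info('merge blocked: %s', reason)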
1253 1256
1254 1257 def _check_repo_requirements(self, target, source, translator):
1255 1258 """
1256 1259 Check if `target` and `source` have compatible requirements.
1257 1260
1258 1261 Currently this is just checking for largefiles.
1259 1262 """
1260 1263 _ = translator
1261 1264 target_has_largefiles = self._has_largefiles(target)
1262 1265 source_has_largefiles = self._has_largefiles(source)
1263 1266 merge_possible = True
1264 1267 message = u''
1265 1268
1266 1269 if target_has_largefiles != source_has_largefiles:
1267 1270 merge_possible = False
1268 1271 if source_has_largefiles:
1269 1272 message = _(
1270 1273 'Target repository large files support is disabled.')
1271 1274 else:
1272 1275 message = _(
1273 1276 'Source repository large files support is disabled.')
1274 1277
1275 1278 return merge_possible, message
1276 1279
1277 1280 def _has_largefiles(self, repo):
1278 1281 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1279 1282 'extensions', 'largefiles')
1280 1283 return largefiles_ui and largefiles_ui[0].active
1281 1284
1282 1285 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1283 1286 """
1284 1287 Try to merge the pull request and return the merge status.
1285 1288 """
1286 1289 log.debug(
1287 1290 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1288 1291 pull_request.pull_request_id, force_shadow_repo_refresh)
1289 1292 target_vcs = pull_request.target_repo.scm_instance()
1290 1293
1291 1294 # Refresh the target reference.
1292 1295 try:
1293 1296 target_ref = self._refresh_reference(
1294 1297 pull_request.target_ref_parts, target_vcs)
1295 1298 except CommitDoesNotExistError:
1296 1299 merge_state = MergeResponse(
1297 1300 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1298 1301 return merge_state
1299 1302
1300 1303 target_locked = pull_request.target_repo.locked
1301 1304 if target_locked and target_locked[0]:
1302 1305 log.debug("The target repository is locked.")
1303 1306 merge_state = MergeResponse(
1304 1307 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1305 1308 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1306 1309 pull_request, target_ref):
1307 1310 log.debug("Refreshing the merge status of the repository.")
1308 1311 merge_state = self._refresh_merge_state(
1309 1312 pull_request, target_vcs, target_ref)
1310 1313 else:
1311 1314 possible = (
1312 1315 pull_request.last_merge_status == MergeFailureReason.NONE)
1313 1316 merge_state = MergeResponse(
1314 1317 possible, False, None, pull_request.last_merge_status)
1315 1318
1316 1319 return merge_state
1317 1320
1318 1321 def _refresh_reference(self, reference, vcs_repository):
1319 1322 if reference.type in ('branch', 'book'):
1320 1323 name_or_id = reference.name
1321 1324 else:
1322 1325 name_or_id = reference.commit_id
1323 1326 refreshed_commit = vcs_repository.get_commit(name_or_id)
1324 1327 refreshed_reference = Reference(
1325 1328 reference.type, reference.name, refreshed_commit.raw_id)
1326 1329 return refreshed_reference
1327 1330
1328 1331 def _needs_merge_state_refresh(self, pull_request, target_reference):
1329 1332 return not (
1330 1333 pull_request.revisions and
1331 1334 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1332 1335 target_reference.commit_id == pull_request._last_merge_target_rev)
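# Note: the cached merge state is considered fresh only when the first
# stored revision still equals `_last_merge_source_rev` and the target
# ref commit still equals `_last_merge_target_rev`; any mismatch forces
# a refresh through _refresh_merge_state().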
1333 1336
1334 1337 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1335 1338 workspace_id = self._workspace_id(pull_request)
1336 1339 source_vcs = pull_request.source_repo.scm_instance()
1337 1340 repo_id = pull_request.target_repo.repo_id
1338 1341 use_rebase = self._use_rebase_for_merging(pull_request)
1339 1342 close_branch = self._close_branch_before_merging(pull_request)
1340 1343 merge_state = target_vcs.merge(
1341 1344 repo_id, workspace_id,
1342 1345 target_reference, source_vcs, pull_request.source_ref_parts,
1343 1346 dry_run=True, use_rebase=use_rebase,
1344 1347 close_branch=close_branch)
1345 1348
1346 1349 # Do not store the response if there was an unknown error.
1347 1350 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1348 1351 pull_request._last_merge_source_rev = \
1349 1352 pull_request.source_ref_parts.commit_id
1350 1353 pull_request._last_merge_target_rev = target_reference.commit_id
1351 1354 pull_request.last_merge_status = merge_state.failure_reason
1352 1355 pull_request.shadow_merge_ref = merge_state.merge_ref
1353 1356 Session().add(pull_request)
1354 1357 Session().commit()
1355 1358
1356 1359 return merge_state
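# Note: the merge above runs with dry_run=True, i.e. it only simulates the
# merge inside the shadow repository; its outcome (failure reason and shadow
# merge ref) is persisted on the pull request unless the reason was UNKNOWN.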
1357 1360
1358 1361 def _workspace_id(self, pull_request):
1359 1362 workspace_id = 'pr-%s' % pull_request.pull_request_id
1360 1363 return workspace_id
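# e.g. the pull request with id 77 uses the shadow-merge workspace 'pr-77'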
1361 1364
1362 1365 def merge_status_message(self, status_code):
1363 1366 """
1364 1367 Return a human friendly error message for the given merge status code.
1365 1368 """
1366 1369 return self.MERGE_STATUS_MESSAGES[status_code]
1367 1370
1368 1371 def generate_repo_data(self, repo, commit_id=None, branch=None,
1369 1372 bookmark=None, translator=None):
1370 1373 from rhodecode.model.repo import RepoModel
1371 1374
1372 1375 all_refs, selected_ref = \
1373 1376 self._get_repo_pullrequest_sources(
1374 1377 repo.scm_instance(), commit_id=commit_id,
1375 1378 branch=branch, bookmark=bookmark, translator=translator)
1376 1379
1377 1380 refs_select2 = []
1378 1381 for element in all_refs:
1379 1382 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1380 1383 refs_select2.append({'text': element[1], 'children': children})
1381 1384
1382 1385 return {
1383 1386 'user': {
1384 1387 'user_id': repo.user.user_id,
1385 1388 'username': repo.user.username,
1386 1389 'firstname': repo.user.first_name,
1387 1390 'lastname': repo.user.last_name,
1388 1391 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1389 1392 },
1390 1393 'name': repo.repo_name,
1391 1394 'link': RepoModel().get_url(repo),
1392 1395 'description': h.chop_at_smart(repo.description_safe, '\n'),
1393 1396 'refs': {
1394 1397 'all_refs': all_refs,
1395 1398 'selected_ref': selected_ref,
1396 1399 'select2_refs': refs_select2
1397 1400 }
1398 1401 }
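# Sketch of a 'select2_refs' entry built above (values are illustrative):
#
#     {'text': u'Branches',
#      'children': [{'id': 'branch:default:9a1b2c3d...', 'text': 'default'}]}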
1399 1402
1400 1403 def generate_pullrequest_title(self, source, source_ref, target):
1401 1404 return u'{source}#{at_ref} to {target}'.format(
1402 1405 source=source,
1403 1406 at_ref=source_ref,
1404 1407 target=target,
1405 1408 )
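# Example: generate_pullrequest_title('repo-a', 'feature-x', 'repo-b')
# returns u'repo-a#feature-x to repo-b'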
1406 1409
1407 1410 def _cleanup_merge_workspace(self, pull_request):
1408 1411 # Merging related cleanup
1409 1412 repo_id = pull_request.target_repo.repo_id
1410 1413 target_scm = pull_request.target_repo.scm_instance()
1411 1414 workspace_id = self._workspace_id(pull_request)
1412 1415
1413 1416 try:
1414 1417 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1415 1418 except NotImplementedError:
1416 1419 pass
1417 1420
1418 1421 def _get_repo_pullrequest_sources(
1419 1422 self, repo, commit_id=None, branch=None, bookmark=None,
1420 1423 translator=None):
1421 1424 """
1422 1425 Return a structure with the repo's interesting commits, suitable for
1423 1426 the selectors in the pull request controller.
1424 1427
1425 1428 :param commit_id: a commit that must be in the list somehow
1426 1429 and selected by default
1427 1430 :param branch: a branch that must be in the list and selected
1428 1431 by default - even if closed
1429 1432 :param bookmark: a bookmark that must be in the list and selected by default
1430 1433 """
1431 1434 _ = translator or get_current_request().translate
1432 1435
1433 1436 commit_id = safe_str(commit_id) if commit_id else None
1434 1437 branch = safe_str(branch) if branch else None
1435 1438 bookmark = safe_str(bookmark) if bookmark else None
1436 1439
1437 1440 selected = None
1438 1441
1439 1442 # order matters: first source that has commit_id in it will be selected
1440 1443 sources = []
1441 1444 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1442 1445 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1443 1446
1444 1447 if commit_id:
1445 1448 ref_commit = (h.short_id(commit_id), commit_id)
1446 1449 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1447 1450
1448 1451 sources.append(
1449 1452 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1450 1453 )
1451 1454
1452 1455 groups = []
1453 1456 for group_key, ref_list, group_name, match in sources:
1454 1457 group_refs = []
1455 1458 for ref_name, ref_id in ref_list:
1456 1459 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1457 1460 group_refs.append((ref_key, ref_name))
1458 1461
1459 1462 if not selected:
1460 1463 if set([commit_id, match]) & set([ref_id, ref_name]):
1461 1464 selected = ref_key
1462 1465
1463 1466 if group_refs:
1464 1467 groups.append((group_refs, group_name))
1465 1468
1466 1469 if not selected:
1467 1470 ref = commit_id or branch or bookmark
1468 1471 if ref:
1469 1472 raise CommitDoesNotExistError(
1470 1473 'No commit refs could be found matching: %s' % ref)
1471 1474 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1472 1475 selected = 'branch:%s:%s' % (
1473 1476 repo.DEFAULT_BRANCH_NAME,
1474 1477 repo.branches[repo.DEFAULT_BRANCH_NAME]
1475 1478 )
1476 1479 elif repo.commit_ids:
1477 1480 # make the user select in this case
1478 1481 selected = None
1479 1482 else:
1480 1483 raise EmptyRepositoryError()
1481 1484 return groups, selected
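# Sketch of the returned structure (ref ids are illustrative):
#
#     groups = [
#         ([('book:stable:4f2c1a...', 'stable')], u'Bookmarks'),
#         ([('branch:default:9a1b2c...', 'default')], u'Branches'),
#     ]
#     selected = 'branch:default:9a1b2c...'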
1482 1485
1483 1486 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1484 1487 return self._get_diff_from_pr_or_version(
1485 1488 source_repo, source_ref_id, target_ref_id, context=context)
1486 1489
1487 1490 def _get_diff_from_pr_or_version(
1488 1491 self, source_repo, source_ref_id, target_ref_id, context):
1489 1492 target_commit = source_repo.get_commit(
1490 1493 commit_id=safe_str(target_ref_id))
1491 1494 source_commit = source_repo.get_commit(
1492 1495 commit_id=safe_str(source_ref_id))
1493 1496 if isinstance(source_repo, Repository):
1494 1497 vcs_repo = source_repo.scm_instance()
1495 1498 else:
1496 1499 vcs_repo = source_repo
1497 1500
1498 1501 # TODO: johbo: In the context of an update, we cannot reach
1499 1502 # the old commit anymore with our normal mechanisms. It needs
1500 1503 # some sort of special support in the vcs layer to avoid this
1501 1504 # workaround.
1502 1505 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1503 1506 vcs_repo.alias == 'git'):
1504 1507 source_commit.raw_id = safe_str(source_ref_id)
1505 1508
1506 1509 log.debug('calculating diff between '
1507 1510 'source_ref:%s and target_ref:%s for repo `%s`',
1508 1511 source_ref_id, target_ref_id,
1509 1512 safe_unicode(vcs_repo.path))
1510 1513
1511 1514 vcs_diff = vcs_repo.get_diff(
1512 1515 commit1=target_commit, commit2=source_commit, context=context)
1513 1516 return vcs_diff
1514 1517
1515 1518 def _is_merge_enabled(self, pull_request):
1516 1519 return self._get_general_setting(
1517 1520 pull_request, 'rhodecode_pr_merge_enabled')
1518 1521
1519 1522 def _use_rebase_for_merging(self, pull_request):
1520 1523 repo_type = pull_request.target_repo.repo_type
1521 1524 if repo_type == 'hg':
1522 1525 return self._get_general_setting(
1523 1526 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1524 1527 elif repo_type == 'git':
1525 1528 return self._get_general_setting(
1526 1529 pull_request, 'rhodecode_git_use_rebase_for_merging')
1527 1530
1528 1531 return False
1529 1532
1530 1533 def _close_branch_before_merging(self, pull_request):
1531 1534 repo_type = pull_request.target_repo.repo_type
1532 1535 if repo_type == 'hg':
1533 1536 return self._get_general_setting(
1534 1537 pull_request, 'rhodecode_hg_close_branch_before_merging')
1535 1538 elif repo_type == 'git':
1536 1539 return self._get_general_setting(
1537 1540 pull_request, 'rhodecode_git_close_branch_before_merging')
1538 1541
1539 1542 return False
1540 1543
1541 1544 def _get_general_setting(self, pull_request, settings_key, default=False):
1542 1545 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1543 1546 settings = settings_model.get_general_settings()
1544 1547 return settings.get(settings_key, default)
1545 1548
1546 1549 def _log_audit_action(self, action, action_data, user, pull_request):
1547 1550 audit_logger.store(
1548 1551 action=action,
1549 1552 action_data=action_data,
1550 1553 user=user,
1551 1554 repo=pull_request.target_repo)
1552 1555
1553 1556 def get_reviewer_functions(self):
1554 1557 """
1555 1558 Fetches functions for validating and fetching default reviewers.
1556 1559 If available we use the EE package, otherwise we fall back to the CE
1557 1560 package functions.
1558 1561 """
1559 1562 try:
1560 1563 from rc_reviewers.utils import get_default_reviewers_data
1561 1564 from rc_reviewers.utils import validate_default_reviewers
1562 1565 except ImportError:
1563 1566 from rhodecode.apps.repository.utils import \
1564 1567 get_default_reviewers_data
1565 1568 from rhodecode.apps.repository.utils import \
1566 1569 validate_default_reviewers
1567 1570
1568 1571 return get_default_reviewers_data, validate_default_reviewers
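# Usage sketch; both callables share the CE signatures regardless of the
# package they were imported from:
#
#     get_reviewers_data, validate_reviewers = \
#         PullRequestModel().get_reviewer_functions()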
1569 1572
1570 1573
1571 1574 class MergeCheck(object):
1572 1575 """
1573 1576 Performs merge checks and returns a check object which stores
1574 1577 information about merge errors and merge conditions.
1575 1578 """
1576 1579 TODO_CHECK = 'todo'
1577 1580 PERM_CHECK = 'perm'
1578 1581 REVIEW_CHECK = 'review'
1579 1582 MERGE_CHECK = 'merge'
1580 1583
1581 1584 def __init__(self):
1582 1585 self.review_status = None
1583 1586 self.merge_possible = None
1584 1587 self.merge_msg = ''
1585 1588 self.failed = None
1586 1589 self.errors = []
1587 1590 self.error_details = OrderedDict()
1588 1591
1589 1592 def push_error(self, error_type, message, error_key, details):
1590 1593 self.failed = True
1591 1594 self.errors.append([error_type, message])
1592 1595 self.error_details[error_key] = dict(
1593 1596 details=details,
1594 1597 error_type=error_type,
1595 1598 message=message
1596 1599 )
1597 1600
1598 1601 @classmethod
1599 1602 def validate(cls, pull_request, user, translator, fail_early=False,
1600 1603 force_shadow_repo_refresh=False):
1601 1604 _ = translator
1602 1605 merge_check = cls()
1603 1606
1604 1607 # permissions to merge
1605 1608 user_allowed_to_merge = PullRequestModel().check_user_merge(
1606 1609 pull_request, user)
1607 1610 if not user_allowed_to_merge:
1608 1611 log.debug("MergeCheck: cannot merge, approval is pending.")
1609 1612
1610 1613 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1611 1614 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1612 1615 if fail_early:
1613 1616 return merge_check
1614 1617
1615 1618 # review status, must be always present
1616 1619 review_status = pull_request.calculated_review_status()
1617 1620 merge_check.review_status = review_status
1618 1621
1619 1622 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1620 1623 if not status_approved:
1621 1624 log.debug("MergeCheck: cannot merge, approval is pending.")
1622 1625
1623 1626 msg = _('Pull request reviewer approval is pending.')
1624 1627
1625 1628 merge_check.push_error(
1626 1629 'warning', msg, cls.REVIEW_CHECK, review_status)
1627 1630
1628 1631 if fail_early:
1629 1632 return merge_check
1630 1633
1631 1634 # left over TODOs
1632 1635 todos = CommentsModel().get_unresolved_todos(pull_request)
1633 1636 if todos:
1634 1637 log.debug("MergeCheck: cannot merge, {} "
1635 1638 "unresolved todos left.".format(len(todos)))
1636 1639
1637 1640 if len(todos) == 1:
1638 1641 msg = _('Cannot merge, {} TODO still not resolved.').format(
1639 1642 len(todos))
1640 1643 else:
1641 1644 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1642 1645 len(todos))
1643 1646
1644 1647 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1645 1648
1646 1649 if fail_early:
1647 1650 return merge_check
1648 1651
1649 1652 # merge possible, here is the filesystem simulation + shadow repo
1650 1653 merge_status, msg = PullRequestModel().merge_status(
1651 1654 pull_request, translator=translator,
1652 1655 force_shadow_repo_refresh=force_shadow_repo_refresh)
1653 1656 merge_check.merge_possible = merge_status
1654 1657 merge_check.merge_msg = msg
1655 1658 if not merge_status:
1656 1659 log.debug(
1657 1660 "MergeCheck: cannot merge, pull request merge not possible.")
1658 1661 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1659 1662
1660 1663 if fail_early:
1661 1664 return merge_check
1662 1665
1663 1666 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1664 1667 return merge_check
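# Usage sketch (hypothetical `pr` and `user` objects, `request.translate`
# assumed available):
#
#     check = MergeCheck.validate(pr, user, translator=request.translate,
#                                 fail_early=True)
#     if check.failed:
#         for error_type, message in check.errors:
#             log.warning('%s: %s', error_type, message)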
1665 1668
1666 1669 @classmethod
1667 1670 def get_merge_conditions(cls, pull_request, translator):
1668 1671 _ = translator
1669 1672 merge_details = {}
1670 1673
1671 1674 model = PullRequestModel()
1672 1675 use_rebase = model._use_rebase_for_merging(pull_request)
1673 1676
1674 1677 if use_rebase:
1675 1678 merge_details['merge_strategy'] = dict(
1676 1679 details={},
1677 1680 message=_('Merge strategy: rebase')
1678 1681 )
1679 1682 else:
1680 1683 merge_details['merge_strategy'] = dict(
1681 1684 details={},
1682 1685 message=_('Merge strategy: explicit merge commit')
1683 1686 )
1684 1687
1685 1688 close_branch = model._close_branch_before_merging(pull_request)
1686 1689 if close_branch:
1687 1690 repo_type = pull_request.target_repo.repo_type
1688 1691 if repo_type == 'hg':
1689 1692 close_msg = _('Source branch will be closed after merge.')
1690 1693 elif repo_type == 'git':
1691 1694 close_msg = _('Source branch will be deleted after merge.')
1692 1695
1693 1696 merge_details['close_branch'] = dict(
1694 1697 details={},
1695 1698 message=close_msg
1696 1699 )
1697 1700
1698 1701 return merge_details
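# Note: `merge_details` maps condition keys ('merge_strategy' and, when the
# close-branch setting is enabled, 'close_branch') to dicts holding 'details'
# and a translated 'message'.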
1699 1702
1700 1703 ChangeTuple = collections.namedtuple(
1701 1704 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1702 1705
1703 1706 FileChangeTuple = collections.namedtuple(
1704 1707 'FileChangeTuple', ['added', 'modified', 'removed'])