##// END OF EJS Templates
pull-requests: add merge check that detects WIP marker in title. This will prevent merges in such case....
ergo -
r4099:c12e69d0 default
parent child Browse files
Show More
@@ -1,5446 +1,5454 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 40 or_, and_, not_, func, cast, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType, BigInteger)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers2.text import remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance
59 59 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
60 60 from rhodecode.lib.utils2 import (
61 61 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
62 62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
63 63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
64 64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
65 65 JsonRaw
66 66 from rhodecode.lib.ext_json import json
67 67 from rhodecode.lib.caching_query import FromCache
68 68 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
69 69 from rhodecode.lib.encrypt2 import Encryptor
70 70 from rhodecode.lib.exceptions import (
71 71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
72 72 from rhodecode.model.meta import Base, Session
73 73
# path separator used when composing repository / repo-group URL paths
URL_SEP = '/'
log = logging.getLogger(__name__)

# =============================================================================
# BASE CLASSES
# =============================================================================

# this is propagated from .ini file rhodecode.encrypted_values.secret or
# beaker.session.secret if first is not set.
# and initialized at environment.py
ENCRYPTION_KEY = None

# used to sort permissions by types, '#' used here is not allowed to be in
# usernames, and it's very early in sorted string.printable table.
PERMISSION_TYPE_SORT = {
    'admin': '####',
    'write': '###',
    'read': '##',
    'none': '#',
}
94 94
95 95
def display_user_sort(obj):
    """
    Key function for ordering permission entries returned by .permissions()
    of Repository, RepoGroup and UserGroup. The default user always sorts
    before every other entry.
    """

    # default user wins outright with the longest '#' prefix
    if obj.username == User.DEFAULT_USER:
        return '#####'

    perm_type = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_type, '') + obj.username
107 107
108 108
def display_user_group_sort(obj):
    """
    Key function for ordering user-group permission entries returned by
    .permissions() of Repository, RepoGroup and UserGroup.
    """

    perm_type = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_type, '') + obj.users_group_name
118 118
119 119
def _hash_key(k):
    # stable sha1 digest of the key; used to build safe cache keys
    return sha1_safe(k)
122 122
123 123
def in_filter_generator(qry, items, limit=500):
    """
    Splits IN() into multiple with OR
    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    # one IN() clause per chunk of at most `limit` items
    return [
        qry.in_(items[start: start + limit])
        for start in xrange(0, len(items), limit)
    ]
145 145
146 146
# default __table_args__ merged into every model's table definition
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
153 153
154 154
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value: encrypts with the configured algorithm
        ('aes' by default, or 'fernet') before writing to the DB.
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        if value.startswith('enc$aes$') \
                or value.startswith('enc$aes_hmac$') \
                or value.startswith('enc2$'):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        if algo == 'aes':
            return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).encrypt(value)
        else:
            # BUG FIX: the ValueError was previously constructed but never
            # raised, so an unknown algorithm silently stored None
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value: decrypts DB content using the
        configured algorithm.
        """

        import rhodecode
        if not value:
            return value

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
        if algo == 'aes':
            decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).decrypt(value)
        else:
            # BUG FIX: same missing `raise` as in process_bind_param; without
            # it, decrypted_data was referenced while unbound
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
        return decrypted_data
207 207
208 208
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        # fresh query bound to the current thread-local session
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        # returns None for a falsy id_ without querying
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Like get() but raises HTTPNotFound for invalid or missing ids."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        # marks the row deleted in the session; caller commits
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """
        Find an already-loaded instance of this class in the session
        identity map, matching on attr_name == value, to avoid a SELECT.
        Returns None when no (or ambiguous) match exists.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
313 313
314 314
class RhodeCodeSetting(Base, BaseModel):
    # global application settings stored as name/value/type triples
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # converters applied when reading a value back, keyed by base type name
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values are always persisted as unicode (enforced by the setter)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # decode from storage: optionally decrypt, then convert using the
        # base part of the type name (e.g. 'unicode' from 'unicode.encrypted')
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # type may carry a modifier suffix, e.g. 'unicode.encrypted'; only
        # the base part has to be a known SETTINGS_TYPES key
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        # all settings whose name starts with the given prefix
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
399 399
400 400
class RhodeCodeUi(Base, BaseModel):
    # VCS ui-style settings (hooks, svn patterns) stored per section/key
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        # NOTE(review): format string has an unbalanced ']' — left as-is
        # since this is debug-only output
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
448 448
449 449
class RepoRhodeCodeSetting(Base, BaseModel):
    # per-repository override of RhodeCodeSetting-style settings
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values are always persisted as unicode (enforced by the setter)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # convert the stored unicode back using the registered type
        # converter; unlike RhodeCodeSetting, no encryption support here
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # NOTE: exact match required — no '.encrypted' style suffixes here
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
523 523
524 524
class RepoRhodeCodeUi(Base, BaseModel):
    # per-repository override of RhodeCodeUi settings
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
555 555
556 556
class User(Base, BaseModel):
    # core user account model; class continues below with properties/methods
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        base_table_args
    )

    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    # DB column is 'firstname' but the mapped attribute is 'name'
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    # accessed through the email hybrid property (normalized lowercase)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    # legacy column; writing through the api_key property always clears it
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    user_ssh_keys = relationship('UserSshKeys', cascade='all')

    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities
    external_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all')

    # artifacts owned
    artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')
633 633
634 634 def __unicode__(self):
635 635 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
636 636 self.user_id, self.username)
637 637
    @hybrid_property
    def email(self):
        # primary e-mail address; stored lower-cased by the setter below
        return self._email
641 641
642 642 @email.setter
643 643 def email(self, val):
644 644 self._email = val.lower() if val else None
645 645
646 646 @hybrid_property
647 647 def first_name(self):
648 648 from rhodecode.lib import helpers as h
649 649 if self.name:
650 650 return h.escape(self.name)
651 651 return self.name
652 652
653 653 @hybrid_property
654 654 def last_name(self):
655 655 from rhodecode.lib import helpers as h
656 656 if self.lastname:
657 657 return h.escape(self.lastname)
658 658 return self.lastname
659 659
660 660 @hybrid_property
661 661 def api_key(self):
662 662 """
663 663 Fetch if exist an auth-token with role ALL connected to this user
664 664 """
665 665 user_auth_token = UserApiKeys.query()\
666 666 .filter(UserApiKeys.user_id == self.user_id)\
667 667 .filter(or_(UserApiKeys.expires == -1,
668 668 UserApiKeys.expires >= time.time()))\
669 669 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
670 670 if user_auth_token:
671 671 user_auth_token = user_auth_token.api_key
672 672
673 673 return user_auth_token
674 674
    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key this is deprecated for now
        # the legacy column is cleared regardless of the given value
        self._api_key = None
679 679
    @property
    def reviewer_pull_requests(self):
        # reviewer rows where this user is assigned, with the pull_request
        # relation eagerly loaded to avoid N+1 queries
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()
686 686
    @property
    def firstname(self):
        # alias for future; the mapped attribute is 'name' even though the
        # DB column is 'firstname'
        return self.name
691 691
692 692 @property
693 693 def emails(self):
694 694 other = UserEmailMap.query()\
695 695 .filter(UserEmailMap.user == self) \
696 696 .order_by(UserEmailMap.email_id.asc()) \
697 697 .all()
698 698 return [self.email] + [x.email for x in other]
699 699
    def emails_cached(self):
        """
        Like ``emails`` but fetches the extra address rows through the
        short SQL cache.
        """
        emails = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc())

        emails = emails.options(
            FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
        )

        return [self.email] + [x.email for x in emails]
710 710
711 711 @property
712 712 def auth_tokens(self):
713 713 auth_tokens = self.get_auth_tokens()
714 714 return [x.api_key for x in auth_tokens]
715 715
    def get_auth_tokens(self):
        # all auth-token rows of this user, oldest first
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()
721 721
    @LazyProperty
    def feed_token(self):
        # per-instance cached RSS/feed auth token
        return self.get_feed_token()
725 725
726 726 def get_feed_token(self, cache=True):
727 727 feed_tokens = UserApiKeys.query()\
728 728 .filter(UserApiKeys.user == self)\
729 729 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
730 730 if cache:
731 731 feed_tokens = feed_tokens.options(
732 732 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
733 733
734 734 feed_tokens = feed_tokens.all()
735 735 if feed_tokens:
736 736 return feed_tokens[0].api_key
737 737 return 'NO_FEED_TOKEN_AVAILABLE'
738 738
    @LazyProperty
    def artifact_token(self):
        # per-instance cached artifact-download auth token
        return self.get_artifact_token()
742 742
743 743 def get_artifact_token(self, cache=True):
744 744 artifacts_tokens = UserApiKeys.query()\
745 745 .filter(UserApiKeys.user == self)\
746 746 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
747 747 if cache:
748 748 artifacts_tokens = artifacts_tokens.options(
749 749 FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))
750 750
751 751 artifacts_tokens = artifacts_tokens.all()
752 752 if artifacts_tokens:
753 753 return artifacts_tokens[0].api_key
754 754 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
755 755
    @classmethod
    def get(cls, user_id, cache=False):
        """
        Fetch a user by primary key, optionally through the short SQL
        cache. Returns None for a falsy user_id.
        """
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", "get_users_%s" % user_id))
        return user.get(user_id)
766 766
    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        # all non-expired tokens of the user; when a role is given, tokens
        # with that role or ROLE_ALL are accepted
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()
776 776
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check whether the given plain-text auth_token belongs to this user.

        :param auth_token: plain token string to verify
        :param roles: optional list of UserApiKeys roles to accept;
            ROLE_ALL is always accepted in addition
        :param scope_repo_id: when the matched token is repo-scoped, its
            repo_id must equal this value
        :return: True when a valid matching token exists, else False
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split stored tokens into plain ones and hashed/encrypted ones,
        # recognized by the crypto backend's prefix
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False
833 833
834 834 @property
835 835 def ip_addresses(self):
836 836 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
837 837 return [x.ip_addr for x in ret]
838 838
839 839 @property
840 840 def username_and_name(self):
841 841 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
842 842
843 843 @property
844 844 def username_or_name_or_email(self):
845 845 full_name = self.full_name if self.full_name is not ' ' else None
846 846 return self.username or full_name or self.email
847 847
848 848 @property
849 849 def full_name(self):
850 850 return '%s %s' % (self.first_name, self.last_name)
851 851
852 852 @property
853 853 def full_name_or_username(self):
854 854 return ('%s %s' % (self.first_name, self.last_name)
855 855 if (self.first_name and self.last_name) else self.username)
856 856
857 857 @property
858 858 def full_contact(self):
859 859 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
860 860
861 861 @property
862 862 def short_contact(self):
863 863 return '%s %s' % (self.first_name, self.last_name)
864 864
    @property
    def is_admin(self):
        # global super-admin flag (not a per-repo/per-group admin permission)
        return self.admin
868 868
869 869 @property
870 870 def language(self):
871 871 return self.user_data.get('language')
872 872
873 873 def AuthUser(self, **kwargs):
874 874 """
875 875 Returns instance of AuthUser for this user
876 876 """
877 877 from rhodecode.lib.auth import AuthUser
878 878 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
879 879
880 880 @hybrid_property
881 881 def user_data(self):
882 882 if not self._user_data:
883 883 return {}
884 884
885 885 try:
886 886 return json.loads(self._user_data)
887 887 except TypeError:
888 888 return {}
889 889
890 890 @user_data.setter
891 891 def user_data(self, val):
892 892 if not isinstance(val, dict):
893 893 raise Exception('user_data must be dict, got %s' % type(val))
894 894 try:
895 895 self._user_data = json.dumps(val)
896 896 except Exception:
897 897 log.error(traceback.format_exc())
898 898
899 899 @classmethod
900 900 def get_by_username(cls, username, case_insensitive=False,
901 901 cache=False, identity_cache=False):
902 902 session = Session()
903 903
904 904 if case_insensitive:
905 905 q = cls.query().filter(
906 906 func.lower(cls.username) == func.lower(username))
907 907 else:
908 908 q = cls.query().filter(cls.username == username)
909 909
910 910 if cache:
911 911 if identity_cache:
912 912 val = cls.identity_cache(session, 'username', username)
913 913 if val:
914 914 return val
915 915 else:
916 916 cache_key = "get_user_by_name_%s" % _hash_key(username)
917 917 q = q.options(
918 918 FromCache("sql_cache_short", cache_key))
919 919
920 920 return q.scalar()
921 921
922 922 @classmethod
923 923 def get_by_auth_token(cls, auth_token, cache=False):
924 924 q = UserApiKeys.query()\
925 925 .filter(UserApiKeys.api_key == auth_token)\
926 926 .filter(or_(UserApiKeys.expires == -1,
927 927 UserApiKeys.expires >= time.time()))
928 928 if cache:
929 929 q = q.options(
930 930 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
931 931
932 932 match = q.first()
933 933 if match:
934 934 return match.user
935 935
936 936 @classmethod
937 937 def get_by_email(cls, email, case_insensitive=False, cache=False):
938 938
939 939 if case_insensitive:
940 940 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
941 941
942 942 else:
943 943 q = cls.query().filter(cls.email == email)
944 944
945 945 email_key = _hash_key(email)
946 946 if cache:
947 947 q = q.options(
948 948 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
949 949
950 950 ret = q.scalar()
951 951 if ret is None:
952 952 q = UserEmailMap.query()
953 953 # try fetching in alternate email map
954 954 if case_insensitive:
955 955 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
956 956 else:
957 957 q = q.filter(UserEmailMap.email == email)
958 958 q = q.options(joinedload(UserEmailMap.user))
959 959 if cache:
960 960 q = q.options(
961 961 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
962 962 ret = getattr(q.scalar(), 'user', None)
963 963
964 964 return ret
965 965
966 966 @classmethod
967 967 def get_from_cs_author(cls, author):
968 968 """
969 969 Tries to get User objects out of commit author string
970 970
971 971 :param author:
972 972 """
973 973 from rhodecode.lib.helpers import email, author_name
974 974 # Valid email in the attribute passed, see if they're in the system
975 975 _email = email(author)
976 976 if _email:
977 977 user = cls.get_by_email(_email, case_insensitive=True)
978 978 if user:
979 979 return user
980 980 # Maybe we can match by username?
981 981 _author = author_name(author)
982 982 user = cls.get_by_username(_author, case_insensitive=True)
983 983 if user:
984 984 return user
985 985
986 986 def update_userdata(self, **kwargs):
987 987 usr = self
988 988 old = usr.user_data
989 989 old.update(**kwargs)
990 990 usr.user_data = old
991 991 Session().add(usr)
992 992 log.debug('updated userdata with %s', kwargs)
993 993
    def update_lastlogin(self):
        """Update user lastlogin"""
        # stamp with "now" and stage in the session; caller commits
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)
999 999
    def update_password(self, new_password):
        """Hash *new_password* with the configured backend and stage for commit."""
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)
1005 1005
1006 1006 @classmethod
1007 1007 def get_first_super_admin(cls):
1008 1008 user = User.query()\
1009 1009 .filter(User.admin == true()) \
1010 1010 .order_by(User.user_id.asc()) \
1011 1011 .first()
1012 1012
1013 1013 if user is None:
1014 1014 raise Exception('FATAL: Missing administrative account!')
1015 1015 return user
1016 1016
1017 1017 @classmethod
1018 1018 def get_all_super_admins(cls, only_active=False):
1019 1019 """
1020 1020 Returns all admin accounts sorted by username
1021 1021 """
1022 1022 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1023 1023 if only_active:
1024 1024 qry = qry.filter(User.active == true())
1025 1025 return qry.all()
1026 1026
1027 1027 @classmethod
1028 1028 def get_default_user(cls, cache=False, refresh=False):
1029 1029 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1030 1030 if user is None:
1031 1031 raise Exception('FATAL: Missing default account!')
1032 1032 if refresh:
1033 1033 # The default user might be based on outdated state which
1034 1034 # has been loaded from the cache.
1035 1035 # A call to refresh() ensures that the
1036 1036 # latest state from the database is used.
1037 1037 Session().refresh(user)
1038 1038 return user
1039 1039
    def _get_default_perms(self, user, suffix=''):
        # helper: extract the default permissions set directly on *user*
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)
1043 1043
    def get_default_perms(self, suffix=''):
        # default permissions for this very user object
        return self._get_default_perms(self, suffix)
1046 1046
1047 1047 def get_api_data(self, include_secrets=False, details='full'):
1048 1048 """
1049 1049 Common function for generating user related data for API
1050 1050
1051 1051 :param include_secrets: By default secrets in the API data will be replaced
1052 1052 by a placeholder value to prevent exposing this data by accident. In case
1053 1053 this data shall be exposed, set this flag to ``True``.
1054 1054
1055 1055 :param details: details can be 'basic|full' basic gives only a subset of
1056 1056 the available user information that includes user_id, name and emails.
1057 1057 """
1058 1058 user = self
1059 1059 user_data = self.user_data
1060 1060 data = {
1061 1061 'user_id': user.user_id,
1062 1062 'username': user.username,
1063 1063 'firstname': user.name,
1064 1064 'lastname': user.lastname,
1065 1065 'description': user.description,
1066 1066 'email': user.email,
1067 1067 'emails': user.emails,
1068 1068 }
1069 1069 if details == 'basic':
1070 1070 return data
1071 1071
1072 1072 auth_token_length = 40
1073 1073 auth_token_replacement = '*' * auth_token_length
1074 1074
1075 1075 extras = {
1076 1076 'auth_tokens': [auth_token_replacement],
1077 1077 'active': user.active,
1078 1078 'admin': user.admin,
1079 1079 'extern_type': user.extern_type,
1080 1080 'extern_name': user.extern_name,
1081 1081 'last_login': user.last_login,
1082 1082 'last_activity': user.last_activity,
1083 1083 'ip_addresses': user.ip_addresses,
1084 1084 'language': user_data.get('language')
1085 1085 }
1086 1086 data.update(extras)
1087 1087
1088 1088 if include_secrets:
1089 1089 data['auth_tokens'] = user.auth_tokens
1090 1090 return data
1091 1091
1092 1092 def __json__(self):
1093 1093 data = {
1094 1094 'full_name': self.full_name,
1095 1095 'full_name_or_username': self.full_name_or_username,
1096 1096 'short_contact': self.short_contact,
1097 1097 'full_contact': self.full_contact,
1098 1098 }
1099 1099 data.update(self.get_api_data())
1100 1100 return data
1101 1101
1102 1102
class UserApiKeys(Base, BaseModel):
    """
    Authentication tokens (API keys) issued to users.

    A token can be restricted to a role (http, vcs, api, feed, ...) and
    optionally scoped to a single repository or repository group.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # NOTE(review): ROLE_PASSWORD_RESET is not listed here — presumably
    # deliberate (password-reset tokens are not user-assignable); confirm
    # before adding it to this list.
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # unix timestamp; -1 marks a token that never expires (see `expired`)
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        # exposes the raw token; use get_api_data() when obfuscation is needed
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """Return API data; the token is obfuscated unless *include_secrets*."""
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for direct template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        # tokens with expires == -1 never expire
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # translated human-readable label; falls back to the raw role string
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        # precedence: repository scope beats repository-group scope;
        # neither set means the token is valid globally
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 characters only; None when no token is set
        if self.api_key:
            return self.api_key[:4] + "****"
1205 1205
1206 1206
class UserEmailMap(Base, BaseModel):
    """Additional (alternate) email addresses attached to a user account."""
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            raise AttributeError('email %s is present is user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are stored normalized to lower-case
        self._email = val.lower() if val else None
1236 1236
1237 1237
class UserIpMap(Base, BaseModel):
    """Per-user IP whitelist entries; ip_addr may also be a CIDR network."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for direct template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        # expand an address/CIDR into its [first, last] boundary addresses
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1272 1272
1273 1273
class UserSshKeys(Base, BaseModel):
    """Public SSH keys registered for a user, identified by fingerprint."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # last time the key was used for authentication; None when never used
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        # public representation; never includes the key material itself
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        data = self.__json__()
        return data
1308 1308
1309 1309
class UserLog(Base, BaseModel):
    """Audit/journal entries recording user actions."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # audit-entry schema versions
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # user/repo references use SET NULL so audit rows survive deletions;
    # the denormalized username/repository_name columns keep the label
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        # alias used by generic audit-log consumers
        return self.user_log_id

    @property
    def action_as_day(self):
        # calendar date (without time) on which the action happened
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1358 1358
1359 1359
class UserGroup(Base, BaseModel):
    """A named group of users, usable as a permission subject."""
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    # NOTE(review): the relationship target string below carries a trailing
    # space ('UserGroupUserGroupToPerm ') — it appears to still resolve since
    # the string is evaluated, but looks accidental; confirm before cleaning.
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        # deserialize the JSON group_data column; {} for empty/invalid data
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for direct template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        # SQL-expression side of the hybrid: query against the raw column
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        # best-effort serialization; logs and keeps old value on failure
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # external sync source (e.g. LDAP) recorded in group_data, if any
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Look up a user group by name, optionally case-insensitively/cached."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        # returns None for a falsy id without touching the database
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        # user groups that hold a permission on this user group
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        # helper: default permissions defined directly on *user_group*
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.

        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1565 1565
1566 1566
class UserGroupMember(Base, BaseModel):
    """Association row linking a user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # :param gr_id: users_group_id of the group to link
        # :param u_id: user_id of the member
        self.users_group_id = gr_id
        self.user_id = u_id
1583 1583
1584 1584
class RepositoryField(Base, BaseModel):
    """
    Arbitrary extra key/value fields attached to a repository, rendered in
    forms with a ``PREFIX``-ed key to avoid clashing with built-in fields.
    """
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        # use the PREFIX constant instead of a duplicated hard-coded 'ex_'
        # literal so the prefix is defined in exactly one place (the produced
        # value is unchanged: PREFIX == 'ex_')
        return '{}{}'.format(self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form prefix from *key* if it is present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for (*repo*, *key*), or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1621 1621
1622 1622
class Repository(Base, BaseModel):
    """
    Database model of a single repository: identity, creation state,
    clone/push URIs, owner and group links, plus all repo-scoped ORM
    relationships (permissions, pull requests, comments, settings, ...).
    """
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates used to render clone URLs (see clone_url())
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # repo creation workflow states, stored in the repo_state column
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # origins that may place a lock on a repository (see lock()/unlock())
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    # separator used for group path segments inside repo_name
    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # canonical name; exposed via the repo_name hybrid property below
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    # sha1 of the name, kept in sync by the repo_name setter
    _repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # '<rev_type>:<rev>' string; exposed via the landing_rev hybrid property
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # '<user_id>:<time>:<reason>' string; exposed via the locked property
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True) # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1732 1732
1733 1733 def __unicode__(self):
1734 1734 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1735 1735 safe_unicode(self.repo_name))
1736 1736
    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for direct rendering in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.description)
1741 1741
1742 1742 @hybrid_property
1743 1743 def landing_rev(self):
1744 1744 # always should return [rev_type, rev]
1745 1745 if self._landing_revision:
1746 1746 _rev_info = self._landing_revision.split(':')
1747 1747 if len(_rev_info) < 2:
1748 1748 _rev_info.insert(0, 'rev')
1749 1749 return [_rev_info[0], _rev_info[1]]
1750 1750 return [None, None]
1751 1751
1752 1752 @landing_rev.setter
1753 1753 def landing_rev(self, val):
1754 1754 if ':' not in val:
1755 1755 raise ValueError('value must be delimited with `:` and consist '
1756 1756 'of <rev_type>:<rev>, got %s instead' % val)
1757 1757 self._landing_revision = val
1758 1758
1759 1759 @hybrid_property
1760 1760 def locked(self):
1761 1761 if self._locked:
1762 1762 user_id, timelocked, reason = self._locked.split(':')
1763 1763 lock_values = int(user_id), timelocked, reason
1764 1764 else:
1765 1765 lock_values = [None, None, None]
1766 1766 return lock_values
1767 1767
1768 1768 @locked.setter
1769 1769 def locked(self, val):
1770 1770 if val and isinstance(val, (list, tuple)):
1771 1771 self._locked = ':'.join(map(str, val))
1772 1772 else:
1773 1773 self._locked = None
1774 1774
    @hybrid_property
    def changeset_cache(self):
        """
        Cached metadata of the last commit, decoded from the JSON column.
        Falls back to an EmptyCommit-shaped dict when the cache is empty
        or cannot be decoded.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            dummy['source_repo_id'] = self.repo_id
            # round-trip through JSON so the result matches the decoded form
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            # NOTE(review): this fallback lacks 'source_repo_id', unlike the
            # empty-cache branch above -- confirm whether that is intended
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
1790 1790
1791 1791 @changeset_cache.setter
1792 1792 def changeset_cache(self, val):
1793 1793 try:
1794 1794 self._changeset_cache = json.dumps(val)
1795 1795 except Exception:
1796 1796 log.error(traceback.format_exc())
1797 1797
    @hybrid_property
    def repo_name(self):
        # canonical repository name, may include group path segments
        return self._repo_name

    @repo_name.setter
    def repo_name(self, value):
        self._repo_name = value
        # keep the unique sha1 hash column in sync with the name
        self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1806 1806
1807 1807 @classmethod
1808 1808 def normalize_repo_name(cls, repo_name):
1809 1809 """
1810 1810 Normalizes os specific repo_name to the format internally stored inside
1811 1811 database using URL_SEP
1812 1812
1813 1813 :param cls:
1814 1814 :param repo_name:
1815 1815 """
1816 1816 return cls.NAME_SEP.join(repo_name.split(os.sep))
1817 1817
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """
        Fetch a repository by its canonical name.

        :param repo_name: repository name to look up
        :param cache: when True, consult a cache layer before the DB
        :param identity_cache: with `cache`, use the SQLAlchemy identity
            cache; otherwise a region-backed SQL query cache is used
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
1834 1834
1835 1835 @classmethod
1836 1836 def get_by_id_or_repo_name(cls, repoid):
1837 1837 if isinstance(repoid, (int, long)):
1838 1838 try:
1839 1839 repo = cls.get(repoid)
1840 1840 except ValueError:
1841 1841 repo = None
1842 1842 else:
1843 1843 repo = cls.get_by_repo_name(repoid)
1844 1844 return repo
1845 1845
1846 1846 @classmethod
1847 1847 def get_by_full_path(cls, repo_full_path):
1848 1848 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1849 1849 repo_name = cls.normalize_repo_name(repo_name)
1850 1850 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1851 1851
1852 1852 @classmethod
1853 1853 def get_repo_forks(cls, repo_id):
1854 1854 return cls.query().filter(Repository.fork_id == repo_id)
1855 1855
    @classmethod
    def base_path(cls):
        """
        Return the filesystem base path under which all repositories are
        stored, read from the RhodeCodeUi settings table (query cached).
        """
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1867 1867
1868 1868 @classmethod
1869 1869 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1870 1870 case_insensitive=True, archived=False):
1871 1871 q = Repository.query()
1872 1872
1873 1873 if not archived:
1874 1874 q = q.filter(Repository.archived.isnot(true()))
1875 1875
1876 1876 if not isinstance(user_id, Optional):
1877 1877 q = q.filter(Repository.user_id == user_id)
1878 1878
1879 1879 if not isinstance(group_id, Optional):
1880 1880 q = q.filter(Repository.group_id == group_id)
1881 1881
1882 1882 if case_insensitive:
1883 1883 q = q.order_by(func.lower(Repository.repo_name))
1884 1884 else:
1885 1885 q = q.order_by(Repository.repo_name)
1886 1886
1887 1887 return q.all()
1888 1888
1889 1889 @property
1890 1890 def repo_uid(self):
1891 1891 return '_{}'.format(self.repo_id)
1892 1892
    @property
    def forks(self):
        """
        Return a query of repositories forked from this one.
        """
        return Repository.get_repo_forks(self.repo_id)

    @property
    def parent(self):
        """
        Return the repository this one was forked from, or None.
        """
        return self.fork
1906 1906
1907 1907 @property
1908 1908 def just_name(self):
1909 1909 return self.repo_name.split(self.NAME_SEP)[-1]
1910 1910
1911 1911 @property
1912 1912 def groups_with_parents(self):
1913 1913 groups = []
1914 1914 if self.group is None:
1915 1915 return groups
1916 1916
1917 1917 cur_gr = self.group
1918 1918 groups.insert(0, cur_gr)
1919 1919 while 1:
1920 1920 gr = getattr(cur_gr, 'parent_group', None)
1921 1921 cur_gr = cur_gr.parent_group
1922 1922 if gr is None:
1923 1923 break
1924 1924 groups.insert(0, gr)
1925 1925
1926 1926 return groups
1927 1927
    @property
    def groups_and_repo(self):
        # tuple of (chain of parent groups, this repository)
        return self.groups_with_parents, self
1931 1931
    @LazyProperty
    def repo_path(self):
        """
        Base filesystem path of the repository store (where repositories
        actually live on disk); cached per instance via LazyProperty.
        """
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1942 1942
1943 1943 @property
1944 1944 def repo_full_path(self):
1945 1945 p = [self.repo_path]
1946 1946 # we need to split the name by / since this is how we store the
1947 1947 # names in the database, but that eventually needs to be converted
1948 1948 # into a valid system path
1949 1949 p += self.repo_name.split(self.NAME_SEP)
1950 1950 return os.path.join(*map(safe_unicode, p))
1951 1951
    @property
    def cache_keys(self):
        """
        Return all CacheKey rows registered under this repository's
        invalidation namespace, ordered by cache key.
        """
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        return CacheKey.query()\
            .filter(CacheKey.cache_args == invalidation_namespace)\
            .order_by(CacheKey.cache_key)\
            .all()
1963 1963
1964 1964 @property
1965 1965 def cached_diffs_relative_dir(self):
1966 1966 """
1967 1967 Return a relative to the repository store path of cached diffs
1968 1968 used for safe display for users, who shouldn't know the absolute store
1969 1969 path
1970 1970 """
1971 1971 return os.path.join(
1972 1972 os.path.dirname(self.repo_name),
1973 1973 self.cached_diffs_dir.split(os.path.sep)[-1])
1974 1974
1975 1975 @property
1976 1976 def cached_diffs_dir(self):
1977 1977 path = self.repo_full_path
1978 1978 return os.path.join(
1979 1979 os.path.dirname(path),
1980 1980 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1981 1981
1982 1982 def cached_diffs(self):
1983 1983 diff_cache_dir = self.cached_diffs_dir
1984 1984 if os.path.isdir(diff_cache_dir):
1985 1985 return os.listdir(diff_cache_dir)
1986 1986 return []
1987 1987
1988 1988 def shadow_repos(self):
1989 1989 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1990 1990 return [
1991 1991 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1992 1992 if x.startswith(shadow_repos_pattern)]
1993 1993
1994 1994 def get_new_name(self, repo_name):
1995 1995 """
1996 1996 returns new full repository name based on assigned group and new new
1997 1997
1998 1998 :param group_name:
1999 1999 """
2000 2000 path_prefix = self.group.full_path_splitted if self.group else []
2001 2001 return self.NAME_SEP.join(path_prefix + [repo_name])
2002 2002
    @property
    def _config(self):
        """
        Returns db based config object.
        """
        from rhodecode.lib.utils import make_db_config
        # clear_session=False: keep the current DB session usable afterwards
        return make_db_config(clear_session=False, repo=self)
2010 2010
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repositories

        :param with_admins: include a row for every super-admin
        :param with_owner: include a row for the repository owner
        :param expand_from_user_groups: also expand user-group permissions
            into individual member rows
        :return: list of AttributeDict rows (admins, owner, users, members)
        """
        _admin_perm = 'repository.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            usr.permission_id = None
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                # NOTE(review): owner_row[0] assumes with_owner=True here;
                # with_admins without with_owner would raise IndexError
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    usr.permission_id = None
                    super_admin_rows.append(usr)

        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            # also check if this permission is maybe used by branch_permissions
            if _usr.branch_perm_entry:
                usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]

            usr.permission = _usr.permission.permission_name
            usr.permission_id = _usr.repo_to_perm_id
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
2077 2077
    def permission_user_groups(self, with_members=True):
        """
        Return user-group permission rows for this repository, sorted.

        :param with_members: attach each group's member dicts to the row
        :return: list of AttributeDict rows with `permission` (and
            optionally `members`) attached
        """
        q = UserGroupRepoToPerm.query()\
            .filter(UserGroupRepoToPerm.repository == self)
        q = q.options(joinedload(UserGroupRepoToPerm.repository),
                      joinedload(UserGroupRepoToPerm.users_group),
                      joinedload(UserGroupRepoToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows
2096 2096
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.
        :return: dict of repository attributes suitable for API responses;
            custom repository fields are appended when enabled in settings
        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # unpack the (user_id, time, reason) lock triple
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2145 2145
2146 2146 @classmethod
2147 2147 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2148 2148 if not lock_time:
2149 2149 lock_time = time.time()
2150 2150 if not lock_reason:
2151 2151 lock_reason = cls.LOCK_AUTOMATIC
2152 2152 repo.locked = [user_id, lock_time, lock_reason]
2153 2153 Session().add(repo)
2154 2154 Session().commit()
2155 2155
2156 2156 @classmethod
2157 2157 def unlock(cls, repo):
2158 2158 repo.locked = None
2159 2159 Session().add(repo)
2160 2160 Session().commit()
2161 2161
    @classmethod
    def getlock(cls, repo):
        # convenience accessor for the (user_id, time, reason) lock triple
        return repo.locked
2165 2165
2166 2166 def is_user_lock(self, user_id):
2167 2167 if self.lock[0]:
2168 2168 lock_user_id = safe_int(self.lock[0])
2169 2169 user_id = safe_int(user_id)
2170 2170 # both are ints, and they are equal
2171 2171 return all([lock_user_id, user_id]) and lock_user_id == user_id
2172 2172
2173 2173 return False
2174 2174
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: either 'push' or 'pull'
        :param user_id: id of the user performing the action
        :param only_when_enabled: when False, evaluate the lock even if
            locking is disabled on this repository
        :raises ValueError: on an unknown `action`
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

            else:
                log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2237 2237
2238 2238 @property
2239 2239 def last_commit_cache_update_diff(self):
2240 2240 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2241 2241
2242 2242 @property
2243 2243 def last_commit_change(self):
2244 2244 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2245 2245 empty_date = datetime.datetime.fromtimestamp(0)
2246 2246 date_latest = self.changeset_cache.get('date', empty_date)
2247 2247 try:
2248 2248 return parse_datetime(date_latest)
2249 2249 except Exception:
2250 2250 return empty_date
2251 2251
    @property
    def last_db_change(self):
        # timestamp of the last database-level update of this repo row
        return self.updated_on
2255 2255
2256 2256 @property
2257 2257 def clone_uri_hidden(self):
2258 2258 clone_uri = self.clone_uri
2259 2259 if clone_uri:
2260 2260 import urlobject
2261 2261 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2262 2262 if url_obj.password:
2263 2263 clone_uri = url_obj.with_password('*****')
2264 2264 return clone_uri
2265 2265
2266 2266 @property
2267 2267 def push_uri_hidden(self):
2268 2268 push_uri = self.push_uri
2269 2269 if push_uri:
2270 2270 import urlobject
2271 2271 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2272 2272 if url_obj.password:
2273 2273 push_uri = url_obj.with_password('*****')
2274 2274 return push_uri
2275 2275
    def clone_url(self, **override):
        """
        Render the clone URL for this repository from a URL template.

        Recognized override keys: ``with_id`` (use the id-based default
        template), ``uri_tmpl`` (explicit template), ``ssh`` (prefer the
        SSH template); remaining keys are forwarded to the URL renderer.
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)
            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id, **override)
2311 2311
2312 2312 def set_state(self, state):
2313 2313 self.repo_state = state
2314 2314 Session().add(self)
2315 2315 #==========================================================================
2316 2316 # SCM PROPERTIES
2317 2317 #==========================================================================
2318 2318
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        # resolve a commit by hash id or numeric index via the safe helper
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2322 2322
2323 2323 def get_changeset(self, rev=None, pre_load=None):
2324 2324 warnings.warn("Use get_commit", DeprecationWarning)
2325 2325 commit_id = None
2326 2326 commit_idx = None
2327 2327 if isinstance(rev, compat.string_types):
2328 2328 commit_id = rev
2329 2329 else:
2330 2330 commit_idx = rev
2331 2331 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2332 2332 pre_load=pre_load)
2333 2333
2334 2334 def get_landing_commit(self):
2335 2335 """
2336 2336 Returns landing commit, or if that doesn't exist returns the tip
2337 2337 """
2338 2338 _rev_type, _rev = self.landing_rev
2339 2339 commit = self.get_commit(_rev)
2340 2340 if isinstance(commit, EmptyCommit):
2341 2341 return self.get_commit()
2342 2342 return commit
2343 2343
    def flush_commit_cache(self):
        # force-invalidate by writing a sentinel raw_id, then recompute
        # NOTE(review): the sentinel dict carries only 'raw_id'; confirm
        # update_commit_cache tolerates the missing 'revision' key
        self.update_commit_cache(cs_cache={'raw_id':'0'})
        self.update_commit_cache()
2347 2347
    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last commit for repository, keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author
            updated_on

        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            # use no-cache version here
            scm_repo = self.scm_instance(cache=False, config=config)

            empty = scm_repo is None or scm_repo.is_empty()
            if not empty:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents", "branch"])
            else:
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            # a cache is outdated when either the raw id or revision changed
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                    new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _default = datetime.datetime.utcnow()
            last_change = cs_cache.get('date') or _default
            # we check if last update is newer than the new value
            # if yes, we use the current timestamp instead. Imagine you get
            # old commit pushed 1y ago, we'd set last update 1y to ago.
            last_change_timestamp = datetime_to_time(last_change)
            current_timestamp = datetime_to_time(last_change)
            # NOTE(review): both timestamps above derive from `last_change`,
            # so this comparison can never be true; one side was likely
            # meant to come from the existing cache or from time.time() --
            # confirm the intended source before changing behavior
            if last_change_timestamp > current_timestamp:
                cs_cache['date'] = _default

            cs_cache['updated_on'] = time.time()
            self.changeset_cache = cs_cache
            self.updated_on = last_change
            Session().add(self)
            Session().commit()

            log.debug('updated repo `%s` with new commit cache %s',
                      self.repo_name, cs_cache)
        else:
            cs_cache = self.changeset_cache
            cs_cache['updated_on'] = time.time()
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()

            log.debug('Skipping update_commit_cache for repo:`%s` '
                      'commit already with latest changes', self.repo_name)
2413 2413
    @property
    def tip(self):
        # latest commit of the repository (resolved via scm instance)
        return self.get_commit('tip')

    @property
    def author(self):
        # author of the tip commit
        return self.tip.author

    @property
    def last_change(self):
        # last change reported by the backing scm instance
        return self.scm_instance().last_change
2425 2425
2426 2426 def get_comments(self, revisions=None):
2427 2427 """
2428 2428 Returns comments for this repository grouped by revisions
2429 2429
2430 2430 :param revisions: filter query by revisions only
2431 2431 """
2432 2432 cmts = ChangesetComment.query()\
2433 2433 .filter(ChangesetComment.repo == self)
2434 2434 if revisions:
2435 2435 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2436 2436 grouped = collections.defaultdict(list)
2437 2437 for cmt in cmts.all():
2438 2438 grouped[cmt.revision].append(cmt)
2439 2439 return grouped
2440 2440
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :return: dict mapping revision -> [status, label, pr_id, pr_repo]
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # explicit statuses override the implicit under-review entries above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2482 2482
2483 2483 # ==========================================================================
2484 2484 # SCM CACHE INSTANCE
2485 2485 # ==========================================================================
2486 2486
    def scm_instance(self, **kwargs):
        """
        Return a VCS backend instance for this repository, optionally
        served from the long-term instance cache.

        Recognized kwargs: ``config`` (bypasses caching, used by
        repo2dbmapper), ``cache`` (forwarded to the vcs.remote layer),
        ``vcs_full_cache`` (overrides the global full-cache setting).
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        vcs_full_cache = kwargs.pop('vcs_full_cache', None)
        if vcs_full_cache is not None:
            # allows override global config
            full_cache = vcs_full_cache
        else:
            full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            log.debug('Initializing pure cached instance for %s', self.repo_path)
            return self._get_instance_cached()

        # cache here is sent to the "vcs server"
        return self._get_instance(cache=bool(cache), config=config)
2508 2508
    def _get_instance_cached(self):
        """
        Return the vcs backend instance through the long-term dogpile
        cache region, re-computing (refreshing) it when an invalidation
        signal is pending for this repository.
        """
        from rhodecode.lib import rc_cache

        # namespace is per-repository so invalidation only affects this repo
        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id, _cache_state_uid):
            # the arguments only participate in the cache key; the actual
            # instance is always built from the enclosing `self`
            return self._get_instance(repo_state_uid=_cache_state_uid)

        # we must use thread scoped cache here,
        # because each thread of gevent needs its own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.cache_data['cache_state_uid']
            args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)

            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

            log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
            return instance
2539 2539
2540 2540 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2541 2541 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2542 2542 self.repo_type, self.repo_path, cache)
2543 2543 config = config or self._config
2544 2544 custom_wire = {
2545 2545 'cache': cache, # controls the vcs.remote cache
2546 2546 'repo_state_uid': repo_state_uid
2547 2547 }
2548 2548 repo = get_vcs_instance(
2549 2549 repo_path=safe_str(self.repo_full_path),
2550 2550 config=config,
2551 2551 with_wire=custom_wire,
2552 2552 create=False,
2553 2553 _vcs_alias=self.repo_type)
2554 2554 if repo is not None:
2555 2555 repo.count() # cache rebuild
2556 2556 return repo
2557 2557
2558 2558 def get_shadow_repository_path(self, workspace_id):
2559 2559 from rhodecode.lib.vcs.backends.base import BaseRepository
2560 2560 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2561 2561 self.repo_full_path, self.repo_id, workspace_id)
2562 2562 return shadow_repo_path
2563 2563
2564 2564 def __json__(self):
2565 2565 return {'landing_rev': self.landing_rev}
2566 2566
2567 2567 def get_dict(self):
2568 2568
2569 2569 # Since we transformed `repo_name` to a hybrid property, we need to
2570 2570 # keep compatibility with the code which uses `repo_name` field.
2571 2571
2572 2572 result = super(Repository, self).get_dict()
2573 2573 result['repo_name'] = result.pop('_repo_name', None)
2574 2574 return result
2575 2575
2576 2576
class RepoGroup(Base, BaseModel):
    """
    A repository group: a nestable, `/`-separated container for
    repositories and other groups, carrying its own permissions, owner
    and a cached "newest commit" metadata blob.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # full path name (e.g. `parent/child`); exposed via the `group_name`
    # hybrid property below which keeps `group_name_hash` in sync
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True) # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete-orphan")

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def group_name(self):
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        # keep the lookup hash in sync with the name
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)

    @hybrid_property
    def changeset_cache(self):
        """
        Cached metadata of the newest commit inside this group, stored as
        JSON in `_changeset_cache`. Falls back to an EmptyCommit payload
        when the cache is missing or unreadable.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not self._changeset_cache:
            dummy['source_repo_id'] = ''
            # round-trip through json to normalize the payload types
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(self._changeset_cache)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @changeset_cache.setter
    def changeset_cache(self, val):
        # best-effort write; serialization errors are logged, not raised
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        if self.group_id and val:
            assert val != self.group_id

        return val

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def hash_repo_group_name(cls, repo_group_name):
        """
        Compute an ascii-only lookup key for *repo_group_name*:
        formatting is stripped, the value lower-cased, and every
        non-ascii-letter character replaced by its ordinal value.
        """
        val = remove_formatting(repo_group_name)
        val = safe_str(val).lower()
        chars = []
        for c in val:
            if c not in string.ascii_letters:
                c = str(ord(c))
            chars.append(c)

        return ''.join(chars)

    @classmethod
    def _generate_choice(cls, repo_group):
        # build a (group_id, 'parent/child') tuple for select2 widgets
        from webhelpers2.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """
        Return (id, label) select choices for *groups* (all groups when
        None), sorted by top-level segment, optionally prefixed with a
        '-- No parent --' entry.
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Fetch a single group by its full path name, or None."""
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()

    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        """
        Return the oldest personal group owned by *user_id*, or None for
        the default (anonymous) user.
        """
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user) \
            .order_by(cls.group_id.asc()) \
            .first()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """
        List repo groups, optionally restricted by owner and/or parent
        group, ordered by name.
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self, parents_recursion_limit = 10):
        # All ancestor groups, outermost first.
        # NOTE(review): property getters are invoked with `self` only, so
        # `parents_recursion_limit` always takes its default value of 10.
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_commit_cache_update_diff(self):
        # seconds elapsed since the changeset cache was last refreshed
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @property
    def last_commit_change(self):
        """Datetime of the newest cached commit; epoch 0 on parse errors."""
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = self.changeset_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def children(self):
        # direct sub-groups only, as an un-materialized query
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        # last path segment of the full group name
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        # repositories directly inside this group, as an un-materialized query
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        """Count repositories in this group and all nested sub-groups."""
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True, include_groups=True):
        """
        Depth-first collection of nested groups and/or repositories,
        starting with this group itself (when groups are included).
        """
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    if include_groups:
                        all_.append(gr)
                    _get_members(gr)

        root_group = []
        if include_groups:
            root_group = [self]

        _get_members(self)
        return root_group + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all children repositories for this group
        """
        return self._recursive_objects(include_groups=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name: new last path segment for this group
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def update_commit_cache(self, config=None):
        """
        Update cache of last changeset for newest repository inside this group, keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime

        def repo_groups_and_repos():
            # map of {group: [repos reachable under it, incl. nested]};
            # each repo is also registered with all of its parent groups
            all_entries = OrderedDefaultDict(list)

            def _get_members(root_gr, pos=0):

                for repo in root_gr.repositories:
                    all_entries[root_gr].append(repo)

                # fill in all parent positions
                for parent_group in root_gr.parents:
                    all_entries[parent_group].extend(all_entries[root_gr])

                children_groups = root_gr.children.all()
                if children_groups:
                    for cnt, gr in enumerate(children_groups, 1):
                        _get_members(gr, pos=pos+cnt)

            _get_members(root_gr=self)
            return all_entries

        empty_date = datetime.datetime.fromtimestamp(0)
        for repo_group, repos in repo_groups_and_repos().items():

            latest_repo_cs_cache = {}
            _date_latest = empty_date
            for repo in repos:
                repo_cs_cache = repo.changeset_cache
                date_latest = latest_repo_cs_cache.get('date', empty_date)
                date_current = repo_cs_cache.get('date', empty_date)
                current_timestamp = datetime_to_time(parse_datetime(date_latest))
                # adopt this repo's cache when its commit date is newer
                # than the best candidate seen so far
                if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                    latest_repo_cs_cache = repo_cs_cache
                    latest_repo_cs_cache['source_repo_id'] = repo.repo_id
                    _date_latest = parse_datetime(latest_repo_cs_cache['date'])

            latest_repo_cs_cache['updated_on'] = time.time()
            repo_group.changeset_cache = latest_repo_cs_cache
            repo_group.updated_on = _date_latest
            Session().add(repo_group)
            # NOTE(review): commits once per group inside the loop
            Session().commit()

            log.debug('updated repo group `%s` with new commit cache %s',
                      repo_group.group_name, latest_repo_cs_cache)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repository groups
        """
        # NOTE(review): the owner_row[0] accesses below assume
        # with_owner=True whenever user rows exist — confirm callers
        _admin_perm = 'group.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """User-group permission rows granted on this repository group."""
        q = UserGroupRepoGroupToPerm.query()\
            .filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data

    def get_dict(self):
        # Since we transformed `group_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `group_name` field.
        result = super(RepoGroup, self).get_dict()
        result['group_name'] = result.pop('_group_name', None)
        return result
3029 3029
3030 3030
class Permission(Base, BaseModel):
    """
    Registry of every permission name known to the system, plus helper
    queries resolving the effective permissions of a user — granted
    either directly or through membership in an active user group.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    # (permission_name, translated long description) pairs seeded into
    # the table on system setup
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Return the Permission row named *key*, or None."""
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """Per-user repository permissions, optionally for one repo."""
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        """Per-user branch permission rules, ordered by rule order."""
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """Repository permissions a user inherits via active user groups."""
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        """Branch permission rules a user inherits via active user groups."""
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        """Per-user repo-group permissions, optionally for one group."""
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserRepoGroupToPerm.permission_id == Permission.permission_id)\
            .join(
                RepoGroup,
                UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """Repo-group permissions a user inherits via active user groups."""
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        """Per-user user-group permissions, optionally for one group."""
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """User-group permissions a user inherits via active user groups."""
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
3311 3311
3312 3312
class UserRepoToPerm(Base, BaseModel):
    """
    Permission granted to a single user on a single repository.
    One row per (user, repository, permission) combination.
    """
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # branch-level permission rows hanging off this repo permission; they
    # are deleted together with it (delete-orphan cascade)
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """Create a new row and add it to the session (caller commits)."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        # Python 2 style unicode repr
        return u'<%s => %s >' % (self.user, self.repository)
3342 3342
3343 3343
class UserUserGroupToPerm(Base, BaseModel):
    """
    Permission granted to a single user on a user group.
    One row per (user, user group, permission) combination.
    """
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Create a new row and add it to the session (caller commits)."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
3371 3371
3372 3372
class UserToPerm(Base, BaseModel):
    """
    Global (non-scoped) permission assigned directly to a user.
    """
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly loaded: the permission is always needed alongside this row
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
3389 3389
3390 3390
class UserGroupRepoToPerm(Base, BaseModel):
    """
    Permission granted to a user group on a single repository.
    """
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    # branch-level permissions tied to this group permission (cascade delete)
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Create a new row and add it to the session (caller commits)."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3419 3419
3420 3420
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Permission granted by one user group (`user_group`) on another
    user group (`target_user_group`). Self-grants are forbidden by
    the check constraint.
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # explicit primaryjoins required: both FKs point at users_groups
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Create a new row and add it to the session (caller commits)."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3449 3449
3450 3450
class UserGroupToPerm(Base, BaseModel):
    """
    Global (non-scoped) permission assigned to a user group.
    """
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3464 3464
3465 3465
class UserRepoGroupToPerm(Base, BaseModel):
    """
    Permission granted to a single user on a repository group.
    """
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Create a new row and add it to the session (caller commits)."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3490 3490
3491 3491
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """
    Permission granted to a user group on a repository group.
    """
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike sibling tables, permission_id is not part of
        # this unique constraint — presumably only one permission per
        # (group, user group) pair is allowed; confirm this is intentional
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Create a new row and add it to the session (caller commits)."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3519 3519
3520 3520
class Statistics(Base, BaseModel):
    """
    Pre-computed per-repository statistics (commit activity, languages),
    stored as serialized JSON blobs; one row per repository.
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # last revision the statistics were computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data

    repository = relationship('Repository', single_parent=True)
3535 3535
3536 3536
class UserFollowing(Base, BaseModel):
    """
    A user following either a repository or another user; exactly one of
    follows_repo_id / follows_user_id is expected to be set per row.
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # explicit primaryjoins: two FKs to users (follower and followed)
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of follow rows for the given repository id."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3559 3559
3560 3560
class CacheKey(Base, BaseModel):
    """
    Cache-invalidation book-keeping: one row per cache key, holding an
    active flag plus a state uid that is shared between worker processes
    so they can detect stale caches.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None):
        """
        :param cache_key: full cache key string (unique per row)
        :param cache_args: namespace/args part used to group related keys
        :param cache_state_uid: optional shared state uid; generated if absent
        """
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args into (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """
        Return a new state uid: deterministic (uuid5 of `based_on`) when a
        seed is given, otherwise random (uuid4).
        """
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param cache_uid: cache_args namespace identifying the affected keys
        :param delete: when True, remove the rows instead of flagging them
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                # flip active off and rotate the shared state uid so other
                # workers notice the invalidation
                qry.update({"cache_active": False,
                            "cache_state_uid": cls.generate_new_state_uid()})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the row for `cache_key`, or None when it does not exist."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Return {cache_key: row} for all keys in the given namespace."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
3668 3668
3669 3669
class ChangesetComment(Base, BaseModel):
    """
    A comment on a changeset (commit) or on a pull request — either a
    general comment or an inline one anchored to a file path and line.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state marker for comments made obsolete by a newer PR version
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    # self-referential pair: a TODO comment and the comment that resolves it
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """
        Return the 1-based position of `pr_version` within `versions`
        (a list of PullRequestVersion-like objects), or None if absent.
        """
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            return num_versions.index(pr_version) +1
        except (IndexError, ValueError):
            return

    @property
    def outdated(self):
        # True once a PR update flagged this comment as outdated
        return self.display_state == self.COMMENT_OUTDATED

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # any versioned comment predates the "latest" (None) version
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # comment that resolved this one, if any
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments carry both a line number and a file path
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        """Return this comment's 1-based PR-version index within `versions`."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return a dict of comment fields as exposed by the JSON API."""
        comment = self
        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3794 3794
3795 3795
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) set on a revision, optionally in
    the context of a pull request; `version` tracks status history.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs used for display
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the human-readable label for a status value, or None."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        # label for this instance's status
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return a dict of status fields as exposed by the JSON API."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3858 3858
3859 3859
class _SetState(object):
    """
    Context processor allowing changing state for sensitive operation such as
    pull request update or merge.

    On enter it persists `pr_state` on the pull request; on clean exit it
    restores the original (or explicitly given back) state. When the body
    raised, the exception is logged and propagated WITHOUT restoring the
    state, leaving the PR in the transitional state for inspection.
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        self._pr = pull_request
        # state restored on clean exit; defaults to the PR's current state
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context, setting state to: `%s`',
                  self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_val is not None:
            # Fix: traceback.format_exc() takes a `limit` argument, not a
            # traceback object — passing exc_tb was incorrect. Format the
            # in-flight exception explicitly from the __exit__ arguments.
            log.error(''.join(
                traceback.format_exception(exc_type, exc_val, exc_tb)))
            # returning None (falsy) re-raises the exception to the caller
            return None

        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context, setting state to: `%s`',
                  self._org_state)

    @property
    def state(self):
        # last state successfully persisted via set_pr_state, or None
        return self._current_state

    def set_pr_state(self, pr_state):
        """Persist `pr_state` on the pull request; re-raises on DB failure."""
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
            self._current_state = pr_state
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise
3899 3899
3900 3900
3901 3901 class _PullRequestBase(BaseModel):
3902 3902 """
3903 3903 Common attributes of pull request and version entries.
3904 3904 """
3905 3905
3906 3906 # .status values
3907 3907 STATUS_NEW = u'new'
3908 3908 STATUS_OPEN = u'open'
3909 3909 STATUS_CLOSED = u'closed'
3910 3910
3911 3911 # available states
3912 3912 STATE_CREATING = u'creating'
3913 3913 STATE_UPDATING = u'updating'
3914 3914 STATE_MERGING = u'merging'
3915 3915 STATE_CREATED = u'created'
3916 3916
3917 3917 title = Column('title', Unicode(255), nullable=True)
3918 3918 description = Column(
3919 3919 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3920 3920 nullable=True)
3921 3921 description_renderer = Column('description_renderer', Unicode(64), nullable=True)
3922 3922
3923 3923 # new/open/closed status of pull request (not approve/reject/etc)
3924 3924 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3925 3925 created_on = Column(
3926 3926 'created_on', DateTime(timezone=False), nullable=False,
3927 3927 default=datetime.datetime.now)
3928 3928 updated_on = Column(
3929 3929 'updated_on', DateTime(timezone=False), nullable=False,
3930 3930 default=datetime.datetime.now)
3931 3931
3932 3932 pull_request_state = Column("pull_request_state", String(255), nullable=True)
3933 3933
3934 3934 @declared_attr
3935 3935 def user_id(cls):
3936 3936 return Column(
3937 3937 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3938 3938 unique=None)
3939 3939
3940 3940 # 500 revisions max
3941 3941 _revisions = Column(
3942 3942 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3943 3943
3944 3944 @declared_attr
3945 3945 def source_repo_id(cls):
3946 3946 # TODO: dan: rename column to source_repo_id
3947 3947 return Column(
3948 3948 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3949 3949 nullable=False)
3950 3950
3951 3951 _source_ref = Column('org_ref', Unicode(255), nullable=False)
3952 3952
3953 3953 @hybrid_property
3954 3954 def source_ref(self):
3955 3955 return self._source_ref
3956 3956
3957 3957 @source_ref.setter
3958 3958 def source_ref(self, val):
3959 3959 parts = (val or '').split(':')
3960 3960 if len(parts) != 3:
3961 3961 raise ValueError(
3962 3962 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3963 3963 self._source_ref = safe_unicode(val)
3964 3964
3965 3965 _target_ref = Column('other_ref', Unicode(255), nullable=False)
3966 3966
3967 3967 @hybrid_property
3968 3968 def target_ref(self):
3969 3969 return self._target_ref
3970 3970
3971 3971 @target_ref.setter
3972 3972 def target_ref(self, val):
3973 3973 parts = (val or '').split(':')
3974 3974 if len(parts) != 3:
3975 3975 raise ValueError(
3976 3976 'Invalid reference format given: {}, expected X:Y:Z'.format(val))
3977 3977 self._target_ref = safe_unicode(val)
3978 3978
3979 3979 @declared_attr
3980 3980 def target_repo_id(cls):
3981 3981 # TODO: dan: rename column to target_repo_id
3982 3982 return Column(
3983 3983 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3984 3984 nullable=False)
3985 3985
3986 3986 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3987 3987
3988 3988 # TODO: dan: rename column to last_merge_source_rev
3989 3989 _last_merge_source_rev = Column(
3990 3990 'last_merge_org_rev', String(40), nullable=True)
3991 3991 # TODO: dan: rename column to last_merge_target_rev
3992 3992 _last_merge_target_rev = Column(
3993 3993 'last_merge_other_rev', String(40), nullable=True)
3994 3994 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3995 3995 merge_rev = Column('merge_rev', String(40), nullable=True)
3996 3996
3997 3997 reviewer_data = Column(
3998 3998 'reviewer_data_json', MutationObj.as_mutable(
3999 3999 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4000 4000
4001 4001 @property
4002 4002 def reviewer_data_json(self):
4003 4003 return json.dumps(self.reviewer_data)
4004 4004
4005 @property
4006 def work_in_progress(self):
4007 """checks if pull request is work in progress by checking the title"""
4008 title = self.title.upper()
4009 if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
4010 return True
4011 return False
4012
4005 4013 @hybrid_property
4006 4014 def description_safe(self):
4007 4015 from rhodecode.lib import helpers as h
4008 4016 return h.escape(self.description)
4009 4017
4010 4018 @hybrid_property
4011 4019 def revisions(self):
4012 4020 return self._revisions.split(':') if self._revisions else []
4013 4021
4014 4022 @revisions.setter
4015 4023 def revisions(self, val):
4016 4024 self._revisions = u':'.join(val)
4017 4025
4018 4026 @hybrid_property
4019 4027 def last_merge_status(self):
4020 4028 return safe_int(self._last_merge_status)
4021 4029
4022 4030 @last_merge_status.setter
4023 4031 def last_merge_status(self, val):
4024 4032 self._last_merge_status = val
4025 4033
4026 4034 @declared_attr
4027 4035 def author(cls):
4028 4036 return relationship('User', lazy='joined')
4029 4037
    @declared_attr
    def source_repo(cls):
        # declared_attr so the join condition uses the concrete subclass name
        # (PullRequest / PullRequestVersion share this base)
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4035 4043
    @property
    def source_ref_parts(self):
        # parsed Reference object built from the raw source_ref string
        return self.unicode_to_reference(self.source_ref)
4039 4047
    @declared_attr
    def target_repo(cls):
        # declared_attr so the join condition uses the concrete subclass name
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4045 4053
    @property
    def target_ref_parts(self):
        # parsed Reference object built from the raw target_ref string
        return self.unicode_to_reference(self.target_ref)
4049 4057
    @property
    def shadow_merge_ref(self):
        # Reference object (or None) for the shadow repo merge commit
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        # serialize the Reference back to its ':'-joined string form
        self._shadow_merge_ref = self.reference_to_unicode(ref)
4057 4065
4058 4066 @staticmethod
4059 4067 def unicode_to_reference(raw):
4060 4068 """
4061 4069 Convert a unicode (or string) to a reference object.
4062 4070 If unicode evaluates to False it returns None.
4063 4071 """
4064 4072 if raw:
4065 4073 refs = raw.split(':')
4066 4074 return Reference(*refs)
4067 4075 else:
4068 4076 return None
4069 4077
4070 4078 @staticmethod
4071 4079 def reference_to_unicode(ref):
4072 4080 """
4073 4081 Convert a reference object to unicode.
4074 4082 If reference is None it returns None.
4075 4083 """
4076 4084 if ref:
4077 4085 return u':'.join(ref)
4078 4086 else:
4079 4087 return None
4080 4088
    def get_api_data(self, with_merge_state=True):
        """
        Common function generating the API representation of this pull
        request.

        :param with_merge_state: when True compute the (potentially
            expensive) merge status via PullRequestModel; otherwise a
            'not_available' placeholder is used.
        :return: dict with pull request metadata, source/target refs,
            merge data, author and reviewer statuses.
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_status = PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status[0],
                'message': safe_unicode(merge_status[1]),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        # shadow repository clone url plus the merge reference, if one exists
        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    # st is a list of (x, status) entries; empty == no vote yet
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data
4148 4156
    def set_state(self, pull_request_state, final_state=None):
        """
        Return a context manager that moves the pull request into
        ``pull_request_state`` and restores the previous (or given final)
        state on exit.

        Usage::

            with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
                pull_request.merge()

        :param pull_request_state: state to enter while inside the block
        :param final_state: optional explicit state to set on exit;
            defaults to restoring the state captured on entry
        """

        return _SetState(self, pull_request_state, back_state=final_state)
4162 4170
4163 4171
class PullRequest(Base, _PullRequestBase):
    """
    The live (editable) pull request; shares all common columns with
    PullRequestVersion via _PullRequestBase.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            # not yet persisted, fall back to the object identity
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around ``pull_request_obj``
        (usually a version), with a few attributes (shadow_merge_ref,
        reviewer data) sourced from ``org_pull_request_obj``.

        :param internal_methods: attribute names resolved on the wrapper
            itself instead of the attrs dict; defaults to ['versions']
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # only reached when normal attribute lookup fails
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                # versions of the wrapped (display) pull request
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            @property
            def pull_request_version_id(self):
                # only set when wrapping a PullRequestVersion
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        # strict dict: unknown attribute access raises instead of
        # silently returning None
        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # these always come from the original (live) pull request
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        # returns a VCS instance of the shadow repository, or None when
        # the shadow workspace directory does not exist on disk
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)
4275 4283
4276 4284
class PullRequestVersion(Base, _PullRequestBase):
    """
    A stored version of a pull request; reviewer/version/status data is
    delegated to the parent (live) pull request.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # reviewers always come from the live pull request
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
4313 4321
4314 4322
class PullRequestReviewers(Base, BaseModel):
    """Association of a user (reviewer) with a pull request."""
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )

    @hybrid_property
    def reasons(self):
        # normalize a missing/empty value to an empty list
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        # stored as JSON list; enforce strings only
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON list of reasons why this reviewer was added; see reasons hybrid
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    # JSON data of the review rule that produced this reviewer entry
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
4372 4380
4373 4381
class Notification(Base, BaseModel):
    """A notification message, fanned out to recipients via UserNotification."""
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    # known notification types
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete-orphan")

    @property
    def recipients(self):
        # users linked to this notification, ordered by user id
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to every user in ``recipients``.
        The created objects are added to the session but not committed.

        :param created_by: User object of the sender
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4432 4440
4433 4441
class UserNotification(Base, BaseModel):
    """Link table between users and notifications with a read flag."""
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        # flips the read flag; caller is responsible for the commit
        self.read = True
        Session().add(self)
4453 4461
4454 4462
class Gist(Base, BaseModel):
    """A gist: a small VCS-backed snippet repository owned by a user."""
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    # gist visibility types
    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        # lookup by access id; raises HTTPNotFound for missing gists
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        # root storage path comes from the cached RhodeCodeUi setting
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,  # content is loaded separately, not via this API
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
4557 4565
4558 4566
class ExternalIdentity(Base, BaseModel):
    """Maps a local user to an identity on an external auth provider."""
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :param local_user_id: optional filter to a specific local user
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        # resolves the EE auth plugin instance for the given provider id
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
4624 4632
4625 4633
class Integration(Base, BaseModel):
    """An integration (webhook, slack, etc.) scoped globally, per repo or per repo group."""
    __tablename__ = 'integrations'
    # NOTE(review): single parenthesized expression, NOT a tuple like the
    # sibling models' `(base_table_args,)` — works only if base_table_args
    # itself is an acceptable __table_args__ value; confirm intent
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # when scoped to a repo group: apply only to direct child repos
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        # human-readable scope: repo > repo group > root repos > global
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4667 4675
4668 4676
class RepoReviewRuleUser(Base, BaseModel):
    """A single user attached to a repository review rule."""
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        # rule payload carried into the reviewer entry
        return {
            'mandatory': self.mandatory
        }
4685 4693
4686 4694
class RepoReviewRuleUserGroup(Base, BaseModel):
    """A user group attached to a repository review rule, with a vote rule."""
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel meaning "every group member must vote"
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        # rule payload carried into the reviewer entry
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        # unset/zero or the VOTE_RULE_ALL sentinel both mean "all must vote"
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
4714 4722
4715 4723
class RepoReviewRule(Base, BaseModel):
    """
    Per-repository reviewer rule: matches pull requests by source/target
    branch and changed-file patterns, and yields the users/groups that
    must review them.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    # patterns are globs by default; a 're:' prefix switches to raw regex
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error for globs that translate to an invalid regex
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    # explicit regex, used verbatim without anchoring
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    # glob, translated to an anchored regex
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            # one matching file is enough
            for filename in files_changed:
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        users = collections.OrderedDict()

        # individual users first — they take precedence over group entries
        for rule_user in self.rule_users:
            if rule_user.user.active:
                if rule_user.user not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        # returns all group rules this user participates in, [] if none
        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
4877 4885
4878 4886
class ScheduleEntry(Base, BaseModel):
    """
    A periodic-task schedule entry: what to run (celery task in dot
    notation plus its args/kwargs) and when (crontab / timedelta /
    integer definition). `task_uid` is a stable hash of the task spec,
    kept in sync by the before_insert/before_update event listeners.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # allowed values for the `schedule_type` hybrid property
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            # NOTE: the format arguments were previously swapped (and the
            # message read "on of"), reporting the bad value as the allowed
            # set; list the allowed types first, then the rejected value
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Compute a stable sha1 uid for a schedule entry from its task
        dot-notation, args and kwargs; used for de-duplication via the
        `s_task_uid_idx` unique constraint.
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        # values may still be raw JSON strings coming from the DB layer;
        # decode them first, falling back to empty containers on bad JSON
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): sorted() over the dot-notation string sorts its
        # characters; kept as-is because the uid only needs to be stable,
        # not human-readable
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        # single entry by its unique name, or None
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        # single entry by primary key, or None
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        # convert the raw JSON definition into a celery schedule object
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        """Task positional arguments as a plain list (empty on bad data)."""
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        """Task keyword arguments as a plain dict (empty on bad data)."""
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        # de-coerce mutation-tracked values and serialize them back to JSON
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
4997 5005
4998 5006
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep the unique task uid in sync with any task/args/kwargs changes
    target.task_uid = ScheduleEntry.get_uid(target)
5002 5010
5003 5011
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute the initial task uid before the row is first persisted
    target.task_uid = ScheduleEntry.get_uid(target)
5007 5015
5008 5016
class _BaseBranchPerms(BaseModel):
    """
    Shared mixin for branch-permission models: stores a glob branch
    pattern (plus a hash of it, set alongside the pattern) and matches
    branch names against it.
    """
    @classmethod
    def compute_hash(cls, value):
        # hash stored next to the pattern in `_branch_hash`
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # fall back to the match-everything glob when nothing is stored
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # raises if the glob does not translate into a valid regex
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # set the Hash when setting the branch pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if the given branch matches this entry.

        :param branch: branch name for the commit
        """

        branch = branch or ''

        # an empty branch name matches any rule
        branch_matches = True
        if branch:
            branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
            branch_matches = bool(branch_regex.search(branch))

        return branch_matches
5047 5055
5048 5056
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Per-user branch permission rule: ties a user's repo-level permission
    entry to a branch glob pattern (handling inherited from
    _BaseBranchPerms) and a permission, evaluated in `rule_order`.
    """
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm')

    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_repo_to_perm, self.branch_pattern)
5073 5081
5074 5082
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Per-user-group branch permission rule: ties a user group's repo-level
    permission entry to a branch glob pattern (handling inherited from
    _BaseBranchPerms) and a permission, evaluated in `rule_order`.
    """
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')

    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        # label previously said `UserBranchPermission` (copy-paste from the
        # per-user sibling class); corrected to name this class's concept
        return u'<UserGroupBranchPermission(%s => %r)>' % (
            self.user_group_repo_to_perm, self.branch_pattern)
5099 5107
5100 5108
class UserBookmark(Base, BaseModel):
    """
    A user's personal bookmark pinned at a unique position: optionally
    targets a repository or a repository group (both nullable), with a
    title and redirect url.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # optional targets; unique per user via the constraints above
    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        # single bookmark of the user at the given position, or None
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id):
        # all of the user's bookmarks ordered by position, with the repo
        # and repo-group targets eagerly loaded
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .options(joinedload(UserBookmark.repository)) \
            .options(joinedload(UserBookmark.repository_group)) \
            .order_by(UserBookmark.position.asc()) \
            .all()

    def __unicode__(self):
        return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5142 5150
5143 5151
class FileStore(Base, BaseModel):
    """
    A stored artifact/attachment: identity (file_uid), original name,
    sha256 hash and size, access counters, plus optional ACL scoping to
    a user, user group, repository or repository group.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', BigInteger(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    # hidden defines an attachment that should be hidden from showing in artifact listing
    hidden = Column('hidden', Boolean(), nullable=False, default=False)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')

    file_metadata = relationship('FileStoreMetadata', lazy='joined')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @classmethod
    def get_by_store_uid(cls, file_store_uid):
        # single entry by its storage uid, or None
        return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()

    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, hidden=False, check_acl=True,
               user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
        """
        Build (but do not persist) a new FileStore entry; the caller is
        responsible for adding it to the session and committing.
        """
        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled
        store_entry.hidden = hidden

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id

        return store_entry

    @classmethod
    def store_metadata(cls, file_store_id, args, commit=True):
        """
        Attach metadata entries to a stored file.

        :param file_store_id: id of an existing FileStore row; silently
            returns when no such row exists
        :param args: iterable of (section, key, value, value_type) tuples
        :raises ArtifactMetadataDuplicate: when a section/key pair already
            exists for the file (checked up-front and again on commit)
        :raises ArtifactMetadataBadValueType: on unknown value_type
        """
        file_store = FileStore.get(file_store_id)
        if file_store is None:
            return

        for section, key, value, value_type in args:
            has_key = FileStoreMetadata().query() \
                .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
                .filter(FileStoreMetadata.file_store_meta_section == section) \
                .filter(FileStoreMetadata.file_store_meta_key == key) \
                .scalar()
            if has_key:
                msg = 'key `{}` already defined under section `{}` for this file.'\
                    .format(key, section)
                raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)

            # NOTE(marcink): raises ArtifactMetadataBadValueType
            FileStoreMetadata.valid_value_type(value_type)

            meta_entry = FileStoreMetadata()
            meta_entry.file_store = file_store
            meta_entry.file_store_meta_section = section
            meta_entry.file_store_meta_key = key
            meta_entry.file_store_meta_value_type = value_type
            meta_entry.file_store_meta_value = value

            Session().add(meta_entry)

        try:
            if commit:
                Session().commit()
        except IntegrityError:
            # the unique constraint is the authoritative duplicate check
            Session().rollback()
            raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')

    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        # atomically increment the download counter and stamp access time
        FileStore().query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __json__(self):
        # API/JSON representation of the artifact
        data = {
            'filename': self.file_display_name,
            'filename_org': self.file_org_name,
            'file_uid': self.file_uid,
            'description': self.file_description,
            'hidden': self.hidden,
            'size': self.file_size,
            'created_on': self.created_on,
            'uploaded_by': self.upload_user.get_api_data(details='basic'),
            'downloaded_times': self.accessed_count,
            'sha256': self.file_hash,
            'metadata': self.file_metadata,
        }

        return data

    def __repr__(self):
        return '<FileStore({})>'.format(self.file_store_id)
5293 5301
5294 5302
class FileStoreMetadata(Base, BaseModel):
    """
    Typed key/value metadata attached to a FileStore entry, grouped by
    section. Values are stored as text and converted on access according
    to `file_store_meta_value_type` (e.g. 'int', 'list',
    'unicode.encrypted'); section/key pairs are unique per file via
    their hash columns.
    """
    __tablename__ = 'file_store_metadata'
    __table_args__ = (
        UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
        Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
        Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
        base_table_args
    )
    # converters applied to the stored text value on read, keyed by the
    # base part of `file_store_meta_value_type`
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }

    file_store_meta_id = Column(
        "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _file_store_meta_section = Column(
        "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_section_hash = Column(
        "file_store_meta_section_hash", String(255),
        nullable=True, unique=None, default=None)
    _file_store_meta_key = Column(
        "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_key_hash = Column(
        "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
    _file_store_meta_value = Column(
        "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_value_type = Column(
        "file_store_meta_value_type", String(255), nullable=True, unique=None,
        default='unicode')

    file_store_id = Column(
        'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
        nullable=True, unique=None, default=None)

    file_store = relationship('FileStore', lazy='joined')

    @classmethod
    def valid_value_type(cls, value):
        # only the base part before the first '.' is checked, so composite
        # types like 'unicode.encrypted' are accepted
        if value.split('.')[0] not in cls.SETTINGS_TYPES:
            raise ArtifactMetadataBadValueType(
                'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))

    @hybrid_property
    def file_store_meta_section(self):
        return self._file_store_meta_section

    @file_store_meta_section.setter
    def file_store_meta_section(self, value):
        self._file_store_meta_section = value
        # the hash column backs the per-file uniqueness constraint
        self._file_store_meta_section_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_key(self):
        return self._file_store_meta_key

    @file_store_meta_key.setter
    def file_store_meta_key(self, value):
        self._file_store_meta_key = value
        # the hash column backs the per-file uniqueness constraint
        self._file_store_meta_key_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_value(self):
        val = self._file_store_meta_value

        if self._file_store_meta_value_type:
            # e.g unicode.encrypted == unicode
            _type = self._file_store_meta_value_type.split('.')[0]
            # decode the encrypted value if it's encrypted field type
            if '.encrypted' in self._file_store_meta_value_type:
                cipher = EncryptedTextValue()
                val = safe_unicode(cipher.process_result_value(val, None))
            # do final type conversion
            converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
            val = converter(val)

        return val

    @file_store_meta_value.setter
    def file_store_meta_value(self, val):
        # NOTE(review): this reads `file_store_meta_value_type`, which is
        # None on a fresh instance until explicitly assigned ('unicode' is
        # only a column-level default) — callers must set the type before
        # the value, as store_metadata() does; TODO confirm no other order
        val = safe_unicode(val)
        # encode the encrypted value
        if '.encrypted' in self.file_store_meta_value_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._file_store_meta_value = val

    @hybrid_property
    def file_store_meta_value_type(self):
        return self._file_store_meta_value_type

    @file_store_meta_value_type.setter
    def file_store_meta_value_type(self, val):
        # e.g unicode.encrypted
        self.valid_value_type(val)
        self._file_store_meta_value_type = val

    def __json__(self):
        # API/JSON representation of one metadata entry
        data = {
            'artifact': self.file_store.file_uid,
            'section': self.file_store_meta_section,
            'key': self.file_store_meta_key,
            'value': self.file_store_meta_value,
        }

        return data

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section,
                                    self.file_store_meta_key, self.file_store_meta_value)
5411 5419
5412 5420
class DbMigrateVersion(Base, BaseModel):
    """Tracks the currently applied database schema migration version."""
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        # assumes exactly one version row exists — raises AttributeError
        # on an empty table
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
5431 5439
5432 5440
class DbSession(Base, BaseModel):
    """
    Database-backed web session storage: pickled session data keyed by
    namespace, with created/accessed timestamps.
    """
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)
@@ -1,1760 +1,1770 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
# Data structure to hold the response data when updating commits during a pull
# request update. Fields:
#   executed        - bool, whether the update was actually performed
#   reason          - UpdateFailureReason member explaining a no-op/failure
#   new / old       - new and previous pull request (version) objects
#   changes         - commit-id change summary computed for the update
#   source_changed / target_changed - which side of the PR moved
UpdateResponse = collections.namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed'])
71 71
72 72
class PullRequestModel(BaseModel):
    """
    Business logic for pull requests: querying, permission checks,
    creation, updates and merging.
    """

    cls = PullRequest

    # default number of diff context lines
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # lazily-translated human readable message per update failure reason
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # ref types a pull request can be opened from
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    # subset of REF_TYPES that supports the update-commits operation (no tags)
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
    def __get_pull_request(self, pull_request):
        """Resolve an id or instance to a PullRequest/PullRequestVersion."""
        return self._get_instance((
            PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
    def get(self, pull_request):
        """Public accessor: resolve id/instance to a pull request object."""
        return self.__get_pull_request(pull_request)
139 139
    def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
                               statuses=None, opened_by=None, order_by=None,
                               order_dir='desc', only_created=False):
        """
        Build (but do not execute) the PullRequest query shared by the
        ``get_all``/``count_all`` family of methods.

        :param repo_name: target or source repo; falsy means no repo filter
        :param search_q: free-text filter over id, title and description
        :param source: when True, ``repo_name`` is matched as the source repo
        :param statuses: restrict to these pull request statuses
        :param opened_by: list of author user ids
        :param order_by: one of the keys in the order_map below
        :param order_dir: 'asc' or 'desc'
        :param only_created: restrict to PRs still in the "created" state
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        if search_q:
            # ILIKE over id (cast to string), title and description
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'id': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
189 189
190 190 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
191 191 opened_by=None):
192 192 """
193 193 Count the number of pull requests for a specific repository.
194 194
195 195 :param repo_name: target or source repo
196 196 :param search_q: filter by text
197 197 :param source: boolean flag to specify if repo_name refers to source
198 198 :param statuses: list of pull request statuses
199 199 :param opened_by: author user of the pull request
200 200 :returns: int number of pull requests
201 201 """
202 202 q = self._prepare_get_all_query(
203 203 repo_name, search_q=search_q, source=source, statuses=statuses,
204 204 opened_by=opened_by)
205 205
206 206 return q.count()
207 207
208 208 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
209 209 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
210 210 """
211 211 Get all pull requests for a specific repository.
212 212
213 213 :param repo_name: target or source repo
214 214 :param search_q: filter by text
215 215 :param source: boolean flag to specify if repo_name refers to source
216 216 :param statuses: list of pull request statuses
217 217 :param opened_by: author user of the pull request
218 218 :param offset: pagination offset
219 219 :param length: length of returned list
220 220 :param order_by: order of the returned list
221 221 :param order_dir: 'asc' or 'desc' ordering direction
222 222 :returns: list of pull requests
223 223 """
224 224 q = self._prepare_get_all_query(
225 225 repo_name, search_q=search_q, source=source, statuses=statuses,
226 226 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
227 227
228 228 if length:
229 229 pull_requests = q.limit(length).offset(offset).all()
230 230 else:
231 231 pull_requests = q.all()
232 232
233 233 return pull_requests
234 234
235 235 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
236 236 opened_by=None):
237 237 """
238 238 Count the number of pull requests for a specific repository that are
239 239 awaiting review.
240 240
241 241 :param repo_name: target or source repo
242 242 :param search_q: filter by text
243 243 :param source: boolean flag to specify if repo_name refers to source
244 244 :param statuses: list of pull request statuses
245 245 :param opened_by: author user of the pull request
246 246 :returns: int number of pull requests
247 247 """
248 248 pull_requests = self.get_awaiting_review(
249 249 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
250 250
251 251 return len(pull_requests)
252 252
253 253 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
254 254 opened_by=None, offset=0, length=None,
255 255 order_by=None, order_dir='desc'):
256 256 """
257 257 Get all pull requests for a specific repository that are awaiting
258 258 review.
259 259
260 260 :param repo_name: target or source repo
261 261 :param search_q: filter by text
262 262 :param source: boolean flag to specify if repo_name refers to source
263 263 :param statuses: list of pull request statuses
264 264 :param opened_by: author user of the pull request
265 265 :param offset: pagination offset
266 266 :param length: length of returned list
267 267 :param order_by: order of the returned list
268 268 :param order_dir: 'asc' or 'desc' ordering direction
269 269 :returns: list of pull requests
270 270 """
271 271 pull_requests = self.get_all(
272 272 repo_name, search_q=search_q, source=source, statuses=statuses,
273 273 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
274 274
275 275 _filtered_pull_requests = []
276 276 for pr in pull_requests:
277 277 status = pr.calculated_review_status()
278 278 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
279 279 ChangesetStatus.STATUS_UNDER_REVIEW]:
280 280 _filtered_pull_requests.append(pr)
281 281 if length:
282 282 return _filtered_pull_requests[offset:offset+length]
283 283 else:
284 284 return _filtered_pull_requests
285 285
286 286 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
287 287 opened_by=None, user_id=None):
288 288 """
289 289 Count the number of pull requests for a specific repository that are
290 290 awaiting review from a specific user.
291 291
292 292 :param repo_name: target or source repo
293 293 :param search_q: filter by text
294 294 :param source: boolean flag to specify if repo_name refers to source
295 295 :param statuses: list of pull request statuses
296 296 :param opened_by: author user of the pull request
297 297 :param user_id: reviewer user of the pull request
298 298 :returns: int number of pull requests
299 299 """
300 300 pull_requests = self.get_awaiting_my_review(
301 301 repo_name, search_q=search_q, source=source, statuses=statuses,
302 302 opened_by=opened_by, user_id=user_id)
303 303
304 304 return len(pull_requests)
305 305
306 306 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
307 307 opened_by=None, user_id=None, offset=0,
308 308 length=None, order_by=None, order_dir='desc'):
309 309 """
310 310 Get all pull requests for a specific repository that are awaiting
311 311 review from a specific user.
312 312
313 313 :param repo_name: target or source repo
314 314 :param search_q: filter by text
315 315 :param source: boolean flag to specify if repo_name refers to source
316 316 :param statuses: list of pull request statuses
317 317 :param opened_by: author user of the pull request
318 318 :param user_id: reviewer user of the pull request
319 319 :param offset: pagination offset
320 320 :param length: length of returned list
321 321 :param order_by: order of the returned list
322 322 :param order_dir: 'asc' or 'desc' ordering direction
323 323 :returns: list of pull requests
324 324 """
325 325 pull_requests = self.get_all(
326 326 repo_name, search_q=search_q, source=source, statuses=statuses,
327 327 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
328 328
329 329 _my = PullRequestModel().get_not_reviewed(user_id)
330 330 my_participation = []
331 331 for pr in pull_requests:
332 332 if pr in _my:
333 333 my_participation.append(pr)
334 334 _filtered_pull_requests = my_participation
335 335 if length:
336 336 return _filtered_pull_requests[offset:offset+length]
337 337 else:
338 338 return _filtered_pull_requests
339 339
340 340 def get_not_reviewed(self, user_id):
341 341 return [
342 342 x.pull_request for x in PullRequestReviewers.query().filter(
343 343 PullRequestReviewers.user_id == user_id).all()
344 344 ]
345 345
    def _prepare_participating_query(self, user_id=None, statuses=None,
                                     order_by=None, order_dir='desc'):
        """
        Build the query for pull requests the user authored OR reviews.

        :param user_id: match PRs authored by, or assigned for review to, this user
        :param statuses: restrict to these pull request statuses
        :param order_by: one of the keys in the order_map below
        :param order_dir: 'asc' or 'desc'
        """
        q = PullRequest.query()
        if user_id:
            # subquery of PR ids where the user is a reviewer
            reviewers_subquery = Session().query(
                PullRequestReviewers.pull_request_id).filter(
                PullRequestReviewers.user_id == user_id).subquery()
            user_filter = or_(
                PullRequest.user_id == user_id,
                PullRequest.pull_request_id.in_(reviewers_subquery)
            )
            q = PullRequest.query().filter(user_filter)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
376 376
377 377 def count_im_participating_in(self, user_id=None, statuses=None):
378 378 q = self._prepare_participating_query(user_id, statuses=statuses)
379 379 return q.count()
380 380
381 381 def get_im_participating_in(
382 382 self, user_id=None, statuses=None, offset=0,
383 383 length=None, order_by=None, order_dir='desc'):
384 384 """
385 385 Get all Pull requests that i'm participating in, or i have opened
386 386 """
387 387
388 388 q = self._prepare_participating_query(
389 389 user_id, statuses=statuses, order_by=order_by,
390 390 order_dir=order_dir)
391 391
392 392 if length:
393 393 pull_requests = q.limit(length).offset(offset).all()
394 394 else:
395 395 pull_requests = q.all()
396 396
397 397 return pull_requests
398 398
    def get_versions(self, pull_request):
        """
        returns versions of pull request sorted by version id ascending
        (oldest first) — note the query orders by ``.asc()``
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
407 407
    def get_pr_version(self, pull_request_id, version=None):
        """
        Resolve a pull request optionally at a given version.

        :param pull_request_id: id of the pull request
        :param version: None (current), 'latest', or a PullRequestVersion id
        :returns: tuple of (original PR, PR-or-version object,
            display object, at_version marker)
        """
        at_version = None

        if version and version == 'latest':
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            # a concrete stored version; 404 when it does not exist
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version
430 430
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request: persist it, attach reviewers, set initial
        commit statuses, then run an initial merge simulation.

        :param created_by: user (id or object) creating the PR
        :param reviewers: iterable of (user_id, reasons, mandatory, rules)
        :returns: the created PullRequest instance
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            # NOTE(review): state_obj is unused; the context manager is used
            # purely for its state-transition side effect
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids)
        self.trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
542 542
    def trigger_pull_request_hook(self, pull_request, user, action, data=None):
        """
        Dispatch the matching log/event hook for a pull request ``action``
        ('create', 'merge', 'close', 'review_status_change', 'update',
        'comment'); unknown actions are silently ignored.
        """
        pull_request = self.__get_pull_request(pull_request)
        target_scm = pull_request.target_repo.scm_instance()
        if action == 'create':
            trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
        elif action == 'merge':
            trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
        elif action == 'close':
            trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
        elif action == 'review_status_change':
            trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
        elif action == 'update':
            trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
        elif action == 'comment':
            # dummy hook ! for comment. We want this function to handle all cases
            def trigger_hook(*args, **kwargs):
                pass
            # comments fire an in-process event instead of a log hook;
            # requires data={'comment': ...}
            comment = data['comment']
            events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        else:
            return

        trigger_hook(
            username=user.username,
            repo_name=pull_request.target_repo.repo_name,
            repo_alias=target_scm.alias,
            pull_request=pull_request,
            data=data)
571 571
572 572 def _get_commit_ids(self, pull_request):
573 573 """
574 574 Return the commit ids of the merged pull request.
575 575
576 576 This method is not dealing correctly yet with the lack of autoupdates
577 577 nor with the implicit target updates.
578 578 For example: if a commit in the source repo is already in the target it
579 579 will be reported anyways.
580 580 """
581 581 merge_rev = pull_request.merge_rev
582 582 if merge_rev is None:
583 583 raise ValueError('This pull request was not merged yet')
584 584
585 585 commit_ids = list(pull_request.revisions)
586 586 if merge_rev not in commit_ids:
587 587 commit_ids.append(merge_rev)
588 588
589 589 return commit_ids
590 590
591 591 def merge_repo(self, pull_request, user, extras):
592 592 log.debug("Merging pull request %s", pull_request.pull_request_id)
593 593 extras['user_agent'] = 'internal-merge'
594 594 merge_state = self._merge_pull_request(pull_request, user, extras)
595 595 if merge_state.executed:
596 596 log.debug("Merge was successful, updating the pull request comments.")
597 597 self._comment_and_close_pr(pull_request, user, merge_state)
598 598
599 599 self._log_audit_action(
600 600 'repo.pull_request.merge',
601 601 {'merge_state': merge_state.__dict__},
602 602 user, pull_request)
603 603
604 604 else:
605 605 log.warn("Merge failed, not updating the pull request.")
606 606 return merge_state
607 607
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual VCS-level merge of a pull request inside a
        callback-daemon context (so hooks can call back into RhodeCode).

        :param merge_msg: optional commit message template; falls back to
            vcs_settings.MERGE_MESSAGE_TMPL
        :returns: merge state object from the VCS backend
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # render the merge commit message from the template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)

        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            user_name = user.short_contact
            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
648 648
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge revision, add a closing
        comment, invalidate target-repo caches and fire the 'merge' hook.
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing_pr=True makes this comment also close the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
670 670
671 671 def has_valid_update_type(self, pull_request):
672 672 source_ref_type = pull_request.source_ref_parts.type
673 673 return source_ref_type in self.REF_TYPES
674 674
675 675 def update_commits(self, pull_request):
676 676 """
677 677 Get the updated list of commits for the pull request
678 678 and return the new pull request version and the list
679 679 of commits processed by this update action
680 680 """
681 681 pull_request = self.__get_pull_request(pull_request)
682 682 source_ref_type = pull_request.source_ref_parts.type
683 683 source_ref_name = pull_request.source_ref_parts.name
684 684 source_ref_id = pull_request.source_ref_parts.commit_id
685 685
686 686 target_ref_type = pull_request.target_ref_parts.type
687 687 target_ref_name = pull_request.target_ref_parts.name
688 688 target_ref_id = pull_request.target_ref_parts.commit_id
689 689
690 690 if not self.has_valid_update_type(pull_request):
691 691 log.debug("Skipping update of pull request %s due to ref type: %s",
692 692 pull_request, source_ref_type)
693 693 return UpdateResponse(
694 694 executed=False,
695 695 reason=UpdateFailureReason.WRONG_REF_TYPE,
696 696 old=pull_request, new=None, changes=None,
697 697 source_changed=False, target_changed=False)
698 698
699 699 # source repo
700 700 source_repo = pull_request.source_repo.scm_instance()
701 701
702 702 try:
703 703 source_commit = source_repo.get_commit(commit_id=source_ref_name)
704 704 except CommitDoesNotExistError:
705 705 return UpdateResponse(
706 706 executed=False,
707 707 reason=UpdateFailureReason.MISSING_SOURCE_REF,
708 708 old=pull_request, new=None, changes=None,
709 709 source_changed=False, target_changed=False)
710 710
711 711 source_changed = source_ref_id != source_commit.raw_id
712 712
713 713 # target repo
714 714 target_repo = pull_request.target_repo.scm_instance()
715 715
716 716 try:
717 717 target_commit = target_repo.get_commit(commit_id=target_ref_name)
718 718 except CommitDoesNotExistError:
719 719 return UpdateResponse(
720 720 executed=False,
721 721 reason=UpdateFailureReason.MISSING_TARGET_REF,
722 722 old=pull_request, new=None, changes=None,
723 723 source_changed=False, target_changed=False)
724 724 target_changed = target_ref_id != target_commit.raw_id
725 725
726 726 if not (source_changed or target_changed):
727 727 log.debug("Nothing changed in pull request %s", pull_request)
728 728 return UpdateResponse(
729 729 executed=False,
730 730 reason=UpdateFailureReason.NO_CHANGE,
731 731 old=pull_request, new=None, changes=None,
732 732 source_changed=target_changed, target_changed=source_changed)
733 733
734 734 change_in_found = 'target repo' if target_changed else 'source repo'
735 735 log.debug('Updating pull request because of change in %s detected',
736 736 change_in_found)
737 737
738 738 # Finally there is a need for an update, in case of source change
739 739 # we create a new version, else just an update
740 740 if source_changed:
741 741 pull_request_version = self._create_version_from_snapshot(pull_request)
742 742 self._link_comments_to_version(pull_request_version)
743 743 else:
744 744 try:
745 745 ver = pull_request.versions[-1]
746 746 except IndexError:
747 747 ver = None
748 748
749 749 pull_request.pull_request_version_id = \
750 750 ver.pull_request_version_id if ver else None
751 751 pull_request_version = pull_request
752 752
753 753 try:
754 754 if target_ref_type in self.REF_TYPES:
755 755 target_commit = target_repo.get_commit(target_ref_name)
756 756 else:
757 757 target_commit = target_repo.get_commit(target_ref_id)
758 758 except CommitDoesNotExistError:
759 759 return UpdateResponse(
760 760 executed=False,
761 761 reason=UpdateFailureReason.MISSING_TARGET_REF,
762 762 old=pull_request, new=None, changes=None,
763 763 source_changed=source_changed, target_changed=target_changed)
764 764
765 765 # re-compute commit ids
766 766 old_commit_ids = pull_request.revisions
767 767 pre_load = ["author", "date", "message", "branch"]
768 768 commit_ranges = target_repo.compare(
769 769 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
770 770 pre_load=pre_load)
771 771
772 772 ancestor = source_repo.get_common_ancestor(
773 773 source_commit.raw_id, target_commit.raw_id, target_repo)
774 774
775 775 pull_request.source_ref = '%s:%s:%s' % (
776 776 source_ref_type, source_ref_name, source_commit.raw_id)
777 777 pull_request.target_ref = '%s:%s:%s' % (
778 778 target_ref_type, target_ref_name, ancestor)
779 779
780 780 pull_request.revisions = [
781 781 commit.raw_id for commit in reversed(commit_ranges)]
782 782 pull_request.updated_on = datetime.datetime.now()
783 783 Session().add(pull_request)
784 784 new_commit_ids = pull_request.revisions
785 785
786 786 old_diff_data, new_diff_data = self._generate_update_diffs(
787 787 pull_request, pull_request_version)
788 788
789 789 # calculate commit and file changes
790 790 changes = self._calculate_commit_id_changes(
791 791 old_commit_ids, new_commit_ids)
792 792 file_changes = self._calculate_file_changes(
793 793 old_diff_data, new_diff_data)
794 794
795 795 # set comments as outdated if DIFFS changed
796 796 CommentsModel().outdate_comments(
797 797 pull_request, old_diff_data=old_diff_data,
798 798 new_diff_data=new_diff_data)
799 799
800 800 commit_changes = (changes.added or changes.removed)
801 801 file_node_changes = (
802 802 file_changes.added or file_changes.modified or file_changes.removed)
803 803 pr_has_changes = commit_changes or file_node_changes
804 804
805 805 # Add an automatic comment to the pull request, in case
806 806 # anything has changed
807 807 if pr_has_changes:
808 808 update_comment = CommentsModel().create(
809 809 text=self._render_update_message(changes, file_changes),
810 810 repo=pull_request.target_repo,
811 811 user=pull_request.author,
812 812 pull_request=pull_request,
813 813 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
814 814
815 815 # Update status to "Under Review" for added commits
816 816 for commit_id in changes.added:
817 817 ChangesetStatusModel().set_status(
818 818 repo=pull_request.source_repo,
819 819 status=ChangesetStatus.STATUS_UNDER_REVIEW,
820 820 comment=update_comment,
821 821 user=pull_request.author,
822 822 pull_request=pull_request,
823 823 revision=commit_id)
824 824
825 825 log.debug(
826 826 'Updated pull request %s, added_ids: %s, common_ids: %s, '
827 827 'removed_ids: %s', pull_request.pull_request_id,
828 828 changes.added, changes.common, changes.removed)
829 829 log.debug(
830 830 'Updated pull request with the following file changes: %s',
831 831 file_changes)
832 832
833 833 log.info(
834 834 "Updated pull request %s from commit %s to commit %s, "
835 835 "stored new version %s of this pull request.",
836 836 pull_request.pull_request_id, source_ref_id,
837 837 pull_request.source_ref_parts.commit_id,
838 838 pull_request_version.pull_request_version_id)
839 839 Session().commit()
840 840 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
841 841
842 842 return UpdateResponse(
843 843 executed=True, reason=UpdateFailureReason.NONE,
844 844 old=pull_request, new=pull_request_version, changes=changes,
845 845 source_changed=source_changed, target_changed=target_changed)
846 846
847 847 def _create_version_from_snapshot(self, pull_request):
848 848 version = PullRequestVersion()
849 849 version.title = pull_request.title
850 850 version.description = pull_request.description
851 851 version.status = pull_request.status
852 852 version.pull_request_state = pull_request.pull_request_state
853 853 version.created_on = datetime.datetime.now()
854 854 version.updated_on = pull_request.updated_on
855 855 version.user_id = pull_request.user_id
856 856 version.source_repo = pull_request.source_repo
857 857 version.source_ref = pull_request.source_ref
858 858 version.target_repo = pull_request.target_repo
859 859 version.target_ref = pull_request.target_ref
860 860
861 861 version._last_merge_source_rev = pull_request._last_merge_source_rev
862 862 version._last_merge_target_rev = pull_request._last_merge_target_rev
863 863 version.last_merge_status = pull_request.last_merge_status
864 864 version.shadow_merge_ref = pull_request.shadow_merge_ref
865 865 version.merge_rev = pull_request.merge_rev
866 866 version.reviewer_data = pull_request.reviewer_data
867 867
868 868 version.revisions = pull_request.revisions
869 869 version.pull_request = pull_request
870 870 Session().add(version)
871 871 Session().flush()
872 872
873 873 return version
874 874
875 875 def _generate_update_diffs(self, pull_request, pull_request_version):
876 876
877 877 diff_context = (
878 878 self.DIFF_CONTEXT +
879 879 CommentsModel.needed_extra_diff_context())
880 880 hide_whitespace_changes = False
881 881 source_repo = pull_request_version.source_repo
882 882 source_ref_id = pull_request_version.source_ref_parts.commit_id
883 883 target_ref_id = pull_request_version.target_ref_parts.commit_id
884 884 old_diff = self._get_diff_from_pr_or_version(
885 885 source_repo, source_ref_id, target_ref_id,
886 886 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
887 887
888 888 source_repo = pull_request.source_repo
889 889 source_ref_id = pull_request.source_ref_parts.commit_id
890 890 target_ref_id = pull_request.target_ref_parts.commit_id
891 891
892 892 new_diff = self._get_diff_from_pr_or_version(
893 893 source_repo, source_ref_id, target_ref_id,
894 894 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
895 895
896 896 old_diff_data = diffs.DiffProcessor(old_diff)
897 897 old_diff_data.prepare()
898 898 new_diff_data = diffs.DiffProcessor(new_diff)
899 899 new_diff_data.prepare()
900 900
901 901 return old_diff_data, new_diff_data
902 902
903 903 def _link_comments_to_version(self, pull_request_version):
904 904 """
905 905 Link all unlinked comments of this pull request to the given version.
906 906
907 907 :param pull_request_version: The `PullRequestVersion` to which
908 908 the comments shall be linked.
909 909
910 910 """
911 911 pull_request = pull_request_version.pull_request
912 912 comments = ChangesetComment.query()\
913 913 .filter(
914 914 # TODO: johbo: Should we query for the repo at all here?
915 915 # Pending decision on how comments of PRs are to be related
916 916 # to either the source repo, the target repo or no repo at all.
917 917 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
918 918 ChangesetComment.pull_request == pull_request,
919 919 ChangesetComment.pull_request_version == None)\
920 920 .order_by(ChangesetComment.comment_id.asc())
921 921
922 922 # TODO: johbo: Find out why this breaks if it is done in a bulk
923 923 # operation.
924 924 for comment in comments:
925 925 comment.pull_request_version_id = (
926 926 pull_request_version.pull_request_version_id)
927 927 Session().add(comment)
928 928
929 929 def _calculate_commit_id_changes(self, old_ids, new_ids):
930 930 added = [x for x in new_ids if x not in old_ids]
931 931 common = [x for x in new_ids if x in old_ids]
932 932 removed = [x for x in old_ids if x not in new_ids]
933 933 total = new_ids
934 934 return ChangeTuple(added, common, removed, total)
935 935
936 936 def _calculate_file_changes(self, old_diff_data, new_diff_data):
937 937
938 938 old_files = OrderedDict()
939 939 for diff_data in old_diff_data.parsed_diff:
940 940 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
941 941
942 942 added_files = []
943 943 modified_files = []
944 944 removed_files = []
945 945 for diff_data in new_diff_data.parsed_diff:
946 946 new_filename = diff_data['filename']
947 947 new_hash = md5_safe(diff_data['raw_diff'])
948 948
949 949 old_hash = old_files.get(new_filename)
950 950 if not old_hash:
951 951 # file is not present in old diff, means it's added
952 952 added_files.append(new_filename)
953 953 else:
954 954 if new_hash != old_hash:
955 955 modified_files.append(new_filename)
956 956 # now remove a file from old, since we have seen it already
957 957 del old_files[new_filename]
958 958
959 959 # removed files is when there are present in old, but not in NEW,
960 960 # since we remove old files that are present in new diff, left-overs
961 961 # if any should be the removed files
962 962 removed_files.extend(old_files.keys())
963 963
964 964 return FileChangeTuple(added_files, modified_files, removed_files)
965 965
966 966 def _render_update_message(self, changes, file_changes):
967 967 """
968 968 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
969 969 so it's always looking the same disregarding on which default
970 970 renderer system is using.
971 971
972 972 :param changes: changes named tuple
973 973 :param file_changes: file changes named tuple
974 974
975 975 """
976 976 new_status = ChangesetStatus.get_status_lbl(
977 977 ChangesetStatus.STATUS_UNDER_REVIEW)
978 978
979 979 changed_files = (
980 980 file_changes.added + file_changes.modified + file_changes.removed)
981 981
982 982 params = {
983 983 'under_review_label': new_status,
984 984 'added_commits': changes.added,
985 985 'removed_commits': changes.removed,
986 986 'changed_files': changed_files,
987 987 'added_files': file_changes.added,
988 988 'modified_files': file_changes.modified,
989 989 'removed_files': file_changes.removed,
990 990 }
991 991 renderer = RstTemplateRenderer()
992 992 return renderer.render('pull_request_update.mako', **params)
993 993
994 994 def edit(self, pull_request, title, description, description_renderer, user):
995 995 pull_request = self.__get_pull_request(pull_request)
996 996 old_data = pull_request.get_api_data(with_merge_state=False)
997 997 if pull_request.is_closed():
998 998 raise ValueError('This pull request is closed')
999 999 if title:
1000 1000 pull_request.title = title
1001 1001 pull_request.description = description
1002 1002 pull_request.updated_on = datetime.datetime.now()
1003 1003 pull_request.description_renderer = description_renderer
1004 1004 Session().add(pull_request)
1005 1005 self._log_audit_action(
1006 1006 'repo.pull_request.edit', {'old_data': old_data},
1007 1007 user, pull_request)
1008 1008
1009 1009 def update_reviewers(self, pull_request, reviewer_data, user):
1010 1010 """
1011 1011 Update the reviewers in the pull request
1012 1012
1013 1013 :param pull_request: the pr to update
1014 1014 :param reviewer_data: list of tuples
1015 1015 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1016 1016 """
1017 1017 pull_request = self.__get_pull_request(pull_request)
1018 1018 if pull_request.is_closed():
1019 1019 raise ValueError('This pull request is closed')
1020 1020
1021 1021 reviewers = {}
1022 1022 for user_id, reasons, mandatory, rules in reviewer_data:
1023 1023 if isinstance(user_id, (int, compat.string_types)):
1024 1024 user_id = self._get_user(user_id).user_id
1025 1025 reviewers[user_id] = {
1026 1026 'reasons': reasons, 'mandatory': mandatory}
1027 1027
1028 1028 reviewers_ids = set(reviewers.keys())
1029 1029 current_reviewers = PullRequestReviewers.query()\
1030 1030 .filter(PullRequestReviewers.pull_request ==
1031 1031 pull_request).all()
1032 1032 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1033 1033
1034 1034 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1035 1035 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1036 1036
1037 1037 log.debug("Adding %s reviewers", ids_to_add)
1038 1038 log.debug("Removing %s reviewers", ids_to_remove)
1039 1039 changed = False
1040 1040 added_audit_reviewers = []
1041 1041 removed_audit_reviewers = []
1042 1042
1043 1043 for uid in ids_to_add:
1044 1044 changed = True
1045 1045 _usr = self._get_user(uid)
1046 1046 reviewer = PullRequestReviewers()
1047 1047 reviewer.user = _usr
1048 1048 reviewer.pull_request = pull_request
1049 1049 reviewer.reasons = reviewers[uid]['reasons']
1050 1050 # NOTE(marcink): mandatory shouldn't be changed now
1051 1051 # reviewer.mandatory = reviewers[uid]['reasons']
1052 1052 Session().add(reviewer)
1053 1053 added_audit_reviewers.append(reviewer.get_dict())
1054 1054
1055 1055 for uid in ids_to_remove:
1056 1056 changed = True
1057 1057 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1058 1058 # that prevents and fixes cases that we added the same reviewer twice.
1059 1059 # this CAN happen due to the lack of DB checks
1060 1060 reviewers = PullRequestReviewers.query()\
1061 1061 .filter(PullRequestReviewers.user_id == uid,
1062 1062 PullRequestReviewers.pull_request == pull_request)\
1063 1063 .all()
1064 1064
1065 1065 for obj in reviewers:
1066 1066 added_audit_reviewers.append(obj.get_dict())
1067 1067 Session().delete(obj)
1068 1068
1069 1069 if changed:
1070 1070 Session().expire_all()
1071 1071 pull_request.updated_on = datetime.datetime.now()
1072 1072 Session().add(pull_request)
1073 1073
1074 1074 # finally store audit logs
1075 1075 for user_data in added_audit_reviewers:
1076 1076 self._log_audit_action(
1077 1077 'repo.pull_request.reviewer.add', {'data': user_data},
1078 1078 user, pull_request)
1079 1079 for user_data in removed_audit_reviewers:
1080 1080 self._log_audit_action(
1081 1081 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1082 1082 user, pull_request)
1083 1083
1084 1084 self.notify_reviewers(pull_request, ids_to_add)
1085 1085 return ids_to_add, ids_to_remove
1086 1086
1087 1087 def get_url(self, pull_request, request=None, permalink=False):
1088 1088 if not request:
1089 1089 request = get_current_request()
1090 1090
1091 1091 if permalink:
1092 1092 return request.route_url(
1093 1093 'pull_requests_global',
1094 1094 pull_request_id=pull_request.pull_request_id,)
1095 1095 else:
1096 1096 return request.route_url('pullrequest_show',
1097 1097 repo_name=safe_str(pull_request.target_repo.repo_name),
1098 1098 pull_request_id=pull_request.pull_request_id,)
1099 1099
1100 1100 def get_shadow_clone_url(self, pull_request, request=None):
1101 1101 """
1102 1102 Returns qualified url pointing to the shadow repository. If this pull
1103 1103 request is closed there is no shadow repository and ``None`` will be
1104 1104 returned.
1105 1105 """
1106 1106 if pull_request.is_closed():
1107 1107 return None
1108 1108 else:
1109 1109 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1110 1110 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1111 1111
1112 1112 def notify_reviewers(self, pull_request, reviewers_ids):
1113 1113 # notification to reviewers
1114 1114 if not reviewers_ids:
1115 1115 return
1116 1116
1117 1117 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1118 1118
1119 1119 pull_request_obj = pull_request
1120 1120 # get the current participants of this pull request
1121 1121 recipients = reviewers_ids
1122 1122 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1123 1123
1124 1124 pr_source_repo = pull_request_obj.source_repo
1125 1125 pr_target_repo = pull_request_obj.target_repo
1126 1126
1127 1127 pr_url = h.route_url('pullrequest_show',
1128 1128 repo_name=pr_target_repo.repo_name,
1129 1129 pull_request_id=pull_request_obj.pull_request_id,)
1130 1130
1131 1131 # set some variables for email notification
1132 1132 pr_target_repo_url = h.route_url(
1133 1133 'repo_summary', repo_name=pr_target_repo.repo_name)
1134 1134
1135 1135 pr_source_repo_url = h.route_url(
1136 1136 'repo_summary', repo_name=pr_source_repo.repo_name)
1137 1137
1138 1138 # pull request specifics
1139 1139 pull_request_commits = [
1140 1140 (x.raw_id, x.message)
1141 1141 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1142 1142
1143 1143 kwargs = {
1144 1144 'user': pull_request.author,
1145 1145 'pull_request': pull_request_obj,
1146 1146 'pull_request_commits': pull_request_commits,
1147 1147
1148 1148 'pull_request_target_repo': pr_target_repo,
1149 1149 'pull_request_target_repo_url': pr_target_repo_url,
1150 1150
1151 1151 'pull_request_source_repo': pr_source_repo,
1152 1152 'pull_request_source_repo_url': pr_source_repo_url,
1153 1153
1154 1154 'pull_request_url': pr_url,
1155 1155 }
1156 1156
1157 1157 # pre-generate the subject for notification itself
1158 1158 (subject,
1159 1159 _h, _e, # we don't care about those
1160 1160 body_plaintext) = EmailNotificationModel().render_email(
1161 1161 notification_type, **kwargs)
1162 1162
1163 1163 # create notification objects, and emails
1164 1164 NotificationModel().create(
1165 1165 created_by=pull_request.author,
1166 1166 notification_subject=subject,
1167 1167 notification_body=body_plaintext,
1168 1168 notification_type=notification_type,
1169 1169 recipients=recipients,
1170 1170 email_kwargs=kwargs,
1171 1171 )
1172 1172
1173 1173 def delete(self, pull_request, user):
1174 1174 pull_request = self.__get_pull_request(pull_request)
1175 1175 old_data = pull_request.get_api_data(with_merge_state=False)
1176 1176 self._cleanup_merge_workspace(pull_request)
1177 1177 self._log_audit_action(
1178 1178 'repo.pull_request.delete', {'old_data': old_data},
1179 1179 user, pull_request)
1180 1180 Session().delete(pull_request)
1181 1181
1182 1182 def close_pull_request(self, pull_request, user):
1183 1183 pull_request = self.__get_pull_request(pull_request)
1184 1184 self._cleanup_merge_workspace(pull_request)
1185 1185 pull_request.status = PullRequest.STATUS_CLOSED
1186 1186 pull_request.updated_on = datetime.datetime.now()
1187 1187 Session().add(pull_request)
1188 1188 self.trigger_pull_request_hook(
1189 1189 pull_request, pull_request.author, 'close')
1190 1190
1191 1191 pr_data = pull_request.get_api_data(with_merge_state=False)
1192 1192 self._log_audit_action(
1193 1193 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1194 1194
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close a pull request adding a closing comment and a final status vote.

        The final status is ``approved`` only if the calculated review status
        of the pull request is approved, otherwise ``rejected``.

        :param pull_request: the pull request instance to close
        :param user: user performing the close; casts the final status vote
        :param repo: repository the closing comment is attached to
        :param message: optional closing comment text; a default status
            change message is used when not given
        :param auth_user: authenticated user triggering the action
        :returns: tuple of (comment, status)
        """

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(
                pull_request, user, 'review_status_change',
                data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1251 1251
1252 1252 def merge_status(self, pull_request, translator=None,
1253 1253 force_shadow_repo_refresh=False):
1254 1254 _ = translator or get_current_request().translate
1255 1255
1256 1256 if not self._is_merge_enabled(pull_request):
1257 1257 return False, _('Server-side pull request merging is disabled.')
1258 1258 if pull_request.is_closed():
1259 1259 return False, _('This pull request is closed.')
1260 1260 merge_possible, msg = self._check_repo_requirements(
1261 1261 target=pull_request.target_repo, source=pull_request.source_repo,
1262 1262 translator=_)
1263 1263 if not merge_possible:
1264 1264 return merge_possible, msg
1265 1265
1266 1266 try:
1267 1267 resp = self._try_merge(
1268 1268 pull_request,
1269 1269 force_shadow_repo_refresh=force_shadow_repo_refresh)
1270 1270 log.debug("Merge response: %s", resp)
1271 1271 status = resp.possible, resp.merge_status_message
1272 1272 except NotImplementedError:
1273 1273 status = False, _('Pull request merging is not supported.')
1274 1274
1275 1275 return status
1276 1276
1277 1277 def _check_repo_requirements(self, target, source, translator):
1278 1278 """
1279 1279 Check if `target` and `source` have compatible requirements.
1280 1280
1281 1281 Currently this is just checking for largefiles.
1282 1282 """
1283 1283 _ = translator
1284 1284 target_has_largefiles = self._has_largefiles(target)
1285 1285 source_has_largefiles = self._has_largefiles(source)
1286 1286 merge_possible = True
1287 1287 message = u''
1288 1288
1289 1289 if target_has_largefiles != source_has_largefiles:
1290 1290 merge_possible = False
1291 1291 if source_has_largefiles:
1292 1292 message = _(
1293 1293 'Target repository large files support is disabled.')
1294 1294 else:
1295 1295 message = _(
1296 1296 'Source repository large files support is disabled.')
1297 1297
1298 1298 return merge_possible, message
1299 1299
1300 1300 def _has_largefiles(self, repo):
1301 1301 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1302 1302 'extensions', 'largefiles')
1303 1303 return largefiles_ui and largefiles_ui[0].active
1304 1304
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        Uses the cached merge state stored on the pull request when the
        source/target revs did not move; otherwise (or when
        ``force_shadow_repo_refresh`` is set) a dry-run merge in the shadow
        repository refreshes the state.

        :returns: a `MergeResponse` describing if/why a merge is possible
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            # the target ref vanished (e.g. branch deleted) -> cannot merge
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        # a locked target repository can never be merged into
        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state is still valid, rebuild the response from it
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    # NOTE(review): the separator '\n,' looks transposed
                    # (',\n'?) -- kept as-is, only affects message display
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })
            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1354 1354
1355 1355 def _refresh_reference(self, reference, vcs_repository):
1356 1356 if reference.type in self.UPDATABLE_REF_TYPES:
1357 1357 name_or_id = reference.name
1358 1358 else:
1359 1359 name_or_id = reference.commit_id
1360 1360
1361 1361 refreshed_commit = vcs_repository.get_commit(name_or_id)
1362 1362 refreshed_reference = Reference(
1363 1363 reference.type, reference.name, refreshed_commit.raw_id)
1364 1364 return refreshed_reference
1365 1365
1366 1366 def _needs_merge_state_refresh(self, pull_request, target_reference):
1367 1367 return not(
1368 1368 pull_request.revisions and
1369 1369 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1370 1370 target_reference.commit_id == pull_request._last_merge_target_rev)
1371 1371
1372 1372 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1373 1373 workspace_id = self._workspace_id(pull_request)
1374 1374 source_vcs = pull_request.source_repo.scm_instance()
1375 1375 repo_id = pull_request.target_repo.repo_id
1376 1376 use_rebase = self._use_rebase_for_merging(pull_request)
1377 1377 close_branch = self._close_branch_before_merging(pull_request)
1378 1378 merge_state = target_vcs.merge(
1379 1379 repo_id, workspace_id,
1380 1380 target_reference, source_vcs, pull_request.source_ref_parts,
1381 1381 dry_run=True, use_rebase=use_rebase,
1382 1382 close_branch=close_branch)
1383 1383
1384 1384 # Do not store the response if there was an unknown error.
1385 1385 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1386 1386 pull_request._last_merge_source_rev = \
1387 1387 pull_request.source_ref_parts.commit_id
1388 1388 pull_request._last_merge_target_rev = target_reference.commit_id
1389 1389 pull_request.last_merge_status = merge_state.failure_reason
1390 1390 pull_request.shadow_merge_ref = merge_state.merge_ref
1391 1391 Session().add(pull_request)
1392 1392 Session().commit()
1393 1393
1394 1394 return merge_state
1395 1395
1396 1396 def _workspace_id(self, pull_request):
1397 1397 workspace_id = 'pr-%s' % pull_request.pull_request_id
1398 1398 return workspace_id
1399 1399
1400 1400 def generate_repo_data(self, repo, commit_id=None, branch=None,
1401 1401 bookmark=None, translator=None):
1402 1402 from rhodecode.model.repo import RepoModel
1403 1403
1404 1404 all_refs, selected_ref = \
1405 1405 self._get_repo_pullrequest_sources(
1406 1406 repo.scm_instance(), commit_id=commit_id,
1407 1407 branch=branch, bookmark=bookmark, translator=translator)
1408 1408
1409 1409 refs_select2 = []
1410 1410 for element in all_refs:
1411 1411 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1412 1412 refs_select2.append({'text': element[1], 'children': children})
1413 1413
1414 1414 return {
1415 1415 'user': {
1416 1416 'user_id': repo.user.user_id,
1417 1417 'username': repo.user.username,
1418 1418 'firstname': repo.user.first_name,
1419 1419 'lastname': repo.user.last_name,
1420 1420 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1421 1421 },
1422 1422 'name': repo.repo_name,
1423 1423 'link': RepoModel().get_url(repo),
1424 1424 'description': h.chop_at_smart(repo.description_safe, '\n'),
1425 1425 'refs': {
1426 1426 'all_refs': all_refs,
1427 1427 'selected_ref': selected_ref,
1428 1428 'select2_refs': refs_select2
1429 1429 }
1430 1430 }
1431 1431
1432 1432 def generate_pullrequest_title(self, source, source_ref, target):
1433 1433 return u'{source}#{at_ref} to {target}'.format(
1434 1434 source=source,
1435 1435 at_ref=source_ref,
1436 1436 target=target,
1437 1437 )
1438 1438
1439 1439 def _cleanup_merge_workspace(self, pull_request):
1440 1440 # Merging related cleanup
1441 1441 repo_id = pull_request.target_repo.repo_id
1442 1442 target_scm = pull_request.target_repo.scm_instance()
1443 1443 workspace_id = self._workspace_id(pull_request)
1444 1444
1445 1445 try:
1446 1446 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1447 1447 except NotImplementedError:
1448 1448 pass
1449 1449
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :returns: tuple of (grouped refs, selected ref key or None)
        :raises CommitDoesNotExistError: when an explicitly requested ref
            is not found in the repository
        :raises EmptyRepositoryError: when the repository has no commits
            and no ref was requested
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # ref keys take the form "<type>:<name>:<commit_id>"
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # first ref matching the requested commit_id / branch /
                    # bookmark wins; later matches are ignored
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # an explicit ref was requested but not found -> hard error
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # otherwise fall back to the default branch, if present
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1515 1515
1516 1516 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1517 1517 hide_whitespace_changes, diff_context):
1518 1518
1519 1519 return self._get_diff_from_pr_or_version(
1520 1520 source_repo, source_ref_id, target_ref_id,
1521 1521 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1522 1522
1523 1523 def _get_diff_from_pr_or_version(
1524 1524 self, source_repo, source_ref_id, target_ref_id,
1525 1525 hide_whitespace_changes, diff_context):
1526 1526
1527 1527 target_commit = source_repo.get_commit(
1528 1528 commit_id=safe_str(target_ref_id))
1529 1529 source_commit = source_repo.get_commit(
1530 1530 commit_id=safe_str(source_ref_id))
1531 1531 if isinstance(source_repo, Repository):
1532 1532 vcs_repo = source_repo.scm_instance()
1533 1533 else:
1534 1534 vcs_repo = source_repo
1535 1535
1536 1536 # TODO: johbo: In the context of an update, we cannot reach
1537 1537 # the old commit anymore with our normal mechanisms. It needs
1538 1538 # some sort of special support in the vcs layer to avoid this
1539 1539 # workaround.
1540 1540 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1541 1541 vcs_repo.alias == 'git'):
1542 1542 source_commit.raw_id = safe_str(source_ref_id)
1543 1543
1544 1544 log.debug('calculating diff between '
1545 1545 'source_ref:%s and target_ref:%s for repo `%s`',
1546 1546 target_ref_id, source_ref_id,
1547 1547 safe_unicode(vcs_repo.path))
1548 1548
1549 1549 vcs_diff = vcs_repo.get_diff(
1550 1550 commit1=target_commit, commit2=source_commit,
1551 1551 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1552 1552 return vcs_diff
1553 1553
    def _is_merge_enabled(self, pull_request):
        # Server-side merging can be switched off per target repository via
        # the `rhodecode_pr_merge_enabled` general setting.
        return self._get_general_setting(
            pull_request, 'rhodecode_pr_merge_enabled')
1557 1557
1558 1558 def _use_rebase_for_merging(self, pull_request):
1559 1559 repo_type = pull_request.target_repo.repo_type
1560 1560 if repo_type == 'hg':
1561 1561 return self._get_general_setting(
1562 1562 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1563 1563 elif repo_type == 'git':
1564 1564 return self._get_general_setting(
1565 1565 pull_request, 'rhodecode_git_use_rebase_for_merging')
1566 1566
1567 1567 return False
1568 1568
1569 1569 def _close_branch_before_merging(self, pull_request):
1570 1570 repo_type = pull_request.target_repo.repo_type
1571 1571 if repo_type == 'hg':
1572 1572 return self._get_general_setting(
1573 1573 pull_request, 'rhodecode_hg_close_branch_before_merging')
1574 1574 elif repo_type == 'git':
1575 1575 return self._get_general_setting(
1576 1576 pull_request, 'rhodecode_git_close_branch_before_merging')
1577 1577
1578 1578 return False
1579 1579
1580 1580 def _get_general_setting(self, pull_request, settings_key, default=False):
1581 1581 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1582 1582 settings = settings_model.get_general_settings()
1583 1583 return settings.get(settings_key, default)
1584 1584
1585 1585 def _log_audit_action(self, action, action_data, user, pull_request):
1586 1586 audit_logger.store(
1587 1587 action=action,
1588 1588 action_data=action_data,
1589 1589 user=user,
1590 1590 repo=pull_request.target_repo)
1591 1591
    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, else we fallback to CE
        package functions.

        :return: tuple of ``(get_default_reviewers_data,
            validate_default_reviewers)`` callables
        """
        try:
            # EE-only package; present when Enterprise Edition is installed
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
        except ImportError:
            # CE fallback with the same function signatures
            from rhodecode.apps.repository.utils import get_default_reviewers_data
            from rhodecode.apps.repository.utils import validate_default_reviewers

        return get_default_reviewers_data, validate_default_reviewers
1606 1606
1607 1607
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions.

    Checks run in order: WIP title marker, merge permission, target-branch
    permission, review status, unresolved TODOs, and finally the actual
    merge simulation. With ``fail_early`` the first failing check aborts
    the run.
    """
    # keys under which each check stores its failure in ``error_details``
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        # calculated review status of the pull request, set by validate()
        self.review_status = None
        # result of the merge simulation (shadow repo), set by validate()
        self.merge_possible = None
        self.merge_msg = ''
        # flips to True as soon as any check pushes an error
        self.failed = None
        # list of [error_type, message] pairs in check order
        self.errors = []
        # per-check details keyed by the *_CHECK constants, insertion-ordered
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """
        Register a failed check on this instance.

        :param error_type: severity, e.g. 'error' or 'warning'
        :param message: translated, user-facing message
        :param error_key: one of the *_CHECK class constants
        :param details: check-specific payload kept for display/debugging
        """
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks for *pull_request* on behalf of *auth_user*.

        :param translator: translation function used for user-facing messages
        :param fail_early: return right after the first failing check
        :param force_shadow_repo_refresh: passed through to the merge
            simulation
        :return: populated :class:`MergeCheck` instance
        """
        _ = translator
        merge_check = cls()

        # title has WIP: marker - blocks accidental merges of drafts
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents from accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, auth_user)
        if not user_allowed_to_merge:
            # FIX: this used to log the copy-pasted review message
            # "approval is pending." which was misleading here.
            log.debug("MergeCheck: cannot merge, user not allowed to merge.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Return informational (non-blocking) merge conditions: the merge
        strategy in effect and whether the source branch will be
        closed/deleted after merge.
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
1754 1764
1755 1765
# Lightweight value objects describing commit-range changes (ChangeTuple)
# and file-level changes (FileChangeTuple) between PR versions.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', 'added common removed total')

FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', 'added modified removed')
@@ -1,966 +1,980 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
@pytest.mark.usefixtures('config_stub')
class TestPullRequestModel(object):
    """
    Unit tests for PullRequestModel with the vcs backend mocked out.

    The ``pull_request`` fixture patches merge/workspace/hook/notification
    entry points, so tests only assert on the calls the model makes.
    """

    @pytest.fixture()
    def pull_request(self, request, backend, pr_util):
        """
        A pull request combined with multiples patches.
        """
        BackendClass = get_backend(backend.alias)
        merge_resp = MergeResponse(
            False, False, None, MergeFailureReason.UNKNOWN,
            metadata={'exception': 'MockError'})
        self.merge_patcher = mock.patch.object(
            BackendClass, 'merge', return_value=merge_resp)
        self.workspace_remove_patcher = mock.patch.object(
            BackendClass, 'cleanup_merge_workspace')

        self.workspace_remove_mock = self.workspace_remove_patcher.start()
        self.merge_mock = self.merge_patcher.start()
        self.comment_patcher = mock.patch(
            'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
        self.comment_patcher.start()
        self.notification_patcher = mock.patch(
            'rhodecode.model.notification.NotificationModel.create')
        self.notification_patcher.start()
        self.helper_patcher = mock.patch(
            'rhodecode.lib.helpers.route_path')
        self.helper_patcher.start()

        self.hook_patcher = mock.patch.object(
            PullRequestModel, 'trigger_pull_request_hook')
        self.hook_mock = self.hook_patcher.start()

        self.invalidation_patcher = mock.patch(
            'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
        self.invalidation_mock = self.invalidation_patcher.start()

        self.pull_request = pr_util.create_pull_request(
            mergeable=True, name_suffix=u'Δ…Δ‡')
        self.source_commit = self.pull_request.source_ref_parts.commit_id
        self.target_commit = self.pull_request.target_ref_parts.commit_id
        self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
        self.repo_id = self.pull_request.target_repo.repo_id

        @request.addfinalizer
        def cleanup_pull_request():
            # every test must have triggered the 'create' hook exactly once
            calls = [mock.call(
                self.pull_request, self.pull_request.author, 'create')]
            self.hook_mock.assert_has_calls(calls)

            self.workspace_remove_patcher.stop()
            self.merge_patcher.stop()
            self.comment_patcher.stop()
            self.notification_patcher.stop()
            self.helper_patcher.stop()
            self.hook_patcher.stop()
            self.invalidation_patcher.stop()

        return self.pull_request

    def test_get_all(self, pull_request):
        prs = PullRequestModel().get_all(pull_request.target_repo)
        assert isinstance(prs, list)
        assert len(prs) == 1

    def test_count_all(self, pull_request):
        pr_count = PullRequestModel().count_all(pull_request.target_repo)
        assert pr_count == 1

    def test_get_awaiting_review(self, pull_request):
        prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
        assert isinstance(prs, list)
        assert len(prs) == 1

    def test_count_awaiting_review(self, pull_request):
        pr_count = PullRequestModel().count_awaiting_review(
            pull_request.target_repo)
        assert pr_count == 1

    def test_get_awaiting_my_review(self, pull_request):
        PullRequestModel().update_reviewers(
            pull_request, [(pull_request.author, ['author'], False, [])],
            pull_request.author)
        Session().commit()

        prs = PullRequestModel().get_awaiting_my_review(
            pull_request.target_repo, user_id=pull_request.author.user_id)
        assert isinstance(prs, list)
        assert len(prs) == 1

    def test_count_awaiting_my_review(self, pull_request):
        PullRequestModel().update_reviewers(
            pull_request, [(pull_request.author, ['author'], False, [])],
            pull_request.author)
        Session().commit()

        pr_count = PullRequestModel().count_awaiting_my_review(
            pull_request.target_repo, user_id=pull_request.author.user_id)
        assert pr_count == 1

    def test_delete_calls_cleanup_merge(self, pull_request):
        repo_id = pull_request.target_repo.repo_id
        PullRequestModel().delete(pull_request, pull_request.author)
        Session().commit()

        self.workspace_remove_mock.assert_called_once_with(
            repo_id, self.workspace_id)

    def test_close_calls_cleanup_and_hook(self, pull_request):
        PullRequestModel().close_pull_request(
            pull_request, pull_request.author)
        Session().commit()

        repo_id = pull_request.target_repo.repo_id

        self.workspace_remove_mock.assert_called_once_with(
            repo_id, self.workspace_id)
        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'close')

    def test_merge_status(self, pull_request):
        self.merge_mock.return_value = MergeResponse(
            True, False, None, MergeFailureReason.NONE)

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is True
        assert msg == 'This pull request can be automatically merged.'
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        assert pull_request._last_merge_source_rev == self.source_commit
        assert pull_request._last_merge_target_rev == self.target_commit
        assert pull_request.last_merge_status is MergeFailureReason.NONE

        # second call must be served from the cached merge state
        self.merge_mock.reset_mock()
        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is True
        assert msg == 'This pull request can be automatically merged.'
        assert self.merge_mock.called is False

    def test_merge_status_known_failure(self, pull_request):
        self.merge_mock.return_value = MergeResponse(
            False, False, None, MergeFailureReason.MERGE_FAILED,
            metadata={'unresolved_files': 'file1'})

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        assert pull_request._last_merge_source_rev == self.source_commit
        assert pull_request._last_merge_target_rev == self.target_commit
        assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED

        # cached answer: metadata (file list) is not preserved in the cache
        self.merge_mock.reset_mock()
        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == 'This pull request cannot be merged because of merge conflicts. '
        assert self.merge_mock.called is False

    def test_merge_status_unknown_failure(self, pull_request):
        self.merge_mock.return_value = MergeResponse(
            False, False, None, MergeFailureReason.UNKNOWN,
            metadata={'exception': 'MockError'})

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because of an unhandled exception. '
            'MockError')
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        # UNKNOWN failures are never cached, so the state stays unset
        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        self.merge_mock.reset_mock()
        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because of an unhandled exception. '
            'MockError')
        assert self.merge_mock.called is True

    def test_merge_status_when_target_is_locked(self, pull_request):
        pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because the target repository '
            'is locked by user:1.')

    def test_merge_status_requirements_check_target(self, pull_request):

        def has_largefiles(self, repo):
            return repo == pull_request.source_repo

        patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
        with patcher:
            status, msg = PullRequestModel().merge_status(pull_request)

        assert status is False
        assert msg == 'Target repository large files support is disabled.'

    def test_merge_status_requirements_check_source(self, pull_request):

        def has_largefiles(self, repo):
            return repo == pull_request.target_repo

        patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
        with patcher:
            status, msg = PullRequestModel().merge_status(pull_request)

        assert status is False
        assert msg == 'Source repository large files support is disabled.'

    def test_merge(self, pull_request, merge_extras):
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        self.merge_mock.return_value = MergeResponse(
            True, True, merge_ref, MergeFailureReason.NONE)

        merge_extras['repository'] = pull_request.target_repo.repo_name
        PullRequestModel().merge_repo(
            pull_request, pull_request.author, extras=merge_extras)
        Session().commit()

        # expected auto-generated merge commit message
        message = (
            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_unicode(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_unicode(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )
        self.invalidation_mock.assert_called_once_with(
            pull_request.target_repo.repo_name)

        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'merge')

        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'

    def test_merge_with_status_lock(self, pull_request, merge_extras):
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        self.merge_mock.return_value = MergeResponse(
            True, True, merge_ref, MergeFailureReason.NONE)

        merge_extras['repository'] = pull_request.target_repo.repo_name

        # merging must also work while the PR is in the UPDATING state
        with pull_request.set_state(PullRequest.STATE_UPDATING):
            assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
            PullRequestModel().merge_repo(
                pull_request, pull_request.author, extras=merge_extras)
            Session().commit()

        assert pull_request.pull_request_state == PullRequest.STATE_CREATED

        message = (
            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_unicode(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_unicode(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )
        self.invalidation_mock.assert_called_once_with(
            pull_request.target_repo.repo_name)

        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'merge')

        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'

    def test_merge_failed(self, pull_request, merge_extras):
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        self.merge_mock.return_value = MergeResponse(
            False, False, merge_ref, MergeFailureReason.MERGE_FAILED)

        merge_extras['repository'] = pull_request.target_repo.repo_name
        PullRequestModel().merge_repo(
            pull_request, pull_request.author, extras=merge_extras)
        Session().commit()

        message = (
            u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_unicode(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_unicode(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )

        # failed merge: no cache invalidation, no merge_rev recorded
        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert self.invalidation_mock.called is False
        assert pull_request.merge_rev is None

    def test_get_commit_ids(self, pull_request):
        # The PR has been not merged yet, so expect an exception
        with pytest.raises(ValueError):
            PullRequestModel()._get_commit_ids(pull_request)

        # Merge revision is in the revisions list
        pull_request.merge_rev = pull_request.revisions[0]
        commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        assert commit_ids == pull_request.revisions

        # Merge revision is not in the revisions list
        pull_request.merge_rev = 'f000' * 10
        commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        assert commit_ids == pull_request.revisions + [pull_request.merge_rev]

    def test_get_diff_from_pr_version(self, pull_request):
        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id
        diff = PullRequestModel()._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=False, diff_context=6)
        assert 'file_1' in diff.raw

    def test_generate_title_returns_unicode(self):
        title = PullRequestModel().generate_pullrequest_title(
            source='source-dummy',
            source_ref='source-ref-dummy',
            target='target-dummy',
        )
        assert type(title) == unicode

    # NOTE: removed a duplicated ('[wip] hello', True) entry that used to
    # run the same case twice.
    @pytest.mark.parametrize('title, has_wip', [
        ('hello', False),
        ('hello wip', False),
        ('hello wip: xxx', False),
        ('[wip] hello', True),
        ('wip: hello', True),
        ('wip hello', True),
    ])
    def test_wip_title_marker(self, pull_request, title, has_wip):
        pull_request.title = title
        assert pull_request.work_in_progress == has_wip
449
436 450
@pytest.mark.usefixtures('config_stub')
class TestIntegrationMerge(object):
    """
    Merge tests that exercise the real merge path (no backend mocks),
    asserting on push hooks, hook rejection, and repo locking.
    """

    @pytest.mark.parametrize('extra_config', (
        {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
    ))
    def test_merge_triggers_push_hooks(
            self, pr_util, user_admin, capture_rcextensions, merge_extras,
            extra_config):
        # a successful merge must fire the pre-push and push rcextensions

        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
            merge_state = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert merge_state.executed
        assert '_pre_push_hook' in capture_rcextensions
        assert '_push_hook' in capture_rcextensions

    def test_merge_can_be_rejected_by_pre_push_hook(
            self, pr_util, user_admin, capture_rcextensions, merge_extras):
        # a raising PRE_PUSH_HOOK must abort the merge before any push
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
            pre_pull.side_effect = RepositoryError("Disallow push!")
            merge_status = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)
            Session().commit()

        assert not merge_status.executed
        assert 'pre_push' not in capture_rcextensions
        assert 'post_push' not in capture_rcextensions

    def test_merge_fails_if_target_is_locked(
            self, pr_util, user_regular, merge_extras):
        # lock held by a different user than the one merging
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
        pull_request.target_repo.locked = locked_by
        # TODO: johbo: Check if this can work based on the database, currently
        # all data is pre-computed, that's why just updating the DB is not
        # enough.
        merge_extras['locked_by'] = locked_by
        merge_extras['repository'] = pull_request.target_repo.repo_name
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        Session().commit()
        merge_status = PullRequestModel().merge_repo(
            pull_request, user_regular, extras=merge_extras)
        Session().commit()

        assert not merge_status.executed
497 511
498 512
@pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
    (False, 1, 0),
    (True, 0, 1),
])
def test_outdated_comments(
        pr_util, use_outdated, inlines_count, outdated_count, config_stub):
    # An inline comment on a file not touched by the updated diff either
    # stays visible or moves to "outdated", depending on the use_outdated
    # setting under test.
    pull_request = pr_util.create_pull_request()
    pr_util.create_inline_comment(file_path='not_in_updated_diff')

    with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
        pr_util.add_one_commit()
        assert_inline_comments(
            pull_request, visible=inlines_count, outdated=outdated_count)
        outdated_comment_mock.assert_called_with(pull_request)
513 527
514 528
# One case per MergeFailureReason; the metadata dict below provides every
# placeholder any of the message templates can interpolate.
@pytest.mark.parametrize('mr_type, expected_msg', [
    (MergeFailureReason.NONE,
     'This pull request can be automatically merged.'),
    (MergeFailureReason.UNKNOWN,
     'This pull request cannot be merged because of an unhandled exception. CRASH'),
    (MergeFailureReason.MERGE_FAILED,
     'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
    (MergeFailureReason.PUSH_FAILED,
     'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
    (MergeFailureReason.TARGET_IS_NOT_HEAD,
     'This pull request cannot be merged because the target `ref_name` is not a head.'),
    (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
     'This pull request cannot be merged because the source contains more branches than the target.'),
    (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
     'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
    (MergeFailureReason.TARGET_IS_LOCKED,
     'This pull request cannot be merged because the target repository is locked by user:123.'),
    (MergeFailureReason.MISSING_TARGET_REF,
     'This pull request cannot be merged because the target reference `ref_name` is missing.'),
    (MergeFailureReason.MISSING_SOURCE_REF,
     'This pull request cannot be merged because the source reference `ref_name` is missing.'),
    (MergeFailureReason.SUBREPO_MERGE_FAILED,
     'This pull request cannot be merged because of conflicts related to sub repositories.'),

])
def test_merge_response_message(mr_type, expected_msg):
    # Verify the user-facing message rendered for each failure reason.
    merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
    metadata = {
        'unresolved_files': 'CONFLICT_FILE',
        'exception': "CRASH",
        'target': 'some-repo',
        'merge_commit': 'merge_commit',
        'target_ref': merge_ref,
        'source_ref': merge_ref,
        'heads': ','.join(['a', 'b', 'c']),
        'locked_by': 'user:123'
    }

    merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
    assert merge_response.merge_status_message == expected_msg
555 569
556 570
@pytest.fixture()
def merge_extras(user_regular):
    """
    Fake hook ``extras`` dict describing the vcs operation of a merge push.
    """
    # Values mimic what the middleware would inject for a real push.
    return {
        'ip': '127.0.0.1',
        'username': user_regular.username,
        'user_id': user_regular.user_id,
        'action': 'push',
        'repository': 'fake_target_repo_name',
        'scm': 'git',
        'config': 'fake_config_ini_path',
        'repo_store': '',
        'make_lock': None,
        'locked_by': [None, None, None],
        'server_url': 'http://test.example.com:5000',
        'hooks': ['push', 'pull'],
        'is_shadow_repo': False,
    }
578 592
579 593
@pytest.mark.usefixtures('config_stub')
class TestUpdateCommentHandling(object):
    """Tests how inline comments are kept or flagged outdated on PR updates.

    All tests run with the ``rhodecode_use_outdated_comments`` feature
    switched on via the class-scoped autouse fixture below.
    """

    @pytest.fixture(autouse=True, scope='class')
    def enable_outdated_comments(self, request, baseapp):
        # Force the outdated-comments feature on for every test in the class.
        config_patch = mock.patch.dict(
            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
        config_patch.start()

        @request.addfinalizer
        def cleanup():
            config_patch.stop()

    def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
        """A comment on a file untouched by the update stays visible."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        # Commit 'c' only adds file_c, so file_b's diff is unchanged.
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    def test_comment_stays_unflagged_on_change_above(self, pr_util):
        """A change above the commented line shifts the comment's line number."""
        original_content = ''.join(
            ['line {}\n'.format(x) for x in range(1, 11)])
        # One line inserted at the very top pushes everything down by one.
        updated_content = 'new_line_at_top\n' + original_content
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])

        with outdated_comments_patcher():
            comment = pr_util.create_inline_comment(
                line_no=u'n8', file_path='file_b')
            pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)
        # Comment followed the insertion: new-side line 8 became line 9.
        assert comment.line_no == u'n9'

    def test_comment_stays_unflagged_on_change_below(self, pr_util):
        """A change below the commented line leaves the comment untouched."""
        original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
        updated_content = original_content + 'new_line_at_end\n'
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
    def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
        """A change inside the comment's context window flags it outdated."""
        base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
        change_lines = list(base_lines)
        change_lines.insert(6, 'line 6a added\n')

        # Changes on the last line of sight
        update_lines = list(change_lines)
        update_lines[0] = 'line 1 changed\n'
        update_lines[-1] = 'line 12 changed\n'

        def file_b(lines):
            return FileNode('file_b', ''.join(lines))

        commits = [
            {'message': 'a', 'added': [file_b(base_lines)]},
            {'message': 'b', 'changed': [file_b(change_lines)]},
            {'message': 'c', 'changed': [file_b(update_lines)]},
        ]

        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)

    @pytest.mark.parametrize("change, content", [
        ('changed', 'changed\n'),
        ('removed', ''),
    ], ids=['changed', 'removed'])
    def test_comment_flagged_on_change(self, pr_util, change, content):
        """Changing or removing the commented file flags the comment outdated."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', change: [FileNode('file_b', content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
        assert_inline_comments(pull_request, visible=0, outdated=1)
686 700
687 701
@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):
    """Tests the added/modified/removed file summary after PR updates."""

    def test_no_changes_on_unchanged_diff(self, pr_util):
        """A new commit adding an unrelated file is reported as 'added' only."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode('file_c', 'test_content c\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR adding new file file_c
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=['file_c'],
            modified=[],
            removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        """A modification that is later reverted ends up reported as no change."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode('file_b', 'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode('file_b', 'test_content b\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR modifying file file_b
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_b'],
            removed=[])

        # move the head again to d, which rollbacks change,
        # meaning we should indicate no changes
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        """Changing every file in the PR lists all of them as modified."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, changing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        """Removing every file in the PR lists all of them as removed."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, removing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=['file_a', 'file_b', 'file_c'])
796 810
797 811
def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    """Updating commits after a source change stores one version snapshot."""
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()

    pr_model = PullRequestModel()
    pr_model.update_commits(pull_request)

    # The previous state must have been snapshotted as a single version.
    versions = pr_model.get_versions(pull_request)
    assert len(versions) == 1
807 821
808 822
def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    """No version snapshot is created when the source did not change."""
    pull_request = pr_util.create_pull_request()

    pr_model = PullRequestModel()
    pr_model.update_commits(pull_request)

    # Nothing changed, so no version entry may exist.
    versions = pr_model.get_versions(pull_request)
    assert len(versions) == 0
816 830
817 831
def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    """Existing comments get linked to the version created by the update."""
    pr_model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    pr_model.update_commits(pull_request)

    # The comment must now reference the single version created above.
    created_version = pr_model.get_versions(pull_request)[0]
    assert comment.pull_request_version == created_version
828 842
829 843
def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    """An automatic status comment describing the update is appended."""
    pr_model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    pr_model.update_commits(pull_request)

    # The update must append an auto-generated RST comment summarizing
    # the changed commits and files.
    expected_message = textwrap.dedent(
        """\
      Pull request updated. Auto status change to |under_review|

      .. role:: added
      .. role:: removed
      .. parsed-literal::

        Changed commits:
          * :added:`1 added`
          * :removed:`0 removed`

        Changed files:
          * `A file_2 <#a_c--92ed3b5f07b4>`_

      .. |under_review| replace:: *"Under Review"*"""
    )
    # The newest comment (by modification time) is the update note.
    newest_comment = max(pull_request.comments, key=lambda c: c.modified_at)
    assert newest_comment.text == expected_message
860 874
861 875
def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    """A version snapshot copies all relevant attributes of the PR."""
    pull_request = pr_util.create_pull_request()

    # Use non-default values so the copy is distinguishable from defaults.
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # Capture the automatically maintained timestamps for later comparison.
    original_created_on = pull_request.created_on
    original_updated_on = pull_request.updated_on

    # Create a new version of the pull request
    version = PullRequestModel()._create_version_from_snapshot(pull_request)

    # Attributes copied straight from the creation parameters.
    params = pr_util.create_parameters
    assert version.title == params['title']
    assert version.description == params['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # versions get updated created_on
    assert version.created_on != original_created_on

    assert version.updated_on == original_updated_on
    assert version.user_id == pull_request.user_id
    assert version.revisions == params['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == params['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == params['target_ref']
    assert version._last_merge_source_rev == pull_request._last_merge_source_rev
    assert version._last_merge_target_rev == pull_request._last_merge_target_rev
    assert version.last_merge_status == pull_request.last_merge_status
    assert version.merge_rev == pull_request.merge_rev
    assert version.pull_request == pull_request
899 913
900 914
def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    """Linking comments to a version must leave already-linked ones alone."""
    first_version = pr_util.create_version_of_pull_request()
    linked_comment = pr_util.create_comment(linked_to=first_version)
    unlinked_comment = pr_util.create_comment()
    second_version = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(second_version)
    Session().commit()

    # Only the previously unlinked comment moves to the new version ...
    assert (
        unlinked_comment.pull_request_version_id ==
        second_version.pull_request_version_id)
    # ... the already-linked comment keeps its original version ...
    assert (
        linked_comment.pull_request_version_id ==
        first_version.pull_request_version_id)
    # ... so the two comments end up on different versions.
    assert (
        unlinked_comment.pull_request_version_id !=
        linked_comment.pull_request_version_id)
920 934
921 935
def test_calculate_commits():
    """Commit-id diffing classifies ids as added, common, removed, total."""
    previous_ids = [1, 2, 3]
    current_ids = [1, 3, 4, 5]

    change = PullRequestModel()._calculate_commit_id_changes(
        previous_ids, current_ids)

    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]
930 944
931 945
def assert_inline_comments(pull_request, visible=None, outdated=None):
    """Assert visible/outdated inline comment counts; ``None`` skips a check."""
    comments_model = CommentsModel()
    repo_id = pull_request.target_repo.repo_id

    if visible is not None:
        inline_comments = comments_model.get_inline_comments(
            repo_id, pull_request=pull_request)
        visible_count = comments_model.get_inline_comments_count(
            inline_comments)
        assert visible_count == visible

    if outdated is not None:
        outdated_comments = comments_model.get_outdated_comments(
            repo_id, pull_request)
        assert len(outdated_comments) == outdated
943 957
944 958
def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    """Assert the added/modified/removed file lists of a PR update."""
    pr_versions = PullRequestModel().get_versions(pull_request)
    # always use first version, ie original PR to calculate changes
    pull_request_version = pr_versions[0]
    old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
        pull_request, pull_request_version)
    file_changes = PullRequestModel()._calculate_file_changes(
        old_diff_data, new_diff_data)

    # Compare every category against the computed changes.
    expectations = (
        ('added', added, file_changes.added),
        ('modified', modified, file_changes.modified),
        ('removed', removed, file_changes.removed),
    )
    for label, expected, actual in expectations:
        assert expected == actual, \
            'expected %s:%s vs value:%s' % (label, expected, actual)
961 975
962 976
def outdated_comments_patcher(use_outdated=True):
    """Return a patcher forcing ``CommentsModel.use_outdated_comments``."""
    patcher = mock.patch.object(
        CommentsModel, 'use_outdated_comments',
        return_value=use_outdated)
    return patcher
General Comments 0
You need to be logged in to leave comments. Login now